diff --git a/Dockerfile b/Dockerfile index b4ceabc..85be1df 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,6 +9,9 @@ LABEL description="Signal Intelligence Platform for SDR monitoring" # Set working directory WORKDIR /app +# Pre-accept tshark non-root capture prompt for non-interactive install +RUN echo 'wireshark-common wireshark-common/install-setuid boolean true' | debconf-set-selections + # Install system dependencies for SDR tools RUN apt-get update && apt-get install -y --no-install-recommends \ # RTL-SDR tools @@ -54,11 +57,39 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ airspy \ limesuite \ hackrf \ + # GSM Intelligence (tshark for packet parsing) + tshark \ # Utilities curl \ procps \ && rm -rf /var/lib/apt/lists/* +# GSM Intelligence: gr-gsm (grgsm_scanner, grgsm_livemon) +# Install from apt if available, otherwise build from source +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + gnuradio gr-osmosdr gr-gsm 2>/dev/null \ + || ( \ + apt-get install -y --no-install-recommends \ + gnuradio gnuradio-dev gr-osmosdr \ + git cmake libboost-all-dev libcppunit-dev swig \ + doxygen liblog4cpp5-dev python3-scipy python3-numpy \ + libvolk-dev libfftw3-dev build-essential \ + && cd /tmp \ + && git clone --depth 1 https://github.com/bkerler/gr-gsm.git \ + && cd gr-gsm \ + && mkdir build && cd build \ + && cmake .. 
\ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + && rm -rf /tmp/gr-gsm \ + && apt-get remove -y gnuradio-dev libcppunit-dev swig doxygen \ + liblog4cpp5-dev libvolk-dev build-essential git cmake \ + && apt-get autoremove -y \ + ) \ + && rm -rf /var/lib/apt/lists/* + # Build dump1090-fa and acarsdec from source (packages not available in slim repos) RUN apt-get update && apt-get install -y --no-install-recommends \ build-essential \ diff --git a/app.py b/app.py index 829c104..000c061 100644 --- a/app.py +++ b/app.py @@ -39,6 +39,7 @@ from utils.constants import ( MAX_VESSEL_AGE_SECONDS, MAX_DSC_MESSAGE_AGE_SECONDS, MAX_DEAUTH_ALERTS_AGE_SECONDS, + MAX_GSM_AGE_SECONDS, QUEUE_MAX_SIZE, ) import logging @@ -191,6 +192,16 @@ deauth_detector = None deauth_detector_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE) deauth_detector_lock = threading.Lock() +# GSM Spy +gsm_spy_scanner_running = False # Flag: scanner thread active +gsm_spy_livemon_process = None # For grgsm_livemon process +gsm_spy_monitor_process = None # For tshark monitoring process +gsm_spy_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE) +gsm_spy_lock = threading.Lock() +gsm_spy_active_device = None +gsm_spy_selected_arfcn = None +gsm_spy_region = 'Americas' # Default band + # ============================================ # GLOBAL STATE DICTIONARIES # ============================================ @@ -223,6 +234,16 @@ dsc_messages = DataStore(max_age_seconds=MAX_DSC_MESSAGE_AGE_SECONDS, name='dsc_ # Deauth alerts - using DataStore for automatic cleanup deauth_alerts = DataStore(max_age_seconds=MAX_DEAUTH_ALERTS_AGE_SECONDS, name='deauth_alerts') +# GSM Spy data stores +gsm_spy_towers = DataStore( + max_age_seconds=MAX_GSM_AGE_SECONDS, + name='gsm_spy_towers' +) +gsm_spy_devices = DataStore( + max_age_seconds=MAX_GSM_AGE_SECONDS, + name='gsm_spy_devices' +) + # Satellite state satellite_passes = [] # Predicted satellite passes (not auto-cleaned, calculated) @@ -235,6 +256,8 @@ 
cleanup_manager.register(adsb_aircraft) cleanup_manager.register(ais_vessels) cleanup_manager.register(dsc_messages) cleanup_manager.register(deauth_alerts) +cleanup_manager.register(gsm_spy_towers) +cleanup_manager.register(gsm_spy_devices) # ============================================ # SDR DEVICE REGISTRY @@ -296,6 +319,10 @@ def require_login(): if request.path.startswith('/listening/audio/'): return None + # Allow WebSocket upgrade requests (page load already required auth) + if request.path.startswith('/ws/'): + return None + # Controller API endpoints use API key auth, not session auth # Allow agent push/pull endpoints without session login if request.path.startswith('/controller/'): @@ -666,6 +693,8 @@ def kill_all() -> Response: global current_process, sensor_process, wifi_process, adsb_process, ais_process, acars_process global aprs_process, aprs_rtl_process, dsc_process, dsc_rtl_process, bt_process global dmr_process, dmr_rtl_process + global gsm_spy_livemon_process, gsm_spy_monitor_process + global gsm_spy_scanner_running, gsm_spy_active_device, gsm_spy_selected_arfcn, gsm_spy_region # Import adsb and ais modules to reset their state from routes import adsb as adsb_module @@ -679,6 +708,7 @@ def kill_all() -> Response: 'dump1090', 'acarsdec', 'direwolf', 'AIS-catcher', 'hcitool', 'bluetoothctl', 'satdump', 'dsd', 'rtl_tcp', 'rtl_power', 'rtlamr', 'ffmpeg', + 'grgsm_scanner', 'grgsm_livemon', 'tshark' ] for proc in processes_to_kill: @@ -743,10 +773,33 @@ def kill_all() -> Response: # Reset Bluetooth v2 scanner try: reset_bluetooth_scanner() - killed.append('bluetooth_scanner') + killed.append('bluetooth') except Exception: pass + # Reset GSM Spy state + with gsm_spy_lock: + gsm_spy_scanner_running = False + gsm_spy_active_device = None + gsm_spy_selected_arfcn = None + gsm_spy_region = 'Americas' + + if gsm_spy_livemon_process: + try: + if safe_terminate(gsm_spy_livemon_process): + killed.append('grgsm_livemon') + except Exception: + pass + 
gsm_spy_livemon_process = None + + if gsm_spy_monitor_process: + try: + if safe_terminate(gsm_spy_monitor_process): + killed.append('tshark') + except Exception: + pass + gsm_spy_monitor_process = None + # Clear SDR device registry with sdr_device_registry_lock: sdr_device_registry.clear() @@ -836,6 +889,26 @@ def main() -> None: from utils.database import init_db init_db() + # Register database cleanup functions + from utils.database import ( + cleanup_old_gsm_signals, + cleanup_old_gsm_tmsi_log, + cleanup_old_gsm_velocity_log, + cleanup_old_signal_history, + cleanup_old_timeline_entries, + cleanup_old_dsc_alerts, + cleanup_old_payloads + ) + # GSM cleanups: signals (60 days), TMSI log (24 hours), velocity (1 hour) + # Interval multiplier: cleanup every N cycles (60s interval = 1 cleanup per hour at multiplier 60) + cleanup_manager.register_db_cleanup(cleanup_old_gsm_tmsi_log, interval_multiplier=60) # Every hour + cleanup_manager.register_db_cleanup(cleanup_old_gsm_velocity_log, interval_multiplier=60) # Every hour + cleanup_manager.register_db_cleanup(cleanup_old_gsm_signals, interval_multiplier=1440) # Every 24 hours + cleanup_manager.register_db_cleanup(cleanup_old_signal_history, interval_multiplier=1440) # Every 24 hours + cleanup_manager.register_db_cleanup(cleanup_old_timeline_entries, interval_multiplier=1440) # Every 24 hours + cleanup_manager.register_db_cleanup(cleanup_old_dsc_alerts, interval_multiplier=1440) # Every 24 hours + cleanup_manager.register_db_cleanup(cleanup_old_payloads, interval_multiplier=1440) # Every 24 hours + # Start automatic cleanup of stale data entries cleanup_manager.start() @@ -875,6 +948,14 @@ def main() -> None: except ImportError as e: print(f"KiwiSDR audio proxy disabled: {e}") + # Initialize WebSocket for waterfall streaming + try: + from routes.waterfall_websocket import init_waterfall_websocket + init_waterfall_websocket(app) + print("WebSocket waterfall streaming enabled") + except ImportError as e: + 
print(f"WebSocket waterfall disabled: {e}") + print(f"Open http://localhost:{args.port} in your browser") print() print("Press Ctrl+C to stop") diff --git a/config.py b/config.py index 2bf9427..80b0a2d 100644 --- a/config.py +++ b/config.py @@ -228,6 +228,12 @@ ALERT_WEBHOOK_TIMEOUT = _get_env_int('ALERT_WEBHOOK_TIMEOUT', 5) ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin') ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin') +# GSM Spy settings +GSM_OPENCELLID_API_KEY = _get_env('GSM_OPENCELLID_API_KEY', '') +GSM_OPENCELLID_API_URL = _get_env('GSM_OPENCELLID_API_URL', 'https://opencellid.org/cell/get') +GSM_API_DAILY_LIMIT = _get_env_int('GSM_API_DAILY_LIMIT', 1000) +GSM_TA_METERS_PER_UNIT = _get_env_int('GSM_TA_METERS_PER_UNIT', 554) + def configure_logging() -> None: """Configure application logging.""" logging.basicConfig( diff --git a/routes/__init__.py b/routes/__init__.py index 0cb7e1a..f6a81ce 100644 --- a/routes/__init__.py +++ b/routes/__init__.py @@ -32,6 +32,7 @@ def register_blueprints(app): from .websdr import websdr_bp from .alerts import alerts_bp from .recordings import recordings_bp + from .gsm_spy import gsm_spy_bp app.register_blueprint(pager_bp) app.register_blueprint(sensor_bp) @@ -63,6 +64,7 @@ def register_blueprints(app): app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR app.register_blueprint(alerts_bp) # Cross-mode alerts app.register_blueprint(recordings_bp) # Session recordings + app.register_blueprint(gsm_spy_bp) # GSM cellular intelligence # Initialize TSCM state with queue and lock from app import app as app_module diff --git a/routes/audio_websocket.py b/routes/audio_websocket.py index 6d70d0b..4e2acf5 100644 --- a/routes/audio_websocket.py +++ b/routes/audio_websocket.py @@ -1,10 +1,11 @@ """WebSocket-based audio streaming for SDR.""" +import json +import shutil +import socket import subprocess import threading import time -import shutil -import json from flask import Flask # Try to import flask-sock @@ -251,4 +252,19 @@ 
def init_audio_websocket(app: Flask): finally: with process_lock: kill_audio_processes() + # Complete WebSocket close handshake, then shut down the + # raw socket so Werkzeug cannot write its HTTP 200 response + # on top of the WebSocket stream. + try: + ws.close() + except Exception: + pass + try: + ws.sock.shutdown(socket.SHUT_RDWR) + except Exception: + pass + try: + ws.sock.close() + except Exception: + pass logger.info("WebSocket audio client disconnected") diff --git a/routes/gsm_spy.py b/routes/gsm_spy.py new file mode 100644 index 0000000..b1b5607 --- /dev/null +++ b/routes/gsm_spy.py @@ -0,0 +1,1730 @@ +"""GSM Spy route handlers for cellular tower and device tracking.""" + +from __future__ import annotations + +import json +import logging +import os +import queue +import re +import shutil +import subprocess +import threading +import time +from datetime import datetime, timedelta +from typing import Any + +import requests +from flask import Blueprint, Response, jsonify, render_template, request + +import app as app_module +import config +from config import SHARED_OBSERVER_LOCATION_ENABLED +from utils.database import get_db +from utils.process import register_process, safe_terminate, unregister_process +from utils.sse import format_sse +from utils.validation import validate_device_index + +from utils.logging import get_logger +logger = get_logger('intercept.gsm_spy') +logger.setLevel(logging.DEBUG) # GSM Spy needs verbose logging for diagnostics + +gsm_spy_bp = Blueprint('gsm_spy', __name__, url_prefix='/gsm_spy') + +# Regional band configurations (G-01) +REGIONAL_BANDS = { + 'Americas': { + 'GSM850': {'start': 869e6, 'end': 894e6, 'arfcn_start': 128, 'arfcn_end': 251}, + 'PCS1900': {'start': 1930e6, 'end': 1990e6, 'arfcn_start': 512, 'arfcn_end': 810} + }, + 'Europe': { + 'GSM800': {'start': 832e6, 'end': 862e6, 'arfcn_start': 438, 'arfcn_end': 511}, # E-GSM800 downlink + 'GSM850': {'start': 869e6, 'end': 894e6, 'arfcn_start': 128, 'arfcn_end': 251}, # Also 
used in some EU countries + 'EGSM900': {'start': 925e6, 'end': 960e6, 'arfcn_start': 0, 'arfcn_end': 124}, + 'DCS1800': {'start': 1805e6, 'end': 1880e6, 'arfcn_start': 512, 'arfcn_end': 885} + }, + 'Asia': { + 'EGSM900': {'start': 925e6, 'end': 960e6, 'arfcn_start': 0, 'arfcn_end': 124}, + 'DCS1800': {'start': 1805e6, 'end': 1880e6, 'arfcn_start': 512, 'arfcn_end': 885} + } +} + +# Module state tracking +gsm_using_service = False +gsm_connected = False +gsm_towers_found = 0 +gsm_devices_tracked = 0 + +# Geocoding worker state +_geocoding_worker_thread = None + + +# ============================================ +# API Usage Tracking Helper Functions +# ============================================ + +def get_api_usage_today(): + """Get OpenCellID API usage count for today.""" + from utils.database import get_setting + today = datetime.now().date().isoformat() + usage_date = get_setting('gsm.opencellid.usage_date', '') + + # Reset counter if new day + if usage_date != today: + from utils.database import set_setting + set_setting('gsm.opencellid.usage_date', today) + set_setting('gsm.opencellid.usage_count', 0) + return 0 + + return get_setting('gsm.opencellid.usage_count', 0) + + +def increment_api_usage(): + """Increment OpenCellID API usage counter.""" + from utils.database import set_setting + current = get_api_usage_today() + set_setting('gsm.opencellid.usage_count', current + 1) + return current + 1 + + +def can_use_api(): + """Check if we can make an API call within daily limit.""" + current_usage = get_api_usage_today() + return current_usage < config.GSM_API_DAILY_LIMIT + + +# ============================================ +# Background Geocoding Worker +# ============================================ + +def start_geocoding_worker(): + """Start background thread for async geocoding.""" + global _geocoding_worker_thread + if _geocoding_worker_thread is None or not _geocoding_worker_thread.is_alive(): + _geocoding_worker_thread = threading.Thread( + 
target=geocoding_worker, + daemon=True, + name='gsm-geocoding-worker' + ) + _geocoding_worker_thread.start() + logger.info("Started geocoding worker thread") + + +def geocoding_worker(): + """Worker thread processes pending geocoding requests.""" + from utils.gsm_geocoding import lookup_cell_from_api, get_geocoding_queue + + geocoding_queue = get_geocoding_queue() + + while True: + try: + # Wait for pending tower with timeout + tower_data = geocoding_queue.get(timeout=5) + + # Check rate limit + if not can_use_api(): + current_usage = get_api_usage_today() + logger.warning(f"OpenCellID API rate limit reached ({current_usage}/{config.GSM_API_DAILY_LIMIT})") + geocoding_queue.task_done() + continue + + # Call API + mcc = tower_data.get('mcc') + mnc = tower_data.get('mnc') + lac = tower_data.get('lac') + cid = tower_data.get('cid') + + logger.debug(f"Geocoding tower via API: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}") + + coords = lookup_cell_from_api(mcc, mnc, lac, cid) + + if coords: + # Update tower data with coordinates + tower_data['lat'] = coords['lat'] + tower_data['lon'] = coords['lon'] + tower_data['source'] = 'api' + tower_data['status'] = 'resolved' + tower_data['type'] = 'tower_update' + + # Add optional fields if available + if coords.get('azimuth') is not None: + tower_data['azimuth'] = coords['azimuth'] + if coords.get('range_meters') is not None: + tower_data['range_meters'] = coords['range_meters'] + if coords.get('operator'): + tower_data['operator'] = coords['operator'] + if coords.get('radio'): + tower_data['radio'] = coords['radio'] + + # Update DataStore + key = f"{mcc}_{mnc}_{lac}_{cid}" + app_module.gsm_spy_towers[key] = tower_data + + # Send update to SSE stream + try: + app_module.gsm_spy_queue.put_nowait(tower_data) + logger.info(f"Resolved coordinates for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}") + except queue.Full: + logger.warning("SSE queue full, dropping tower update") + + # Increment API usage counter + usage_count = 
increment_api_usage() + logger.info(f"OpenCellID API call #{usage_count} today") + + else: + logger.warning(f"Could not resolve coordinates for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}") + + geocoding_queue.task_done() + + # Rate limiting between API calls (be nice to OpenCellID) + time.sleep(1) + + except queue.Empty: + # No pending towers, continue waiting + continue + except Exception as e: + logger.error(f"Geocoding worker error: {e}", exc_info=True) + time.sleep(1) + + +def arfcn_to_frequency(arfcn): + """Convert ARFCN to downlink frequency in Hz. + + Uses REGIONAL_BANDS to determine the correct band and conversion formula. + Returns frequency in Hz (e.g., 925800000 for 925.8 MHz). + """ + arfcn = int(arfcn) + + # Search all bands to find which one this ARFCN belongs to + for region_bands in REGIONAL_BANDS.values(): + for band_name, band_info in region_bands.items(): + arfcn_start = band_info['arfcn_start'] + arfcn_end = band_info['arfcn_end'] + + if arfcn_start <= arfcn <= arfcn_end: + # Found the right band, calculate frequency + # Downlink frequency = band_start + (arfcn - arfcn_start) * 200kHz + freq_hz = band_info['start'] + (arfcn - arfcn_start) * 200000 + return int(freq_hz) + + # If ARFCN not found in any band, raise error + raise ValueError(f"ARFCN {arfcn} not found in any known GSM band") + + +def validate_band_names(bands: list[str], region: str) -> tuple[list[str], str | None]: + """Validate band names against REGIONAL_BANDS whitelist. + + Args: + bands: List of band names from user input + region: Region name (Americas, Europe, Asia) + + Returns: + Tuple of (validated_bands, error_message) + """ + if not bands: + return [], None + + region_bands = REGIONAL_BANDS.get(region) + if not region_bands: + return [], f"Invalid region: {region}" + + valid_band_names = set(region_bands.keys()) + invalid_bands = [b for b in bands if b not in valid_band_names] + + if invalid_bands: + return [], (f"Invalid bands for {region}: {', '.join(invalid_bands)}. 
" + f"Valid bands: {', '.join(sorted(valid_band_names))}") + + return bands, None + + +def _start_monitoring_processes(arfcn: int, device_index: int) -> tuple[subprocess.Popen, subprocess.Popen]: + """Start grgsm_livemon and tshark processes for monitoring an ARFCN. + + Returns: + Tuple of (grgsm_process, tshark_process) + + Raises: + FileNotFoundError: If grgsm_livemon or tshark not found + RuntimeError: If grgsm_livemon exits immediately + """ + frequency_hz = arfcn_to_frequency(arfcn) + frequency_mhz = frequency_hz / 1e6 + + # Check prerequisites + if not shutil.which('grgsm_livemon'): + raise FileNotFoundError('grgsm_livemon not found. Please install gr-gsm.') + + # Start grgsm_livemon + grgsm_cmd = [ + 'grgsm_livemon', + '--args', f'rtl={device_index}', + '-f', f'{frequency_mhz}M' + ] + env = dict(os.environ, + OSMO_FSM_DUP_CHECK_DISABLED='1', + PYTHONUNBUFFERED='1', + QT_QPA_PLATFORM='offscreen') + logger.info(f"Starting grgsm_livemon: {' '.join(grgsm_cmd)}") + grgsm_proc = subprocess.Popen( + grgsm_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + env=env + ) + register_process(grgsm_proc) + logger.info(f"Started grgsm_livemon (PID: {grgsm_proc.pid})") + + # Wait and check it didn't die immediately + time.sleep(2) + + if grgsm_proc.poll() is not None: + # Process already exited - capture stderr for diagnostics + stderr_output = '' + try: + stderr_output = grgsm_proc.stderr.read() + except Exception: + pass + exit_code = grgsm_proc.returncode + logger.error( + f"grgsm_livemon exited immediately (code: {exit_code}). 
" + f"stderr: {stderr_output[:500]}" + ) + unregister_process(grgsm_proc) + raise RuntimeError( + f'grgsm_livemon failed (exit code {exit_code}): {stderr_output[:200]}' + ) + + # Start stderr reader thread for grgsm_livemon diagnostics + def read_livemon_stderr(): + try: + for line in iter(grgsm_proc.stderr.readline, ''): + if line: + logger.debug(f"grgsm_livemon stderr: {line.strip()}") + except Exception: + pass + threading.Thread(target=read_livemon_stderr, daemon=True).start() + + # Start tshark + if not shutil.which('tshark'): + safe_terminate(grgsm_proc) + unregister_process(grgsm_proc) + raise FileNotFoundError('tshark not found. Please install wireshark/tshark.') + + tshark_cmd = [ + 'tshark', '-i', 'lo', + '-Y', 'gsm_a.rr.timing_advance || gsm_a.tmsi || gsm_a.imsi', + '-T', 'fields', + '-e', 'gsm_a.rr.timing_advance', + '-e', 'gsm_a.tmsi', + '-e', 'gsm_a.imsi', + '-e', 'gsm_a.lac', + '-e', 'gsm_a.cellid' + ] + logger.info(f"Starting tshark: {' '.join(tshark_cmd)}") + tshark_proc = subprocess.Popen( + tshark_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + bufsize=1 + ) + register_process(tshark_proc) + logger.info(f"Started tshark (PID: {tshark_proc.pid})") + + return grgsm_proc, tshark_proc + + +def _start_and_register_monitor(arfcn: int, device_index: int) -> None: + """Start monitoring processes and register them in global state. + + This is shared logic between start_monitor() and auto_start_monitor(). + Must be called within gsm_spy_lock context. 
+ + Args: + arfcn: ARFCN to monitor + device_index: SDR device index + """ + # Start monitoring processes + grgsm_proc, tshark_proc = _start_monitoring_processes(arfcn, device_index) + app_module.gsm_spy_livemon_process = grgsm_proc + app_module.gsm_spy_monitor_process = tshark_proc + app_module.gsm_spy_selected_arfcn = arfcn + + # Start monitoring thread + monitor_thread_obj = threading.Thread( + target=monitor_thread, + args=(tshark_proc,), + daemon=True + ) + monitor_thread_obj.start() + + +@gsm_spy_bp.route('/dashboard') +def dashboard(): + """Render GSM Spy dashboard.""" + return render_template( + 'gsm_spy_dashboard.html', + shared_observer_location=SHARED_OBSERVER_LOCATION_ENABLED + ) + + +@gsm_spy_bp.route('/start', methods=['POST']) +def start_scanner(): + """Start GSM scanner (G-01 BTS Scanner).""" + global gsm_towers_found, gsm_connected + + with app_module.gsm_spy_lock: + if app_module.gsm_spy_scanner_running: + return jsonify({'error': 'Scanner already running'}), 400 + + data = request.get_json() or {} + device_index = data.get('device', 0) + region = data.get('region', 'Americas') + selected_bands = data.get('bands', []) # Get user-selected bands + + # Validate device index + try: + device_index = validate_device_index(device_index) + except ValueError as e: + return jsonify({'error': str(e)}), 400 + + # Claim SDR device to prevent conflicts + from app import claim_sdr_device + claim_error = claim_sdr_device(device_index, 'GSM Spy') + if claim_error: + return jsonify({ + 'error': claim_error, + 'error_type': 'DEVICE_BUSY' + }), 409 + + # If no bands selected, use all bands for the region (backwards compatibility) + if selected_bands: + validated_bands, error = validate_band_names(selected_bands, region) + if error: + from app import release_sdr_device + release_sdr_device(device_index) + return jsonify({'error': error}), 400 + selected_bands = validated_bands + else: + region_bands = REGIONAL_BANDS.get(region, REGIONAL_BANDS['Americas']) + 
selected_bands = list(region_bands.keys()) + logger.warning(f"No bands specified, using all bands for {region}: {selected_bands}") + + # Build grgsm_scanner command + # Example: grgsm_scanner --args="rtl=0" -b GSM900 + if not shutil.which('grgsm_scanner'): + from app import release_sdr_device + release_sdr_device(device_index) + return jsonify({'error': 'grgsm_scanner not found. Please install gr-gsm.'}), 500 + + try: + cmd = ['grgsm_scanner'] + + # Add device argument (--args for RTL-SDR device selection) + cmd.extend(['--args', f'rtl={device_index}']) + + # Add selected band arguments + # Map EGSM900 to GSM900 since that's what grgsm_scanner expects + for band_name in selected_bands: + # Normalize band name (EGSM900 -> GSM900, remove EGSM prefix) + normalized_band = band_name.replace('EGSM', 'GSM') + cmd.extend(['-b', normalized_band]) + + logger.info(f"Starting GSM scanner: {' '.join(cmd)}") + + # Set a flag to indicate scanner should run + app_module.gsm_spy_active_device = device_index + app_module.gsm_spy_region = region + app_module.gsm_spy_scanner_running = True # Use as flag initially + + # Reset counters for new session + gsm_towers_found = 0 + gsm_devices_tracked = 0 + + # Start geocoding worker (if not already running) + start_geocoding_worker() + + # Start scanning thread (will run grgsm_scanner in a loop) + scanner_thread_obj = threading.Thread( + target=scanner_thread, + args=(cmd, device_index), + daemon=True + ) + scanner_thread_obj.start() + + gsm_connected = True + + return jsonify({ + 'status': 'started', + 'device': device_index, + 'region': region + }) + + except FileNotFoundError: + from app import release_sdr_device + release_sdr_device(device_index) + return jsonify({'error': 'grgsm_scanner not found. 
Please install gr-gsm.'}), 500 + except Exception as e: + from app import release_sdr_device + release_sdr_device(device_index) + logger.error(f"Error starting GSM scanner: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/monitor', methods=['POST']) +def start_monitor(): + """Start monitoring specific tower (G-02 Decoding).""" + with app_module.gsm_spy_lock: + if app_module.gsm_spy_monitor_process: + return jsonify({'error': 'Monitor already running'}), 400 + + data = request.get_json() or {} + arfcn = data.get('arfcn') + device_index = data.get('device', app_module.gsm_spy_active_device or 0) + + if not arfcn: + return jsonify({'error': 'ARFCN required'}), 400 + + # Validate ARFCN is valid integer and in known GSM band ranges + try: + arfcn = int(arfcn) + # This will raise ValueError if ARFCN is not in any known band + arfcn_to_frequency(arfcn) + except (ValueError, TypeError) as e: + return jsonify({'error': f'Invalid ARFCN: {e}'}), 400 + + # Validate device index + try: + device_index = validate_device_index(device_index) + except ValueError as e: + return jsonify({'error': str(e)}), 400 + + try: + # Start and register monitoring (shared logic) + _start_and_register_monitor(arfcn, device_index) + + return jsonify({ + 'status': 'monitoring', + 'arfcn': arfcn, + 'device': device_index + }) + + except FileNotFoundError as e: + return jsonify({'error': f'Tool not found: {e}'}), 500 + except Exception as e: + logger.error(f"Error starting monitor: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/stop', methods=['POST']) +def stop_scanner(): + """Stop GSM scanner and monitor.""" + global gsm_connected, gsm_towers_found, gsm_devices_tracked + + with app_module.gsm_spy_lock: + killed = [] + + # Stop scanner (now just a flag, thread will see it and exit) + if app_module.gsm_spy_scanner_running: + app_module.gsm_spy_scanner_running = False + killed.append('scanner') + + # Terminate livemon process + if 
app_module.gsm_spy_livemon_process: + unregister_process(app_module.gsm_spy_livemon_process) + if safe_terminate(app_module.gsm_spy_livemon_process, timeout=5): + killed.append('livemon') + app_module.gsm_spy_livemon_process = None + + # Terminate monitor process + if app_module.gsm_spy_monitor_process: + unregister_process(app_module.gsm_spy_monitor_process) + if safe_terminate(app_module.gsm_spy_monitor_process, timeout=5): + killed.append('monitor') + app_module.gsm_spy_monitor_process = None + + # Release SDR device from registry + if app_module.gsm_spy_active_device is not None: + from app import release_sdr_device + release_sdr_device(app_module.gsm_spy_active_device) + app_module.gsm_spy_active_device = None + app_module.gsm_spy_selected_arfcn = None + gsm_connected = False + gsm_towers_found = 0 + gsm_devices_tracked = 0 + + return jsonify({'status': 'stopped', 'killed': killed}) + + +@gsm_spy_bp.route('/stream') +def stream(): + """SSE stream for real-time GSM updates.""" + def generate(): + """Generate SSE events.""" + logger.info("SSE stream connected - client subscribed") + + # Send current state on connect (handles reconnects and late-joining clients) + existing_towers = dict(app_module.gsm_spy_towers.items()) + logger.info(f"SSE sending {len(existing_towers)} existing towers on connect") + for key, tower_data in existing_towers.items(): + yield format_sse(tower_data) + + last_keepalive = time.time() + idle_count = 0 # Track consecutive idle checks to handle transitions + + while True: + try: + # Check if scanner/monitor are still running + # Use idle counter to avoid disconnecting during scanner→monitor transition + if not app_module.gsm_spy_scanner_running and not app_module.gsm_spy_monitor_process: + idle_count += 1 + if idle_count >= 5: # 5 seconds grace period for mode transitions + logger.info("SSE stream: no active scanner or monitor, disconnecting") + yield format_sse({'type': 'disconnected'}) + break + else: + idle_count = 0 + + # Try to get 
data from queue + try: + data = app_module.gsm_spy_queue.get(timeout=1) + logger.info(f"SSE sending: type={data.get('type', '?')} keys={list(data.keys())}") + yield format_sse(data) + last_keepalive = time.time() + except queue.Empty: + # Send keepalive if needed + if time.time() - last_keepalive > 30: + yield format_sse({'type': 'keepalive'}) + last_keepalive = time.time() + + except GeneratorExit: + logger.info("SSE stream: client disconnected (GeneratorExit)") + break + except Exception as e: + logger.error(f"Error in GSM stream: {e}") + yield format_sse({'type': 'error', 'message': str(e)}) + break + + response = Response(generate(), mimetype='text/event-stream') + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' + return response + + +@gsm_spy_bp.route('/status') +def status(): + """Get current GSM Spy status.""" + api_usage = get_api_usage_today() + return jsonify({ + 'running': bool(app_module.gsm_spy_scanner_running), + 'monitoring': app_module.gsm_spy_monitor_process is not None, + 'towers_found': gsm_towers_found, + 'devices_tracked': gsm_devices_tracked, + 'device': app_module.gsm_spy_active_device, + 'region': app_module.gsm_spy_region, + 'selected_arfcn': app_module.gsm_spy_selected_arfcn, + 'api_usage_today': api_usage, + 'api_limit': config.GSM_API_DAILY_LIMIT, + 'api_remaining': config.GSM_API_DAILY_LIMIT - api_usage + }) + + +@gsm_spy_bp.route('/lookup_cell', methods=['POST']) +def lookup_cell(): + """Lookup cell tower via OpenCellID (G-05).""" + data = request.get_json() or {} + mcc = data.get('mcc') + mnc = data.get('mnc') + lac = data.get('lac') + cid = data.get('cid') + + if not all([mcc, mnc, lac, cid]): + return jsonify({'error': 'MCC, MNC, LAC, and CID required'}), 400 + + try: + # Check local cache first + with get_db() as conn: + result = conn.execute(''' + SELECT lat, lon, azimuth, range_meters, operator, radio + FROM gsm_cells + WHERE mcc = ? 
AND mnc = ? AND lac = ? AND cid = ? + ''', (mcc, mnc, lac, cid)).fetchone() + + if result: + return jsonify({ + 'source': 'cache', + 'lat': result['lat'], + 'lon': result['lon'], + 'azimuth': result['azimuth'], + 'range': result['range_meters'], + 'operator': result['operator'], + 'radio': result['radio'] + }) + + # Check API usage limit + if not can_use_api(): + current_usage = get_api_usage_today() + return jsonify({ + 'error': 'OpenCellID API daily limit reached', + 'usage_today': current_usage, + 'limit': config.GSM_API_DAILY_LIMIT + }), 429 + + # Call OpenCellID API + api_url = config.GSM_OPENCELLID_API_URL + params = { + 'key': config.GSM_OPENCELLID_API_KEY, + 'mcc': mcc, + 'mnc': mnc, + 'lac': lac, + 'cellid': cid, + 'format': 'json' + } + + response = requests.get(api_url, params=params, timeout=10) + + if response.status_code == 200: + cell_data = response.json() + + # Increment API usage counter + usage_count = increment_api_usage() + logger.info(f"OpenCellID API call #{usage_count} today") + + # Cache the result + conn.execute(''' + INSERT OR REPLACE INTO gsm_cells + (mcc, mnc, lac, cid, lat, lon, azimuth, range_meters, samples, radio, operator, last_verified) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP) + ''', ( + mcc, mnc, lac, cid, + cell_data.get('lat'), + cell_data.get('lon'), + cell_data.get('azimuth'), + cell_data.get('range'), + cell_data.get('samples'), + cell_data.get('radio'), + cell_data.get('operator') + )) + conn.commit() + + return jsonify({ + 'source': 'api', + 'lat': cell_data.get('lat'), + 'lon': cell_data.get('lon'), + 'azimuth': cell_data.get('azimuth'), + 'range': cell_data.get('range'), + 'operator': cell_data.get('operator'), + 'radio': cell_data.get('radio') + }) + else: + return jsonify({'error': 'Cell not found in OpenCellID'}), 404 + + except Exception as e: + logger.error(f"Error looking up cell: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/detect_rogue', methods=['POST']) +def 
detect_rogue():
+    """Analyze and flag rogue towers (G-07)."""
+    data = request.get_json() or {}
+    tower_info = data.get('tower')
+
+    if not tower_info:
+        return jsonify({'error': 'Tower info required'}), 400
+
+    try:
+        is_rogue = False
+        reasons = []
+
+        # Check if tower exists in OpenCellID
+        mcc = tower_info.get('mcc')
+        mnc = tower_info.get('mnc')
+        lac = tower_info.get('lac')
+        cid = tower_info.get('cid')
+
+        if all([mcc, mnc, lac, cid]):
+            with get_db() as conn:
+                result = conn.execute('''
+                    SELECT id FROM gsm_cells
+                    WHERE mcc = ? AND mnc = ? AND lac = ? AND cid = ?
+                ''', (mcc, mnc, lac, cid)).fetchone()
+
+            if not result:
+                is_rogue = True
+                reasons.append('Tower not found in OpenCellID database')
+
+        # Check signal strength anomalies. Default to None rather than 0:
+        # a missing reading would otherwise compare as 0 dBm (0 > -50) and
+        # falsely flag every tower without a signal value as rogue.
+        signal = tower_info.get('signal_strength')
+        if signal is not None and signal > -50:  # Suspiciously strong signal
+            is_rogue = True
+            reasons.append(f'Unusually strong signal: {signal} dBm')
+
+        # If rogue, insert into database
+        if is_rogue:
+            with get_db() as conn:
+                conn.execute('''
+                    INSERT INTO gsm_rogues
+                    (arfcn, mcc, mnc, lac, cid, signal_strength, reason, threat_level)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
+ ''', ( + tower_info.get('arfcn'), + mcc, mnc, lac, cid, + signal, + '; '.join(reasons), + 'high' if len(reasons) > 1 else 'medium' + )) + conn.commit() + + return jsonify({ + 'is_rogue': is_rogue, + 'reasons': reasons + }) + + except Exception as e: + logger.error(f"Error detecting rogue: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/towers') +def get_towers(): + """Get all detected towers.""" + towers = [] + for key, tower_data in app_module.gsm_spy_towers.items(): + towers.append(tower_data) + return jsonify(towers) + + +@gsm_spy_bp.route('/devices') +def get_devices(): + """Get all tracked devices (IMSI/TMSI).""" + devices = [] + for key, device_data in app_module.gsm_spy_devices.items(): + devices.append(device_data) + return jsonify(devices) + + +@gsm_spy_bp.route('/rogues') +def get_rogues(): + """Get all detected rogue towers.""" + try: + with get_db() as conn: + results = conn.execute(''' + SELECT * FROM gsm_rogues + WHERE acknowledged = 0 + ORDER BY detected_at DESC + LIMIT 50 + ''').fetchall() + + rogues = [dict(row) for row in results] + return jsonify(rogues) + except Exception as e: + logger.error(f"Error fetching rogues: {e}") + return jsonify({'error': str(e)}), 500 + + +# ============================================ +# Advanced Features (G-08 through G-12) +# ============================================ + +@gsm_spy_bp.route('/velocity', methods=['GET']) +def get_velocity_data(): + """Get velocity vectoring data for tracked devices (G-08).""" + try: + device_id = request.args.get('device_id') + minutes = int(request.args.get('minutes', 60)) # Last 60 minutes by default + + with get_db() as conn: + # Get velocity log entries + query = ''' + SELECT * FROM gsm_velocity_log + WHERE timestamp >= datetime('now', '-' || ? || ' minutes') + ''' + params = [minutes] + + if device_id: + query += ' AND device_id = ?' 
+ params.append(device_id) + + query += ' ORDER BY timestamp DESC LIMIT 100' + + results = conn.execute(query, params).fetchall() + velocity_data = [dict(row) for row in results] + + return jsonify(velocity_data) + except Exception as e: + logger.error(f"Error fetching velocity data: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/velocity/calculate', methods=['POST']) +def calculate_velocity(): + """Calculate velocity for a device based on TA transitions (G-08).""" + data = request.get_json() or {} + device_id = data.get('device_id') + + if not device_id: + return jsonify({'error': 'device_id required'}), 400 + + try: + with get_db() as conn: + # Get last two TA readings for this device + results = conn.execute(''' + SELECT ta_value, cid, timestamp + FROM gsm_signals + WHERE (imsi = ? OR tmsi = ?) + ORDER BY timestamp DESC + LIMIT 2 + ''', (device_id, device_id)).fetchall() + + if len(results) < 2: + return jsonify({'velocity': 0, 'message': 'Insufficient data'}) + + curr = dict(results[0]) + prev = dict(results[1]) + + # Calculate distance change (TA * 554 meters) + curr_distance = curr['ta_value'] * config.GSM_TA_METERS_PER_UNIT + prev_distance = prev['ta_value'] * config.GSM_TA_METERS_PER_UNIT + distance_change = abs(curr_distance - prev_distance) + + # Calculate time difference + curr_time = datetime.fromisoformat(curr['timestamp']) + prev_time = datetime.fromisoformat(prev['timestamp']) + time_diff_seconds = (curr_time - prev_time).total_seconds() + + # Calculate velocity (m/s) + if time_diff_seconds > 0: + velocity = distance_change / time_diff_seconds + else: + velocity = 0 + + # Store in velocity log + conn.execute(''' + INSERT INTO gsm_velocity_log + (device_id, prev_ta, curr_ta, prev_cid, curr_cid, estimated_velocity) + VALUES (?, ?, ?, ?, ?, ?) 
+ ''', (device_id, prev['ta_value'], curr['ta_value'], + prev['cid'], curr['cid'], velocity)) + conn.commit() + + return jsonify({ + 'device_id': device_id, + 'velocity_mps': round(velocity, 2), + 'velocity_kmh': round(velocity * 3.6, 2), + 'distance_change_m': round(distance_change, 2), + 'time_diff_s': round(time_diff_seconds, 2) + }) + + except Exception as e: + logger.error(f"Error calculating velocity: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/crowd_density', methods=['GET']) +def get_crowd_density(): + """Get crowd density data by sector (G-09).""" + try: + hours = int(request.args.get('hours', 1)) # Last 1 hour by default + cid = request.args.get('cid') # Optional: specific cell + + with get_db() as conn: + # Count unique TMSI per cell in time window + query = ''' + SELECT + cid, + lac, + COUNT(DISTINCT tmsi) as unique_devices, + COUNT(*) as total_pings, + MIN(timestamp) as first_seen, + MAX(timestamp) as last_seen + FROM gsm_tmsi_log + WHERE timestamp >= datetime('now', '-' || ? || ' hours') + ''' + params = [hours] + + if cid: + query += ' AND cid = ?' 
+ params.append(cid) + + query += ' GROUP BY cid, lac ORDER BY unique_devices DESC' + + results = conn.execute(query, params).fetchall() + density_data = [] + + for row in results: + density_data.append({ + 'cid': row['cid'], + 'lac': row['lac'], + 'unique_devices': row['unique_devices'], + 'total_pings': row['total_pings'], + 'first_seen': row['first_seen'], + 'last_seen': row['last_seen'], + 'density_level': 'high' if row['unique_devices'] > 20 else + 'medium' if row['unique_devices'] > 10 else 'low' + }) + + return jsonify(density_data) + + except Exception as e: + logger.error(f"Error fetching crowd density: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/life_patterns', methods=['GET']) +def get_life_patterns(): + """Get life pattern analysis for a device (G-10).""" + try: + device_id = request.args.get('device_id') + if not device_id: + # Return empty results gracefully when no device selected + return jsonify({ + 'device_id': None, + 'patterns': [], + 'message': 'No device selected' + }), 200 + + with get_db() as conn: + # Get historical signal data + results = conn.execute(''' + SELECT + strftime('%H', timestamp) as hour, + strftime('%w', timestamp) as day_of_week, + cid, + lac, + COUNT(*) as occurrences + FROM gsm_signals + WHERE (imsi = ? OR tmsi = ?) 
+ AND timestamp >= datetime('now', '-60 days') + GROUP BY hour, day_of_week, cid, lac + ORDER BY occurrences DESC + ''', (device_id, device_id)).fetchall() + + patterns = [] + for row in results: + patterns.append({ + 'hour': int(row['hour']), + 'day_of_week': int(row['day_of_week']), + 'cid': row['cid'], + 'lac': row['lac'], + 'occurrences': row['occurrences'], + 'day_name': ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'][int(row['day_of_week'])] + }) + + # Identify regular patterns + regular_locations = [] + for pattern in patterns[:5]: # Top 5 most frequent + if pattern['occurrences'] >= 3: # Seen at least 3 times + regular_locations.append({ + 'cid': pattern['cid'], + 'typical_time': f"{pattern['day_name']} {pattern['hour']:02d}:00", + 'frequency': pattern['occurrences'] + }) + + return jsonify({ + 'device_id': device_id, + 'patterns': patterns, + 'regular_locations': regular_locations, + 'total_observations': sum(p['occurrences'] for p in patterns) + }) + + except Exception as e: + logger.error(f"Error analyzing life patterns: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/neighbor_audit', methods=['GET']) +def neighbor_audit(): + """Audit neighbor cell lists for consistency (G-11).""" + try: + cid = request.args.get('cid') + if not cid: + # Return empty results gracefully when no tower selected + return jsonify({ + 'cid': None, + 'neighbors': [], + 'inconsistencies': [], + 'message': 'No tower selected' + }), 200 + + with get_db() as conn: + # Get tower info with metadata (neighbor list stored in metadata JSON) + result = conn.execute(''' + SELECT metadata FROM gsm_cells WHERE cid = ? 
+ ''', (cid,)).fetchone() + + if not result or not result['metadata']: + return jsonify({ + 'cid': cid, + 'status': 'no_data', + 'message': 'No neighbor list data available' + }) + + # Parse metadata JSON + metadata = json.loads(result['metadata']) + neighbor_list = metadata.get('neighbors', []) + + # Audit consistency + issues = [] + for neighbor_cid in neighbor_list: + # Check if neighbor exists in database + neighbor_exists = conn.execute(''' + SELECT id FROM gsm_cells WHERE cid = ? + ''', (neighbor_cid,)).fetchone() + + if not neighbor_exists: + issues.append({ + 'type': 'missing_neighbor', + 'cid': neighbor_cid, + 'message': f'Neighbor CID {neighbor_cid} not found in database' + }) + + return jsonify({ + 'cid': cid, + 'neighbor_count': len(neighbor_list), + 'neighbors': neighbor_list, + 'issues': issues, + 'status': 'suspicious' if issues else 'normal' + }) + + except Exception as e: + logger.error(f"Error auditing neighbors: {e}") + return jsonify({'error': str(e)}), 500 + + +@gsm_spy_bp.route('/traffic_correlation', methods=['GET']) +def traffic_correlation(): + """Correlate uplink/downlink traffic for pairing analysis (G-12).""" + try: + cid = request.args.get('cid') + minutes = int(request.args.get('minutes', 5)) + + with get_db() as conn: + # Get recent signal activity for this cell + results = conn.execute(''' + SELECT + imsi, + tmsi, + ta_value, + timestamp, + metadata + FROM gsm_signals + WHERE cid = ? + AND timestamp >= datetime('now', '-' || ? || ' minutes') + ORDER BY timestamp DESC + ''', (cid, minutes)).fetchall() + + correlations = [] + seen_devices = set() + + for row in results: + device_id = row['imsi'] or row['tmsi'] + if device_id and device_id not in seen_devices: + seen_devices.add(device_id) + + # Simple correlation: count bursts + burst_count = conn.execute(''' + SELECT COUNT(*) as bursts + FROM gsm_signals + WHERE (imsi = ? OR tmsi = ?) + AND cid = ? + AND timestamp >= datetime('now', '-' || ? 
|| ' minutes') + ''', (device_id, device_id, cid, minutes)).fetchone() + + correlations.append({ + 'device_id': device_id, + 'burst_count': burst_count['bursts'], + 'last_seen': row['timestamp'], + 'ta_value': row['ta_value'], + 'activity_level': 'high' if burst_count['bursts'] > 10 else + 'medium' if burst_count['bursts'] > 5 else 'low' + }) + + return jsonify({ + 'cid': cid, + 'time_window_minutes': minutes, + 'active_devices': len(correlations), + 'correlations': correlations + }) + + except Exception as e: + logger.error(f"Error correlating traffic: {e}") + return jsonify({'error': str(e)}), 500 + + +# ============================================ +# Helper Functions +# ============================================ + +def parse_grgsm_scanner_output(line: str) -> dict[str, Any] | None: + """Parse grgsm_scanner output line. + + Actual output format (comma-separated key-value pairs): + ARFCN: 975, Freq: 925.2M, CID: 13522, LAC: 38722, MCC: 262, MNC: 1, Pwr: -58 + """ + try: + line = line.strip() + + # Skip non-data lines (progress, config, neighbour info, blank) + if not line or 'ARFCN:' not in line: + return None + + # Parse "ARFCN: 975, Freq: 925.2M, CID: 13522, LAC: 38722, MCC: 262, MNC: 1, Pwr: -58" + fields = {} + for part in line.split(','): + part = part.strip() + if ':' in part: + key, _, value = part.partition(':') + fields[key.strip()] = value.strip() + + if 'ARFCN' in fields and 'CID' in fields: + cid = int(fields.get('CID', 0)) + mcc = int(fields.get('MCC', 0)) + mnc = int(fields.get('MNC', 0)) + + # Only skip entries with no network identity at all (MCC=0 AND MNC=0) + # CID=0 with valid MCC/MNC is a partially decoded cell - still useful + if mcc == 0 and mnc == 0: + logger.debug(f"Skipping unidentified ARFCN (MCC=0, MNC=0): {line}") + return None + + # Freq may have 'M' suffix (e.g. 
"925.2M") + freq_str = fields.get('Freq', '0').rstrip('Mm') + + data = { + 'type': 'tower', + 'arfcn': int(fields['ARFCN']), + 'frequency': float(freq_str), + 'cid': cid, + 'lac': int(fields.get('LAC', 0)), + 'mcc': mcc, + 'mnc': mnc, + 'signal_strength': float(fields.get('Pwr', -999)), + 'timestamp': datetime.now().isoformat() + } + return data + + except Exception as e: + logger.debug(f"Failed to parse scanner line: {line} - {e}") + + return None + + +def parse_tshark_output(line: str) -> dict[str, Any] | None: + """Parse tshark filtered GSM output.""" + try: + # tshark output format: ta_value\ttmsi\timsi\tlac\tcid + parts = line.strip().split('\t') + + if len(parts) >= 5: + data = { + 'type': 'device', + 'ta_value': int(parts[0]) if parts[0] else None, + 'tmsi': parts[1] if parts[1] else None, + 'imsi': parts[2] if parts[2] else None, + 'lac': int(parts[3]) if parts[3] else None, + 'cid': int(parts[4]) if parts[4] else None, + 'timestamp': datetime.now().isoformat() + } + + # Calculate distance from TA + if data['ta_value'] is not None: + data['distance_meters'] = data['ta_value'] * config.GSM_TA_METERS_PER_UNIT + + return data + + except Exception as e: + logger.debug(f"Failed to parse tshark line: {line} - {e}") + + return None + + +def auto_start_monitor(tower_data): + """Automatically start monitoring the strongest tower found.""" + try: + arfcn = tower_data.get('arfcn') + if not arfcn: + logger.warning("Cannot auto-monitor: no ARFCN in tower data") + return + + logger.info(f"Auto-monitoring strongest tower: ARFCN {arfcn}, Signal {tower_data.get('signal_strength')} dBm") + + # Brief delay to ensure scanner has stabilized + time.sleep(2) + + with app_module.gsm_spy_lock: + if app_module.gsm_spy_monitor_process: + logger.info("Monitor already running, skipping auto-start") + return + + device_index = app_module.gsm_spy_active_device or 0 + + # Start and register monitoring (shared logic) + _start_and_register_monitor(arfcn, device_index) + + # Send SSE 
notification + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'auto_monitor_started', + 'arfcn': arfcn, + 'tower': tower_data + }) + except queue.Full: + pass + + logger.info(f"Auto-monitoring started for ARFCN {arfcn}") + + except Exception as e: + logger.error(f"Error in auto-monitoring: {e}") + + +def scanner_thread(cmd, device_index): + """Thread to continuously run grgsm_scanner in a loop with non-blocking I/O. + + grgsm_scanner scans once and exits, so we loop it to provide + continuous updates to the dashboard. + """ + global gsm_towers_found + + strongest_tower = None + auto_monitor_triggered = False # Moved outside loop - persists across scans + scan_count = 0 + crash_count = 0 + process = None + + try: + while app_module.gsm_spy_scanner_running: # Flag check + scan_count += 1 + logger.info(f"Starting GSM scan #{scan_count}") + + try: + # Start scanner process + # Set OSMO_FSM_DUP_CHECK_DISABLED to prevent libosmocore + # abort on duplicate FSM registration (common with apt gr-gsm) + env = dict(os.environ, + OSMO_FSM_DUP_CHECK_DISABLED='1', + PYTHONUNBUFFERED='1', + QT_QPA_PLATFORM='offscreen') + process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + bufsize=1, + env=env + ) + register_process(process) + logger.info(f"Started grgsm_scanner (PID: {process.pid})") + + # Standard pattern: reader threads with queue + output_queue_local = queue.Queue() + + def read_stdout(): + try: + for line in iter(process.stdout.readline, ''): + if line: + output_queue_local.put(('stdout', line)) + except Exception as e: + logger.error(f"stdout read error: {e}") + finally: + output_queue_local.put(('eof', None)) + + def read_stderr(): + try: + for line in iter(process.stderr.readline, ''): + if line: + logger.debug(f"grgsm_scanner stderr: {line.strip()}") + # grgsm_scanner outputs scan results to stderr + output_queue_local.put(('stderr', line)) + except Exception as e: + logger.error(f"stderr read error: 
{e}") + + stdout_thread = threading.Thread(target=read_stdout, daemon=True) + stderr_thread = threading.Thread(target=read_stderr, daemon=True) + stdout_thread.start() + stderr_thread.start() + + # Process output with timeout + scan_start = time.time() + last_output = scan_start + scan_timeout = 300 # 5 minute maximum per scan (4 bands takes ~2-3 min) + + while app_module.gsm_spy_scanner_running: + # Check if process died + if process.poll() is not None: + logger.info(f"Scanner exited (code: {process.returncode})") + break + + # Get output from queue with timeout + try: + msg_type, line = output_queue_local.get(timeout=1.0) + + if msg_type == 'eof': + break # EOF + + last_output = time.time() + stripped = line.strip() + logger.info(f"Scanner [{msg_type}]: {stripped}") + + # Forward progress and status info to frontend + progress_match = re.match(r'Scanning:\s+([\d.]+)%\s+done', stripped) + if progress_match: + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'progress', + 'percent': float(progress_match.group(1)), + 'scan': scan_count + }) + except queue.Full: + pass + continue + if stripped.startswith('Try scan CCCH'): + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'status', + 'message': stripped, + 'scan': scan_count + }) + except queue.Full: + pass + + parsed = parse_grgsm_scanner_output(line) + if parsed: + # Enrich with coordinates + from utils.gsm_geocoding import enrich_tower_data + enriched = enrich_tower_data(parsed) + + # Store in DataStore + key = f"{enriched['mcc']}_{enriched['mnc']}_{enriched['lac']}_{enriched['cid']}" + app_module.gsm_spy_towers[key] = enriched + + # Track strongest tower + signal = enriched.get('signal_strength', -999) + if strongest_tower is None or signal > strongest_tower.get('signal_strength', -999): + strongest_tower = enriched + + # Queue for SSE + try: + app_module.gsm_spy_queue.put_nowait(enriched) + except queue.Full: + logger.warning("Queue full, dropping tower update") + + # Thread-safe counter update + 
with app_module.gsm_spy_lock: + gsm_towers_found += 1 + except queue.Empty: + # No output, check timeout + if time.time() - last_output > scan_timeout: + logger.warning(f"Scan timeout after {scan_timeout}s") + break + + # Drain remaining queue items after process exits + while not output_queue_local.empty(): + try: + msg_type, line = output_queue_local.get_nowait() + if line: + logger.info(f"Scanner [{msg_type}] (drain): {line.strip()}") + except queue.Empty: + break + + # Clean up process with timeout + if process.poll() is None: + logger.info("Terminating scanner process") + safe_terminate(process, timeout=5) + else: + process.wait() # Reap zombie + + exit_code = process.returncode + scan_duration = time.time() - scan_start + logger.info(f"Scan #{scan_count} complete (exit code: {exit_code}, duration: {scan_duration:.1f}s)") + + # Notify frontend scan completed + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'scan_complete', + 'scan': scan_count, + 'duration': round(scan_duration, 1), + 'towers_found': gsm_towers_found + }) + except queue.Full: + pass + + # Detect crash pattern: process exits too quickly with no data + if scan_duration < 5 and exit_code != 0: + crash_count += 1 + logger.error( + f"grgsm_scanner crashed on startup (exit code: {exit_code}). " + f"Crash count: {crash_count}. Check gr-gsm/libosmocore compatibility." + ) + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'error', + 'message': f'grgsm_scanner crashed (exit code: {exit_code}). ' + 'This may be a gr-gsm/libosmocore compatibility issue. ' + 'Try rebuilding gr-gsm from source.', + 'timestamp': time.strftime('%Y-%m-%dT%H:%M:%S') + }) + except queue.Full: + pass + if crash_count >= 3: + logger.error("grgsm_scanner crashed 3 times, stopping scanner") + break + + except FileNotFoundError: + logger.error( + "grgsm_scanner not found. 
Please install gr-gsm: " + "https://github.com/bkerler/gr-gsm" + ) + # Send error to SSE stream so the UI knows + try: + app_module.gsm_spy_queue.put({ + 'type': 'error', + 'message': 'grgsm_scanner not found. Please install gr-gsm.', + 'timestamp': time.strftime('%Y-%m-%dT%H:%M:%S') + }) + except Exception: + pass + break # Don't retry - binary won't appear + + except Exception as e: + logger.error(f"Scanner scan error: {e}", exc_info=True) + if process and process.poll() is None: + safe_terminate(process) + + # Check if should continue + if not app_module.gsm_spy_scanner_running: + break + + # After first scan completes: auto-switch to monitoring if towers found + # Scanner process has exited so SDR is free for grgsm_livemon + if not auto_monitor_triggered and strongest_tower and scan_count >= 1: + auto_monitor_triggered = True + arfcn = strongest_tower.get('arfcn') + signal = strongest_tower.get('signal_strength', -999) + logger.info( + f"Scan complete with towers found. Auto-switching to monitor mode " + f"on ARFCN {arfcn} (signal: {signal} dBm)" + ) + + # Stop scanner loop - SDR needed for monitoring + app_module.gsm_spy_scanner_running = False + + try: + app_module.gsm_spy_queue.put_nowait({ + 'type': 'status', + 'message': f'Switching to monitor mode on ARFCN {arfcn}...' 
+                    })
+                except queue.Full:
+                    pass
+
+                # Start monitoring (SDR is free since scanner process exited)
+                monitor_started = True
+                try:
+                    with app_module.gsm_spy_lock:
+                        if not app_module.gsm_spy_monitor_process:
+                            _start_and_register_monitor(arfcn, device_index)
+                            logger.info(f"Auto-monitoring started for ARFCN {arfcn}")
+
+                            try:
+                                app_module.gsm_spy_queue.put_nowait({
+                                    'type': 'auto_monitor_started',
+                                    'arfcn': arfcn,
+                                    'tower': strongest_tower
+                                })
+                            except queue.Full:
+                                pass
+                except Exception as e:
+                    logger.error(f"Error starting auto-monitor: {e}", exc_info=True)
+                    try:
+                        app_module.gsm_spy_queue.put_nowait({
+                            'type': 'error',
+                            'message': f'Monitor failed: {e}'
+                        })
+                    except queue.Full:
+                        pass
+                    # Resume scanning if monitor failed
+                    monitor_started = False
+                    app_module.gsm_spy_scanner_running = True
+
+                # Only leave the loop when the monitor actually took over;
+                # an unconditional break here would kill the scanner thread
+                # even though scanning was just re-enabled above.
+                if monitor_started:
+                    break  # Exit scanner loop (monitoring takes over)
+
+            # Wait between scans with responsive flag checking
+            logger.info("Waiting 5 seconds before next scan")
+            for i in range(5):
+                if not app_module.gsm_spy_scanner_running:
+                    break
+                time.sleep(1)
+
+    except Exception as e:
+        logger.error(f"Scanner thread fatal error: {e}", exc_info=True)
+
+    finally:
+        # Always cleanup
+        if process and process.poll() is None:
+            safe_terminate(process, timeout=5)
+
+        logger.info("Scanner thread terminated")
+
+        # Reset global state - but don't release SDR if monitoring took over
+        with app_module.gsm_spy_lock:
+            app_module.gsm_spy_scanner_running = False
+            if app_module.gsm_spy_monitor_process is None:
+                # No monitor running - release SDR device
+                if app_module.gsm_spy_active_device is not None:
+                    from app import release_sdr_device
+                    release_sdr_device(app_module.gsm_spy_active_device)
+                    app_module.gsm_spy_active_device = None
+            else:
+                logger.info("Monitor is running, keeping SDR device allocated")
+
+
+def monitor_thread(process):
+    """Thread to read tshark output using standard iter pattern."""
+    global gsm_devices_tracked
+
+    # Standard pattern: reader thread with queue
+    output_queue_local = queue.Queue()
+
+    def 
read_stdout(): + try: + for line in iter(process.stdout.readline, ''): + if line: + output_queue_local.put(('stdout', line)) + except Exception as e: + logger.error(f"tshark read error: {e}") + finally: + output_queue_local.put(('eof', None)) + + stdout_thread = threading.Thread(target=read_stdout, daemon=True) + stdout_thread.start() + + try: + while app_module.gsm_spy_monitor_process: + # Check if process died + if process.poll() is not None: + logger.info(f"Monitor process exited (code: {process.returncode})") + break + + # Get output from queue with timeout + try: + msg_type, line = output_queue_local.get(timeout=1.0) + except queue.Empty: + continue # Timeout, check flag again + + if msg_type == 'eof': + break # EOF + + parsed = parse_tshark_output(line) + if parsed: + # Store in DataStore + key = parsed.get('tmsi') or parsed.get('imsi') or str(time.time()) + app_module.gsm_spy_devices[key] = parsed + + # Queue for SSE stream + try: + app_module.gsm_spy_queue.put_nowait(parsed) + except queue.Full: + pass + + # Store in database for historical analysis + try: + with get_db() as conn: + # gsm_signals table + conn.execute(''' + INSERT INTO gsm_signals + (imsi, tmsi, lac, cid, ta_value, arfcn) + VALUES (?, ?, ?, ?, ?, ?) + ''', ( + parsed.get('imsi'), + parsed.get('tmsi'), + parsed.get('lac'), + parsed.get('cid'), + parsed.get('ta_value'), + app_module.gsm_spy_selected_arfcn + )) + + # gsm_tmsi_log table for crowd density + if parsed.get('tmsi'): + conn.execute(''' + INSERT INTO gsm_tmsi_log + (tmsi, lac, cid, ta_value) + VALUES (?, ?, ?, ?) + ''', ( + parsed.get('tmsi'), + parsed.get('lac'), + parsed.get('cid'), + parsed.get('ta_value') + )) + + # Velocity calculation (G-08) + device_id = parsed.get('imsi') or parsed.get('tmsi') + if device_id and parsed.get('ta_value') is not None: + # Get previous TA reading + prev_reading = conn.execute(''' + SELECT ta_value, cid, timestamp + FROM gsm_signals + WHERE (imsi = ? OR tmsi = ?) 
+ ORDER BY timestamp DESC + LIMIT 1 OFFSET 1 + ''', (device_id, device_id)).fetchone() + + if prev_reading: + # Calculate velocity + curr_ta = parsed.get('ta_value') + prev_ta = prev_reading['ta_value'] + curr_distance = curr_ta * config.GSM_TA_METERS_PER_UNIT + prev_distance = prev_ta * config.GSM_TA_METERS_PER_UNIT + distance_change = abs(curr_distance - prev_distance) + + # Time difference + prev_time = datetime.fromisoformat(prev_reading['timestamp']) + curr_time = datetime.now() + time_diff_seconds = (curr_time - prev_time).total_seconds() + + if time_diff_seconds > 0: + velocity = distance_change / time_diff_seconds + + # Store velocity + conn.execute(''' + INSERT INTO gsm_velocity_log + (device_id, prev_ta, curr_ta, prev_cid, curr_cid, estimated_velocity) + VALUES (?, ?, ?, ?, ?, ?) + ''', ( + device_id, + prev_ta, + curr_ta, + prev_reading['cid'], + parsed.get('cid'), + velocity + )) + + conn.commit() + except Exception as e: + logger.error(f"Error storing device data: {e}") + + # Thread-safe counter + with app_module.gsm_spy_lock: + gsm_devices_tracked += 1 + + except Exception as e: + logger.error(f"Monitor thread error: {e}", exc_info=True) + + finally: + # Reap process with timeout + try: + if process.poll() is None: + process.terminate() + try: + process.wait(timeout=5) + except subprocess.TimeoutExpired: + logger.warning("Monitor process didn't terminate, killing") + process.kill() + process.wait() + else: + process.wait() + logger.info(f"Monitor process exited with code {process.returncode}") + except Exception as e: + logger.error(f"Error reaping monitor process: {e}") + + logger.info("Monitor thread terminated") diff --git a/routes/listening_post.py b/routes/listening_post.py index 658acdb..8c48402 100644 --- a/routes/listening_post.py +++ b/routes/listening_post.py @@ -19,8 +19,8 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.logging import get_logger -from utils.sse import format_sse -from 
utils.event_pipeline import process_event +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.constants import ( SSE_QUEUE_TIMEOUT, SSE_KEEPALIVE_INTERVAL, @@ -1181,13 +1181,13 @@ def stream_scanner_events() -> Response: while True: try: - msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - try: - process_event('listening_scanner', msg, msg.get('type')) - except Exception: - pass - yield format_sse(msg) + msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('listening_scanner', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL: @@ -1239,10 +1239,10 @@ def get_presets() -> Response: # MANUAL AUDIO ENDPOINTS (for direct listening) # ============================================ -@listening_post_bp.route('/audio/start', methods=['POST']) -def start_audio() -> Response: - """Start audio at specific frequency (manual mode).""" - global scanner_running, scanner_active_device, listening_active_device, scanner_power_process, scanner_thread +@listening_post_bp.route('/audio/start', methods=['POST']) +def start_audio() -> Response: + """Start audio at specific frequency (manual mode).""" + global scanner_running, scanner_active_device, listening_active_device, scanner_power_process, scanner_thread # Stop scanner if running if scanner_running: @@ -1271,7 +1271,7 @@ def start_audio() -> Response: pass time.sleep(0.5) - data = request.json or {} + data = request.json or {} try: frequency = float(data.get('frequency', 0)) @@ -1286,11 +1286,11 @@ def start_audio() -> Response: 'message': f'Invalid parameter: {e}' }), 400 - if frequency <= 0: - return jsonify({ - 'status': 'error', - 'message': 'frequency is required' - }), 400 + if frequency <= 0: + return jsonify({ + 'status': 'error', + 'message': 'frequency is required' + }), 400 valid_sdr_types 
= ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay'] if sdr_type not in valid_sdr_types: @@ -1299,19 +1299,28 @@ def start_audio() -> Response: 'message': f'Invalid sdr_type. Use: {", ".join(valid_sdr_types)}' }), 400 - # Update config for audio - scanner_config['squelch'] = squelch - scanner_config['gain'] = gain - scanner_config['device'] = device - scanner_config['sdr_type'] = sdr_type - - # Stop waterfall if it's using the same SDR - if waterfall_running and waterfall_active_device == device: - _stop_waterfall_internal() - time.sleep(0.2) + # Update config for audio + scanner_config['squelch'] = squelch + scanner_config['gain'] = gain + scanner_config['device'] = device + scanner_config['sdr_type'] = sdr_type - # Claim device for listening audio - if listening_active_device is None or listening_active_device != device: + # Stop waterfall if it's using the same SDR (SSE path) + if waterfall_running and waterfall_active_device == device: + _stop_waterfall_internal() + time.sleep(0.2) + + # Release waterfall device claim if the WebSocket waterfall is still + # holding it. The JS client sends a stop command and closes the + # WebSocket before requesting audio, but the backend handler may not + # have finished its cleanup yet. 
+ device_status = app_module.get_sdr_device_status() + if device_status.get(device) == 'waterfall': + app_module.release_sdr_device(device) + time.sleep(0.3) + + # Claim device for listening audio + if listening_active_device is None or listening_active_device != device: if listening_active_device is not None: app_module.release_sdr_device(listening_active_device) error = app_module.claim_sdr_device(device, 'listening') @@ -1524,204 +1533,204 @@ waterfall_thread: Optional[threading.Thread] = None waterfall_running = False waterfall_lock = threading.Lock() waterfall_queue: queue.Queue = queue.Queue(maxsize=200) -waterfall_active_device: Optional[int] = None -waterfall_config = { - 'start_freq': 88.0, - 'end_freq': 108.0, - 'bin_size': 10000, - 'gain': 40, - 'device': 0, - 'max_bins': 1024, - 'interval': 0.4, -} +waterfall_active_device: Optional[int] = None +waterfall_config = { + 'start_freq': 88.0, + 'end_freq': 108.0, + 'bin_size': 10000, + 'gain': 40, + 'device': 0, + 'max_bins': 1024, + 'interval': 0.4, +} -def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]: - """Parse a single rtl_power CSV line into bins.""" - if not line or line.startswith('#'): - return None, None, None, [] - - parts = [p.strip() for p in line.split(',')] - if len(parts) < 6: - return None, None, None, [] - - # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS) - timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0] - - start_idx = None - for i, tok in enumerate(parts): - try: - val = float(tok) - except ValueError: - continue - if val > 1e5: - start_idx = i - break - if start_idx is None or len(parts) < start_idx + 4: - return timestamp, None, None, [] - - try: - seg_start = float(parts[start_idx]) - seg_end = float(parts[start_idx + 1]) - raw_values = [] - for v in parts[start_idx + 3:]: - try: - raw_values.append(float(v)) - except ValueError: - continue - if raw_values and raw_values[0] >= 0 and any(val < 0 for val in 
raw_values[1:]): - raw_values = raw_values[1:] - return timestamp, seg_start, seg_end, raw_values - except ValueError: - return timestamp, None, None, [] - - -def _waterfall_loop(): - """Continuous rtl_power sweep loop emitting waterfall data.""" - global waterfall_running, waterfall_process - - rtl_power_path = find_rtl_power() - if not rtl_power_path: - logger.error("rtl_power not found for waterfall") - waterfall_running = False - return - - start_hz = int(waterfall_config['start_freq'] * 1e6) - end_hz = int(waterfall_config['end_freq'] * 1e6) - bin_hz = int(waterfall_config['bin_size']) - gain = waterfall_config['gain'] - device = waterfall_config['device'] - interval = float(waterfall_config.get('interval', 0.4)) - - cmd = [ - rtl_power_path, - '-f', f'{start_hz}:{end_hz}:{bin_hz}', - '-i', str(interval), - '-g', str(gain), - '-d', str(device), - ] - - try: - waterfall_process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - bufsize=1, - text=True, - ) - - current_ts = None - all_bins: list[float] = [] - sweep_start_hz = start_hz - sweep_end_hz = end_hz - - if not waterfall_process.stdout: - return - - for line in waterfall_process.stdout: - if not waterfall_running: - break - - ts, seg_start, seg_end, bins = _parse_rtl_power_line(line) - if ts is None or not bins: - continue - - if current_ts is None: - current_ts = ts - - if ts != current_ts and all_bins: - max_bins = int(waterfall_config.get('max_bins') or 0) - bins_to_send = all_bins - if max_bins > 0 and len(bins_to_send) > max_bins: - bins_to_send = _downsample_bins(bins_to_send, max_bins) - msg = { - 'type': 'waterfall_sweep', - 'start_freq': sweep_start_hz / 1e6, - 'end_freq': sweep_end_hz / 1e6, - 'bins': bins_to_send, - 'timestamp': datetime.now().isoformat(), - } - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - try: - waterfall_queue.get_nowait() - except queue.Empty: - pass - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - pass - - 
all_bins = [] - sweep_start_hz = start_hz - sweep_end_hz = end_hz - current_ts = ts - - all_bins.extend(bins) - if seg_start is not None: - sweep_start_hz = min(sweep_start_hz, seg_start) - if seg_end is not None: - sweep_end_hz = max(sweep_end_hz, seg_end) - - # Flush any remaining bins - if all_bins and waterfall_running: - max_bins = int(waterfall_config.get('max_bins') or 0) - bins_to_send = all_bins - if max_bins > 0 and len(bins_to_send) > max_bins: - bins_to_send = _downsample_bins(bins_to_send, max_bins) - msg = { - 'type': 'waterfall_sweep', - 'start_freq': sweep_start_hz / 1e6, - 'end_freq': sweep_end_hz / 1e6, - 'bins': bins_to_send, - 'timestamp': datetime.now().isoformat(), - } - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - pass - - except Exception as e: - logger.error(f"Waterfall loop error: {e}") - finally: - waterfall_running = False - if waterfall_process and waterfall_process.poll() is None: - try: - waterfall_process.terminate() - waterfall_process.wait(timeout=1) - except Exception: - try: - waterfall_process.kill() - except Exception: - pass - waterfall_process = None - logger.info("Waterfall loop stopped") - - -def _stop_waterfall_internal() -> None: - """Stop the waterfall display and release resources.""" - global waterfall_running, waterfall_process, waterfall_active_device - - waterfall_running = False - if waterfall_process and waterfall_process.poll() is None: - try: - waterfall_process.terminate() - waterfall_process.wait(timeout=1) - except Exception: - try: - waterfall_process.kill() - except Exception: - pass - waterfall_process = None - - if waterfall_active_device is not None: - app_module.release_sdr_device(waterfall_active_device) - waterfall_active_device = None +def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]: + """Parse a single rtl_power CSV line into bins.""" + if not line or line.startswith('#'): + return None, None, None, [] + + parts = [p.strip() for p 
in line.split(',')] + if len(parts) < 6: + return None, None, None, [] + + # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS) + timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0] + + start_idx = None + for i, tok in enumerate(parts): + try: + val = float(tok) + except ValueError: + continue + if val > 1e5: + start_idx = i + break + if start_idx is None or len(parts) < start_idx + 4: + return timestamp, None, None, [] + + try: + seg_start = float(parts[start_idx]) + seg_end = float(parts[start_idx + 1]) + raw_values = [] + for v in parts[start_idx + 3:]: + try: + raw_values.append(float(v)) + except ValueError: + continue + if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]): + raw_values = raw_values[1:] + return timestamp, seg_start, seg_end, raw_values + except ValueError: + return timestamp, None, None, [] + + +def _waterfall_loop(): + """Continuous rtl_power sweep loop emitting waterfall data.""" + global waterfall_running, waterfall_process + + rtl_power_path = find_rtl_power() + if not rtl_power_path: + logger.error("rtl_power not found for waterfall") + waterfall_running = False + return + + start_hz = int(waterfall_config['start_freq'] * 1e6) + end_hz = int(waterfall_config['end_freq'] * 1e6) + bin_hz = int(waterfall_config['bin_size']) + gain = waterfall_config['gain'] + device = waterfall_config['device'] + interval = float(waterfall_config.get('interval', 0.4)) + + cmd = [ + rtl_power_path, + '-f', f'{start_hz}:{end_hz}:{bin_hz}', + '-i', str(interval), + '-g', str(gain), + '-d', str(device), + ] + + try: + waterfall_process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + bufsize=1, + text=True, + ) + + current_ts = None + all_bins: list[float] = [] + sweep_start_hz = start_hz + sweep_end_hz = end_hz + + if not waterfall_process.stdout: + return + + for line in waterfall_process.stdout: + if not waterfall_running: + break + + ts, seg_start, seg_end, bins = 
_parse_rtl_power_line(line) + if ts is None or not bins: + continue + + if current_ts is None: + current_ts = ts + + if ts != current_ts and all_bins: + max_bins = int(waterfall_config.get('max_bins') or 0) + bins_to_send = all_bins + if max_bins > 0 and len(bins_to_send) > max_bins: + bins_to_send = _downsample_bins(bins_to_send, max_bins) + msg = { + 'type': 'waterfall_sweep', + 'start_freq': sweep_start_hz / 1e6, + 'end_freq': sweep_end_hz / 1e6, + 'bins': bins_to_send, + 'timestamp': datetime.now().isoformat(), + } + try: + waterfall_queue.put_nowait(msg) + except queue.Full: + try: + waterfall_queue.get_nowait() + except queue.Empty: + pass + try: + waterfall_queue.put_nowait(msg) + except queue.Full: + pass + + all_bins = [] + sweep_start_hz = start_hz + sweep_end_hz = end_hz + current_ts = ts + + all_bins.extend(bins) + if seg_start is not None: + sweep_start_hz = min(sweep_start_hz, seg_start) + if seg_end is not None: + sweep_end_hz = max(sweep_end_hz, seg_end) + + # Flush any remaining bins + if all_bins and waterfall_running: + max_bins = int(waterfall_config.get('max_bins') or 0) + bins_to_send = all_bins + if max_bins > 0 and len(bins_to_send) > max_bins: + bins_to_send = _downsample_bins(bins_to_send, max_bins) + msg = { + 'type': 'waterfall_sweep', + 'start_freq': sweep_start_hz / 1e6, + 'end_freq': sweep_end_hz / 1e6, + 'bins': bins_to_send, + 'timestamp': datetime.now().isoformat(), + } + try: + waterfall_queue.put_nowait(msg) + except queue.Full: + pass + + except Exception as e: + logger.error(f"Waterfall loop error: {e}") + finally: + waterfall_running = False + if waterfall_process and waterfall_process.poll() is None: + try: + waterfall_process.terminate() + waterfall_process.wait(timeout=1) + except Exception: + try: + waterfall_process.kill() + except Exception: + pass + waterfall_process = None + logger.info("Waterfall loop stopped") + + +def _stop_waterfall_internal() -> None: + """Stop the waterfall display and release resources.""" + 
global waterfall_running, waterfall_process, waterfall_active_device + + waterfall_running = False + if waterfall_process and waterfall_process.poll() is None: + try: + waterfall_process.terminate() + waterfall_process.wait(timeout=1) + except Exception: + try: + waterfall_process.kill() + except Exception: + pass + waterfall_process = None + + if waterfall_active_device is not None: + app_module.release_sdr_device(waterfall_active_device) + waterfall_active_device = None @listening_post_bp.route('/waterfall/start', methods=['POST']) -def start_waterfall() -> Response: +def start_waterfall() -> Response: """Start the waterfall/spectrogram display.""" global waterfall_thread, waterfall_running, waterfall_config, waterfall_active_device @@ -1734,24 +1743,24 @@ def start_waterfall() -> Response: data = request.json or {} - try: - waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) - waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) - waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) - waterfall_config['gain'] = int(data.get('gain', 40)) - waterfall_config['device'] = int(data.get('device', 0)) - if data.get('interval') is not None: - interval = float(data.get('interval', waterfall_config['interval'])) - if interval < 0.1 or interval > 5: - return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400 - waterfall_config['interval'] = interval - if data.get('max_bins') is not None: - max_bins = int(data.get('max_bins', waterfall_config['max_bins'])) - if max_bins < 64 or max_bins > 4096: - return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400 - waterfall_config['max_bins'] = max_bins - except (ValueError, TypeError) as e: - return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 + try: + waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) + waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) + 
waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) + waterfall_config['gain'] = int(data.get('gain', 40)) + waterfall_config['device'] = int(data.get('device', 0)) + if data.get('interval') is not None: + interval = float(data.get('interval', waterfall_config['interval'])) + if interval < 0.1 or interval > 5: + return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400 + waterfall_config['interval'] = interval + if data.get('max_bins') is not None: + max_bins = int(data.get('max_bins', waterfall_config['max_bins'])) + if max_bins < 64 or max_bins > 4096: + return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400 + waterfall_config['max_bins'] = max_bins + except (ValueError, TypeError) as e: + return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 if waterfall_config['start_freq'] >= waterfall_config['end_freq']: return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400 @@ -1777,11 +1786,11 @@ def start_waterfall() -> Response: @listening_post_bp.route('/waterfall/stop', methods=['POST']) -def stop_waterfall() -> Response: - """Stop the waterfall display.""" - _stop_waterfall_internal() - - return jsonify({'status': 'stopped'}) +def stop_waterfall() -> Response: + """Stop the waterfall display.""" + _stop_waterfall_internal() + + return jsonify({'status': 'stopped'}) @listening_post_bp.route('/waterfall/stream') @@ -1790,14 +1799,14 @@ def stream_waterfall() -> Response: def generate() -> Generator[str, None, None]: last_keepalive = time.time() while True: - try: - msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - try: - process_event('waterfall', msg, msg.get('type')) - except Exception: - pass - yield format_sse(msg) + try: + msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('waterfall', msg, msg.get('type')) + except Exception: 
+ pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL: @@ -1808,20 +1817,20 @@ def stream_waterfall() -> Response: response.headers['Cache-Control'] = 'no-cache' response.headers['X-Accel-Buffering'] = 'no' return response -def _downsample_bins(values: list[float], target: int) -> list[float]: - """Downsample bins to a target length using simple averaging.""" - if target <= 0 or len(values) <= target: - return values - - out: list[float] = [] - step = len(values) / target - for i in range(target): - start = int(i * step) - end = int((i + 1) * step) - if end <= start: - end = min(start + 1, len(values)) - chunk = values[start:end] - if not chunk: - continue - out.append(sum(chunk) / len(chunk)) - return out +def _downsample_bins(values: list[float], target: int) -> list[float]: + """Downsample bins to a target length using simple averaging.""" + if target <= 0 or len(values) <= target: + return values + + out: list[float] = [] + step = len(values) / target + for i in range(target): + start = int(i * step) + end = int((i + 1) * step) + if end <= start: + end = min(start + 1, len(values)) + chunk = values[start:end] + if not chunk: + continue + out.append(sum(chunk) / len(chunk)) + return out diff --git a/routes/pager.py b/routes/pager.py index 4ee5425..3253a6c 100644 --- a/routes/pager.py +++ b/routes/pager.py @@ -2,12 +2,14 @@ from __future__ import annotations +import math import os import pathlib import re import pty import queue import select +import struct import subprocess import threading import time @@ -22,8 +24,8 @@ from utils.validation import ( validate_frequency, validate_device_index, validate_gain, validate_ppm, validate_rtl_tcp_host, validate_rtl_tcp_port ) -from utils.sse import format_sse -from utils.event_pipeline import process_event +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import safe_terminate, register_process, 
unregister_process from utils.sdr import SDRFactory, SDRType, SDRValidationError from utils.dependencies import get_tool_path @@ -106,6 +108,62 @@ def log_message(msg: dict[str, Any]) -> None: logger.error(f"Failed to log message: {e}") +def audio_relay_thread( + rtl_stdout, + multimon_stdin, + output_queue: queue.Queue, + stop_event: threading.Event, +) -> None: + """Relay audio from rtl_fm to multimon-ng while computing signal levels. + + Reads raw 16-bit LE PCM from *rtl_stdout*, writes every chunk straight + through to *multimon_stdin*, and every ~100 ms pushes an RMS / peak scope + event onto *output_queue*. + """ + CHUNK = 4096 # bytes – 2048 samples at 16-bit mono + INTERVAL = 0.1 # seconds between scope updates + last_scope = time.monotonic() + + try: + while not stop_event.is_set(): + data = rtl_stdout.read(CHUNK) + if not data: + break + + # Forward audio untouched + try: + multimon_stdin.write(data) + multimon_stdin.flush() + except (BrokenPipeError, OSError): + break + + # Compute scope levels every ~100 ms + now = time.monotonic() + if now - last_scope >= INTERVAL: + last_scope = now + try: + n_samples = len(data) // 2 + if n_samples == 0: + continue + samples = struct.unpack(f'<{n_samples}h', data[:n_samples * 2]) + peak = max(abs(s) for s in samples) + rms = int(math.sqrt(sum(s * s for s in samples) / n_samples)) + output_queue.put_nowait({ + 'type': 'scope', + 'rms': rms, + 'peak': peak, + }) + except (struct.error, ValueError, queue.Full): + pass + except Exception as e: + logger.debug(f"Audio relay error: {e}") + finally: + try: + multimon_stdin.close() + except OSError: + pass + + def stream_decoder(master_fd: int, process: subprocess.Popen[bytes]) -> None: """Stream decoder output to queue using PTY for unbuffered output.""" try: @@ -152,6 +210,11 @@ def stream_decoder(master_fd: int, process: subprocess.Popen[bytes]) -> None: os.close(master_fd) except OSError: pass + # Signal relay thread to stop + with app_module.process_lock: + stop_relay = 
getattr(app_module.current_process, '_stop_relay', None) + if stop_relay: + stop_relay.set() # Cleanup companion rtl_fm process and decoder with app_module.process_lock: rtl_proc = getattr(app_module.current_process, '_rtl_process', None) @@ -319,7 +382,7 @@ def start_decoding() -> Response: multimon_process = subprocess.Popen( multimon_cmd, - stdin=rtl_process.stdout, + stdin=subprocess.PIPE, stdout=slave_fd, stderr=slave_fd, close_fds=True @@ -327,11 +390,22 @@ def start_decoding() -> Response: register_process(multimon_process) os.close(slave_fd) - rtl_process.stdout.close() + + # Spawn audio relay thread between rtl_fm and multimon-ng + stop_relay = threading.Event() + relay = threading.Thread( + target=audio_relay_thread, + args=(rtl_process.stdout, multimon_process.stdin, + app_module.output_queue, stop_relay), + ) + relay.daemon = True + relay.start() app_module.current_process = multimon_process app_module.current_process._rtl_process = rtl_process app_module.current_process._master_fd = master_fd + app_module.current_process._stop_relay = stop_relay + app_module.current_process._relay_thread = relay # Start output thread with PTY master fd thread = threading.Thread(target=stream_decoder, args=(master_fd, multimon_process)) @@ -380,6 +454,10 @@ def stop_decoding() -> Response: with app_module.process_lock: if app_module.current_process: + # Signal audio relay thread to stop + if hasattr(app_module.current_process, '_stop_relay'): + app_module.current_process._stop_relay.set() + # Kill rtl_fm process first if hasattr(app_module.current_process, '_rtl_process'): try: @@ -469,14 +547,14 @@ def stream() -> Response: keepalive_interval = 30.0 # Send keepalive every 30 seconds instead of 1 second while True: - try: - msg = app_module.output_queue.get(timeout=1) - last_keepalive = time.time() - try: - process_event('pager', msg, msg.get('type')) - except Exception: - pass - yield format_sse(msg) + try: + msg = app_module.output_queue.get(timeout=1) + 
last_keepalive = time.time() + try: + process_event('pager', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/sensor.py b/routes/sensor.py index e5a719e..e2110fb 100644 --- a/routes/sensor.py +++ b/routes/sensor.py @@ -18,8 +18,8 @@ from utils.validation import ( validate_frequency, validate_device_index, validate_gain, validate_ppm, validate_rtl_tcp_host, validate_rtl_tcp_port ) -from utils.sse import format_sse -from utils.event_pipeline import process_event +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import safe_terminate, register_process, unregister_process from utils.sdr import SDRFactory, SDRType @@ -45,6 +45,21 @@ def stream_sensor_output(process: subprocess.Popen[bytes]) -> None: data['type'] = 'sensor' app_module.sensor_queue.put(data) + # Push scope event when signal level data is present + rssi = data.get('rssi') + snr = data.get('snr') + noise = data.get('noise') + if rssi is not None or snr is not None: + try: + app_module.sensor_queue.put_nowait({ + 'type': 'scope', + 'rssi': rssi if rssi is not None else 0, + 'snr': snr if snr is not None else 0, + 'noise': noise if noise is not None else 0, + }) + except queue.Full: + pass + # Log if enabled if app_module.logging_enabled: try: @@ -80,6 +95,14 @@ def stream_sensor_output(process: subprocess.Popen[bytes]) -> None: sensor_active_device = None +@sensor_bp.route('/sensor/status') +def sensor_status() -> Response: + """Check if sensor decoder is currently running.""" + with app_module.sensor_lock: + running = app_module.sensor_process is not None and app_module.sensor_process.poll() is None + return jsonify({'running': running}) + + @sensor_bp.route('/start_sensor', methods=['POST']) def start_sensor() -> Response: global sensor_active_device @@ -158,6 +181,10 @@ def start_sensor() -> Response: full_cmd = ' '.join(cmd) 
logger.info(f"Running: {full_cmd}") + # Add signal level metadata so the frontend scope can display RSSI/SNR + # Disable stats reporting to suppress "row count limit 50 reached" warnings + cmd.extend(['-M', 'level', '-M', 'stats:0']) + try: app_module.sensor_process = subprocess.Popen( cmd, @@ -232,13 +259,13 @@ def stream_sensor() -> Response: while True: try: - msg = app_module.sensor_queue.get(timeout=1) - last_keepalive = time.time() - try: - process_event('sensor', msg, msg.get('type')) - except Exception: - pass - yield format_sse(msg) + msg = app_module.sensor_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('sensor', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/sstv.py b/routes/sstv.py index ed3676a..1029dec 100644 --- a/routes/sstv.py +++ b/routes/sstv.py @@ -15,14 +15,12 @@ from flask import Blueprint, jsonify, request, Response, send_file import app as app_module from utils.logging import get_logger -from utils.sse import format_sse -from utils.event_pipeline import process_event +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.sstv import ( get_sstv_decoder, is_sstv_available, ISS_SSTV_FREQ, - DecodeProgress, - DopplerInfo, ) logger = get_logger('intercept.sstv') @@ -36,14 +34,14 @@ _sstv_queue: queue.Queue = queue.Queue(maxsize=100) sstv_active_device: int | None = None -def _progress_callback(progress: DecodeProgress) -> None: - """Callback to queue progress updates for SSE stream.""" +def _progress_callback(data: dict) -> None: + """Callback to queue progress/scope updates for SSE stream.""" try: - _sstv_queue.put_nowait(progress.to_dict()) + _sstv_queue.put_nowait(data) except queue.Full: try: _sstv_queue.get_nowait() - _sstv_queue.put_nowait(progress.to_dict()) + _sstv_queue.put_nowait(data) except queue.Empty: pass @@ -399,14 +397,14 @@ def 
stream_progress(): keepalive_interval = 30.0 while True: - try: - progress = _sstv_queue.get(timeout=1) - last_keepalive = time.time() - try: - process_event('sstv', progress, progress.get('type')) - except Exception: - pass - yield format_sse(progress) + try: + progress = _sstv_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('sstv', progress, progress.get('type')) + except Exception: + pass + yield format_sse(progress) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/sstv_general.py b/routes/sstv_general.py index 5ebcbb2..0ddcbfb 100644 --- a/routes/sstv_general.py +++ b/routes/sstv_general.py @@ -17,7 +17,6 @@ from utils.logging import get_logger from utils.sse import format_sse from utils.event_pipeline import process_event from utils.sstv import ( - DecodeProgress, get_general_sstv_decoder, ) @@ -49,14 +48,14 @@ SSTV_FREQUENCIES = [ _FREQ_MODULATION_MAP = {entry['frequency']: entry['modulation'] for entry in SSTV_FREQUENCIES} -def _progress_callback(progress: DecodeProgress) -> None: - """Callback to queue progress updates for SSE stream.""" +def _progress_callback(data: dict) -> None: + """Callback to queue progress/scope updates for SSE stream.""" try: - _sstv_general_queue.put_nowait(progress.to_dict()) + _sstv_general_queue.put_nowait(data) except queue.Full: try: _sstv_general_queue.get_nowait() - _sstv_general_queue.put_nowait(progress.to_dict()) + _sstv_general_queue.put_nowait(data) except queue.Empty: pass diff --git a/routes/tscm.py b/routes/tscm.py index 5a3d31d..e110495 100644 --- a/routes/tscm.py +++ b/routes/tscm.py @@ -551,6 +551,12 @@ def _start_sweep_internal( } +@tscm_bp.route('/status') +def tscm_status(): + """Check if any TSCM operation is currently running.""" + return jsonify({'running': _sweep_running}) + + @tscm_bp.route('/sweep/start', methods=['POST']) def start_sweep(): """Start a TSCM sweep.""" diff --git a/routes/waterfall_websocket.py 
b/routes/waterfall_websocket.py new file mode 100644 index 0000000..5512d6f --- /dev/null +++ b/routes/waterfall_websocket.py @@ -0,0 +1,386 @@ +"""WebSocket-based waterfall streaming with I/Q capture and server-side FFT.""" + +import json +import queue +import socket +import subprocess +import threading +import time + +from flask import Flask + +try: + from flask_sock import Sock + WEBSOCKET_AVAILABLE = True +except ImportError: + WEBSOCKET_AVAILABLE = False + Sock = None + +from utils.logging import get_logger +from utils.process import safe_terminate, register_process, unregister_process +from utils.waterfall_fft import ( + build_binary_frame, + compute_power_spectrum, + cu8_to_complex, + quantize_to_uint8, +) +from utils.sdr import SDRFactory, SDRType +from utils.sdr.base import SDRCapabilities, SDRDevice + +logger = get_logger('intercept.waterfall_ws') + +# Maximum bandwidth per SDR type (Hz) +MAX_BANDWIDTH = { + SDRType.RTL_SDR: 2400000, + SDRType.HACKRF: 20000000, + SDRType.LIME_SDR: 20000000, + SDRType.AIRSPY: 10000000, + SDRType.SDRPLAY: 2000000, +} + + +def _resolve_sdr_type(sdr_type_str: str) -> SDRType: + """Convert client sdr_type string to SDRType enum.""" + mapping = { + 'rtlsdr': SDRType.RTL_SDR, + 'rtl_sdr': SDRType.RTL_SDR, + 'hackrf': SDRType.HACKRF, + 'limesdr': SDRType.LIME_SDR, + 'lime_sdr': SDRType.LIME_SDR, + 'airspy': SDRType.AIRSPY, + 'sdrplay': SDRType.SDRPLAY, + } + return mapping.get(sdr_type_str.lower(), SDRType.RTL_SDR) + + +def _build_dummy_device(device_index: int, sdr_type: SDRType) -> SDRDevice: + """Build a minimal SDRDevice for command building.""" + builder = SDRFactory.get_builder(sdr_type) + caps = builder.get_capabilities() + return SDRDevice( + sdr_type=sdr_type, + index=device_index, + name=f'{sdr_type.value}-{device_index}', + serial='N/A', + driver=sdr_type.value, + capabilities=caps, + ) + + +def init_waterfall_websocket(app: Flask): + """Initialize WebSocket waterfall streaming.""" + if not WEBSOCKET_AVAILABLE: + 
logger.warning("flask-sock not installed, WebSocket waterfall disabled") + return + + sock = Sock(app) + + @sock.route('/ws/waterfall') + def waterfall_stream(ws): + """WebSocket endpoint for real-time waterfall streaming.""" + logger.info("WebSocket waterfall client connected") + + # Import app module for device claiming + import app as app_module + + iq_process = None + reader_thread = None + stop_event = threading.Event() + claimed_device = None + # Queue for outgoing messages — only the main loop touches ws.send() + send_queue = queue.Queue(maxsize=120) + + try: + while True: + # Drain send queue first (non-blocking) + while True: + try: + outgoing = send_queue.get_nowait() + except queue.Empty: + break + try: + ws.send(outgoing) + except Exception: + stop_event.set() + break + + try: + msg = ws.receive(timeout=0.1) + except Exception as e: + err = str(e).lower() + if "closed" in err: + break + if "timed out" not in err: + logger.error(f"WebSocket receive error: {e}") + continue + + if msg is None: + # simple-websocket returns None on timeout AND on + # close; check ws.connected to tell them apart. 
+ if not ws.connected: + break + if stop_event.is_set(): + break + continue + + try: + data = json.loads(msg) + except (json.JSONDecodeError, TypeError): + continue + + cmd = data.get('cmd') + + if cmd == 'start': + # Stop any existing capture + was_restarting = iq_process is not None + stop_event.set() + if reader_thread and reader_thread.is_alive(): + reader_thread.join(timeout=2) + if iq_process: + safe_terminate(iq_process) + unregister_process(iq_process) + iq_process = None + if claimed_device is not None: + app_module.release_sdr_device(claimed_device) + claimed_device = None + stop_event.clear() + # Flush stale frames from previous capture + while not send_queue.empty(): + try: + send_queue.get_nowait() + except queue.Empty: + break + # Allow USB device to be released by the kernel + if was_restarting: + time.sleep(0.5) + + # Parse config + center_freq = float(data.get('center_freq', 100.0)) + span_mhz = float(data.get('span_mhz', 2.0)) + gain = data.get('gain') + if gain is not None: + gain = float(gain) + device_index = int(data.get('device', 0)) + sdr_type_str = data.get('sdr_type', 'rtlsdr') + fft_size = int(data.get('fft_size', 1024)) + fps = int(data.get('fps', 25)) + avg_count = int(data.get('avg_count', 4)) + ppm = data.get('ppm') + if ppm is not None: + ppm = int(ppm) + bias_t = bool(data.get('bias_t', False)) + + # Clamp FFT size to valid powers of 2 + fft_size = max(256, min(8192, fft_size)) + + # Resolve SDR type and bandwidth + sdr_type = _resolve_sdr_type(sdr_type_str) + max_bw = MAX_BANDWIDTH.get(sdr_type, 2400000) + span_hz = int(span_mhz * 1e6) + sample_rate = min(span_hz, max_bw) + + # Compute effective frequency range + effective_span_mhz = sample_rate / 1e6 + start_freq = center_freq - effective_span_mhz / 2 + end_freq = center_freq + effective_span_mhz / 2 + + # Claim the device + claim_err = app_module.claim_sdr_device(device_index, 'waterfall') + if claim_err: + ws.send(json.dumps({ + 'status': 'error', + 'message': claim_err, + 
'error_type': 'DEVICE_BUSY', + })) + continue + claimed_device = device_index + + # Build I/Q capture command + try: + builder = SDRFactory.get_builder(sdr_type) + device = _build_dummy_device(device_index, sdr_type) + iq_cmd = builder.build_iq_capture_command( + device=device, + frequency_mhz=center_freq, + sample_rate=sample_rate, + gain=gain, + ppm=ppm, + bias_t=bias_t, + ) + except NotImplementedError as e: + app_module.release_sdr_device(device_index) + claimed_device = None + ws.send(json.dumps({ + 'status': 'error', + 'message': str(e), + })) + continue + + # Spawn I/Q capture process (retry to handle USB release lag) + max_attempts = 3 if was_restarting else 1 + try: + for attempt in range(max_attempts): + logger.info( + f"Starting I/Q capture: {center_freq} MHz, " + f"span={effective_span_mhz:.1f} MHz, " + f"sr={sample_rate}, fft={fft_size}" + ) + iq_process = subprocess.Popen( + iq_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + bufsize=0, + ) + register_process(iq_process) + + # Brief check that process started + time.sleep(0.3) + if iq_process.poll() is not None: + unregister_process(iq_process) + iq_process = None + if attempt < max_attempts - 1: + logger.info( + f"I/Q process exited immediately, " + f"retrying ({attempt + 1}/{max_attempts})..." 
+ ) + time.sleep(0.5) + continue + raise RuntimeError( + "I/Q capture process exited immediately" + ) + break # Process started successfully + except Exception as e: + logger.error(f"Failed to start I/Q capture: {e}") + if iq_process: + safe_terminate(iq_process) + unregister_process(iq_process) + iq_process = None + app_module.release_sdr_device(device_index) + claimed_device = None + ws.send(json.dumps({ + 'status': 'error', + 'message': f'Failed to start I/Q capture: {e}', + })) + continue + + # Send started confirmation + ws.send(json.dumps({ + 'status': 'started', + 'start_freq': start_freq, + 'end_freq': end_freq, + 'fft_size': fft_size, + 'sample_rate': sample_rate, + })) + + # Start reader thread — puts frames on queue, never calls ws.send() + def fft_reader( + proc, _send_q, stop_evt, + _fft_size, _avg_count, _fps, + _start_freq, _end_freq, + ): + """Read I/Q from subprocess, compute FFT, enqueue binary frames.""" + bytes_per_frame = _fft_size * _avg_count * 2 + frame_interval = 1.0 / _fps + + try: + while not stop_evt.is_set(): + if proc.poll() is not None: + break + + frame_start = time.monotonic() + + # Read raw I/Q bytes + raw = b'' + remaining = bytes_per_frame + while remaining > 0 and not stop_evt.is_set(): + chunk = proc.stdout.read(min(remaining, 65536)) + if not chunk: + break + raw += chunk + remaining -= len(chunk) + + if len(raw) < _fft_size * 2: + break + + # Process FFT pipeline + samples = cu8_to_complex(raw) + power_db = compute_power_spectrum( + samples, + fft_size=_fft_size, + avg_count=_avg_count, + ) + quantized = quantize_to_uint8(power_db) + frame = build_binary_frame( + _start_freq, _end_freq, quantized, + ) + + try: + _send_q.put_nowait(frame) + except queue.Full: + # Drop frame if main loop can't keep up + pass + + # Pace to target FPS + elapsed = time.monotonic() - frame_start + sleep_time = frame_interval - elapsed + if sleep_time > 0: + stop_evt.wait(sleep_time) + + except Exception as e: + logger.debug(f"FFT reader stopped: 
{e}") + + reader_thread = threading.Thread( + target=fft_reader, + args=( + iq_process, send_queue, stop_event, + fft_size, avg_count, fps, + start_freq, end_freq, + ), + daemon=True, + ) + reader_thread.start() + + elif cmd == 'stop': + stop_event.set() + if reader_thread and reader_thread.is_alive(): + reader_thread.join(timeout=2) + reader_thread = None + if iq_process: + safe_terminate(iq_process) + unregister_process(iq_process) + iq_process = None + if claimed_device is not None: + app_module.release_sdr_device(claimed_device) + claimed_device = None + stop_event.clear() + ws.send(json.dumps({'status': 'stopped'})) + + except Exception as e: + logger.info(f"WebSocket waterfall closed: {e}") + finally: + # Cleanup + stop_event.set() + if reader_thread and reader_thread.is_alive(): + reader_thread.join(timeout=2) + if iq_process: + safe_terminate(iq_process) + unregister_process(iq_process) + if claimed_device is not None: + app_module.release_sdr_device(claimed_device) + # Complete WebSocket close handshake, then shut down the + # raw socket so Werkzeug cannot write its HTTP 200 response + # on top of the WebSocket stream (which browsers see as + # "Invalid frame header"). 
+ try: + ws.close() + except Exception: + pass + try: + ws.sock.shutdown(socket.SHUT_RDWR) + except Exception: + pass + try: + ws.sock.close() + except Exception: + pass + logger.info("WebSocket waterfall client disconnected") diff --git a/setup.sh b/setup.sh index 7cdf6f1..f41218a 100755 --- a/setup.sh +++ b/setup.sh @@ -165,6 +165,7 @@ detect_dragonos() { # Required tool checks (with alternates) # ---------------------------- missing_required=() +missing_recommended=() check_required() { local label="$1"; shift @@ -178,6 +179,18 @@ check_required() { fi } +check_recommended() { + local label="$1"; shift + local desc="$1"; shift + + if have_any "$@"; then + ok "${label} - ${desc}" + else + warn "${label} - ${desc} (missing, recommended)" + missing_recommended+=("$label") + fi +} + check_optional() { local label="$1"; shift local desc="$1"; shift @@ -230,6 +243,12 @@ check_tools() { check_required "hcitool" "Bluetooth scan utility" hcitool check_required "hciconfig" "Bluetooth adapter config" hciconfig + echo + info "GSM Intelligence:" + check_recommended "grgsm_scanner" "GSM tower scanner (gr-gsm)" grgsm_scanner + check_recommended "grgsm_livemon" "GSM live monitor (gr-gsm)" grgsm_livemon + check_recommended "tshark" "Packet analysis (Wireshark)" tshark + echo info "SoapySDR:" check_required "SoapySDRUtil" "SoapySDR CLI utility" SoapySDRUtil @@ -605,7 +624,7 @@ install_aiscatcher_from_source_macos() { } install_macos_packages() { - TOTAL_STEPS=17 + TOTAL_STEPS=18 CURRENT_STEP=0 progress "Checking Homebrew" @@ -694,6 +713,47 @@ install_macos_packages() { progress "Installing gpsd" brew_install gpsd + # gr-gsm for GSM Intelligence + progress "Installing gr-gsm" + if ! cmd_exists grgsm_scanner; then + brew_install gnuradio + (brew_install gr-gsm) || { + warn "gr-gsm not available in Homebrew, building from source..." + ( + tmp_dir="$(mktemp -d)" + trap 'rm -rf "$tmp_dir"' EXIT + + info "Cloning gr-gsm repository..." 
+ git clone --depth 1 https://github.com/bkerler/gr-gsm.git "$tmp_dir/gr-gsm" >/dev/null 2>&1 \ + || { warn "Failed to clone gr-gsm. GSM Spy feature will not work."; exit 1; } + + cd "$tmp_dir/gr-gsm" + mkdir -p build && cd build + info "Compiling gr-gsm (this may take several minutes)..." + if cmake .. >/dev/null 2>&1 && make -j$(sysctl -n hw.ncpu) >/dev/null 2>&1; then + if [[ -w /usr/local/lib ]]; then + make install >/dev/null 2>&1 + else + sudo make install >/dev/null 2>&1 + fi + ok "gr-gsm installed successfully from source" + else + warn "Failed to build gr-gsm. GSM Spy feature will not work." + fi + ) + } + else + ok "gr-gsm already installed" + fi + + # Wireshark (tshark) for GSM packet analysis + progress "Installing tshark" + if ! cmd_exists tshark; then + brew_install wireshark + else + ok "tshark already installed" + fi + progress "Installing Ubertooth tools (optional)" if ! cmd_exists ubertooth-btle; then echo @@ -979,7 +1039,7 @@ install_debian_packages() { export NEEDRESTART_MODE=a fi - TOTAL_STEPS=22 + TOTAL_STEPS=25 CURRENT_STEP=0 progress "Updating APT package lists" @@ -1104,6 +1164,82 @@ install_debian_packages() { progress "Installing gpsd" apt_install gpsd gpsd-clients || true + # gr-gsm for GSM Intelligence + progress "Installing GNU Radio and gr-gsm" + if ! cmd_exists grgsm_scanner; then + # Try to install gr-gsm directly from package repositories + apt_install gnuradio gnuradio-dev gr-osmosdr gr-gsm || { + warn "gr-gsm package not available in repositories. Attempting source build..." + + # Fallback: Build from source + progress "Building gr-gsm from source" + apt_install git cmake libboost-all-dev libcppunit-dev swig \ + doxygen liblog4cpp5-dev python3-scipy python3-numpy \ + libvolk-dev libuhd-dev libfftw3-dev || true + + info "Cloning gr-gsm repository..." + if [ -d /tmp/gr-gsm ]; then + rm -rf /tmp/gr-gsm + fi + + git clone https://github.com/bkerler/gr-gsm.git /tmp/gr-gsm || { + warn "Failed to clone gr-gsm repository. 
GSM Spy will not be available." + return 0 + } + + cd /tmp/gr-gsm + mkdir -p build && cd build + + # Try to find GNU Radio cmake files + if [ -d /usr/lib/x86_64-linux-gnu/cmake/gnuradio ]; then + export CMAKE_PREFIX_PATH="/usr/lib/x86_64-linux-gnu/cmake/gnuradio:$CMAKE_PREFIX_PATH" + fi + + info "Running CMake configuration..." + if cmake .. 2>/dev/null; then + info "Compiling gr-gsm (this may take several minutes)..." + if make -j$(nproc) 2>/dev/null; then + $SUDO make install + $SUDO ldconfig + cd ~ + rm -rf /tmp/gr-gsm + ok "gr-gsm built and installed successfully" + else + warn "gr-gsm compilation failed. GSM Spy feature will not work." + cd ~ + rm -rf /tmp/gr-gsm + fi + else + warn "gr-gsm CMake configuration failed. GNU Radio 3.8+ may not be available." + cd ~ + rm -rf /tmp/gr-gsm + fi + } + + # Verify installation + if cmd_exists grgsm_scanner; then + ok "gr-gsm installed successfully" + else + warn "gr-gsm installation incomplete. GSM Spy feature will not work." + fi + else + ok "gr-gsm already installed" + fi + + # Wireshark (tshark) for GSM packet analysis + progress "Installing tshark" + if ! cmd_exists tshark; then + # Pre-accept non-root capture prompt for non-interactive install + echo 'wireshark-common wireshark-common/install-setuid boolean true' | $SUDO debconf-set-selections + apt_install tshark || true + # Allow non-root capture + $SUDO dpkg-reconfigure wireshark-common 2>/dev/null || true + $SUDO usermod -a -G wireshark $USER 2>/dev/null || true + ok "tshark installed. You may need to re-login for wireshark group permissions." 
+ else + ok "tshark already installed" + fi + progress "Installing Python packages" apt_install python3-venv python3-pip || true # Install Python packages via apt (more reliable than pip on modern Debian/Ubuntu) @@ -1185,6 +1321,14 @@ final_summary_and_hard_fail() { exit 1 fi fi + + if [[ "${#missing_recommended[@]}" -gt 0 ]]; then + echo + warn "Missing RECOMMENDED tools (some features will not work):" + for t in "${missing_recommended[@]}"; do echo " - $t"; done + echo + warn "Install these for full functionality (GSM Intelligence, etc.)" + fi } # ---------------------------- diff --git a/static/css/components/function-strip.css b/static/css/components/function-strip.css index 8ff5c65..878ef84 100644 --- a/static/css/components/function-strip.css +++ b/static/css/components/function-strip.css @@ -19,6 +19,17 @@ min-width: max-content; } +/* Strip title badge */ +.function-strip .strip-title { + font-size: 9px; + font-weight: 700; + letter-spacing: 1.5px; + text-transform: uppercase; + color: var(--text-muted); + white-space: nowrap; + padding: 4px 0; +} + /* Stats */ .function-strip .strip-stat { display: flex; diff --git a/static/js/modes/listening-post.js b/static/js/modes/listening-post.js index afcea11..2987e39 100644 --- a/static/js/modes/listening-post.js +++ b/static/js/modes/listening-post.js @@ -69,6 +69,24 @@ const scannerPresets = { amateur70cm: { start: 420, end: 450, step: 25, mod: 'fm' } }; +/** + * Suggest the appropriate modulation for a given frequency (in MHz). + * Uses standard band allocations to pick AM, NFM, WFM, or USB. 
+ */ +function suggestModulation(freqMhz) { + if (freqMhz < 0.52) return 'am'; // LW/MW AM broadcast + if (freqMhz < 1.7) return 'am'; // MW AM broadcast + if (freqMhz < 30) return 'usb'; // HF/Shortwave + if (freqMhz < 88) return 'fm'; // VHF Low (public safety) + if (freqMhz < 108) return 'wfm'; // FM Broadcast + if (freqMhz < 137) return 'am'; // Airband + if (freqMhz < 174) return 'fm'; // VHF marine, 2m ham, pagers + if (freqMhz < 216) return 'wfm'; // VHF TV/DAB + if (freqMhz < 470) return 'fm'; // UHF various, 70cm, business/GMRS + if (freqMhz < 960) return 'wfm'; // UHF TV + return 'am'; // Microwave/ADS-B +} + const audioPresets = { fm: { freq: 98.1, mod: 'wfm' }, airband: { freq: 121.5, mod: 'am' }, // Emergency/guard frequency @@ -1886,6 +1904,8 @@ function initListeningPost() { // Connect radio knobs to scanner controls initRadioKnobControls(); + initWaterfallZoomControls(); + // Step dropdown - sync with scanner when changed const stepSelect = document.getElementById('radioScanStep'); if (stepSelect) { @@ -2312,8 +2332,7 @@ async function _startDirectListenInternal() { isDirectListening = false; updateDirectListenUI(false); if (resumeRfWaterfallAfterListening) { - resumeRfWaterfallAfterListening = false; - setTimeout(() => startWaterfall(), 200); + scheduleWaterfallResume(); } return; } @@ -2366,8 +2385,7 @@ async function _startDirectListenInternal() { isWaterfallRunning = true; const waterfallPanel = document.getElementById('waterfallPanel'); if (waterfallPanel) waterfallPanel.style.display = 'block'; - document.getElementById('startWaterfallBtn').style.display = 'none'; - document.getElementById('stopWaterfallBtn').style.display = 'block'; + setWaterfallControlButtons(true); startAudioWaterfall(); } updateDirectListenUI(true, freq); @@ -2379,8 +2397,7 @@ async function _startDirectListenInternal() { isDirectListening = false; updateDirectListenUI(false); if (resumeRfWaterfallAfterListening) { - resumeRfWaterfallAfterListening = false; - 
setTimeout(() => startWaterfall(), 200); + scheduleWaterfallResume(); } } finally { isRestarting = false; @@ -2537,7 +2554,7 @@ async function startWebSocketListen(config, audioPlayer) { /** * Stop direct listening */ -function stopDirectListen() { +async function stopDirectListen() { console.log('[LISTEN] Stopping'); // Clear all pending state @@ -2572,7 +2589,7 @@ function stopDirectListen() { } // Also stop via HTTP (fallback) - fetch('/listening/audio/stop', { method: 'POST' }).catch(() => {}); + const audioStopPromise = fetch('/listening/audio/stop', { method: 'POST' }).catch(() => {}); isDirectListening = false; currentSignalLevel = 0; @@ -2584,13 +2601,16 @@ function stopDirectListen() { } if (resumeRfWaterfallAfterListening) { - resumeRfWaterfallAfterListening = false; isWaterfallRunning = false; - setTimeout(() => startWaterfall(), 200); + setWaterfallControlButtons(false); + await Promise.race([ + audioStopPromise, + new Promise(resolve => setTimeout(resolve, 400)) + ]); + scheduleWaterfallResume(); } else if (waterfallMode === 'audio' && isWaterfallRunning) { isWaterfallRunning = false; - document.getElementById('startWaterfallBtn').style.display = 'block'; - document.getElementById('stopWaterfallBtn').style.display = 'none'; + setWaterfallControlButtons(false); } } @@ -3067,6 +3087,17 @@ let waterfallMode = 'rf'; let audioWaterfallAnimId = null; let lastAudioWaterfallDraw = 0; let resumeRfWaterfallAfterListening = false; +let waterfallResumeTimer = null; +let waterfallResumeAttempts = 0; +const WATERFALL_RESUME_MAX_ATTEMPTS = 8; +const WATERFALL_RESUME_RETRY_MS = 350; +const WATERFALL_ZOOM_MIN_MHZ = 0.1; +const WATERFALL_ZOOM_MAX_MHZ = 500; +const WATERFALL_DEFAULT_SPAN_MHZ = 2.0; + +// WebSocket waterfall state +let waterfallWebSocket = null; +let waterfallUseWebSocket = false; function resizeCanvasToDisplaySize(canvas) { if (!canvas) return false; @@ -3137,6 +3168,214 @@ function initWaterfallCanvas() { } } +function 
setWaterfallControlButtons(running) { + const startBtn = document.getElementById('startWaterfallBtn'); + const stopBtn = document.getElementById('stopWaterfallBtn'); + if (!startBtn || !stopBtn) return; + startBtn.style.display = running ? 'none' : 'inline-block'; + stopBtn.style.display = running ? 'inline-block' : 'none'; + const dot = document.getElementById('waterfallStripDot'); + if (dot) { + dot.className = running ? 'status-dot sweeping' : 'status-dot inactive'; + } +} + +function getWaterfallRangeFromInputs() { + const startInput = document.getElementById('waterfallStartFreq'); + const endInput = document.getElementById('waterfallEndFreq'); + const startVal = parseFloat(startInput?.value); + const endVal = parseFloat(endInput?.value); + const start = Number.isFinite(startVal) ? startVal : waterfallStartFreq; + const end = Number.isFinite(endVal) ? endVal : waterfallEndFreq; + return { start, end }; +} + +function updateWaterfallZoomLabel(start, end) { + const label = document.getElementById('waterfallZoomSpan'); + if (!label) return; + if (!Number.isFinite(start) || !Number.isFinite(end)) return; + const span = Math.max(0, end - start); + if (span >= 1) { + label.textContent = `${span.toFixed(1)} MHz`; + } else { + label.textContent = `${Math.round(span * 1000)} kHz`; + } +} + +function setWaterfallRange(center, span) { + if (!Number.isFinite(center) || !Number.isFinite(span)) return; + const clampedSpan = Math.max(WATERFALL_ZOOM_MIN_MHZ, Math.min(WATERFALL_ZOOM_MAX_MHZ, span)); + const half = clampedSpan / 2; + let start = center - half; + let end = center + half; + const minFreq = 0.01; + if (start < minFreq) { + end += (minFreq - start); + start = minFreq; + } + if (end <= start) { + end = start + WATERFALL_ZOOM_MIN_MHZ; + } + + waterfallStartFreq = start; + waterfallEndFreq = end; + + const startInput = document.getElementById('waterfallStartFreq'); + const endInput = document.getElementById('waterfallEndFreq'); + if (startInput) startInput.value = 
start.toFixed(3); + if (endInput) endInput.value = end.toFixed(3); + + const rangeLabel = document.getElementById('waterfallFreqRange'); + if (rangeLabel && !isWaterfallRunning) { + rangeLabel.textContent = `${start.toFixed(1)} - ${end.toFixed(1)} MHz`; + } + updateWaterfallZoomLabel(start, end); +} + +function getWaterfallCenterForZoom(start, end) { + const tuned = parseFloat(document.getElementById('radioScanStart')?.value || ''); + if (Number.isFinite(tuned) && tuned > 0) return tuned; + return (start + end) / 2; +} + +async function syncWaterfallToFrequency(freq, options = {}) { + const { autoStart = false, restartIfRunning = true, silent = true } = options; + const numericFreq = parseFloat(freq); + if (!Number.isFinite(numericFreq) || numericFreq <= 0) return { started: false }; + + const { start, end } = getWaterfallRangeFromInputs(); + const span = (Number.isFinite(start) && Number.isFinite(end) && end > start) + ? (end - start) + : WATERFALL_DEFAULT_SPAN_MHZ; + + setWaterfallRange(numericFreq, span); + + if (!autoStart) return { started: false }; + if (isDirectListening || waterfallMode === 'audio') return { started: false }; + + if (isWaterfallRunning && waterfallMode === 'rf' && restartIfRunning) { + // Reuse existing WebSocket to avoid USB device release race + if (waterfallUseWebSocket && waterfallWebSocket && waterfallWebSocket.readyState === WebSocket.OPEN) { + const sf = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88); + const ef = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108); + const fft = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024); + const g = parseInt(document.getElementById('waterfallGain')?.value || 40); + const dev = typeof getSelectedDevice === 'function' ? 
getSelectedDevice() : 0; + waterfallWebSocket.send(JSON.stringify({ + cmd: 'start', + center_freq: (sf + ef) / 2, + span_mhz: Math.max(0.1, ef - sf), + gain: g, + device: dev, + sdr_type: (typeof getSelectedSdrType === 'function') ? getSelectedSdrType() : 'rtlsdr', + fft_size: fft, + fps: 25, + avg_count: 4, + })); + return { started: true }; + } + await stopWaterfall(); + return await startWaterfall({ silent: silent }); + } + + if (!isWaterfallRunning) { + return await startWaterfall({ silent: silent }); + } + + return { started: true }; +} + +async function zoomWaterfall(direction) { + const { start, end } = getWaterfallRangeFromInputs(); + if (!Number.isFinite(start) || !Number.isFinite(end) || end <= start) return; + + const zoomIn = direction === 'in' || direction === '+'; + const zoomOut = direction === 'out' || direction === '-'; + if (!zoomIn && !zoomOut) return; + + const span = end - start; + const newSpan = zoomIn ? span / 2 : span * 2; + const center = getWaterfallCenterForZoom(start, end); + setWaterfallRange(center, newSpan); + + if (isWaterfallRunning && waterfallMode === 'rf' && !isDirectListening) { + // Reuse existing WebSocket to avoid USB device release race + if (waterfallUseWebSocket && waterfallWebSocket && waterfallWebSocket.readyState === WebSocket.OPEN) { + const sf = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88); + const ef = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108); + const fft = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024); + const g = parseInt(document.getElementById('waterfallGain')?.value || 40); + const dev = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0; + waterfallWebSocket.send(JSON.stringify({ + cmd: 'start', + center_freq: (sf + ef) / 2, + span_mhz: Math.max(0.1, ef - sf), + gain: g, + device: dev, + sdr_type: (typeof getSelectedSdrType === 'function') ? 
getSelectedSdrType() : 'rtlsdr', + fft_size: fft, + fps: 25, + avg_count: 4, + })); + } else { + await stopWaterfall(); + await startWaterfall({ silent: true }); + } + } +} + +function initWaterfallZoomControls() { + const startInput = document.getElementById('waterfallStartFreq'); + const endInput = document.getElementById('waterfallEndFreq'); + if (!startInput && !endInput) return; + + const sync = () => { + const { start, end } = getWaterfallRangeFromInputs(); + if (!Number.isFinite(start) || !Number.isFinite(end) || end <= start) return; + waterfallStartFreq = start; + waterfallEndFreq = end; + updateWaterfallZoomLabel(start, end); + }; + + if (startInput) startInput.addEventListener('input', sync); + if (endInput) endInput.addEventListener('input', sync); + sync(); +} + +function scheduleWaterfallResume() { + if (!resumeRfWaterfallAfterListening) return; + if (waterfallResumeTimer) { + clearTimeout(waterfallResumeTimer); + waterfallResumeTimer = null; + } + waterfallResumeAttempts = 0; + waterfallResumeTimer = setTimeout(attemptWaterfallResume, 200); +} + +async function attemptWaterfallResume() { + if (!resumeRfWaterfallAfterListening) return; + if (isDirectListening) { + waterfallResumeTimer = setTimeout(attemptWaterfallResume, WATERFALL_RESUME_RETRY_MS); + return; + } + + const result = await startWaterfall({ silent: true, resume: true }); + if (result && result.started) { + waterfallResumeTimer = null; + return; + } + + const retryable = result ? 
result.retryable : true; + if (retryable && waterfallResumeAttempts < WATERFALL_RESUME_MAX_ATTEMPTS) { + waterfallResumeAttempts += 1; + waterfallResumeTimer = setTimeout(attemptWaterfallResume, WATERFALL_RESUME_RETRY_MS); + return; + } + + resumeRfWaterfallAfterListening = false; + waterfallResumeTimer = null; +} + function setWaterfallMode(mode) { waterfallMode = mode; const header = document.getElementById('waterfallFreqRange'); @@ -3334,18 +3573,209 @@ function drawSpectrumLine(bins, startFreq, endFreq, labelUnit) { spectrumCtx.fill(); } -function startWaterfall() { +function connectWaterfallWebSocket(config) { + const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; + const wsUrl = `${protocol}//${window.location.host}/ws/waterfall`; + + return new Promise((resolve, reject) => { + try { + const ws = new WebSocket(wsUrl); + ws.binaryType = 'arraybuffer'; + + const timeout = setTimeout(() => { + ws.close(); + reject(new Error('WebSocket connection timeout')); + }, 5000); + + ws.onopen = () => { + clearTimeout(timeout); + ws.send(JSON.stringify({ cmd: 'start', ...config })); + }; + + ws.onmessage = (event) => { + if (typeof event.data === 'string') { + const msg = JSON.parse(event.data); + if (msg.status === 'started') { + waterfallWebSocket = ws; + waterfallUseWebSocket = true; + if (typeof msg.start_freq === 'number') waterfallStartFreq = msg.start_freq; + if (typeof msg.end_freq === 'number') waterfallEndFreq = msg.end_freq; + const rangeLabel = document.getElementById('waterfallFreqRange'); + if (rangeLabel) { + rangeLabel.textContent = `${waterfallStartFreq.toFixed(1)} - ${waterfallEndFreq.toFixed(1)} MHz`; + } + updateWaterfallZoomLabel(waterfallStartFreq, waterfallEndFreq); + resolve(ws); + } else if (msg.status === 'error') { + ws.close(); + reject(new Error(msg.message || 'WebSocket waterfall error')); + } else if (msg.status === 'stopped') { + // Server confirmed stop + } + } else if (event.data instanceof ArrayBuffer) { + const now = 
Date.now(); + if (now - lastWaterfallDraw < WATERFALL_MIN_INTERVAL_MS) return; + lastWaterfallDraw = now; + parseBinaryWaterfallFrame(event.data); + } + }; + + ws.onerror = () => { + clearTimeout(timeout); + reject(new Error('WebSocket connection failed')); + }; + + ws.onclose = () => { + if (waterfallUseWebSocket && isWaterfallRunning) { + waterfallWebSocket = null; + waterfallUseWebSocket = false; + isWaterfallRunning = false; + setWaterfallControlButtons(false); + if (typeof releaseDevice === 'function') { + releaseDevice('waterfall'); + } + } + }; + } catch (e) { + reject(e); + } + }); +} + +function parseBinaryWaterfallFrame(buffer) { + if (buffer.byteLength < 11) return; + const view = new DataView(buffer); + const msgType = view.getUint8(0); + if (msgType !== 0x01) return; + + const startFreq = view.getFloat32(1, true); + const endFreq = view.getFloat32(5, true); + const binCount = view.getUint16(9, true); + + if (buffer.byteLength < 11 + binCount) return; + + const bins = new Uint8Array(buffer, 11, binCount); + + waterfallStartFreq = startFreq; + waterfallEndFreq = endFreq; + const rangeLabel = document.getElementById('waterfallFreqRange'); + if (rangeLabel) { + rangeLabel.textContent = `${startFreq.toFixed(1)} - ${endFreq.toFixed(1)} MHz`; + } + updateWaterfallZoomLabel(startFreq, endFreq); + + drawWaterfallRowBinary(bins); + drawSpectrumLineBinary(bins, startFreq, endFreq); +} + +function drawWaterfallRowBinary(bins) { + if (!waterfallCtx || !waterfallCanvas) return; + const w = waterfallCanvas.width; + const h = waterfallCanvas.height; + const rowHeight = waterfallRowImage ? 
waterfallRowImage.height : 1; + + // Scroll existing content down + waterfallCtx.drawImage(waterfallCanvas, 0, 0, w, h - rowHeight, 0, rowHeight, w, h - rowHeight); + + if (!waterfallRowImage || waterfallRowImage.width !== w || waterfallRowImage.height !== rowHeight) { + waterfallRowImage = waterfallCtx.createImageData(w, rowHeight); + } + const rowData = waterfallRowImage.data; + const palette = waterfallPalette || buildWaterfallPalette(); + const binCount = bins.length; + + for (let x = 0; x < w; x++) { + const pos = (x / (w - 1)) * (binCount - 1); + const i0 = Math.floor(pos); + const i1 = Math.min(binCount - 1, i0 + 1); + const t = pos - i0; + // Interpolate between bins (already uint8, 0-255) + const val = Math.round(bins[i0] * (1 - t) + bins[i1] * t); + const color = palette[Math.max(0, Math.min(255, val))] || [0, 0, 0]; + for (let y = 0; y < rowHeight; y++) { + const offset = (y * w + x) * 4; + rowData[offset] = color[0]; + rowData[offset + 1] = color[1]; + rowData[offset + 2] = color[2]; + rowData[offset + 3] = 255; + } + } + waterfallCtx.putImageData(waterfallRowImage, 0, 0); +} + +function drawSpectrumLineBinary(bins, startFreq, endFreq) { + if (!spectrumCtx || !spectrumCanvas) return; + const w = spectrumCanvas.width; + const h = spectrumCanvas.height; + + spectrumCtx.clearRect(0, 0, w, h); + + // Background + spectrumCtx.fillStyle = 'rgba(0, 0, 0, 0.8)'; + spectrumCtx.fillRect(0, 0, w, h); + + // Grid lines + spectrumCtx.strokeStyle = 'rgba(0, 200, 255, 0.1)'; + spectrumCtx.lineWidth = 0.5; + for (let i = 0; i < 5; i++) { + const y = (h / 5) * i; + spectrumCtx.beginPath(); + spectrumCtx.moveTo(0, y); + spectrumCtx.lineTo(w, y); + spectrumCtx.stroke(); + } + + // Frequency labels + const dpr = window.devicePixelRatio || 1; + spectrumCtx.fillStyle = 'rgba(0, 200, 255, 0.5)'; + spectrumCtx.font = `${9 * dpr}px monospace`; + const freqRange = endFreq - startFreq; + for (let i = 0; i <= 4; i++) { + const freq = startFreq + (freqRange / 4) * i; + const x = (w 
/ 4) * i; + spectrumCtx.fillText(freq.toFixed(1), x + 2, h - 2); + } + + if (bins.length === 0) return; + + // Draw spectrum line — bins are pre-quantized 0-255 + spectrumCtx.strokeStyle = 'rgba(0, 255, 255, 0.9)'; + spectrumCtx.lineWidth = 1.5; + spectrumCtx.beginPath(); + for (let i = 0; i < bins.length; i++) { + const x = (i / (bins.length - 1)) * w; + const normalized = bins[i] / 255; + const y = h - 12 - normalized * (h - 16); + if (i === 0) spectrumCtx.moveTo(x, y); + else spectrumCtx.lineTo(x, y); + } + spectrumCtx.stroke(); + + // Fill under line + const lastX = w; + const lastY = h - 12 - (bins[bins.length - 1] / 255) * (h - 16); + spectrumCtx.lineTo(lastX, h); + spectrumCtx.lineTo(0, h); + spectrumCtx.closePath(); + spectrumCtx.fillStyle = 'rgba(0, 255, 255, 0.08)'; + spectrumCtx.fill(); +} + +async function startWaterfall(options = {}) { + const { silent = false, resume = false } = options; const startFreq = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88); const endFreq = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108); - const binSize = parseInt(document.getElementById('waterfallBinSize')?.value || 10000); + const fftSize = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024); const gain = parseInt(document.getElementById('waterfallGain')?.value || 40); const device = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0; initWaterfallCanvas(); const maxBins = Math.min(4096, Math.max(128, waterfallCanvas ? 
waterfallCanvas.width : 800)); if (startFreq >= endFreq) { - if (typeof showNotification === 'function') showNotification('Error', 'End frequency must be greater than start'); - return; + if (!silent && typeof showNotification === 'function') { + showNotification('Error', 'End frequency must be greater than start'); + } + return { started: false, retryable: false }; } waterfallStartFreq = startFreq; @@ -3354,69 +3784,165 @@ function startWaterfall() { if (rangeLabel) { rangeLabel.textContent = `${startFreq.toFixed(1)} - ${endFreq.toFixed(1)} MHz`; } + updateWaterfallZoomLabel(startFreq, endFreq); - if (isDirectListening) { + if (isDirectListening && !resume) { isWaterfallRunning = true; const waterfallPanel = document.getElementById('waterfallPanel'); if (waterfallPanel) waterfallPanel.style.display = 'block'; - document.getElementById('startWaterfallBtn').style.display = 'none'; - document.getElementById('stopWaterfallBtn').style.display = 'block'; + setWaterfallControlButtons(true); startAudioWaterfall(); - return; + resumeRfWaterfallAfterListening = true; + return { started: true }; + } + + if (isDirectListening && resume) { + return { started: false, retryable: true }; } setWaterfallMode('rf'); - const spanMhz = Math.max(0.1, waterfallEndFreq - waterfallStartFreq); + + // Try WebSocket path first (I/Q + server-side FFT) + const centerFreq = (startFreq + endFreq) / 2; + const spanMhz = Math.max(0.1, endFreq - startFreq); + + try { + const wsConfig = { + center_freq: centerFreq, + span_mhz: spanMhz, + gain: gain, + device: device, + sdr_type: (typeof getSelectedSdrType === 'function') ? 
getSelectedSdrType() : 'rtlsdr', + fft_size: fftSize, + fps: 25, + avg_count: 4, + }; + await connectWaterfallWebSocket(wsConfig); + + isWaterfallRunning = true; + setWaterfallControlButtons(true); + const waterfallPanel = document.getElementById('waterfallPanel'); + if (waterfallPanel) waterfallPanel.style.display = 'block'; + lastWaterfallDraw = 0; + initWaterfallCanvas(); + if (typeof reserveDevice === 'function') { + reserveDevice(parseInt(device), 'waterfall'); + } + if (resume || resumeRfWaterfallAfterListening) { + resumeRfWaterfallAfterListening = false; + } + if (waterfallResumeTimer) { + clearTimeout(waterfallResumeTimer); + waterfallResumeTimer = null; + } + console.log('[WATERFALL] WebSocket connected'); + return { started: true }; + } catch (wsErr) { + console.log('[WATERFALL] WebSocket unavailable, falling back to SSE:', wsErr.message); + } + + // Fallback: SSE / rtl_power path const segments = Math.max(1, Math.ceil(spanMhz / 2.4)); const targetSweepSeconds = 0.8; const interval = Math.max(0.1, Math.min(0.3, targetSweepSeconds / segments)); + const binSize = fftSize; - fetch('/listening/waterfall/start', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - start_freq: startFreq, - end_freq: endFreq, - bin_size: binSize, - gain: gain, - device: device, - max_bins: maxBins, - interval: interval, - }) - }) - .then(r => r.json()) - .then(data => { - if (data.status === 'started') { - isWaterfallRunning = true; - document.getElementById('startWaterfallBtn').style.display = 'none'; - document.getElementById('stopWaterfallBtn').style.display = 'block'; - const waterfallPanel = document.getElementById('waterfallPanel'); - if (waterfallPanel) waterfallPanel.style.display = 'block'; - lastWaterfallDraw = 0; - initWaterfallCanvas(); - connectWaterfallSSE(); - } else { - if (typeof showNotification === 'function') showNotification('Error', data.message || 'Failed to start waterfall'); + try { + const response = await 
fetch('/listening/waterfall/start', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + start_freq: startFreq, + end_freq: endFreq, + bin_size: binSize, + gain: gain, + device: device, + max_bins: maxBins, + interval: interval, + }) + }); + + let data = {}; + try { + data = await response.json(); + } catch (e) {} + + if (!response.ok || data.status !== 'started') { + if (!silent && typeof showNotification === 'function') { + showNotification('Error', data.message || 'Failed to start waterfall'); + } + return { + started: false, + retryable: response.status === 409 || data.error_type === 'DEVICE_BUSY' + }; } - }) - .catch(err => console.error('[WATERFALL] Start error:', err)); + + isWaterfallRunning = true; + setWaterfallControlButtons(true); + const waterfallPanel = document.getElementById('waterfallPanel'); + if (waterfallPanel) waterfallPanel.style.display = 'block'; + lastWaterfallDraw = 0; + initWaterfallCanvas(); + connectWaterfallSSE(); + if (typeof reserveDevice === 'function') { + reserveDevice(parseInt(device), 'waterfall'); + } + if (resume || resumeRfWaterfallAfterListening) { + resumeRfWaterfallAfterListening = false; + } + if (waterfallResumeTimer) { + clearTimeout(waterfallResumeTimer); + waterfallResumeTimer = null; + } + return { started: true }; + } catch (err) { + console.error('[WATERFALL] Start error:', err); + if (!silent && typeof showNotification === 'function') { + showNotification('Error', 'Failed to start waterfall'); + } + return { started: false, retryable: true }; + } } async function stopWaterfall() { if (waterfallMode === 'audio') { stopAudioWaterfall(); isWaterfallRunning = false; - document.getElementById('startWaterfallBtn').style.display = 'block'; - document.getElementById('stopWaterfallBtn').style.display = 'none'; + setWaterfallControlButtons(false); return; } + // WebSocket path + if (waterfallUseWebSocket && waterfallWebSocket) { + try { + if (waterfallWebSocket.readyState === 
WebSocket.OPEN) { + waterfallWebSocket.send(JSON.stringify({ cmd: 'stop' })); + } + waterfallWebSocket.close(); + } catch (e) { + console.error('[WATERFALL] WebSocket stop error:', e); + } + waterfallWebSocket = null; + waterfallUseWebSocket = false; + isWaterfallRunning = false; + setWaterfallControlButtons(false); + if (typeof releaseDevice === 'function') { + releaseDevice('waterfall'); + } + // Allow backend WebSocket handler to finish cleanup and release SDR + await new Promise(resolve => setTimeout(resolve, 300)); + return; + } + + // SSE fallback path try { await fetch('/listening/waterfall/stop', { method: 'POST' }); isWaterfallRunning = false; if (waterfallEventSource) { waterfallEventSource.close(); waterfallEventSource = null; } - document.getElementById('startWaterfallBtn').style.display = 'block'; - document.getElementById('stopWaterfallBtn').style.display = 'none'; + setWaterfallControlButtons(false); + if (typeof releaseDevice === 'function') { + releaseDevice('waterfall'); + } } catch (err) { console.error('[WATERFALL] Stop error:', err); } @@ -3436,6 +3962,7 @@ function connectWaterfallSSE() { if (rangeLabel) { rangeLabel.textContent = `${waterfallStartFreq.toFixed(1)} - ${waterfallEndFreq.toFixed(1)} MHz`; } + updateWaterfallZoomLabel(waterfallStartFreq, waterfallEndFreq); const now = Date.now(); if (now - lastWaterfallDraw < WATERFALL_MIN_INTERVAL_MS) return; lastWaterfallDraw = now; @@ -3462,17 +3989,51 @@ function bindWaterfallInteraction() { const ratio = Math.max(0, Math.min(1, x / rect.width)); const freq = waterfallStartFreq + ratio * (waterfallEndFreq - waterfallStartFreq); if (typeof tuneToFrequency === 'function') { - tuneToFrequency(freq, typeof currentModulation !== 'undefined' ? 
currentModulation : undefined); + tuneToFrequency(freq, suggestModulation(freq)); } }; + // Tooltip for showing frequency + modulation on hover + let tooltip = document.getElementById('waterfallTooltip'); + if (!tooltip) { + tooltip = document.createElement('div'); + tooltip.id = 'waterfallTooltip'; + tooltip.style.cssText = 'position:fixed;pointer-events:none;background:rgba(0,0,0,0.85);color:#0f0;padding:4px 8px;border-radius:4px;font-size:12px;font-family:monospace;z-index:9999;display:none;white-space:nowrap;border:1px solid #333;'; + document.body.appendChild(tooltip); + } + + const hoverHandler = (event) => { + if (waterfallMode === 'audio') { + tooltip.style.display = 'none'; + return; + } + const canvas = event.currentTarget; + const rect = canvas.getBoundingClientRect(); + const x = event.clientX - rect.left; + const ratio = Math.max(0, Math.min(1, x / rect.width)); + const freq = waterfallStartFreq + ratio * (waterfallEndFreq - waterfallStartFreq); + const mod = suggestModulation(freq); + tooltip.textContent = `${freq.toFixed(3)} MHz \u00b7 ${mod.toUpperCase()}`; + tooltip.style.left = (event.clientX + 12) + 'px'; + tooltip.style.top = (event.clientY - 28) + 'px'; + tooltip.style.display = 'block'; + }; + + const leaveHandler = () => { + tooltip.style.display = 'none'; + }; + if (waterfallCanvas) { waterfallCanvas.style.cursor = 'crosshair'; waterfallCanvas.addEventListener('click', handler); + waterfallCanvas.addEventListener('mousemove', hoverHandler); + waterfallCanvas.addEventListener('mouseleave', leaveHandler); } if (spectrumCanvas) { spectrumCanvas.style.cursor = 'crosshair'; spectrumCanvas.addEventListener('click', handler); + spectrumCanvas.addEventListener('mousemove', hoverHandler); + spectrumCanvas.addEventListener('mouseleave', leaveHandler); } } @@ -3497,3 +4058,5 @@ window.manualSignalGuess = manualSignalGuess; window.guessSignal = guessSignal; window.startWaterfall = startWaterfall; window.stopWaterfall = stopWaterfall; 
+window.zoomWaterfall = zoomWaterfall; +window.syncWaterfallToFrequency = syncWaterfallToFrequency; diff --git a/static/js/modes/sstv-general.js b/static/js/modes/sstv-general.js index 0b89efe..3419315 100644 --- a/static/js/modes/sstv-general.js +++ b/static/js/modes/sstv-general.js @@ -11,6 +11,18 @@ const SSTVGeneral = (function() { let currentMode = null; let progress = 0; + // Signal scope state + let sstvGeneralScopeCtx = null; + let sstvGeneralScopeAnim = null; + let sstvGeneralScopeHistory = []; + const SSTV_GENERAL_SCOPE_LEN = 200; + let sstvGeneralScopeRms = 0; + let sstvGeneralScopePeak = 0; + let sstvGeneralScopeTargetRms = 0; + let sstvGeneralScopeTargetPeak = 0; + let sstvGeneralScopeMsgBurst = 0; + let sstvGeneralScopeTone = null; + /** * Initialize the SSTV General mode */ @@ -190,6 +202,136 @@ const SSTVGeneral = (function() { `; } + /** + * Initialize signal scope canvas + */ + function initSstvGeneralScope() { + const canvas = document.getElementById('sstvGeneralScopeCanvas'); + if (!canvas) return; + const rect = canvas.getBoundingClientRect(); + canvas.width = rect.width * (window.devicePixelRatio || 1); + canvas.height = rect.height * (window.devicePixelRatio || 1); + sstvGeneralScopeCtx = canvas.getContext('2d'); + sstvGeneralScopeHistory = new Array(SSTV_GENERAL_SCOPE_LEN).fill(0); + sstvGeneralScopeRms = 0; + sstvGeneralScopePeak = 0; + sstvGeneralScopeTargetRms = 0; + sstvGeneralScopeTargetPeak = 0; + sstvGeneralScopeMsgBurst = 0; + sstvGeneralScopeTone = null; + drawSstvGeneralScope(); + } + + /** + * Draw signal scope animation frame + */ + function drawSstvGeneralScope() { + const ctx = sstvGeneralScopeCtx; + if (!ctx) return; + const W = ctx.canvas.width; + const H = ctx.canvas.height; + const midY = H / 2; + + // Phosphor persistence + ctx.fillStyle = 'rgba(5, 5, 16, 0.3)'; + ctx.fillRect(0, 0, W, H); + + // Smooth towards target + sstvGeneralScopeRms += (sstvGeneralScopeTargetRms - sstvGeneralScopeRms) * 0.25; + sstvGeneralScopePeak 
+= (sstvGeneralScopeTargetPeak - sstvGeneralScopePeak) * 0.15; + + // Push to history + sstvGeneralScopeHistory.push(Math.min(sstvGeneralScopeRms / 32768, 1.0)); + if (sstvGeneralScopeHistory.length > SSTV_GENERAL_SCOPE_LEN) sstvGeneralScopeHistory.shift(); + + // Grid lines + ctx.strokeStyle = 'rgba(60, 40, 80, 0.4)'; + ctx.lineWidth = 0.5; + for (let i = 1; i < 4; i++) { + const y = (H / 4) * i; + ctx.beginPath(); ctx.moveTo(0, y); ctx.lineTo(W, y); ctx.stroke(); + } + for (let i = 1; i < 8; i++) { + const x = (W / 8) * i; + ctx.beginPath(); ctx.moveTo(x, 0); ctx.lineTo(x, H); ctx.stroke(); + } + + // Waveform + const stepX = W / (SSTV_GENERAL_SCOPE_LEN - 1); + ctx.strokeStyle = '#c080ff'; + ctx.lineWidth = 1.5; + ctx.shadowColor = '#c080ff'; + ctx.shadowBlur = 4; + + // Upper half + ctx.beginPath(); + for (let i = 0; i < sstvGeneralScopeHistory.length; i++) { + const x = i * stepX; + const amp = sstvGeneralScopeHistory[i] * midY * 0.9; + const y = midY - amp; + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + } + ctx.stroke(); + + // Lower half (mirror) + ctx.beginPath(); + for (let i = 0; i < sstvGeneralScopeHistory.length; i++) { + const x = i * stepX; + const amp = sstvGeneralScopeHistory[i] * midY * 0.9; + const y = midY + amp; + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + } + ctx.stroke(); + ctx.shadowBlur = 0; + + // Peak indicator + const peakNorm = Math.min(sstvGeneralScopePeak / 32768, 1.0); + if (peakNorm > 0.01) { + const peakY = midY - peakNorm * midY * 0.9; + ctx.strokeStyle = 'rgba(255, 68, 68, 0.6)'; + ctx.lineWidth = 1; + ctx.setLineDash([4, 4]); + ctx.beginPath(); ctx.moveTo(0, peakY); ctx.lineTo(W, peakY); ctx.stroke(); + ctx.setLineDash([]); + } + + // Image decode flash + if (sstvGeneralScopeMsgBurst > 0.01) { + ctx.fillStyle = `rgba(0, 255, 100, ${sstvGeneralScopeMsgBurst * 0.15})`; + ctx.fillRect(0, 0, W, H); + sstvGeneralScopeMsgBurst *= 0.88; + } + + // Update labels + const rmsLabel = 
document.getElementById('sstvGeneralScopeRmsLabel'); + const peakLabel = document.getElementById('sstvGeneralScopePeakLabel'); + const toneLabel = document.getElementById('sstvGeneralScopeToneLabel'); + const statusLabel = document.getElementById('sstvGeneralScopeStatusLabel'); + if (rmsLabel) rmsLabel.textContent = Math.round(sstvGeneralScopeRms); + if (peakLabel) peakLabel.textContent = Math.round(sstvGeneralScopePeak); + if (toneLabel) { + if (sstvGeneralScopeTone === 'leader') { toneLabel.textContent = 'LEADER'; toneLabel.style.color = '#0f0'; } + else if (sstvGeneralScopeTone === 'sync') { toneLabel.textContent = 'SYNC'; toneLabel.style.color = '#0ff'; } + else if (sstvGeneralScopeTone === 'decoding') { toneLabel.textContent = 'DECODING'; toneLabel.style.color = '#fa0'; } + else if (sstvGeneralScopeTone === 'noise') { toneLabel.textContent = 'NOISE'; toneLabel.style.color = '#555'; } + else { toneLabel.textContent = 'QUIET'; toneLabel.style.color = '#444'; } + } + if (statusLabel) { + if (sstvGeneralScopeRms > 500) { statusLabel.textContent = 'SIGNAL'; statusLabel.style.color = '#0f0'; } + else { statusLabel.textContent = 'MONITORING'; statusLabel.style.color = '#555'; } + } + + sstvGeneralScopeAnim = requestAnimationFrame(drawSstvGeneralScope); + } + + /** + * Stop signal scope + */ + function stopSstvGeneralScope() { + if (sstvGeneralScopeAnim) { cancelAnimationFrame(sstvGeneralScopeAnim); sstvGeneralScopeAnim = null; } + sstvGeneralScopeCtx = null; + } + /** * Start SSE stream */ @@ -198,6 +340,11 @@ const SSTVGeneral = (function() { eventSource.close(); } + // Show and init scope + const scopePanel = document.getElementById('sstvGeneralScopePanel'); + if (scopePanel) scopePanel.style.display = 'block'; + initSstvGeneralScope(); + eventSource = new EventSource('/sstv-general/stream'); eventSource.onmessage = (e) => { @@ -205,6 +352,10 @@ const SSTVGeneral = (function() { const data = JSON.parse(e.data); if (data.type === 'sstv_progress') { 
handleProgress(data); + } else if (data.type === 'sstv_scope') { + sstvGeneralScopeTargetRms = data.rms; + sstvGeneralScopeTargetPeak = data.peak; + if (data.tone !== undefined) sstvGeneralScopeTone = data.tone; } } catch (err) { console.error('Failed to parse SSE message:', err); @@ -227,6 +378,9 @@ const SSTVGeneral = (function() { eventSource.close(); eventSource = null; } + stopSstvGeneralScope(); + const scopePanel = document.getElementById('sstvGeneralScopePanel'); + if (scopePanel) scopePanel.style.display = 'none'; } /** @@ -245,6 +399,7 @@ const SSTVGeneral = (function() { renderGallery(); showNotification('SSTV', 'New image decoded!'); updateStatusUI('listening', 'Listening...'); + sstvGeneralScopeMsgBurst = 1.0; // Clear decode progress so signal monitor can take over const liveContent = document.getElementById('sstvGeneralLiveContent'); if (liveContent) liveContent.innerHTML = ''; diff --git a/static/js/modes/sstv.js b/static/js/modes/sstv.js index 6bafdb0..ed6d13e 100644 --- a/static/js/modes/sstv.js +++ b/static/js/modes/sstv.js @@ -21,6 +21,18 @@ const SSTV = (function() { // ISS frequency const ISS_FREQ = 145.800; + // Signal scope state + let sstvScopeCtx = null; + let sstvScopeAnim = null; + let sstvScopeHistory = []; + const SSTV_SCOPE_LEN = 200; + let sstvScopeRms = 0; + let sstvScopePeak = 0; + let sstvScopeTargetRms = 0; + let sstvScopeTargetPeak = 0; + let sstvScopeMsgBurst = 0; + let sstvScopeTone = null; + /** * Initialize the SSTV mode */ @@ -634,6 +646,136 @@ const SSTV = (function() { `; } + /** + * Initialize signal scope canvas + */ + function initSstvScope() { + const canvas = document.getElementById('sstvScopeCanvas'); + if (!canvas) return; + const rect = canvas.getBoundingClientRect(); + canvas.width = rect.width * (window.devicePixelRatio || 1); + canvas.height = rect.height * (window.devicePixelRatio || 1); + sstvScopeCtx = canvas.getContext('2d'); + sstvScopeHistory = new Array(SSTV_SCOPE_LEN).fill(0); + sstvScopeRms = 0; + 
sstvScopePeak = 0; + sstvScopeTargetRms = 0; + sstvScopeTargetPeak = 0; + sstvScopeMsgBurst = 0; + sstvScopeTone = null; + drawSstvScope(); + } + + /** + * Draw signal scope animation frame + */ + function drawSstvScope() { + const ctx = sstvScopeCtx; + if (!ctx) return; + const W = ctx.canvas.width; + const H = ctx.canvas.height; + const midY = H / 2; + + // Phosphor persistence + ctx.fillStyle = 'rgba(5, 5, 16, 0.3)'; + ctx.fillRect(0, 0, W, H); + + // Smooth towards target + sstvScopeRms += (sstvScopeTargetRms - sstvScopeRms) * 0.25; + sstvScopePeak += (sstvScopeTargetPeak - sstvScopePeak) * 0.15; + + // Push to history + sstvScopeHistory.push(Math.min(sstvScopeRms / 32768, 1.0)); + if (sstvScopeHistory.length > SSTV_SCOPE_LEN) sstvScopeHistory.shift(); + + // Grid lines + ctx.strokeStyle = 'rgba(60, 40, 80, 0.4)'; + ctx.lineWidth = 0.5; + for (let i = 1; i < 4; i++) { + const y = (H / 4) * i; + ctx.beginPath(); ctx.moveTo(0, y); ctx.lineTo(W, y); ctx.stroke(); + } + for (let i = 1; i < 8; i++) { + const x = (W / 8) * i; + ctx.beginPath(); ctx.moveTo(x, 0); ctx.lineTo(x, H); ctx.stroke(); + } + + // Waveform + const stepX = W / (SSTV_SCOPE_LEN - 1); + ctx.strokeStyle = '#c080ff'; + ctx.lineWidth = 1.5; + ctx.shadowColor = '#c080ff'; + ctx.shadowBlur = 4; + + // Upper half + ctx.beginPath(); + for (let i = 0; i < sstvScopeHistory.length; i++) { + const x = i * stepX; + const amp = sstvScopeHistory[i] * midY * 0.9; + const y = midY - amp; + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + } + ctx.stroke(); + + // Lower half (mirror) + ctx.beginPath(); + for (let i = 0; i < sstvScopeHistory.length; i++) { + const x = i * stepX; + const amp = sstvScopeHistory[i] * midY * 0.9; + const y = midY + amp; + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + } + ctx.stroke(); + ctx.shadowBlur = 0; + + // Peak indicator + const peakNorm = Math.min(sstvScopePeak / 32768, 1.0); + if (peakNorm > 0.01) { + const peakY = midY - peakNorm * midY * 0.9; + 
ctx.strokeStyle = 'rgba(255, 68, 68, 0.6)'; + ctx.lineWidth = 1; + ctx.setLineDash([4, 4]); + ctx.beginPath(); ctx.moveTo(0, peakY); ctx.lineTo(W, peakY); ctx.stroke(); + ctx.setLineDash([]); + } + + // Image decode flash + if (sstvScopeMsgBurst > 0.01) { + ctx.fillStyle = `rgba(0, 255, 100, ${sstvScopeMsgBurst * 0.15})`; + ctx.fillRect(0, 0, W, H); + sstvScopeMsgBurst *= 0.88; + } + + // Update labels + const rmsLabel = document.getElementById('sstvScopeRmsLabel'); + const peakLabel = document.getElementById('sstvScopePeakLabel'); + const toneLabel = document.getElementById('sstvScopeToneLabel'); + const statusLabel = document.getElementById('sstvScopeStatusLabel'); + if (rmsLabel) rmsLabel.textContent = Math.round(sstvScopeRms); + if (peakLabel) peakLabel.textContent = Math.round(sstvScopePeak); + if (toneLabel) { + if (sstvScopeTone === 'leader') { toneLabel.textContent = 'LEADER'; toneLabel.style.color = '#0f0'; } + else if (sstvScopeTone === 'sync') { toneLabel.textContent = 'SYNC'; toneLabel.style.color = '#0ff'; } + else if (sstvScopeTone === 'decoding') { toneLabel.textContent = 'DECODING'; toneLabel.style.color = '#fa0'; } + else if (sstvScopeTone === 'noise') { toneLabel.textContent = 'NOISE'; toneLabel.style.color = '#555'; } + else { toneLabel.textContent = 'QUIET'; toneLabel.style.color = '#444'; } + } + if (statusLabel) { + if (sstvScopeRms > 500) { statusLabel.textContent = 'SIGNAL'; statusLabel.style.color = '#0f0'; } + else { statusLabel.textContent = 'MONITORING'; statusLabel.style.color = '#555'; } + } + + sstvScopeAnim = requestAnimationFrame(drawSstvScope); + } + + /** + * Stop signal scope + */ + function stopSstvScope() { + if (sstvScopeAnim) { cancelAnimationFrame(sstvScopeAnim); sstvScopeAnim = null; } + sstvScopeCtx = null; + } + /** * Start SSE stream */ @@ -642,6 +784,11 @@ const SSTV = (function() { eventSource.close(); } + // Show and init scope + const scopePanel = document.getElementById('sstvScopePanel'); + if (scopePanel) 
scopePanel.style.display = 'block'; + initSstvScope(); + eventSource = new EventSource('/sstv/stream'); eventSource.onmessage = (e) => { @@ -649,6 +796,10 @@ const SSTV = (function() { const data = JSON.parse(e.data); if (data.type === 'sstv_progress') { handleProgress(data); + } else if (data.type === 'sstv_scope') { + sstvScopeTargetRms = data.rms; + sstvScopeTargetPeak = data.peak; + if (data.tone !== undefined) sstvScopeTone = data.tone; } } catch (err) { console.error('Failed to parse SSE message:', err); @@ -671,6 +822,9 @@ const SSTV = (function() { eventSource.close(); eventSource = null; } + stopSstvScope(); + const scopePanel = document.getElementById('sstvScopePanel'); + if (scopePanel) scopePanel.style.display = 'none'; } /** @@ -691,6 +845,7 @@ const SSTV = (function() { renderGallery(); showNotification('SSTV', 'New image decoded!'); updateStatusUI('listening', 'Listening...'); + sstvScopeMsgBurst = 1.0; // Clear decode progress so signal monitor can take over const liveContent = document.getElementById('sstvLiveContent'); if (liveContent) liveContent.innerHTML = ''; diff --git a/templates/gsm_spy_dashboard.html b/templates/gsm_spy_dashboard.html new file mode 100644 index 0000000..dc673ad --- /dev/null +++ b/templates/gsm_spy_dashboard.html @@ -0,0 +1,2530 @@ + + + + + + GSM SPY // INTERCEPT - See the Invisible + + {% if offline_settings.fonts_source == 'local' %} + + {% else %} + + {% endif %} + + {% if offline_settings.assets_source == 'local' %} + + + {% else %} + + + {% endif %} + + + + + + + + + + + +
+
+ +
+ +
+
+ STANDBY +
+
+ + {% set active_mode = 'gsm' %} + {% include 'partials/nav.html' with context %} + + +
+
+
+ 0 + TOWERS +
+
+ 0 + DEVICES +
+
+ 0 + ROGUES +
+
+ 0 + SIGNALS +
+
+ - + CROWD +
+
+
+
+ STANDBY +
+
--:--:-- UTC
+ +
+
+ + +
+
+
+
Analytics Overview
+ +
+
+
+ +
+
+
📍
+
Velocity Tracking
+
+
+ Track device movement by analyzing Timing Advance transitions and cell handovers. + Estimates velocity and direction based on TA delta and cell sector patterns. +
+
+
+
0
+
Devices Tracked
+
+
+
- km/h
+
Avg Velocity
+
+
+
+ + +
+
+
👥
+
Crowd Density
+
+
+ Aggregate TMSI pings per cell sector to estimate crowd density. + Visualizes hotspots and congestion patterns across towers. +
+
+
+
0
+
Total Devices
+
+
+
0
+
Peak Sector
+
+
+
+ + +
+
+
📊
+
Life Patterns
+
+
+ Analyze 60-day historical data to identify recurring patterns in device behavior. + Detects work locations, commute routes, and daily routines. +
+
+
+
0
+
Patterns Found
+
+
+
0%
+
Confidence
+
+
+
+ + +
+
+
🔍
+
Neighbor Audit
+
+
+ Validate neighbor cell lists against expected network topology. + Detects inconsistencies that may indicate rogue towers. +
+
+
+
0
+
Neighbors
+
+
+
0
+
Anomalies
+
+
+
+ + +
+
+
📡
+
Traffic Correlation
+
+
+ Correlate uplink and downlink timing to identify communication patterns. + Maps device-to-device interactions and network flows. +
+
+
+
0
+
Paired Flows
+
+
+
0
+
Active Now
+
+
+
+
+
+
+
+ +
+ + + + +
+ +
+
+ + + + + +
+ +
+ GPS LOCATION +
+ + + +
+
+ + +
+ GSM SCANNER +
+ + + +
+
+
+
+ + + + + + + diff --git a/templates/index.html b/templates/index.html index fa82842..31305a6 100644 --- a/templates/index.html +++ b/templates/index.html @@ -64,6 +64,7 @@ + @@ -173,6 +174,10 @@ Vessels + + + GSM SPY + - - - {% include 'partials/modes/pager.html' %} {% include 'partials/modes/sensor.html' %} @@ -608,16 +585,6 @@ - - -