diff --git a/Dockerfile b/Dockerfile
index b4ceabc..85be1df 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,6 +9,9 @@ LABEL description="Signal Intelligence Platform for SDR monitoring"
# Set working directory
WORKDIR /app
+# Pre-accept tshark non-root capture prompt for non-interactive install
+RUN echo 'wireshark-common wireshark-common/install-setuid boolean true' | debconf-set-selections
+
# Install system dependencies for SDR tools
RUN apt-get update && apt-get install -y --no-install-recommends \
# RTL-SDR tools
@@ -54,11 +57,39 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
airspy \
limesuite \
hackrf \
+ # GSM Intelligence (tshark for packet parsing)
+ tshark \
# Utilities
curl \
procps \
&& rm -rf /var/lib/apt/lists/*
+# GSM Intelligence: gr-gsm (grgsm_scanner, grgsm_livemon)
+# Install from apt if available, otherwise build from source
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ gnuradio gr-osmosdr gr-gsm 2>/dev/null \
+ || ( \
+ apt-get install -y --no-install-recommends \
+ gnuradio gnuradio-dev gr-osmosdr \
+ git cmake libboost-all-dev libcppunit-dev swig \
+ doxygen liblog4cpp5-dev python3-scipy python3-numpy \
+ libvolk-dev libfftw3-dev build-essential \
+ && cd /tmp \
+ && git clone --depth 1 https://github.com/bkerler/gr-gsm.git \
+ && cd gr-gsm \
+ && mkdir build && cd build \
+ && cmake .. \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ && rm -rf /tmp/gr-gsm \
+ && apt-get remove -y gnuradio-dev libcppunit-dev swig doxygen \
+ liblog4cpp5-dev libvolk-dev build-essential git cmake \
+ && apt-get autoremove -y \
+ ) \
+ && rm -rf /var/lib/apt/lists/*
+
# Build dump1090-fa and acarsdec from source (packages not available in slim repos)
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
diff --git a/app.py b/app.py
index 829c104..000c061 100644
--- a/app.py
+++ b/app.py
@@ -39,6 +39,7 @@ from utils.constants import (
MAX_VESSEL_AGE_SECONDS,
MAX_DSC_MESSAGE_AGE_SECONDS,
MAX_DEAUTH_ALERTS_AGE_SECONDS,
+ MAX_GSM_AGE_SECONDS,
QUEUE_MAX_SIZE,
)
import logging
@@ -191,6 +192,16 @@ deauth_detector = None
deauth_detector_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE)
deauth_detector_lock = threading.Lock()
+# GSM Spy
+gsm_spy_scanner_running = False # Flag: scanner thread active
+gsm_spy_livemon_process = None # For grgsm_livemon process
+gsm_spy_monitor_process = None # For tshark monitoring process
+gsm_spy_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE)
+gsm_spy_lock = threading.Lock()
+gsm_spy_active_device = None
+gsm_spy_selected_arfcn = None
+gsm_spy_region = 'Americas' # Default band
+
# ============================================
# GLOBAL STATE DICTIONARIES
# ============================================
@@ -223,6 +234,16 @@ dsc_messages = DataStore(max_age_seconds=MAX_DSC_MESSAGE_AGE_SECONDS, name='dsc_
# Deauth alerts - using DataStore for automatic cleanup
deauth_alerts = DataStore(max_age_seconds=MAX_DEAUTH_ALERTS_AGE_SECONDS, name='deauth_alerts')
+# GSM Spy data stores
+gsm_spy_towers = DataStore(
+ max_age_seconds=MAX_GSM_AGE_SECONDS,
+ name='gsm_spy_towers'
+)
+gsm_spy_devices = DataStore(
+ max_age_seconds=MAX_GSM_AGE_SECONDS,
+ name='gsm_spy_devices'
+)
+
# Satellite state
satellite_passes = [] # Predicted satellite passes (not auto-cleaned, calculated)
@@ -235,6 +256,8 @@ cleanup_manager.register(adsb_aircraft)
cleanup_manager.register(ais_vessels)
cleanup_manager.register(dsc_messages)
cleanup_manager.register(deauth_alerts)
+cleanup_manager.register(gsm_spy_towers)
+cleanup_manager.register(gsm_spy_devices)
# ============================================
# SDR DEVICE REGISTRY
@@ -296,6 +319,10 @@ def require_login():
if request.path.startswith('/listening/audio/'):
return None
+ # Allow WebSocket upgrade requests (page load already required auth)
+ if request.path.startswith('/ws/'):
+ return None
+
# Controller API endpoints use API key auth, not session auth
# Allow agent push/pull endpoints without session login
if request.path.startswith('/controller/'):
@@ -666,6 +693,8 @@ def kill_all() -> Response:
global current_process, sensor_process, wifi_process, adsb_process, ais_process, acars_process
global aprs_process, aprs_rtl_process, dsc_process, dsc_rtl_process, bt_process
global dmr_process, dmr_rtl_process
+ global gsm_spy_livemon_process, gsm_spy_monitor_process
+ global gsm_spy_scanner_running, gsm_spy_active_device, gsm_spy_selected_arfcn, gsm_spy_region
# Import adsb and ais modules to reset their state
from routes import adsb as adsb_module
@@ -679,6 +708,7 @@ def kill_all() -> Response:
'dump1090', 'acarsdec', 'direwolf', 'AIS-catcher',
'hcitool', 'bluetoothctl', 'satdump', 'dsd',
'rtl_tcp', 'rtl_power', 'rtlamr', 'ffmpeg',
+ 'grgsm_scanner', 'grgsm_livemon', 'tshark'
]
for proc in processes_to_kill:
@@ -743,10 +773,33 @@ def kill_all() -> Response:
# Reset Bluetooth v2 scanner
try:
reset_bluetooth_scanner()
- killed.append('bluetooth_scanner')
+ killed.append('bluetooth')
except Exception:
pass
+ # Reset GSM Spy state
+ with gsm_spy_lock:
+ gsm_spy_scanner_running = False
+ gsm_spy_active_device = None
+ gsm_spy_selected_arfcn = None
+ gsm_spy_region = 'Americas'
+
+ if gsm_spy_livemon_process:
+ try:
+ if safe_terminate(gsm_spy_livemon_process):
+ killed.append('grgsm_livemon')
+ except Exception:
+ pass
+ gsm_spy_livemon_process = None
+
+ if gsm_spy_monitor_process:
+ try:
+ if safe_terminate(gsm_spy_monitor_process):
+ killed.append('tshark')
+ except Exception:
+ pass
+ gsm_spy_monitor_process = None
+
# Clear SDR device registry
with sdr_device_registry_lock:
sdr_device_registry.clear()
@@ -836,6 +889,26 @@ def main() -> None:
from utils.database import init_db
init_db()
+ # Register database cleanup functions
+ from utils.database import (
+ cleanup_old_gsm_signals,
+ cleanup_old_gsm_tmsi_log,
+ cleanup_old_gsm_velocity_log,
+ cleanup_old_signal_history,
+ cleanup_old_timeline_entries,
+ cleanup_old_dsc_alerts,
+ cleanup_old_payloads
+ )
+ # GSM cleanups: signals (60 days), TMSI log (24 hours), velocity (1 hour)
+ # Interval multiplier: cleanup every N cycles (60s interval = 1 cleanup per hour at multiplier 60)
+ cleanup_manager.register_db_cleanup(cleanup_old_gsm_tmsi_log, interval_multiplier=60) # Every hour
+ cleanup_manager.register_db_cleanup(cleanup_old_gsm_velocity_log, interval_multiplier=60) # Every hour
+ cleanup_manager.register_db_cleanup(cleanup_old_gsm_signals, interval_multiplier=1440) # Every 24 hours
+ cleanup_manager.register_db_cleanup(cleanup_old_signal_history, interval_multiplier=1440) # Every 24 hours
+ cleanup_manager.register_db_cleanup(cleanup_old_timeline_entries, interval_multiplier=1440) # Every 24 hours
+ cleanup_manager.register_db_cleanup(cleanup_old_dsc_alerts, interval_multiplier=1440) # Every 24 hours
+ cleanup_manager.register_db_cleanup(cleanup_old_payloads, interval_multiplier=1440) # Every 24 hours
+
# Start automatic cleanup of stale data entries
cleanup_manager.start()
@@ -875,6 +948,14 @@ def main() -> None:
except ImportError as e:
print(f"KiwiSDR audio proxy disabled: {e}")
+ # Initialize WebSocket for waterfall streaming
+ try:
+ from routes.waterfall_websocket import init_waterfall_websocket
+ init_waterfall_websocket(app)
+ print("WebSocket waterfall streaming enabled")
+ except ImportError as e:
+ print(f"WebSocket waterfall disabled: {e}")
+
print(f"Open http://localhost:{args.port} in your browser")
print()
print("Press Ctrl+C to stop")
diff --git a/config.py b/config.py
index 2bf9427..80b0a2d 100644
--- a/config.py
+++ b/config.py
@@ -228,6 +228,12 @@ ALERT_WEBHOOK_TIMEOUT = _get_env_int('ALERT_WEBHOOK_TIMEOUT', 5)
ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin')
ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin')
+# GSM Spy settings
+GSM_OPENCELLID_API_KEY = _get_env('GSM_OPENCELLID_API_KEY', '')
+GSM_OPENCELLID_API_URL = _get_env('GSM_OPENCELLID_API_URL', 'https://opencellid.org/cell/get')
+GSM_API_DAILY_LIMIT = _get_env_int('GSM_API_DAILY_LIMIT', 1000)
+GSM_TA_METERS_PER_UNIT = _get_env_int('GSM_TA_METERS_PER_UNIT', 554)
+
def configure_logging() -> None:
"""Configure application logging."""
logging.basicConfig(
diff --git a/routes/__init__.py b/routes/__init__.py
index 0cb7e1a..f6a81ce 100644
--- a/routes/__init__.py
+++ b/routes/__init__.py
@@ -32,6 +32,7 @@ def register_blueprints(app):
from .websdr import websdr_bp
from .alerts import alerts_bp
from .recordings import recordings_bp
+ from .gsm_spy import gsm_spy_bp
app.register_blueprint(pager_bp)
app.register_blueprint(sensor_bp)
@@ -63,6 +64,7 @@ def register_blueprints(app):
app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR
app.register_blueprint(alerts_bp) # Cross-mode alerts
app.register_blueprint(recordings_bp) # Session recordings
+ app.register_blueprint(gsm_spy_bp) # GSM cellular intelligence
# Initialize TSCM state with queue and lock from app
import app as app_module
diff --git a/routes/audio_websocket.py b/routes/audio_websocket.py
index 6d70d0b..4e2acf5 100644
--- a/routes/audio_websocket.py
+++ b/routes/audio_websocket.py
@@ -1,10 +1,11 @@
"""WebSocket-based audio streaming for SDR."""
+import json
+import shutil
+import socket
import subprocess
import threading
import time
-import shutil
-import json
from flask import Flask
# Try to import flask-sock
@@ -251,4 +252,19 @@ def init_audio_websocket(app: Flask):
finally:
with process_lock:
kill_audio_processes()
+ # Complete WebSocket close handshake, then shut down the
+ # raw socket so Werkzeug cannot write its HTTP 200 response
+ # on top of the WebSocket stream.
+ try:
+ ws.close()
+ except Exception:
+ pass
+ try:
+ ws.sock.shutdown(socket.SHUT_RDWR)
+ except Exception:
+ pass
+ try:
+ ws.sock.close()
+ except Exception:
+ pass
logger.info("WebSocket audio client disconnected")
diff --git a/routes/gsm_spy.py b/routes/gsm_spy.py
new file mode 100644
index 0000000..b1b5607
--- /dev/null
+++ b/routes/gsm_spy.py
@@ -0,0 +1,1730 @@
+"""GSM Spy route handlers for cellular tower and device tracking."""
+
+from __future__ import annotations
+
+import json
+import logging
+import os
+import queue
+import re
+import shutil
+import subprocess
+import threading
+import time
+from datetime import datetime, timedelta
+from typing import Any
+
+import requests
+from flask import Blueprint, Response, jsonify, render_template, request
+
+import app as app_module
+import config
+from config import SHARED_OBSERVER_LOCATION_ENABLED
+from utils.database import get_db
+from utils.process import register_process, safe_terminate, unregister_process
+from utils.sse import format_sse
+from utils.validation import validate_device_index
+
+from utils.logging import get_logger
+logger = get_logger('intercept.gsm_spy')
+logger.setLevel(logging.DEBUG) # GSM Spy needs verbose logging for diagnostics
+
+gsm_spy_bp = Blueprint('gsm_spy', __name__, url_prefix='/gsm_spy')
+
+# Regional band configurations (G-01)
+REGIONAL_BANDS = {
+ 'Americas': {
+ 'GSM850': {'start': 869e6, 'end': 894e6, 'arfcn_start': 128, 'arfcn_end': 251},
+ 'PCS1900': {'start': 1930e6, 'end': 1990e6, 'arfcn_start': 512, 'arfcn_end': 810}
+ },
+ 'Europe': {
+ 'GSM800': {'start': 832e6, 'end': 862e6, 'arfcn_start': 438, 'arfcn_end': 511}, # E-GSM800 downlink
+ 'GSM850': {'start': 869e6, 'end': 894e6, 'arfcn_start': 128, 'arfcn_end': 251}, # Also used in some EU countries
+ 'EGSM900': {'start': 925e6, 'end': 960e6, 'arfcn_start': 0, 'arfcn_end': 124},
+ 'DCS1800': {'start': 1805e6, 'end': 1880e6, 'arfcn_start': 512, 'arfcn_end': 885}
+ },
+ 'Asia': {
+ 'EGSM900': {'start': 925e6, 'end': 960e6, 'arfcn_start': 0, 'arfcn_end': 124},
+ 'DCS1800': {'start': 1805e6, 'end': 1880e6, 'arfcn_start': 512, 'arfcn_end': 885}
+ }
+}
+
+# Module state tracking
+gsm_using_service = False
+gsm_connected = False
+gsm_towers_found = 0
+gsm_devices_tracked = 0
+
+# Geocoding worker state
+_geocoding_worker_thread = None
+
+
+# ============================================
+# API Usage Tracking Helper Functions
+# ============================================
+
+def get_api_usage_today():
+ """Get OpenCellID API usage count for today."""
+ from utils.database import get_setting
+ today = datetime.now().date().isoformat()
+ usage_date = get_setting('gsm.opencellid.usage_date', '')
+
+ # Reset counter if new day
+ if usage_date != today:
+ from utils.database import set_setting
+ set_setting('gsm.opencellid.usage_date', today)
+ set_setting('gsm.opencellid.usage_count', 0)
+ return 0
+
+ return get_setting('gsm.opencellid.usage_count', 0)
+
+
+def increment_api_usage():
+ """Increment OpenCellID API usage counter."""
+ from utils.database import set_setting
+ current = get_api_usage_today()
+ set_setting('gsm.opencellid.usage_count', current + 1)
+ return current + 1
+
+
+def can_use_api():
+ """Check if we can make an API call within daily limit."""
+ current_usage = get_api_usage_today()
+ return current_usage < config.GSM_API_DAILY_LIMIT
+
+
+# ============================================
+# Background Geocoding Worker
+# ============================================
+
+def start_geocoding_worker():
+ """Start background thread for async geocoding."""
+ global _geocoding_worker_thread
+ if _geocoding_worker_thread is None or not _geocoding_worker_thread.is_alive():
+ _geocoding_worker_thread = threading.Thread(
+ target=geocoding_worker,
+ daemon=True,
+ name='gsm-geocoding-worker'
+ )
+ _geocoding_worker_thread.start()
+ logger.info("Started geocoding worker thread")
+
+
+def geocoding_worker():
+ """Worker thread processes pending geocoding requests."""
+ from utils.gsm_geocoding import lookup_cell_from_api, get_geocoding_queue
+
+ geocoding_queue = get_geocoding_queue()
+
+ while True:
+ try:
+ # Wait for pending tower with timeout
+ tower_data = geocoding_queue.get(timeout=5)
+
+ # Check rate limit
+ if not can_use_api():
+ current_usage = get_api_usage_today()
+ logger.warning(f"OpenCellID API rate limit reached ({current_usage}/{config.GSM_API_DAILY_LIMIT})")
+ geocoding_queue.task_done()
+ continue
+
+ # Call API
+ mcc = tower_data.get('mcc')
+ mnc = tower_data.get('mnc')
+ lac = tower_data.get('lac')
+ cid = tower_data.get('cid')
+
+ logger.debug(f"Geocoding tower via API: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+
+ coords = lookup_cell_from_api(mcc, mnc, lac, cid)
+
+ if coords:
+ # Update tower data with coordinates
+ tower_data['lat'] = coords['lat']
+ tower_data['lon'] = coords['lon']
+ tower_data['source'] = 'api'
+ tower_data['status'] = 'resolved'
+ tower_data['type'] = 'tower_update'
+
+ # Add optional fields if available
+ if coords.get('azimuth') is not None:
+ tower_data['azimuth'] = coords['azimuth']
+ if coords.get('range_meters') is not None:
+ tower_data['range_meters'] = coords['range_meters']
+ if coords.get('operator'):
+ tower_data['operator'] = coords['operator']
+ if coords.get('radio'):
+ tower_data['radio'] = coords['radio']
+
+ # Update DataStore
+ key = f"{mcc}_{mnc}_{lac}_{cid}"
+ app_module.gsm_spy_towers[key] = tower_data
+
+ # Send update to SSE stream
+ try:
+ app_module.gsm_spy_queue.put_nowait(tower_data)
+ logger.info(f"Resolved coordinates for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+ except queue.Full:
+ logger.warning("SSE queue full, dropping tower update")
+
+ # Increment API usage counter
+ usage_count = increment_api_usage()
+ logger.info(f"OpenCellID API call #{usage_count} today")
+
+ else:
+ logger.warning(f"Could not resolve coordinates for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+
+ geocoding_queue.task_done()
+
+ # Rate limiting between API calls (be nice to OpenCellID)
+ time.sleep(1)
+
+ except queue.Empty:
+ # No pending towers, continue waiting
+ continue
+ except Exception as e:
+ logger.error(f"Geocoding worker error: {e}", exc_info=True)
+ time.sleep(1)
+
+
+def arfcn_to_frequency(arfcn):
+ """Convert ARFCN to downlink frequency in Hz.
+
+ Uses REGIONAL_BANDS to determine the correct band and conversion formula.
+ Returns frequency in Hz (e.g., 925800000 for 925.8 MHz).
+ """
+ arfcn = int(arfcn)
+
+ # Search all bands to find which one this ARFCN belongs to
+ for region_bands in REGIONAL_BANDS.values():
+ for band_name, band_info in region_bands.items():
+ arfcn_start = band_info['arfcn_start']
+ arfcn_end = band_info['arfcn_end']
+
+ if arfcn_start <= arfcn <= arfcn_end:
+ # Found the right band, calculate frequency
+ # Downlink frequency = band_start + (arfcn - arfcn_start) * 200kHz
+ freq_hz = band_info['start'] + (arfcn - arfcn_start) * 200000
+ return int(freq_hz)
+
+ # If ARFCN not found in any band, raise error
+ raise ValueError(f"ARFCN {arfcn} not found in any known GSM band")
+
+
+def validate_band_names(bands: list[str], region: str) -> tuple[list[str], str | None]:
+ """Validate band names against REGIONAL_BANDS whitelist.
+
+ Args:
+ bands: List of band names from user input
+ region: Region name (Americas, Europe, Asia)
+
+ Returns:
+ Tuple of (validated_bands, error_message)
+ """
+ if not bands:
+ return [], None
+
+ region_bands = REGIONAL_BANDS.get(region)
+ if not region_bands:
+ return [], f"Invalid region: {region}"
+
+ valid_band_names = set(region_bands.keys())
+ invalid_bands = [b for b in bands if b not in valid_band_names]
+
+ if invalid_bands:
+ return [], (f"Invalid bands for {region}: {', '.join(invalid_bands)}. "
+ f"Valid bands: {', '.join(sorted(valid_band_names))}")
+
+ return bands, None
+
+
+def _start_monitoring_processes(arfcn: int, device_index: int) -> tuple[subprocess.Popen, subprocess.Popen]:
+ """Start grgsm_livemon and tshark processes for monitoring an ARFCN.
+
+ Returns:
+ Tuple of (grgsm_process, tshark_process)
+
+ Raises:
+ FileNotFoundError: If grgsm_livemon or tshark not found
+ RuntimeError: If grgsm_livemon exits immediately
+ """
+ frequency_hz = arfcn_to_frequency(arfcn)
+ frequency_mhz = frequency_hz / 1e6
+
+ # Check prerequisites
+ if not shutil.which('grgsm_livemon'):
+ raise FileNotFoundError('grgsm_livemon not found. Please install gr-gsm.')
+
+ # Start grgsm_livemon
+ grgsm_cmd = [
+ 'grgsm_livemon',
+ '--args', f'rtl={device_index}',
+ '-f', f'{frequency_mhz}M'
+ ]
+ env = dict(os.environ,
+ OSMO_FSM_DUP_CHECK_DISABLED='1',
+ PYTHONUNBUFFERED='1',
+ QT_QPA_PLATFORM='offscreen')
+ logger.info(f"Starting grgsm_livemon: {' '.join(grgsm_cmd)}")
+ grgsm_proc = subprocess.Popen(
+ grgsm_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ env=env
+ )
+ register_process(grgsm_proc)
+ logger.info(f"Started grgsm_livemon (PID: {grgsm_proc.pid})")
+
+ # Wait and check it didn't die immediately
+ time.sleep(2)
+
+ if grgsm_proc.poll() is not None:
+ # Process already exited - capture stderr for diagnostics
+ stderr_output = ''
+ try:
+ stderr_output = grgsm_proc.stderr.read()
+ except Exception:
+ pass
+ exit_code = grgsm_proc.returncode
+ logger.error(
+ f"grgsm_livemon exited immediately (code: {exit_code}). "
+ f"stderr: {stderr_output[:500]}"
+ )
+ unregister_process(grgsm_proc)
+ raise RuntimeError(
+ f'grgsm_livemon failed (exit code {exit_code}): {stderr_output[:200]}'
+ )
+
+ # Start stderr reader thread for grgsm_livemon diagnostics
+ def read_livemon_stderr():
+ try:
+ for line in iter(grgsm_proc.stderr.readline, ''):
+ if line:
+ logger.debug(f"grgsm_livemon stderr: {line.strip()}")
+ except Exception:
+ pass
+ threading.Thread(target=read_livemon_stderr, daemon=True).start()
+
+ # Start tshark
+ if not shutil.which('tshark'):
+ safe_terminate(grgsm_proc)
+ unregister_process(grgsm_proc)
+ raise FileNotFoundError('tshark not found. Please install wireshark/tshark.')
+
+ tshark_cmd = [
+        'tshark', '-l', '-i', 'lo',
+ '-Y', 'gsm_a.rr.timing_advance || gsm_a.tmsi || gsm_a.imsi',
+ '-T', 'fields',
+ '-e', 'gsm_a.rr.timing_advance',
+ '-e', 'gsm_a.tmsi',
+ '-e', 'gsm_a.imsi',
+ '-e', 'gsm_a.lac',
+ '-e', 'gsm_a.cellid'
+ ]
+ logger.info(f"Starting tshark: {' '.join(tshark_cmd)}")
+ tshark_proc = subprocess.Popen(
+ tshark_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ bufsize=1
+ )
+ register_process(tshark_proc)
+ logger.info(f"Started tshark (PID: {tshark_proc.pid})")
+
+ return grgsm_proc, tshark_proc
+
+
+def _start_and_register_monitor(arfcn: int, device_index: int) -> None:
+ """Start monitoring processes and register them in global state.
+
+ This is shared logic between start_monitor() and auto_start_monitor().
+ Must be called within gsm_spy_lock context.
+
+ Args:
+ arfcn: ARFCN to monitor
+ device_index: SDR device index
+ """
+ # Start monitoring processes
+ grgsm_proc, tshark_proc = _start_monitoring_processes(arfcn, device_index)
+ app_module.gsm_spy_livemon_process = grgsm_proc
+ app_module.gsm_spy_monitor_process = tshark_proc
+ app_module.gsm_spy_selected_arfcn = arfcn
+
+ # Start monitoring thread
+ monitor_thread_obj = threading.Thread(
+ target=monitor_thread,
+ args=(tshark_proc,),
+ daemon=True
+ )
+ monitor_thread_obj.start()
+
+
+@gsm_spy_bp.route('/dashboard')
+def dashboard():
+ """Render GSM Spy dashboard."""
+ return render_template(
+ 'gsm_spy_dashboard.html',
+ shared_observer_location=SHARED_OBSERVER_LOCATION_ENABLED
+ )
+
+
+@gsm_spy_bp.route('/start', methods=['POST'])
+def start_scanner():
+ """Start GSM scanner (G-01 BTS Scanner)."""
+ global gsm_towers_found, gsm_connected
+
+ with app_module.gsm_spy_lock:
+ if app_module.gsm_spy_scanner_running:
+ return jsonify({'error': 'Scanner already running'}), 400
+
+ data = request.get_json() or {}
+ device_index = data.get('device', 0)
+ region = data.get('region', 'Americas')
+ selected_bands = data.get('bands', []) # Get user-selected bands
+
+ # Validate device index
+ try:
+ device_index = validate_device_index(device_index)
+ except ValueError as e:
+ return jsonify({'error': str(e)}), 400
+
+ # Claim SDR device to prevent conflicts
+ from app import claim_sdr_device
+ claim_error = claim_sdr_device(device_index, 'GSM Spy')
+ if claim_error:
+ return jsonify({
+ 'error': claim_error,
+ 'error_type': 'DEVICE_BUSY'
+ }), 409
+
+ # If no bands selected, use all bands for the region (backwards compatibility)
+ if selected_bands:
+ validated_bands, error = validate_band_names(selected_bands, region)
+ if error:
+ from app import release_sdr_device
+ release_sdr_device(device_index)
+ return jsonify({'error': error}), 400
+ selected_bands = validated_bands
+ else:
+ region_bands = REGIONAL_BANDS.get(region, REGIONAL_BANDS['Americas'])
+ selected_bands = list(region_bands.keys())
+ logger.warning(f"No bands specified, using all bands for {region}: {selected_bands}")
+
+ # Build grgsm_scanner command
+ # Example: grgsm_scanner --args="rtl=0" -b GSM900
+ if not shutil.which('grgsm_scanner'):
+ from app import release_sdr_device
+ release_sdr_device(device_index)
+ return jsonify({'error': 'grgsm_scanner not found. Please install gr-gsm.'}), 500
+
+ try:
+ cmd = ['grgsm_scanner']
+
+ # Add device argument (--args for RTL-SDR device selection)
+ cmd.extend(['--args', f'rtl={device_index}'])
+
+ # Add selected band arguments
+ # Map EGSM900 to GSM900 since that's what grgsm_scanner expects
+ for band_name in selected_bands:
+ # Normalize band name (EGSM900 -> GSM900, remove EGSM prefix)
+ normalized_band = band_name.replace('EGSM', 'GSM')
+ cmd.extend(['-b', normalized_band])
+
+ logger.info(f"Starting GSM scanner: {' '.join(cmd)}")
+
+ # Set a flag to indicate scanner should run
+ app_module.gsm_spy_active_device = device_index
+ app_module.gsm_spy_region = region
+ app_module.gsm_spy_scanner_running = True # Use as flag initially
+
+ # Reset counters for new session
+ gsm_towers_found = 0
+ gsm_devices_tracked = 0
+
+ # Start geocoding worker (if not already running)
+ start_geocoding_worker()
+
+ # Start scanning thread (will run grgsm_scanner in a loop)
+ scanner_thread_obj = threading.Thread(
+ target=scanner_thread,
+ args=(cmd, device_index),
+ daemon=True
+ )
+ scanner_thread_obj.start()
+
+ gsm_connected = True
+
+ return jsonify({
+ 'status': 'started',
+ 'device': device_index,
+ 'region': region
+ })
+
+ except FileNotFoundError:
+ from app import release_sdr_device
+ release_sdr_device(device_index)
+ return jsonify({'error': 'grgsm_scanner not found. Please install gr-gsm.'}), 500
+ except Exception as e:
+ from app import release_sdr_device
+ release_sdr_device(device_index)
+ logger.error(f"Error starting GSM scanner: {e}")
+ return jsonify({'error': str(e)}), 500
+
+
+@gsm_spy_bp.route('/monitor', methods=['POST'])
+def start_monitor():
+ """Start monitoring specific tower (G-02 Decoding)."""
+ with app_module.gsm_spy_lock:
+ if app_module.gsm_spy_monitor_process:
+ return jsonify({'error': 'Monitor already running'}), 400
+
+ data = request.get_json() or {}
+ arfcn = data.get('arfcn')
+ device_index = data.get('device', app_module.gsm_spy_active_device or 0)
+
+        if arfcn is None:
+ return jsonify({'error': 'ARFCN required'}), 400
+
+ # Validate ARFCN is valid integer and in known GSM band ranges
+ try:
+ arfcn = int(arfcn)
+ # This will raise ValueError if ARFCN is not in any known band
+ arfcn_to_frequency(arfcn)
+ except (ValueError, TypeError) as e:
+ return jsonify({'error': f'Invalid ARFCN: {e}'}), 400
+
+ # Validate device index
+ try:
+ device_index = validate_device_index(device_index)
+ except ValueError as e:
+ return jsonify({'error': str(e)}), 400
+
+ try:
+ # Start and register monitoring (shared logic)
+ _start_and_register_monitor(arfcn, device_index)
+
+ return jsonify({
+ 'status': 'monitoring',
+ 'arfcn': arfcn,
+ 'device': device_index
+ })
+
+ except FileNotFoundError as e:
+ return jsonify({'error': f'Tool not found: {e}'}), 500
+ except Exception as e:
+ logger.error(f"Error starting monitor: {e}")
+ return jsonify({'error': str(e)}), 500
+
+
+@gsm_spy_bp.route('/stop', methods=['POST'])
+def stop_scanner():
+ """Stop GSM scanner and monitor."""
+ global gsm_connected, gsm_towers_found, gsm_devices_tracked
+
+ with app_module.gsm_spy_lock:
+ killed = []
+
+ # Stop scanner (now just a flag, thread will see it and exit)
+ if app_module.gsm_spy_scanner_running:
+ app_module.gsm_spy_scanner_running = False
+ killed.append('scanner')
+
+ # Terminate livemon process
+ if app_module.gsm_spy_livemon_process:
+ unregister_process(app_module.gsm_spy_livemon_process)
+ if safe_terminate(app_module.gsm_spy_livemon_process, timeout=5):
+ killed.append('livemon')
+ app_module.gsm_spy_livemon_process = None
+
+ # Terminate monitor process
+ if app_module.gsm_spy_monitor_process:
+ unregister_process(app_module.gsm_spy_monitor_process)
+ if safe_terminate(app_module.gsm_spy_monitor_process, timeout=5):
+ killed.append('monitor')
+ app_module.gsm_spy_monitor_process = None
+
+ # Release SDR device from registry
+ if app_module.gsm_spy_active_device is not None:
+ from app import release_sdr_device
+ release_sdr_device(app_module.gsm_spy_active_device)
+ app_module.gsm_spy_active_device = None
+ app_module.gsm_spy_selected_arfcn = None
+ gsm_connected = False
+ gsm_towers_found = 0
+ gsm_devices_tracked = 0
+
+ return jsonify({'status': 'stopped', 'killed': killed})
+
+
+@gsm_spy_bp.route('/stream')
+def stream():
+ """SSE stream for real-time GSM updates."""
+ def generate():
+ """Generate SSE events."""
+ logger.info("SSE stream connected - client subscribed")
+
+ # Send current state on connect (handles reconnects and late-joining clients)
+ existing_towers = dict(app_module.gsm_spy_towers.items())
+ logger.info(f"SSE sending {len(existing_towers)} existing towers on connect")
+ for key, tower_data in existing_towers.items():
+ yield format_sse(tower_data)
+
+ last_keepalive = time.time()
+ idle_count = 0 # Track consecutive idle checks to handle transitions
+
+ while True:
+ try:
+ # Check if scanner/monitor are still running
+ # Use idle counter to avoid disconnecting during scanner→monitor transition
+ if not app_module.gsm_spy_scanner_running and not app_module.gsm_spy_monitor_process:
+ idle_count += 1
+ if idle_count >= 5: # 5 seconds grace period for mode transitions
+ logger.info("SSE stream: no active scanner or monitor, disconnecting")
+ yield format_sse({'type': 'disconnected'})
+ break
+ else:
+ idle_count = 0
+
+ # Try to get data from queue
+ try:
+ data = app_module.gsm_spy_queue.get(timeout=1)
+ logger.info(f"SSE sending: type={data.get('type', '?')} keys={list(data.keys())}")
+ yield format_sse(data)
+ last_keepalive = time.time()
+ except queue.Empty:
+ # Send keepalive if needed
+ if time.time() - last_keepalive > 30:
+ yield format_sse({'type': 'keepalive'})
+ last_keepalive = time.time()
+
+ except GeneratorExit:
+ logger.info("SSE stream: client disconnected (GeneratorExit)")
+ break
+ except Exception as e:
+ logger.error(f"Error in GSM stream: {e}")
+ yield format_sse({'type': 'error', 'message': str(e)})
+ break
+
+ response = Response(generate(), mimetype='text/event-stream')
+ response.headers['Cache-Control'] = 'no-cache'
+ response.headers['X-Accel-Buffering'] = 'no'
+ response.headers['Connection'] = 'keep-alive'
+ return response
+
+
+@gsm_spy_bp.route('/status')
+def status():
+ """Get current GSM Spy status."""
+ api_usage = get_api_usage_today()
+ return jsonify({
+ 'running': bool(app_module.gsm_spy_scanner_running),
+ 'monitoring': app_module.gsm_spy_monitor_process is not None,
+ 'towers_found': gsm_towers_found,
+ 'devices_tracked': gsm_devices_tracked,
+ 'device': app_module.gsm_spy_active_device,
+ 'region': app_module.gsm_spy_region,
+ 'selected_arfcn': app_module.gsm_spy_selected_arfcn,
+ 'api_usage_today': api_usage,
+ 'api_limit': config.GSM_API_DAILY_LIMIT,
+ 'api_remaining': config.GSM_API_DAILY_LIMIT - api_usage
+ })
+
+
+@gsm_spy_bp.route('/lookup_cell', methods=['POST'])
+def lookup_cell():
+ """Lookup cell tower via OpenCellID (G-05)."""
+ data = request.get_json() or {}
+ mcc = data.get('mcc')
+ mnc = data.get('mnc')
+ lac = data.get('lac')
+ cid = data.get('cid')
+
+    if any(v is None for v in (mcc, mnc, lac, cid)):
+ return jsonify({'error': 'MCC, MNC, LAC, and CID required'}), 400
+
+ try:
+ # Check local cache first
+ with get_db() as conn:
+ result = conn.execute('''
+ SELECT lat, lon, azimuth, range_meters, operator, radio
+ FROM gsm_cells
+ WHERE mcc = ? AND mnc = ? AND lac = ? AND cid = ?
+ ''', (mcc, mnc, lac, cid)).fetchone()
+
+ if result:
+ return jsonify({
+ 'source': 'cache',
+ 'lat': result['lat'],
+ 'lon': result['lon'],
+ 'azimuth': result['azimuth'],
+ 'range': result['range_meters'],
+ 'operator': result['operator'],
+ 'radio': result['radio']
+ })
+
+ # Check API usage limit
+ if not can_use_api():
+ current_usage = get_api_usage_today()
+ return jsonify({
+ 'error': 'OpenCellID API daily limit reached',
+ 'usage_today': current_usage,
+ 'limit': config.GSM_API_DAILY_LIMIT
+ }), 429
+
+ # Call OpenCellID API
+ api_url = config.GSM_OPENCELLID_API_URL
+ params = {
+ 'key': config.GSM_OPENCELLID_API_KEY,
+ 'mcc': mcc,
+ 'mnc': mnc,
+ 'lac': lac,
+ 'cellid': cid,
+ 'format': 'json'
+ }
+
+ response = requests.get(api_url, params=params, timeout=10)
+
+ if response.status_code == 200:
+ cell_data = response.json()
+
+ # Increment API usage counter
+ usage_count = increment_api_usage()
+ logger.info(f"OpenCellID API call #{usage_count} today")
+
+ # Cache the result
+ conn.execute('''
+ INSERT OR REPLACE INTO gsm_cells
+ (mcc, mnc, lac, cid, lat, lon, azimuth, range_meters, samples, radio, operator, last_verified)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+ ''', (
+ mcc, mnc, lac, cid,
+ cell_data.get('lat'),
+ cell_data.get('lon'),
+ cell_data.get('azimuth'),
+ cell_data.get('range'),
+ cell_data.get('samples'),
+ cell_data.get('radio'),
+ cell_data.get('operator')
+ ))
+ conn.commit()
+
+ return jsonify({
+ 'source': 'api',
+ 'lat': cell_data.get('lat'),
+ 'lon': cell_data.get('lon'),
+ 'azimuth': cell_data.get('azimuth'),
+ 'range': cell_data.get('range'),
+ 'operator': cell_data.get('operator'),
+ 'radio': cell_data.get('radio')
+ })
+ else:
+ return jsonify({'error': 'Cell not found in OpenCellID'}), 404
+
+ except Exception as e:
+ logger.error(f"Error looking up cell: {e}")
+ return jsonify({'error': str(e)}), 500
+
+
@gsm_spy_bp.route('/detect_rogue', methods=['POST'])
def detect_rogue():
    """Analyze and flag rogue towers (G-07).

    Expects JSON ``{"tower": {...}}`` with optional keys ``mcc``, ``mnc``,
    ``lac``, ``cid``, ``arfcn`` and ``signal_strength``.  A tower is
    flagged as rogue when it is absent from the local gsm_cells cache
    and/or its reported signal is implausibly strong.  Flagged towers are
    persisted to the ``gsm_rogues`` table.

    Returns:
        JSON ``{"is_rogue": bool, "reasons": [str, ...]}``, 400 when no
        tower payload is supplied, or 500 on unexpected errors.
    """
    data = request.get_json() or {}
    tower_info = data.get('tower')

    if not tower_info:
        return jsonify({'error': 'Tower info required'}), 400

    try:
        is_rogue = False
        reasons = []

        # Check if tower exists in the local OpenCellID cache
        mcc = tower_info.get('mcc')
        mnc = tower_info.get('mnc')
        lac = tower_info.get('lac')
        cid = tower_info.get('cid')

        # Explicit None checks: 0 is a syntactically valid value for these
        # identifiers and must not be treated as "missing" (all([...]) was
        # skipping the lookup for legitimate zero values).
        if all(v is not None for v in (mcc, mnc, lac, cid)):
            with get_db() as conn:
                result = conn.execute('''
                    SELECT id FROM gsm_cells
                    WHERE mcc = ? AND mnc = ? AND lac = ? AND cid = ?
                ''', (mcc, mnc, lac, cid)).fetchone()

                if not result:
                    is_rogue = True
                    reasons.append('Tower not found in OpenCellID database')

        # Check signal strength anomalies.  BUGFIX: the previous default of
        # 0 dBm meant any report *without* a signal reading was flagged as
        # rogue (0 > -50).  Only evaluate when a reading is present.
        signal = tower_info.get('signal_strength')
        if signal is not None and signal > -50:  # Suspiciously strong signal
            is_rogue = True
            reasons.append(f'Unusually strong signal: {signal} dBm')

        # If rogue, insert into database (signal may be NULL when absent)
        if is_rogue:
            with get_db() as conn:
                conn.execute('''
                    INSERT INTO gsm_rogues
                    (arfcn, mcc, mnc, lac, cid, signal_strength, reason, threat_level)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    tower_info.get('arfcn'),
                    mcc, mnc, lac, cid,
                    signal,
                    '; '.join(reasons),
                    'high' if len(reasons) > 1 else 'medium'
                ))
                conn.commit()

        return jsonify({
            'is_rogue': is_rogue,
            'reasons': reasons
        })

    except Exception as e:
        logger.error(f"Error detecting rogue: {e}")
        return jsonify({'error': str(e)}), 500
+
+
@gsm_spy_bp.route('/towers')
def get_towers():
    """Return every tower currently held in the in-memory store as JSON."""
    return jsonify(list(app_module.gsm_spy_towers.values()))
+
+
@gsm_spy_bp.route('/devices')
def get_devices():
    """Return every tracked device (IMSI/TMSI) from the in-memory store."""
    return jsonify(list(app_module.gsm_spy_devices.values()))
+
+
@gsm_spy_bp.route('/rogues')
def get_rogues():
    """Return the 50 most recent unacknowledged rogue-tower records."""
    try:
        with get_db() as conn:
            rows = conn.execute('''
                SELECT * FROM gsm_rogues
                WHERE acknowledged = 0
                ORDER BY detected_at DESC
                LIMIT 50
            ''').fetchall()

        return jsonify([dict(row) for row in rows])
    except Exception as exc:
        logger.error(f"Error fetching rogues: {exc}")
        return jsonify({'error': str(exc)}), 500
+
+
+# ============================================
+# Advanced Features (G-08 through G-12)
+# ============================================
+
@gsm_spy_bp.route('/velocity', methods=['GET'])
def get_velocity_data():
    """Get velocity vectoring data for tracked devices (G-08).

    Query params:
        device_id: optional filter to a single device.
        minutes: look-back window, defaults to the last hour.
    """
    try:
        device_id = request.args.get('device_id')
        minutes = int(request.args.get('minutes', 60))  # Last 60 minutes by default

        with get_db() as conn:
            # Build the query incrementally so the device filter is optional.
            sql = '''
                SELECT * FROM gsm_velocity_log
                WHERE timestamp >= datetime('now', '-' || ? || ' minutes')
            '''
            args = [minutes]
            if device_id:
                sql += ' AND device_id = ?'
                args.append(device_id)
            sql += ' ORDER BY timestamp DESC LIMIT 100'

            rows = conn.execute(sql, args).fetchall()
            return jsonify([dict(row) for row in rows])
    except Exception as exc:
        logger.error(f"Error fetching velocity data: {exc}")
        return jsonify({'error': str(exc)}), 500
+
+
@gsm_spy_bp.route('/velocity/calculate', methods=['POST'])
def calculate_velocity():
    """Calculate velocity for a device based on TA transitions (G-08).

    Expects JSON ``{"device_id": <imsi-or-tmsi>}``.  Reads the two most
    recent timing-advance observations for the device, converts the TA
    delta to metres (TA * GSM_TA_METERS_PER_UNIT) and divides by the
    elapsed time between them.  The result is appended to
    ``gsm_velocity_log``.

    Returns:
        JSON with velocity in m/s and km/h, a 400 when device_id is
        missing, or 500 on unexpected errors.
    """
    data = request.get_json() or {}
    device_id = data.get('device_id')

    if not device_id:
        return jsonify({'error': 'device_id required'}), 400

    try:
        with get_db() as conn:
            # Get last two *usable* TA readings for this device.
            # BUGFIX: rows with a NULL ta_value previously crashed the
            # arithmetic below (None * float -> TypeError -> 500 response);
            # exclude them at the SQL level.
            results = conn.execute('''
                SELECT ta_value, cid, timestamp
                FROM gsm_signals
                WHERE (imsi = ? OR tmsi = ?)
                  AND ta_value IS NOT NULL
                ORDER BY timestamp DESC
                LIMIT 2
            ''', (device_id, device_id)).fetchall()

            if len(results) < 2:
                return jsonify({'velocity': 0, 'message': 'Insufficient data'})

            curr = dict(results[0])
            prev = dict(results[1])

            # Distance change implied by the TA delta (TA * ~554 m).
            curr_distance = curr['ta_value'] * config.GSM_TA_METERS_PER_UNIT
            prev_distance = prev['ta_value'] * config.GSM_TA_METERS_PER_UNIT
            distance_change = abs(curr_distance - prev_distance)

            # Elapsed time between the two observations.
            curr_time = datetime.fromisoformat(curr['timestamp'])
            prev_time = datetime.fromisoformat(prev['timestamp'])
            time_diff_seconds = (curr_time - prev_time).total_seconds()

            # Velocity in m/s; guard against identical timestamps.
            if time_diff_seconds > 0:
                velocity = distance_change / time_diff_seconds
            else:
                velocity = 0

            # Store in velocity log for the dashboard (G-08).
            conn.execute('''
                INSERT INTO gsm_velocity_log
                (device_id, prev_ta, curr_ta, prev_cid, curr_cid, estimated_velocity)
                VALUES (?, ?, ?, ?, ?, ?)
            ''', (device_id, prev['ta_value'], curr['ta_value'],
                  prev['cid'], curr['cid'], velocity))
            conn.commit()

            return jsonify({
                'device_id': device_id,
                'velocity_mps': round(velocity, 2),
                'velocity_kmh': round(velocity * 3.6, 2),
                'distance_change_m': round(distance_change, 2),
                'time_diff_s': round(time_diff_seconds, 2)
            })

    except Exception as e:
        logger.error(f"Error calculating velocity: {e}")
        return jsonify({'error': str(e)}), 500
+
+
@gsm_spy_bp.route('/crowd_density', methods=['GET'])
def get_crowd_density():
    """Get crowd density data by sector (G-09)."""

    def _density_level(count):
        # Bucket a unique-device count into a coarse density label.
        if count > 20:
            return 'high'
        return 'medium' if count > 10 else 'low'

    try:
        hours = int(request.args.get('hours', 1))  # Last 1 hour by default
        cid = request.args.get('cid')  # Optional: specific cell

        with get_db() as conn:
            # Count unique TMSI per cell in time window
            sql = '''
                SELECT
                    cid,
                    lac,
                    COUNT(DISTINCT tmsi) as unique_devices,
                    COUNT(*) as total_pings,
                    MIN(timestamp) as first_seen,
                    MAX(timestamp) as last_seen
                FROM gsm_tmsi_log
                WHERE timestamp >= datetime('now', '-' || ? || ' hours')
            '''
            args = [hours]
            if cid:
                sql += ' AND cid = ?'
                args.append(cid)
            sql += ' GROUP BY cid, lac ORDER BY unique_devices DESC'

            rows = conn.execute(sql, args).fetchall()
            payload = [{
                'cid': row['cid'],
                'lac': row['lac'],
                'unique_devices': row['unique_devices'],
                'total_pings': row['total_pings'],
                'first_seen': row['first_seen'],
                'last_seen': row['last_seen'],
                'density_level': _density_level(row['unique_devices']),
            } for row in rows]

        return jsonify(payload)

    except Exception as exc:
        logger.error(f"Error fetching crowd density: {exc}")
        return jsonify({'error': str(exc)}), 500
+
+
@gsm_spy_bp.route('/life_patterns', methods=['GET'])
def get_life_patterns():
    """Get life pattern analysis for a device (G-10)."""
    day_names = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
    try:
        device_id = request.args.get('device_id')
        if not device_id:
            # Return empty results gracefully when no device selected
            return jsonify({
                'device_id': None,
                'patterns': [],
                'message': 'No device selected'
            }), 200

        with get_db() as conn:
            # Aggregate 60 days of sightings by hour / weekday / cell.
            rows = conn.execute('''
                SELECT
                    strftime('%H', timestamp) as hour,
                    strftime('%w', timestamp) as day_of_week,
                    cid,
                    lac,
                    COUNT(*) as occurrences
                FROM gsm_signals
                WHERE (imsi = ? OR tmsi = ?)
                AND timestamp >= datetime('now', '-60 days')
                GROUP BY hour, day_of_week, cid, lac
                ORDER BY occurrences DESC
            ''', (device_id, device_id)).fetchall()

            patterns = [{
                'hour': int(row['hour']),
                'day_of_week': int(row['day_of_week']),
                'cid': row['cid'],
                'lac': row['lac'],
                'occurrences': row['occurrences'],
                'day_name': day_names[int(row['day_of_week'])],
            } for row in rows]

            # A "regular" location: among the five most frequent patterns
            # and observed at least three times.
            regular_locations = [{
                'cid': p['cid'],
                'typical_time': f"{p['day_name']} {p['hour']:02d}:00",
                'frequency': p['occurrences'],
            } for p in patterns[:5] if p['occurrences'] >= 3]

            return jsonify({
                'device_id': device_id,
                'patterns': patterns,
                'regular_locations': regular_locations,
                'total_observations': sum(p['occurrences'] for p in patterns)
            })

    except Exception as exc:
        logger.error(f"Error analyzing life patterns: {exc}")
        return jsonify({'error': str(exc)}), 500
+
+
@gsm_spy_bp.route('/neighbor_audit', methods=['GET'])
def neighbor_audit():
    """Audit neighbor cell lists for consistency (G-11)."""
    try:
        cid = request.args.get('cid')
        if not cid:
            # No tower selected: answer with an empty, well-formed payload.
            return jsonify({
                'cid': None,
                'neighbors': [],
                'inconsistencies': [],
                'message': 'No tower selected'
            }), 200

        with get_db() as conn:
            # Neighbor list is stored as JSON in the cell's metadata column.
            row = conn.execute('''
                SELECT metadata FROM gsm_cells WHERE cid = ?
            ''', (cid,)).fetchone()

            if not row or not row['metadata']:
                return jsonify({
                    'cid': cid,
                    'status': 'no_data',
                    'message': 'No neighbor list data available'
                })

            neighbor_list = json.loads(row['metadata']).get('neighbors', [])

            # Flag advertised neighbors we have never observed ourselves.
            issues = []
            for neighbor_cid in neighbor_list:
                known = conn.execute('''
                    SELECT id FROM gsm_cells WHERE cid = ?
                ''', (neighbor_cid,)).fetchone()

                if known is None:
                    issues.append({
                        'type': 'missing_neighbor',
                        'cid': neighbor_cid,
                        'message': f'Neighbor CID {neighbor_cid} not found in database'
                    })

            return jsonify({
                'cid': cid,
                'neighbor_count': len(neighbor_list),
                'neighbors': neighbor_list,
                'issues': issues,
                'status': 'suspicious' if issues else 'normal'
            })

    except Exception as exc:
        logger.error(f"Error auditing neighbors: {exc}")
        return jsonify({'error': str(exc)}), 500
+
+
@gsm_spy_bp.route('/traffic_correlation', methods=['GET'])
def traffic_correlation():
    """Correlate uplink/downlink traffic for pairing analysis (G-12).

    Query params:
        cid: cell id to analyze.
        minutes: look-back window (default 5).

    Returns JSON with one entry per active device seen on the cell inside
    the window: burst count, last-seen timestamp, latest TA value and a
    coarse activity level.
    """
    try:
        cid = request.args.get('cid')
        minutes = int(request.args.get('minutes', 5))

        with get_db() as conn:
            # Get recent signal activity for this cell (newest first)
            results = conn.execute('''
                SELECT
                    imsi,
                    tmsi,
                    ta_value,
                    timestamp,
                    metadata
                FROM gsm_signals
                WHERE cid = ?
                AND timestamp >= datetime('now', '-' || ? || ' minutes')
                ORDER BY timestamp DESC
            ''', (cid, minutes)).fetchall()

            # PERF: count bursts in a single pass over the rows we already
            # fetched instead of issuing one COUNT(*) query per device
            # (N+1 pattern).  A row is counted once per identity it carries.
            burst_counts = {}
            for row in results:
                for ident in {i for i in (row['imsi'], row['tmsi']) if i}:
                    burst_counts[ident] = burst_counts.get(ident, 0) + 1

            correlations = []
            seen_devices = set()

            # Rows are newest-first, so the first row per device supplies
            # its last_seen timestamp and latest TA value.
            for row in results:
                device_id = row['imsi'] or row['tmsi']
                if device_id and device_id not in seen_devices:
                    seen_devices.add(device_id)
                    bursts = burst_counts.get(device_id, 0)

                    correlations.append({
                        'device_id': device_id,
                        'burst_count': bursts,
                        'last_seen': row['timestamp'],
                        'ta_value': row['ta_value'],
                        'activity_level': 'high' if bursts > 10 else
                                          'medium' if bursts > 5 else 'low'
                    })

            return jsonify({
                'cid': cid,
                'time_window_minutes': minutes,
                'active_devices': len(correlations),
                'correlations': correlations
            })

    except Exception as e:
        logger.error(f"Error correlating traffic: {e}")
        return jsonify({'error': str(e)}), 500
+
+
+# ============================================
+# Helper Functions
+# ============================================
+
+def parse_grgsm_scanner_output(line: str) -> dict[str, Any] | None:
+ """Parse grgsm_scanner output line.
+
+ Actual output format (comma-separated key-value pairs):
+ ARFCN: 975, Freq: 925.2M, CID: 13522, LAC: 38722, MCC: 262, MNC: 1, Pwr: -58
+ """
+ try:
+ line = line.strip()
+
+ # Skip non-data lines (progress, config, neighbour info, blank)
+ if not line or 'ARFCN:' not in line:
+ return None
+
+ # Parse "ARFCN: 975, Freq: 925.2M, CID: 13522, LAC: 38722, MCC: 262, MNC: 1, Pwr: -58"
+ fields = {}
+ for part in line.split(','):
+ part = part.strip()
+ if ':' in part:
+ key, _, value = part.partition(':')
+ fields[key.strip()] = value.strip()
+
+ if 'ARFCN' in fields and 'CID' in fields:
+ cid = int(fields.get('CID', 0))
+ mcc = int(fields.get('MCC', 0))
+ mnc = int(fields.get('MNC', 0))
+
+ # Only skip entries with no network identity at all (MCC=0 AND MNC=0)
+ # CID=0 with valid MCC/MNC is a partially decoded cell - still useful
+ if mcc == 0 and mnc == 0:
+ logger.debug(f"Skipping unidentified ARFCN (MCC=0, MNC=0): {line}")
+ return None
+
+ # Freq may have 'M' suffix (e.g. "925.2M")
+ freq_str = fields.get('Freq', '0').rstrip('Mm')
+
+ data = {
+ 'type': 'tower',
+ 'arfcn': int(fields['ARFCN']),
+ 'frequency': float(freq_str),
+ 'cid': cid,
+ 'lac': int(fields.get('LAC', 0)),
+ 'mcc': mcc,
+ 'mnc': mnc,
+ 'signal_strength': float(fields.get('Pwr', -999)),
+ 'timestamp': datetime.now().isoformat()
+ }
+ return data
+
+ except Exception as e:
+ logger.debug(f"Failed to parse scanner line: {line} - {e}")
+
+ return None
+
+
+def parse_tshark_output(line: str) -> dict[str, Any] | None:
+ """Parse tshark filtered GSM output."""
+ try:
+ # tshark output format: ta_value\ttmsi\timsi\tlac\tcid
+ parts = line.strip().split('\t')
+
+ if len(parts) >= 5:
+ data = {
+ 'type': 'device',
+ 'ta_value': int(parts[0]) if parts[0] else None,
+ 'tmsi': parts[1] if parts[1] else None,
+ 'imsi': parts[2] if parts[2] else None,
+ 'lac': int(parts[3]) if parts[3] else None,
+ 'cid': int(parts[4]) if parts[4] else None,
+ 'timestamp': datetime.now().isoformat()
+ }
+
+ # Calculate distance from TA
+ if data['ta_value'] is not None:
+ data['distance_meters'] = data['ta_value'] * config.GSM_TA_METERS_PER_UNIT
+
+ return data
+
+ except Exception as e:
+ logger.debug(f"Failed to parse tshark line: {line} - {e}")
+
+ return None
+
+
def auto_start_monitor(tower_data):
    """Automatically start monitoring the strongest tower found.

    Args:
        tower_data: tower dict as produced by parse_grgsm_scanner_output;
            must contain 'arfcn', 'signal_strength' is used for logging.

    Starts the livemon process on the tower's ARFCN unless a monitor is
    already running, then pushes an SSE notification.
    """
    try:
        arfcn = tower_data.get('arfcn')
        # BUGFIX: ARFCN 0 is a valid (E-)GSM channel number, so test for a
        # missing key explicitly rather than for falsiness.
        if arfcn is None:
            logger.warning("Cannot auto-monitor: no ARFCN in tower data")
            return

        logger.info(f"Auto-monitoring strongest tower: ARFCN {arfcn}, Signal {tower_data.get('signal_strength')} dBm")

        # Brief delay to ensure scanner has stabilized
        time.sleep(2)

        with app_module.gsm_spy_lock:
            if app_module.gsm_spy_monitor_process:
                logger.info("Monitor already running, skipping auto-start")
                return

            device_index = app_module.gsm_spy_active_device or 0

            # Start and register monitoring (shared logic)
            _start_and_register_monitor(arfcn, device_index)

        # Send SSE notification
        try:
            app_module.gsm_spy_queue.put_nowait({
                'type': 'auto_monitor_started',
                'arfcn': arfcn,
                'tower': tower_data
            })
        except queue.Full:
            pass

        logger.info(f"Auto-monitoring started for ARFCN {arfcn}")

    except Exception as e:
        logger.error(f"Error in auto-monitoring: {e}")
+
+
def scanner_thread(cmd, device_index):
    """Thread to continuously run grgsm_scanner in a loop with non-blocking I/O.

    grgsm_scanner scans once and exits, so we loop it to provide
    continuous updates to the dashboard.

    Args:
        cmd: argv list for grgsm_scanner, passed straight to subprocess.Popen.
        device_index: SDR device index, handed to the monitor when this
            thread auto-switches into monitoring mode.
    """
    global gsm_towers_found  # module-level counter, incremented under gsm_spy_lock

    strongest_tower = None
    auto_monitor_triggered = False  # Moved outside loop - persists across scans
    scan_count = 0
    crash_count = 0  # fast-failure tally; 3 strikes stops the loop below
    process = None

    try:
        while app_module.gsm_spy_scanner_running:  # Flag check
            scan_count += 1
            logger.info(f"Starting GSM scan #{scan_count}")

            try:
                # Start scanner process
                # Set OSMO_FSM_DUP_CHECK_DISABLED to prevent libosmocore
                # abort on duplicate FSM registration (common with apt gr-gsm)
                env = dict(os.environ,
                           OSMO_FSM_DUP_CHECK_DISABLED='1',
                           PYTHONUNBUFFERED='1',
                           QT_QPA_PLATFORM='offscreen')
                process = subprocess.Popen(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    universal_newlines=True,
                    bufsize=1,
                    env=env
                )
                register_process(process)
                logger.info(f"Started grgsm_scanner (PID: {process.pid})")

                # Standard pattern: reader threads with queue
                output_queue_local = queue.Queue()

                def read_stdout():
                    # Drains the child's stdout so the pipe never blocks.
                    try:
                        for line in iter(process.stdout.readline, ''):
                            if line:
                                output_queue_local.put(('stdout', line))
                    except Exception as e:
                        logger.error(f"stdout read error: {e}")
                    finally:
                        output_queue_local.put(('eof', None))

                def read_stderr():
                    # No 'eof' sentinel here; stdout's sentinel ends the loop.
                    try:
                        for line in iter(process.stderr.readline, ''):
                            if line:
                                logger.debug(f"grgsm_scanner stderr: {line.strip()}")
                                # grgsm_scanner outputs scan results to stderr
                                output_queue_local.put(('stderr', line))
                    except Exception as e:
                        logger.error(f"stderr read error: {e}")

                stdout_thread = threading.Thread(target=read_stdout, daemon=True)
                stderr_thread = threading.Thread(target=read_stderr, daemon=True)
                stdout_thread.start()
                stderr_thread.start()

                # Process output with timeout
                scan_start = time.time()
                last_output = scan_start
                scan_timeout = 300  # 5 minute maximum per scan (4 bands takes ~2-3 min)

                while app_module.gsm_spy_scanner_running:
                    # Check if process died
                    if process.poll() is not None:
                        logger.info(f"Scanner exited (code: {process.returncode})")
                        break

                    # Get output from queue with timeout
                    try:
                        msg_type, line = output_queue_local.get(timeout=1.0)

                        if msg_type == 'eof':
                            break  # EOF

                        last_output = time.time()
                        stripped = line.strip()
                        logger.info(f"Scanner [{msg_type}]: {stripped}")

                        # Forward progress and status info to frontend
                        progress_match = re.match(r'Scanning:\s+([\d.]+)%\s+done', stripped)
                        if progress_match:
                            try:
                                app_module.gsm_spy_queue.put_nowait({
                                    'type': 'progress',
                                    'percent': float(progress_match.group(1)),
                                    'scan': scan_count
                                })
                            except queue.Full:
                                pass
                            continue
                        if stripped.startswith('Try scan CCCH'):
                            try:
                                app_module.gsm_spy_queue.put_nowait({
                                    'type': 'status',
                                    'message': stripped,
                                    'scan': scan_count
                                })
                            except queue.Full:
                                pass

                        parsed = parse_grgsm_scanner_output(line)
                        if parsed:
                            # Enrich with coordinates
                            from utils.gsm_geocoding import enrich_tower_data
                            enriched = enrich_tower_data(parsed)

                            # Store in DataStore
                            key = f"{enriched['mcc']}_{enriched['mnc']}_{enriched['lac']}_{enriched['cid']}"
                            app_module.gsm_spy_towers[key] = enriched

                            # Track strongest tower
                            signal = enriched.get('signal_strength', -999)
                            if strongest_tower is None or signal > strongest_tower.get('signal_strength', -999):
                                strongest_tower = enriched

                            # Queue for SSE
                            try:
                                app_module.gsm_spy_queue.put_nowait(enriched)
                            except queue.Full:
                                logger.warning("Queue full, dropping tower update")

                            # Thread-safe counter update
                            with app_module.gsm_spy_lock:
                                gsm_towers_found += 1
                    except queue.Empty:
                        # No output, check timeout
                        if time.time() - last_output > scan_timeout:
                            logger.warning(f"Scan timeout after {scan_timeout}s")
                            break

                # Drain remaining queue items after process exits
                while not output_queue_local.empty():
                    try:
                        msg_type, line = output_queue_local.get_nowait()
                        if line:
                            logger.info(f"Scanner [{msg_type}] (drain): {line.strip()}")
                    except queue.Empty:
                        break

                # Clean up process with timeout
                if process.poll() is None:
                    logger.info("Terminating scanner process")
                    safe_terminate(process, timeout=5)
                else:
                    process.wait()  # Reap zombie

                exit_code = process.returncode
                scan_duration = time.time() - scan_start
                logger.info(f"Scan #{scan_count} complete (exit code: {exit_code}, duration: {scan_duration:.1f}s)")

                # Notify frontend scan completed
                try:
                    app_module.gsm_spy_queue.put_nowait({
                        'type': 'scan_complete',
                        'scan': scan_count,
                        'duration': round(scan_duration, 1),
                        'towers_found': gsm_towers_found
                    })
                except queue.Full:
                    pass

                # Detect crash pattern: process exits too quickly with no data
                if scan_duration < 5 and exit_code != 0:
                    crash_count += 1
                    logger.error(
                        f"grgsm_scanner crashed on startup (exit code: {exit_code}). "
                        f"Crash count: {crash_count}. Check gr-gsm/libosmocore compatibility."
                    )
                    try:
                        app_module.gsm_spy_queue.put_nowait({
                            'type': 'error',
                            'message': f'grgsm_scanner crashed (exit code: {exit_code}). '
                                       'This may be a gr-gsm/libosmocore compatibility issue. '
                                       'Try rebuilding gr-gsm from source.',
                            'timestamp': time.strftime('%Y-%m-%dT%H:%M:%S')
                        })
                    except queue.Full:
                        pass
                    if crash_count >= 3:
                        logger.error("grgsm_scanner crashed 3 times, stopping scanner")
                        break

            except FileNotFoundError:
                logger.error(
                    "grgsm_scanner not found. Please install gr-gsm: "
                    "https://github.com/bkerler/gr-gsm"
                )
                # Send error to SSE stream so the UI knows
                try:
                    app_module.gsm_spy_queue.put({
                        'type': 'error',
                        'message': 'grgsm_scanner not found. Please install gr-gsm.',
                        'timestamp': time.strftime('%Y-%m-%dT%H:%M:%S')
                    })
                except Exception:
                    pass
                break  # Don't retry - binary won't appear

            except Exception as e:
                logger.error(f"Scanner scan error: {e}", exc_info=True)
                if process and process.poll() is None:
                    safe_terminate(process)

            # Check if should continue
            if not app_module.gsm_spy_scanner_running:
                break

            # After first scan completes: auto-switch to monitoring if towers found
            # Scanner process has exited so SDR is free for grgsm_livemon
            if not auto_monitor_triggered and strongest_tower and scan_count >= 1:
                auto_monitor_triggered = True
                arfcn = strongest_tower.get('arfcn')
                signal = strongest_tower.get('signal_strength', -999)
                logger.info(
                    f"Scan complete with towers found. Auto-switching to monitor mode "
                    f"on ARFCN {arfcn} (signal: {signal} dBm)"
                )

                # Stop scanner loop - SDR needed for monitoring
                app_module.gsm_spy_scanner_running = False

                try:
                    app_module.gsm_spy_queue.put_nowait({
                        'type': 'status',
                        'message': f'Switching to monitor mode on ARFCN {arfcn}...'
                    })
                except queue.Full:
                    pass

                # Start monitoring (SDR is free since scanner process exited)
                try:
                    with app_module.gsm_spy_lock:
                        if not app_module.gsm_spy_monitor_process:
                            _start_and_register_monitor(arfcn, device_index)
                            logger.info(f"Auto-monitoring started for ARFCN {arfcn}")

                            try:
                                app_module.gsm_spy_queue.put_nowait({
                                    'type': 'auto_monitor_started',
                                    'arfcn': arfcn,
                                    'tower': strongest_tower
                                })
                            except queue.Full:
                                pass
                except Exception as e:
                    logger.error(f"Error starting auto-monitor: {e}", exc_info=True)
                    try:
                        app_module.gsm_spy_queue.put_nowait({
                            'type': 'error',
                            'message': f'Monitor failed: {e}'
                        })
                    except queue.Full:
                        pass
                    # Resume scanning if monitor failed
                    app_module.gsm_spy_scanner_running = True

                break  # Exit scanner loop (monitoring takes over)

            # Wait between scans with responsive flag checking
            logger.info("Waiting 5 seconds before next scan")
            for i in range(5):
                if not app_module.gsm_spy_scanner_running:
                    break
                time.sleep(1)

    except Exception as e:
        logger.error(f"Scanner thread fatal error: {e}", exc_info=True)

    finally:
        # Always cleanup
        if process and process.poll() is None:
            safe_terminate(process, timeout=5)

        logger.info("Scanner thread terminated")

        # Reset global state - but don't release SDR if monitoring took over
        with app_module.gsm_spy_lock:
            app_module.gsm_spy_scanner_running = False
            if app_module.gsm_spy_monitor_process is None:
                # No monitor running - release SDR device
                if app_module.gsm_spy_active_device is not None:
                    from app import release_sdr_device
                    release_sdr_device(app_module.gsm_spy_active_device)
                    app_module.gsm_spy_active_device = None
            else:
                logger.info("Monitor is running, keeping SDR device allocated")
+
+
def monitor_thread(process):
    """Thread to read tshark output using standard iter pattern.

    Args:
        process: running tshark subprocess; its stdout lines are parsed
            into device records, stored in the in-memory device map and
            persisted to the database.
    """
    global gsm_devices_tracked  # module-level counter, incremented under gsm_spy_lock

    # Standard pattern: reader thread with queue
    output_queue_local = queue.Queue()

    def read_stdout():
        # Drains the child's stdout so the pipe never blocks the child.
        try:
            for line in iter(process.stdout.readline, ''):
                if line:
                    output_queue_local.put(('stdout', line))
        except Exception as e:
            logger.error(f"tshark read error: {e}")
        finally:
            output_queue_local.put(('eof', None))

    stdout_thread = threading.Thread(target=read_stdout, daemon=True)
    stdout_thread.start()

    try:
        while app_module.gsm_spy_monitor_process:
            # Check if process died
            if process.poll() is not None:
                logger.info(f"Monitor process exited (code: {process.returncode})")
                break

            # Get output from queue with timeout
            try:
                msg_type, line = output_queue_local.get(timeout=1.0)
            except queue.Empty:
                continue  # Timeout, check flag again

            if msg_type == 'eof':
                break  # EOF

            parsed = parse_tshark_output(line)
            if parsed:
                # Store in DataStore (keyed by TMSI, then IMSI, else timestamp)
                key = parsed.get('tmsi') or parsed.get('imsi') or str(time.time())
                app_module.gsm_spy_devices[key] = parsed

                # Queue for SSE stream
                try:
                    app_module.gsm_spy_queue.put_nowait(parsed)
                except queue.Full:
                    pass

                # Store in database for historical analysis
                try:
                    with get_db() as conn:
                        # gsm_signals table
                        conn.execute('''
                            INSERT INTO gsm_signals
                            (imsi, tmsi, lac, cid, ta_value, arfcn)
                            VALUES (?, ?, ?, ?, ?, ?)
                        ''', (
                            parsed.get('imsi'),
                            parsed.get('tmsi'),
                            parsed.get('lac'),
                            parsed.get('cid'),
                            parsed.get('ta_value'),
                            app_module.gsm_spy_selected_arfcn
                        ))

                        # gsm_tmsi_log table for crowd density
                        if parsed.get('tmsi'):
                            conn.execute('''
                                INSERT INTO gsm_tmsi_log
                                (tmsi, lac, cid, ta_value)
                                VALUES (?, ?, ?, ?)
                            ''', (
                                parsed.get('tmsi'),
                                parsed.get('lac'),
                                parsed.get('cid'),
                                parsed.get('ta_value')
                            ))

                        # Velocity calculation (G-08)
                        device_id = parsed.get('imsi') or parsed.get('tmsi')
                        if device_id and parsed.get('ta_value') is not None:
                            # Get previous TA reading (OFFSET 1 skips the
                            # row inserted just above)
                            prev_reading = conn.execute('''
                                SELECT ta_value, cid, timestamp
                                FROM gsm_signals
                                WHERE (imsi = ? OR tmsi = ?)
                                ORDER BY timestamp DESC
                                LIMIT 1 OFFSET 1
                            ''', (device_id, device_id)).fetchone()

                            if prev_reading:
                                # Calculate velocity
                                curr_ta = parsed.get('ta_value')
                                prev_ta = prev_reading['ta_value']
                                curr_distance = curr_ta * config.GSM_TA_METERS_PER_UNIT
                                prev_distance = prev_ta * config.GSM_TA_METERS_PER_UNIT
                                distance_change = abs(curr_distance - prev_distance)

                                # Time difference
                                prev_time = datetime.fromisoformat(prev_reading['timestamp'])
                                curr_time = datetime.now()
                                time_diff_seconds = (curr_time - prev_time).total_seconds()

                                if time_diff_seconds > 0:
                                    velocity = distance_change / time_diff_seconds

                                    # Store velocity
                                    conn.execute('''
                                        INSERT INTO gsm_velocity_log
                                        (device_id, prev_ta, curr_ta, prev_cid, curr_cid, estimated_velocity)
                                        VALUES (?, ?, ?, ?, ?, ?)
                                    ''', (
                                        device_id,
                                        prev_ta,
                                        curr_ta,
                                        prev_reading['cid'],
                                        parsed.get('cid'),
                                        velocity
                                    ))

                        conn.commit()
                except Exception as e:
                    logger.error(f"Error storing device data: {e}")

                # Thread-safe counter
                with app_module.gsm_spy_lock:
                    gsm_devices_tracked += 1

    except Exception as e:
        logger.error(f"Monitor thread error: {e}", exc_info=True)

    finally:
        # Reap process with timeout
        try:
            if process.poll() is None:
                process.terminate()
                try:
                    process.wait(timeout=5)
                except subprocess.TimeoutExpired:
                    logger.warning("Monitor process didn't terminate, killing")
                    process.kill()
                    process.wait()
            else:
                process.wait()
            logger.info(f"Monitor process exited with code {process.returncode}")
        except Exception as e:
            logger.error(f"Error reaping monitor process: {e}")

    logger.info("Monitor thread terminated")
diff --git a/routes/listening_post.py b/routes/listening_post.py
index 658acdb..8c48402 100644
--- a/routes/listening_post.py
+++ b/routes/listening_post.py
@@ -19,8 +19,8 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.logging import get_logger
-from utils.sse import format_sse
-from utils.event_pipeline import process_event
+from utils.sse import format_sse
+from utils.event_pipeline import process_event
from utils.constants import (
SSE_QUEUE_TIMEOUT,
SSE_KEEPALIVE_INTERVAL,
@@ -1181,13 +1181,13 @@ def stream_scanner_events() -> Response:
while True:
try:
- msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT)
- last_keepalive = time.time()
- try:
- process_event('listening_scanner', msg, msg.get('type'))
- except Exception:
- pass
- yield format_sse(msg)
+ msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT)
+ last_keepalive = time.time()
+ try:
+ process_event('listening_scanner', msg, msg.get('type'))
+ except Exception:
+ pass
+ yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:
@@ -1239,10 +1239,10 @@ def get_presets() -> Response:
# MANUAL AUDIO ENDPOINTS (for direct listening)
# ============================================
-@listening_post_bp.route('/audio/start', methods=['POST'])
-def start_audio() -> Response:
- """Start audio at specific frequency (manual mode)."""
- global scanner_running, scanner_active_device, listening_active_device, scanner_power_process, scanner_thread
+@listening_post_bp.route('/audio/start', methods=['POST'])
+def start_audio() -> Response:
+ """Start audio at specific frequency (manual mode)."""
+ global scanner_running, scanner_active_device, listening_active_device, scanner_power_process, scanner_thread
# Stop scanner if running
if scanner_running:
@@ -1271,7 +1271,7 @@ def start_audio() -> Response:
pass
time.sleep(0.5)
- data = request.json or {}
+ data = request.json or {}
try:
frequency = float(data.get('frequency', 0))
@@ -1286,11 +1286,11 @@ def start_audio() -> Response:
'message': f'Invalid parameter: {e}'
}), 400
- if frequency <= 0:
- return jsonify({
- 'status': 'error',
- 'message': 'frequency is required'
- }), 400
+ if frequency <= 0:
+ return jsonify({
+ 'status': 'error',
+ 'message': 'frequency is required'
+ }), 400
valid_sdr_types = ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay']
if sdr_type not in valid_sdr_types:
@@ -1299,19 +1299,28 @@ def start_audio() -> Response:
'message': f'Invalid sdr_type. Use: {", ".join(valid_sdr_types)}'
}), 400
- # Update config for audio
- scanner_config['squelch'] = squelch
- scanner_config['gain'] = gain
- scanner_config['device'] = device
- scanner_config['sdr_type'] = sdr_type
-
- # Stop waterfall if it's using the same SDR
- if waterfall_running and waterfall_active_device == device:
- _stop_waterfall_internal()
- time.sleep(0.2)
+ # Update config for audio
+ scanner_config['squelch'] = squelch
+ scanner_config['gain'] = gain
+ scanner_config['device'] = device
+ scanner_config['sdr_type'] = sdr_type
- # Claim device for listening audio
- if listening_active_device is None or listening_active_device != device:
+ # Stop waterfall if it's using the same SDR (SSE path)
+ if waterfall_running and waterfall_active_device == device:
+ _stop_waterfall_internal()
+ time.sleep(0.2)
+
+ # Release waterfall device claim if the WebSocket waterfall is still
+ # holding it. The JS client sends a stop command and closes the
+ # WebSocket before requesting audio, but the backend handler may not
+ # have finished its cleanup yet.
+ device_status = app_module.get_sdr_device_status()
+ if device_status.get(device) == 'waterfall':
+ app_module.release_sdr_device(device)
+ time.sleep(0.3)
+
+ # Claim device for listening audio
+ if listening_active_device is None or listening_active_device != device:
if listening_active_device is not None:
app_module.release_sdr_device(listening_active_device)
error = app_module.claim_sdr_device(device, 'listening')
@@ -1524,204 +1533,204 @@ waterfall_thread: Optional[threading.Thread] = None
waterfall_running = False
waterfall_lock = threading.Lock()
waterfall_queue: queue.Queue = queue.Queue(maxsize=200)
-waterfall_active_device: Optional[int] = None
-waterfall_config = {
- 'start_freq': 88.0,
- 'end_freq': 108.0,
- 'bin_size': 10000,
- 'gain': 40,
- 'device': 0,
- 'max_bins': 1024,
- 'interval': 0.4,
-}
+waterfall_active_device: Optional[int] = None
+waterfall_config = {
+ 'start_freq': 88.0,
+ 'end_freq': 108.0,
+ 'bin_size': 10000,
+ 'gain': 40,
+ 'device': 0,
+ 'max_bins': 1024,
+ 'interval': 0.4,
+}
-def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]:
- """Parse a single rtl_power CSV line into bins."""
- if not line or line.startswith('#'):
- return None, None, None, []
-
- parts = [p.strip() for p in line.split(',')]
- if len(parts) < 6:
- return None, None, None, []
-
- # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS)
- timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0]
-
- start_idx = None
- for i, tok in enumerate(parts):
- try:
- val = float(tok)
- except ValueError:
- continue
- if val > 1e5:
- start_idx = i
- break
- if start_idx is None or len(parts) < start_idx + 4:
- return timestamp, None, None, []
-
- try:
- seg_start = float(parts[start_idx])
- seg_end = float(parts[start_idx + 1])
- raw_values = []
- for v in parts[start_idx + 3:]:
- try:
- raw_values.append(float(v))
- except ValueError:
- continue
- if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]):
- raw_values = raw_values[1:]
- return timestamp, seg_start, seg_end, raw_values
- except ValueError:
- return timestamp, None, None, []
-
-
-def _waterfall_loop():
- """Continuous rtl_power sweep loop emitting waterfall data."""
- global waterfall_running, waterfall_process
-
- rtl_power_path = find_rtl_power()
- if not rtl_power_path:
- logger.error("rtl_power not found for waterfall")
- waterfall_running = False
- return
-
- start_hz = int(waterfall_config['start_freq'] * 1e6)
- end_hz = int(waterfall_config['end_freq'] * 1e6)
- bin_hz = int(waterfall_config['bin_size'])
- gain = waterfall_config['gain']
- device = waterfall_config['device']
- interval = float(waterfall_config.get('interval', 0.4))
-
- cmd = [
- rtl_power_path,
- '-f', f'{start_hz}:{end_hz}:{bin_hz}',
- '-i', str(interval),
- '-g', str(gain),
- '-d', str(device),
- ]
-
- try:
- waterfall_process = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.DEVNULL,
- bufsize=1,
- text=True,
- )
-
- current_ts = None
- all_bins: list[float] = []
- sweep_start_hz = start_hz
- sweep_end_hz = end_hz
-
- if not waterfall_process.stdout:
- return
-
- for line in waterfall_process.stdout:
- if not waterfall_running:
- break
-
- ts, seg_start, seg_end, bins = _parse_rtl_power_line(line)
- if ts is None or not bins:
- continue
-
- if current_ts is None:
- current_ts = ts
-
- if ts != current_ts and all_bins:
- max_bins = int(waterfall_config.get('max_bins') or 0)
- bins_to_send = all_bins
- if max_bins > 0 and len(bins_to_send) > max_bins:
- bins_to_send = _downsample_bins(bins_to_send, max_bins)
- msg = {
- 'type': 'waterfall_sweep',
- 'start_freq': sweep_start_hz / 1e6,
- 'end_freq': sweep_end_hz / 1e6,
- 'bins': bins_to_send,
- 'timestamp': datetime.now().isoformat(),
- }
- try:
- waterfall_queue.put_nowait(msg)
- except queue.Full:
- try:
- waterfall_queue.get_nowait()
- except queue.Empty:
- pass
- try:
- waterfall_queue.put_nowait(msg)
- except queue.Full:
- pass
-
- all_bins = []
- sweep_start_hz = start_hz
- sweep_end_hz = end_hz
- current_ts = ts
-
- all_bins.extend(bins)
- if seg_start is not None:
- sweep_start_hz = min(sweep_start_hz, seg_start)
- if seg_end is not None:
- sweep_end_hz = max(sweep_end_hz, seg_end)
-
- # Flush any remaining bins
- if all_bins and waterfall_running:
- max_bins = int(waterfall_config.get('max_bins') or 0)
- bins_to_send = all_bins
- if max_bins > 0 and len(bins_to_send) > max_bins:
- bins_to_send = _downsample_bins(bins_to_send, max_bins)
- msg = {
- 'type': 'waterfall_sweep',
- 'start_freq': sweep_start_hz / 1e6,
- 'end_freq': sweep_end_hz / 1e6,
- 'bins': bins_to_send,
- 'timestamp': datetime.now().isoformat(),
- }
- try:
- waterfall_queue.put_nowait(msg)
- except queue.Full:
- pass
-
- except Exception as e:
- logger.error(f"Waterfall loop error: {e}")
- finally:
- waterfall_running = False
- if waterfall_process and waterfall_process.poll() is None:
- try:
- waterfall_process.terminate()
- waterfall_process.wait(timeout=1)
- except Exception:
- try:
- waterfall_process.kill()
- except Exception:
- pass
- waterfall_process = None
- logger.info("Waterfall loop stopped")
-
-
-def _stop_waterfall_internal() -> None:
- """Stop the waterfall display and release resources."""
- global waterfall_running, waterfall_process, waterfall_active_device
-
- waterfall_running = False
- if waterfall_process and waterfall_process.poll() is None:
- try:
- waterfall_process.terminate()
- waterfall_process.wait(timeout=1)
- except Exception:
- try:
- waterfall_process.kill()
- except Exception:
- pass
- waterfall_process = None
-
- if waterfall_active_device is not None:
- app_module.release_sdr_device(waterfall_active_device)
- waterfall_active_device = None
+def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]:
+ """Parse a single rtl_power CSV line into bins."""
+ if not line or line.startswith('#'):
+ return None, None, None, []
+
+ parts = [p.strip() for p in line.split(',')]
+ if len(parts) < 6:
+ return None, None, None, []
+
+ # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS)
+ timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0]
+
+ start_idx = None
+ for i, tok in enumerate(parts):
+ try:
+ val = float(tok)
+ except ValueError:
+ continue
+ if val > 1e5:
+ start_idx = i
+ break
+ if start_idx is None or len(parts) < start_idx + 4:
+ return timestamp, None, None, []
+
+ try:
+ seg_start = float(parts[start_idx])
+ seg_end = float(parts[start_idx + 1])
+ raw_values = []
+ for v in parts[start_idx + 3:]:
+ try:
+ raw_values.append(float(v))
+ except ValueError:
+ continue
+ if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]):
+ raw_values = raw_values[1:]
+ return timestamp, seg_start, seg_end, raw_values
+ except ValueError:
+ return timestamp, None, None, []
+
+
+def _waterfall_loop():
+ """Continuous rtl_power sweep loop emitting waterfall data."""
+ global waterfall_running, waterfall_process
+
+ rtl_power_path = find_rtl_power()
+ if not rtl_power_path:
+ logger.error("rtl_power not found for waterfall")
+ waterfall_running = False
+ return
+
+ start_hz = int(waterfall_config['start_freq'] * 1e6)
+ end_hz = int(waterfall_config['end_freq'] * 1e6)
+ bin_hz = int(waterfall_config['bin_size'])
+ gain = waterfall_config['gain']
+ device = waterfall_config['device']
+ interval = float(waterfall_config.get('interval', 0.4))
+
+ cmd = [
+ rtl_power_path,
+ '-f', f'{start_hz}:{end_hz}:{bin_hz}',
+ '-i', str(interval),
+ '-g', str(gain),
+ '-d', str(device),
+ ]
+
+ try:
+ waterfall_process = subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ bufsize=1,
+ text=True,
+ )
+
+ current_ts = None
+ all_bins: list[float] = []
+ sweep_start_hz = start_hz
+ sweep_end_hz = end_hz
+
+ if not waterfall_process.stdout:
+ return
+
+ for line in waterfall_process.stdout:
+ if not waterfall_running:
+ break
+
+ ts, seg_start, seg_end, bins = _parse_rtl_power_line(line)
+ if ts is None or not bins:
+ continue
+
+ if current_ts is None:
+ current_ts = ts
+
+ if ts != current_ts and all_bins:
+ max_bins = int(waterfall_config.get('max_bins') or 0)
+ bins_to_send = all_bins
+ if max_bins > 0 and len(bins_to_send) > max_bins:
+ bins_to_send = _downsample_bins(bins_to_send, max_bins)
+ msg = {
+ 'type': 'waterfall_sweep',
+ 'start_freq': sweep_start_hz / 1e6,
+ 'end_freq': sweep_end_hz / 1e6,
+ 'bins': bins_to_send,
+ 'timestamp': datetime.now().isoformat(),
+ }
+ try:
+ waterfall_queue.put_nowait(msg)
+ except queue.Full:
+ try:
+ waterfall_queue.get_nowait()
+ except queue.Empty:
+ pass
+ try:
+ waterfall_queue.put_nowait(msg)
+ except queue.Full:
+ pass
+
+ all_bins = []
+ sweep_start_hz = start_hz
+ sweep_end_hz = end_hz
+ current_ts = ts
+
+ all_bins.extend(bins)
+ if seg_start is not None:
+ sweep_start_hz = min(sweep_start_hz, seg_start)
+ if seg_end is not None:
+ sweep_end_hz = max(sweep_end_hz, seg_end)
+
+ # Flush any remaining bins
+ if all_bins and waterfall_running:
+ max_bins = int(waterfall_config.get('max_bins') or 0)
+ bins_to_send = all_bins
+ if max_bins > 0 and len(bins_to_send) > max_bins:
+ bins_to_send = _downsample_bins(bins_to_send, max_bins)
+ msg = {
+ 'type': 'waterfall_sweep',
+ 'start_freq': sweep_start_hz / 1e6,
+ 'end_freq': sweep_end_hz / 1e6,
+ 'bins': bins_to_send,
+ 'timestamp': datetime.now().isoformat(),
+ }
+ try:
+ waterfall_queue.put_nowait(msg)
+ except queue.Full:
+ pass
+
+ except Exception as e:
+ logger.error(f"Waterfall loop error: {e}")
+ finally:
+ waterfall_running = False
+ if waterfall_process and waterfall_process.poll() is None:
+ try:
+ waterfall_process.terminate()
+ waterfall_process.wait(timeout=1)
+ except Exception:
+ try:
+ waterfall_process.kill()
+ except Exception:
+ pass
+ waterfall_process = None
+ logger.info("Waterfall loop stopped")
+
+
+def _stop_waterfall_internal() -> None:
+ """Stop the waterfall display and release resources."""
+ global waterfall_running, waterfall_process, waterfall_active_device
+
+ waterfall_running = False
+ if waterfall_process and waterfall_process.poll() is None:
+ try:
+ waterfall_process.terminate()
+ waterfall_process.wait(timeout=1)
+ except Exception:
+ try:
+ waterfall_process.kill()
+ except Exception:
+ pass
+ waterfall_process = None
+
+ if waterfall_active_device is not None:
+ app_module.release_sdr_device(waterfall_active_device)
+ waterfall_active_device = None
@listening_post_bp.route('/waterfall/start', methods=['POST'])
-def start_waterfall() -> Response:
+def start_waterfall() -> Response:
"""Start the waterfall/spectrogram display."""
global waterfall_thread, waterfall_running, waterfall_config, waterfall_active_device
@@ -1734,24 +1743,24 @@ def start_waterfall() -> Response:
data = request.json or {}
- try:
- waterfall_config['start_freq'] = float(data.get('start_freq', 88.0))
- waterfall_config['end_freq'] = float(data.get('end_freq', 108.0))
- waterfall_config['bin_size'] = int(data.get('bin_size', 10000))
- waterfall_config['gain'] = int(data.get('gain', 40))
- waterfall_config['device'] = int(data.get('device', 0))
- if data.get('interval') is not None:
- interval = float(data.get('interval', waterfall_config['interval']))
- if interval < 0.1 or interval > 5:
- return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400
- waterfall_config['interval'] = interval
- if data.get('max_bins') is not None:
- max_bins = int(data.get('max_bins', waterfall_config['max_bins']))
- if max_bins < 64 or max_bins > 4096:
- return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400
- waterfall_config['max_bins'] = max_bins
- except (ValueError, TypeError) as e:
- return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400
+ try:
+ waterfall_config['start_freq'] = float(data.get('start_freq', 88.0))
+ waterfall_config['end_freq'] = float(data.get('end_freq', 108.0))
+ waterfall_config['bin_size'] = int(data.get('bin_size', 10000))
+ waterfall_config['gain'] = int(data.get('gain', 40))
+ waterfall_config['device'] = int(data.get('device', 0))
+ if data.get('interval') is not None:
+ interval = float(data.get('interval', waterfall_config['interval']))
+ if interval < 0.1 or interval > 5:
+ return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400
+ waterfall_config['interval'] = interval
+ if data.get('max_bins') is not None:
+ max_bins = int(data.get('max_bins', waterfall_config['max_bins']))
+ if max_bins < 64 or max_bins > 4096:
+ return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400
+ waterfall_config['max_bins'] = max_bins
+ except (ValueError, TypeError) as e:
+ return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400
if waterfall_config['start_freq'] >= waterfall_config['end_freq']:
return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400
@@ -1777,11 +1786,11 @@ def start_waterfall() -> Response:
@listening_post_bp.route('/waterfall/stop', methods=['POST'])
-def stop_waterfall() -> Response:
- """Stop the waterfall display."""
- _stop_waterfall_internal()
-
- return jsonify({'status': 'stopped'})
+def stop_waterfall() -> Response:
+ """Stop the waterfall display."""
+ _stop_waterfall_internal()
+
+ return jsonify({'status': 'stopped'})
@listening_post_bp.route('/waterfall/stream')
@@ -1790,14 +1799,14 @@ def stream_waterfall() -> Response:
def generate() -> Generator[str, None, None]:
last_keepalive = time.time()
while True:
- try:
- msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT)
- last_keepalive = time.time()
- try:
- process_event('waterfall', msg, msg.get('type'))
- except Exception:
- pass
- yield format_sse(msg)
+ try:
+ msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT)
+ last_keepalive = time.time()
+ try:
+ process_event('waterfall', msg, msg.get('type'))
+ except Exception:
+ pass
+ yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:
@@ -1808,20 +1817,20 @@ def stream_waterfall() -> Response:
response.headers['Cache-Control'] = 'no-cache'
response.headers['X-Accel-Buffering'] = 'no'
return response
-def _downsample_bins(values: list[float], target: int) -> list[float]:
- """Downsample bins to a target length using simple averaging."""
- if target <= 0 or len(values) <= target:
- return values
-
- out: list[float] = []
- step = len(values) / target
- for i in range(target):
- start = int(i * step)
- end = int((i + 1) * step)
- if end <= start:
- end = min(start + 1, len(values))
- chunk = values[start:end]
- if not chunk:
- continue
- out.append(sum(chunk) / len(chunk))
- return out
+def _downsample_bins(values: list[float], target: int) -> list[float]:
+ """Downsample bins to a target length using simple averaging."""
+ if target <= 0 or len(values) <= target:
+ return values
+
+ out: list[float] = []
+ step = len(values) / target
+ for i in range(target):
+ start = int(i * step)
+ end = int((i + 1) * step)
+ if end <= start:
+ end = min(start + 1, len(values))
+ chunk = values[start:end]
+ if not chunk:
+ continue
+ out.append(sum(chunk) / len(chunk))
+ return out
diff --git a/routes/pager.py b/routes/pager.py
index 4ee5425..3253a6c 100644
--- a/routes/pager.py
+++ b/routes/pager.py
@@ -2,12 +2,14 @@
from __future__ import annotations
+import math
import os
import pathlib
import re
import pty
import queue
import select
+import struct
import subprocess
import threading
import time
@@ -22,8 +24,8 @@ from utils.validation import (
validate_frequency, validate_device_index, validate_gain, validate_ppm,
validate_rtl_tcp_host, validate_rtl_tcp_port
)
-from utils.sse import format_sse
-from utils.event_pipeline import process_event
+from utils.sse import format_sse
+from utils.event_pipeline import process_event
from utils.process import safe_terminate, register_process, unregister_process
from utils.sdr import SDRFactory, SDRType, SDRValidationError
from utils.dependencies import get_tool_path
@@ -106,6 +108,62 @@ def log_message(msg: dict[str, Any]) -> None:
logger.error(f"Failed to log message: {e}")
+def audio_relay_thread(
+ rtl_stdout,
+ multimon_stdin,
+ output_queue: queue.Queue,
+ stop_event: threading.Event,
+) -> None:
+ """Relay audio from rtl_fm to multimon-ng while computing signal levels.
+
+ Reads raw 16-bit LE PCM from *rtl_stdout*, writes every chunk straight
+ through to *multimon_stdin*, and every ~100 ms pushes an RMS / peak scope
+ event onto *output_queue*.
+ """
+ CHUNK = 4096 # bytes – 2048 samples at 16-bit mono
+ INTERVAL = 0.1 # seconds between scope updates
+ last_scope = time.monotonic()
+
+ try:
+ while not stop_event.is_set():
+ data = rtl_stdout.read(CHUNK)
+ if not data:
+ break
+
+ # Forward audio untouched
+ try:
+ multimon_stdin.write(data)
+ multimon_stdin.flush()
+ except (BrokenPipeError, OSError):
+ break
+
+ # Compute scope levels every ~100 ms
+ now = time.monotonic()
+ if now - last_scope >= INTERVAL:
+ last_scope = now
+ try:
+ n_samples = len(data) // 2
+ if n_samples == 0:
+ continue
+ samples = struct.unpack(f'<{n_samples}h', data[:n_samples * 2])
+ peak = max(abs(s) for s in samples)
+ rms = int(math.sqrt(sum(s * s for s in samples) / n_samples))
+ output_queue.put_nowait({
+ 'type': 'scope',
+ 'rms': rms,
+ 'peak': peak,
+ })
+ except (struct.error, ValueError, queue.Full):
+ pass
+ except Exception as e:
+ logger.debug(f"Audio relay error: {e}")
+ finally:
+ try:
+ multimon_stdin.close()
+ except OSError:
+ pass
+
+
def stream_decoder(master_fd: int, process: subprocess.Popen[bytes]) -> None:
"""Stream decoder output to queue using PTY for unbuffered output."""
try:
@@ -152,6 +210,11 @@ def stream_decoder(master_fd: int, process: subprocess.Popen[bytes]) -> None:
os.close(master_fd)
except OSError:
pass
+ # Signal relay thread to stop
+ with app_module.process_lock:
+ stop_relay = getattr(app_module.current_process, '_stop_relay', None)
+ if stop_relay:
+ stop_relay.set()
# Cleanup companion rtl_fm process and decoder
with app_module.process_lock:
rtl_proc = getattr(app_module.current_process, '_rtl_process', None)
@@ -319,7 +382,7 @@ def start_decoding() -> Response:
multimon_process = subprocess.Popen(
multimon_cmd,
- stdin=rtl_process.stdout,
+ stdin=subprocess.PIPE,
stdout=slave_fd,
stderr=slave_fd,
close_fds=True
@@ -327,11 +390,22 @@ def start_decoding() -> Response:
register_process(multimon_process)
os.close(slave_fd)
- rtl_process.stdout.close()
+
+ # Spawn audio relay thread between rtl_fm and multimon-ng
+ stop_relay = threading.Event()
+ relay = threading.Thread(
+ target=audio_relay_thread,
+ args=(rtl_process.stdout, multimon_process.stdin,
+ app_module.output_queue, stop_relay),
+ )
+ relay.daemon = True
+ relay.start()
app_module.current_process = multimon_process
app_module.current_process._rtl_process = rtl_process
app_module.current_process._master_fd = master_fd
+ app_module.current_process._stop_relay = stop_relay
+ app_module.current_process._relay_thread = relay
# Start output thread with PTY master fd
thread = threading.Thread(target=stream_decoder, args=(master_fd, multimon_process))
@@ -380,6 +454,10 @@ def stop_decoding() -> Response:
with app_module.process_lock:
if app_module.current_process:
+ # Signal audio relay thread to stop
+ if hasattr(app_module.current_process, '_stop_relay'):
+ app_module.current_process._stop_relay.set()
+
# Kill rtl_fm process first
if hasattr(app_module.current_process, '_rtl_process'):
try:
@@ -469,14 +547,14 @@ def stream() -> Response:
keepalive_interval = 30.0 # Send keepalive every 30 seconds instead of 1 second
while True:
- try:
- msg = app_module.output_queue.get(timeout=1)
- last_keepalive = time.time()
- try:
- process_event('pager', msg, msg.get('type'))
- except Exception:
- pass
- yield format_sse(msg)
+ try:
+ msg = app_module.output_queue.get(timeout=1)
+ last_keepalive = time.time()
+ try:
+ process_event('pager', msg, msg.get('type'))
+ except Exception:
+ pass
+ yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:
diff --git a/routes/sensor.py b/routes/sensor.py
index e5a719e..e2110fb 100644
--- a/routes/sensor.py
+++ b/routes/sensor.py
@@ -18,8 +18,8 @@ from utils.validation import (
validate_frequency, validate_device_index, validate_gain, validate_ppm,
validate_rtl_tcp_host, validate_rtl_tcp_port
)
-from utils.sse import format_sse
-from utils.event_pipeline import process_event
+from utils.sse import format_sse
+from utils.event_pipeline import process_event
from utils.process import safe_terminate, register_process, unregister_process
from utils.sdr import SDRFactory, SDRType
@@ -45,6 +45,21 @@ def stream_sensor_output(process: subprocess.Popen[bytes]) -> None:
data['type'] = 'sensor'
app_module.sensor_queue.put(data)
+ # Push scope event when signal level data is present
+ rssi = data.get('rssi')
+ snr = data.get('snr')
+ noise = data.get('noise')
+ if rssi is not None or snr is not None:
+ try:
+ app_module.sensor_queue.put_nowait({
+ 'type': 'scope',
+ 'rssi': rssi if rssi is not None else 0,
+ 'snr': snr if snr is not None else 0,
+ 'noise': noise if noise is not None else 0,
+ })
+ except queue.Full:
+ pass
+
# Log if enabled
if app_module.logging_enabled:
try:
@@ -80,6 +95,14 @@ def stream_sensor_output(process: subprocess.Popen[bytes]) -> None:
sensor_active_device = None
+@sensor_bp.route('/sensor/status')
+def sensor_status() -> Response:
+ """Check if sensor decoder is currently running."""
+ with app_module.sensor_lock:
+ running = app_module.sensor_process is not None and app_module.sensor_process.poll() is None
+ return jsonify({'running': running})
+
+
@sensor_bp.route('/start_sensor', methods=['POST'])
def start_sensor() -> Response:
global sensor_active_device
@@ -158,6 +181,10 @@ def start_sensor() -> Response:
full_cmd = ' '.join(cmd)
logger.info(f"Running: {full_cmd}")
+    # Add '-M level' so the frontend scope can display RSSI/SNR, and '-M stats:0'
+    # to suppress "row count limit 50 reached" warnings. NOTE(review): these flags are appended AFTER full_cmd was built and logged above, so the "Running:" log line omits them — confirm this is intended.
+ cmd.extend(['-M', 'level', '-M', 'stats:0'])
+
try:
app_module.sensor_process = subprocess.Popen(
cmd,
@@ -232,13 +259,13 @@ def stream_sensor() -> Response:
while True:
try:
- msg = app_module.sensor_queue.get(timeout=1)
- last_keepalive = time.time()
- try:
- process_event('sensor', msg, msg.get('type'))
- except Exception:
- pass
- yield format_sse(msg)
+ msg = app_module.sensor_queue.get(timeout=1)
+ last_keepalive = time.time()
+ try:
+ process_event('sensor', msg, msg.get('type'))
+ except Exception:
+ pass
+ yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:
diff --git a/routes/sstv.py b/routes/sstv.py
index ed3676a..1029dec 100644
--- a/routes/sstv.py
+++ b/routes/sstv.py
@@ -15,14 +15,12 @@ from flask import Blueprint, jsonify, request, Response, send_file
import app as app_module
from utils.logging import get_logger
-from utils.sse import format_sse
-from utils.event_pipeline import process_event
+from utils.sse import format_sse
+from utils.event_pipeline import process_event
from utils.sstv import (
get_sstv_decoder,
is_sstv_available,
ISS_SSTV_FREQ,
- DecodeProgress,
- DopplerInfo,
)
logger = get_logger('intercept.sstv')
@@ -36,14 +34,14 @@ _sstv_queue: queue.Queue = queue.Queue(maxsize=100)
sstv_active_device: int | None = None
-def _progress_callback(progress: DecodeProgress) -> None:
- """Callback to queue progress updates for SSE stream."""
+def _progress_callback(data: dict) -> None:
+ """Callback to queue progress/scope updates for SSE stream."""
try:
- _sstv_queue.put_nowait(progress.to_dict())
+ _sstv_queue.put_nowait(data)
except queue.Full:
try:
_sstv_queue.get_nowait()
- _sstv_queue.put_nowait(progress.to_dict())
+ _sstv_queue.put_nowait(data)
except queue.Empty:
pass
@@ -399,14 +397,14 @@ def stream_progress():
keepalive_interval = 30.0
while True:
- try:
- progress = _sstv_queue.get(timeout=1)
- last_keepalive = time.time()
- try:
- process_event('sstv', progress, progress.get('type'))
- except Exception:
- pass
- yield format_sse(progress)
+ try:
+ progress = _sstv_queue.get(timeout=1)
+ last_keepalive = time.time()
+ try:
+ process_event('sstv', progress, progress.get('type'))
+ except Exception:
+ pass
+ yield format_sse(progress)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:
diff --git a/routes/sstv_general.py b/routes/sstv_general.py
index 5ebcbb2..0ddcbfb 100644
--- a/routes/sstv_general.py
+++ b/routes/sstv_general.py
@@ -17,7 +17,6 @@ from utils.logging import get_logger
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.sstv import (
- DecodeProgress,
get_general_sstv_decoder,
)
@@ -49,14 +48,14 @@ SSTV_FREQUENCIES = [
_FREQ_MODULATION_MAP = {entry['frequency']: entry['modulation'] for entry in SSTV_FREQUENCIES}
-def _progress_callback(progress: DecodeProgress) -> None:
- """Callback to queue progress updates for SSE stream."""
+def _progress_callback(data: dict) -> None:
+ """Callback to queue progress/scope updates for SSE stream."""
try:
- _sstv_general_queue.put_nowait(progress.to_dict())
+ _sstv_general_queue.put_nowait(data)
except queue.Full:
try:
_sstv_general_queue.get_nowait()
- _sstv_general_queue.put_nowait(progress.to_dict())
+ _sstv_general_queue.put_nowait(data)
except queue.Empty:
pass
diff --git a/routes/tscm.py b/routes/tscm.py
index 5a3d31d..e110495 100644
--- a/routes/tscm.py
+++ b/routes/tscm.py
@@ -551,6 +551,12 @@ def _start_sweep_internal(
}
+@tscm_bp.route('/status')
+def tscm_status():
+ """Check if any TSCM operation is currently running."""
+ return jsonify({'running': _sweep_running})
+
+
@tscm_bp.route('/sweep/start', methods=['POST'])
def start_sweep():
"""Start a TSCM sweep."""
diff --git a/routes/waterfall_websocket.py b/routes/waterfall_websocket.py
new file mode 100644
index 0000000..5512d6f
--- /dev/null
+++ b/routes/waterfall_websocket.py
@@ -0,0 +1,386 @@
+"""WebSocket-based waterfall streaming with I/Q capture and server-side FFT."""
+
+import json
+import queue
+import socket
+import subprocess
+import threading
+import time
+
+from flask import Flask
+
+try:
+ from flask_sock import Sock
+ WEBSOCKET_AVAILABLE = True
+except ImportError:
+ WEBSOCKET_AVAILABLE = False
+ Sock = None
+
+from utils.logging import get_logger
+from utils.process import safe_terminate, register_process, unregister_process
+from utils.waterfall_fft import (
+ build_binary_frame,
+ compute_power_spectrum,
+ cu8_to_complex,
+ quantize_to_uint8,
+)
+from utils.sdr import SDRFactory, SDRType
+from utils.sdr.base import SDRCapabilities, SDRDevice
+
+logger = get_logger('intercept.waterfall_ws')
+
+# Maximum bandwidth per SDR type (Hz)
+MAX_BANDWIDTH = {
+ SDRType.RTL_SDR: 2400000,
+ SDRType.HACKRF: 20000000,
+ SDRType.LIME_SDR: 20000000,
+ SDRType.AIRSPY: 10000000,
+ SDRType.SDRPLAY: 2000000,
+}
+
+
+def _resolve_sdr_type(sdr_type_str: str) -> SDRType:
+ """Convert client sdr_type string to SDRType enum."""
+ mapping = {
+ 'rtlsdr': SDRType.RTL_SDR,
+ 'rtl_sdr': SDRType.RTL_SDR,
+ 'hackrf': SDRType.HACKRF,
+ 'limesdr': SDRType.LIME_SDR,
+ 'lime_sdr': SDRType.LIME_SDR,
+ 'airspy': SDRType.AIRSPY,
+ 'sdrplay': SDRType.SDRPLAY,
+ }
+ return mapping.get(sdr_type_str.lower(), SDRType.RTL_SDR)
+
+
+def _build_dummy_device(device_index: int, sdr_type: SDRType) -> SDRDevice:
+ """Build a minimal SDRDevice for command building."""
+ builder = SDRFactory.get_builder(sdr_type)
+ caps = builder.get_capabilities()
+ return SDRDevice(
+ sdr_type=sdr_type,
+ index=device_index,
+ name=f'{sdr_type.value}-{device_index}',
+ serial='N/A',
+ driver=sdr_type.value,
+ capabilities=caps,
+ )
+
+
+def init_waterfall_websocket(app: Flask):
+ """Initialize WebSocket waterfall streaming."""
+ if not WEBSOCKET_AVAILABLE:
+ logger.warning("flask-sock not installed, WebSocket waterfall disabled")
+ return
+
+ sock = Sock(app)
+
+ @sock.route('/ws/waterfall')
+ def waterfall_stream(ws):
+ """WebSocket endpoint for real-time waterfall streaming."""
+ logger.info("WebSocket waterfall client connected")
+
+ # Import app module for device claiming
+ import app as app_module
+
+ iq_process = None
+ reader_thread = None
+ stop_event = threading.Event()
+ claimed_device = None
+ # Queue for outgoing messages — only the main loop touches ws.send()
+ send_queue = queue.Queue(maxsize=120)
+
+ try:
+ while True:
+ # Drain send queue first (non-blocking)
+ while True:
+ try:
+ outgoing = send_queue.get_nowait()
+ except queue.Empty:
+ break
+ try:
+ ws.send(outgoing)
+ except Exception:
+ stop_event.set()
+ break
+
+ try:
+ msg = ws.receive(timeout=0.1)
+ except Exception as e:
+ err = str(e).lower()
+ if "closed" in err:
+ break
+ if "timed out" not in err:
+ logger.error(f"WebSocket receive error: {e}")
+ continue
+
+ if msg is None:
+ # simple-websocket returns None on timeout AND on
+ # close; check ws.connected to tell them apart.
+ if not ws.connected:
+ break
+ if stop_event.is_set():
+ break
+ continue
+
+ try:
+ data = json.loads(msg)
+ except (json.JSONDecodeError, TypeError):
+ continue
+
+ cmd = data.get('cmd')
+
+ if cmd == 'start':
+ # Stop any existing capture
+ was_restarting = iq_process is not None
+ stop_event.set()
+ if reader_thread and reader_thread.is_alive():
+ reader_thread.join(timeout=2)
+ if iq_process:
+ safe_terminate(iq_process)
+ unregister_process(iq_process)
+ iq_process = None
+ if claimed_device is not None:
+ app_module.release_sdr_device(claimed_device)
+ claimed_device = None
+ stop_event.clear()
+ # Flush stale frames from previous capture
+ while not send_queue.empty():
+ try:
+ send_queue.get_nowait()
+ except queue.Empty:
+ break
+ # Allow USB device to be released by the kernel
+ if was_restarting:
+ time.sleep(0.5)
+
+ # Parse config
+ center_freq = float(data.get('center_freq', 100.0))
+ span_mhz = float(data.get('span_mhz', 2.0))
+ gain = data.get('gain')
+ if gain is not None:
+ gain = float(gain)
+ device_index = int(data.get('device', 0))
+ sdr_type_str = data.get('sdr_type', 'rtlsdr')
+ fft_size = int(data.get('fft_size', 1024))
+ fps = int(data.get('fps', 25))
+ avg_count = int(data.get('avg_count', 4))
+ ppm = data.get('ppm')
+ if ppm is not None:
+ ppm = int(ppm)
+ bias_t = bool(data.get('bias_t', False))
+
+                # Clamp FFT size to a sane range (256-8192); note power-of-2
+ fft_size = max(256, min(8192, fft_size))
+
+ # Resolve SDR type and bandwidth
+ sdr_type = _resolve_sdr_type(sdr_type_str)
+ max_bw = MAX_BANDWIDTH.get(sdr_type, 2400000)
+ span_hz = int(span_mhz * 1e6)
+ sample_rate = min(span_hz, max_bw)
+
+ # Compute effective frequency range
+ effective_span_mhz = sample_rate / 1e6
+ start_freq = center_freq - effective_span_mhz / 2
+ end_freq = center_freq + effective_span_mhz / 2
+
+ # Claim the device
+ claim_err = app_module.claim_sdr_device(device_index, 'waterfall')
+ if claim_err:
+ ws.send(json.dumps({
+ 'status': 'error',
+ 'message': claim_err,
+ 'error_type': 'DEVICE_BUSY',
+ }))
+ continue
+ claimed_device = device_index
+
+ # Build I/Q capture command
+ try:
+ builder = SDRFactory.get_builder(sdr_type)
+ device = _build_dummy_device(device_index, sdr_type)
+ iq_cmd = builder.build_iq_capture_command(
+ device=device,
+ frequency_mhz=center_freq,
+ sample_rate=sample_rate,
+ gain=gain,
+ ppm=ppm,
+ bias_t=bias_t,
+ )
+ except NotImplementedError as e:
+ app_module.release_sdr_device(device_index)
+ claimed_device = None
+ ws.send(json.dumps({
+ 'status': 'error',
+ 'message': str(e),
+ }))
+ continue
+
+ # Spawn I/Q capture process (retry to handle USB release lag)
+ max_attempts = 3 if was_restarting else 1
+ try:
+ for attempt in range(max_attempts):
+ logger.info(
+ f"Starting I/Q capture: {center_freq} MHz, "
+ f"span={effective_span_mhz:.1f} MHz, "
+ f"sr={sample_rate}, fft={fft_size}"
+ )
+ iq_process = subprocess.Popen(
+ iq_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ bufsize=0,
+ )
+ register_process(iq_process)
+
+ # Brief check that process started
+ time.sleep(0.3)
+ if iq_process.poll() is not None:
+ unregister_process(iq_process)
+ iq_process = None
+ if attempt < max_attempts - 1:
+ logger.info(
+ f"I/Q process exited immediately, "
+ f"retrying ({attempt + 1}/{max_attempts})..."
+ )
+ time.sleep(0.5)
+ continue
+ raise RuntimeError(
+ "I/Q capture process exited immediately"
+ )
+ break # Process started successfully
+ except Exception as e:
+ logger.error(f"Failed to start I/Q capture: {e}")
+ if iq_process:
+ safe_terminate(iq_process)
+ unregister_process(iq_process)
+ iq_process = None
+ app_module.release_sdr_device(device_index)
+ claimed_device = None
+ ws.send(json.dumps({
+ 'status': 'error',
+ 'message': f'Failed to start I/Q capture: {e}',
+ }))
+ continue
+
+ # Send started confirmation
+ ws.send(json.dumps({
+ 'status': 'started',
+ 'start_freq': start_freq,
+ 'end_freq': end_freq,
+ 'fft_size': fft_size,
+ 'sample_rate': sample_rate,
+ }))
+
+ # Start reader thread — puts frames on queue, never calls ws.send()
+ def fft_reader(
+ proc, _send_q, stop_evt,
+ _fft_size, _avg_count, _fps,
+ _start_freq, _end_freq,
+ ):
+ """Read I/Q from subprocess, compute FFT, enqueue binary frames."""
+ bytes_per_frame = _fft_size * _avg_count * 2
+ frame_interval = 1.0 / _fps
+
+ try:
+ while not stop_evt.is_set():
+ if proc.poll() is not None:
+ break
+
+ frame_start = time.monotonic()
+
+ # Read raw I/Q bytes
+ raw = b''
+ remaining = bytes_per_frame
+ while remaining > 0 and not stop_evt.is_set():
+ chunk = proc.stdout.read(min(remaining, 65536))
+ if not chunk:
+ break
+ raw += chunk
+ remaining -= len(chunk)
+
+ if len(raw) < _fft_size * 2:
+ break
+
+ # Process FFT pipeline
+ samples = cu8_to_complex(raw)
+ power_db = compute_power_spectrum(
+ samples,
+ fft_size=_fft_size,
+ avg_count=_avg_count,
+ )
+ quantized = quantize_to_uint8(power_db)
+ frame = build_binary_frame(
+ _start_freq, _end_freq, quantized,
+ )
+
+ try:
+ _send_q.put_nowait(frame)
+ except queue.Full:
+ # Drop frame if main loop can't keep up
+ pass
+
+ # Pace to target FPS
+ elapsed = time.monotonic() - frame_start
+ sleep_time = frame_interval - elapsed
+ if sleep_time > 0:
+ stop_evt.wait(sleep_time)
+
+ except Exception as e:
+ logger.debug(f"FFT reader stopped: {e}")
+
+ reader_thread = threading.Thread(
+ target=fft_reader,
+ args=(
+ iq_process, send_queue, stop_event,
+ fft_size, avg_count, fps,
+ start_freq, end_freq,
+ ),
+ daemon=True,
+ )
+ reader_thread.start()
+
+ elif cmd == 'stop':
+ stop_event.set()
+ if reader_thread and reader_thread.is_alive():
+ reader_thread.join(timeout=2)
+ reader_thread = None
+ if iq_process:
+ safe_terminate(iq_process)
+ unregister_process(iq_process)
+ iq_process = None
+ if claimed_device is not None:
+ app_module.release_sdr_device(claimed_device)
+ claimed_device = None
+ stop_event.clear()
+ ws.send(json.dumps({'status': 'stopped'}))
+
+ except Exception as e:
+ logger.info(f"WebSocket waterfall closed: {e}")
+ finally:
+ # Cleanup
+ stop_event.set()
+ if reader_thread and reader_thread.is_alive():
+ reader_thread.join(timeout=2)
+ if iq_process:
+ safe_terminate(iq_process)
+ unregister_process(iq_process)
+ if claimed_device is not None:
+ app_module.release_sdr_device(claimed_device)
+ # Complete WebSocket close handshake, then shut down the
+ # raw socket so Werkzeug cannot write its HTTP 200 response
+ # on top of the WebSocket stream (which browsers see as
+ # "Invalid frame header").
+ try:
+ ws.close()
+ except Exception:
+ pass
+ try:
+ ws.sock.shutdown(socket.SHUT_RDWR)
+ except Exception:
+ pass
+ try:
+ ws.sock.close()
+ except Exception:
+ pass
+ logger.info("WebSocket waterfall client disconnected")
diff --git a/setup.sh b/setup.sh
index 7cdf6f1..f41218a 100755
--- a/setup.sh
+++ b/setup.sh
@@ -165,6 +165,7 @@ detect_dragonos() {
# Required tool checks (with alternates)
# ----------------------------
missing_required=()
+missing_recommended=()
check_required() {
local label="$1"; shift
@@ -178,6 +179,18 @@ check_required() {
fi
}
+check_recommended() {
+ local label="$1"; shift
+ local desc="$1"; shift
+
+ if have_any "$@"; then
+ ok "${label} - ${desc}"
+ else
+ warn "${label} - ${desc} (missing, recommended)"
+ missing_recommended+=("$label")
+ fi
+}
+
check_optional() {
local label="$1"; shift
local desc="$1"; shift
@@ -230,6 +243,12 @@ check_tools() {
check_required "hcitool" "Bluetooth scan utility" hcitool
check_required "hciconfig" "Bluetooth adapter config" hciconfig
+ echo
+ info "GSM Intelligence:"
+ check_recommended "grgsm_scanner" "GSM tower scanner (gr-gsm)" grgsm_scanner
+ check_recommended "grgsm_livemon" "GSM live monitor (gr-gsm)" grgsm_livemon
+ check_recommended "tshark" "Packet analysis (Wireshark)" tshark
+
echo
info "SoapySDR:"
check_required "SoapySDRUtil" "SoapySDR CLI utility" SoapySDRUtil
@@ -605,7 +624,7 @@ install_aiscatcher_from_source_macos() {
}
install_macos_packages() {
- TOTAL_STEPS=17
+ TOTAL_STEPS=18
CURRENT_STEP=0
progress "Checking Homebrew"
@@ -694,6 +713,47 @@ install_macos_packages() {
progress "Installing gpsd"
brew_install gpsd
+ # gr-gsm for GSM Intelligence
+ progress "Installing gr-gsm"
+ if ! cmd_exists grgsm_scanner; then
+ brew_install gnuradio
+ (brew_install gr-gsm) || {
+ warn "gr-gsm not available in Homebrew, building from source..."
+ (
+ tmp_dir="$(mktemp -d)"
+ trap 'rm -rf "$tmp_dir"' EXIT
+
+ info "Cloning gr-gsm repository..."
+ git clone --depth 1 https://github.com/bkerler/gr-gsm.git "$tmp_dir/gr-gsm" >/dev/null 2>&1 \
+ || { warn "Failed to clone gr-gsm. GSM Spy feature will not work."; exit 1; }
+
+ cd "$tmp_dir/gr-gsm"
+ mkdir -p build && cd build
+ info "Compiling gr-gsm (this may take several minutes)..."
+ if cmake .. >/dev/null 2>&1 && make -j$(sysctl -n hw.ncpu) >/dev/null 2>&1; then
+ if [[ -w /usr/local/lib ]]; then
+ make install >/dev/null 2>&1
+ else
+ sudo make install >/dev/null 2>&1
+ fi
+ ok "gr-gsm installed successfully from source"
+ else
+ warn "Failed to build gr-gsm. GSM Spy feature will not work."
+ fi
+ )
+ }
+ else
+ ok "gr-gsm already installed"
+ fi
+
+ # Wireshark (tshark) for GSM packet analysis
+ progress "Installing tshark"
+ if ! cmd_exists tshark; then
+ brew_install wireshark
+ else
+ ok "tshark already installed"
+ fi
+
progress "Installing Ubertooth tools (optional)"
if ! cmd_exists ubertooth-btle; then
echo
@@ -979,7 +1039,7 @@ install_debian_packages() {
export NEEDRESTART_MODE=a
fi
- TOTAL_STEPS=22
+ TOTAL_STEPS=25
CURRENT_STEP=0
progress "Updating APT package lists"
@@ -1104,6 +1164,82 @@ install_debian_packages() {
progress "Installing gpsd"
apt_install gpsd gpsd-clients || true
+ # gr-gsm for GSM Intelligence
+ progress "Installing GNU Radio and gr-gsm"
+ if ! cmd_exists grgsm_scanner; then
+ # Try to install gr-gsm directly from package repositories
+ apt_install gnuradio gnuradio-dev gr-osmosdr gr-gsm || {
+ warn "gr-gsm package not available in repositories. Attempting source build..."
+
+ # Fallback: Build from source
+ progress "Building gr-gsm from source"
+ apt_install git cmake libboost-all-dev libcppunit-dev swig \
+ doxygen liblog4cpp5-dev python3-scipy python3-numpy \
+ libvolk-dev libuhd-dev libfftw3-dev || true
+
+ info "Cloning gr-gsm repository..."
+ if [ -d /tmp/gr-gsm ]; then
+ rm -rf /tmp/gr-gsm
+ fi
+
+ git clone https://github.com/bkerler/gr-gsm.git /tmp/gr-gsm || {
+ warn "Failed to clone gr-gsm repository. GSM Spy will not be available."
+ return 0
+ }
+
+ cd /tmp/gr-gsm
+ mkdir -p build && cd build
+
+ # Try to find GNU Radio cmake files
+ if [ -d /usr/lib/x86_64-linux-gnu/cmake/gnuradio ]; then
+ export CMAKE_PREFIX_PATH="/usr/lib/x86_64-linux-gnu/cmake/gnuradio:$CMAKE_PREFIX_PATH"
+ fi
+
+ info "Running CMake configuration..."
+ if cmake .. 2>/dev/null; then
+ info "Compiling gr-gsm (this may take several minutes)..."
+ if make -j$(nproc) 2>/dev/null; then
+ $SUDO make install
+ $SUDO ldconfig
+ cd ~
+ rm -rf /tmp/gr-gsm
+ ok "gr-gsm built and installed successfully"
+ else
+ warn "gr-gsm compilation failed. GSM Spy feature will not work."
+ cd ~
+ rm -rf /tmp/gr-gsm
+ fi
+ else
+ warn "gr-gsm CMake configuration failed. GNU Radio 3.8+ may not be available."
+ cd ~
+ rm -rf /tmp/gr-gsm
+ fi
+ }
+
+ # Verify installation
+ if cmd_exists grgsm_scanner; then
+ ok "gr-gsm installed successfully"
+ else
+ warn "gr-gsm installation incomplete. GSM Spy feature will not work."
+ fi
+ else
+ ok "gr-gsm already installed"
+ fi
+
+ # Wireshark (tshark) for GSM packet analysis
+ progress "Installing tshark"
+ if ! cmd_exists tshark; then
+ # Pre-accept non-root capture prompt for non-interactive install
+ echo 'wireshark-common wireshark-common/install-setuid boolean true' | $SUDO debconf-set-selections
+ apt_install tshark || true
+ # Allow non-root capture
+ $SUDO dpkg-reconfigure wireshark-common 2>/dev/null || true
+ $SUDO usermod -a -G wireshark $USER 2>/dev/null || true
+ ok "tshark installed. You may need to re-login for wireshark group permissions."
+ else
+ ok "tshark already installed"
+ fi
+
progress "Installing Python packages"
apt_install python3-venv python3-pip || true
# Install Python packages via apt (more reliable than pip on modern Debian/Ubuntu)
@@ -1185,6 +1321,14 @@ final_summary_and_hard_fail() {
exit 1
fi
fi
+
+ if [[ "${#missing_recommended[@]}" -gt 0 ]]; then
+ echo
+ warn "Missing RECOMMENDED tools (some features will not work):"
+ for t in "${missing_recommended[@]}"; do echo " - $t"; done
+ echo
+ warn "Install these for full functionality (GSM Intelligence, etc.)"
+ fi
}
# ----------------------------
diff --git a/static/css/components/function-strip.css b/static/css/components/function-strip.css
index 8ff5c65..878ef84 100644
--- a/static/css/components/function-strip.css
+++ b/static/css/components/function-strip.css
@@ -19,6 +19,17 @@
min-width: max-content;
}
+/* Strip title badge */
+.function-strip .strip-title {
+ font-size: 9px;
+ font-weight: 700;
+ letter-spacing: 1.5px;
+ text-transform: uppercase;
+ color: var(--text-muted);
+ white-space: nowrap;
+ padding: 4px 0;
+}
+
/* Stats */
.function-strip .strip-stat {
display: flex;
diff --git a/static/js/modes/listening-post.js b/static/js/modes/listening-post.js
index afcea11..2987e39 100644
--- a/static/js/modes/listening-post.js
+++ b/static/js/modes/listening-post.js
@@ -69,6 +69,24 @@ const scannerPresets = {
amateur70cm: { start: 420, end: 450, step: 25, mod: 'fm' }
};
+/**
+ * Suggest the appropriate modulation for a given frequency (in MHz).
+ * Uses standard band allocations to pick AM, NFM, WFM, or USB.
+ */
+function suggestModulation(freqMhz) {
+  if (freqMhz < 0.52) return 'am'; // LW AM broadcast (longwave, below MW)
+ if (freqMhz < 1.7) return 'am'; // MW AM broadcast
+ if (freqMhz < 30) return 'usb'; // HF/Shortwave
+ if (freqMhz < 88) return 'fm'; // VHF Low (public safety)
+ if (freqMhz < 108) return 'wfm'; // FM Broadcast
+ if (freqMhz < 137) return 'am'; // Airband
+ if (freqMhz < 174) return 'fm'; // VHF marine, 2m ham, pagers
+ if (freqMhz < 216) return 'wfm'; // VHF TV/DAB
+ if (freqMhz < 470) return 'fm'; // UHF various, 70cm, business/GMRS
+ if (freqMhz < 960) return 'wfm'; // UHF TV
+  return 'am'; // Above UHF TV (e.g. ADS-B 1090 MHz); AM envelope as safest default
+}
+
const audioPresets = {
fm: { freq: 98.1, mod: 'wfm' },
airband: { freq: 121.5, mod: 'am' }, // Emergency/guard frequency
@@ -1886,6 +1904,8 @@ function initListeningPost() {
// Connect radio knobs to scanner controls
initRadioKnobControls();
+ initWaterfallZoomControls();
+
// Step dropdown - sync with scanner when changed
const stepSelect = document.getElementById('radioScanStep');
if (stepSelect) {
@@ -2312,8 +2332,7 @@ async function _startDirectListenInternal() {
isDirectListening = false;
updateDirectListenUI(false);
if (resumeRfWaterfallAfterListening) {
- resumeRfWaterfallAfterListening = false;
- setTimeout(() => startWaterfall(), 200);
+ scheduleWaterfallResume();
}
return;
}
@@ -2366,8 +2385,7 @@ async function _startDirectListenInternal() {
isWaterfallRunning = true;
const waterfallPanel = document.getElementById('waterfallPanel');
if (waterfallPanel) waterfallPanel.style.display = 'block';
- document.getElementById('startWaterfallBtn').style.display = 'none';
- document.getElementById('stopWaterfallBtn').style.display = 'block';
+ setWaterfallControlButtons(true);
startAudioWaterfall();
}
updateDirectListenUI(true, freq);
@@ -2379,8 +2397,7 @@ async function _startDirectListenInternal() {
isDirectListening = false;
updateDirectListenUI(false);
if (resumeRfWaterfallAfterListening) {
- resumeRfWaterfallAfterListening = false;
- setTimeout(() => startWaterfall(), 200);
+ scheduleWaterfallResume();
}
} finally {
isRestarting = false;
@@ -2537,7 +2554,7 @@ async function startWebSocketListen(config, audioPlayer) {
/**
* Stop direct listening
*/
-function stopDirectListen() {
+async function stopDirectListen() {
console.log('[LISTEN] Stopping');
// Clear all pending state
@@ -2572,7 +2589,7 @@ function stopDirectListen() {
}
// Also stop via HTTP (fallback)
- fetch('/listening/audio/stop', { method: 'POST' }).catch(() => {});
+ const audioStopPromise = fetch('/listening/audio/stop', { method: 'POST' }).catch(() => {});
isDirectListening = false;
currentSignalLevel = 0;
@@ -2584,13 +2601,16 @@ function stopDirectListen() {
}
if (resumeRfWaterfallAfterListening) {
- resumeRfWaterfallAfterListening = false;
isWaterfallRunning = false;
- setTimeout(() => startWaterfall(), 200);
+ setWaterfallControlButtons(false);
+ await Promise.race([
+ audioStopPromise,
+ new Promise(resolve => setTimeout(resolve, 400))
+ ]);
+ scheduleWaterfallResume();
} else if (waterfallMode === 'audio' && isWaterfallRunning) {
isWaterfallRunning = false;
- document.getElementById('startWaterfallBtn').style.display = 'block';
- document.getElementById('stopWaterfallBtn').style.display = 'none';
+ setWaterfallControlButtons(false);
}
}
@@ -3067,6 +3087,17 @@ let waterfallMode = 'rf';
let audioWaterfallAnimId = null;
let lastAudioWaterfallDraw = 0;
let resumeRfWaterfallAfterListening = false;
+let waterfallResumeTimer = null;
+let waterfallResumeAttempts = 0;
+const WATERFALL_RESUME_MAX_ATTEMPTS = 8;
+const WATERFALL_RESUME_RETRY_MS = 350;
+const WATERFALL_ZOOM_MIN_MHZ = 0.1;
+const WATERFALL_ZOOM_MAX_MHZ = 500;
+const WATERFALL_DEFAULT_SPAN_MHZ = 2.0;
+
+// WebSocket waterfall state
+let waterfallWebSocket = null;
+let waterfallUseWebSocket = false;
function resizeCanvasToDisplaySize(canvas) {
if (!canvas) return false;
@@ -3137,6 +3168,214 @@ function initWaterfallCanvas() {
}
}
+function setWaterfallControlButtons(running) {
+ const startBtn = document.getElementById('startWaterfallBtn');
+ const stopBtn = document.getElementById('stopWaterfallBtn');
+ if (!startBtn || !stopBtn) return;
+ startBtn.style.display = running ? 'none' : 'inline-block';
+ stopBtn.style.display = running ? 'inline-block' : 'none';
+ const dot = document.getElementById('waterfallStripDot');
+ if (dot) {
+ dot.className = running ? 'status-dot sweeping' : 'status-dot inactive';
+ }
+}
+
+function getWaterfallRangeFromInputs() {
+ const startInput = document.getElementById('waterfallStartFreq');
+ const endInput = document.getElementById('waterfallEndFreq');
+ const startVal = parseFloat(startInput?.value);
+ const endVal = parseFloat(endInput?.value);
+ const start = Number.isFinite(startVal) ? startVal : waterfallStartFreq;
+ const end = Number.isFinite(endVal) ? endVal : waterfallEndFreq;
+ return { start, end };
+}
+
+function updateWaterfallZoomLabel(start, end) {
+ const label = document.getElementById('waterfallZoomSpan');
+ if (!label) return;
+ if (!Number.isFinite(start) || !Number.isFinite(end)) return;
+ const span = Math.max(0, end - start);
+ if (span >= 1) {
+ label.textContent = `${span.toFixed(1)} MHz`;
+ } else {
+ label.textContent = `${Math.round(span * 1000)} kHz`;
+ }
+}
+
+function setWaterfallRange(center, span) {
+ if (!Number.isFinite(center) || !Number.isFinite(span)) return;
+ const clampedSpan = Math.max(WATERFALL_ZOOM_MIN_MHZ, Math.min(WATERFALL_ZOOM_MAX_MHZ, span));
+ const half = clampedSpan / 2;
+ let start = center - half;
+ let end = center + half;
+ const minFreq = 0.01;
+ if (start < minFreq) {
+ end += (minFreq - start);
+ start = minFreq;
+ }
+ if (end <= start) {
+ end = start + WATERFALL_ZOOM_MIN_MHZ;
+ }
+
+ waterfallStartFreq = start;
+ waterfallEndFreq = end;
+
+ const startInput = document.getElementById('waterfallStartFreq');
+ const endInput = document.getElementById('waterfallEndFreq');
+ if (startInput) startInput.value = start.toFixed(3);
+ if (endInput) endInput.value = end.toFixed(3);
+
+ const rangeLabel = document.getElementById('waterfallFreqRange');
+ if (rangeLabel && !isWaterfallRunning) {
+ rangeLabel.textContent = `${start.toFixed(1)} - ${end.toFixed(1)} MHz`;
+ }
+ updateWaterfallZoomLabel(start, end);
+}
+
+function getWaterfallCenterForZoom(start, end) {
+ const tuned = parseFloat(document.getElementById('radioScanStart')?.value || '');
+ if (Number.isFinite(tuned) && tuned > 0) return tuned;
+ return (start + end) / 2;
+}
+
+async function syncWaterfallToFrequency(freq, options = {}) {
+ const { autoStart = false, restartIfRunning = true, silent = true } = options;
+ const numericFreq = parseFloat(freq);
+ if (!Number.isFinite(numericFreq) || numericFreq <= 0) return { started: false };
+
+ const { start, end } = getWaterfallRangeFromInputs();
+ const span = (Number.isFinite(start) && Number.isFinite(end) && end > start)
+ ? (end - start)
+ : WATERFALL_DEFAULT_SPAN_MHZ;
+
+ setWaterfallRange(numericFreq, span);
+
+ if (!autoStart) return { started: false };
+ if (isDirectListening || waterfallMode === 'audio') return { started: false };
+
+ if (isWaterfallRunning && waterfallMode === 'rf' && restartIfRunning) {
+ // Reuse existing WebSocket to avoid USB device release race
+ if (waterfallUseWebSocket && waterfallWebSocket && waterfallWebSocket.readyState === WebSocket.OPEN) {
+ const sf = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88);
+ const ef = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108);
+ const fft = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024);
+ const g = parseInt(document.getElementById('waterfallGain')?.value || 40);
+ const dev = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0;
+ waterfallWebSocket.send(JSON.stringify({
+ cmd: 'start',
+ center_freq: (sf + ef) / 2,
+ span_mhz: Math.max(0.1, ef - sf),
+ gain: g,
+ device: dev,
+ sdr_type: (typeof getSelectedSdrType === 'function') ? getSelectedSdrType() : 'rtlsdr',
+ fft_size: fft,
+ fps: 25,
+ avg_count: 4,
+ }));
+ return { started: true };
+ }
+ await stopWaterfall();
+ return await startWaterfall({ silent: silent });
+ }
+
+ if (!isWaterfallRunning) {
+ return await startWaterfall({ silent: silent });
+ }
+
+ return { started: true };
+}
+
+async function zoomWaterfall(direction) {
+ const { start, end } = getWaterfallRangeFromInputs();
+ if (!Number.isFinite(start) || !Number.isFinite(end) || end <= start) return;
+
+ const zoomIn = direction === 'in' || direction === '+';
+ const zoomOut = direction === 'out' || direction === '-';
+ if (!zoomIn && !zoomOut) return;
+
+ const span = end - start;
+ const newSpan = zoomIn ? span / 2 : span * 2;
+ const center = getWaterfallCenterForZoom(start, end);
+ setWaterfallRange(center, newSpan);
+
+ if (isWaterfallRunning && waterfallMode === 'rf' && !isDirectListening) {
+ // Reuse existing WebSocket to avoid USB device release race
+ if (waterfallUseWebSocket && waterfallWebSocket && waterfallWebSocket.readyState === WebSocket.OPEN) {
+ const sf = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88);
+ const ef = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108);
+ const fft = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024);
+ const g = parseInt(document.getElementById('waterfallGain')?.value || 40);
+ const dev = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0;
+ waterfallWebSocket.send(JSON.stringify({
+ cmd: 'start',
+ center_freq: (sf + ef) / 2,
+ span_mhz: Math.max(0.1, ef - sf),
+ gain: g,
+ device: dev,
+ sdr_type: (typeof getSelectedSdrType === 'function') ? getSelectedSdrType() : 'rtlsdr',
+ fft_size: fft,
+ fps: 25,
+ avg_count: 4,
+ }));
+ } else {
+ await stopWaterfall();
+ await startWaterfall({ silent: true });
+ }
+ }
+}
+
+function initWaterfallZoomControls() {
+ const startInput = document.getElementById('waterfallStartFreq');
+ const endInput = document.getElementById('waterfallEndFreq');
+ if (!startInput && !endInput) return;
+
+ const sync = () => {
+ const { start, end } = getWaterfallRangeFromInputs();
+ if (!Number.isFinite(start) || !Number.isFinite(end) || end <= start) return;
+ waterfallStartFreq = start;
+ waterfallEndFreq = end;
+ updateWaterfallZoomLabel(start, end);
+ };
+
+ if (startInput) startInput.addEventListener('input', sync);
+ if (endInput) endInput.addEventListener('input', sync);
+ sync();
+}
+
+function scheduleWaterfallResume() {
+ if (!resumeRfWaterfallAfterListening) return;
+ if (waterfallResumeTimer) {
+ clearTimeout(waterfallResumeTimer);
+ waterfallResumeTimer = null;
+ }
+ waterfallResumeAttempts = 0;
+ waterfallResumeTimer = setTimeout(attemptWaterfallResume, 200);
+}
+
+async function attemptWaterfallResume() {
+ if (!resumeRfWaterfallAfterListening) return;
+ if (isDirectListening) {
+ waterfallResumeTimer = setTimeout(attemptWaterfallResume, WATERFALL_RESUME_RETRY_MS);
+ return;
+ }
+
+ const result = await startWaterfall({ silent: true, resume: true });
+ if (result && result.started) {
+ waterfallResumeTimer = null;
+ return;
+ }
+
+ const retryable = result ? result.retryable : true;
+ if (retryable && waterfallResumeAttempts < WATERFALL_RESUME_MAX_ATTEMPTS) {
+ waterfallResumeAttempts += 1;
+ waterfallResumeTimer = setTimeout(attemptWaterfallResume, WATERFALL_RESUME_RETRY_MS);
+ return;
+ }
+
+ resumeRfWaterfallAfterListening = false;
+ waterfallResumeTimer = null;
+}
+
function setWaterfallMode(mode) {
waterfallMode = mode;
const header = document.getElementById('waterfallFreqRange');
@@ -3334,18 +3573,209 @@ function drawSpectrumLine(bins, startFreq, endFreq, labelUnit) {
spectrumCtx.fill();
}
-function startWaterfall() {
+function connectWaterfallWebSocket(config) {
+ const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
+ const wsUrl = `${protocol}//${window.location.host}/ws/waterfall`;
+
+ return new Promise((resolve, reject) => {
+ try {
+ const ws = new WebSocket(wsUrl);
+ ws.binaryType = 'arraybuffer';
+
+ const timeout = setTimeout(() => {
+ ws.close();
+ reject(new Error('WebSocket connection timeout'));
+ }, 5000);
+
+ ws.onopen = () => {
+ clearTimeout(timeout);
+ ws.send(JSON.stringify({ cmd: 'start', ...config }));
+ };
+
+ ws.onmessage = (event) => {
+ if (typeof event.data === 'string') {
+ const msg = JSON.parse(event.data);
+ if (msg.status === 'started') {
+ waterfallWebSocket = ws;
+ waterfallUseWebSocket = true;
+ if (typeof msg.start_freq === 'number') waterfallStartFreq = msg.start_freq;
+ if (typeof msg.end_freq === 'number') waterfallEndFreq = msg.end_freq;
+ const rangeLabel = document.getElementById('waterfallFreqRange');
+ if (rangeLabel) {
+ rangeLabel.textContent = `${waterfallStartFreq.toFixed(1)} - ${waterfallEndFreq.toFixed(1)} MHz`;
+ }
+ updateWaterfallZoomLabel(waterfallStartFreq, waterfallEndFreq);
+ resolve(ws);
+ } else if (msg.status === 'error') {
+ ws.close();
+ reject(new Error(msg.message || 'WebSocket waterfall error'));
+ } else if (msg.status === 'stopped') {
+ // Server confirmed stop
+ }
+ } else if (event.data instanceof ArrayBuffer) {
+ const now = Date.now();
+ if (now - lastWaterfallDraw < WATERFALL_MIN_INTERVAL_MS) return;
+ lastWaterfallDraw = now;
+ parseBinaryWaterfallFrame(event.data);
+ }
+ };
+
+ ws.onerror = () => {
+ clearTimeout(timeout);
+ reject(new Error('WebSocket connection failed'));
+ };
+
+ ws.onclose = () => {
+ if (waterfallUseWebSocket && isWaterfallRunning) {
+ waterfallWebSocket = null;
+ waterfallUseWebSocket = false;
+ isWaterfallRunning = false;
+ setWaterfallControlButtons(false);
+ if (typeof releaseDevice === 'function') {
+ releaseDevice('waterfall');
+ }
+ }
+ };
+ } catch (e) {
+ reject(e);
+ }
+ });
+}
+
+function parseBinaryWaterfallFrame(buffer) {
+ if (buffer.byteLength < 11) return;
+ const view = new DataView(buffer);
+ const msgType = view.getUint8(0);
+ if (msgType !== 0x01) return;
+
+ const startFreq = view.getFloat32(1, true);
+ const endFreq = view.getFloat32(5, true);
+ const binCount = view.getUint16(9, true);
+
+ if (buffer.byteLength < 11 + binCount) return;
+
+ const bins = new Uint8Array(buffer, 11, binCount);
+
+ waterfallStartFreq = startFreq;
+ waterfallEndFreq = endFreq;
+ const rangeLabel = document.getElementById('waterfallFreqRange');
+ if (rangeLabel) {
+ rangeLabel.textContent = `${startFreq.toFixed(1)} - ${endFreq.toFixed(1)} MHz`;
+ }
+ updateWaterfallZoomLabel(startFreq, endFreq);
+
+ drawWaterfallRowBinary(bins);
+ drawSpectrumLineBinary(bins, startFreq, endFreq);
+}
+
+function drawWaterfallRowBinary(bins) {
+ if (!waterfallCtx || !waterfallCanvas) return;
+ const w = waterfallCanvas.width;
+ const h = waterfallCanvas.height;
+ const rowHeight = waterfallRowImage ? waterfallRowImage.height : 1;
+
+ // Scroll existing content down
+ waterfallCtx.drawImage(waterfallCanvas, 0, 0, w, h - rowHeight, 0, rowHeight, w, h - rowHeight);
+
+ if (!waterfallRowImage || waterfallRowImage.width !== w || waterfallRowImage.height !== rowHeight) {
+ waterfallRowImage = waterfallCtx.createImageData(w, rowHeight);
+ }
+ const rowData = waterfallRowImage.data;
+ const palette = waterfallPalette || buildWaterfallPalette();
+ const binCount = bins.length;
+
+ for (let x = 0; x < w; x++) {
+ const pos = (x / (w - 1)) * (binCount - 1);
+ const i0 = Math.floor(pos);
+ const i1 = Math.min(binCount - 1, i0 + 1);
+ const t = pos - i0;
+ // Interpolate between bins (already uint8, 0-255)
+ const val = Math.round(bins[i0] * (1 - t) + bins[i1] * t);
+ const color = palette[Math.max(0, Math.min(255, val))] || [0, 0, 0];
+ for (let y = 0; y < rowHeight; y++) {
+ const offset = (y * w + x) * 4;
+ rowData[offset] = color[0];
+ rowData[offset + 1] = color[1];
+ rowData[offset + 2] = color[2];
+ rowData[offset + 3] = 255;
+ }
+ }
+ waterfallCtx.putImageData(waterfallRowImage, 0, 0);
+}
+
+function drawSpectrumLineBinary(bins, startFreq, endFreq) {
+ if (!spectrumCtx || !spectrumCanvas) return;
+ const w = spectrumCanvas.width;
+ const h = spectrumCanvas.height;
+
+ spectrumCtx.clearRect(0, 0, w, h);
+
+ // Background
+ spectrumCtx.fillStyle = 'rgba(0, 0, 0, 0.8)';
+ spectrumCtx.fillRect(0, 0, w, h);
+
+ // Grid lines
+ spectrumCtx.strokeStyle = 'rgba(0, 200, 255, 0.1)';
+ spectrumCtx.lineWidth = 0.5;
+ for (let i = 0; i < 5; i++) {
+ const y = (h / 5) * i;
+ spectrumCtx.beginPath();
+ spectrumCtx.moveTo(0, y);
+ spectrumCtx.lineTo(w, y);
+ spectrumCtx.stroke();
+ }
+
+ // Frequency labels
+ const dpr = window.devicePixelRatio || 1;
+ spectrumCtx.fillStyle = 'rgba(0, 200, 255, 0.5)';
+ spectrumCtx.font = `${9 * dpr}px monospace`;
+ const freqRange = endFreq - startFreq;
+ for (let i = 0; i <= 4; i++) {
+ const freq = startFreq + (freqRange / 4) * i;
+ const x = (w / 4) * i;
+ spectrumCtx.fillText(freq.toFixed(1), x + 2, h - 2);
+ }
+
+ if (bins.length === 0) return;
+
+ // Draw spectrum line — bins are pre-quantized 0-255
+ spectrumCtx.strokeStyle = 'rgba(0, 255, 255, 0.9)';
+ spectrumCtx.lineWidth = 1.5;
+ spectrumCtx.beginPath();
+ for (let i = 0; i < bins.length; i++) {
+ const x = (i / (bins.length - 1)) * w;
+ const normalized = bins[i] / 255;
+ const y = h - 12 - normalized * (h - 16);
+ if (i === 0) spectrumCtx.moveTo(x, y);
+ else spectrumCtx.lineTo(x, y);
+ }
+ spectrumCtx.stroke();
+
+ // Fill under line
+ const lastX = w;
+ const lastY = h - 12 - (bins[bins.length - 1] / 255) * (h - 16);
+ spectrumCtx.lineTo(lastX, h);
+ spectrumCtx.lineTo(0, h);
+ spectrumCtx.closePath();
+ spectrumCtx.fillStyle = 'rgba(0, 255, 255, 0.08)';
+ spectrumCtx.fill();
+}
+
+async function startWaterfall(options = {}) {
+ const { silent = false, resume = false } = options;
const startFreq = parseFloat(document.getElementById('waterfallStartFreq')?.value || 88);
const endFreq = parseFloat(document.getElementById('waterfallEndFreq')?.value || 108);
- const binSize = parseInt(document.getElementById('waterfallBinSize')?.value || 10000);
+ const fftSize = parseInt(document.getElementById('waterfallFftSize')?.value || document.getElementById('waterfallBinSize')?.value || 1024);
const gain = parseInt(document.getElementById('waterfallGain')?.value || 40);
const device = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0;
initWaterfallCanvas();
const maxBins = Math.min(4096, Math.max(128, waterfallCanvas ? waterfallCanvas.width : 800));
if (startFreq >= endFreq) {
- if (typeof showNotification === 'function') showNotification('Error', 'End frequency must be greater than start');
- return;
+ if (!silent && typeof showNotification === 'function') {
+ showNotification('Error', 'End frequency must be greater than start');
+ }
+ return { started: false, retryable: false };
}
waterfallStartFreq = startFreq;
@@ -3354,69 +3784,165 @@ function startWaterfall() {
if (rangeLabel) {
rangeLabel.textContent = `${startFreq.toFixed(1)} - ${endFreq.toFixed(1)} MHz`;
}
+ updateWaterfallZoomLabel(startFreq, endFreq);
- if (isDirectListening) {
+ if (isDirectListening && !resume) {
isWaterfallRunning = true;
const waterfallPanel = document.getElementById('waterfallPanel');
if (waterfallPanel) waterfallPanel.style.display = 'block';
- document.getElementById('startWaterfallBtn').style.display = 'none';
- document.getElementById('stopWaterfallBtn').style.display = 'block';
+ setWaterfallControlButtons(true);
startAudioWaterfall();
- return;
+ resumeRfWaterfallAfterListening = true;
+ return { started: true };
+ }
+
+ if (isDirectListening && resume) {
+ return { started: false, retryable: true };
}
setWaterfallMode('rf');
- const spanMhz = Math.max(0.1, waterfallEndFreq - waterfallStartFreq);
+
+ // Try WebSocket path first (I/Q + server-side FFT)
+ const centerFreq = (startFreq + endFreq) / 2;
+ const spanMhz = Math.max(0.1, endFreq - startFreq);
+
+ try {
+ const wsConfig = {
+ center_freq: centerFreq,
+ span_mhz: spanMhz,
+ gain: gain,
+ device: device,
+ sdr_type: (typeof getSelectedSdrType === 'function') ? getSelectedSdrType() : 'rtlsdr',
+ fft_size: fftSize,
+ fps: 25,
+ avg_count: 4,
+ };
+ await connectWaterfallWebSocket(wsConfig);
+
+ isWaterfallRunning = true;
+ setWaterfallControlButtons(true);
+ const waterfallPanel = document.getElementById('waterfallPanel');
+ if (waterfallPanel) waterfallPanel.style.display = 'block';
+ lastWaterfallDraw = 0;
+ initWaterfallCanvas();
+ if (typeof reserveDevice === 'function') {
+ reserveDevice(parseInt(device), 'waterfall');
+ }
+ if (resume || resumeRfWaterfallAfterListening) {
+ resumeRfWaterfallAfterListening = false;
+ }
+ if (waterfallResumeTimer) {
+ clearTimeout(waterfallResumeTimer);
+ waterfallResumeTimer = null;
+ }
+ console.log('[WATERFALL] WebSocket connected');
+ return { started: true };
+ } catch (wsErr) {
+ console.log('[WATERFALL] WebSocket unavailable, falling back to SSE:', wsErr.message);
+ }
+
+ // Fallback: SSE / rtl_power path
const segments = Math.max(1, Math.ceil(spanMhz / 2.4));
const targetSweepSeconds = 0.8;
const interval = Math.max(0.1, Math.min(0.3, targetSweepSeconds / segments));
+ const binSize = fftSize;
- fetch('/listening/waterfall/start', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- start_freq: startFreq,
- end_freq: endFreq,
- bin_size: binSize,
- gain: gain,
- device: device,
- max_bins: maxBins,
- interval: interval,
- })
- })
- .then(r => r.json())
- .then(data => {
- if (data.status === 'started') {
- isWaterfallRunning = true;
- document.getElementById('startWaterfallBtn').style.display = 'none';
- document.getElementById('stopWaterfallBtn').style.display = 'block';
- const waterfallPanel = document.getElementById('waterfallPanel');
- if (waterfallPanel) waterfallPanel.style.display = 'block';
- lastWaterfallDraw = 0;
- initWaterfallCanvas();
- connectWaterfallSSE();
- } else {
- if (typeof showNotification === 'function') showNotification('Error', data.message || 'Failed to start waterfall');
+ try {
+ const response = await fetch('/listening/waterfall/start', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ start_freq: startFreq,
+ end_freq: endFreq,
+ bin_size: binSize,
+ gain: gain,
+ device: device,
+ max_bins: maxBins,
+ interval: interval,
+ })
+ });
+
+ let data = {};
+ try {
+ data = await response.json();
+ } catch (e) {}
+
+ if (!response.ok || data.status !== 'started') {
+ if (!silent && typeof showNotification === 'function') {
+ showNotification('Error', data.message || 'Failed to start waterfall');
+ }
+ return {
+ started: false,
+ retryable: response.status === 409 || data.error_type === 'DEVICE_BUSY'
+ };
}
- })
- .catch(err => console.error('[WATERFALL] Start error:', err));
+
+ isWaterfallRunning = true;
+ setWaterfallControlButtons(true);
+ const waterfallPanel = document.getElementById('waterfallPanel');
+ if (waterfallPanel) waterfallPanel.style.display = 'block';
+ lastWaterfallDraw = 0;
+ initWaterfallCanvas();
+ connectWaterfallSSE();
+ if (typeof reserveDevice === 'function') {
+ reserveDevice(parseInt(device), 'waterfall');
+ }
+ if (resume || resumeRfWaterfallAfterListening) {
+ resumeRfWaterfallAfterListening = false;
+ }
+ if (waterfallResumeTimer) {
+ clearTimeout(waterfallResumeTimer);
+ waterfallResumeTimer = null;
+ }
+ return { started: true };
+ } catch (err) {
+ console.error('[WATERFALL] Start error:', err);
+ if (!silent && typeof showNotification === 'function') {
+ showNotification('Error', 'Failed to start waterfall');
+ }
+ return { started: false, retryable: true };
+ }
}
async function stopWaterfall() {
if (waterfallMode === 'audio') {
stopAudioWaterfall();
isWaterfallRunning = false;
- document.getElementById('startWaterfallBtn').style.display = 'block';
- document.getElementById('stopWaterfallBtn').style.display = 'none';
+ setWaterfallControlButtons(false);
return;
}
+ // WebSocket path
+ if (waterfallUseWebSocket && waterfallWebSocket) {
+ try {
+ if (waterfallWebSocket.readyState === WebSocket.OPEN) {
+ waterfallWebSocket.send(JSON.stringify({ cmd: 'stop' }));
+ }
+ waterfallWebSocket.close();
+ } catch (e) {
+ console.error('[WATERFALL] WebSocket stop error:', e);
+ }
+ waterfallWebSocket = null;
+ waterfallUseWebSocket = false;
+ isWaterfallRunning = false;
+ setWaterfallControlButtons(false);
+ if (typeof releaseDevice === 'function') {
+ releaseDevice('waterfall');
+ }
+ // Allow backend WebSocket handler to finish cleanup and release SDR
+ await new Promise(resolve => setTimeout(resolve, 300));
+ return;
+ }
+
+ // SSE fallback path
try {
await fetch('/listening/waterfall/stop', { method: 'POST' });
isWaterfallRunning = false;
if (waterfallEventSource) { waterfallEventSource.close(); waterfallEventSource = null; }
- document.getElementById('startWaterfallBtn').style.display = 'block';
- document.getElementById('stopWaterfallBtn').style.display = 'none';
+ setWaterfallControlButtons(false);
+ if (typeof releaseDevice === 'function') {
+ releaseDevice('waterfall');
+ }
} catch (err) {
console.error('[WATERFALL] Stop error:', err);
}
@@ -3436,6 +3962,7 @@ function connectWaterfallSSE() {
if (rangeLabel) {
rangeLabel.textContent = `${waterfallStartFreq.toFixed(1)} - ${waterfallEndFreq.toFixed(1)} MHz`;
}
+ updateWaterfallZoomLabel(waterfallStartFreq, waterfallEndFreq);
const now = Date.now();
if (now - lastWaterfallDraw < WATERFALL_MIN_INTERVAL_MS) return;
lastWaterfallDraw = now;
@@ -3462,17 +3989,51 @@ function bindWaterfallInteraction() {
const ratio = Math.max(0, Math.min(1, x / rect.width));
const freq = waterfallStartFreq + ratio * (waterfallEndFreq - waterfallStartFreq);
if (typeof tuneToFrequency === 'function') {
- tuneToFrequency(freq, typeof currentModulation !== 'undefined' ? currentModulation : undefined);
+ tuneToFrequency(freq, suggestModulation(freq));
}
};
+ // Tooltip for showing frequency + modulation on hover
+ let tooltip = document.getElementById('waterfallTooltip');
+ if (!tooltip) {
+ tooltip = document.createElement('div');
+ tooltip.id = 'waterfallTooltip';
+ tooltip.style.cssText = 'position:fixed;pointer-events:none;background:rgba(0,0,0,0.85);color:#0f0;padding:4px 8px;border-radius:4px;font-size:12px;font-family:monospace;z-index:9999;display:none;white-space:nowrap;border:1px solid #333;';
+ document.body.appendChild(tooltip);
+ }
+
+ const hoverHandler = (event) => {
+ if (waterfallMode === 'audio') {
+ tooltip.style.display = 'none';
+ return;
+ }
+ const canvas = event.currentTarget;
+ const rect = canvas.getBoundingClientRect();
+ const x = event.clientX - rect.left;
+ const ratio = Math.max(0, Math.min(1, x / rect.width));
+ const freq = waterfallStartFreq + ratio * (waterfallEndFreq - waterfallStartFreq);
+ const mod = suggestModulation(freq);
+ tooltip.textContent = `${freq.toFixed(3)} MHz \u00b7 ${mod.toUpperCase()}`;
+ tooltip.style.left = (event.clientX + 12) + 'px';
+ tooltip.style.top = (event.clientY - 28) + 'px';
+ tooltip.style.display = 'block';
+ };
+
+ const leaveHandler = () => {
+ tooltip.style.display = 'none';
+ };
+
if (waterfallCanvas) {
waterfallCanvas.style.cursor = 'crosshair';
waterfallCanvas.addEventListener('click', handler);
+ waterfallCanvas.addEventListener('mousemove', hoverHandler);
+ waterfallCanvas.addEventListener('mouseleave', leaveHandler);
}
if (spectrumCanvas) {
spectrumCanvas.style.cursor = 'crosshair';
spectrumCanvas.addEventListener('click', handler);
+ spectrumCanvas.addEventListener('mousemove', hoverHandler);
+ spectrumCanvas.addEventListener('mouseleave', leaveHandler);
}
}
@@ -3497,3 +4058,5 @@ window.manualSignalGuess = manualSignalGuess;
window.guessSignal = guessSignal;
window.startWaterfall = startWaterfall;
window.stopWaterfall = stopWaterfall;
+window.zoomWaterfall = zoomWaterfall;
+window.syncWaterfallToFrequency = syncWaterfallToFrequency;
diff --git a/static/js/modes/sstv-general.js b/static/js/modes/sstv-general.js
index 0b89efe..3419315 100644
--- a/static/js/modes/sstv-general.js
+++ b/static/js/modes/sstv-general.js
@@ -11,6 +11,18 @@ const SSTVGeneral = (function() {
let currentMode = null;
let progress = 0;
+ // Signal scope state
+ let sstvGeneralScopeCtx = null;
+ let sstvGeneralScopeAnim = null;
+ let sstvGeneralScopeHistory = [];
+ const SSTV_GENERAL_SCOPE_LEN = 200;
+ let sstvGeneralScopeRms = 0;
+ let sstvGeneralScopePeak = 0;
+ let sstvGeneralScopeTargetRms = 0;
+ let sstvGeneralScopeTargetPeak = 0;
+ let sstvGeneralScopeMsgBurst = 0;
+ let sstvGeneralScopeTone = null;
+
/**
* Initialize the SSTV General mode
*/
@@ -190,6 +202,136 @@ const SSTVGeneral = (function() {
`;
}
+ /**
+ * Initialize signal scope canvas
+ */
+ function initSstvGeneralScope() {
+ const canvas = document.getElementById('sstvGeneralScopeCanvas');
+ if (!canvas) return;
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * (window.devicePixelRatio || 1);
+ canvas.height = rect.height * (window.devicePixelRatio || 1);
+ sstvGeneralScopeCtx = canvas.getContext('2d');
+ sstvGeneralScopeHistory = new Array(SSTV_GENERAL_SCOPE_LEN).fill(0);
+ sstvGeneralScopeRms = 0;
+ sstvGeneralScopePeak = 0;
+ sstvGeneralScopeTargetRms = 0;
+ sstvGeneralScopeTargetPeak = 0;
+ sstvGeneralScopeMsgBurst = 0;
+ sstvGeneralScopeTone = null;
+ drawSstvGeneralScope();
+ }
+
+ /**
+ * Draw signal scope animation frame
+ */
+ function drawSstvGeneralScope() {
+ const ctx = sstvGeneralScopeCtx;
+ if (!ctx) return;
+ const W = ctx.canvas.width;
+ const H = ctx.canvas.height;
+ const midY = H / 2;
+
+ // Phosphor persistence
+ ctx.fillStyle = 'rgba(5, 5, 16, 0.3)';
+ ctx.fillRect(0, 0, W, H);
+
+ // Smooth towards target
+ sstvGeneralScopeRms += (sstvGeneralScopeTargetRms - sstvGeneralScopeRms) * 0.25;
+ sstvGeneralScopePeak += (sstvGeneralScopeTargetPeak - sstvGeneralScopePeak) * 0.15;
+
+ // Push to history
+ sstvGeneralScopeHistory.push(Math.min(sstvGeneralScopeRms / 32768, 1.0));
+ if (sstvGeneralScopeHistory.length > SSTV_GENERAL_SCOPE_LEN) sstvGeneralScopeHistory.shift();
+
+ // Grid lines
+ ctx.strokeStyle = 'rgba(60, 40, 80, 0.4)';
+ ctx.lineWidth = 0.5;
+ for (let i = 1; i < 4; i++) {
+ const y = (H / 4) * i;
+ ctx.beginPath(); ctx.moveTo(0, y); ctx.lineTo(W, y); ctx.stroke();
+ }
+ for (let i = 1; i < 8; i++) {
+ const x = (W / 8) * i;
+ ctx.beginPath(); ctx.moveTo(x, 0); ctx.lineTo(x, H); ctx.stroke();
+ }
+
+ // Waveform
+ const stepX = W / (SSTV_GENERAL_SCOPE_LEN - 1);
+ ctx.strokeStyle = '#c080ff';
+ ctx.lineWidth = 1.5;
+ ctx.shadowColor = '#c080ff';
+ ctx.shadowBlur = 4;
+
+ // Upper half
+ ctx.beginPath();
+ for (let i = 0; i < sstvGeneralScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sstvGeneralScopeHistory[i] * midY * 0.9;
+ const y = midY - amp;
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ // Lower half (mirror)
+ ctx.beginPath();
+ for (let i = 0; i < sstvGeneralScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sstvGeneralScopeHistory[i] * midY * 0.9;
+ const y = midY + amp;
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+ ctx.shadowBlur = 0;
+
+ // Peak indicator
+ const peakNorm = Math.min(sstvGeneralScopePeak / 32768, 1.0);
+ if (peakNorm > 0.01) {
+ const peakY = midY - peakNorm * midY * 0.9;
+ ctx.strokeStyle = 'rgba(255, 68, 68, 0.6)';
+ ctx.lineWidth = 1;
+ ctx.setLineDash([4, 4]);
+ ctx.beginPath(); ctx.moveTo(0, peakY); ctx.lineTo(W, peakY); ctx.stroke();
+ ctx.setLineDash([]);
+ }
+
+ // Image decode flash
+ if (sstvGeneralScopeMsgBurst > 0.01) {
+ ctx.fillStyle = `rgba(0, 255, 100, ${sstvGeneralScopeMsgBurst * 0.15})`;
+ ctx.fillRect(0, 0, W, H);
+ sstvGeneralScopeMsgBurst *= 0.88;
+ }
+
+ // Update labels
+ const rmsLabel = document.getElementById('sstvGeneralScopeRmsLabel');
+ const peakLabel = document.getElementById('sstvGeneralScopePeakLabel');
+ const toneLabel = document.getElementById('sstvGeneralScopeToneLabel');
+ const statusLabel = document.getElementById('sstvGeneralScopeStatusLabel');
+ if (rmsLabel) rmsLabel.textContent = Math.round(sstvGeneralScopeRms);
+ if (peakLabel) peakLabel.textContent = Math.round(sstvGeneralScopePeak);
+ if (toneLabel) {
+ if (sstvGeneralScopeTone === 'leader') { toneLabel.textContent = 'LEADER'; toneLabel.style.color = '#0f0'; }
+ else if (sstvGeneralScopeTone === 'sync') { toneLabel.textContent = 'SYNC'; toneLabel.style.color = '#0ff'; }
+ else if (sstvGeneralScopeTone === 'decoding') { toneLabel.textContent = 'DECODING'; toneLabel.style.color = '#fa0'; }
+ else if (sstvGeneralScopeTone === 'noise') { toneLabel.textContent = 'NOISE'; toneLabel.style.color = '#555'; }
+ else { toneLabel.textContent = 'QUIET'; toneLabel.style.color = '#444'; }
+ }
+ if (statusLabel) {
+ if (sstvGeneralScopeRms > 500) { statusLabel.textContent = 'SIGNAL'; statusLabel.style.color = '#0f0'; }
+ else { statusLabel.textContent = 'MONITORING'; statusLabel.style.color = '#555'; }
+ }
+
+ sstvGeneralScopeAnim = requestAnimationFrame(drawSstvGeneralScope);
+ }
+
+ /**
+ * Stop signal scope
+ */
+ function stopSstvGeneralScope() {
+ if (sstvGeneralScopeAnim) { cancelAnimationFrame(sstvGeneralScopeAnim); sstvGeneralScopeAnim = null; }
+ sstvGeneralScopeCtx = null;
+ }
+
/**
* Start SSE stream
*/
@@ -198,6 +340,11 @@ const SSTVGeneral = (function() {
eventSource.close();
}
+ // Show and init scope
+ const scopePanel = document.getElementById('sstvGeneralScopePanel');
+ if (scopePanel) scopePanel.style.display = 'block';
+ initSstvGeneralScope();
+
eventSource = new EventSource('/sstv-general/stream');
eventSource.onmessage = (e) => {
@@ -205,6 +352,10 @@ const SSTVGeneral = (function() {
const data = JSON.parse(e.data);
if (data.type === 'sstv_progress') {
handleProgress(data);
+ } else if (data.type === 'sstv_scope') {
+ sstvGeneralScopeTargetRms = data.rms;
+ sstvGeneralScopeTargetPeak = data.peak;
+ if (data.tone !== undefined) sstvGeneralScopeTone = data.tone;
}
} catch (err) {
console.error('Failed to parse SSE message:', err);
@@ -227,6 +378,9 @@ const SSTVGeneral = (function() {
eventSource.close();
eventSource = null;
}
+ stopSstvGeneralScope();
+ const scopePanel = document.getElementById('sstvGeneralScopePanel');
+ if (scopePanel) scopePanel.style.display = 'none';
}
/**
@@ -245,6 +399,7 @@ const SSTVGeneral = (function() {
renderGallery();
showNotification('SSTV', 'New image decoded!');
updateStatusUI('listening', 'Listening...');
+ sstvGeneralScopeMsgBurst = 1.0;
// Clear decode progress so signal monitor can take over
const liveContent = document.getElementById('sstvGeneralLiveContent');
if (liveContent) liveContent.innerHTML = '';
diff --git a/static/js/modes/sstv.js b/static/js/modes/sstv.js
index 6bafdb0..ed6d13e 100644
--- a/static/js/modes/sstv.js
+++ b/static/js/modes/sstv.js
@@ -21,6 +21,18 @@ const SSTV = (function() {
// ISS frequency
const ISS_FREQ = 145.800;
+ // Signal scope state
+ let sstvScopeCtx = null;
+ let sstvScopeAnim = null;
+ let sstvScopeHistory = [];
+ const SSTV_SCOPE_LEN = 200;
+ let sstvScopeRms = 0;
+ let sstvScopePeak = 0;
+ let sstvScopeTargetRms = 0;
+ let sstvScopeTargetPeak = 0;
+ let sstvScopeMsgBurst = 0;
+ let sstvScopeTone = null;
+
/**
* Initialize the SSTV mode
*/
@@ -634,6 +646,136 @@ const SSTV = (function() {
`;
}
+ /**
+ * Initialize signal scope canvas
+ */
+ function initSstvScope() {
+ const canvas = document.getElementById('sstvScopeCanvas');
+ if (!canvas) return;
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * (window.devicePixelRatio || 1);
+ canvas.height = rect.height * (window.devicePixelRatio || 1);
+ sstvScopeCtx = canvas.getContext('2d');
+ sstvScopeHistory = new Array(SSTV_SCOPE_LEN).fill(0);
+ sstvScopeRms = 0;
+ sstvScopePeak = 0;
+ sstvScopeTargetRms = 0;
+ sstvScopeTargetPeak = 0;
+ sstvScopeMsgBurst = 0;
+ sstvScopeTone = null;
+ drawSstvScope();
+ }
+
+ /**
+ * Draw signal scope animation frame
+ */
+ function drawSstvScope() {
+ const ctx = sstvScopeCtx;
+ if (!ctx) return;
+ const W = ctx.canvas.width;
+ const H = ctx.canvas.height;
+ const midY = H / 2;
+
+ // Phosphor persistence
+ ctx.fillStyle = 'rgba(5, 5, 16, 0.3)';
+ ctx.fillRect(0, 0, W, H);
+
+ // Smooth towards target
+ sstvScopeRms += (sstvScopeTargetRms - sstvScopeRms) * 0.25;
+ sstvScopePeak += (sstvScopeTargetPeak - sstvScopePeak) * 0.15;
+
+ // Push to history
+ sstvScopeHistory.push(Math.min(sstvScopeRms / 32768, 1.0));
+ if (sstvScopeHistory.length > SSTV_SCOPE_LEN) sstvScopeHistory.shift();
+
+ // Grid lines
+ ctx.strokeStyle = 'rgba(60, 40, 80, 0.4)';
+ ctx.lineWidth = 0.5;
+ for (let i = 1; i < 4; i++) {
+ const y = (H / 4) * i;
+ ctx.beginPath(); ctx.moveTo(0, y); ctx.lineTo(W, y); ctx.stroke();
+ }
+ for (let i = 1; i < 8; i++) {
+ const x = (W / 8) * i;
+ ctx.beginPath(); ctx.moveTo(x, 0); ctx.lineTo(x, H); ctx.stroke();
+ }
+
+ // Waveform
+ const stepX = W / (SSTV_SCOPE_LEN - 1);
+ ctx.strokeStyle = '#c080ff';
+ ctx.lineWidth = 1.5;
+ ctx.shadowColor = '#c080ff';
+ ctx.shadowBlur = 4;
+
+ // Upper half
+ ctx.beginPath();
+ for (let i = 0; i < sstvScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sstvScopeHistory[i] * midY * 0.9;
+ const y = midY - amp;
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ // Lower half (mirror)
+ ctx.beginPath();
+ for (let i = 0; i < sstvScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sstvScopeHistory[i] * midY * 0.9;
+ const y = midY + amp;
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+ ctx.shadowBlur = 0;
+
+ // Peak indicator
+ const peakNorm = Math.min(sstvScopePeak / 32768, 1.0);
+ if (peakNorm > 0.01) {
+ const peakY = midY - peakNorm * midY * 0.9;
+ ctx.strokeStyle = 'rgba(255, 68, 68, 0.6)';
+ ctx.lineWidth = 1;
+ ctx.setLineDash([4, 4]);
+ ctx.beginPath(); ctx.moveTo(0, peakY); ctx.lineTo(W, peakY); ctx.stroke();
+ ctx.setLineDash([]);
+ }
+
+ // Image decode flash
+ if (sstvScopeMsgBurst > 0.01) {
+ ctx.fillStyle = `rgba(0, 255, 100, ${sstvScopeMsgBurst * 0.15})`;
+ ctx.fillRect(0, 0, W, H);
+ sstvScopeMsgBurst *= 0.88;
+ }
+
+ // Update labels
+ const rmsLabel = document.getElementById('sstvScopeRmsLabel');
+ const peakLabel = document.getElementById('sstvScopePeakLabel');
+ const toneLabel = document.getElementById('sstvScopeToneLabel');
+ const statusLabel = document.getElementById('sstvScopeStatusLabel');
+ if (rmsLabel) rmsLabel.textContent = Math.round(sstvScopeRms);
+ if (peakLabel) peakLabel.textContent = Math.round(sstvScopePeak);
+ if (toneLabel) {
+ if (sstvScopeTone === 'leader') { toneLabel.textContent = 'LEADER'; toneLabel.style.color = '#0f0'; }
+ else if (sstvScopeTone === 'sync') { toneLabel.textContent = 'SYNC'; toneLabel.style.color = '#0ff'; }
+ else if (sstvScopeTone === 'decoding') { toneLabel.textContent = 'DECODING'; toneLabel.style.color = '#fa0'; }
+ else if (sstvScopeTone === 'noise') { toneLabel.textContent = 'NOISE'; toneLabel.style.color = '#555'; }
+ else { toneLabel.textContent = 'QUIET'; toneLabel.style.color = '#444'; }
+ }
+ if (statusLabel) {
+ if (sstvScopeRms > 500) { statusLabel.textContent = 'SIGNAL'; statusLabel.style.color = '#0f0'; }
+ else { statusLabel.textContent = 'MONITORING'; statusLabel.style.color = '#555'; }
+ }
+
+ sstvScopeAnim = requestAnimationFrame(drawSstvScope);
+ }
+
+ /**
+ * Stop signal scope
+ */
+ function stopSstvScope() {
+ if (sstvScopeAnim) { cancelAnimationFrame(sstvScopeAnim); sstvScopeAnim = null; }
+ sstvScopeCtx = null;
+ }
+
/**
* Start SSE stream
*/
@@ -642,6 +784,11 @@ const SSTV = (function() {
eventSource.close();
}
+ // Show and init scope
+ const scopePanel = document.getElementById('sstvScopePanel');
+ if (scopePanel) scopePanel.style.display = 'block';
+ initSstvScope();
+
eventSource = new EventSource('/sstv/stream');
eventSource.onmessage = (e) => {
@@ -649,6 +796,10 @@ const SSTV = (function() {
const data = JSON.parse(e.data);
if (data.type === 'sstv_progress') {
handleProgress(data);
+ } else if (data.type === 'sstv_scope') {
+ sstvScopeTargetRms = data.rms;
+ sstvScopeTargetPeak = data.peak;
+ if (data.tone !== undefined) sstvScopeTone = data.tone;
}
} catch (err) {
console.error('Failed to parse SSE message:', err);
@@ -671,6 +822,9 @@ const SSTV = (function() {
eventSource.close();
eventSource = null;
}
+ stopSstvScope();
+ const scopePanel = document.getElementById('sstvScopePanel');
+ if (scopePanel) scopePanel.style.display = 'none';
}
/**
@@ -691,6 +845,7 @@ const SSTV = (function() {
renderGallery();
showNotification('SSTV', 'New image decoded!');
updateStatusUI('listening', 'Listening...');
+ sstvScopeMsgBurst = 1.0;
// Clear decode progress so signal monitor can take over
const liveContent = document.getElementById('sstvLiveContent');
if (liveContent) liveContent.innerHTML = '';
diff --git a/templates/gsm_spy_dashboard.html b/templates/gsm_spy_dashboard.html
new file mode 100644
index 0000000..dc673ad
--- /dev/null
+++ b/templates/gsm_spy_dashboard.html
@@ -0,0 +1,2530 @@
+
+
+
+
+
+ GSM SPY // INTERCEPT - See the Invisible
+
+ {% if offline_settings.fonts_source == 'local' %}
+
+ {% else %}
+
+ {% endif %}
+
+ {% if offline_settings.assets_source == 'local' %}
+
+
+ {% else %}
+
+
+ {% endif %}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {% set active_mode = 'gsm' %}
+ {% include 'partials/nav.html' with context %}
+
+
+
+
+
+ 0
+ TOWERS
+
+
+ 0
+ DEVICES
+
+
+ 0
+ ROGUES
+
+
+ 0
+ SIGNALS
+
+
+ -
+ CROWD
+
+
+
+
--:--:-- UTC
+
+ Analytics Overview
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Track device movement by analyzing Timing Advance transitions and cell handovers.
+ Estimates velocity and direction based on TA delta and cell sector patterns.
+
+
+
+
+
- km/h
+
Avg Velocity
+
+
+
+
+
+
+
+
+ Aggregate TMSI pings per cell sector to estimate crowd density.
+ Visualizes hotspots and congestion patterns across towers.
+
+
+
+
+
+
+
+
+ Analyze 60-day historical data to identify recurring patterns in device behavior.
+ Detects work locations, commute routes, and daily routines.
+
+
+
+
+
+
+
+
+ Validate neighbor cell lists against expected network topology.
+ Detects inconsistencies that may indicate rogue towers.
+
+
+
+
+
+
+
+
+ Correlate uplink and downlink timing to identify communication patterns.
+ Maps device-to-device interactions and network flows.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
GSM SCANNER
+
+
+ Americas
+ Europe
+ Asia
+
+
+
+
+ START
+
+
+
+
+
+
+
+
+
+
+
diff --git a/templates/index.html b/templates/index.html
index fa82842..31305a6 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -64,6 +64,7 @@
+
@@ -173,6 +174,10 @@
Vessels
+
+
+ GSM SPY
+
APRS
@@ -512,34 +517,6 @@
-
-
-
Waterfall
-
- Start (MHz)
-
-
-
- End (MHz)
-
-
-
- Bin Size
-
- 5 kHz
- 10 kHz
- 25 kHz
- 100 kHz
-
-
-
- Gain
-
-
-
Start Waterfall
-
Stop Waterfall
-
-
{% include 'partials/modes/pager.html' %}
{% include 'partials/modes/sensor.html' %}
@@ -608,16 +585,6 @@
-
-
-
-
-
-
-
@@ -1073,6 +1040,68 @@
+
+
+
+
WATERFALL
+
+
+
+ 20.0 MHz
+ SPAN
+
+
+
+
+ START
+
+
+
+ END
+
+
+
+
+
−
+
+
+
+
+
+ FFT
+
+ 512
+ 1024
+ 2048
+ 4096
+
+
+
+
+ GAIN
+
+
+
+
+
▶ START
+
◼ STOP
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Signal Scope
+
+ RMS: 0
+ PEAK: 0
+ QUIET
+ IDLE
+
+
+
+
+
+
@@ -2291,6 +2336,22 @@
+
+
+
+
+
Signal Scope
+
+ RMS: 0
+ PEAK: 0
+ QUIET
+ IDLE
+
+
+
+
+
+
@@ -2367,8 +2428,39 @@
+
+
+
+
+
+
+
+
+
Signal Scope
+
+ RSSI: dB
+ SNR: -- dB
+ IDLE
+
+
+
+
+
+
@@ -3232,15 +3324,11 @@
const rtlDeviceSection = document.getElementById('rtlDeviceSection');
if (rtlDeviceSection) rtlDeviceSection.style.display = (mode === 'pager' || mode === 'sensor' || mode === 'rtlamr' || mode === 'listening' || mode === 'aprs' || mode === 'sstv' || mode === 'weathersat' || mode === 'sstv_general' || mode === 'dmr') ? 'block' : 'none';
- // Show shared waterfall controls for supported modes
- const waterfallControlsSection = document.getElementById('waterfallControlsSection');
+ // Show waterfall panel if running in listening mode
const waterfallPanel = document.getElementById('waterfallPanel');
- const waterfallModes = ['pager', 'sensor', 'rtlamr', 'dmr', 'sstv', 'sstv_general', 'listening'];
- const waterfallSupported = waterfallModes.includes(mode);
- if (waterfallControlsSection) waterfallControlsSection.style.display = waterfallSupported ? 'block' : 'none';
if (waterfallPanel) {
const running = (typeof isWaterfallRunning !== 'undefined' && isWaterfallRunning);
- waterfallPanel.style.display = (waterfallSupported && running) ? 'block' : 'none';
+ waterfallPanel.style.display = (mode === 'listening' && running) ? 'block' : 'none';
}
// Toggle mode-specific tool status displays
@@ -3348,6 +3436,160 @@
}
}
+ // --- Sensor Signal Scope ---
+ let sensorScopeCtx = null;
+ let sensorScopeAnim = null;
+ let sensorScopeHistory = [];
+ const SENSOR_SCOPE_LEN = 200;
+ let sensorScopeRssi = 0;
+ let sensorScopeSnr = 0;
+ let sensorScopeTargetRssi = 0;
+ let sensorScopeTargetSnr = 0;
+ let sensorScopeMsgBurst = 0;
+ let sensorScopeLastPulse = 0;
+
+ function initSensorScope() {
+ const canvas = document.getElementById('sensorScopeCanvas');
+ if (!canvas) return;
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * (window.devicePixelRatio || 1);
+ canvas.height = rect.height * (window.devicePixelRatio || 1);
+ sensorScopeCtx = canvas.getContext('2d');
+ sensorScopeHistory = new Array(SENSOR_SCOPE_LEN).fill(0);
+ sensorScopeRssi = 0;
+ sensorScopeSnr = 0;
+ sensorScopeTargetRssi = 0;
+ sensorScopeTargetSnr = 0;
+ sensorScopeMsgBurst = 0;
+ sensorScopeLastPulse = 0;
+ drawSensorScope();
+ }
+
+ function drawSensorScope() {
+ const ctx = sensorScopeCtx;
+ if (!ctx) return;
+ const W = ctx.canvas.width;
+ const H = ctx.canvas.height;
+ const midY = H / 2;
+
+ // Phosphor persistence
+ ctx.fillStyle = 'rgba(5, 5, 16, 0.3)';
+ ctx.fillRect(0, 0, W, H);
+
+ // Smooth towards targets (decay when no new packets)
+ sensorScopeRssi += (sensorScopeTargetRssi - sensorScopeRssi) * 0.25;
+ sensorScopeSnr += (sensorScopeTargetSnr - sensorScopeSnr) * 0.15;
+
+ // Decay targets back to zero between packets
+ sensorScopeTargetRssi *= 0.97;
+ sensorScopeTargetSnr *= 0.97;
+
+ // RSSI is typically negative dBm (e.g. -0.1 to -30+)
+ // Normalize: map absolute RSSI to 0-1 range (0 dB = max, -40 dB = min)
+ const rssiNorm = Math.min(Math.max(Math.abs(sensorScopeRssi) / 40, 0), 1.0);
+ sensorScopeHistory.push(rssiNorm);
+ if (sensorScopeHistory.length > SENSOR_SCOPE_LEN) {
+ sensorScopeHistory.shift();
+ }
+
+ // Grid lines
+ ctx.strokeStyle = 'rgba(40, 80, 40, 0.4)';
+ ctx.lineWidth = 1;
+ for (let g = 0.25; g < 1; g += 0.25) {
+ const gy = midY - g * midY;
+ const gy2 = midY + g * midY;
+ ctx.beginPath();
+ ctx.moveTo(0, gy); ctx.lineTo(W, gy);
+ ctx.moveTo(0, gy2); ctx.lineTo(W, gy2);
+ ctx.stroke();
+ }
+
+ // Center baseline
+ ctx.strokeStyle = 'rgba(60, 100, 60, 0.5)';
+ ctx.beginPath();
+ ctx.moveTo(0, midY);
+ ctx.lineTo(W, midY);
+ ctx.stroke();
+
+ // Waveform (mirrored, green theme for 433)
+ const stepX = W / SENSOR_SCOPE_LEN;
+ ctx.strokeStyle = '#0f0';
+ ctx.lineWidth = 1.5;
+ ctx.shadowColor = '#0f0';
+ ctx.shadowBlur = 4;
+
+ // Upper half
+ ctx.beginPath();
+ for (let i = 0; i < sensorScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sensorScopeHistory[i] * midY * 0.9;
+ const y = midY - amp;
+ if (i === 0) ctx.moveTo(x, y);
+ else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ // Lower half (mirror)
+ ctx.beginPath();
+ for (let i = 0; i < sensorScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = sensorScopeHistory[i] * midY * 0.9;
+ const y = midY + amp;
+ if (i === 0) ctx.moveTo(x, y);
+ else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ ctx.shadowBlur = 0;
+
+ // SNR indicator (amber dashed line)
+ const snrNorm = Math.min(Math.max(Math.abs(sensorScopeSnr) / 40, 0), 1.0);
+ if (snrNorm > 0.01) {
+ const snrY = midY - snrNorm * midY * 0.9;
+ ctx.strokeStyle = 'rgba(255, 170, 0, 0.6)';
+ ctx.lineWidth = 1;
+ ctx.setLineDash([4, 4]);
+ ctx.beginPath();
+ ctx.moveTo(0, snrY);
+ ctx.lineTo(W, snrY);
+ ctx.stroke();
+ ctx.setLineDash([]);
+ }
+
+ // Sensor decode flash (green overlay)
+ if (sensorScopeMsgBurst > 0.01) {
+ ctx.fillStyle = `rgba(0, 255, 100, ${sensorScopeMsgBurst * 0.15})`;
+ ctx.fillRect(0, 0, W, H);
+ sensorScopeMsgBurst *= 0.88;
+ }
+
+ // Update labels
+ const rssiLabel = document.getElementById('sensorScopeRssiLabel');
+ const snrLabel = document.getElementById('sensorScopeSnrLabel');
+ const statusLabel = document.getElementById('sensorScopeStatusLabel');
+ if (rssiLabel) rssiLabel.textContent = sensorScopeRssi < -0.5 ? sensorScopeRssi.toFixed(1) : '--';
+ if (snrLabel) snrLabel.textContent = sensorScopeSnr > 0.5 ? sensorScopeSnr.toFixed(1) : '--';
+ if (statusLabel) {
+ if (Math.abs(sensorScopeRssi) > 1) {
+ statusLabel.textContent = 'SIGNAL';
+ statusLabel.style.color = '#0f0';
+ } else {
+ statusLabel.textContent = 'MONITORING';
+ statusLabel.style.color = '#555';
+ }
+ }
+
+ sensorScopeAnim = requestAnimationFrame(drawSensorScope);
+ }
+
+ function stopSensorScope() {
+ if (sensorScopeAnim) {
+ cancelAnimationFrame(sensorScopeAnim);
+ sensorScopeAnim = null;
+ }
+ sensorScopeCtx = null;
+ }
+
// Start sensor decoding
function startSensorDecoding() {
const freq = document.getElementById('sensorFrequency').value;
@@ -3537,6 +3779,18 @@
document.getElementById('statusText').textContent = running ? 'Listening...' : 'Idle';
document.getElementById('startSensorBtn').style.display = running ? 'none' : 'block';
document.getElementById('stopSensorBtn').style.display = running ? 'block' : 'none';
+
+ // Signal scope
+ const scopePanel = document.getElementById('sensorScopePanel');
+ if (scopePanel) {
+ if (running) {
+ scopePanel.style.display = 'block';
+ initSensorScope();
+ } else {
+ stopSensorScope();
+ scopePanel.style.display = 'none';
+ }
+ }
}
function startSensorStream() {
@@ -3554,6 +3808,9 @@
const data = JSON.parse(e.data);
if (data.type === 'sensor') {
addSensorReading(data);
+ } else if (data.type === 'scope') {
+ sensorScopeTargetRssi = data.rssi;
+ sensorScopeTargetSnr = data.snr;
} else if (data.type === 'status') {
if (data.text === 'stopped') {
setSensorRunning(false);
@@ -3578,6 +3835,9 @@
playAlert();
pulseSignal();
+ // Flash sensor scope green on decode
+ sensorScopeMsgBurst = 1.0;
+
sensorCount++;
document.getElementById('sensorCount').textContent = sensorCount;
@@ -4443,6 +4703,153 @@
// Pager mode polling timer for agent mode
let pagerPollTimer = null;
+ // --- Pager Signal Scope ---
+ let pagerScopeCtx = null;
+ let pagerScopeAnim = null;
+ let pagerScopeHistory = [];
+ const SCOPE_HISTORY_LEN = 200;
+ let pagerScopeRms = 0;
+ let pagerScopePeak = 0;
+ let pagerScopeTargetRms = 0;
+ let pagerScopeTargetPeak = 0;
+ let pagerScopeMsgBurst = 0;
+
+ function initPagerScope() {
+ const canvas = document.getElementById('pagerScopeCanvas');
+ if (!canvas) return;
+ // Set actual pixel resolution
+ const rect = canvas.getBoundingClientRect();
+ canvas.width = rect.width * (window.devicePixelRatio || 1);
+ canvas.height = rect.height * (window.devicePixelRatio || 1);
+ pagerScopeCtx = canvas.getContext('2d');
+ pagerScopeHistory = new Array(SCOPE_HISTORY_LEN).fill(0);
+ pagerScopeRms = 0;
+ pagerScopePeak = 0;
+ pagerScopeTargetRms = 0;
+ pagerScopeTargetPeak = 0;
+ pagerScopeMsgBurst = 0;
+ drawPagerScope();
+ }
+
+ function drawPagerScope() {
+ const ctx = pagerScopeCtx;
+ if (!ctx) return;
+ const W = ctx.canvas.width;
+ const H = ctx.canvas.height;
+ const midY = H / 2;
+
+ // Phosphor persistence: semi-transparent clear
+ ctx.fillStyle = 'rgba(5, 5, 16, 0.3)';
+ ctx.fillRect(0, 0, W, H);
+
+ // Smooth towards target values
+ pagerScopeRms += (pagerScopeTargetRms - pagerScopeRms) * 0.25;
+ pagerScopePeak += (pagerScopeTargetPeak - pagerScopePeak) * 0.15;
+
+ // Push current RMS into history (normalized 0-1 against 32768)
+ pagerScopeHistory.push(Math.min(pagerScopeRms / 32768, 1.0));
+ if (pagerScopeHistory.length > SCOPE_HISTORY_LEN) {
+ pagerScopeHistory.shift();
+ }
+
+ // Grid lines
+ ctx.strokeStyle = 'rgba(40, 40, 80, 0.4)';
+ ctx.lineWidth = 1;
+ for (let g = 0.25; g < 1; g += 0.25) {
+ const gy = midY - g * midY;
+ const gy2 = midY + g * midY;
+ ctx.beginPath();
+ ctx.moveTo(0, gy); ctx.lineTo(W, gy);
+ ctx.moveTo(0, gy2); ctx.lineTo(W, gy2);
+ ctx.stroke();
+ }
+
+ // Center baseline
+ ctx.strokeStyle = 'rgba(60, 60, 100, 0.5)';
+ ctx.beginPath();
+ ctx.moveTo(0, midY);
+ ctx.lineTo(W, midY);
+ ctx.stroke();
+
+ // Waveform (mirrored)
+ const stepX = W / SCOPE_HISTORY_LEN;
+ ctx.strokeStyle = '#0ff';
+ ctx.lineWidth = 1.5;
+ ctx.shadowColor = '#0ff';
+ ctx.shadowBlur = 4;
+
+ // Upper half
+ ctx.beginPath();
+ for (let i = 0; i < pagerScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = pagerScopeHistory[i] * midY * 0.9;
+ const y = midY - amp;
+ if (i === 0) ctx.moveTo(x, y);
+ else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ // Lower half (mirror)
+ ctx.beginPath();
+ for (let i = 0; i < pagerScopeHistory.length; i++) {
+ const x = i * stepX;
+ const amp = pagerScopeHistory[i] * midY * 0.9;
+ const y = midY + amp;
+ if (i === 0) ctx.moveTo(x, y);
+ else ctx.lineTo(x, y);
+ }
+ ctx.stroke();
+
+ ctx.shadowBlur = 0;
+
+ // Peak indicator (dashed red line)
+ const peakNorm = Math.min(pagerScopePeak / 32768, 1.0);
+ if (peakNorm > 0.01) {
+ const peakY = midY - peakNorm * midY * 0.9;
+ ctx.strokeStyle = 'rgba(255, 68, 68, 0.6)';
+ ctx.lineWidth = 1;
+ ctx.setLineDash([4, 4]);
+ ctx.beginPath();
+ ctx.moveTo(0, peakY);
+ ctx.lineTo(W, peakY);
+ ctx.stroke();
+ ctx.setLineDash([]);
+ }
+
+ // Message decode flash (green overlay)
+ if (pagerScopeMsgBurst > 0.01) {
+ ctx.fillStyle = `rgba(0, 255, 100, ${pagerScopeMsgBurst * 0.15})`;
+ ctx.fillRect(0, 0, W, H);
+ pagerScopeMsgBurst *= 0.88;
+ }
+
+ // Update labels
+ const rmsLabel = document.getElementById('scopeRmsLabel');
+ const peakLabel = document.getElementById('scopePeakLabel');
+ const statusLabel = document.getElementById('scopeStatusLabel');
+ if (rmsLabel) rmsLabel.textContent = Math.round(pagerScopeRms);
+ if (peakLabel) peakLabel.textContent = Math.round(pagerScopePeak);
+ if (statusLabel) {
+ if (pagerScopeRms > 500) {
+ statusLabel.textContent = 'SIGNAL';
+ statusLabel.style.color = '#0f0';
+ } else {
+ statusLabel.textContent = 'MONITORING';
+ statusLabel.style.color = '#555';
+ }
+ }
+
+ pagerScopeAnim = requestAnimationFrame(drawPagerScope);
+ }
+
+ function stopPagerScope() {
+ if (pagerScopeAnim) {
+ cancelAnimationFrame(pagerScopeAnim);
+ pagerScopeAnim = null;
+ }
+ pagerScopeCtx = null;
+ }
+
function startDecoding() {
const freq = document.getElementById('frequency').value;
const gain = document.getElementById('gain').value;
@@ -4571,7 +4978,7 @@
eventSource.close();
eventSource = null;
}
- showInfo('Killed all processes: ' + (data.processes.length ? data.processes.join(', ') : 'none running'));
+ showInfo('All processes stopped' + (data.processes.length ? ` (${data.processes.length} killed)` : ' (none were running)'));
});
}
@@ -4622,6 +5029,18 @@
document.getElementById('statusText').textContent = running ? 'Decoding...' : 'Idle';
document.getElementById('startBtn').style.display = running ? 'none' : 'block';
document.getElementById('stopBtn').style.display = running ? 'block' : 'none';
+
+ // Signal scope
+ const scopePanel = document.getElementById('pagerScopePanel');
+ if (scopePanel) {
+ if (running) {
+ scopePanel.style.display = 'block';
+ initPagerScope();
+ } else {
+ stopPagerScope();
+ scopePanel.style.display = 'none';
+ }
+ }
}
function startStream(isAgentMode = false) {
@@ -4657,6 +5076,9 @@
}
} else if (payload.type === 'info') {
showInfo(`[${data.agent_name}] ${payload.text}`);
+ } else if (payload.type === 'scope') {
+ pagerScopeTargetRms = payload.rms;
+ pagerScopeTargetPeak = payload.peak;
}
} else if (data.type === 'keepalive') {
// Ignore keepalive messages
@@ -4675,6 +5097,9 @@
showInfo(data.text);
} else if (data.type === 'raw') {
showInfo(data.text);
+ } else if (data.type === 'scope') {
+ pagerScopeTargetRms = data.rms;
+ pagerScopeTargetPeak = data.peak;
}
}
};
@@ -4782,6 +5207,9 @@
// Update signal meter
pulseSignal();
+ // Flash signal scope green on decode
+ pagerScopeMsgBurst = 1.0;
+
// Use SignalCards component to create the message card (auto-detects status)
const msgEl = SignalCards.createPagerCard(msg);
diff --git a/templates/partials/nav.html b/templates/partials/nav.html
index 79e1cb4..116946c 100644
--- a/templates/partials/nav.html
+++ b/templates/partials/nav.html
@@ -67,6 +67,7 @@
{{ mode_item('rtlamr', 'Meters', '
') }}
{{ mode_item('adsb', 'Aircraft', '
', '/adsb/dashboard') }}
{{ mode_item('ais', 'Vessels', '
', '/ais/dashboard') }}
+ {{ mode_item('gsm', 'GSM SPY', '
', '/gsm_spy/dashboard') }}
{{ mode_item('aprs', 'APRS', '
') }}
{{ mode_item('listening', 'Listening Post', '
') }}
{{ mode_item('spystations', 'Spy Stations', '
') }}
diff --git a/tests/test_gsm_spy.py b/tests/test_gsm_spy.py
new file mode 100644
index 0000000..fbcf6bb
--- /dev/null
+++ b/tests/test_gsm_spy.py
@@ -0,0 +1,332 @@
+"""Unit tests for GSM Spy parsing and validation functions."""
+
+import pytest
+from routes.gsm_spy import (
+ parse_grgsm_scanner_output,
+ parse_tshark_output,
+ arfcn_to_frequency,
+ validate_band_names,
+ REGIONAL_BANDS
+)
+
+
class TestParseGrgsmScannerOutput:
    """Tests for parse_grgsm_scanner_output()."""

    def test_valid_output_line(self):
        """A well-formed grgsm_scanner line yields a fully populated tower dict."""
        raw = "ARFCN: 23, Freq: 940.6M, CID: 31245, LAC: 1234, MCC: 214, MNC: 01, Pwr: -48"
        parsed = parse_grgsm_scanner_output(raw)

        assert parsed is not None
        expected = {
            'type': 'tower',
            'arfcn': 23,
            'frequency': 940.6,
            'cid': 31245,
            'lac': 1234,
            'mcc': 214,
            'mnc': 1,
            'signal_strength': -48.0,
        }
        for key, value in expected.items():
            assert parsed[key] == value
        assert 'timestamp' in parsed

    def test_freq_without_suffix(self):
        """A frequency lacking the trailing M suffix still parses."""
        parsed = parse_grgsm_scanner_output(
            "ARFCN: 975, Freq: 925.2, CID: 13522, LAC: 38722, MCC: 262, MNC: 1, Pwr: -58"
        )
        assert parsed is not None
        assert parsed['frequency'] == 925.2

    def test_config_line(self):
        """Configuration summary lines produce no record."""
        assert parse_grgsm_scanner_output(" Configuration: 1 CCCH, not combined") is None

    def test_neighbour_line(self):
        """Neighbour-cell listing lines produce no record."""
        assert parse_grgsm_scanner_output(" Neighbour Cells: 57, 61, 70, 71, 72, 86") is None

    def test_cell_arfcn_line(self):
        """Cell ARFCN listing lines produce no record."""
        assert parse_grgsm_scanner_output(" Cell ARFCNs: 63, 76") is None

    def test_progress_line(self):
        """Progress/status banner lines produce no record."""
        assert parse_grgsm_scanner_output("Scanning GSM900 band...") is None

    def test_empty_line(self):
        """An empty string produces no record."""
        assert parse_grgsm_scanner_output("") is None

    def test_invalid_data(self):
        """Non-numeric field values make the whole line unparseable."""
        garbage = "ARFCN: abc, Freq: xyz, CID: bad, LAC: data, MCC: bad, MNC: bad, Pwr: bad"
        assert parse_grgsm_scanner_output(garbage) is None

    def test_no_identity_filtered(self):
        """Entries with MCC=0/MNC=0 (no network identity) are dropped."""
        raw = "ARFCN: 115, Freq: 925.0M, CID: 0, LAC: 0, MCC: 0, MNC: 0, Pwr: -100"
        assert parse_grgsm_scanner_output(raw) is None

    def test_mcc_zero_mnc_zero_filtered(self):
        """MCC=0/MNC=0 is dropped even when CID/LAC look plausible."""
        raw = "ARFCN: 113, Freq: 924.6M, CID: 1234, LAC: 5678, MCC: 0, MNC: 0, Pwr: -90"
        assert parse_grgsm_scanner_output(raw) is None

    def test_cid_zero_valid_mcc_passes(self):
        """CID=0 with a real MCC/MNC is kept (partially decoded cell)."""
        parsed = parse_grgsm_scanner_output(
            "ARFCN: 115, Freq: 958.0M, CID: 0, LAC: 21864, MCC: 234, MNC: 10, Pwr: -51"
        )
        assert parsed is not None
        assert (parsed['cid'], parsed['mcc']) == (0, 234)
        assert parsed['signal_strength'] == -51.0

    def test_valid_cid_nonzero(self):
        """A non-zero CID with a real MCC/MNC is kept."""
        parsed = parse_grgsm_scanner_output(
            "ARFCN: 115, Freq: 925.0M, CID: 19088, LAC: 21864, MCC: 234, MNC: 10, Pwr: -58"
        )
        assert parsed is not None
        assert parsed['cid'] == 19088
        assert parsed['signal_strength'] == -58.0
+
+
class TestParseTsharkOutput:
    """Tests for parse_tshark_output()."""

    def test_valid_full_output(self):
        """A tab-separated line with every field populated parses completely."""
        parsed = parse_tshark_output("5\t0xABCD1234\t123456789012345\t1234\t31245")

        assert parsed is not None
        assert parsed['type'] == 'device'
        assert parsed['ta_value'] == 5
        assert parsed['tmsi'] == '0xABCD1234'
        assert parsed['imsi'] == '123456789012345'
        assert (parsed['lac'], parsed['cid']) == (1234, 31245)
        # Distance is derived from timing advance: TA * 554 meters per unit.
        assert parsed['distance_meters'] == 5 * 554
        assert 'timestamp' in parsed

    def test_missing_optional_fields(self):
        """Empty TMSI/IMSI columns come back as None."""
        parsed = parse_tshark_output("3\t\t\t1234\t31245")

        assert parsed is not None
        assert parsed['ta_value'] == 3
        assert parsed['tmsi'] is None
        assert parsed['imsi'] is None
        assert (parsed['lac'], parsed['cid']) == (1234, 31245)

    def test_no_ta_value(self):
        """An empty TA field is rejected.

        Current behavior: int('') raises inside the parser, so the whole
        line fails to parse and None is returned.
        """
        assert parse_tshark_output("\t0xABCD1234\t123456789012345\t1234\t31245") is None

    def test_invalid_line(self):
        """Arbitrary non-tabular text produces no record."""
        assert parse_tshark_output("invalid data") is None

    def test_empty_line(self):
        """An empty string produces no record."""
        assert parse_tshark_output("") is None

    def test_partial_fields(self):
        """Fewer than five tab-separated fields produces no record."""
        assert parse_tshark_output("5\t0xABCD1234") is None  # only 2 fields
+
+
class TestArfcnToFrequency:
    """Tests for arfcn_to_frequency()."""

    def test_gsm850_arfcn(self):
        """Band edges of GSM850 (ARFCN 128-251, 869-894 MHz)."""
        assert arfcn_to_frequency(128) == 869000000   # 869 MHz
        assert arfcn_to_frequency(251) == 893600000   # 893.6 MHz

    def test_egsm900_arfcn(self):
        """Band edges of EGSM900 (ARFCN 0-124, 925-960 MHz)."""
        assert arfcn_to_frequency(0) == 925000000     # 925 MHz
        assert arfcn_to_frequency(124) == 949800000   # 949.8 MHz

    def test_dcs1800_arfcn(self):
        """ARFCNs unique to DCS1800 (ARFCN 512-885, 1805-1880 MHz).

        ARFCN 512 also exists in PCS1900 and would match that band first,
        so probe with 811+, which lies beyond the PCS1900 range (512-810).
        """
        # Frequency is offset from the band start in 200 kHz channel steps.
        assert arfcn_to_frequency(811) == 1805000000 + (811 - 512) * 200000
        assert arfcn_to_frequency(885) == 1879600000  # 1879.6 MHz

    def test_pcs1900_arfcn(self):
        """ARFCN 512 is ambiguous (DCS1800 vs PCS1900); whichever band wins,
        a positive frequency must come back."""
        assert arfcn_to_frequency(512) > 0

    def test_invalid_arfcn(self):
        """ARFCNs outside every known band raise ValueError."""
        with pytest.raises(ValueError, match="not found in any known GSM band"):
            arfcn_to_frequency(9999)
        with pytest.raises(ValueError):
            arfcn_to_frequency(-1)

    def test_arfcn_200khz_spacing(self):
        """Adjacent ARFCNs are exactly one 200 kHz channel apart."""
        assert arfcn_to_frequency(129) - arfcn_to_frequency(128) == 200000
+
+
class TestValidateBandNames:
    """Tests for validate_band_names()."""

    def test_valid_americas_bands(self):
        """Americas accepts its own band names unchanged."""
        requested = ['GSM850', 'PCS1900']
        bands, err = validate_band_names(requested, 'Americas')
        assert bands == requested
        assert err is None

    def test_valid_europe_bands(self):
        """Europe accepts its band names (EGSM900, not GSM900)."""
        requested = ['EGSM900', 'DCS1800', 'GSM850', 'GSM800']
        bands, err = validate_band_names(requested, 'Europe')
        assert bands == requested
        assert err is None

    def test_valid_asia_bands(self):
        """Asia accepts its band names (EGSM900, not GSM900)."""
        requested = ['EGSM900', 'DCS1800']
        bands, err = validate_band_names(requested, 'Asia')
        assert bands == requested
        assert err is None

    def test_invalid_band_for_region(self):
        """One unknown band fails the whole request and is named in the error."""
        bands, err = validate_band_names(['GSM900', 'INVALID_BAND'], 'Americas')
        assert bands == []
        assert err is not None
        assert 'Invalid bands' in err
        assert 'INVALID_BAND' in err

    def test_invalid_region(self):
        """An unknown region name is rejected outright."""
        bands, err = validate_band_names(['GSM900'], 'InvalidRegion')
        assert bands == []
        assert err is not None
        assert 'Invalid region' in err

    def test_empty_bands_list(self):
        """An empty request is valid and yields an empty result."""
        bands, err = validate_band_names([], 'Americas')
        assert bands == []
        assert err is None

    def test_single_valid_band(self):
        """A single valid band round-trips unchanged."""
        bands, err = validate_band_names(['GSM850'], 'Americas')
        assert bands == ['GSM850']
        assert err is None

    def test_case_sensitive_band_names(self):
        """Band matching is case-sensitive: lowercase names are rejected."""
        bands, err = validate_band_names(['gsm850'], 'Americas')
        assert bands == []
        assert err is not None

    def test_multiple_invalid_bands(self):
        """All invalid names are reported, even when valid ones are mixed in."""
        bands, err = validate_band_names(['INVALID1', 'GSM850', 'INVALID2'], 'Americas')
        assert bands == []
        assert err is not None
        assert 'INVALID1' in err
        assert 'INVALID2' in err
+
+
class TestRegionalBandsConfig:
    """Sanity checks on the REGIONAL_BANDS configuration table."""

    def test_all_regions_defined(self):
        """Every supported region has an entry."""
        for region in ('Americas', 'Europe', 'Asia'):
            assert region in REGIONAL_BANDS

    def test_all_bands_have_required_fields(self):
        """Each band definition carries its frequency and ARFCN bounds."""
        required = ('start', 'end', 'arfcn_start', 'arfcn_end')
        for bands in REGIONAL_BANDS.values():
            for config in bands.values():
                for field in required:
                    assert field in config

    def test_frequency_ranges_valid(self):
        """Frequency bounds are positive and properly ordered (start < end)."""
        for bands in REGIONAL_BANDS.values():
            for config in bands.values():
                assert config['start'] > 0
                assert config['end'] > 0
                assert config['start'] < config['end']

    def test_arfcn_ranges_valid(self):
        """ARFCN bounds are non-negative and properly ordered."""
        for bands in REGIONAL_BANDS.values():
            for config in bands.values():
                assert 0 <= config['arfcn_start'] <= config['arfcn_end']
diff --git a/tests/test_waterfall_fft.py b/tests/test_waterfall_fft.py
new file mode 100644
index 0000000..722569e
--- /dev/null
+++ b/tests/test_waterfall_fft.py
@@ -0,0 +1,168 @@
+"""Tests for the waterfall FFT pipeline."""
+
+import struct
+
+import numpy as np
+import pytest
+
+from utils.waterfall_fft import (
+ build_binary_frame,
+ compute_power_spectrum,
+ cu8_to_complex,
+ quantize_to_uint8,
+)
+
+
class TestCu8ToComplex:
    """Tests for cu8_to_complex conversion (unsigned I/Q bytes -> complex)."""

    def test_zero_maps_to_negative_one(self):
        """Byte 0 for both I and Q lands near -1 - 1j."""
        sample = cu8_to_complex(bytes([0, 0]))[0]
        assert sample.real == pytest.approx(-1.0, abs=0.01)
        assert sample.imag == pytest.approx(-1.0, abs=0.01)

    def test_255_maps_to_positive_one(self):
        """Byte 255 for both I and Q lands near +1 + 1j."""
        sample = cu8_to_complex(bytes([255, 255]))[0]
        assert sample.real == pytest.approx(1.0, abs=0.01)
        assert sample.imag == pytest.approx(1.0, abs=0.01)

    def test_128_maps_to_near_zero(self):
        """The midpoint byte 128 lands near 0 + 0j."""
        sample = cu8_to_complex(bytes([128, 128]))[0]
        assert abs(sample.real) < 0.01
        assert abs(sample.imag) < 0.01

    def test_output_length(self):
        """Each I/Q byte pair becomes exactly one complex sample."""
        raw = bytes(range(256)) * 4  # 1024 bytes -> 512 complex samples
        assert len(cu8_to_complex(raw)) == 512

    def test_output_dtype(self):
        """Output is a complex floating-point numpy array."""
        converted = cu8_to_complex(bytes([100, 200, 50, 150]))
        assert converted.dtype == np.complex64 or np.issubdtype(converted.dtype, np.complexfloating)
+
+
class TestComputePowerSpectrum:
    """Tests for compute_power_spectrum."""

    def test_output_length_matches_fft_size(self):
        """The spectrum always has exactly fft_size bins."""
        silence = np.zeros(4096, dtype=np.complex64)
        spectrum = compute_power_spectrum(silence, fft_size=1024, avg_count=4)
        assert len(spectrum) == 1024

    def test_output_dtype(self):
        """The spectrum is float32."""
        silence = np.zeros(4096, dtype=np.complex64)
        spectrum = compute_power_spectrum(silence, fft_size=1024, avg_count=4)
        assert spectrum.dtype == np.float32

    def test_pure_tone_peak_at_correct_bin(self):
        """A complex exponential at bin k peaks at index fft_size/2 + k after fftshift."""
        fft_size, avg_count, freq_bin = 1024, 4, 256
        n = np.arange(fft_size * avg_count, dtype=np.float32)
        tone = np.exp(2j * np.pi * freq_bin / fft_size * n).astype(np.complex64)
        spectrum = compute_power_spectrum(tone, fft_size=fft_size, avg_count=avg_count)
        assert np.argmax(spectrum) == fft_size // 2 + freq_bin

    def test_insufficient_samples_returns_default(self):
        """Too few samples for even one FFT segment yields the -100 dB floor."""
        short = np.zeros(100, dtype=np.complex64)
        spectrum = compute_power_spectrum(short, fft_size=1024, avg_count=4)
        assert len(spectrum) == 1024
        assert np.all(spectrum == -100.0)

    def test_partial_avg_count(self):
        """With samples for only 2 of 4 averages, real dB values still come out."""
        fft_size = 1024
        noise = np.random.randn(2048).astype(np.float32).view(np.complex64)
        spectrum = compute_power_spectrum(noise, fft_size=fft_size, avg_count=4)
        assert len(spectrum) == fft_size
        # Must not be the -100 dB "no data" default everywhere.
        assert np.any(spectrum != -100.0)
+
+
class TestQuantizeToUint8:
    """Tests for quantize_to_uint8 (dB power levels -> 0..255)."""

    def _quantize_one(self, db_value):
        """Helper: quantize a single dB value with the -90..-20 window."""
        levels = np.array([db_value], dtype=np.float32)
        return quantize_to_uint8(levels, db_min=-90, db_max=-20)[0]

    def test_db_min_maps_to_zero(self):
        """The lower bound of the dB window maps to 0."""
        assert self._quantize_one(-90.0) == 0

    def test_db_max_maps_to_255(self):
        """The upper bound of the dB window maps to 255."""
        assert self._quantize_one(-20.0) == 255

    def test_below_min_clamped_to_zero(self):
        """Values under the window clamp to 0 rather than wrapping."""
        assert self._quantize_one(-120.0) == 0

    def test_above_max_clamped_to_255(self):
        """Values over the window clamp to 255 rather than wrapping."""
        assert self._quantize_one(0.0) == 255

    def test_midpoint(self):
        """The window midpoint (-55 dB between -90 and -20) lands near 127."""
        assert 125 <= self._quantize_one(-55.0) <= 130

    def test_output_length(self):
        """Output length matches input length (default window)."""
        levels = np.random.randn(1024).astype(np.float32) * 30 - 60
        assert len(quantize_to_uint8(levels)) == 1024
+
+
+class TestBuildBinaryFrame:
+ """Tests for build_binary_frame."""
+
+ def test_header_values(self):
+ bins = bytes([128] * 1024)
+ frame = build_binary_frame(100.0, 102.0, bins)
+ msg_type = frame[0]
+ start_freq, end_freq = struct.unpack_from('
None:
@@ -169,6 +171,17 @@ class CleanupManager:
if store in self.stores:
self.stores.remove(store)
+ def register_db_cleanup(self, func: callable, interval_multiplier: int = 60) -> None:
+ """
+ Register a database cleanup function.
+
+ Args:
+ func: Cleanup function to call (should return number of deleted rows)
+ interval_multiplier: How many cleanup cycles to wait between calls (default: 60 = 1 hour if interval is 60s)
+ """
+ with self._lock:
+ self.db_cleanup_funcs.append((func, interval_multiplier))
+
def start(self) -> None:
"""Start the cleanup timer."""
with self._lock:
@@ -194,11 +207,15 @@ class CleanupManager:
self._timer.start()
def _run_cleanup(self) -> None:
- """Run cleanup on all registered stores."""
+ """Run cleanup on all registered stores and database tables."""
total_cleaned = 0
+ # Cleanup in-memory data stores
with self._lock:
stores = list(self.stores)
+ db_funcs = list(self.db_cleanup_funcs)
+ self._cleanup_count += 1
+ current_count = self._cleanup_count
for store in stores:
try:
@@ -206,6 +223,17 @@ class CleanupManager:
except Exception as e:
logger.error(f"Error cleaning up {store.name}: {e}")
+ # Cleanup database tables (less frequently)
+ for func, interval_multiplier in db_funcs:
+ if current_count % interval_multiplier == 0:
+ try:
+ deleted = func()
+ if deleted > 0:
+ logger.info(f"Database cleanup: {func.__name__} removed {deleted} rows")
+ total_cleaned += deleted
+ except Exception as e:
+ logger.error(f"Error in database cleanup {func.__name__}: {e}")
+
if total_cleaned > 0:
logger.info(f"Cleanup complete: removed {total_cleaned} stale entries")
diff --git a/utils/constants.py b/utils/constants.py
index f252938..f51124e 100644
--- a/utils/constants.py
+++ b/utils/constants.py
@@ -274,3 +274,14 @@ MAX_DEAUTH_ALERTS_AGE_SECONDS = 300 # 5 minutes
# Deauth detector sniff timeout (seconds)
DEAUTH_SNIFF_TIMEOUT = 0.5
+
+
+# =============================================================================
+# GSM SPY (Cellular Intelligence)
+# =============================================================================
+
+# Maximum age for GSM tower/device data in DataStore (seconds)
+MAX_GSM_AGE_SECONDS = 300 # 5 minutes
+
+# Timing Advance conversion to meters
+GSM_TA_METERS_PER_UNIT = 554
diff --git a/utils/database.py b/utils/database.py
index 92b62cc..9e62d87 100644
--- a/utils/database.py
+++ b/utils/database.py
@@ -88,65 +88,111 @@ def init_db() -> None:
ON signal_history(mode, device_id, timestamp)
''')
- # Device correlation table
- conn.execute('''
- CREATE TABLE IF NOT EXISTS device_correlations (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- wifi_mac TEXT,
- bt_mac TEXT,
- confidence REAL,
- first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- metadata TEXT,
- UNIQUE(wifi_mac, bt_mac)
- )
- ''')
-
- # Alert rules
- conn.execute('''
- CREATE TABLE IF NOT EXISTS alert_rules (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- mode TEXT,
- event_type TEXT,
- match TEXT,
- severity TEXT DEFAULT 'medium',
- enabled BOOLEAN DEFAULT 1,
- notify TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- )
- ''')
-
- # Alert events
- conn.execute('''
- CREATE TABLE IF NOT EXISTS alert_events (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- rule_id INTEGER,
- mode TEXT,
- event_type TEXT,
- severity TEXT DEFAULT 'medium',
- title TEXT,
- message TEXT,
- payload TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
- )
- ''')
-
- # Session recordings
- conn.execute('''
- CREATE TABLE IF NOT EXISTS recording_sessions (
- id TEXT PRIMARY KEY,
- mode TEXT NOT NULL,
- label TEXT,
- started_at TIMESTAMP NOT NULL,
- stopped_at TIMESTAMP,
- file_path TEXT NOT NULL,
- event_count INTEGER DEFAULT 0,
- size_bytes INTEGER DEFAULT 0,
- metadata TEXT
- )
- ''')
+ # Device correlation table
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS device_correlations (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ wifi_mac TEXT,
+ bt_mac TEXT,
+ confidence REAL,
+ first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ metadata TEXT,
+ UNIQUE(wifi_mac, bt_mac)
+ )
+ ''')
+
+ # Alert rules
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS alert_rules (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ mode TEXT,
+ event_type TEXT,
+ match TEXT,
+ severity TEXT DEFAULT 'medium',
+ enabled BOOLEAN DEFAULT 1,
+ notify TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ )
+ ''')
+
+ # Alert events
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS alert_events (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ rule_id INTEGER,
+ mode TEXT,
+ event_type TEXT,
+ severity TEXT DEFAULT 'medium',
+ title TEXT,
+ message TEXT,
+ payload TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
+ )
+ ''')
+
+ # Session recordings
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS recording_sessions (
+ id TEXT PRIMARY KEY,
+ mode TEXT NOT NULL,
+ label TEXT,
+ started_at TIMESTAMP NOT NULL,
+ stopped_at TIMESTAMP,
+ file_path TEXT NOT NULL,
+ event_count INTEGER DEFAULT 0,
+ size_bytes INTEGER DEFAULT 0,
+ metadata TEXT
+ )
+ ''')
+
+ # Alert rules
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS alert_rules (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ mode TEXT,
+ event_type TEXT,
+ match TEXT,
+ severity TEXT DEFAULT 'medium',
+ enabled BOOLEAN DEFAULT 1,
+ notify TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ )
+ ''')
+
+ # Alert events
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS alert_events (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ rule_id INTEGER,
+ mode TEXT,
+ event_type TEXT,
+ severity TEXT DEFAULT 'medium',
+ title TEXT,
+ message TEXT,
+ payload TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
+ )
+ ''')
+
+ # Session recordings
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS recording_sessions (
+ id TEXT PRIMARY KEY,
+ mode TEXT NOT NULL,
+ label TEXT,
+ started_at TIMESTAMP NOT NULL,
+ stopped_at TIMESTAMP,
+ file_path TEXT NOT NULL,
+ event_count INTEGER DEFAULT 0,
+ size_bytes INTEGER DEFAULT 0,
+ metadata TEXT
+ )
+ ''')
# Users table for authentication
conn.execute('''
@@ -177,29 +223,29 @@ def init_db() -> None:
# =====================================================================
# TSCM Baselines - Environment snapshots for comparison
- conn.execute('''
- CREATE TABLE IF NOT EXISTS tscm_baselines (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- name TEXT NOT NULL,
- location TEXT,
- description TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- wifi_networks TEXT,
- wifi_clients TEXT,
- bt_devices TEXT,
- rf_frequencies TEXT,
- gps_coords TEXT,
- is_active BOOLEAN DEFAULT 0
- )
- ''')
-
- # Ensure new columns exist for older databases
- try:
- columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")}
- if 'wifi_clients' not in columns:
- conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT')
- except Exception as e:
- logger.debug(f"Schema update skipped for tscm_baselines: {e}")
+ conn.execute('''
+ CREATE TABLE IF NOT EXISTS tscm_baselines (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ location TEXT,
+ description TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ wifi_networks TEXT,
+ wifi_clients TEXT,
+ bt_devices TEXT,
+ rf_frequencies TEXT,
+ gps_coords TEXT,
+ is_active BOOLEAN DEFAULT 0
+ )
+ ''')
+
+ # Ensure new columns exist for older databases
+ try:
+ columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")}
+ if 'wifi_clients' not in columns:
+ conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT')
+ except Exception as e:
+ logger.debug(f"Schema update skipped for tscm_baselines: {e}")
# TSCM Sweeps - Individual sweep sessions
conn.execute('''
@@ -407,6 +453,134 @@ def init_db() -> None:
ON tscm_cases(status, created_at)
''')
        # =====================================================================
        # GSM (Global System for Mobile) Intelligence Tables
        # =====================================================================

        # gsm_cells - Known cell towers (OpenCellID cache).
        # One row per unique (mcc, mnc, lac, cid) tower identity.
        conn.execute('''
            CREATE TABLE IF NOT EXISTS gsm_cells (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                mcc INTEGER NOT NULL,
                mnc INTEGER NOT NULL,
                lac INTEGER NOT NULL,
                cid INTEGER NOT NULL,
                lat REAL,
                lon REAL,
                azimuth INTEGER,
                range_meters INTEGER,
                samples INTEGER,
                radio TEXT,
                operator TEXT,
                first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_verified TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                UNIQUE(mcc, mnc, lac, cid)
            )
        ''')

        # gsm_rogues - Detected rogue towers / IMSI catchers.
        # 'reason' records why the tower was flagged; 'acknowledged' lets the
        # operator dismiss a finding without deleting it.
        conn.execute('''
            CREATE TABLE IF NOT EXISTS gsm_rogues (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                arfcn INTEGER NOT NULL,
                mcc INTEGER,
                mnc INTEGER,
                lac INTEGER,
                cid INTEGER,
                signal_strength REAL,
                reason TEXT NOT NULL,
                threat_level TEXT DEFAULT 'medium',
                detected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                location_lat REAL,
                location_lon REAL,
                acknowledged BOOLEAN DEFAULT 0,
                notes TEXT,
                metadata TEXT
            )
        ''')

        # gsm_signals - 60-day archive of signal observations
        # (per-observation IMSI/TMSI sightings with timing advance).
        conn.execute('''
            CREATE TABLE IF NOT EXISTS gsm_signals (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                imsi TEXT,
                tmsi TEXT,
                mcc INTEGER,
                mnc INTEGER,
                lac INTEGER,
                cid INTEGER,
                ta_value INTEGER,
                signal_strength REAL,
                arfcn INTEGER,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT
            )
        ''')

        # gsm_tmsi_log - 24-hour raw pings for crowd density estimates.
        conn.execute('''
            CREATE TABLE IF NOT EXISTS gsm_tmsi_log (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                tmsi TEXT NOT NULL,
                lac INTEGER,
                cid INTEGER,
                ta_value INTEGER,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        # gsm_velocity_log - 1-hour buffer for movement tracking
        # (previous vs current TA/cell lets velocity be estimated).
        conn.execute('''
            CREATE TABLE IF NOT EXISTS gsm_velocity_log (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                device_id TEXT NOT NULL,
                prev_ta INTEGER,
                curr_ta INTEGER,
                prev_cid INTEGER,
                curr_cid INTEGER,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                estimated_velocity REAL,
                metadata TEXT
            )
        ''')

        # GSM indexes for performance: geographic lookups, identity lookups,
        # threat triage, per-cell and per-device time-series queries, and the
        # time-based pruning of the rolling logs above.
        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_cells_location
            ON gsm_cells(lat, lon)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_cells_identity
            ON gsm_cells(mcc, mnc, lac, cid)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_rogues_severity
            ON gsm_rogues(threat_level, detected_at)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_signals_cell_time
            ON gsm_signals(cid, lac, timestamp)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_signals_device
            ON gsm_signals(imsi, tmsi, timestamp)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_tmsi_log_time
            ON gsm_tmsi_log(timestamp)
        ''')

        conn.execute('''
            CREATE INDEX IF NOT EXISTS idx_gsm_velocity_log_device
            ON gsm_velocity_log(device_id, timestamp)
        ''')
+
# =====================================================================
# DSC (Digital Selective Calling) Tables
# =====================================================================
@@ -740,16 +914,16 @@ def get_correlations(min_confidence: float = 0.5) -> list[dict]:
# TSCM Functions
# =============================================================================
-def create_tscm_baseline(
- name: str,
- location: str | None = None,
- description: str | None = None,
- wifi_networks: list | None = None,
- wifi_clients: list | None = None,
- bt_devices: list | None = None,
- rf_frequencies: list | None = None,
- gps_coords: dict | None = None
-) -> int:
+def create_tscm_baseline(
+ name: str,
+ location: str | None = None,
+ description: str | None = None,
+ wifi_networks: list | None = None,
+ wifi_clients: list | None = None,
+ bt_devices: list | None = None,
+ rf_frequencies: list | None = None,
+ gps_coords: dict | None = None
+) -> int:
"""
Create a new TSCM baseline.
@@ -757,20 +931,20 @@ def create_tscm_baseline(
The ID of the created baseline
"""
with get_db() as conn:
- cursor = conn.execute('''
- INSERT INTO tscm_baselines
- (name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?)
- ''', (
- name,
- location,
- description,
- json.dumps(wifi_networks) if wifi_networks else None,
- json.dumps(wifi_clients) if wifi_clients else None,
- json.dumps(bt_devices) if bt_devices else None,
- json.dumps(rf_frequencies) if rf_frequencies else None,
- json.dumps(gps_coords) if gps_coords else None
- ))
+ cursor = conn.execute('''
+ INSERT INTO tscm_baselines
+ (name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+ ''', (
+ name,
+ location,
+ description,
+ json.dumps(wifi_networks) if wifi_networks else None,
+ json.dumps(wifi_clients) if wifi_clients else None,
+ json.dumps(bt_devices) if bt_devices else None,
+ json.dumps(rf_frequencies) if rf_frequencies else None,
+ json.dumps(gps_coords) if gps_coords else None
+ ))
return cursor.lastrowid
@@ -785,19 +959,19 @@ def get_tscm_baseline(baseline_id: int) -> dict | None:
if row is None:
return None
- return {
- 'id': row['id'],
- 'name': row['name'],
- 'location': row['location'],
- 'description': row['description'],
- 'created_at': row['created_at'],
- 'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
- 'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [],
- 'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
- 'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
- 'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
- 'is_active': bool(row['is_active'])
- }
+ return {
+ 'id': row['id'],
+ 'name': row['name'],
+ 'location': row['location'],
+ 'description': row['description'],
+ 'created_at': row['created_at'],
+ 'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
+ 'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [],
+ 'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
+ 'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
+ 'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
+ 'is_active': bool(row['is_active'])
+ }
def get_all_tscm_baselines() -> list[dict]:
@@ -839,23 +1013,23 @@ def set_active_tscm_baseline(baseline_id: int) -> bool:
return cursor.rowcount > 0
-def update_tscm_baseline(
- baseline_id: int,
- wifi_networks: list | None = None,
- wifi_clients: list | None = None,
- bt_devices: list | None = None,
- rf_frequencies: list | None = None
-) -> bool:
+def update_tscm_baseline(
+ baseline_id: int,
+ wifi_networks: list | None = None,
+ wifi_clients: list | None = None,
+ bt_devices: list | None = None,
+ rf_frequencies: list | None = None
+) -> bool:
"""Update baseline device lists."""
updates = []
params = []
- if wifi_networks is not None:
- updates.append('wifi_networks = ?')
- params.append(json.dumps(wifi_networks))
- if wifi_clients is not None:
- updates.append('wifi_clients = ?')
- params.append(json.dumps(wifi_clients))
+ if wifi_networks is not None:
+ updates.append('wifi_networks = ?')
+ params.append(json.dumps(wifi_networks))
+ if wifi_clients is not None:
+ updates.append('wifi_clients = ?')
+ params.append(json.dumps(wifi_clients))
if bt_devices is not None:
updates.append('bt_devices = ?')
params.append(json.dumps(bt_devices))
@@ -1267,127 +1441,127 @@ def get_all_known_devices(
]
-def delete_known_device(identifier: str) -> bool:
- """Remove a device from the known-good registry."""
- with get_db() as conn:
- cursor = conn.execute(
- 'DELETE FROM tscm_known_devices WHERE identifier = ?',
- (identifier.upper(),)
- )
- return cursor.rowcount > 0
-
-
-# =============================================================================
-# TSCM Schedule Functions
-# =============================================================================
-
-def create_tscm_schedule(
- name: str,
- cron_expression: str,
- sweep_type: str = 'standard',
- baseline_id: int | None = None,
- zone_name: str | None = None,
- enabled: bool = True,
- notify_on_threat: bool = True,
- notify_email: str | None = None,
- last_run: str | None = None,
- next_run: str | None = None,
-) -> int:
- """Create a new TSCM sweep schedule."""
- with get_db() as conn:
- cursor = conn.execute('''
- INSERT INTO tscm_schedules
- (name, baseline_id, zone_name, cron_expression, sweep_type,
- enabled, last_run, next_run, notify_on_threat, notify_email)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- ''', (
- name,
- baseline_id,
- zone_name,
- cron_expression,
- sweep_type,
- 1 if enabled else 0,
- last_run,
- next_run,
- 1 if notify_on_threat else 0,
- notify_email,
- ))
- return cursor.lastrowid
-
-
-def get_tscm_schedule(schedule_id: int) -> dict | None:
- """Get a TSCM schedule by ID."""
- with get_db() as conn:
- cursor = conn.execute(
- 'SELECT * FROM tscm_schedules WHERE id = ?',
- (schedule_id,)
- )
- row = cursor.fetchone()
- return dict(row) if row else None
-
-
-def get_all_tscm_schedules(
- enabled: bool | None = None,
- limit: int = 200
-) -> list[dict]:
- """Get all TSCM schedules."""
- conditions = []
- params = []
-
- if enabled is not None:
- conditions.append('enabled = ?')
- params.append(1 if enabled else 0)
-
- where_clause = f'WHERE {" AND ".join(conditions)}' if conditions else ''
- params.append(limit)
-
- with get_db() as conn:
- cursor = conn.execute(f'''
- SELECT * FROM tscm_schedules
- {where_clause}
- ORDER BY id DESC
- LIMIT ?
- ''', params)
- return [dict(row) for row in cursor]
-
-
-def update_tscm_schedule(schedule_id: int, **fields) -> bool:
- """Update a TSCM schedule."""
- if not fields:
- return False
-
- updates = []
- params = []
-
- for key, value in fields.items():
- updates.append(f'{key} = ?')
- params.append(value)
-
- params.append(schedule_id)
-
- with get_db() as conn:
- cursor = conn.execute(
- f'UPDATE tscm_schedules SET {", ".join(updates)} WHERE id = ?',
- params
- )
- return cursor.rowcount > 0
-
-
-def delete_tscm_schedule(schedule_id: int) -> bool:
- """Delete a TSCM schedule."""
- with get_db() as conn:
- cursor = conn.execute(
- 'DELETE FROM tscm_schedules WHERE id = ?',
- (schedule_id,)
- )
- return cursor.rowcount > 0
-
-
-def is_known_good_device(identifier: str, location: str | None = None) -> dict | None:
- """Check if a device is in the known-good registry for a location."""
- with get_db() as conn:
- if location:
- cursor = conn.execute('''
+def delete_known_device(identifier: str) -> bool:
+ """Remove a device from the known-good registry."""
+ with get_db() as conn:
+ cursor = conn.execute(
+ 'DELETE FROM tscm_known_devices WHERE identifier = ?',
+ (identifier.upper(),)
+ )
+ return cursor.rowcount > 0
+
+
+# =============================================================================
+# TSCM Schedule Functions
+# =============================================================================
+
+def create_tscm_schedule(
+ name: str,
+ cron_expression: str,
+ sweep_type: str = 'standard',
+ baseline_id: int | None = None,
+ zone_name: str | None = None,
+ enabled: bool = True,
+ notify_on_threat: bool = True,
+ notify_email: str | None = None,
+ last_run: str | None = None,
+ next_run: str | None = None,
+) -> int:
+ """Create a new TSCM sweep schedule."""
+ with get_db() as conn:
+ cursor = conn.execute('''
+ INSERT INTO tscm_schedules
+ (name, baseline_id, zone_name, cron_expression, sweep_type,
+ enabled, last_run, next_run, notify_on_threat, notify_email)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ ''', (
+ name,
+ baseline_id,
+ zone_name,
+ cron_expression,
+ sweep_type,
+ 1 if enabled else 0,
+ last_run,
+ next_run,
+ 1 if notify_on_threat else 0,
+ notify_email,
+ ))
+ return cursor.lastrowid
+
+
+def get_tscm_schedule(schedule_id: int) -> dict | None:
+ """Get a TSCM schedule by ID."""
+ with get_db() as conn:
+ cursor = conn.execute(
+ 'SELECT * FROM tscm_schedules WHERE id = ?',
+ (schedule_id,)
+ )
+ row = cursor.fetchone()
+ return dict(row) if row else None
+
+
+def get_all_tscm_schedules(
+ enabled: bool | None = None,
+ limit: int = 200
+) -> list[dict]:
+ """Get all TSCM schedules."""
+ conditions = []
+ params = []
+
+ if enabled is not None:
+ conditions.append('enabled = ?')
+ params.append(1 if enabled else 0)
+
+ where_clause = f'WHERE {" AND ".join(conditions)}' if conditions else ''
+ params.append(limit)
+
+ with get_db() as conn:
+ cursor = conn.execute(f'''
+ SELECT * FROM tscm_schedules
+ {where_clause}
+ ORDER BY id DESC
+ LIMIT ?
+ ''', params)
+ return [dict(row) for row in cursor]
+
+
+def update_tscm_schedule(schedule_id: int, **fields) -> bool:
+ """Update a TSCM schedule."""
+ if not fields:
+ return False
+
+ updates = []
+ params = []
+
+ for key, value in fields.items():
+ updates.append(f'{key} = ?')
+ params.append(value)
+
+ params.append(schedule_id)
+
+ with get_db() as conn:
+ cursor = conn.execute(
+ f'UPDATE tscm_schedules SET {", ".join(updates)} WHERE id = ?',
+ params
+ )
+ return cursor.rowcount > 0
+
+
+def delete_tscm_schedule(schedule_id: int) -> bool:
+ """Delete a TSCM schedule."""
+ with get_db() as conn:
+ cursor = conn.execute(
+ 'DELETE FROM tscm_schedules WHERE id = ?',
+ (schedule_id,)
+ )
+ return cursor.rowcount > 0
+
+
+def is_known_good_device(identifier: str, location: str | None = None) -> dict | None:
+ """Check if a device is in the known-good registry for a location."""
+ with get_db() as conn:
+ if location:
+ cursor = conn.execute('''
SELECT * FROM tscm_known_devices
WHERE identifier = ? AND (location = ? OR scope = 'global')
''', (identifier.upper(), location))
@@ -2123,3 +2297,61 @@ def cleanup_old_payloads(max_age_hours: int = 24) -> int:
WHERE received_at < datetime('now', ?)
''', (f'-{max_age_hours} hours',))
return cursor.rowcount
+
+
+# =============================================================================
+# GSM Cleanup Functions
+# =============================================================================
+
+def cleanup_old_gsm_signals(max_age_days: int = 60) -> int:
+ """
+ Remove old GSM signal observations (60-day archive).
+
+ Args:
+ max_age_days: Maximum age in days (default: 60)
+
+ Returns:
+ Number of deleted entries
+ """
+ with get_db() as conn:
+ cursor = conn.execute('''
+ DELETE FROM gsm_signals
+ WHERE timestamp < datetime('now', ?)
+ ''', (f'-{max_age_days} days',))
+ return cursor.rowcount
+
+
+def cleanup_old_gsm_tmsi_log(max_age_hours: int = 24) -> int:
+ """
+ Remove old TMSI log entries (24-hour buffer for crowd density).
+
+ Args:
+ max_age_hours: Maximum age in hours (default: 24)
+
+ Returns:
+ Number of deleted entries
+ """
+ with get_db() as conn:
+ cursor = conn.execute('''
+ DELETE FROM gsm_tmsi_log
+ WHERE timestamp < datetime('now', ?)
+ ''', (f'-{max_age_hours} hours',))
+ return cursor.rowcount
+
+
+def cleanup_old_gsm_velocity_log(max_age_hours: int = 1) -> int:
+ """
+ Remove old velocity log entries (1-hour buffer for movement tracking).
+
+ Args:
+ max_age_hours: Maximum age in hours (default: 1)
+
+ Returns:
+ Number of deleted entries
+ """
+ with get_db() as conn:
+ cursor = conn.execute('''
+ DELETE FROM gsm_velocity_log
+ WHERE timestamp < datetime('now', ?)
+ ''', (f'-{max_age_hours} hours',))
+ return cursor.rowcount
diff --git a/utils/dependencies.py b/utils/dependencies.py
index 256b995..e6a1bee 100644
--- a/utils/dependencies.py
+++ b/utils/dependencies.py
@@ -443,6 +443,38 @@ TOOL_DEPENDENCIES = {
}
}
}
+ },
+ 'gsm': {
+ 'name': 'GSM Intelligence',
+ 'tools': {
+ 'grgsm_scanner': {
+ 'required': True,
+ 'description': 'gr-gsm scanner for finding GSM towers',
+ 'install': {
+ 'apt': 'Build gr-gsm from source: https://github.com/bkerler/gr-gsm',
+ 'brew': 'brew install gr-gsm (may require manual build)',
+ 'manual': 'https://github.com/bkerler/gr-gsm'
+ }
+ },
+ 'grgsm_livemon': {
+ 'required': True,
+ 'description': 'gr-gsm live monitor for decoding GSM signals',
+ 'install': {
+ 'apt': 'Included with gr-gsm package',
+ 'brew': 'Included with gr-gsm',
+ 'manual': 'Included with gr-gsm'
+ }
+ },
+ 'tshark': {
+ 'required': True,
+ 'description': 'Wireshark CLI for parsing GSM packets',
+ 'install': {
+ 'apt': 'sudo apt-get install tshark',
+ 'brew': 'brew install wireshark',
+ 'manual': 'https://www.wireshark.org/download.html'
+ }
+ }
+ }
}
}
diff --git a/utils/gsm_geocoding.py b/utils/gsm_geocoding.py
new file mode 100644
index 0000000..681b990
--- /dev/null
+++ b/utils/gsm_geocoding.py
@@ -0,0 +1,200 @@
+"""GSM Cell Tower Geocoding Service.
+
+Provides hybrid cache-first geocoding with async API fallback for cell towers.
+"""
+
+from __future__ import annotations
+
+import logging
+import queue
+from typing import Any
+
+import requests
+
+import config
+from utils.database import get_db
+
+logger = logging.getLogger('intercept.gsm_geocoding')
+
+# Queue for pending geocoding requests
+_geocoding_queue = queue.Queue(maxsize=100)
+
+
+def lookup_cell_coordinates(mcc: int, mnc: int, lac: int, cid: int) -> dict[str, Any] | None:
+ """
+ Lookup cell tower coordinates with cache-first strategy.
+
+ Strategy:
+ 1. Check gsm_cells table (cache) - fast synchronous lookup
+ 2. If not found, return None (caller decides whether to use API)
+
+ Args:
+ mcc: Mobile Country Code
+ mnc: Mobile Network Code
+ lac: Location Area Code
+ cid: Cell ID
+
+ Returns:
+ dict with keys: lat, lon, source='cache', azimuth (optional),
+ range_meters (optional), operator (optional), radio (optional)
+ Returns None if not found in cache.
+ """
+ try:
+ with get_db() as conn:
+ result = conn.execute('''
+ SELECT lat, lon, azimuth, range_meters, operator, radio
+ FROM gsm_cells
+ WHERE mcc = ? AND mnc = ? AND lac = ? AND cid = ?
+ ''', (mcc, mnc, lac, cid)).fetchone()
+
+ if result:
+ return {
+ 'lat': result['lat'],
+ 'lon': result['lon'],
+ 'source': 'cache',
+ 'azimuth': result['azimuth'],
+ 'range_meters': result['range_meters'],
+ 'operator': result['operator'],
+ 'radio': result['radio']
+ }
+
+ return None
+
+ except Exception as e:
+ logger.error(f"Error looking up coordinates from cache: {e}")
+ return None
+
+
+def lookup_cell_from_api(mcc: int, mnc: int, lac: int, cid: int) -> dict[str, Any] | None:
+ """
+ Lookup cell tower from OpenCellID API and cache result.
+
+ Args:
+ mcc: Mobile Country Code
+ mnc: Mobile Network Code
+ lac: Location Area Code
+ cid: Cell ID
+
+ Returns:
+ dict with keys: lat, lon, source='api', azimuth (optional),
+ range_meters (optional), operator (optional), radio (optional)
+ Returns None if API call fails or cell not found.
+ """
+ try:
+ api_url = config.GSM_OPENCELLID_API_URL
+ params = {
+ 'key': config.GSM_OPENCELLID_API_KEY,
+ 'mcc': mcc,
+ 'mnc': mnc,
+ 'lac': lac,
+ 'cellid': cid,
+ 'format': 'json'
+ }
+
+ response = requests.get(api_url, params=params, timeout=10)
+
+ if response.status_code == 200:
+ cell_data = response.json()
+
+ # Cache the result
+ with get_db() as conn:
+ conn.execute('''
+ INSERT OR REPLACE INTO gsm_cells
+ (mcc, mnc, lac, cid, lat, lon, azimuth, range_meters, samples, radio, operator, last_verified)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+ ''', (
+ mcc, mnc, lac, cid,
+ cell_data.get('lat'),
+ cell_data.get('lon'),
+ cell_data.get('azimuth'),
+ cell_data.get('range'),
+ cell_data.get('samples'),
+ cell_data.get('radio'),
+ cell_data.get('operator')
+ ))
+ conn.commit()
+
+ logger.info(f"Cached cell tower from API: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+
+ return {
+ 'lat': cell_data.get('lat'),
+ 'lon': cell_data.get('lon'),
+ 'source': 'api',
+ 'azimuth': cell_data.get('azimuth'),
+ 'range_meters': cell_data.get('range'),
+ 'operator': cell_data.get('operator'),
+ 'radio': cell_data.get('radio')
+ }
+ else:
+ logger.warning(f"OpenCellID API returned {response.status_code} for MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+ return None
+
+ except Exception as e:
+ logger.error(f"Error calling OpenCellID API: {e}")
+ return None
+
+
+def enrich_tower_data(tower_data: dict[str, Any]) -> dict[str, Any]:
+ """
+ Enrich tower data with coordinates using cache-first strategy.
+
+ If coordinates found in cache, adds them immediately.
+ If not found, marks as 'pending' and queues for background API lookup.
+
+ Args:
+ tower_data: Dictionary with keys mcc, mnc, lac, cid (and other tower data)
+
+ Returns:
+ Enriched tower_data dict with added fields:
+ - lat, lon (if found in cache)
+ - status='pending' (if needs API lookup)
+ - source='cache' (if from cache)
+ """
+ mcc = tower_data.get('mcc')
+ mnc = tower_data.get('mnc')
+ lac = tower_data.get('lac')
+ cid = tower_data.get('cid')
+
+ # Validate required fields
+ if not all([mcc is not None, mnc is not None, lac is not None, cid is not None]):
+ logger.warning(f"Tower data missing required fields: {tower_data}")
+ return tower_data
+
+ # Try cache lookup
+ coords = lookup_cell_coordinates(mcc, mnc, lac, cid)
+
+ if coords:
+ # Found in cache - add coordinates immediately
+ tower_data['lat'] = coords['lat']
+ tower_data['lon'] = coords['lon']
+ tower_data['source'] = 'cache'
+
+ # Add optional fields if available
+ if coords.get('azimuth') is not None:
+ tower_data['azimuth'] = coords['azimuth']
+ if coords.get('range_meters') is not None:
+ tower_data['range_meters'] = coords['range_meters']
+ if coords.get('operator'):
+ tower_data['operator'] = coords['operator']
+ if coords.get('radio'):
+ tower_data['radio'] = coords['radio']
+
+ logger.debug(f"Cache hit for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+ else:
+ # Not in cache - mark as pending and queue for API lookup
+ tower_data['status'] = 'pending'
+ tower_data['source'] = 'unknown'
+
+ # Queue for background geocoding (non-blocking)
+ try:
+ _geocoding_queue.put_nowait(tower_data.copy())
+ logger.debug(f"Queued tower for geocoding: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
+ except queue.Full:
+ logger.warning("Geocoding queue full, dropping tower")
+
+ return tower_data
+
+
+def get_geocoding_queue() -> queue.Queue:
+ """Get the geocoding queue for the background worker."""
+ return _geocoding_queue
diff --git a/utils/logging.py b/utils/logging.py
index 3d2cc6a..addbabe 100644
--- a/utils/logging.py
+++ b/utils/logging.py
@@ -28,3 +28,4 @@ wifi_logger = get_logger('intercept.wifi')
bluetooth_logger = get_logger('intercept.bluetooth')
adsb_logger = get_logger('intercept.adsb')
satellite_logger = get_logger('intercept.satellite')
+gsm_spy_logger = get_logger('intercept.gsm_spy')
diff --git a/utils/sdr/airspy.py b/utils/sdr/airspy.py
index b70c76d..875f4cb 100644
--- a/utils/sdr/airspy.py
+++ b/utils/sdr/airspy.py
@@ -185,6 +185,43 @@ class AirspyCommandBuilder(CommandBuilder):
return cmd
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build rx_sdr command for raw I/Q capture with Airspy.
+
+ Outputs unsigned 8-bit I/Q pairs to stdout for waterfall display.
+ """
+ device_str = self._build_device_string(device)
+ freq_hz = int(frequency_mhz * 1e6)
+
+ cmd = [
+ 'rx_sdr',
+ '-d', device_str,
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ '-F', 'CU8',
+ ]
+
+ if gain is not None and gain > 0:
+ cmd.extend(['-g', self._format_gain(gain)])
+
+ if bias_t:
+ cmd.append('-T')
+
+ # Output to stdout
+ cmd.append('-')
+
+ return cmd
+
def get_capabilities(self) -> SDRCapabilities:
"""Return Airspy capabilities."""
return self.CAPABILITIES
diff --git a/utils/sdr/base.py b/utils/sdr/base.py
index 4dc79be..e7f84ba 100644
--- a/utils/sdr/base.py
+++ b/utils/sdr/base.py
@@ -186,6 +186,41 @@ class CommandBuilder(ABC):
"""Return hardware capabilities for this SDR type."""
pass
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build raw I/Q capture command for streaming samples to stdout.
+
+ Used for real-time waterfall/spectrum display. Output is unsigned
+ 8-bit I/Q pairs (cu8) written continuously to stdout.
+
+ Args:
+ device: The SDR device to use
+ frequency_mhz: Center frequency in MHz
+ sample_rate: Sample rate in Hz (default 2048000)
+ gain: Gain in dB (None for auto)
+ ppm: PPM frequency correction
+ bias_t: Enable bias-T power (for active antennas)
+ output_format: Output sample format (default 'cu8')
+
+ Returns:
+ Command as list of strings for subprocess
+
+ Raises:
+ NotImplementedError: If the SDR type does not support I/Q capture.
+ """
+ raise NotImplementedError(
+ f"{self.__class__.__name__} does not support raw I/Q capture"
+ )
+
@classmethod
@abstractmethod
def get_sdr_type(cls) -> SDRType:
diff --git a/utils/sdr/hackrf.py b/utils/sdr/hackrf.py
index ea3a24e..63a5fd6 100644
--- a/utils/sdr/hackrf.py
+++ b/utils/sdr/hackrf.py
@@ -185,6 +185,44 @@ class HackRFCommandBuilder(CommandBuilder):
return cmd
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build rx_sdr command for raw I/Q capture with HackRF.
+
+ Outputs unsigned 8-bit I/Q pairs to stdout for waterfall display.
+ """
+ device_str = self._build_device_string(device)
+ freq_hz = int(frequency_mhz * 1e6)
+
+ cmd = [
+ 'rx_sdr',
+ '-d', device_str,
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ '-F', 'CU8',
+ ]
+
+ if gain is not None and gain > 0:
+ lna, vga = self._split_gain(gain)
+ cmd.extend(['-g', f'LNA={lna},VGA={vga}'])
+
+ if bias_t:
+ cmd.append('-T')
+
+ # Output to stdout
+ cmd.append('-')
+
+ return cmd
+
def get_capabilities(self) -> SDRCapabilities:
"""Return HackRF capabilities."""
return self.CAPABILITIES
diff --git a/utils/sdr/limesdr.py b/utils/sdr/limesdr.py
index ad9a9d1..3dcd8d2 100644
--- a/utils/sdr/limesdr.py
+++ b/utils/sdr/limesdr.py
@@ -162,6 +162,41 @@ class LimeSDRCommandBuilder(CommandBuilder):
return cmd
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build rx_sdr command for raw I/Q capture with LimeSDR.
+
+ Outputs unsigned 8-bit I/Q pairs to stdout for waterfall display.
+ Note: LimeSDR does not support bias-T, parameter is ignored.
+ """
+ device_str = self._build_device_string(device)
+ freq_hz = int(frequency_mhz * 1e6)
+
+ cmd = [
+ 'rx_sdr',
+ '-d', device_str,
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ '-F', 'CU8',
+ ]
+
+ if gain is not None and gain > 0:
+ cmd.extend(['-g', f'LNAH={int(gain)}'])
+
+ # Output to stdout
+ cmd.append('-')
+
+ return cmd
+
def get_capabilities(self) -> SDRCapabilities:
"""Return LimeSDR capabilities."""
return self.CAPABILITIES
diff --git a/utils/sdr/rtlsdr.py b/utils/sdr/rtlsdr.py
index 6d2b8d8..25b4495 100644
--- a/utils/sdr/rtlsdr.py
+++ b/utils/sdr/rtlsdr.py
@@ -231,6 +231,45 @@ class RTLSDRCommandBuilder(CommandBuilder):
return cmd
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build rtl_sdr command for raw I/Q capture.
+
+ Outputs unsigned 8-bit I/Q pairs to stdout for waterfall display.
+ """
+ rtl_sdr_path = get_tool_path('rtl_sdr') or 'rtl_sdr'
+ freq_hz = int(frequency_mhz * 1e6)
+
+ cmd = [
+ rtl_sdr_path,
+ '-d', self._get_device_arg(device),
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ ]
+
+ if gain is not None and gain > 0:
+ cmd.extend(['-g', str(gain)])
+
+ if ppm is not None and ppm != 0:
+ cmd.extend(['-p', str(ppm)])
+
+ if bias_t:
+ cmd.append('-T')
+
+ # Output to stdout
+ cmd.append('-')
+
+ return cmd
+
def get_capabilities(self) -> SDRCapabilities:
"""Return RTL-SDR capabilities."""
return self.CAPABILITIES
diff --git a/utils/sdr/sdrplay.py b/utils/sdr/sdrplay.py
index 240e286..79df27c 100644
--- a/utils/sdr/sdrplay.py
+++ b/utils/sdr/sdrplay.py
@@ -163,6 +163,43 @@ class SDRPlayCommandBuilder(CommandBuilder):
return cmd
+ def build_iq_capture_command(
+ self,
+ device: SDRDevice,
+ frequency_mhz: float,
+ sample_rate: int = 2048000,
+ gain: Optional[float] = None,
+ ppm: Optional[int] = None,
+ bias_t: bool = False,
+ output_format: str = 'cu8',
+ ) -> list[str]:
+ """
+ Build rx_sdr command for raw I/Q capture with SDRPlay.
+
+ Outputs unsigned 8-bit I/Q pairs to stdout for waterfall display.
+ """
+ device_str = self._build_device_string(device)
+ freq_hz = int(frequency_mhz * 1e6)
+
+ cmd = [
+ 'rx_sdr',
+ '-d', device_str,
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ '-F', 'CU8',
+ ]
+
+ if gain is not None and gain > 0:
+ cmd.extend(['-g', f'IFGR={int(gain)}'])
+
+ if bias_t:
+ cmd.append('-T')
+
+ # Output to stdout
+ cmd.append('-')
+
+ return cmd
+
def get_capabilities(self) -> SDRCapabilities:
"""Return SDRPlay capabilities."""
return self.CAPABILITIES
diff --git a/utils/sstv/sstv_decoder.py b/utils/sstv/sstv_decoder.py
index 36834bc..dbb6591 100644
--- a/utils/sstv/sstv_decoder.py
+++ b/utils/sstv/sstv_decoder.py
@@ -225,7 +225,7 @@ class SSTVDecoder:
self._rtl_process = None
self._running = False
self._lock = threading.Lock()
- self._callback: Callable[[DecodeProgress], None] | None = None
+ self._callback: Callable[[dict], None] | None = None
self._output_dir = Path(output_dir) if output_dir else Path('instance/sstv_images')
self._url_prefix = url_prefix
self._images: list[SSTVImage] = []
@@ -253,7 +253,7 @@ class SSTVDecoder:
"""Return name of available decoder. Always available with pure Python."""
return 'python-sstv'
- def set_callback(self, callback: Callable[[DecodeProgress], None]) -> None:
+ def set_callback(self, callback: Callable[[dict], None]) -> None:
"""Set callback for decode progress updates."""
self._callback = callback
@@ -420,6 +420,10 @@ class SSTVDecoder:
chunk_counter += 1
+ # Scope: compute RMS/peak from raw int16 samples every chunk
+ rms_val = int(np.sqrt(np.mean(raw_samples.astype(np.float64) ** 2)))
+ peak_val = int(np.max(np.abs(raw_samples)))
+
if image_decoder is not None:
# Currently decoding an image
complete = image_decoder.feed(samples)
@@ -447,6 +451,7 @@ class SSTVDecoder:
message=f'Decoding {current_mode_name}: {pct}%',
partial_image=partial_url,
))
+ self._emit_scope(rms_val, peak_val, 'decoding')
if complete:
# Save image
@@ -479,6 +484,7 @@ class SSTVDecoder:
vis_detector.reset()
# Emit signal level metrics every ~500ms (every 5th 100ms chunk)
+ scope_tone: str | None = None
if chunk_counter % 5 == 0 and image_decoder is None:
rms = float(np.sqrt(np.mean(samples ** 2)))
signal_level = min(100, int(rms * 500))
@@ -501,6 +507,8 @@ class SSTVDecoder:
else:
sstv_tone = None
+ scope_tone = sstv_tone
+
self._emit_progress(DecodeProgress(
status='detecting',
message='Listening...',
@@ -509,6 +517,8 @@ class SSTVDecoder:
vis_state=vis_detector.state.value,
))
+ self._emit_scope(rms_val, peak_val, scope_tone)
+
except Exception as e:
logger.error(f"Error in decode thread: {e}")
if not self._running:
@@ -736,10 +746,18 @@ class SSTVDecoder:
"""Emit progress update to callback."""
if self._callback:
try:
- self._callback(progress)
+ self._callback(progress.to_dict())
except Exception as e:
logger.error(f"Error in progress callback: {e}")
+ def _emit_scope(self, rms: int, peak: int, tone: str | None = None) -> None:
+ """Emit scope signal levels to callback."""
+ if self._callback:
+ try:
+ self._callback({'type': 'sstv_scope', 'rms': rms, 'peak': peak, 'tone': tone})
+ except Exception:
+ pass
+
def decode_file(self, audio_path: str | Path) -> list[SSTVImage]:
"""Decode SSTV image(s) from an audio file.
diff --git a/utils/waterfall_fft.py b/utils/waterfall_fft.py
new file mode 100644
index 0000000..d8db37b
--- /dev/null
+++ b/utils/waterfall_fft.py
@@ -0,0 +1,136 @@
+"""FFT pipeline for real-time waterfall display.
+
+Converts raw I/Q samples from SDR hardware into quantized power spectrum
+frames suitable for binary WebSocket transmission.
+"""
+
+from __future__ import annotations
+
+import struct
+
+import numpy as np
+
+
+def cu8_to_complex(raw: bytes) -> np.ndarray:
+ """Convert unsigned 8-bit I/Q bytes to complex64.
+
+ RTL-SDR (and rx_sdr with -F cu8) outputs interleaved unsigned 8-bit
+ I/Q pairs where 128 is the zero point.
+
+ Args:
+ raw: Raw bytes, length must be even (I/Q pairs).
+
+ Returns:
+ Complex64 array of length len(raw) // 2.
+ """
+ iq = np.frombuffer(raw, dtype=np.uint8).astype(np.float32)
+ # Normalize: 0 -> -1.0, 128 -> ~0.0, 255 -> +1.0
+ iq = (iq - 127.5) / 127.5
+ return iq[0::2] + 1j * iq[1::2]
+
+
+def compute_power_spectrum(
+ samples: np.ndarray,
+ fft_size: int = 1024,
+ avg_count: int = 4,
+) -> np.ndarray:
+    """Compute averaged power spectrum in dB (relative, uncalibrated).
+
+ Applies a Hann window, computes FFT, converts to power (dB),
+ and averages over multiple segments.
+
+ Args:
+ samples: Complex64 array, length >= fft_size * avg_count.
+ fft_size: Number of FFT bins.
+ avg_count: Number of segments to average.
+
+ Returns:
+ Float32 array of length fft_size with power in dB (fftshift'd).
+ """
+ window = np.hanning(fft_size).astype(np.float32)
+ accum = np.zeros(fft_size, dtype=np.float32)
+ actual_avg = 0
+
+ for i in range(avg_count):
+ offset = i * fft_size
+ if offset + fft_size > len(samples):
+ break
+ segment = samples[offset : offset + fft_size] * window
+ spectrum = np.fft.fft(segment)
+ power = np.real(spectrum * np.conj(spectrum))
+ # Avoid log10(0)
+ power = np.maximum(power, 1e-20)
+ accum += 10.0 * np.log10(power)
+ actual_avg += 1
+
+ if actual_avg == 0:
+ return np.full(fft_size, -100.0, dtype=np.float32)
+
+ accum /= actual_avg
+ return np.fft.fftshift(accum).astype(np.float32)
+
+
+def quantize_to_uint8(
+ power_db: np.ndarray,
+ db_min: float | None = None,
+ db_max: float | None = None,
+) -> bytes:
+ """Clamp and scale dB values to 0-255.
+
+ When *db_min* / *db_max* are ``None`` (the default) the range is
+ derived from the data so the full colour palette is always used.
+
+ Args:
+ power_db: Float32 array of power values in dB.
+ db_min: Value mapped to 0 (auto if None).
+ db_max: Value mapped to 255 (auto if None).
+
+ Returns:
+ Bytes of length len(power_db), each in [0, 255].
+ """
+ if db_min is None or db_max is None:
+ actual_min = float(np.min(power_db))
+ actual_max = float(np.max(power_db))
+ # Guarantee at least 1 dB of dynamic range
+ if actual_max - actual_min < 1.0:
+ actual_max = actual_min + 1.0
+ if db_min is None:
+ db_min = actual_min
+ if db_max is None:
+ db_max = actual_max
+
+ db_range = db_max - db_min
+ if db_range <= 0:
+ db_range = 1.0
+ scaled = (power_db - db_min) / db_range * 255.0
+ clamped = np.clip(scaled, 0, 255).astype(np.uint8)
+ return clamped.tobytes()
+
+
+def build_binary_frame(
+ start_freq: float,
+ end_freq: float,
+ quantized_bins: bytes,
+) -> bytes:
+ """Pack a binary waterfall frame for WebSocket transmission.
+
+ Wire format (little-endian):
+ [uint8 msg_type=0x01]
+ [float32 start_freq]
+ [float32 end_freq]
+ [uint16 bin_count]
+ [uint8[] bins]
+
+ Total size = 11 + bin_count bytes.
+
+ Args:
+ start_freq: Start frequency in MHz.
+ end_freq: End frequency in MHz.
+ quantized_bins: Pre-quantized uint8 bin data.
+
+ Returns:
+ Binary frame bytes.
+ """
+ bin_count = len(quantized_bins)
+ header = struct.pack('