diff --git a/.dockerignore b/.dockerignore index 6b0c09f..ea9fd0d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -33,6 +33,9 @@ htmlcov/ # Logs *.log +# Local Postgres data +pgdata/ + # Captured files (don't include in image) *.cap *.pcap diff --git a/.gitignore b/.gitignore index 41afe6f..701993a 100644 --- a/.gitignore +++ b/.gitignore @@ -10,9 +10,13 @@ venv/ ENV/ uv.lock -# Logs -*.log -pager_messages.log +# Logs +*.log +pager_messages.log + +# Local data +downloads/ +pgdata/ # IDE .idea/ @@ -34,7 +38,4 @@ build/ uv.lock *.db *.sqlite3 -intercept.db - -# Agent Files -.agent \ No newline at end of file +intercept.db diff --git a/README.md b/README.md index 7e0283f..de362a7 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,7 @@ Support the developer of this open-source project - **ACARS Messaging** - Aircraft datalink messages via acarsdec - **Listening Post** - Frequency scanner with audio monitoring - **Satellite Tracking** - Pass prediction using TLE data +- **ADS-B History** - Persistent aircraft history with reporting dashboard (Postgres optional) - **WiFi Scanning** - Monitor mode reconnaissance via aircrack-ng - **Bluetooth Scanning** - Device discovery and tracker detection - **Spy Stations** - Number stations and diplomatic HF network database @@ -61,6 +62,17 @@ docker compose up -d > **Note:** Docker requires privileged mode for USB SDR access. See `docker-compose.yml` for configuration options. +### ADS-B History (Optional) + +The ADS-B history feature persists aircraft messages to Postgres for long-term analysis. + +```bash +# Start with ADS-B history and Postgres +docker compose --profile history up -d +``` + +Then open **/adsb/history** for the reporting dashboard. + ### Open the Interface After starting, open **http://localhost:5050** in your browser. 
The username and password are admin:admin diff --git a/app.py b/app.py index 032bac2..6c48dc8 100644 --- a/app.py +++ b/app.py @@ -203,9 +203,9 @@ cleanup_manager.register(dsc_messages) # ============================================ @app.before_request -def require_login(): - # Routes that don't require login (to avoid infinite redirect loop) - allowed_routes = ['login', 'static', 'favicon', 'health'] +def require_login(): + # Routes that don't require login (to avoid infinite redirect loop) + allowed_routes = ['login', 'static', 'favicon', 'health', 'health_check'] # If user is not logged in and the current route is not allowed... if 'logged_in' not in session and request.endpoint not in allowed_routes: @@ -710,4 +710,4 @@ def main() -> None: debug=args.debug, threaded=True, load_dotenv=False, - ) \ No newline at end of file + ) diff --git a/config.py b/config.py index 6f60429..3fc19fe 100644 --- a/config.py +++ b/config.py @@ -126,9 +126,18 @@ AIRODUMP_HEADER_LINES = _get_env_int('AIRODUMP_HEADER_LINES', 2) BT_SCAN_TIMEOUT = _get_env_int('BT_SCAN_TIMEOUT', 10) BT_UPDATE_INTERVAL = _get_env_float('BT_UPDATE_INTERVAL', 2.0) -# ADS-B settings -ADSB_SBS_PORT = _get_env_int('ADSB_SBS_PORT', 30003) -ADSB_UPDATE_INTERVAL = _get_env_float('ADSB_UPDATE_INTERVAL', 1.0) +# ADS-B settings +ADSB_SBS_PORT = _get_env_int('ADSB_SBS_PORT', 30003) +ADSB_UPDATE_INTERVAL = _get_env_float('ADSB_UPDATE_INTERVAL', 1.0) +ADSB_HISTORY_ENABLED = _get_env_bool('ADSB_HISTORY_ENABLED', False) +ADSB_DB_HOST = _get_env('ADSB_DB_HOST', 'localhost') +ADSB_DB_PORT = _get_env_int('ADSB_DB_PORT', 5432) +ADSB_DB_NAME = _get_env('ADSB_DB_NAME', 'intercept_adsb') +ADSB_DB_USER = _get_env('ADSB_DB_USER', 'intercept') +ADSB_DB_PASSWORD = _get_env('ADSB_DB_PASSWORD', 'intercept') +ADSB_HISTORY_BATCH_SIZE = _get_env_int('ADSB_HISTORY_BATCH_SIZE', 500) +ADSB_HISTORY_FLUSH_INTERVAL = _get_env_float('ADSB_HISTORY_FLUSH_INTERVAL', 1.0) +ADSB_HISTORY_QUEUE_SIZE = _get_env_int('ADSB_HISTORY_QUEUE_SIZE', 50000) # 
Satellite settings SATELLITE_UPDATE_INTERVAL = _get_env_int('SATELLITE_UPDATE_INTERVAL', 30) diff --git a/docker-compose.yml b/docker-compose.yml index 6d08681..303a29b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,11 @@ # INTERCEPT - Signal Intelligence Platform # Docker Compose configuration for easy deployment +# +# Basic usage: +# docker compose up -d +# +# With ADS-B history (Postgres): +# docker compose --profile history up -d services: intercept: @@ -22,6 +28,14 @@ services: - INTERCEPT_HOST=0.0.0.0 - INTERCEPT_PORT=5050 - INTERCEPT_LOG_LEVEL=INFO + # ADS-B history is disabled by default + # To enable, use: docker compose --profile history up -d + # - INTERCEPT_ADSB_HISTORY_ENABLED=true + # - INTERCEPT_ADSB_DB_HOST=adsb_db + # - INTERCEPT_ADSB_DB_PORT=5432 + # - INTERCEPT_ADSB_DB_NAME=intercept_adsb + # - INTERCEPT_ADSB_DB_USER=intercept + # - INTERCEPT_ADSB_DB_PASSWORD=intercept # Network mode for WiFi scanning (requires host network) # network_mode: host restart: unless-stopped @@ -32,6 +46,54 @@ services: retries: 3 start_period: 10s + # ADS-B history with Postgres persistence + # Enable with: docker compose --profile history up -d + intercept-history: + build: . 
+ container_name: intercept-history + profiles: + - history + depends_on: + - adsb_db + ports: + - "5050:5050" + privileged: true + environment: + - INTERCEPT_HOST=0.0.0.0 + - INTERCEPT_PORT=5050 + - INTERCEPT_LOG_LEVEL=INFO + - INTERCEPT_ADSB_HISTORY_ENABLED=true + - INTERCEPT_ADSB_DB_HOST=adsb_db + - INTERCEPT_ADSB_DB_PORT=5432 + - INTERCEPT_ADSB_DB_NAME=intercept_adsb + - INTERCEPT_ADSB_DB_USER=intercept + - INTERCEPT_ADSB_DB_PASSWORD=intercept + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-sf", "http://localhost:5050/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + adsb_db: + image: postgres:16-alpine + container_name: intercept-adsb-db + profiles: + - history + environment: + - POSTGRES_DB=intercept_adsb + - POSTGRES_USER=intercept + - POSTGRES_PASSWORD=intercept + volumes: + - ./pgdata:/var/lib/postgresql/data + restart: unless-stopped + healthcheck: + test: ["CMD-SHELL", "pg_isready -U intercept -d intercept_adsb"] + interval: 10s + timeout: 5s + retries: 5 + # Optional: Add volume for persistent SQLite database # volumes: # intercept-data: diff --git a/docs/FEATURES.md b/docs/FEATURES.md index 99f216a..5a93cff 100644 --- a/docs/FEATURES.md +++ b/docs/FEATURES.md @@ -38,20 +38,22 @@ Complete feature list for all modules. 
- **Source links** - references to priyom.org for detailed information - **Famous stations** - UVB-76 "The Buzzer", Cuban HM01, Israeli E17z -## ADS-B Aircraft Tracking - -- **Real-time aircraft tracking** via dump1090 or rtl_adsb -- **Full-screen dashboard** - dedicated popout with virtual radar scope -- **Interactive Leaflet map** with OpenStreetMap tiles (dark-themed) -- **Aircraft trails** - optional flight path history visualization -- **Range rings** - distance reference circles from observer position -- **Aircraft filtering** - show all, military only, civil only, or emergency only -- **Marker clustering** - group nearby aircraft at lower zoom levels -- **Reception statistics** - max range, message rate, busiest hour, total seen -- **Observer location** - manual input or GPS geolocation -- **Audio alerts** - notifications for military and emergency aircraft -- **Emergency squawk highlighting** - visual alerts for 7500/7600/7700 -- **Aircraft details popup** - callsign, altitude, speed, heading, squawk, ICAO +## ADS-B Aircraft Tracking + +- **Real-time aircraft tracking** via dump1090 or rtl_adsb +- **Full-screen dashboard** - dedicated popout with virtual radar scope +- **Interactive Leaflet map** with OpenStreetMap tiles (dark-themed) +- **Aircraft trails** - optional flight path history visualization +- **Range rings** - distance reference circles from observer position +- **Aircraft filtering** - show all, military only, civil only, or emergency only +- **Marker clustering** - group nearby aircraft at lower zoom levels +- **Reception statistics** - max range, message rate, busiest hour, total seen +- **Persistent ADS-B history** - optional Postgres-backed message and snapshot storage +- **History reporting dashboard** - session controls, aircraft timelines, and detail modal +- **Observer location** - manual input or GPS geolocation +- **Audio alerts** - notifications for military and emergency aircraft +- **Emergency squawk highlighting** - visual alerts 
for 7500/7600/7700 +- **Aircraft details popup** - callsign, altitude, speed, heading, squawk, ICAO

Screenshot diff --git a/docs/USAGE.md b/docs/USAGE.md index a3d7692..7abdf04 100644 --- a/docs/USAGE.md +++ b/docs/USAGE.md @@ -74,10 +74,42 @@ INTERCEPT automatically detects known trackers: ### Emergency Squawks -The system highlights aircraft transmitting emergency squawks: -- **7500** - Hijack -- **7600** - Radio failure -- **7700** - General emergency +The system highlights aircraft transmitting emergency squawks: +- **7500** - Hijack +- **7600** - Radio failure +- **7700** - General emergency + +## ADS-B History (Optional) + +The history dashboard persists aircraft messages and per-aircraft snapshots to Postgres for long-running tracking and reporting. + +### Enable History + +Set the following environment variables (Docker recommended): + +| Variable | Default | Description | +|----------|---------|-------------| +| `INTERCEPT_ADSB_HISTORY_ENABLED` | `false` | Enables history storage and reporting | +| `INTERCEPT_ADSB_DB_HOST` | `localhost` | Postgres host (use `adsb_db` in Docker) | +| `INTERCEPT_ADSB_DB_PORT` | `5432` | Postgres port | +| `INTERCEPT_ADSB_DB_NAME` | `intercept_adsb` | Database name | +| `INTERCEPT_ADSB_DB_USER` | `intercept` | Database user | +| `INTERCEPT_ADSB_DB_PASSWORD` | `intercept` | Database password | + +### Docker Setup + +`docker-compose.yml` includes an `adsb_db` service and a persistent volume for history storage: + +```bash +docker compose up -d +``` + +### Using the History Dashboard + +1. Open **/adsb/history** +2. Use **Start Tracking** to run ADS-B in headless mode +3. View aircraft history and timelines +4. 
Stop tracking when desired (session history is recorded) ## Satellite Mode diff --git a/requirements.txt b/requirements.txt index 6b42e9f..0a54c58 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,9 @@ flask-limiter>=2.5.4 requests>=2.28.0 Werkzeug>=3.1.5 +# ADS-B history (optional - only needed for Postgres persistence) +psycopg2-binary>=2.9.9 + # BLE scanning with manufacturer data detection (optional - for TSCM) bleak>=0.21.0 diff --git a/routes/adsb.py b/routes/adsb.py index 1c7b115..539477c 100644 --- a/routes/adsb.py +++ b/routes/adsb.py @@ -2,27 +2,39 @@ from __future__ import annotations -import json -import os -import queue -import shutil -import socket -import subprocess -import threading -import time -from typing import Any, Generator - -from flask import Blueprint, jsonify, request, Response, render_template - -import app as app_module -from utils.logging import adsb_logger as logger -from utils.validation import ( - validate_device_index, validate_gain, - validate_rtl_tcp_host, validate_rtl_tcp_port -) +import json +import os +import queue +import shutil +import socket +import subprocess +import threading +import time +from datetime import datetime, timezone +from typing import Any, Generator + +from flask import Blueprint, jsonify, request, Response, render_template +from flask import make_response +import psycopg2 +from psycopg2.extras import RealDictCursor + +import app as app_module +from config import ( + ADSB_DB_HOST, + ADSB_DB_NAME, + ADSB_DB_PASSWORD, + ADSB_DB_PORT, + ADSB_DB_USER, + ADSB_HISTORY_ENABLED, +) +from utils.logging import adsb_logger as logger +from utils.validation import ( + validate_device_index, validate_gain, + validate_rtl_tcp_host, validate_rtl_tcp_port +) from utils.sse import format_sse from utils.sdr import SDRFactory, SDRType -from utils.constants import ( +from utils.constants import ( ADSB_SBS_PORT, ADSB_TERMINATE_TIMEOUT, PROCESS_TERMINATE_TIMEOUT, @@ -33,9 +45,10 @@ from utils.constants import ( 
SSE_QUEUE_TIMEOUT, SOCKET_CONNECT_TIMEOUT, ADSB_UPDATE_INTERVAL, - DUMP1090_START_WAIT, -) -from utils import aircraft_db + DUMP1090_START_WAIT, +) +from utils import aircraft_db +from utils.adsb_history import adsb_history_writer, adsb_snapshot_writer, _ensure_adsb_schema adsb_bp = Blueprint('adsb', __name__, url_prefix='/adsb') @@ -56,7 +69,7 @@ _looked_up_icaos: set[str] = set() aircraft_db.load_database() # Common installation paths for dump1090 (when not in PATH) -DUMP1090_PATHS = [ +DUMP1090_PATHS = [ # Homebrew on Apple Silicon (M1/M2/M3) '/opt/homebrew/bin/dump1090', '/opt/homebrew/bin/dump1090-fa', @@ -69,8 +82,202 @@ DUMP1090_PATHS = [ '/usr/bin/dump1090', '/usr/bin/dump1090-fa', '/usr/bin/dump1090-mutability', -] - +] + + +def _get_part(parts: list[str], index: int) -> str | None: + if len(parts) <= index: + return None + value = parts[index].strip() + return value or None + + +def _parse_sbs_timestamp(date_str: str | None, time_str: str | None) -> datetime | None: + if not date_str or not time_str: + return None + combined = f"{date_str} {time_str}" + for fmt in ("%Y/%m/%d %H:%M:%S.%f", "%Y/%m/%d %H:%M:%S"): + try: + parsed = datetime.strptime(combined, fmt) + return parsed.replace(tzinfo=timezone.utc) + except ValueError: + continue + return None + + +def _parse_int(value: str | None) -> int | None: + if value is None: + return None + try: + return int(float(value)) + except (ValueError, TypeError): + return None + + +def _parse_float(value: str | None) -> float | None: + if value is None: + return None + try: + return float(value) + except (ValueError, TypeError): + return None + + +def _build_history_record( + parts: list[str], + msg_type: str, + icao: str, + msg_time: datetime | None, + logged_time: datetime | None, + service_addr: str, + raw_line: str, +) -> dict[str, Any]: + return { + 'received_at': datetime.now(timezone.utc), + 'msg_time': msg_time, + 'logged_time': logged_time, + 'icao': icao, + 'msg_type': _parse_int(msg_type), + 'callsign': 
_get_part(parts, 10), + 'altitude': _parse_int(_get_part(parts, 11)), + 'speed': _parse_int(_get_part(parts, 12)), + 'heading': _parse_int(_get_part(parts, 13)), + 'vertical_rate': _parse_int(_get_part(parts, 16)), + 'lat': _parse_float(_get_part(parts, 14)), + 'lon': _parse_float(_get_part(parts, 15)), + 'squawk': _get_part(parts, 17), + 'session_id': _get_part(parts, 2), + 'aircraft_id': _get_part(parts, 3), + 'flight_id': _get_part(parts, 5), + 'raw_line': raw_line, + 'source_host': service_addr, + } + + +_history_schema_checked = False + + +def _get_history_connection(): + return psycopg2.connect( + host=ADSB_DB_HOST, + port=ADSB_DB_PORT, + dbname=ADSB_DB_NAME, + user=ADSB_DB_USER, + password=ADSB_DB_PASSWORD, + ) + + +def _ensure_history_schema() -> None: + global _history_schema_checked + if _history_schema_checked: + return + try: + with _get_history_connection() as conn: + _ensure_adsb_schema(conn) + _history_schema_checked = True + except Exception as exc: + logger.warning("ADS-B schema check failed: %s", exc) + + +def _parse_int_param(value: str | None, default: int, min_value: int | None = None, max_value: int | None = None) -> int: + try: + parsed = int(value) if value is not None else default + except (ValueError, TypeError): + parsed = default + if min_value is not None: + parsed = max(min_value, parsed) + if max_value is not None: + parsed = min(max_value, parsed) + return parsed + + +def _get_active_session() -> dict[str, Any] | None: + if not ADSB_HISTORY_ENABLED: + return None + _ensure_history_schema() + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute( + """ + SELECT * + FROM adsb_sessions + WHERE ended_at IS NULL + ORDER BY started_at DESC + LIMIT 1 + """ + ) + return cur.fetchone() + except Exception as exc: + logger.warning("ADS-B session lookup failed: %s", exc) + return None + + +def _record_session_start( + *, + device_index: int | None, + sdr_type: str | None, + 
remote_host: str | None, + remote_port: int | None, + start_source: str | None, + started_by: str | None, +) -> dict[str, Any] | None: + if not ADSB_HISTORY_ENABLED: + return None + _ensure_history_schema() + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute( + """ + INSERT INTO adsb_sessions ( + device_index, + sdr_type, + remote_host, + remote_port, + start_source, + started_by + ) + VALUES (%s, %s, %s, %s, %s, %s) + RETURNING * + """, + ( + device_index, + sdr_type, + remote_host, + remote_port, + start_source, + started_by, + ), + ) + return cur.fetchone() + except Exception as exc: + logger.warning("ADS-B session start record failed: %s", exc) + return None + + +def _record_session_stop(*, stop_source: str | None, stopped_by: str | None) -> dict[str, Any] | None: + if not ADSB_HISTORY_ENABLED: + return None + _ensure_history_schema() + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute( + """ + UPDATE adsb_sessions + SET ended_at = NOW(), + stop_source = COALESCE(%s, stop_source), + stopped_by = COALESCE(%s, stopped_by) + WHERE ended_at IS NULL + RETURNING * + """, + (stop_source, stopped_by), + ) + return cur.fetchone() + except Exception as exc: + logger.warning("ADS-B session stop record failed: %s", exc) + return None def find_dump1090(): """Find dump1090 binary, checking PATH and common locations.""" @@ -100,12 +307,15 @@ def check_dump1090_service(): return None -def parse_sbs_stream(service_addr): - """Parse SBS format data from dump1090 SBS port.""" - global adsb_using_service, adsb_connected, adsb_messages_received, adsb_last_message_time, adsb_bytes_received, adsb_lines_received, _sbs_error_logged - - host, port = service_addr.split(':') - port = int(port) +def parse_sbs_stream(service_addr): + """Parse SBS format data from dump1090 SBS port.""" + global adsb_using_service, adsb_connected, adsb_messages_received, 
adsb_last_message_time, adsb_bytes_received, adsb_lines_received, _sbs_error_logged + + adsb_history_writer.start() + adsb_snapshot_writer.start() + + host, port = service_addr.split(':') + port = int(port) logger.info(f"SBS stream parser started, connecting to {host}:{port}") adsb_connected = False @@ -147,18 +357,31 @@ def parse_sbs_stream(service_addr): if adsb_lines_received <= 3: logger.info(f"SBS line {adsb_lines_received}: {line[:100]}") - parts = line.split(',') - if len(parts) < 11 or parts[0] != 'MSG': - if adsb_lines_received <= 5: - logger.debug(f"Skipping non-MSG line: {line[:50]}") - continue - - msg_type = parts[1] - icao = parts[4].upper() - if not icao: - continue - - aircraft = app_module.adsb_aircraft.get(icao) or {'icao': icao} + parts = line.split(',') + if len(parts) < 11 or parts[0] != 'MSG': + if adsb_lines_received <= 5: + logger.debug(f"Skipping non-MSG line: {line[:50]}") + continue + + msg_type = parts[1] + icao = parts[4].upper() + if not icao: + continue + + msg_time = _parse_sbs_timestamp(_get_part(parts, 6), _get_part(parts, 7)) + logged_time = _parse_sbs_timestamp(_get_part(parts, 8), _get_part(parts, 9)) + history_record = _build_history_record( + parts=parts, + msg_type=msg_type, + icao=icao, + msg_time=msg_time, + logged_time=logged_time, + service_addr=service_addr, + raw_line=line, + ) + adsb_history_writer.enqueue(history_record) + + aircraft = app_module.adsb_aircraft.get(icao) or {'icao': icao} # Look up aircraft type from database (once per ICAO) if icao not in _looked_up_icaos: @@ -229,12 +452,30 @@ def parse_sbs_stream(service_addr): now = time.time() if now - last_update >= ADSB_UPDATE_INTERVAL: - for update_icao in pending_updates: - if update_icao in app_module.adsb_aircraft: - app_module.adsb_queue.put({ - 'type': 'aircraft', - **app_module.adsb_aircraft[update_icao] - }) + for update_icao in pending_updates: + if update_icao in app_module.adsb_aircraft: + snapshot = app_module.adsb_aircraft[update_icao] + 
app_module.adsb_queue.put({ + 'type': 'aircraft', + **snapshot + }) + adsb_snapshot_writer.enqueue({ + 'captured_at': datetime.now(timezone.utc), + 'icao': update_icao, + 'callsign': snapshot.get('callsign'), + 'registration': snapshot.get('registration'), + 'type_code': snapshot.get('type_code'), + 'type_desc': snapshot.get('type_desc'), + 'altitude': snapshot.get('altitude'), + 'speed': snapshot.get('speed'), + 'heading': snapshot.get('heading'), + 'vertical_rate': snapshot.get('vertical_rate'), + 'lat': snapshot.get('lat'), + 'lon': snapshot.get('lon'), + 'squawk': snapshot.get('squawk'), + 'source_host': service_addr, + 'snapshot': snapshot, + }) pending_updates.clear() last_update = now @@ -282,18 +523,18 @@ def check_adsb_tools(): }) -@adsb_bp.route('/status') -def adsb_status(): - """Get ADS-B tracking status for debugging.""" +@adsb_bp.route('/status') +def adsb_status(): + """Get ADS-B tracking status for debugging.""" # Check if dump1090 process is still running dump1090_running = False if app_module.adsb_process: dump1090_running = app_module.adsb_process.poll() is None - return jsonify({ - 'tracking_active': adsb_using_service, - 'active_device': adsb_active_device, - 'connected_to_sbs': adsb_connected, + return jsonify({ + 'tracking_active': adsb_using_service, + 'active_device': adsb_active_device, + 'connected_to_sbs': adsb_connected, 'messages_received': adsb_messages_received, 'bytes_received': adsb_bytes_received, 'lines_received': adsb_lines_received, @@ -303,25 +544,50 @@ def adsb_status(): 'queue_size': app_module.adsb_queue.qsize(), 'dump1090_path': find_dump1090(), 'dump1090_running': dump1090_running, - 'port_30003_open': check_dump1090_service() is not None - }) + 'port_30003_open': check_dump1090_service() is not None + }) + + +@adsb_bp.route('/session') +def adsb_session(): + """Get ADS-B session status and uptime.""" + session = _get_active_session() + uptime_seconds = None + if session and session.get('started_at'): + started_at = 
session['started_at'] + if isinstance(started_at, datetime): + uptime_seconds = int((datetime.now(timezone.utc) - started_at).total_seconds()) + return jsonify({ + 'tracking_active': adsb_using_service, + 'connected_to_sbs': adsb_connected, + 'active_device': adsb_active_device, + 'session': session, + 'uptime_seconds': uptime_seconds, + }) @adsb_bp.route('/start', methods=['POST']) -def start_adsb(): - """Start ADS-B tracking.""" - global adsb_using_service, adsb_active_device - - with app_module.adsb_lock: - if adsb_using_service: - return jsonify({'status': 'already_running', 'message': 'ADS-B tracking already active'}), 409 - - data = request.json or {} - - # Validate inputs - try: - gain = int(validate_gain(data.get('gain', '40'))) - device = validate_device_index(data.get('device', '0')) +def start_adsb(): + """Start ADS-B tracking.""" + global adsb_using_service, adsb_active_device + + with app_module.adsb_lock: + if adsb_using_service: + session = _get_active_session() + return jsonify({ + 'status': 'already_running', + 'message': 'ADS-B tracking already active', + 'session': session + }), 409 + + data = request.json or {} + start_source = data.get('source') + started_by = request.remote_addr + + # Validate inputs + try: + gain = int(validate_gain(data.get('gain', '40'))) + device = validate_device_index(data.get('device', '0')) except ValueError as e: return jsonify({'status': 'error', 'message': str(e)}), 400 @@ -337,21 +603,45 @@ def start_adsb(): except ValueError as e: return jsonify({'status': 'error', 'message': str(e)}), 400 - remote_addr = f"{remote_sbs_host}:{remote_sbs_port}" - logger.info(f"Connecting to remote dump1090 SBS at {remote_addr}") - adsb_using_service = True - thread = threading.Thread(target=parse_sbs_stream, args=(remote_addr,), daemon=True) - thread.start() - return jsonify({'status': 'started', 'message': f'Connected to remote dump1090 at {remote_addr}'}) + remote_addr = f"{remote_sbs_host}:{remote_sbs_port}" + 
logger.info(f"Connecting to remote dump1090 SBS at {remote_addr}") + adsb_using_service = True + thread = threading.Thread(target=parse_sbs_stream, args=(remote_addr,), daemon=True) + thread.start() + session = _record_session_start( + device_index=device, + sdr_type='remote', + remote_host=remote_sbs_host, + remote_port=remote_sbs_port, + start_source=start_source, + started_by=started_by, + ) + return jsonify({ + 'status': 'started', + 'message': f'Connected to remote dump1090 at {remote_addr}', + 'session': session + }) # Check if dump1090 is already running externally (e.g., user started it manually) existing_service = check_dump1090_service() - if existing_service: - logger.info(f"Found existing dump1090 service at {existing_service}") - adsb_using_service = True - thread = threading.Thread(target=parse_sbs_stream, args=(existing_service,), daemon=True) - thread.start() - return jsonify({'status': 'started', 'message': 'Connected to existing dump1090 service'}) + if existing_service: + logger.info(f"Found existing dump1090 service at {existing_service}") + adsb_using_service = True + thread = threading.Thread(target=parse_sbs_stream, args=(existing_service,), daemon=True) + thread.start() + session = _record_session_start( + device_index=device, + sdr_type='external', + remote_host='localhost', + remote_port=ADSB_SBS_PORT, + start_source=start_source, + started_by=started_by, + ) + return jsonify({ + 'status': 'started', + 'message': 'Connected to existing dump1090 service', + 'session': session + }) # Get SDR type from request sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -402,10 +692,10 @@ def start_adsb(): if sdr_type == SDRType.RTL_SDR: cmd[0] = dump1090_path - try: - logger.info(f"Starting dump1090 with device index {device}: {' '.join(cmd)}") - app_module.adsb_process = subprocess.Popen( - cmd, + try: + logger.info(f"Starting dump1090 with device index {device}: {' '.join(cmd)}") + app_module.adsb_process = subprocess.Popen( + cmd, 
stdout=subprocess.DEVNULL, stderr=subprocess.PIPE, start_new_session=True # Create new process group for clean shutdown @@ -432,24 +722,40 @@ def start_adsb(): error_msg += f' Error: {stderr_output[:200]}' return jsonify({'status': 'error', 'message': error_msg}) - adsb_using_service = True - adsb_active_device = device # Track which device is being used - thread = threading.Thread(target=parse_sbs_stream, args=(f'localhost:{ADSB_SBS_PORT}',), daemon=True) - thread.start() - - return jsonify({'status': 'started', 'message': 'ADS-B tracking started', 'device': device}) + adsb_using_service = True + adsb_active_device = device # Track which device is being used + thread = threading.Thread(target=parse_sbs_stream, args=(f'localhost:{ADSB_SBS_PORT}',), daemon=True) + thread.start() + + session = _record_session_start( + device_index=device, + sdr_type=sdr_type.value, + remote_host=None, + remote_port=None, + start_source=start_source, + started_by=started_by, + ) + return jsonify({ + 'status': 'started', + 'message': 'ADS-B tracking started', + 'device': device, + 'session': session + }) except Exception as e: return jsonify({'status': 'error', 'message': str(e)}) @adsb_bp.route('/stop', methods=['POST']) -def stop_adsb(): - """Stop ADS-B tracking.""" - global adsb_using_service, adsb_active_device - - with app_module.adsb_lock: - if app_module.adsb_process: - try: +def stop_adsb(): + """Stop ADS-B tracking.""" + global adsb_using_service, adsb_active_device + data = request.json or {} + stop_source = data.get('source') + stopped_by = request.remote_addr + + with app_module.adsb_lock: + if app_module.adsb_process: + try: # Kill the entire process group to ensure all child processes are terminated pgid = os.getpgid(app_module.adsb_process.pid) os.killpg(pgid, 15) # SIGTERM @@ -463,12 +769,13 @@ def stop_adsb(): pass app_module.adsb_process = None logger.info("ADS-B process stopped") - adsb_using_service = False - adsb_active_device = None - - 
app_module.adsb_aircraft.clear() - _looked_up_icaos.clear() - return jsonify({'status': 'stopped'}) + adsb_using_service = False + adsb_active_device = None + + app_module.adsb_aircraft.clear() + _looked_up_icaos.clear() + session = _record_session_stop(stop_source=stop_source, stopped_by=stopped_by) + return jsonify({'status': 'stopped', 'session': session}) @adsb_bp.route('/stream') @@ -494,10 +801,165 @@ def stream_adsb(): return response -@adsb_bp.route('/dashboard') -def adsb_dashboard(): - """Popout ADS-B dashboard.""" - return render_template('adsb_dashboard.html') +@adsb_bp.route('/dashboard') +def adsb_dashboard(): + """Popout ADS-B dashboard.""" + return render_template('adsb_dashboard.html') + + +@adsb_bp.route('/history') +def adsb_history(): + """ADS-B history reporting dashboard.""" + resp = make_response(render_template('adsb_history.html', history_enabled=ADSB_HISTORY_ENABLED)) + resp.headers['Cache-Control'] = 'no-store' + return resp + + +@adsb_bp.route('/history/summary') +def adsb_history_summary(): + """Summary stats for ADS-B history window.""" + if not ADSB_HISTORY_ENABLED: + return jsonify({'error': 'ADS-B history is disabled'}), 503 + _ensure_history_schema() + + since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080) + window = f'{since_minutes} minutes' + + sql = """ + SELECT + (SELECT COUNT(*) FROM adsb_messages WHERE received_at >= NOW() - INTERVAL %s) AS message_count, + (SELECT COUNT(*) FROM adsb_snapshots WHERE captured_at >= NOW() - INTERVAL %s) AS snapshot_count, + (SELECT COUNT(DISTINCT icao) FROM adsb_snapshots WHERE captured_at >= NOW() - INTERVAL %s) AS aircraft_count, + (SELECT MIN(captured_at) FROM adsb_snapshots WHERE captured_at >= NOW() - INTERVAL %s) AS first_seen, + (SELECT MAX(captured_at) FROM adsb_snapshots WHERE captured_at >= NOW() - INTERVAL %s) AS last_seen + """ + + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute(sql, 
(window, window, window, window, window)) + row = cur.fetchone() or {} + return jsonify(row) + except Exception as exc: + logger.warning("ADS-B history summary failed: %s", exc) + return jsonify({'error': 'History database unavailable'}), 503 + + +@adsb_bp.route('/history/aircraft') +def adsb_history_aircraft(): + """List latest aircraft snapshots for a time window.""" + if not ADSB_HISTORY_ENABLED: + return jsonify({'error': 'ADS-B history is disabled'}), 503 + _ensure_history_schema() + + since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080) + limit = _parse_int_param(request.args.get('limit'), 200, 1, 2000) + search = (request.args.get('search') or '').strip() + window = f'{since_minutes} minutes' + pattern = f'%{search}%' + + sql = """ + SELECT * + FROM ( + SELECT DISTINCT ON (icao) + icao, + callsign, + registration, + type_code, + type_desc, + altitude, + speed, + heading, + vertical_rate, + lat, + lon, + squawk, + captured_at AS last_seen + FROM adsb_snapshots + WHERE captured_at >= NOW() - INTERVAL %s + AND (%s = '' OR icao ILIKE %s OR callsign ILIKE %s OR registration ILIKE %s) + ORDER BY icao, captured_at DESC + ) latest + ORDER BY last_seen DESC + LIMIT %s + """ + + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute(sql, (window, search, pattern, pattern, pattern, limit)) + rows = cur.fetchall() + return jsonify({'aircraft': rows, 'count': len(rows)}) + except Exception as exc: + logger.warning("ADS-B history aircraft query failed: %s", exc) + return jsonify({'error': 'History database unavailable'}), 503 + + +@adsb_bp.route('/history/timeline') +def adsb_history_timeline(): + """Timeline snapshots for a specific aircraft.""" + if not ADSB_HISTORY_ENABLED: + return jsonify({'error': 'ADS-B history is disabled'}), 503 + _ensure_history_schema() + + icao = (request.args.get('icao') or '').strip().upper() + if not icao: + return jsonify({'error': 'icao is 
required'}), 400 + + since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080) + limit = _parse_int_param(request.args.get('limit'), 2000, 1, 20000) + window = f'{since_minutes} minutes' + + sql = """ + SELECT captured_at, altitude, speed, heading, vertical_rate, lat, lon, squawk + FROM adsb_snapshots + WHERE icao = %s + AND captured_at >= NOW() - INTERVAL %s + ORDER BY captured_at ASC + LIMIT %s + """ + + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute(sql, (icao, window, limit)) + rows = cur.fetchall() + return jsonify({'icao': icao, 'timeline': rows, 'count': len(rows)}) + except Exception as exc: + logger.warning("ADS-B history timeline query failed: %s", exc) + return jsonify({'error': 'History database unavailable'}), 503 + + +@adsb_bp.route('/history/messages') +def adsb_history_messages(): + """Raw message history for a specific aircraft.""" + if not ADSB_HISTORY_ENABLED: + return jsonify({'error': 'ADS-B history is disabled'}), 503 + _ensure_history_schema() + + icao = (request.args.get('icao') or '').strip().upper() + since_minutes = _parse_int_param(request.args.get('since_minutes'), 30, 1, 10080) + limit = _parse_int_param(request.args.get('limit'), 200, 1, 2000) + window = f'{since_minutes} minutes' + + sql = """ + SELECT received_at, msg_type, callsign, altitude, speed, heading, vertical_rate, lat, lon, squawk + FROM adsb_messages + WHERE received_at >= NOW() - INTERVAL %s + AND (%s = '' OR icao = %s) + ORDER BY received_at DESC + LIMIT %s + """ + + try: + with _get_history_connection() as conn: + with conn.cursor(cursor_factory=RealDictCursor) as cur: + cur.execute(sql, (window, icao, icao, limit)) + rows = cur.fetchall() + return jsonify({'icao': icao, 'messages': rows, 'count': len(rows)}) + except Exception as exc: + logger.warning("ADS-B history message query failed: %s", exc) + return jsonify({'error': 'History database unavailable'}), 503 # 
============================================ diff --git a/static/css/adsb_history.css b/static/css/adsb_history.css new file mode 100644 index 0000000..387cc3f --- /dev/null +++ b/static/css/adsb_history.css @@ -0,0 +1,615 @@ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +:root { + --bg-dark: #0a0c10; + --bg-panel: #0f1218; + --bg-card: #141a24; + --border-color: #1f2937; + --border-glow: rgba(74, 158, 255, 0.6); + --text-primary: #e8eaed; + --text-secondary: #9ca3af; + --text-dim: #4b5563; + --accent-cyan: #4a9eff; + --accent-green: #22c55e; + --accent-amber: #d4a853; + --grid-line: rgba(74, 158, 255, 0.08); +} + +body { + font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif; + background: var(--bg-dark); + color: var(--text-primary); + min-height: 100vh; +} + +.mono { + font-family: 'JetBrains Mono', monospace; +} + +.radar-bg { + position: fixed; + inset: 0; + background-image: + linear-gradient(var(--grid-line) 1px, transparent 1px), + linear-gradient(90deg, var(--grid-line) 1px, transparent 1px); + background-size: 50px 50px; + pointer-events: none; + z-index: 0; +} + +.scanline { + position: fixed; + top: 0; + left: 0; + right: 0; + height: 2px; + background: linear-gradient(90deg, transparent, var(--accent-cyan), transparent); + animation: scan 6s linear infinite; + pointer-events: none; + z-index: 1; + opacity: 0.3; +} + +@keyframes scan { + 0% { top: -4px; } + 100% { top: 100vh; } +} + +.header { + position: relative; + z-index: 2; + padding: 12px 20px; + background: var(--bg-panel); + border-bottom: 1px solid var(--border-color); + display: flex; + justify-content: space-between; + align-items: center; + flex-wrap: wrap; + gap: 12px; +} + +.logo { + font-size: 18px; + font-weight: 700; + letter-spacing: 2px; + text-transform: uppercase; +} + +.logo span { + color: var(--text-secondary); + font-weight: 400; + font-size: 12px; + margin-left: 10px; + letter-spacing: 1px; +} + +.status-bar { + display: flex; + align-items: 
center; + gap: 12px; + font-family: 'JetBrains Mono', monospace; + font-size: 11px; +} + +.back-link { + color: var(--accent-cyan); + text-decoration: none; + font-size: 11px; + padding: 6px 12px; + border: 1px solid var(--accent-cyan); + border-radius: 4px; +} + +.history-shell { + position: relative; + z-index: 2; + padding: 16px 18px 28px; + display: flex; + flex-direction: column; + gap: 16px; +} + +.summary-strip { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(150px, 1fr)); + gap: 12px; +} + +.session-strip { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); + gap: 14px; + align-items: center; + background: linear-gradient(120deg, rgba(15, 18, 24, 0.95), rgba(20, 26, 36, 0.95)); + border: 1px solid var(--border-color); + border-radius: 12px; + padding: 14px 16px; + box-shadow: 0 0 18px rgba(0, 0, 0, 0.35); +} + +.session-status { + display: flex; + align-items: center; + gap: 12px; +} + +.status-dot { + width: 12px; + height: 12px; + border-radius: 50%; + background: var(--text-dim); + box-shadow: 0 0 12px rgba(75, 85, 99, 0.6); +} + +.status-dot.active { + background: var(--accent-green); + box-shadow: 0 0 14px rgba(34, 197, 94, 0.8); +} + +.session-label { + font-size: 10px; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 1.2px; +} + +.session-value { + font-size: 14px; + font-weight: 600; +} + +.session-metric { + display: flex; + flex-direction: column; + gap: 6px; +} + +#sessionNotice { + color: var(--accent-cyan); +} + +.session-controls { + display: flex; + gap: 10px; + align-items: center; + justify-content: flex-end; +} + +.session-controls select { + background: var(--bg-dark); + border: 1px solid var(--border-color); + color: var(--text-primary); + padding: 8px 10px; + border-radius: 6px; + font-size: 12px; + min-width: 180px; +} + +.primary-btn.stop { + background: var(--accent-amber); + color: #0a0c10; +} + +.summary-card { + background: var(--bg-card); + border: 1px 
solid var(--border-color); + border-radius: 10px; + padding: 14px 16px; + box-shadow: 0 0 14px rgba(0, 0, 0, 0.3); +} + +.summary-label { + font-size: 11px; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 1.3px; + margin-bottom: 6px; +} + +.summary-value { + font-size: 18px; + font-weight: 600; + color: var(--text-primary); +} + +.controls { + display: flex; + flex-wrap: wrap; + gap: 14px; + align-items: flex-end; + background: var(--bg-panel); + border: 1px solid var(--border-color); + border-radius: 10px; + padding: 14px 16px; +} + +.control-group { + display: flex; + flex-direction: column; + gap: 6px; +} + +.control-group label { + font-size: 10px; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 1.2px; +} + +.control-group input, +.control-group select { + background: var(--bg-dark); + border: 1px solid var(--border-color); + color: var(--text-primary); + padding: 8px 10px; + border-radius: 6px; + font-size: 12px; + min-width: 160px; +} + +.primary-btn { + background: var(--accent-cyan); + border: none; + color: #0a0c10; + font-weight: 600; + padding: 10px 18px; + border-radius: 6px; + cursor: pointer; + transition: transform 0.2s ease, box-shadow 0.2s ease; +} + +.primary-btn:hover { + transform: translateY(-1px); + box-shadow: 0 6px 14px rgba(74, 158, 255, 0.3); +} + +.status-pill { + font-family: 'JetBrains Mono', monospace; + font-size: 11px; + padding: 8px 12px; + border-radius: 999px; + border: 1px solid var(--accent-amber); + color: var(--accent-amber); + text-transform: uppercase; + letter-spacing: 1px; +} + +.content-grid { + display: grid; + grid-template-columns: minmax(300px, 1fr) minmax(320px, 1fr); + gap: 16px; +} + +.panel { + background: var(--bg-panel); + border: 1px solid var(--border-color); + border-radius: 12px; + display: flex; + flex-direction: column; + min-height: 420px; +} + +.panel-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 
12px 16px; + border-bottom: 1px solid var(--border-color); + font-size: 12px; + letter-spacing: 1.6px; + text-transform: uppercase; + color: var(--text-secondary); +} + +.panel-meta { + font-family: 'JetBrains Mono', monospace; + font-size: 11px; + color: var(--accent-cyan); +} + +.panel-body { + padding: 12px 14px; + flex: 1; + overflow: auto; +} + +.aircraft-table { + width: 100%; + border-collapse: collapse; + font-size: 12px; +} + +.aircraft-table th, +.aircraft-table td { + padding: 8px 6px; + border-bottom: 1px solid rgba(31, 41, 55, 0.6); + text-align: left; +} + +.aircraft-table th { + font-size: 10px; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 1px; +} + +.aircraft-row { + cursor: pointer; + transition: background 0.15s ease; +} + +.aircraft-row:hover { + background: rgba(74, 158, 255, 0.1); +} + +.mono { + font-family: 'JetBrains Mono', monospace; +} + +.empty-row td, +.empty-row { + color: var(--text-dim); + text-align: center; + padding: 18px 10px; +} + +.detail-card { + padding: 12px 14px; + background: var(--bg-card); + border: 1px solid var(--border-color); + border-radius: 10px; + margin-bottom: 12px; +} + +.detail-title { + font-weight: 600; + font-size: 14px; + margin-bottom: 6px; +} + +.detail-meta { + color: var(--text-secondary); + font-size: 12px; +} + +.chart-card { + background: var(--bg-card); + border: 1px solid var(--border-color); + border-radius: 10px; + padding: 10px; + height: 180px; + display: flex; + flex-direction: column; + gap: 6px; +} + +.chart-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 10px; + margin-bottom: 12px; +} + +.chart-title { + font-size: 10px; + color: var(--text-secondary); + text-transform: uppercase; + letter-spacing: 1px; +} + +#altitudeChart { + width: 100%; + height: 100%; +} + +#speedChart, +#headingChart, +#verticalChart { + width: 100%; + height: 100%; +} + +.timeline-list { + display: flex; + flex-direction: column; + gap: 
8px; + font-size: 12px; + color: var(--text-secondary); +} + +.timeline-row { + display: flex; + justify-content: space-between; + gap: 8px; + padding: 6px 10px; + border: 1px solid rgba(31, 41, 55, 0.6); + border-radius: 6px; + background: rgba(15, 18, 24, 0.6); +} + +.squawk-list { + margin-top: 10px; + display: flex; + flex-direction: column; + gap: 8px; + color: var(--text-secondary); +} + +.modal-backdrop { + position: fixed; + inset: 0; + background: rgba(5, 8, 15, 0.65); + display: flex; + align-items: center; + justify-content: center; + opacity: 0; + pointer-events: none; + transition: opacity 0.2s ease; + z-index: 50; +} + +.modal-backdrop.open { + opacity: 1; + pointer-events: auto; +} + +.modal-card { + background: var(--bg-panel); + border: 1px solid var(--border-color); + border-radius: 14px; + padding: 18px; + width: min(820px, 92vw); + box-shadow: 0 20px 60px rgba(0, 0, 0, 0.4); + position: relative; +} + +.modal-close { + position: absolute; + top: 12px; + right: 12px; + border: none; + background: transparent; + color: var(--text-secondary); + font-size: 24px; + cursor: pointer; +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + gap: 16px; + margin-bottom: 14px; +} + +.modal-title { + font-size: 20px; + font-weight: 600; +} + +.modal-subtitle { + color: var(--text-secondary); + font-size: 12px; + margin-top: 4px; +} + +.modal-actions { + display: flex; + gap: 8px; +} + +.nav-btn { + background: rgba(74, 158, 255, 0.15); + border: 1px solid rgba(74, 158, 255, 0.4); + color: var(--accent-cyan); + padding: 6px 10px; + border-radius: 6px; + cursor: pointer; +} + +.modal-body { + display: grid; + grid-template-columns: 1fr 1.2fr; + gap: 16px; +} + +.modal-photo { + background: var(--bg-card); + border-radius: 12px; + border: 1px solid var(--border-color); + min-height: 220px; + display: flex; + align-items: center; + justify-content: center; + overflow: hidden; +} + +.modal-photo img { + width: 100%; + 
height: 100%; + object-fit: cover; + display: none; +} + +.photo-fallback { + color: var(--text-dim); + font-size: 12px; + text-transform: uppercase; + letter-spacing: 1px; + display: flex; + align-items: center; + justify-content: center; + height: 100%; + width: 100%; +} + +.modal-details { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 10px 18px; + font-size: 12px; +} + +.detail-row { + display: flex; + flex-direction: column; + gap: 4px; + padding: 8px 10px; + background: rgba(20, 26, 36, 0.6); + border-radius: 8px; + border: 1px solid rgba(31, 41, 55, 0.6); +} + +.detail-row span { + color: var(--text-secondary); + font-size: 10px; + text-transform: uppercase; + letter-spacing: 1px; +} + +.detail-row strong { + font-size: 13px; +} + +@media (max-width: 1024px) { + .content-grid { + grid-template-columns: 1fr; + } + + .modal-body { + grid-template-columns: 1fr; + } +} + +@media (max-width: 720px) { + .controls { + flex-direction: column; + align-items: stretch; + } + + .control-group input, + .control-group select { + min-width: 100%; + } + + .panel { + min-height: 320px; + } + + .session-controls { + flex-direction: column; + align-items: stretch; + } + + .modal-card { + padding: 16px; + } + + .modal-details { + grid-template-columns: 1fr; + } +} diff --git a/templates/adsb_dashboard.html b/templates/adsb_dashboard.html index a172b74..286bf93 100644 --- a/templates/adsb_dashboard.html +++ b/templates/adsb_dashboard.html @@ -74,9 +74,12 @@ - + + + 📚 History +

@@ -1959,11 +1962,14 @@ ACARS: ${r.statistics.acarsMessages} messages`; setInterval(cleanupOldAircraft, 10000); checkAdsbTools(); checkAircraftDatabase(); - checkDvbDriverConflict(); - - // Auto-connect to gpsd if available - autoConnectGps(); - }); + checkDvbDriverConflict(); + + // Auto-connect to gpsd if available + autoConnectGps(); + + // Sync tracking state if ADS-B already running + syncTrackingStatus(); + }); // Track which device is being used for ADS-B tracking let adsbActiveDevice = null; @@ -2362,8 +2368,8 @@ sudo make install return { host, port }; } - async function toggleTracking() { - const btn = document.getElementById('startBtn'); + async function toggleTracking() { + const btn = document.getElementById('startBtn'); if (!isTracking) { // Check for remote dump1090 config @@ -2428,12 +2434,52 @@ sudo make install document.getElementById('trackingDot').classList.add('inactive'); document.getElementById('trackingStatus').textContent = 'STANDBY'; // Re-enable ADS-B device selector - document.getElementById('adsbDeviceSelect').disabled = false; - } - } - - function startEventStream() { - if (eventSource) eventSource.close(); + document.getElementById('adsbDeviceSelect').disabled = false; + } + } + + async function syncTrackingStatus() { + try { + const response = await fetch('/adsb/session'); + if (!response.ok) { + return; + } + const data = await response.json(); + if (!data.tracking_active) { + return; + } + isTracking = true; + startEventStream(); + drawRangeRings(); + const startBtn = document.getElementById('startBtn'); + startBtn.textContent = 'STOP'; + startBtn.classList.add('active'); + document.getElementById('trackingDot').classList.remove('inactive'); + document.getElementById('trackingStatus').textContent = 'TRACKING'; + document.getElementById('adsbDeviceSelect').disabled = true; + + const session = data.session || {}; + const startTime = session.started_at ? 
Date.parse(session.started_at) : null; + if (startTime) { + stats.sessionStart = startTime; + } + startSessionTimer(); + + const sessionDevice = session.device_index; + if (sessionDevice !== null && sessionDevice !== undefined) { + adsbActiveDevice = sessionDevice; + const adsbSelect = document.getElementById('adsbDeviceSelect'); + if (adsbSelect) { + adsbSelect.value = sessionDevice; + } + } + } catch (err) { + console.warn('Failed to sync ADS-B tracking status', err); + } + } + + function startEventStream() { + if (eventSource) eventSource.close(); console.log('Starting ADS-B event stream...'); eventSource = new EventSource('/adsb/stream'); diff --git a/templates/adsb_history.html b/templates/adsb_history.html new file mode 100644 index 0000000..2efdf1f --- /dev/null +++ b/templates/adsb_history.html @@ -0,0 +1,762 @@ + + + + + + ADS-B History // INTERCEPT + + + + + +
+
+ +
+ + +
+ +
+
+
+
Messages
+
--
+
+
+
Snapshots
+
--
+
+
+
Aircraft
+
--
+
+
+
First Seen
+
--
+
+
+
Last Seen
+
--
+
+
+ +
+
+
+
+
Tracking
+
--
+
+
+
+
Uptime
+
--:--:--
+
+
+
Started
+
--
+
+
+
Status
+
Ready
+
+
+ + +
+
+ +
+
+ + +
+
+ + +
+
+ + +
+ +
+ {% if history_enabled %} + HISTORY ONLINE + {% else %} + HISTORY DISABLED + {% endif %} +
+
+ +
+
+
+ RECENT AIRCRAFT + 0 +
+
+ + + + + + + + + + + + + + + +
ICAOCallsignAltSpeedLast Seen
No aircraft in this window
+
+
+ +
+
+ DETAIL TIMELINE + -- +
+
+
+
Select an aircraft
+
---
+
+
+
+
Altitude (ft)
+ +
+
+
Speed (kt)
+ +
+
+
Heading (deg)
+ +
+
+
Vertical Rate (fpm)
+ +
+
+
+
No timeline data
+
+
+
No squawk changes
+
+
+
+
+
+ + + + + + diff --git a/tests/test_adsb_history.py b/tests/test_adsb_history.py new file mode 100644 index 0000000..6d0d51a --- /dev/null +++ b/tests/test_adsb_history.py @@ -0,0 +1,277 @@ +"""Tests for ADS-B history persistence utilities.""" + +import queue +import threading +import time +from datetime import datetime, timezone +from unittest.mock import MagicMock, patch, PropertyMock + +import pytest + + +class TestAdsbHistoryWriterUnit: + """Unit tests for AdsbHistoryWriter (no database).""" + + @pytest.fixture + def mock_config(self): + """Mock config with history disabled.""" + with patch.multiple( + 'utils.adsb_history', + ADSB_HISTORY_ENABLED=False, + ADSB_DB_HOST='localhost', + ADSB_DB_PORT=5432, + ADSB_DB_NAME='test_db', + ADSB_DB_USER='test', + ADSB_DB_PASSWORD='test', + ADSB_HISTORY_BATCH_SIZE=100, + ADSB_HISTORY_FLUSH_INTERVAL=1.0, + ADSB_HISTORY_QUEUE_SIZE=1000, + ): + yield + + @pytest.fixture + def mock_config_enabled(self): + """Mock config with history enabled.""" + with patch.multiple( + 'utils.adsb_history', + ADSB_HISTORY_ENABLED=True, + ADSB_DB_HOST='localhost', + ADSB_DB_PORT=5432, + ADSB_DB_NAME='test_db', + ADSB_DB_USER='test', + ADSB_DB_PASSWORD='test', + ADSB_HISTORY_BATCH_SIZE=100, + ADSB_HISTORY_FLUSH_INTERVAL=1.0, + ADSB_HISTORY_QUEUE_SIZE=1000, + ): + yield + + def test_writer_disabled_by_default(self, mock_config): + """Test writer does nothing when disabled.""" + from utils.adsb_history import AdsbHistoryWriter + + writer = AdsbHistoryWriter() + writer.enabled = False + + # Should not start thread + writer.start() + assert writer._thread is None + + # Should not queue records + writer.enqueue({'icao': 'ABC123'}) + assert writer._queue.empty() + + def test_enqueue_adds_received_at(self, mock_config_enabled): + """Test enqueue adds received_at timestamp if missing.""" + from utils.adsb_history import AdsbHistoryWriter + + writer = AdsbHistoryWriter() + writer.enabled = True + + record = {'icao': 'ABC123'} + writer.enqueue(record) + + 
# Record should have received_at added + assert 'received_at' in record + assert isinstance(record['received_at'], datetime) + + def test_enqueue_preserves_existing_received_at(self, mock_config_enabled): + """Test enqueue preserves existing received_at.""" + from utils.adsb_history import AdsbHistoryWriter + + writer = AdsbHistoryWriter() + writer.enabled = True + + original_time = datetime(2025, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + record = {'icao': 'ABC123', 'received_at': original_time} + writer.enqueue(record) + + assert record['received_at'] == original_time + + def test_enqueue_drops_when_queue_full(self, mock_config_enabled): + """Test enqueue drops records when queue is full.""" + from utils.adsb_history import AdsbHistoryWriter + + writer = AdsbHistoryWriter() + writer.enabled = True + writer._queue = queue.Queue(maxsize=2) + + # Fill the queue + writer.enqueue({'icao': 'A'}) + writer.enqueue({'icao': 'B'}) + + # This should be dropped + writer.enqueue({'icao': 'C'}) + + assert writer._dropped == 1 + assert writer._queue.qsize() == 2 + + +class TestAdsbSnapshotWriterUnit: + """Unit tests for AdsbSnapshotWriter (no database).""" + + @pytest.fixture + def mock_config_enabled(self): + """Mock config with history enabled.""" + with patch.multiple( + 'utils.adsb_history', + ADSB_HISTORY_ENABLED=True, + ADSB_DB_HOST='localhost', + ADSB_DB_PORT=5432, + ADSB_DB_NAME='test_db', + ADSB_DB_USER='test', + ADSB_DB_PASSWORD='test', + ADSB_HISTORY_BATCH_SIZE=100, + ADSB_HISTORY_FLUSH_INTERVAL=1.0, + ADSB_HISTORY_QUEUE_SIZE=1000, + ): + yield + + def test_snapshot_enqueue_adds_captured_at(self, mock_config_enabled): + """Test enqueue adds captured_at timestamp if missing.""" + from utils.adsb_history import AdsbSnapshotWriter + + writer = AdsbSnapshotWriter() + writer.enabled = True + + record = {'icao': 'ABC123'} + writer.enqueue(record) + + assert 'captured_at' in record + assert isinstance(record['captured_at'], datetime) + + +class TestMakeDsn: + """Tests for DSN 
generation.""" + + def test_make_dsn_format(self): + """Test DSN string format.""" + with patch.multiple( + 'utils.adsb_history', + ADSB_DB_HOST='testhost', + ADSB_DB_PORT=5433, + ADSB_DB_NAME='testdb', + ADSB_DB_USER='testuser', + ADSB_DB_PASSWORD='testpass', + ): + from utils.adsb_history import _make_dsn + + dsn = _make_dsn() + + assert 'host=testhost' in dsn + assert 'port=5433' in dsn + assert 'dbname=testdb' in dsn + assert 'user=testuser' in dsn + assert 'password=testpass' in dsn + + +class TestEnsureAdsbSchema: + """Tests for schema creation.""" + + def test_ensure_schema_creates_tables(self): + """Test schema creation SQL is executed.""" + from utils.adsb_history import _ensure_adsb_schema + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_conn.cursor.return_value.__enter__ = MagicMock(return_value=mock_cursor) + mock_conn.cursor.return_value.__exit__ = MagicMock(return_value=False) + + _ensure_adsb_schema(mock_conn) + + # Should execute CREATE TABLE statements + assert mock_cursor.execute.call_count >= 3 # 3 tables + indexes + + # Should commit + mock_conn.commit.assert_called_once() + + def test_ensure_schema_creates_indexes(self): + """Test schema creates required indexes.""" + from utils.adsb_history import _ensure_adsb_schema + + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_conn.cursor.return_value.__enter__ = MagicMock(return_value=mock_cursor) + mock_conn.cursor.return_value.__exit__ = MagicMock(return_value=False) + + _ensure_adsb_schema(mock_conn) + + # Get all executed SQL + executed_sql = [str(call) for call in mock_cursor.execute.call_args_list] + sql_text = ' '.join(executed_sql) + + # Should create indexes + assert 'CREATE INDEX' in sql_text or 'idx_adsb' in sql_text + + +class TestMessageFields: + """Tests for message field constants.""" + + def test_message_fields_exist(self): + """Test required message fields are defined.""" + from utils.adsb_history import _MESSAGE_FIELDS + + required_fields = [ + 'received_at', 
'icao', 'callsign', 'altitude', + 'speed', 'heading', 'lat', 'lon', 'squawk' + ] + + for field in required_fields: + assert field in _MESSAGE_FIELDS + + def test_snapshot_fields_exist(self): + """Test required snapshot fields are defined.""" + from utils.adsb_history import _SNAPSHOT_FIELDS + + required_fields = [ + 'captured_at', 'icao', 'callsign', 'altitude', + 'lat', 'lon', 'snapshot' + ] + + for field in required_fields: + assert field in _SNAPSHOT_FIELDS + + +class TestWriterThreadSafety: + """Tests for thread safety of writers.""" + + def test_multiple_enqueue_thread_safe(self): + """Test multiple threads can enqueue safely.""" + with patch.multiple( + 'utils.adsb_history', + ADSB_HISTORY_ENABLED=True, + ADSB_HISTORY_QUEUE_SIZE=10000, + ADSB_HISTORY_BATCH_SIZE=100, + ADSB_HISTORY_FLUSH_INTERVAL=1.0, + ADSB_DB_HOST='localhost', + ADSB_DB_PORT=5432, + ADSB_DB_NAME='test', + ADSB_DB_USER='test', + ADSB_DB_PASSWORD='test', + ): + from utils.adsb_history import AdsbHistoryWriter + + writer = AdsbHistoryWriter() + writer.enabled = True + errors = [] + + def enqueue_many(n): + try: + for i in range(n): + writer.enqueue({'icao': f'TEST{i}', 'altitude': i * 100}) + except Exception as e: + errors.append(e) + + threads = [ + threading.Thread(target=enqueue_many, args=(100,)) + for _ in range(5) + ] + + for t in threads: + t.start() + for t in threads: + t.join() + + assert len(errors) == 0 + # Should have queued 500 records (5 threads * 100 each) + assert writer._queue.qsize() == 500 diff --git a/utils/adsb_history.py b/utils/adsb_history.py new file mode 100644 index 0000000..61fe1dd --- /dev/null +++ b/utils/adsb_history.py @@ -0,0 +1,397 @@ +"""ADS-B history persistence to PostgreSQL.""" + +from __future__ import annotations + +import logging +import queue +import threading +import time +from datetime import datetime, timezone +from typing import Iterable + +import psycopg2 +from psycopg2.extras import execute_values, Json + +from config import ( + ADSB_DB_HOST, + 
ADSB_DB_NAME, + ADSB_DB_PASSWORD, + ADSB_DB_PORT, + ADSB_DB_USER, + ADSB_HISTORY_BATCH_SIZE, + ADSB_HISTORY_ENABLED, + ADSB_HISTORY_FLUSH_INTERVAL, + ADSB_HISTORY_QUEUE_SIZE, +) + +logger = logging.getLogger('intercept.adsb_history') + + +_MESSAGE_FIELDS = ( + 'received_at', + 'msg_time', + 'logged_time', + 'icao', + 'msg_type', + 'callsign', + 'altitude', + 'speed', + 'heading', + 'vertical_rate', + 'lat', + 'lon', + 'squawk', + 'session_id', + 'aircraft_id', + 'flight_id', + 'raw_line', + 'source_host', +) + +_MESSAGE_INSERT_SQL = f""" + INSERT INTO adsb_messages ({', '.join(_MESSAGE_FIELDS)}) + VALUES %s +""" + +_SNAPSHOT_FIELDS = ( + 'captured_at', + 'icao', + 'callsign', + 'registration', + 'type_code', + 'type_desc', + 'altitude', + 'speed', + 'heading', + 'vertical_rate', + 'lat', + 'lon', + 'squawk', + 'source_host', + 'snapshot', +) + +_SNAPSHOT_INSERT_SQL = f""" + INSERT INTO adsb_snapshots ({', '.join(_SNAPSHOT_FIELDS)}) + VALUES %s +""" + +def _ensure_adsb_schema(conn: psycopg2.extensions.connection) -> None: + with conn.cursor() as cur: + cur.execute( + """ + CREATE TABLE IF NOT EXISTS adsb_messages ( + id BIGSERIAL PRIMARY KEY, + received_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + msg_time TIMESTAMPTZ, + logged_time TIMESTAMPTZ, + icao TEXT NOT NULL, + msg_type SMALLINT, + callsign TEXT, + altitude INTEGER, + speed INTEGER, + heading INTEGER, + vertical_rate INTEGER, + lat DOUBLE PRECISION, + lon DOUBLE PRECISION, + squawk TEXT, + session_id TEXT, + aircraft_id TEXT, + flight_id TEXT, + raw_line TEXT, + source_host TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_messages_icao_time + ON adsb_messages (icao, received_at) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_messages_received_at + ON adsb_messages (received_at) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_messages_msg_time + ON adsb_messages (msg_time) + """ + ) + cur.execute( + """ + CREATE TABLE IF NOT EXISTS 
adsb_snapshots ( + id BIGSERIAL PRIMARY KEY, + captured_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + icao TEXT NOT NULL, + callsign TEXT, + registration TEXT, + type_code TEXT, + type_desc TEXT, + altitude INTEGER, + speed INTEGER, + heading INTEGER, + vertical_rate INTEGER, + lat DOUBLE PRECISION, + lon DOUBLE PRECISION, + squawk TEXT, + source_host TEXT, + snapshot JSONB + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_snapshots_icao_time + ON adsb_snapshots (icao, captured_at) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_snapshots_captured_at + ON adsb_snapshots (captured_at) + """ + ) + cur.execute( + """ + CREATE TABLE IF NOT EXISTS adsb_sessions ( + id BIGSERIAL PRIMARY KEY, + started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + ended_at TIMESTAMPTZ, + device_index INTEGER, + sdr_type TEXT, + remote_host TEXT, + remote_port INTEGER, + start_source TEXT, + stop_source TEXT, + started_by TEXT, + stopped_by TEXT, + notes TEXT + ) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_sessions_started_at + ON adsb_sessions (started_at) + """ + ) + cur.execute( + """ + CREATE INDEX IF NOT EXISTS idx_adsb_sessions_active + ON adsb_sessions (ended_at) + """ + ) + conn.commit() + + +def _make_dsn() -> str: + return ( + f"host={ADSB_DB_HOST} port={ADSB_DB_PORT} dbname={ADSB_DB_NAME} " + f"user={ADSB_DB_USER} password={ADSB_DB_PASSWORD}" + ) + + +class AdsbHistoryWriter: + """Background writer for ADS-B history records.""" + + def __init__(self) -> None: + self.enabled = ADSB_HISTORY_ENABLED + self._queue: queue.Queue[dict] = queue.Queue(maxsize=ADSB_HISTORY_QUEUE_SIZE) + self._thread: threading.Thread | None = None + self._stop_event = threading.Event() + self._conn: psycopg2.extensions.connection | None = None + self._dropped = 0 + + def start(self) -> None: + if not self.enabled: + return + if self._thread and self._thread.is_alive(): + return + self._thread = threading.Thread(target=self._run, 
name='adsb-history-writer', daemon=True) + self._thread.start() + logger.info("ADS-B history writer started") + + def stop(self) -> None: + self._stop_event.set() + + def enqueue(self, record: dict) -> None: + if not self.enabled: + return + if 'received_at' not in record or record['received_at'] is None: + record['received_at'] = datetime.now(timezone.utc) + try: + self._queue.put_nowait(record) + except queue.Full: + self._dropped += 1 + if self._dropped % 1000 == 0: + logger.warning("ADS-B history queue full, dropped %d records", self._dropped) + + def _run(self) -> None: + batch: list[dict] = [] + last_flush = time.time() + + while not self._stop_event.is_set(): + timeout = max(0.0, ADSB_HISTORY_FLUSH_INTERVAL - (time.time() - last_flush)) + try: + item = self._queue.get(timeout=timeout) + batch.append(item) + except queue.Empty: + pass + + now = time.time() + if batch and (len(batch) >= ADSB_HISTORY_BATCH_SIZE or now - last_flush >= ADSB_HISTORY_FLUSH_INTERVAL): + if self._flush(batch): + batch.clear() + last_flush = now + + def _ensure_connection(self) -> psycopg2.extensions.connection | None: + if self._conn: + return self._conn + try: + self._conn = psycopg2.connect(_make_dsn()) + self._conn.autocommit = False + self._ensure_schema(self._conn) + return self._conn + except Exception as exc: + logger.warning("ADS-B history DB connection failed: %s", exc) + self._conn = None + return None + + def _ensure_schema(self, conn: psycopg2.extensions.connection) -> None: + _ensure_adsb_schema(conn) + + def _flush(self, batch: Iterable[dict]) -> bool: + conn = self._ensure_connection() + if not conn: + time.sleep(2.0) + return False + + values = [] + for record in batch: + values.append(tuple(record.get(field) for field in _MESSAGE_FIELDS)) + + try: + with conn.cursor() as cur: + execute_values(cur, _MESSAGE_INSERT_SQL, values) + conn.commit() + return True + except Exception as exc: + logger.warning("ADS-B history insert failed: %s", exc) + try: + conn.rollback() + 
except Exception: + pass + self._conn = None + time.sleep(2.0) + return False + + +adsb_history_writer = AdsbHistoryWriter() + + +class AdsbSnapshotWriter: + """Background writer for ADS-B snapshot records.""" + + def __init__(self) -> None: + self.enabled = ADSB_HISTORY_ENABLED + self._queue: queue.Queue[dict] = queue.Queue(maxsize=ADSB_HISTORY_QUEUE_SIZE) + self._thread: threading.Thread | None = None + self._stop_event = threading.Event() + self._conn: psycopg2.extensions.connection | None = None + self._dropped = 0 + + def start(self) -> None: + if not self.enabled: + return + if self._thread and self._thread.is_alive(): + return + self._thread = threading.Thread(target=self._run, name='adsb-snapshot-writer', daemon=True) + self._thread.start() + logger.info("ADS-B snapshot writer started") + + def stop(self) -> None: + self._stop_event.set() + + def enqueue(self, record: dict) -> None: + if not self.enabled: + return + if 'captured_at' not in record or record['captured_at'] is None: + record['captured_at'] = datetime.now(timezone.utc) + try: + self._queue.put_nowait(record) + except queue.Full: + self._dropped += 1 + if self._dropped % 1000 == 0: + logger.warning("ADS-B snapshot queue full, dropped %d records", self._dropped) + + def _run(self) -> None: + batch: list[dict] = [] + last_flush = time.time() + + while not self._stop_event.is_set(): + timeout = max(0.0, ADSB_HISTORY_FLUSH_INTERVAL - (time.time() - last_flush)) + try: + item = self._queue.get(timeout=timeout) + batch.append(item) + except queue.Empty: + pass + + now = time.time() + if batch and (len(batch) >= ADSB_HISTORY_BATCH_SIZE or now - last_flush >= ADSB_HISTORY_FLUSH_INTERVAL): + if self._flush(batch): + batch.clear() + last_flush = now + + def _ensure_connection(self) -> psycopg2.extensions.connection | None: + if self._conn: + return self._conn + try: + self._conn = psycopg2.connect(_make_dsn()) + self._conn.autocommit = False + self._ensure_schema(self._conn) + return self._conn + except 
Exception as exc: + logger.warning("ADS-B snapshot DB connection failed: %s", exc) + self._conn = None + return None + + def _ensure_schema(self, conn: psycopg2.extensions.connection) -> None: + _ensure_adsb_schema(conn) + + def _flush(self, batch: Iterable[dict]) -> bool: + conn = self._ensure_connection() + if not conn: + time.sleep(2.0) + return False + + values = [] + for record in batch: + row = [] + for field in _SNAPSHOT_FIELDS: + value = record.get(field) + if field == 'snapshot' and value is not None: + value = Json(value) + row.append(value) + values.append(tuple(row)) + + try: + with conn.cursor() as cur: + execute_values(cur, _SNAPSHOT_INSERT_SQL, values) + conn.commit() + return True + except Exception as exc: + logger.warning("ADS-B snapshot insert failed: %s", exc) + try: + conn.rollback() + except Exception: + pass + self._conn = None + time.sleep(2.0) + return False + + +adsb_snapshot_writer = AdsbSnapshotWriter()