diff --git a/routes/adsb.py b/routes/adsb.py
index 187938b..6a685d2 100644
--- a/routes/adsb.py
+++ b/routes/adsb.py
@@ -3,6 +3,8 @@
from __future__ import annotations
import json
+import csv
+import io
import os
import queue
import shutil
@@ -10,7 +12,7 @@ import socket
import subprocess
import threading
import time
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
from typing import Any, Generator
from flask import Blueprint, Response, jsonify, make_response, render_template, request
@@ -195,6 +197,40 @@ def _ensure_history_schema() -> None:
logger.warning("ADS-B schema check failed: %s", exc)
+MILITARY_ICAO_RANGES = [
+ (0xADF7C0, 0xADFFFF), # US
+ (0xAE0000, 0xAEFFFF), # US
+ (0x3F4000, 0x3F7FFF), # FR
+ (0x43C000, 0x43CFFF), # UK
+ (0x3D0000, 0x3DFFFF), # DE
+ (0x501C00, 0x501FFF), # NATO
+]
+
+MILITARY_CALLSIGN_PREFIXES = (
+ 'REACH', 'JAKE', 'DOOM', 'IRON', 'HAWK', 'VIPER', 'COBRA', 'THUNDER',
+ 'SHADOW', 'NIGHT', 'STEEL', 'GRIM', 'REAPER', 'BLADE', 'STRIKE',
+ 'RCH', 'CNV', 'MCH', 'EVAC', 'TOPCAT', 'ASCOT', 'RRR', 'HRK',
+ 'NAVY', 'ARMY', 'USAF', 'RAF', 'RCAF', 'RAAF', 'IAF', 'PAF',
+)
+
+
+def _is_military_aircraft(icao: str, callsign: str | None) -> bool:
+ """Return True if the ICAO hex or callsign indicates a military aircraft."""
+ try:
+ hex_val = int(icao, 16)
+ for start, end in MILITARY_ICAO_RANGES:
+ if start <= hex_val <= end:
+ return True
+ except (ValueError, TypeError):
+ pass
+ if callsign:
+ upper = callsign.upper().strip()
+ for prefix in MILITARY_CALLSIGN_PREFIXES:
+ if upper.startswith(prefix):
+ return True
+ return False
+
+
def _parse_int_param(value: str | None, default: int, min_value: int | None = None, max_value: int | None = None) -> int:
try:
parsed = int(value) if value is not None else default
@@ -207,6 +243,137 @@ def _parse_int_param(value: str | None, default: int, min_value: int | None = No
return parsed
+def _parse_iso_datetime(value: Any) -> datetime | None:
+ if not isinstance(value, str):
+ return None
+ cleaned = value.strip()
+ if not cleaned:
+ return None
+ if cleaned.endswith('Z'):
+ cleaned = f"{cleaned[:-1]}+00:00"
+ try:
+ parsed = datetime.fromisoformat(cleaned)
+ except ValueError:
+ return None
+ if parsed.tzinfo is None:
+ parsed = parsed.replace(tzinfo=timezone.utc)
+ return parsed.astimezone(timezone.utc)
+
+
+def _parse_export_scope(
+ args: Any,
+) -> tuple[str, int, datetime | None, datetime | None]:
+ scope = str(args.get('scope') or 'window').strip().lower()
+ if scope not in {'window', 'all', 'custom'}:
+ scope = 'window'
+ since_minutes = _parse_int_param(args.get('since_minutes'), 1440, 1, 525600)
+ start = _parse_iso_datetime(args.get('start'))
+ end = _parse_iso_datetime(args.get('end'))
+ if scope == 'custom' and (start is None or end is None or end <= start):
+ scope = 'window'
+ return scope, since_minutes, start, end
+
+
+def _add_time_filter(
+ *,
+ where_parts: list[str],
+ params: list[Any],
+ scope: str,
+ timestamp_field: str,
+ since_minutes: int,
+ start: datetime | None,
+ end: datetime | None,
+) -> None:
+ if scope == 'all':
+ return
+ if scope == 'custom' and start is not None and end is not None:
+ where_parts.append(f"{timestamp_field} >= %s AND {timestamp_field} < %s")
+ params.extend([start, end])
+ return
+ where_parts.append(f"{timestamp_field} >= NOW() - INTERVAL %s")
+ params.append(f'{since_minutes} minutes')
+
+
+def _serialize_export_value(value: Any) -> Any:
+ if isinstance(value, datetime):
+ return value.isoformat()
+ return value
+
+
+def _rows_to_serializable(rows: list[dict[str, Any]]) -> list[dict[str, Any]]:
+ return [{key: _serialize_export_value(value) for key, value in row.items()} for row in rows]
+
+
+def _build_export_csv(
+ *,
+ exported_at: str,
+ scope: str,
+ since_minutes: int | None,
+ icao: str,
+ search: str,
+ classification: str,
+ messages: list[dict[str, Any]],
+ snapshots: list[dict[str, Any]],
+ sessions: list[dict[str, Any]],
+ export_type: str,
+) -> str:
+ output = io.StringIO()
+ writer = csv.writer(output)
+
+ writer.writerow(['Exported At', exported_at])
+ writer.writerow(['Scope', scope])
+ if since_minutes is not None:
+ writer.writerow(['Since Minutes', since_minutes])
+ if icao:
+ writer.writerow(['ICAO Filter', icao])
+ if search:
+ writer.writerow(['Search Filter', search])
+ if classification != 'all':
+ writer.writerow(['Classification', classification])
+ writer.writerow([])
+
+ def write_section(title: str, rows: list[dict[str, Any]], columns: list[str]) -> None:
+ writer.writerow([title])
+ writer.writerow(columns)
+ for row in rows:
+ writer.writerow([_serialize_export_value(row.get(col)) for col in columns])
+ writer.writerow([])
+
+ if export_type in {'messages', 'all'}:
+ write_section(
+ 'Messages',
+ messages,
+ [
+ 'received_at', 'msg_time', 'logged_time', 'icao', 'msg_type', 'callsign',
+ 'altitude', 'speed', 'heading', 'vertical_rate', 'lat', 'lon', 'squawk',
+ 'session_id', 'aircraft_id', 'flight_id', 'source_host', 'raw_line',
+ ],
+ )
+
+ if export_type in {'snapshots', 'all'}:
+ write_section(
+ 'Snapshots',
+ snapshots,
+ [
+ 'captured_at', 'icao', 'callsign', 'registration', 'type_code', 'type_desc',
+ 'altitude', 'speed', 'heading', 'vertical_rate', 'lat', 'lon', 'squawk',
+ 'source_host',
+ ],
+ )
+
+ if export_type in {'sessions', 'all'}:
+ write_section(
+ 'Sessions',
+ sessions,
+ [
+ 'id', 'started_at', 'ended_at', 'device_index', 'sdr_type', 'remote_host',
+ 'remote_port', 'start_source', 'stop_source', 'started_by', 'stopped_by', 'notes',
+ ],
+ )
+
+ return output.getvalue()
+
+
def _broadcast_adsb_update(payload: dict[str, Any]) -> None:
"""Fan out a payload to all active ADS-B SSE subscribers."""
with _adsb_stream_subscribers_lock:
@@ -1069,7 +1236,7 @@ def adsb_history_summary():
return jsonify({'error': 'ADS-B history is disabled'}), 503
_ensure_history_schema()
- since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080)
+ since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080)
window = f'{since_minutes} minutes'
sql = """
@@ -1099,7 +1266,7 @@ def adsb_history_aircraft():
return jsonify({'error': 'ADS-B history is disabled'}), 503
_ensure_history_schema()
- since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080)
+ since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080)
limit = _parse_int_param(request.args.get('limit'), 200, 1, 2000)
search = (request.args.get('search') or '').strip()
window = f'{since_minutes} minutes'
@@ -1153,7 +1320,7 @@ def adsb_history_timeline():
if not icao:
return jsonify({'error': 'icao is required'}), 400
- since_minutes = _parse_int_param(request.args.get('since_minutes'), 60, 1, 10080)
+ since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080)
limit = _parse_int_param(request.args.get('limit'), 2000, 1, 20000)
window = f'{since_minutes} minutes'
@@ -1209,6 +1376,256 @@ def adsb_history_messages():
return jsonify({'error': 'History database unavailable'}), 503
+@adsb_bp.route('/history/export')
+def adsb_history_export():
+ """Export ADS-B history data in CSV or JSON format."""
+ if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE:
+ return jsonify({'error': 'ADS-B history is disabled'}), 503
+ _ensure_history_schema()
+
+ export_format = str(request.args.get('format') or 'csv').strip().lower()
+ export_type = str(request.args.get('type') or 'all').strip().lower()
+ if export_format not in {'csv', 'json'}:
+ return jsonify({'error': 'format must be csv or json'}), 400
+ if export_type not in {'messages', 'snapshots', 'sessions', 'all'}:
+ return jsonify({'error': 'type must be messages, snapshots, sessions, or all'}), 400
+
+ scope, since_minutes, start, end = _parse_export_scope(request.args)
+ icao = (request.args.get('icao') or '').strip().upper()
+ search = (request.args.get('search') or '').strip()
+ classification = str(request.args.get('classification') or 'all').strip().lower()
+ if classification not in {'all', 'military', 'civilian'}:
+ classification = 'all'
+ pattern = f'%{search}%'
+
+ snapshots: list[dict[str, Any]] = []
+ messages: list[dict[str, Any]] = []
+ sessions: list[dict[str, Any]] = []
+
+ def _filter_by_classification(
+ rows: list[dict[str, Any]],
+ icao_key: str = 'icao',
+ callsign_key: str = 'callsign',
+ ) -> list[dict[str, Any]]:
+ if classification == 'all':
+ return rows
+ want_military = classification == 'military'
+ return [
+ r for r in rows
+ if _is_military_aircraft(r.get(icao_key, ''), r.get(callsign_key)) == want_military
+ ]
+
+ try:
+ with _get_history_connection() as conn:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ if export_type in {'snapshots', 'all'}:
+ snapshot_where: list[str] = []
+ snapshot_params: list[Any] = []
+ _add_time_filter(
+ where_parts=snapshot_where,
+ params=snapshot_params,
+ scope=scope,
+ timestamp_field='captured_at',
+ since_minutes=since_minutes,
+ start=start,
+ end=end,
+ )
+ if icao:
+ snapshot_where.append("icao = %s")
+ snapshot_params.append(icao)
+ if search:
+ snapshot_where.append("(icao ILIKE %s OR callsign ILIKE %s OR registration ILIKE %s)")
+ snapshot_params.extend([pattern, pattern, pattern])
+
+ snapshot_sql = """
+ SELECT captured_at, icao, callsign, registration, type_code, type_desc,
+ altitude, speed, heading, vertical_rate, lat, lon, squawk, source_host
+ FROM adsb_snapshots
+ """
+ if snapshot_where:
+ snapshot_sql += " WHERE " + " AND ".join(snapshot_where)
+ snapshot_sql += " ORDER BY captured_at DESC"
+ cur.execute(snapshot_sql, tuple(snapshot_params))
+ snapshots = _filter_by_classification(cur.fetchall())
+
+ if export_type in {'messages', 'all'}:
+ message_where: list[str] = []
+ message_params: list[Any] = []
+ _add_time_filter(
+ where_parts=message_where,
+ params=message_params,
+ scope=scope,
+ timestamp_field='received_at',
+ since_minutes=since_minutes,
+ start=start,
+ end=end,
+ )
+ if icao:
+ message_where.append("icao = %s")
+ message_params.append(icao)
+ if search:
+ message_where.append("(icao ILIKE %s OR callsign ILIKE %s)")
+ message_params.extend([pattern, pattern])
+
+ message_sql = """
+ SELECT received_at, msg_time, logged_time, icao, msg_type, callsign,
+ altitude, speed, heading, vertical_rate, lat, lon, squawk,
+ session_id, aircraft_id, flight_id, source_host, raw_line
+ FROM adsb_messages
+ """
+ if message_where:
+ message_sql += " WHERE " + " AND ".join(message_where)
+ message_sql += " ORDER BY received_at DESC"
+ cur.execute(message_sql, tuple(message_params))
+ messages = _filter_by_classification(cur.fetchall())
+
+ if export_type in {'sessions', 'all'}:
+ session_where: list[str] = []
+ session_params: list[Any] = []
+ if scope == 'custom' and start is not None and end is not None:
+ session_where.append("COALESCE(ended_at, %s) >= %s AND started_at < %s")
+ session_params.extend([end, start, end])
+ elif scope == 'window':
+ session_where.append("COALESCE(ended_at, NOW()) >= NOW() - INTERVAL %s")
+ session_params.append(f'{since_minutes} minutes')
+
+ session_sql = """
+ SELECT id, started_at, ended_at, device_index, sdr_type, remote_host,
+ remote_port, start_source, stop_source, started_by, stopped_by, notes
+ FROM adsb_sessions
+ """
+ if session_where:
+ session_sql += " WHERE " + " AND ".join(session_where)
+ session_sql += " ORDER BY started_at DESC"
+ cur.execute(session_sql, tuple(session_params))
+ sessions = cur.fetchall()
+ except Exception as exc:
+ logger.warning("ADS-B history export failed: %s", exc)
+ return jsonify({'error': 'History database unavailable'}), 503
+
+ exported_at = datetime.now(timezone.utc).isoformat()
+ timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
+ filename_scope = 'all' if scope == 'all' else ('custom' if scope == 'custom' else f'{since_minutes}m')
+ filename = f'adsb_history_{export_type}_{filename_scope}_{timestamp}.{export_format}'
+
+ if export_format == 'json':
+ payload = {
+ 'exported_at': exported_at,
+ 'format': export_format,
+ 'type': export_type,
+ 'scope': scope,
+ 'since_minutes': None if scope != 'window' else since_minutes,
+ 'filters': {
+ 'icao': icao or None,
+ 'search': search or None,
+ 'classification': classification,
+ 'start': start.isoformat() if start else None,
+ 'end': end.isoformat() if end else None,
+ },
+ 'counts': {
+ 'messages': len(messages),
+ 'snapshots': len(snapshots),
+ 'sessions': len(sessions),
+ },
+ 'messages': _rows_to_serializable(messages),
+ 'snapshots': _rows_to_serializable(snapshots),
+ 'sessions': _rows_to_serializable(sessions),
+ }
+ response = Response(
+ json.dumps(payload, indent=2, default=str),
+ mimetype='application/json',
+ )
+ response.headers['Content-Disposition'] = f'attachment; filename={filename}'
+ return response
+
+ csv_data = _build_export_csv(
+ exported_at=exported_at,
+ scope=scope,
+ since_minutes=since_minutes if scope == 'window' else None,
+ icao=icao,
+ search=search,
+ classification=classification,
+ messages=messages,
+ snapshots=snapshots,
+ sessions=sessions,
+ export_type=export_type,
+ )
+ response = Response(csv_data, mimetype='text/csv')
+ response.headers['Content-Disposition'] = f'attachment; filename={filename}'
+ return response
+
+
+@adsb_bp.route('/history/prune', methods=['POST'])
+def adsb_history_prune():
+ """Delete ADS-B history for a selected time range or entire dataset."""
+ if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE:
+ return jsonify({'error': 'ADS-B history is disabled'}), 503
+ _ensure_history_schema()
+
+ payload = request.get_json(silent=True) or {}
+ mode = str(payload.get('mode') or 'range').strip().lower()
+ if mode not in {'range', 'all'}:
+ return jsonify({'error': 'mode must be range or all'}), 400
+
+ try:
+ with _get_history_connection() as conn:
+ with conn.cursor() as cur:
+ deleted = {'messages': 0, 'snapshots': 0}
+
+ if mode == 'all':
+ cur.execute("DELETE FROM adsb_messages")
+ deleted['messages'] = max(0, cur.rowcount or 0)
+ cur.execute("DELETE FROM adsb_snapshots")
+ deleted['snapshots'] = max(0, cur.rowcount or 0)
+ return jsonify({
+ 'status': 'ok',
+ 'mode': 'all',
+ 'deleted': deleted,
+ 'total_deleted': deleted['messages'] + deleted['snapshots'],
+ })
+
+ start = _parse_iso_datetime(payload.get('start'))
+ end = _parse_iso_datetime(payload.get('end'))
+ if start is None or end is None:
+ return jsonify({'error': 'start and end ISO datetime values are required'}), 400
+ if end <= start:
+ return jsonify({'error': 'end must be after start'}), 400
+ if end - start > timedelta(days=31):
+ return jsonify({'error': 'range cannot exceed 31 days'}), 400
+
+ cur.execute(
+ """
+ DELETE FROM adsb_messages
+ WHERE received_at >= %s
+ AND received_at < %s
+ """,
+ (start, end),
+ )
+ deleted['messages'] = max(0, cur.rowcount or 0)
+
+ cur.execute(
+ """
+ DELETE FROM adsb_snapshots
+ WHERE captured_at >= %s
+ AND captured_at < %s
+ """,
+ (start, end),
+ )
+ deleted['snapshots'] = max(0, cur.rowcount or 0)
+
+ return jsonify({
+ 'status': 'ok',
+ 'mode': 'range',
+ 'start': start.isoformat(),
+ 'end': end.isoformat(),
+ 'deleted': deleted,
+ 'total_deleted': deleted['messages'] + deleted['snapshots'],
+ })
+ except Exception as exc:
+ logger.warning("ADS-B history prune failed: %s", exc)
+ return jsonify({'error': 'History database unavailable'}), 503
+
+
# ============================================
# AIRCRAFT DATABASE MANAGEMENT
# ============================================
diff --git a/setup.sh b/setup.sh
index 7e2a590..7778156 100755
--- a/setup.sh
+++ b/setup.sh
@@ -957,7 +957,8 @@ install_satdump_from_source_debian() {
) &
progress_pid=$!
- if cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_GUI=OFF -DCMAKE_INSTALL_LIBDIR=lib .. >"$build_log" 2>&1 \
+ if cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_GUI=OFF -DCMAKE_INSTALL_LIBDIR=lib \
+ -DCMAKE_CXX_FLAGS="-Wno-template-body" .. >"$build_log" 2>&1 \
&& make -j "$(nproc)" >>"$build_log" 2>&1; then
kill $progress_pid 2>/dev/null; wait $progress_pid 2>/dev/null
$SUDO make install >/dev/null 2>&1
@@ -1089,8 +1090,9 @@ install_radiosonde_auto_rx() {
# --- dump1090 (Debian from source) ---
install_dump1090_from_source_debian() {
- info "dump1090 not available via APT. Building from source (required)..."
+ info "dump1090 not available via APT. Building from source (this may take a few minutes)..."
+ info "Installing build dependencies for dump1090..."
apt_install build-essential git pkg-config \
librtlsdr-dev libusb-1.0-0-dev \
libncurses-dev tcl-dev python3-dev
@@ -1127,6 +1129,7 @@ install_dump1090_from_source_debian() {
tail -20 "$build_log" | while IFS= read -r line; do warn " $line"; done
rm -rf "$tmp_dir/dump1090"
+ info "Cloning wiedehopf/readsb..."
git clone --depth 1 https://github.com/wiedehopf/readsb.git "$tmp_dir/dump1090" >/dev/null 2>&1 \
|| { fail "Failed to clone wiedehopf/readsb"; exit 1; }
@@ -1461,6 +1464,7 @@ install_tool_dump1090() {
$SUDO rm -f "$dump1090_path"
fi
if ! cmd_exists dump1090 && ! cmd_exists dump1090-mutability; then
+ info "Checking for dump1090 APT packages..."
apt_try_install_any dump1090-fa dump1090-mutability dump1090 || true
fi
if ! cmd_exists dump1090; then
@@ -1573,7 +1577,18 @@ install_tool_satdump() {
if [[ "$OS" == "macos" ]]; then
install_satdump_macos || warn "SatDump installation failed. Weather satellite decoding will not be available."
else
- install_satdump_from_source_debian || warn "SatDump build failed. Weather satellite decoding will not be available."
+ # Try system package first (available on Ubuntu 24.10+, Debian Trixie+)
+ if apt-cache show satdump >/dev/null 2>&1; then
+ info "SatDump is available as a system package — installing via apt..."
+ if apt_install satdump; then
+ ok "SatDump installed via apt."
+ else
+ warn "apt install failed — falling back to building from source..."
+ install_satdump_from_source_debian || warn "SatDump build failed. Weather satellite decoding will not be available."
+ fi
+ else
+ install_satdump_from_source_debian || warn "SatDump build failed. Weather satellite decoding will not be available."
+ fi
fi
else
warn "Skipping SatDump installation. You can install it later if needed."
diff --git a/static/css/adsb_history.css b/static/css/adsb_history.css
index 45b3512..653ddef 100644
--- a/static/css/adsb_history.css
+++ b/static/css/adsb_history.css
@@ -269,6 +269,21 @@ body {
min-width: 160px;
}
+.data-control-group {
+ min-width: 320px;
+}
+
+.data-actions {
+ display: flex;
+ align-items: center;
+ gap: 8px;
+ flex-wrap: wrap;
+}
+
+.data-actions input[type="date"] {
+ min-width: 150px;
+}
+
.primary-btn {
background: var(--accent-cyan);
border: none;
@@ -285,6 +300,31 @@ body {
box-shadow: 0 6px 14px rgba(74, 158, 255, 0.3);
}
+.primary-btn:disabled {
+ opacity: 0.55;
+ cursor: not-allowed;
+ transform: none;
+ box-shadow: none;
+}
+
+.warn-btn {
+ background: var(--accent-amber);
+ color: #0a0c10;
+}
+
+.warn-btn:hover {
+ box-shadow: 0 6px 14px rgba(214, 168, 94, 0.3);
+}
+
+.danger-btn {
+ background: #d84f63;
+ color: #f8fafc;
+}
+
+.danger-btn:hover {
+ box-shadow: 0 6px 14px rgba(216, 79, 99, 0.35);
+}
+
.status-pill {
font-family: var(--font-mono);
font-size: 11px;
@@ -296,6 +336,16 @@ body {
letter-spacing: 1px;
}
+.status-pill.ok {
+ border-color: var(--accent-green);
+ color: var(--accent-green);
+}
+
+.status-pill.error {
+ border-color: #d84f63;
+ color: #d84f63;
+}
+
.content-grid {
display: grid;
grid-template-columns: minmax(300px, 1fr) minmax(320px, 1fr);
@@ -364,6 +414,37 @@ body {
background: rgba(74, 158, 255, 0.1);
}
+.aircraft-row.military {
+ background: rgba(85, 107, 47, 0.12);
+}
+
+.aircraft-row.military:hover {
+ background: rgba(85, 107, 47, 0.22);
+}
+
+.mil-badge,
+.civ-badge {
+ display: inline-block;
+ font-size: 9px;
+ font-weight: 700;
+ letter-spacing: 0.8px;
+ padding: 2px 6px;
+ border-radius: 3px;
+ text-transform: uppercase;
+}
+
+.mil-badge {
+ background: rgba(85, 107, 47, 0.35);
+ color: #a3b86c;
+ border: 1px solid rgba(85, 107, 47, 0.6);
+}
+
+.civ-badge {
+ background: rgba(74, 158, 255, 0.15);
+ color: var(--text-dim);
+ border: 1px solid rgba(74, 158, 255, 0.25);
+}
+
.mono {
font-family: var(--font-mono);
}
@@ -614,6 +695,15 @@ body {
min-width: 100%;
}
+ .data-actions {
+ width: 100%;
+ }
+
+ .data-actions input[type="date"],
+ .data-actions .primary-btn {
+ width: 100%;
+ }
+
.panel {
min-height: 320px;
}
diff --git a/templates/adsb_history.html b/templates/adsb_history.html
index b7dee83..f88ea6b 100644
--- a/templates/adsb_history.html
+++ b/templates/adsb_history.html
@@ -87,9 +87,9 @@
@@ -97,6 +97,14 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
{% if history_enabled %}
@@ -128,6 +164,7 @@
| ICAO |
Callsign |
+ Class |
Alt |
Speed |
Last Seen |
@@ -135,7 +172,7 @@
- | No aircraft in this window |
+ No aircraft in this window |
@@ -285,6 +322,49 @@
const windowSelect = document.getElementById('windowSelect');
const searchInput = document.getElementById('searchInput');
const limitSelect = document.getElementById('limitSelect');
+ const classSelect = document.getElementById('classSelect');
+
+ const MILITARY_RANGES = [
+ { start: 0xADF7C0, end: 0xADFFFF },
+ { start: 0xAE0000, end: 0xAEFFFF },
+ { start: 0x3F4000, end: 0x3F7FFF },
+ { start: 0x43C000, end: 0x43CFFF },
+ { start: 0x3D0000, end: 0x3DFFFF },
+ { start: 0x501C00, end: 0x501FFF },
+ ];
+
+ const MILITARY_PREFIXES = [
+ 'REACH', 'JAKE', 'DOOM', 'IRON', 'HAWK', 'VIPER', 'COBRA', 'THUNDER',
+ 'SHADOW', 'NIGHT', 'STEEL', 'GRIM', 'REAPER', 'BLADE', 'STRIKE',
+ 'RCH', 'CNV', 'MCH', 'EVAC', 'TOPCAT', 'ASCOT', 'RRR', 'HRK',
+ 'NAVY', 'ARMY', 'USAF', 'RAF', 'RCAF', 'RAAF', 'IAF', 'PAF'
+ ];
+
+ function isMilitaryAircraft(icao, callsign) {
+ if (icao) {
+ const hex = parseInt(icao, 16);
+ for (const range of MILITARY_RANGES) {
+ if (hex >= range.start && hex <= range.end) return true;
+ }
+ }
+ if (callsign) {
+ const upper = callsign.toUpperCase().trim();
+ for (const prefix of MILITARY_PREFIXES) {
+ if (upper.startsWith(prefix)) return true;
+ }
+ }
+ return false;
+ }
+ const HISTORY_WINDOW_STORAGE_KEY = 'adsbHistoryWindowMinutes';
+ const VALID_HISTORY_WINDOWS = new Set(['15', '60', '360', '1440', '10080']);
+ const historyStatus = document.getElementById('historyStatus');
+ const pruneDateInput = document.getElementById('pruneDateInput');
+ const pruneDayBtn = document.getElementById('pruneDayBtn');
+ const clearHistoryBtn = document.getElementById('clearHistoryBtn');
+ const exportTypeSelect = document.getElementById('exportTypeSelect');
+ const exportFormatSelect = document.getElementById('exportFormatSelect');
+ const exportScopeSelect = document.getElementById('exportScopeSelect');
+ const exportBtn = document.getElementById('exportBtn');
let selectedIcao = '';
let sessionStartAt = null;
@@ -359,6 +439,197 @@
return `${hrs.toString().padStart(2, '0')}:${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
}
+ function toLocalDateInputValue(date) {
+ const localDate = new Date(date.getTime() - (date.getTimezoneOffset() * 60000));
+ return localDate.toISOString().slice(0, 10);
+ }
+
+ function setDefaultPruneDate() {
+ if (!pruneDateInput) {
+ return;
+ }
+ const yesterday = new Date();
+ yesterday.setDate(yesterday.getDate() - 1);
+ pruneDateInput.value = toLocalDateInputValue(yesterday);
+ }
+
+ function setHistoryStatus(text, state = 'neutral') {
+ if (!historyStatus) {
+ return;
+ }
+ historyStatus.textContent = text;
+ historyStatus.classList.remove('ok', 'error');
+ if (state === 'ok') {
+ historyStatus.classList.add('ok');
+ } else if (state === 'error') {
+ historyStatus.classList.add('error');
+ }
+ }
+
+ function setHistoryActionsDisabled(disabled) {
+ if (pruneDateInput) {
+ pruneDateInput.disabled = disabled;
+ }
+ if (pruneDayBtn) {
+ pruneDayBtn.disabled = disabled;
+ }
+ if (clearHistoryBtn) {
+ clearHistoryBtn.disabled = disabled;
+ }
+ if (exportTypeSelect) {
+ exportTypeSelect.disabled = disabled;
+ }
+ if (exportFormatSelect) {
+ exportFormatSelect.disabled = disabled;
+ }
+ if (exportScopeSelect) {
+ exportScopeSelect.disabled = disabled;
+ }
+ if (exportBtn) {
+ exportBtn.disabled = disabled;
+ }
+ }
+
+ function getDownloadFilename(response, fallback) {
+ const disposition = response.headers.get('Content-Disposition') || '';
+ const utfMatch = disposition.match(/filename\*=UTF-8''([^;]+)/i);
+ if (utfMatch && utfMatch[1]) {
+ try {
+ return decodeURIComponent(utfMatch[1]);
+ } catch {
+ return utfMatch[1];
+ }
+ }
+ const plainMatch = disposition.match(/filename=\"?([^\";]+)\"?/i);
+ if (plainMatch && plainMatch[1]) {
+ return plainMatch[1];
+ }
+ return fallback;
+ }
+
+ async function exportHistoryData() {
+ if (!historyEnabled) {
+ return;
+ }
+ const exportType = exportTypeSelect.value;
+ const exportFormat = exportFormatSelect.value;
+ const exportScope = exportScopeSelect.value;
+ const sinceMinutes = windowSelect.value;
+ const search = searchInput.value.trim();
+
+ const classification = classSelect.value;
+ const params = new URLSearchParams({
+ format: exportFormat,
+ type: exportType,
+ scope: exportScope,
+ });
+ if (exportScope === 'window') {
+ params.set('since_minutes', sinceMinutes);
+ }
+ if (search) {
+ params.set('search', search);
+ }
+ if (classification !== 'all') {
+ params.set('classification', classification);
+ }
+
+ const fallbackName = `adsb_history_${exportType}.${exportFormat}`;
+ const exportUrl = `/adsb/history/export?${params.toString()}`;
+
+ setHistoryActionsDisabled(true);
+ try {
+ const resp = await fetch(exportUrl, { credentials: 'same-origin' });
+ if (!resp.ok) {
+ const err = await resp.json().catch(() => ({}));
+ throw new Error(err.error || 'Export failed');
+ }
+ const blob = await resp.blob();
+ const filename = getDownloadFilename(resp, fallbackName);
+ const url = URL.createObjectURL(blob);
+ const a = document.createElement('a');
+ a.href = url;
+ a.download = filename;
+ document.body.appendChild(a);
+ a.click();
+ a.remove();
+ URL.revokeObjectURL(url);
+ setHistoryStatus(`Export ready: ${filename}`, 'ok');
+ } catch (error) {
+ setHistoryStatus(`Export failed: ${error.message || 'unknown error'}`, 'error');
+ } finally {
+ setHistoryActionsDisabled(false);
+ }
+ }
+
+ async function pruneHistory(payload, successPrefix) {
+ if (!historyEnabled) {
+ return;
+ }
+ setHistoryActionsDisabled(true);
+ try {
+ const resp = await fetch('/adsb/history/prune', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ credentials: 'same-origin',
+ body: JSON.stringify(payload)
+ });
+ const data = await resp.json().catch(() => ({}));
+ if (!resp.ok) {
+ throw new Error(data.error || 'Failed to prune history');
+ }
+ const deleted = data.deleted || {};
+ const messagesDeleted = Number(deleted.messages || 0);
+ const snapshotsDeleted = Number(deleted.snapshots || 0);
+ const totalDeleted = Number(data.total_deleted || (messagesDeleted + snapshotsDeleted));
+ setHistoryStatus(
+ `${successPrefix}: ${formatNumber(totalDeleted)} records removed`,
+ 'ok'
+ );
+ await refreshAll();
+ if (selectedIcao && !recentAircraft.some(row => row.icao === selectedIcao)) {
+ await selectAircraft('');
+ }
+ } catch (error) {
+ setHistoryStatus(`Cleanup failed: ${error.message || 'unknown error'}`, 'error');
+ } finally {
+ setHistoryActionsDisabled(false);
+ }
+ }
+
+ async function removeSelectedDay() {
+ if (!pruneDateInput || !pruneDateInput.value) {
+ setHistoryStatus('Select a day first', 'error');
+ return;
+ }
+ const dayStartLocal = new Date(`${pruneDateInput.value}T00:00:00`);
+ if (Number.isNaN(dayStartLocal.getTime())) {
+ setHistoryStatus('Invalid day selected', 'error');
+ return;
+ }
+ const dayEndLocal = new Date(dayStartLocal.getTime() + (24 * 60 * 60 * 1000));
+ const dayLabel = dayStartLocal.toLocaleDateString();
+ const confirmed = window.confirm(`Delete ADS-B history for ${dayLabel}? This cannot be undone.`);
+ if (!confirmed) {
+ return;
+ }
+ await pruneHistory(
+ {
+ mode: 'range',
+ start: dayStartLocal.toISOString(),
+ end: dayEndLocal.toISOString(),
+ },
+ `Removed ${dayLabel}`
+ );
+ }
+
+ async function clearAllHistory() {
+ const confirmed = window.confirm('Delete ALL ADS-B history records? This cannot be undone.');
+ if (!confirmed) {
+ return;
+ }
+ await pruneHistory({ mode: 'all' }, 'All history cleared');
+ }
+
async function loadSummary() {
const sinceMinutes = windowSelect.value;
const resp = await fetch(`/adsb/history/summary?since_minutes=${sinceMinutes}`);
@@ -379,26 +650,45 @@
const search = encodeURIComponent(searchInput.value.trim());
const resp = await fetch(`/adsb/history/aircraft?since_minutes=${sinceMinutes}&limit=${limit}&search=${search}`);
if (!resp.ok) {
- aircraftTableBody.innerHTML = '| History database unavailable |
';
+ aircraftTableBody.innerHTML = '| History database unavailable |
';
return;
}
const data = await resp.json();
- const rows = data.aircraft || [];
+ let rows = data.aircraft || [];
+
+ // Tag each row with military classification
+ rows.forEach(row => {
+ row._military = isMilitaryAircraft(row.icao, row.callsign);
+ });
+
+ // Apply classification filter
+ const classFilter = classSelect.value;
+ if (classFilter === 'military') {
+ rows = rows.filter(r => r._military);
+ } else if (classFilter === 'civilian') {
+ rows = rows.filter(r => !r._military);
+ }
+
recentAircraft = rows;
aircraftCount.textContent = rows.length;
if (!rows.length) {
- aircraftTableBody.innerHTML = '| No aircraft in this window |
';
+ aircraftTableBody.innerHTML = '| No aircraft in this window |
';
return;
}
- aircraftTableBody.innerHTML = rows.map(row => `
-
+ aircraftTableBody.innerHTML = rows.map(row => {
+ const badge = row._military
+ ? 'MIL'
+ : 'CIV';
+ return `
+
| ${row.icao} |
${valueOrDash(row.callsign)} |
+ ${badge} |
${valueOrDash(row.altitude)} |
${valueOrDash(row.speed)} |
${formatTime(row.last_seen)} |
-
- `).join('');
+ `;
+ }).join('');
document.querySelectorAll('.aircraft-row').forEach((row, index) => {
row.addEventListener('click', () => {
@@ -747,12 +1037,31 @@
}
refreshBtn.addEventListener('click', refreshAll);
- windowSelect.addEventListener('change', refreshAll);
+ windowSelect.addEventListener('change', () => {
+ const selectedWindow = windowSelect.value;
+ if (VALID_HISTORY_WINDOWS.has(selectedWindow)) {
+ localStorage.setItem(HISTORY_WINDOW_STORAGE_KEY, selectedWindow);
+ }
+ refreshAll();
+ });
limitSelect.addEventListener('change', refreshAll);
+ classSelect.addEventListener('change', refreshAll);
searchInput.addEventListener('input', () => {
clearTimeout(searchInput._debounce);
searchInput._debounce = setTimeout(refreshAll, 350);
});
+ pruneDayBtn.addEventListener('click', removeSelectedDay);
+ clearHistoryBtn.addEventListener('click', clearAllHistory);
+ exportBtn.addEventListener('click', exportHistoryData);
+
+ const savedWindow = localStorage.getItem(HISTORY_WINDOW_STORAGE_KEY);
+ if (savedWindow && VALID_HISTORY_WINDOWS.has(savedWindow)) {
+ windowSelect.value = savedWindow;
+ }
+ setDefaultPruneDate();
+ if (!historyEnabled) {
+ setHistoryActionsDisabled(true);
+ }
refreshAll();
loadSessionDevices();
diff --git a/tests/test_sdr_detection.py b/tests/test_sdr_detection.py
index 8c00e24..3a3f749 100644
--- a/tests/test_sdr_detection.py
+++ b/tests/test_sdr_detection.py
@@ -17,9 +17,9 @@ def _clear_detection_caches():
yield
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/rtl_test')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_rtlsdr_devices_filters_empty_serial_entries(mock_run, _mock_check_tool):
+def test_detect_rtlsdr_devices_filters_empty_serial_entries(mock_run, _mock_tool_path):
"""Ignore malformed rtl_test rows that have an empty SN field."""
mock_result = MagicMock()
mock_result.stdout = ""
@@ -40,9 +40,9 @@ def test_detect_rtlsdr_devices_filters_empty_serial_entries(mock_run, _mock_chec
assert devices[0].serial == "1"
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/rtl_test')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_rtlsdr_devices_uses_replace_decode_mode(mock_run, _mock_check_tool):
+def test_detect_rtlsdr_devices_uses_replace_decode_mode(mock_run, _mock_tool_path):
"""Run rtl_test with tolerant decoding for malformed output bytes."""
mock_result = MagicMock()
mock_result.stdout = ""
@@ -74,9 +74,9 @@ HACKRF_INFO_OUTPUT = (
)
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/hackrf_info')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_hackrf_from_stdout(mock_run, _mock_check_tool):
+def test_detect_hackrf_from_stdout(mock_run, _mock_tool_path):
"""Parse HackRF device info from stdout."""
mock_result = MagicMock()
mock_result.stdout = HACKRF_INFO_OUTPUT
@@ -92,9 +92,9 @@ def test_detect_hackrf_from_stdout(mock_run, _mock_check_tool):
assert devices[0].index == 0
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/hackrf_info')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_hackrf_from_stderr(mock_run, _mock_check_tool):
+def test_detect_hackrf_from_stderr(mock_run, _mock_tool_path):
"""Parse HackRF device info when output goes to stderr (newer firmware)."""
mock_result = MagicMock()
mock_result.stdout = ""
@@ -109,9 +109,9 @@ def test_detect_hackrf_from_stderr(mock_run, _mock_check_tool):
assert devices[0].serial == "0000000000000000a06063c8234e925f"
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/hackrf_info')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_hackrf_nonzero_exit_with_valid_output(mock_run, _mock_check_tool):
+def test_detect_hackrf_nonzero_exit_with_valid_output(mock_run, _mock_tool_path):
"""Parse HackRF info even when hackrf_info exits non-zero (device busy)."""
mock_result = MagicMock()
mock_result.returncode = 1
@@ -125,9 +125,9 @@ def test_detect_hackrf_nonzero_exit_with_valid_output(mock_run, _mock_check_tool
assert devices[0].name == "HackRF One"
-@patch('utils.sdr.detection._check_tool', return_value=True)
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/hackrf_info')
@patch('utils.sdr.detection.subprocess.run')
-def test_detect_hackrf_fallback_no_serial(mock_run, _mock_check_tool):
+def test_detect_hackrf_fallback_no_serial(mock_run, _mock_tool_path):
"""Fallback detection when serial is missing but 'Found HackRF' present."""
mock_result = MagicMock()
mock_result.stdout = "Found HackRF\nBoard ID Number: 2 (HackRF One)\n"
@@ -139,3 +139,24 @@ def test_detect_hackrf_fallback_no_serial(mock_run, _mock_check_tool):
assert len(devices) == 1
assert devices[0].name == "HackRF One"
assert devices[0].serial == "Unknown"
+
+
+@patch('utils.sdr.detection.get_tool_path', return_value='/usr/bin/hackrf_info')
+@patch('utils.sdr.detection.subprocess.run')
+def test_detect_hackrf_parses_legacy_serial_format(mock_run, _mock_tool_path):
+ """Accept legacy 'Serial Number' casing and spaced hex format."""
+ mock_result = MagicMock()
+ mock_result.stdout = (
+ "Found HackRF\n"
+ "Index: 0\n"
+ "Serial Number: 0x00000000 00000000 a06063c8 234e925f\n"
+ "Board ID Number: 3 (HackRF Pro)\n"
+ )
+ mock_result.stderr = ""
+ mock_run.return_value = mock_result
+
+ devices = detect_hackrf_devices()
+
+ assert len(devices) == 1
+ assert devices[0].name == "HackRF Pro"
+ assert devices[0].serial == "0000000000000000a06063c8234e925f"
diff --git a/tests/test_subghz.py b/tests/test_subghz.py
index 5de373f..2f52870 100644
--- a/tests/test_subghz.py
+++ b/tests/test_subghz.py
@@ -43,15 +43,16 @@ class TestSubGhzManagerInit:
assert status['mode'] == 'idle'
-class TestToolDetection:
+class TestToolDetection:
def test_check_hackrf_found(self, manager):
with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'):
assert manager.check_hackrf() is True
- def test_check_hackrf_not_found(self, manager):
- with patch('shutil.which', return_value=None):
- manager._hackrf_available = None # reset cache
- assert manager.check_hackrf() is False
+ def test_check_hackrf_not_found(self, manager):
+ with patch('shutil.which', return_value=None), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._hackrf_available = None # reset cache
+ assert manager.check_hackrf() is False
def test_check_rtl433_found(self, manager):
with patch('shutil.which', return_value='/usr/bin/rtl_433'):
@@ -62,13 +63,14 @@ class TestToolDetection:
assert manager.check_sweep() is True
-class TestReceive:
- def test_start_receive_no_hackrf(self, manager):
- with patch('shutil.which', return_value=None):
- manager._hackrf_available = None
- result = manager.start_receive(frequency_hz=433920000)
- assert result['status'] == 'error'
- assert 'not found' in result['message']
+class TestReceive:
+ def test_start_receive_no_hackrf(self, manager):
+ with patch('shutil.which', return_value=None), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._hackrf_available = None
+ result = manager.start_receive(frequency_hz=433920000)
+ assert result['status'] == 'error'
+ assert 'not found' in result['message']
def test_start_receive_success(self, manager):
mock_proc = MagicMock()
@@ -164,11 +166,12 @@ class TestTxSafety:
result = SubGhzManager.validate_tx_frequency(500000000) # 500 MHz
assert result is not None
- def test_transmit_no_hackrf(self, manager):
- with patch('shutil.which', return_value=None):
- manager._hackrf_available = None
- result = manager.transmit(capture_id='abc123')
- assert result['status'] == 'error'
+ def test_transmit_no_hackrf(self, manager):
+ with patch('shutil.which', return_value=None), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._hackrf_available = None
+ result = manager.transmit(capture_id='abc123')
+ assert result['status'] == 'error'
def test_transmit_capture_not_found(self, manager):
with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
@@ -464,12 +467,13 @@ class TestCaptureLibrary:
assert all(c.fingerprint_group_size == 2 for c in captures)
-class TestSweep:
- def test_start_sweep_no_tool(self, manager):
- with patch('shutil.which', return_value=None):
- manager._sweep_available = None
- result = manager.start_sweep()
- assert result['status'] == 'error'
+class TestSweep:
+ def test_start_sweep_no_tool(self, manager):
+ with patch('shutil.which', return_value=None), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._sweep_available = None
+ result = manager.start_sweep()
+ assert result['status'] == 'error'
def test_start_sweep_success(self, manager):
import time as _time
@@ -494,14 +498,15 @@ class TestSweep:
assert result['status'] == 'not_running'
-class TestDecode:
- def test_start_decode_no_hackrf(self, manager):
- with patch('shutil.which', return_value=None):
- manager._hackrf_available = None
- manager._rtl433_available = None
- result = manager.start_decode(frequency_hz=433920000)
- assert result['status'] == 'error'
- assert 'hackrf_transfer' in result['message']
+class TestDecode:
+ def test_start_decode_no_hackrf(self, manager):
+ with patch('shutil.which', return_value=None), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._hackrf_available = None
+ manager._rtl433_available = None
+ result = manager.start_decode(frequency_hz=433920000)
+ assert result['status'] == 'error'
+ assert 'hackrf_transfer' in result['message']
def test_start_decode_no_rtl433(self, manager):
def which_side_effect(name):
@@ -509,12 +514,13 @@ class TestDecode:
return '/usr/bin/hackrf_transfer'
return None
- with patch('shutil.which', side_effect=which_side_effect):
- manager._hackrf_available = None
- manager._rtl433_available = None
- result = manager.start_decode(frequency_hz=433920000)
- assert result['status'] == 'error'
- assert 'rtl_433' in result['message']
+ with patch('shutil.which', side_effect=which_side_effect), \
+ patch('utils.subghz.get_tool_path', return_value=None):
+ manager._hackrf_available = None
+ manager._rtl433_available = None
+ result = manager.start_decode(frequency_hz=433920000)
+ assert result['status'] == 'error'
+ assert 'rtl_433' in result['message']
def test_start_decode_success(self, manager):
mock_hackrf_proc = MagicMock()
@@ -537,9 +543,12 @@ class TestDecode:
return mock_hackrf_proc
return mock_rtl433_proc
- with patch('shutil.which', return_value='/usr/bin/tool'), \
- patch('subprocess.Popen', side_effect=popen_side_effect) as mock_popen, \
- patch('utils.subghz.register_process'):
+ def which_side_effect(name):
+ return f'/usr/bin/{name}'
+
+ with patch('shutil.which', side_effect=which_side_effect), \
+ patch('subprocess.Popen', side_effect=popen_side_effect) as mock_popen, \
+ patch('utils.subghz.register_process'):
import time as _time
manager._hackrf_available = None
manager._rtl433_available = None
@@ -556,16 +565,16 @@ class TestDecode:
# Two processes: hackrf_transfer + rtl_433
assert mock_popen.call_count == 2
- # Verify hackrf_transfer command
- hackrf_cmd = mock_popen.call_args_list[0][0][0]
- assert hackrf_cmd[0] == 'hackrf_transfer'
- assert '-r' in hackrf_cmd
-
- # Verify rtl_433 command
- rtl433_cmd = mock_popen.call_args_list[1][0][0]
- assert rtl433_cmd[0] == 'rtl_433'
- assert '-r' in rtl433_cmd
- assert 'cs8:-' in rtl433_cmd
+ # Verify hackrf_transfer command
+ hackrf_cmd = mock_popen.call_args_list[0][0][0]
+ assert os.path.basename(hackrf_cmd[0]) == 'hackrf_transfer'
+ assert '-r' in hackrf_cmd
+
+ # Verify rtl_433 command
+ rtl433_cmd = mock_popen.call_args_list[1][0][0]
+ assert os.path.basename(rtl433_cmd[0]) == 'rtl_433'
+ assert '-r' in rtl433_cmd
+ assert 'cs8:-' in rtl433_cmd
# Both processes tracked
assert manager._decode_hackrf_process is mock_hackrf_proc
diff --git a/utils/sdr/detection.py b/utils/sdr/detection.py
index 5f08725..c1fc0de 100644
--- a/utils/sdr/detection.py
+++ b/utils/sdr/detection.py
@@ -6,14 +6,15 @@ Detects RTL-SDR devices via rtl_test and other SDR hardware via SoapySDR.
from __future__ import annotations
-import logging
-import re
-import shutil
-import subprocess
-import time
-from typing import Optional
-
-from .base import SDRCapabilities, SDRDevice, SDRType
+import logging
+import re
+import subprocess
+import time
+from typing import Optional
+
+from utils.dependencies import get_tool_path
+
+from .base import SDRCapabilities, SDRDevice, SDRType
logger = logging.getLogger(__name__)
@@ -43,12 +44,7 @@ def _hackrf_probe_blocked() -> bool:
return False
-def _check_tool(name: str) -> bool:
- """Check if a tool is available in PATH."""
- return shutil.which(name) is not None
-
-
-def _get_capabilities_for_type(sdr_type: SDRType) -> SDRCapabilities:
+def _get_capabilities_for_type(sdr_type: SDRType) -> SDRCapabilities:
"""Get default capabilities for an SDR type."""
# Import here to avoid circular imports
from .rtlsdr import RTLSDRCommandBuilder
@@ -100,7 +96,7 @@ def _driver_to_sdr_type(driver: str) -> Optional[SDRType]:
return mapping.get(driver.lower())
-def detect_rtlsdr_devices() -> list[SDRDevice]:
+def detect_rtlsdr_devices() -> list[SDRDevice]:
"""
Detect RTL-SDR devices using rtl_test.
@@ -109,9 +105,10 @@ def detect_rtlsdr_devices() -> list[SDRDevice]:
"""
devices: list[SDRDevice] = []
- if not _check_tool('rtl_test'):
- logger.debug("rtl_test not found, skipping RTL-SDR detection")
- return devices
+ rtl_test_path = get_tool_path('rtl_test')
+ if not rtl_test_path:
+ logger.debug("rtl_test not found, skipping RTL-SDR detection")
+ return devices
try:
import os
@@ -122,11 +119,11 @@ def detect_rtlsdr_devices() -> list[SDRDevice]:
lib_paths = ['/usr/local/lib', '/opt/homebrew/lib']
current_ld = env.get('DYLD_LIBRARY_PATH', '')
env['DYLD_LIBRARY_PATH'] = ':'.join(lib_paths + [current_ld] if current_ld else lib_paths)
- result = subprocess.run(
- ['rtl_test', '-t'],
- capture_output=True,
- text=True,
- encoding='utf-8',
+ result = subprocess.run(
+ [rtl_test_path, '-t'],
+ capture_output=True,
+ text=True,
+ encoding='utf-8',
errors='replace',
timeout=5,
env=env
@@ -176,13 +173,14 @@ def detect_rtlsdr_devices() -> list[SDRDevice]:
return devices
-def _find_soapy_util() -> str | None:
- """Find SoapySDR utility command (name varies by distribution)."""
- # Try different command names used across distributions
- for cmd in ['SoapySDRUtil', 'soapy_sdr_util', 'soapysdr-util']:
- if _check_tool(cmd):
- return cmd
- return None
+def _find_soapy_util() -> str | None:
+ """Find SoapySDR utility command (name varies by distribution)."""
+ # Try different command names used across distributions
+ for cmd in ['SoapySDRUtil', 'soapy_sdr_util', 'soapysdr-util']:
+ tool_path = get_tool_path(cmd)
+ if tool_path:
+ return tool_path
+ return None
def _get_soapy_env() -> dict:
@@ -324,7 +322,7 @@ def _add_soapy_device(
))
-def detect_hackrf_devices() -> list[SDRDevice]:
+def detect_hackrf_devices() -> list[SDRDevice]:
"""
Detect HackRF devices using native hackrf_info tool.
@@ -343,33 +341,46 @@ def detect_hackrf_devices() -> list[SDRDevice]:
devices: list[SDRDevice] = []
- if not _check_tool('hackrf_info'):
- _hackrf_cache = devices
- _hackrf_cache_ts = now
- return devices
+ hackrf_info_path = get_tool_path('hackrf_info')
+ if not hackrf_info_path:
+ _hackrf_cache = devices
+ _hackrf_cache_ts = now
+ return devices
try:
- result = subprocess.run(
- ['hackrf_info'],
- capture_output=True,
- text=True,
- timeout=5
- )
+ result = subprocess.run(
+ [hackrf_info_path],
+ capture_output=True,
+ text=True,
+ timeout=5
+ )
# Combine stdout + stderr: newer firmware may print to stderr,
# and hackrf_info may exit non-zero when device is briefly busy
# but still output valid info.
- output = result.stdout + result.stderr
-
- # Parse hackrf_info output
- # Extract board name from "Board ID Number: X (Name)" and serial
- from .hackrf import HackRFCommandBuilder
-
- serial_pattern = r'Serial number:\s*(\S+)'
- board_pattern = r'Board ID Number:\s*\d+\s*\(([^)]+)\)'
-
- serials_found = re.findall(serial_pattern, output)
- boards_found = re.findall(board_pattern, output)
+ output = f"{result.stdout or ''}\n{result.stderr or ''}"
+
+ # Parse hackrf_info output
+ # Extract board name from "Board ID Number: X (Name)" and serial
+ from .hackrf import HackRFCommandBuilder
+
+ serial_pattern = re.compile(
+ r'^\s*Serial\s+number:\s*(.+)$',
+ re.IGNORECASE | re.MULTILINE,
+ )
+ board_pattern = re.compile(
+ r'Board\s+ID\s+Number:\s*\d+\s*\(([^)]+)\)',
+ re.IGNORECASE,
+ )
+
+ serials_found = []
+ for raw in serial_pattern.findall(output):
+ # Normalise legacy formats like "0x1234 5678" to plain hex.
+ serial = re.sub(r'0x', '', raw, flags=re.IGNORECASE)
+ serial = re.sub(r'[^0-9A-Fa-f]', '', serial)
+ if serial:
+ serials_found.append(serial)
+ boards_found = board_pattern.findall(output)
for i, serial in enumerate(serials_found):
board_name = boards_found[i] if i < len(boards_found) else 'HackRF'
@@ -383,11 +394,11 @@ def detect_hackrf_devices() -> list[SDRDevice]:
))
# Fallback: check if any HackRF found without serial
- if not devices and 'Found HackRF' in output:
- board_match = re.search(board_pattern, output)
- board_name = board_match.group(1) if board_match else 'HackRF'
- devices.append(SDRDevice(
- sdr_type=SDRType.HACKRF,
+ if not devices and re.search(r'Found\s+HackRF', output, re.IGNORECASE):
+ board_match = board_pattern.search(output)
+ board_name = board_match.group(1) if board_match else 'HackRF'
+ devices.append(SDRDevice(
+ sdr_type=SDRType.HACKRF,
index=0,
name=board_name,
serial='Unknown',
@@ -403,7 +414,7 @@ def detect_hackrf_devices() -> list[SDRDevice]:
return devices
-def probe_rtlsdr_device(device_index: int) -> str | None:
+def probe_rtlsdr_device(device_index: int) -> str | None:
"""Probe whether an RTL-SDR device is available at the USB level.
Runs a quick ``rtl_test`` invocation targeting a single device to
@@ -417,10 +428,11 @@ def probe_rtlsdr_device(device_index: int) -> str | None:
An error message string if the device cannot be opened,
or ``None`` if the device is available.
"""
- if not _check_tool('rtl_test'):
- # Can't probe without rtl_test — let the caller proceed and
- # surface errors from the actual decoder process instead.
- return None
+ rtl_test_path = get_tool_path('rtl_test')
+ if not rtl_test_path:
+ # Can't probe without rtl_test — let the caller proceed and
+ # surface errors from the actual decoder process instead.
+ return None
try:
import os
@@ -437,11 +449,11 @@ def probe_rtlsdr_device(device_index: int) -> str | None:
# Use Popen with early termination instead of run() with full timeout.
# rtl_test prints device info to stderr quickly, then keeps running
# its test loop. We kill it as soon as we see success or failure.
- proc = subprocess.Popen(
- ['rtl_test', '-d', str(device_index), '-t'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- text=True,
+ proc = subprocess.Popen(
+ [rtl_test_path, '-d', str(device_index), '-t'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
env=env,
)
diff --git a/utils/subghz.py b/utils/subghz.py
index 99686d9..422cc95 100644
--- a/utils/subghz.py
+++ b/utils/subghz.py
@@ -21,11 +21,12 @@ from datetime import datetime, timezone
from pathlib import Path
from typing import BinaryIO, Callable
-import numpy as np
-
-from utils.logging import get_logger
-from utils.process import register_process, safe_terminate, unregister_process
-from utils.constants import (
+import numpy as np
+
+from utils.dependencies import get_tool_path
+from utils.logging import get_logger
+from utils.process import register_process, safe_terminate, unregister_process
+from utils.constants import (
SUBGHZ_TX_ALLOWED_BANDS,
SUBGHZ_FREQ_MIN_MHZ,
SUBGHZ_FREQ_MAX_MHZ,
@@ -187,19 +188,23 @@ class SubGhzManager:
except Exception as e:
logger.error(f"Error in SubGHz callback: {e}")
- # ------------------------------------------------------------------
- # Tool detection
- # ------------------------------------------------------------------
-
- def check_hackrf(self) -> bool:
- if self._hackrf_available is None:
- self._hackrf_available = shutil.which('hackrf_transfer') is not None
- return self._hackrf_available
-
- def check_hackrf_info(self) -> bool:
- if self._hackrf_info_available is None:
- self._hackrf_info_available = shutil.which('hackrf_info') is not None
- return self._hackrf_info_available
+ # ------------------------------------------------------------------
+ # Tool detection
+ # ------------------------------------------------------------------
+
+ def _resolve_tool(self, name: str) -> str | None:
+ """Resolve executable path via PATH first, then platform-aware fallbacks."""
+ return shutil.which(name) or get_tool_path(name)
+
+ def check_hackrf(self) -> bool:
+ if self._hackrf_available is None:
+ self._hackrf_available = self._resolve_tool('hackrf_transfer') is not None
+ return self._hackrf_available
+
+ def check_hackrf_info(self) -> bool:
+ if self._hackrf_info_available is None:
+ self._hackrf_info_available = self._resolve_tool('hackrf_info') is not None
+ return self._hackrf_info_available
def check_hackrf_device(self) -> bool | None:
"""Return True if a HackRF device is detected, False if not, or None if detection unavailable."""
@@ -228,15 +233,15 @@ class SubGhzManager:
return 'HackRF device not detected'
return None
- def check_rtl433(self) -> bool:
- if self._rtl433_available is None:
- self._rtl433_available = shutil.which('rtl_433') is not None
- return self._rtl433_available
-
- def check_sweep(self) -> bool:
- if self._sweep_available is None:
- self._sweep_available = shutil.which('hackrf_sweep') is not None
- return self._sweep_available
+ def check_rtl433(self) -> bool:
+ if self._rtl433_available is None:
+ self._rtl433_available = self._resolve_tool('rtl_433') is not None
+ return self._rtl433_available
+
+ def check_sweep(self) -> bool:
+ if self._sweep_available is None:
+ self._sweep_available = self._resolve_tool('hackrf_sweep') is not None
+ return self._sweep_available
# ------------------------------------------------------------------
# Status
@@ -307,23 +312,24 @@ class SubGhzManager:
# RECEIVE (IQ capture via hackrf_transfer -r)
# ------------------------------------------------------------------
- def start_receive(
- self,
- frequency_hz: int,
- sample_rate: int = 2000000,
+ def start_receive(
+ self,
+ frequency_hz: int,
+ sample_rate: int = 2000000,
lna_gain: int = 32,
vga_gain: int = 20,
trigger_enabled: bool = False,
trigger_pre_ms: int = 350,
- trigger_post_ms: int = 700,
- device_serial: str | None = None,
- ) -> dict:
- # Pre-lock: tool availability & device detection (blocking I/O)
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
+ trigger_post_ms: int = 700,
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ hackrf_transfer_path = self._resolve_tool('hackrf_transfer')
+ if not hackrf_transfer_path:
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
with self._lock:
if self.active_mode != 'idle':
@@ -339,11 +345,11 @@ class SubGhzManager:
basename = f"{freq_mhz:.3f}MHz_{ts}"
iq_file = self._captures_dir / f"{basename}.iq"
- cmd = [
- 'hackrf_transfer',
- '-r', str(iq_file),
- '-f', str(frequency_hz),
- '-s', str(sample_rate),
+ cmd = [
+ hackrf_transfer_path,
+ '-r', str(iq_file),
+ '-f', str(frequency_hz),
+ '-s', str(sample_rate),
'-l', str(lna_gain),
'-g', str(vga_gain),
]
@@ -1272,23 +1278,25 @@ class SubGhzManager:
# DECODE (hackrf_transfer piped to rtl_433)
# ------------------------------------------------------------------
- def start_decode(
- self,
- frequency_hz: int,
- sample_rate: int = 2_000_000,
+ def start_decode(
+ self,
+ frequency_hz: int,
+ sample_rate: int = 2_000_000,
lna_gain: int = 32,
vga_gain: int = 20,
- decode_profile: str = 'weather',
- device_serial: str | None = None,
- ) -> dict:
- # Pre-lock: tool availability & device detection (blocking I/O)
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- if not self.check_rtl433():
- return {'status': 'error', 'message': 'rtl_433 not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
+ decode_profile: str = 'weather',
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ hackrf_transfer_path = self._resolve_tool('hackrf_transfer')
+ if not hackrf_transfer_path:
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ rtl433_path = self._resolve_tool('rtl_433')
+ if not rtl433_path:
+ return {'status': 'error', 'message': 'rtl_433 not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
with self._lock:
if self.active_mode != 'idle':
@@ -1299,25 +1307,25 @@ class SubGhzManager:
requested_sample_rate = int(sample_rate)
stable_sample_rate = max(2_000_000, min(2_000_000, requested_sample_rate))
- # Build hackrf_transfer command (producer: raw IQ to stdout)
- hackrf_cmd = [
- 'hackrf_transfer',
- '-r', '-',
- '-f', str(frequency_hz),
- '-s', str(stable_sample_rate),
+ # Build hackrf_transfer command (producer: raw IQ to stdout)
+ hackrf_cmd = [
+ hackrf_transfer_path,
+ '-r', '-',
+ '-f', str(frequency_hz),
+ '-s', str(stable_sample_rate),
'-l', str(max(SUBGHZ_LNA_GAIN_MIN, min(SUBGHZ_LNA_GAIN_MAX, lna_gain))),
'-g', str(max(SUBGHZ_VGA_GAIN_MIN, min(SUBGHZ_VGA_GAIN_MAX, vga_gain))),
]
if device_serial:
hackrf_cmd.extend(['-d', device_serial])
- # Build rtl_433 command (consumer: reads IQ from stdin)
- # Feed signed 8-bit complex IQ directly from hackrf_transfer.
- rtl433_cmd = [
- 'rtl_433',
- '-r', 'cs8:-',
- '-s', str(stable_sample_rate),
- '-f', str(frequency_hz),
+ # Build rtl_433 command (consumer: reads IQ from stdin)
+ # Feed signed 8-bit complex IQ directly from hackrf_transfer.
+ rtl433_cmd = [
+ rtl433_path,
+ '-r', 'cs8:-',
+ '-s', str(stable_sample_rate),
+ '-f', str(frequency_hz),
'-F', 'json',
'-F', 'log',
'-M', 'level',
@@ -1936,21 +1944,22 @@ class SubGhzManager:
except OSError as exc:
logger.debug(f"Failed to remove TX temp file {path}: {exc}")
- def transmit(
- self,
- capture_id: str,
- tx_gain: int = 20,
+ def transmit(
+ self,
+ capture_id: str,
+ tx_gain: int = 20,
max_duration: int = 10,
start_seconds: float | None = None,
- duration_seconds: float | None = None,
- device_serial: str | None = None,
- ) -> dict:
- # Pre-lock: tool availability & device detection (blocking I/O)
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
+ duration_seconds: float | None = None,
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ hackrf_transfer_path = self._resolve_tool('hackrf_transfer')
+ if not hackrf_transfer_path:
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
# Pre-lock: capture lookup, validation, and segment I/O (can be large)
capture = self._load_capture(capture_id)
@@ -2046,14 +2055,14 @@ class SubGhzManager:
# Clear any orphaned temp segment from a previous TX attempt.
self._cleanup_tx_temp_file()
- if segment_path_for_cleanup:
- self._tx_temp_file = segment_path_for_cleanup
-
- cmd = [
- 'hackrf_transfer',
- '-t', str(tx_path),
- '-f', str(capture.frequency_hz),
- '-s', str(capture.sample_rate),
+ if segment_path_for_cleanup:
+ self._tx_temp_file = segment_path_for_cleanup
+
+ cmd = [
+ hackrf_transfer_path,
+ '-t', str(tx_path),
+ '-f', str(capture.frequency_hz),
+ '-s', str(capture.sample_rate),
'-x', str(tx_gain),
]
if device_serial:
@@ -2183,19 +2192,20 @@ class SubGhzManager:
# SWEEP (hackrf_sweep)
# ------------------------------------------------------------------
- def start_sweep(
- self,
- freq_start_mhz: float = 300.0,
- freq_end_mhz: float = 928.0,
- bin_width: int = 100000,
- device_serial: str | None = None,
- ) -> dict:
- # Pre-lock: tool availability & device detection (blocking I/O)
- if not self.check_sweep():
- return {'status': 'error', 'message': 'hackrf_sweep not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
+ def start_sweep(
+ self,
+ freq_start_mhz: float = 300.0,
+ freq_end_mhz: float = 928.0,
+ bin_width: int = 100000,
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ hackrf_sweep_path = self._resolve_tool('hackrf_sweep')
+ if not hackrf_sweep_path:
+ return {'status': 'error', 'message': 'hackrf_sweep not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
# Wait for previous sweep thread to exit (blocking) before lock
if self._sweep_thread and self._sweep_thread.is_alive():
@@ -2204,14 +2214,14 @@ class SubGhzManager:
return {'status': 'error', 'message': 'Previous sweep still shutting down'}
with self._lock:
- if self.active_mode != 'idle':
- return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
-
- cmd = [
- 'hackrf_sweep',
- '-f', f'{int(freq_start_mhz)}:{int(freq_end_mhz)}',
- '-w', str(bin_width),
- ]
+ if self.active_mode != 'idle':
+ return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
+
+ cmd = [
+ hackrf_sweep_path,
+ '-f', f'{int(freq_start_mhz)}:{int(freq_end_mhz)}',
+ '-w', str(bin_width),
+ ]
if device_serial:
cmd.extend(['-d', device_serial])