From 86e4ba7e29e76f32090a42f95f44d46795b1b97d Mon Sep 17 00:00:00 2001 From: Smittix Date: Sat, 7 Feb 2026 18:29:58 +0000 Subject: [PATCH] Add alerts/recording, WiFi/TSCM updates, optimize waterfall --- config.py | 21 +- intercept_agent.py | 65 +++- routes/__init__.py | 12 +- routes/acars.py | 13 +- routes/adsb.py | 5 + routes/ais.py | 5 + routes/alerts.py | 76 +++++ routes/aprs.py | 13 +- routes/bluetooth.py | 19 +- routes/bluetooth_v2.py | 120 ++++--- routes/dmr.py | 5 + routes/dsc.py | 5 + routes/listening_post.py | 93 ++++-- routes/pager.py | 15 +- routes/recordings.py | 109 ++++++ routes/rtlamr.py | 5 + routes/sensor.py | 13 +- routes/sstv.py | 15 +- routes/sstv_general.py | 5 + routes/tscm.py | 17 +- routes/wifi.py | 133 ++++++-- routes/wifi_v2.py | 78 +++-- static/css/index.css | 17 + static/css/settings.css | 41 +++ static/js/core/alerts.js | 194 +++++++++++ static/js/core/recordings.js | 136 ++++++++ static/js/core/settings-manager.js | 8 + static/js/modes/bluetooth.js | 107 ++++-- static/js/modes/listening-post.js | 62 +++- static/js/modes/wifi.js | 102 ++++-- templates/index.html | 67 +++- templates/partials/modes/wifi.html | 17 +- templates/partials/settings-modal.html | 79 +++++ utils/alerts.py | 443 +++++++++++++++++++++++++ utils/bluetooth/models.py | 23 +- utils/database.py | 202 +++++++---- utils/event_pipeline.py | 29 ++ utils/recording.py | 222 +++++++++++++ utils/tscm/advanced.py | 96 ++++-- utils/tscm/baseline.py | 245 +++++++++----- utils/tscm/detector.py | 20 +- utils/wifi/scanner.py | 47 +-- 42 files changed, 2514 insertions(+), 485 deletions(-) create mode 100644 routes/alerts.py create mode 100644 routes/recordings.py create mode 100644 static/js/core/alerts.js create mode 100644 static/js/core/recordings.js create mode 100644 utils/alerts.py create mode 100644 utils/event_pipeline.py create mode 100644 utils/recording.py diff --git a/config.py b/config.py index e312af3..269c81b 100644 --- a/config.py +++ b/config.py @@ -204,14 +204,19 @@ 
SATELLITE_UPDATE_INTERVAL = _get_env_int('SATELLITE_UPDATE_INTERVAL', 30) SATELLITE_TRAJECTORY_POINTS = _get_env_int('SATELLITE_TRAJECTORY_POINTS', 30) SATELLITE_ORBIT_MINUTES = _get_env_int('SATELLITE_ORBIT_MINUTES', 45) -# Update checking -GITHUB_REPO = _get_env('GITHUB_REPO', 'smittix/intercept') -UPDATE_CHECK_ENABLED = _get_env_bool('UPDATE_CHECK_ENABLED', True) -UPDATE_CHECK_INTERVAL_HOURS = _get_env_int('UPDATE_CHECK_INTERVAL_HOURS', 6) - -# Admin credentials -ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin') -ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin') +# Update checking +GITHUB_REPO = _get_env('GITHUB_REPO', 'smittix/intercept') +UPDATE_CHECK_ENABLED = _get_env_bool('UPDATE_CHECK_ENABLED', True) +UPDATE_CHECK_INTERVAL_HOURS = _get_env_int('UPDATE_CHECK_INTERVAL_HOURS', 6) + +# Alerting +ALERT_WEBHOOK_URL = _get_env('ALERT_WEBHOOK_URL', '') +ALERT_WEBHOOK_SECRET = _get_env('ALERT_WEBHOOK_SECRET', '') +ALERT_WEBHOOK_TIMEOUT = _get_env_int('ALERT_WEBHOOK_TIMEOUT', 5) + +# Admin credentials +ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin') +ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin') def configure_logging() -> None: """Configure application logging.""" diff --git a/intercept_agent.py b/intercept_agent.py index 7405e2b..ac635a3 100644 --- a/intercept_agent.py +++ b/intercept_agent.py @@ -1542,9 +1542,10 @@ class ModeManager: def _start_wifi(self, params: dict) -> dict: """Start WiFi scanning using Intercept's UnifiedWiFiScanner.""" interface = params.get('interface') - channel = params.get('channel') - band = params.get('band', 'abg') - scan_type = params.get('scan_type', 'deep') + channel = params.get('channel') + channels = params.get('channels') + band = params.get('band', 'abg') + scan_type = params.get('scan_type', 'deep') # Handle quick scan - returns results synchronously if scan_type == 'quick': @@ -1573,8 +1574,21 @@ class ModeManager: else: scan_band = 'all' - # Start deep scan - if scanner.start_deep_scan(interface=interface, 
band=scan_band, channel=channel): + channel_list = None + if channels: + if isinstance(channels, str): + channel_list = [c.strip() for c in channels.split(',') if c.strip()] + elif isinstance(channels, (list, tuple, set)): + channel_list = list(channels) + else: + channel_list = [channels] + try: + channel_list = [int(c) for c in channel_list] + except (TypeError, ValueError): + return {'status': 'error', 'message': 'Invalid channels'} + + # Start deep scan + if scanner.start_deep_scan(interface=interface, band=scan_band, channel=channel, channels=channel_list): # Start thread to sync data to agent's dictionaries thread = threading.Thread( target=self._wifi_data_sync, @@ -1593,12 +1607,12 @@ class ModeManager: else: return {'status': 'error', 'message': scanner.get_status().error or 'Failed to start deep scan'} - except ImportError: - # Fallback to direct airodump-ng - return self._start_wifi_fallback(interface, channel, band) - except Exception as e: - logger.error(f"WiFi scanner error: {e}") - return {'status': 'error', 'message': str(e)} + except ImportError: + # Fallback to direct airodump-ng + return self._start_wifi_fallback(interface, channel, band, channels) + except Exception as e: + logger.error(f"WiFi scanner error: {e}") + return {'status': 'error', 'message': str(e)} def _wifi_data_sync(self, scanner): """Sync WiFi scanner data to agent's data structures.""" @@ -1632,8 +1646,14 @@ class ModeManager: if hasattr(self, '_wifi_scanner_instance') and self._wifi_scanner_instance: self._wifi_scanner_instance.stop_deep_scan() - def _start_wifi_fallback(self, interface: str | None, channel: int | None, band: str) -> dict: - """Fallback WiFi deep scan using airodump-ng directly.""" + def _start_wifi_fallback( + self, + interface: str | None, + channel: int | None, + band: str, + channels: list[int] | str | None = None, + ) -> dict: + """Fallback WiFi deep scan using airodump-ng directly.""" if not interface: return {'status': 'error', 'message': 'WiFi interface 
required'} @@ -1660,8 +1680,23 @@ class ModeManager: cmd = [airodump_path, '-w', csv_path, '--output-format', output_formats, '--band', band] if gps_manager.is_running: cmd.append('--gpsd') - if channel: - cmd.extend(['-c', str(channel)]) + channel_list = None + if channels: + if isinstance(channels, str): + channel_list = [c.strip() for c in channels.split(',') if c.strip()] + elif isinstance(channels, (list, tuple, set)): + channel_list = list(channels) + else: + channel_list = [channels] + try: + channel_list = [int(c) for c in channel_list] + except (TypeError, ValueError): + return {'status': 'error', 'message': 'Invalid channels'} + + if channel_list: + cmd.extend(['-c', ','.join(str(c) for c in channel_list)]) + elif channel: + cmd.extend(['-c', str(channel)]) cmd.append(interface) try: diff --git a/routes/__init__.py b/routes/__init__.py index b4426bd..448d771 100644 --- a/routes/__init__.py +++ b/routes/__init__.py @@ -27,8 +27,10 @@ def register_blueprints(app): from .updater import updater_bp from .sstv import sstv_bp from .sstv_general import sstv_general_bp - from .dmr import dmr_bp - from .websdr import websdr_bp + from .dmr import dmr_bp + from .websdr import websdr_bp + from .alerts import alerts_bp + from .recordings import recordings_bp app.register_blueprint(pager_bp) app.register_blueprint(sensor_bp) @@ -55,8 +57,10 @@ def register_blueprints(app): app.register_blueprint(updater_bp) # GitHub update checking app.register_blueprint(sstv_bp) # ISS SSTV decoder app.register_blueprint(sstv_general_bp) # General terrestrial SSTV - app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice - app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR + app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice + app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR + app.register_blueprint(alerts_bp) # Cross-mode alerts + app.register_blueprint(recordings_bp) # Session recordings # Initialize TSCM state with queue and lock from app import app as 
app_module diff --git a/routes/acars.py b/routes/acars.py index 1615293..47f572a 100644 --- a/routes/acars.py +++ b/routes/acars.py @@ -20,7 +20,8 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.logging import sensor_logger as logger from utils.validation import validate_device_index, validate_gain, validate_ppm -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.constants import ( PROCESS_TERMINATE_TIMEOUT, SSE_KEEPALIVE_INTERVAL, @@ -391,9 +392,13 @@ def stream_acars() -> Response: while True: try: - msg = app_module.acars_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - yield format_sse(msg) + msg = app_module.acars_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('acars', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL: diff --git a/routes/adsb.py b/routes/adsb.py index 2f34e1d..0239c4a 100644 --- a/routes/adsb.py +++ b/routes/adsb.py @@ -43,6 +43,7 @@ from utils.validation import ( validate_rtl_tcp_host, validate_rtl_tcp_port ) from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.sdr import SDRFactory, SDRType from utils.constants import ( ADSB_SBS_PORT, @@ -843,6 +844,10 @@ def stream_adsb(): try: msg = app_module.adsb_queue.get(timeout=SSE_QUEUE_TIMEOUT) last_keepalive = time.time() + try: + process_event('adsb', msg, msg.get('type')) + except Exception: + pass yield format_sse(msg) except queue.Empty: now = time.time() diff --git a/routes/ais.py b/routes/ais.py index 68f6140..b481fd8 100644 --- a/routes/ais.py +++ b/routes/ais.py @@ -19,6 +19,7 @@ from config import SHARED_OBSERVER_LOCATION_ENABLED from utils.logging import get_logger from utils.validation import validate_device_index, validate_gain from utils.sse import 
format_sse +from utils.event_pipeline import process_event from utils.sdr import SDRFactory, SDRType from utils.constants import ( AIS_TCP_PORT, @@ -484,6 +485,10 @@ def stream_ais(): try: msg = app_module.ais_queue.get(timeout=SSE_QUEUE_TIMEOUT) last_keepalive = time.time() + try: + process_event('ais', msg, msg.get('type')) + except Exception: + pass yield format_sse(msg) except queue.Empty: now = time.time() diff --git a/routes/alerts.py b/routes/alerts.py new file mode 100644 index 0000000..578d1bc --- /dev/null +++ b/routes/alerts.py @@ -0,0 +1,76 @@ +"""Alerting API endpoints.""" + +from __future__ import annotations + +import queue +import time +from typing import Generator + +from flask import Blueprint, Response, jsonify, request + +from utils.alerts import get_alert_manager +from utils.sse import format_sse + +alerts_bp = Blueprint('alerts', __name__, url_prefix='/alerts') + + +@alerts_bp.route('/rules', methods=['GET']) +def list_rules(): + manager = get_alert_manager() + include_disabled = request.args.get('all') in ('1', 'true', 'yes') + return jsonify({'status': 'success', 'rules': manager.list_rules(include_disabled=include_disabled)}) + + +@alerts_bp.route('/rules', methods=['POST']) +def create_rule(): + data = request.get_json() or {} + if not isinstance(data.get('match', {}), dict): + return jsonify({'status': 'error', 'message': 'match must be a JSON object'}), 400 + + manager = get_alert_manager() + rule_id = manager.add_rule(data) + return jsonify({'status': 'success', 'rule_id': rule_id}) + + +@alerts_bp.route('/rules/', methods=['PUT', 'PATCH']) +def update_rule(rule_id: int): + data = request.get_json() or {} + manager = get_alert_manager() + ok = manager.update_rule(rule_id, data) + if not ok: + return jsonify({'status': 'error', 'message': 'Rule not found or no changes'}), 404 + return jsonify({'status': 'success'}) + + +@alerts_bp.route('/rules/', methods=['DELETE']) +def delete_rule(rule_id: int): + manager = get_alert_manager() + ok = 
manager.delete_rule(rule_id) + if not ok: + return jsonify({'status': 'error', 'message': 'Rule not found'}), 404 + return jsonify({'status': 'success'}) + + +@alerts_bp.route('/events', methods=['GET']) +def list_events(): + manager = get_alert_manager() + limit = request.args.get('limit', default=100, type=int) + mode = request.args.get('mode') + severity = request.args.get('severity') + events = manager.list_events(limit=limit, mode=mode, severity=severity) + return jsonify({'status': 'success', 'events': events}) + + +@alerts_bp.route('/stream', methods=['GET']) +def stream_alerts() -> Response: + manager = get_alert_manager() + + def generate() -> Generator[str, None, None]: + for event in manager.stream_events(timeout=1.0): + yield format_sse(event) + + response = Response(generate(), mimetype='text/event-stream') + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' + return response diff --git a/routes/aprs.py b/routes/aprs.py index 901d47a..fbbced7 100644 --- a/routes/aprs.py +++ b/routes/aprs.py @@ -21,7 +21,8 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.logging import sensor_logger as logger from utils.validation import validate_device_index, validate_gain, validate_ppm -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.constants import ( PROCESS_TERMINATE_TIMEOUT, SSE_KEEPALIVE_INTERVAL, @@ -1725,9 +1726,13 @@ def stream_aprs() -> Response: while True: try: - msg = app_module.aprs_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - yield format_sse(msg) + msg = app_module.aprs_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('aprs', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= 
SSE_KEEPALIVE_INTERVAL: diff --git a/routes/bluetooth.py b/routes/bluetooth.py index d6fc1fe..f7f7d1f 100644 --- a/routes/bluetooth.py +++ b/routes/bluetooth.py @@ -18,10 +18,11 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response import app as app_module -from utils.dependencies import check_tool -from utils.logging import bluetooth_logger as logger -from utils.sse import format_sse -from utils.validation import validate_bluetooth_interface +from utils.dependencies import check_tool +from utils.logging import bluetooth_logger as logger +from utils.sse import format_sse +from utils.event_pipeline import process_event +from utils.validation import validate_bluetooth_interface from data.oui import OUI_DATABASE, load_oui_database, get_manufacturer from data.patterns import AIRTAG_PREFIXES, TILE_PREFIXES, SAMSUNG_TRACKER from utils.constants import ( @@ -561,9 +562,13 @@ def stream_bt(): while True: try: - msg = app_module.bt_queue.get(timeout=1) - last_keepalive = time.time() - yield format_sse(msg) + msg = app_module.bt_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('bluetooth', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/bluetooth_v2.py b/routes/bluetooth_v2.py index 8051477..24ace6e 100644 --- a/routes/bluetooth_v2.py +++ b/routes/bluetooth_v2.py @@ -7,32 +7,40 @@ aggregation, and heuristics. 
from __future__ import annotations -import csv -import io -import json -import logging +import csv +import io +import json +import logging +import threading +import time from datetime import datetime from typing import Generator from flask import Blueprint, Response, jsonify, request, session -from utils.bluetooth import ( - BluetoothScanner, - BTDeviceAggregate, - get_bluetooth_scanner, - check_capabilities, - RANGE_UNKNOWN, +from utils.bluetooth import ( + BluetoothScanner, + BTDeviceAggregate, + get_bluetooth_scanner, + check_capabilities, + RANGE_UNKNOWN, TrackerType, TrackerConfidence, get_tracker_engine, -) -from utils.database import get_db -from utils.sse import format_sse +) +from utils.database import get_db +from utils.sse import format_sse +from utils.event_pipeline import process_event logger = logging.getLogger('intercept.bluetooth_v2') # Blueprint -bluetooth_v2_bp = Blueprint('bluetooth_v2', __name__, url_prefix='/api/bluetooth') +bluetooth_v2_bp = Blueprint('bluetooth_v2', __name__, url_prefix='/api/bluetooth') + +# Seen-before tracking +_bt_seen_cache: set[str] = set() +_bt_session_seen: set[str] = set() +_bt_seen_lock = threading.Lock() # ============================================================================= # DATABASE FUNCTIONS @@ -164,13 +172,20 @@ def get_all_baselines() -> list[dict]: return [dict(row) for row in cursor] -def save_observation_history(device: BTDeviceAggregate) -> None: - """Save device observation to history.""" - with get_db() as conn: - conn.execute(''' - INSERT INTO bt_observation_history (device_id, rssi, seen_count) - VALUES (?, ?, ?) - ''', (device.device_id, device.rssi_current, device.seen_count)) +def save_observation_history(device: BTDeviceAggregate) -> None: + """Save device observation to history.""" + with get_db() as conn: + conn.execute(''' + INSERT INTO bt_observation_history (device_id, rssi, seen_count) + VALUES (?, ?, ?) 
+ ''', (device.device_id, device.rssi_current, device.seen_count)) + + +def load_seen_device_ids() -> set[str]: + """Load distinct device IDs from history for seen-before tracking.""" + with get_db() as conn: + cursor = conn.execute('SELECT DISTINCT device_id FROM bt_observation_history') + return {row['device_id'] for row in cursor} # ============================================================================= @@ -191,7 +206,7 @@ def get_capabilities(): @bluetooth_v2_bp.route('/scan/start', methods=['POST']) -def start_scan(): +def start_scan(): """ Start Bluetooth scanning. @@ -221,17 +236,42 @@ def start_scan(): # Get scanner instance scanner = get_bluetooth_scanner(adapter_id) - # Check if already scanning - if scanner.is_scanning: - return jsonify({ - 'status': 'already_running', - 'scan_status': scanner.get_status().to_dict() - }) - - # Initialize database tables if needed - init_bt_tables() - - # Load active baseline if exists + # Initialize database tables if needed + init_bt_tables() + + def _handle_seen_before(device: BTDeviceAggregate) -> None: + try: + with _bt_seen_lock: + device.seen_before = device.device_id in _bt_seen_cache + if device.device_id not in _bt_session_seen: + save_observation_history(device) + _bt_session_seen.add(device.device_id) + except Exception as e: + logger.debug(f"BT seen-before update failed: {e}") + + # Setup seen-before callback + if scanner._on_device_updated is None: + scanner._on_device_updated = _handle_seen_before + + # Ensure cache is initialized + with _bt_seen_lock: + if not _bt_seen_cache: + _bt_seen_cache.update(load_seen_device_ids()) + + # Check if already scanning + if scanner.is_scanning: + return jsonify({ + 'status': 'already_running', + 'scan_status': scanner.get_status().to_dict() + }) + + # Refresh seen-before cache and reset session set for a new scan + with _bt_seen_lock: + _bt_seen_cache.clear() + _bt_seen_cache.update(load_seen_device_ids()) + _bt_session_seen.clear() + + # Load active baseline if 
exists baseline_id = get_active_baseline_id() if baseline_id: device_ids = get_baseline_device_ids(baseline_id) @@ -856,11 +896,15 @@ def stream_events(): else: return event_type, event - def event_generator() -> Generator[str, None, None]: - """Generate SSE events from scanner.""" - for event in scanner.stream_events(timeout=1.0): - event_name, event_data = map_event_type(event) - yield format_sse(event_data, event=event_name) + def event_generator() -> Generator[str, None, None]: + """Generate SSE events from scanner.""" + for event in scanner.stream_events(timeout=1.0): + event_name, event_data = map_event_type(event) + try: + process_event('bluetooth', event_data, event_name) + except Exception: + pass + yield format_sse(event_data, event=event_name) return Response( event_generator(), diff --git a/routes/dmr.py b/routes/dmr.py index 8770583..d993614 100644 --- a/routes/dmr.py +++ b/routes/dmr.py @@ -18,6 +18,7 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.logging import get_logger from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import register_process, unregister_process from utils.constants import ( SSE_QUEUE_TIMEOUT, @@ -495,6 +496,10 @@ def stream_dmr() -> Response: try: msg = dmr_queue.get(timeout=SSE_QUEUE_TIMEOUT) last_keepalive = time.time() + try: + process_event('dmr', msg, msg.get('type')) + except Exception: + pass yield format_sse(msg) except queue.Empty: now = time.time() diff --git a/routes/dsc.py b/routes/dsc.py index 92853ed..5dadfcd 100644 --- a/routes/dsc.py +++ b/routes/dsc.py @@ -36,6 +36,7 @@ from utils.database import ( ) from utils.dsc.parser import parse_dsc_message from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.validation import validate_device_index, validate_gain from utils.sdr import SDRFactory, SDRType from utils.dependencies import get_tool_path @@ -525,6 +526,10 @@ def stream() -> 
Response: try: msg = app_module.dsc_queue.get(timeout=1) last_keepalive = time.time() + try: + process_event('dsc', msg, msg.get('type')) + except Exception: + pass yield format_sse(msg) except queue.Empty: now = time.time() diff --git a/routes/listening_post.py b/routes/listening_post.py index 271e043..5912a8c 100644 --- a/routes/listening_post.py +++ b/routes/listening_post.py @@ -19,7 +19,8 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.logging import get_logger -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.constants import ( SSE_QUEUE_TIMEOUT, SSE_KEEPALIVE_INTERVAL, @@ -1180,9 +1181,13 @@ def stream_scanner_events() -> Response: while True: try: - msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - yield format_sse(msg) + msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('listening_scanner', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL: @@ -1514,14 +1519,15 @@ waterfall_thread: Optional[threading.Thread] = None waterfall_running = False waterfall_lock = threading.Lock() waterfall_queue: queue.Queue = queue.Queue(maxsize=200) -waterfall_active_device: Optional[int] = None -waterfall_config = { - 'start_freq': 88.0, - 'end_freq': 108.0, - 'bin_size': 10000, - 'gain': 40, - 'device': 0, -} +waterfall_active_device: Optional[int] = None +waterfall_config = { + 'start_freq': 88.0, + 'end_freq': 108.0, + 'bin_size': 10000, + 'gain': 40, + 'device': 0, + 'max_bins': 1024, +} def _waterfall_loop(): @@ -1606,11 +1612,14 @@ def _waterfall_loop(): except ValueError: continue - if all_bins: - msg = { - 'type': 'waterfall_sweep', - 'start_freq': sweep_start_hz / 1e6, - 'end_freq': sweep_end_hz / 1e6, + if all_bins: + max_bins = 
int(waterfall_config.get('max_bins') or 0) + if max_bins > 0 and len(all_bins) > max_bins: + all_bins = _downsample_bins(all_bins, max_bins) + msg = { + 'type': 'waterfall_sweep', + 'start_freq': sweep_start_hz / 1e6, + 'end_freq': sweep_end_hz / 1e6, 'bins': all_bins, 'timestamp': datetime.now().isoformat(), } @@ -1649,14 +1658,19 @@ def start_waterfall() -> Response: data = request.json or {} - try: - waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) - waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) - waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) - waterfall_config['gain'] = int(data.get('gain', 40)) - waterfall_config['device'] = int(data.get('device', 0)) - except (ValueError, TypeError) as e: - return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 + try: + waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) + waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) + waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) + waterfall_config['gain'] = int(data.get('gain', 40)) + waterfall_config['device'] = int(data.get('device', 0)) + if data.get('max_bins') is not None: + max_bins = int(data.get('max_bins', waterfall_config['max_bins'])) + if max_bins < 64 or max_bins > 4096: + return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400 + waterfall_config['max_bins'] = max_bins + except (ValueError, TypeError) as e: + return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 if waterfall_config['start_freq'] >= waterfall_config['end_freq']: return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400 @@ -1711,10 +1725,14 @@ def stream_waterfall() -> Response: def generate() -> Generator[str, None, None]: last_keepalive = time.time() while True: - try: - msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT) - last_keepalive = time.time() - yield format_sse(msg) + 
try: + msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT) + last_keepalive = time.time() + try: + process_event('waterfall', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL: @@ -1725,3 +1743,20 @@ def stream_waterfall() -> Response: response.headers['Cache-Control'] = 'no-cache' response.headers['X-Accel-Buffering'] = 'no' return response +def _downsample_bins(values: list[float], target: int) -> list[float]: + """Downsample bins to a target length using simple averaging.""" + if target <= 0 or len(values) <= target: + return values + + out: list[float] = [] + step = len(values) / target + for i in range(target): + start = int(i * step) + end = int((i + 1) * step) + if end <= start: + end = min(start + 1, len(values)) + chunk = values[start:end] + if not chunk: + continue + out.append(sum(chunk) / len(chunk)) + return out diff --git a/routes/pager.py b/routes/pager.py index 5dd1efe..4ee5425 100644 --- a/routes/pager.py +++ b/routes/pager.py @@ -22,7 +22,8 @@ from utils.validation import ( validate_frequency, validate_device_index, validate_gain, validate_ppm, validate_rtl_tcp_host, validate_rtl_tcp_port ) -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import safe_terminate, register_process, unregister_process from utils.sdr import SDRFactory, SDRType, SDRValidationError from utils.dependencies import get_tool_path @@ -468,10 +469,14 @@ def stream() -> Response: keepalive_interval = 30.0 # Send keepalive every 30 seconds instead of 1 second while True: - try: - msg = app_module.output_queue.get(timeout=1) - last_keepalive = time.time() - yield format_sse(msg) + try: + msg = app_module.output_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('pager', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: 
now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/recordings.py b/routes/recordings.py new file mode 100644 index 0000000..063fe93 --- /dev/null +++ b/routes/recordings.py @@ -0,0 +1,109 @@ +"""Session recording API endpoints.""" + +from __future__ import annotations + +from pathlib import Path + +from flask import Blueprint, jsonify, request, send_file + +from utils.recording import get_recording_manager, RECORDING_ROOT + +recordings_bp = Blueprint('recordings', __name__, url_prefix='/recordings') + + +@recordings_bp.route('/start', methods=['POST']) +def start_recording(): + data = request.get_json() or {} + mode = (data.get('mode') or '').strip() + if not mode: + return jsonify({'status': 'error', 'message': 'mode is required'}), 400 + + label = data.get('label') + metadata = data.get('metadata') if isinstance(data.get('metadata'), dict) else {} + + manager = get_recording_manager() + session = manager.start_recording(mode=mode, label=label, metadata=metadata) + + return jsonify({ + 'status': 'success', + 'session': { + 'id': session.id, + 'mode': session.mode, + 'label': session.label, + 'started_at': session.started_at.isoformat(), + 'file_path': str(session.file_path), + } + }) + + +@recordings_bp.route('/stop', methods=['POST']) +def stop_recording(): + data = request.get_json() or {} + mode = data.get('mode') + session_id = data.get('id') + + manager = get_recording_manager() + session = manager.stop_recording(mode=mode, session_id=session_id) + if not session: + return jsonify({'status': 'error', 'message': 'No active recording found'}), 404 + + return jsonify({ + 'status': 'success', + 'session': { + 'id': session.id, + 'mode': session.mode, + 'label': session.label, + 'started_at': session.started_at.isoformat(), + 'stopped_at': session.stopped_at.isoformat() if session.stopped_at else None, + 'event_count': session.event_count, + 'size_bytes': session.size_bytes, + 'file_path': str(session.file_path), + } + }) + + 
+@recordings_bp.route('', methods=['GET']) +def list_recordings(): + manager = get_recording_manager() + limit = request.args.get('limit', default=50, type=int) + return jsonify({ + 'status': 'success', + 'recordings': manager.list_recordings(limit=limit), + 'active': manager.get_active(), + }) + + +@recordings_bp.route('/', methods=['GET']) +def get_recording(session_id: str): + manager = get_recording_manager() + rec = manager.get_recording(session_id) + if not rec: + return jsonify({'status': 'error', 'message': 'Recording not found'}), 404 + return jsonify({'status': 'success', 'recording': rec}) + + +@recordings_bp.route('//download', methods=['GET']) +def download_recording(session_id: str): + manager = get_recording_manager() + rec = manager.get_recording(session_id) + if not rec: + return jsonify({'status': 'error', 'message': 'Recording not found'}), 404 + + file_path = Path(rec['file_path']) + try: + resolved_root = RECORDING_ROOT.resolve() + resolved_file = file_path.resolve() + if resolved_root not in resolved_file.parents: + return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + except Exception: + return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + + if not file_path.exists(): + return jsonify({'status': 'error', 'message': 'Recording file missing'}), 404 + + return send_file( + file_path, + mimetype='application/x-ndjson', + as_attachment=True, + download_name=file_path.name, + ) diff --git a/routes/rtlamr.py b/routes/rtlamr.py index 8d31167..abbfd1d 100644 --- a/routes/rtlamr.py +++ b/routes/rtlamr.py @@ -18,6 +18,7 @@ from utils.validation import ( validate_frequency, validate_device_index, validate_gain, validate_ppm ) from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import safe_terminate, register_process, unregister_process rtlamr_bp = Blueprint('rtlamr', __name__) @@ -295,6 +296,10 @@ def stream_rtlamr() -> Response: try: msg = 
app_module.rtlamr_queue.get(timeout=1) last_keepalive = time.time() + try: + process_event('rtlamr', msg, msg.get('type')) + except Exception: + pass yield format_sse(msg) except queue.Empty: now = time.time() diff --git a/routes/sensor.py b/routes/sensor.py index da99c8f..e5a719e 100644 --- a/routes/sensor.py +++ b/routes/sensor.py @@ -18,7 +18,8 @@ from utils.validation import ( validate_frequency, validate_device_index, validate_gain, validate_ppm, validate_rtl_tcp_host, validate_rtl_tcp_port ) -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.process import safe_terminate, register_process, unregister_process from utils.sdr import SDRFactory, SDRType @@ -231,9 +232,13 @@ def stream_sensor() -> Response: while True: try: - msg = app_module.sensor_queue.get(timeout=1) - last_keepalive = time.time() - yield format_sse(msg) + msg = app_module.sensor_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('sensor', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/sstv.py b/routes/sstv.py index b46bea5..ed3676a 100644 --- a/routes/sstv.py +++ b/routes/sstv.py @@ -15,7 +15,8 @@ from flask import Blueprint, jsonify, request, Response, send_file import app as app_module from utils.logging import get_logger -from utils.sse import format_sse +from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.sstv import ( get_sstv_decoder, is_sstv_available, @@ -398,10 +399,14 @@ def stream_progress(): keepalive_interval = 30.0 while True: - try: - progress = _sstv_queue.get(timeout=1) - last_keepalive = time.time() - yield format_sse(progress) + try: + progress = _sstv_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('sstv', progress, progress.get('type')) + except Exception: + pass + yield 
format_sse(progress) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/sstv_general.py b/routes/sstv_general.py index f12bf38..5ebcbb2 100644 --- a/routes/sstv_general.py +++ b/routes/sstv_general.py @@ -15,6 +15,7 @@ from flask import Blueprint, Response, jsonify, request, send_file from utils.logging import get_logger from utils.sse import format_sse +from utils.event_pipeline import process_event from utils.sstv import ( DecodeProgress, get_general_sstv_decoder, @@ -274,6 +275,10 @@ def stream_progress(): try: progress = _sstv_general_queue.get(timeout=1) last_keepalive = time.time() + try: + process_event('sstv_general', progress, progress.get('type')) + except Exception: + pass yield format_sse(progress) except queue.Empty: now = time.time() diff --git a/routes/tscm.py b/routes/tscm.py index 784d248..5a3d31d 100644 --- a/routes/tscm.py +++ b/routes/tscm.py @@ -60,6 +60,7 @@ from utils.tscm.device_identity import ( ingest_ble_dict, ingest_wifi_dict, ) +from utils.event_pipeline import process_event # Import unified Bluetooth scanner helper for TSCM integration try: @@ -627,6 +628,10 @@ def sweep_stream(): try: if tscm_queue: msg = tscm_queue.get(timeout=1) + try: + process_event('tscm', msg, msg.get('type')) + except Exception: + pass yield f"data: {json.dumps(msg)}\n\n" else: time.sleep(1) @@ -2023,6 +2028,7 @@ def _run_sweep( comparator = BaselineComparator(baseline) baseline_comparison = comparator.compare_all( wifi_devices=list(all_wifi.values()), + wifi_clients=list(all_wifi_clients.values()), bt_devices=list(all_bt.values()), rf_signals=all_rf ) @@ -2132,6 +2138,7 @@ def _run_sweep( 'total_new': baseline_comparison['total_new'], 'total_missing': baseline_comparison['total_missing'], 'wifi': baseline_comparison.get('wifi'), + 'wifi_clients': baseline_comparison.get('wifi_clients'), 'bluetooth': baseline_comparison.get('bluetooth'), 'rf': baseline_comparison.get('rf'), }) @@ -2297,6 +2304,7 @@ def 
compare_against_baseline(): Expects JSON body with: - wifi_devices: list of WiFi devices (optional) + - wifi_clients: list of WiFi clients (optional) - bt_devices: list of Bluetooth devices (optional) - rf_signals: list of RF signals (optional) @@ -2305,12 +2313,14 @@ def compare_against_baseline(): data = request.get_json() or {} wifi_devices = data.get('wifi_devices') + wifi_clients = data.get('wifi_clients') bt_devices = data.get('bt_devices') rf_signals = data.get('rf_signals') # Use the convenience function that gets active baseline comparison = get_comparison_for_active_baseline( wifi_devices=wifi_devices, + wifi_clients=wifi_clients, bt_devices=bt_devices, rf_signals=rf_signals ) @@ -2404,7 +2414,10 @@ def feed_wifi(): """Feed WiFi device data for baseline recording.""" data = request.get_json() if data: - _baseline_recorder.add_wifi_device(data) + if data.get('is_client'): + _baseline_recorder.add_wifi_client(data) + else: + _baseline_recorder.add_wifi_device(data) return jsonify({'status': 'success'}) @@ -3056,12 +3069,14 @@ def get_baseline_diff(baseline_id: int, sweep_id: int): results = json.loads(results) current_wifi = results.get('wifi_devices', []) + current_wifi_clients = results.get('wifi_clients', []) current_bt = results.get('bt_devices', []) current_rf = results.get('rf_signals', []) diff = calculate_baseline_diff( baseline=baseline, current_wifi=current_wifi, + current_wifi_clients=current_wifi_clients, current_bt=current_bt, current_rf=current_rf, sweep_id=sweep_id diff --git a/routes/wifi.py b/routes/wifi.py index c6464b5..3c6018a 100644 --- a/routes/wifi.py +++ b/routes/wifi.py @@ -17,11 +17,12 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.dependencies import check_tool, get_tool_path -from utils.logging import wifi_logger as logger -from utils.process import is_valid_mac, is_valid_channel -from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface 
-from utils.sse import format_sse -from data.oui import get_manufacturer +from utils.logging import wifi_logger as logger +from utils.process import is_valid_mac, is_valid_channel +from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface +from utils.sse import format_sse +from utils.event_pipeline import process_event +from data.oui import get_manufacturer from utils.constants import ( WIFI_TERMINATE_TIMEOUT, PMKID_TERMINATE_TIMEOUT, @@ -46,8 +47,33 @@ from utils.constants import ( wifi_bp = Blueprint('wifi', __name__, url_prefix='/wifi') # PMKID process state -pmkid_process = None -pmkid_lock = threading.Lock() +pmkid_process = None +pmkid_lock = threading.Lock() + + +def _parse_channel_list(raw_channels: Any) -> list[int] | None: + """Parse a channel list from string/list input.""" + if raw_channels in (None, '', []): + return None + + if isinstance(raw_channels, str): + parts = [p.strip() for p in re.split(r'[\s,]+', raw_channels) if p.strip()] + elif isinstance(raw_channels, (list, tuple, set)): + parts = list(raw_channels) + else: + parts = [raw_channels] + + channels: list[int] = [] + seen = set() + for part in parts: + if part in (None, ''): + continue + ch = validate_wifi_channel(part) + if ch not in seen: + channels.append(ch) + seen.add(ch) + + return channels or None def detect_wifi_interfaces(): @@ -607,8 +633,9 @@ def start_wifi_scan(): return jsonify({'status': 'error', 'message': 'Scan already running'}) data = request.json - channel = data.get('channel') - band = data.get('band', 'abg') + channel = data.get('channel') + channels = data.get('channels') + band = data.get('band', 'abg') # Use provided interface or fall back to stored monitor interface interface = data.get('interface') @@ -658,8 +685,17 @@ def start_wifi_scan(): interface ] - if channel: - cmd.extend(['-c', str(channel)]) + channel_list = None + if channels: + try: + channel_list = _parse_channel_list(channels) + except ValueError as e: + 
return jsonify({'status': 'error', 'message': str(e)}), 400 + + if channel_list: + cmd.extend(['-c', ','.join(str(c) for c in channel_list)]) + elif channel: + cmd.extend(['-c', str(channel)]) logger.info(f"Running: {' '.join(cmd)}") @@ -851,32 +887,53 @@ def check_handshake_status(): return jsonify({'status': 'stopped', 'file_exists': False, 'handshake_found': False}) file_size = os.path.getsize(capture_file) - handshake_found = False + handshake_found = False + handshake_valid: bool | None = None + handshake_checked = False + handshake_reason: str | None = None try: - if target_bssid and is_valid_mac(target_bssid): - aircrack_path = get_tool_path('aircrack-ng') - if aircrack_path: - result = subprocess.run( - [aircrack_path, '-a', '2', '-b', target_bssid, capture_file], - capture_output=True, text=True, timeout=10 - ) - output = result.stdout + result.stderr - if '1 handshake' in output or ('handshake' in output.lower() and 'wpa' in output.lower()): - if '0 handshake' not in output: - handshake_found = True + if target_bssid and is_valid_mac(target_bssid): + aircrack_path = get_tool_path('aircrack-ng') + if aircrack_path: + result = subprocess.run( + [aircrack_path, '-a', '2', '-b', target_bssid, capture_file], + capture_output=True, text=True, timeout=10 + ) + output = result.stdout + result.stderr + output_lower = output.lower() + handshake_checked = True + + if 'no valid wpa handshakes found' in output_lower: + handshake_valid = False + handshake_reason = 'No valid WPA handshake found' + elif '0 handshake' in output_lower: + handshake_valid = False + elif '1 handshake' in output_lower or ('handshake' in output_lower and 'wpa' in output_lower): + handshake_valid = True + else: + handshake_valid = False except subprocess.TimeoutExpired: pass - except Exception as e: - logger.error(f"Error checking handshake: {e}") - - return jsonify({ - 'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped', - 'file_exists': 
True, - 'file_size': file_size, - 'file': capture_file, - 'handshake_found': handshake_found - }) + except Exception as e: + logger.error(f"Error checking handshake: {e}") + + if handshake_valid: + handshake_found = True + normalized_bssid = target_bssid.upper() if target_bssid else None + if normalized_bssid and normalized_bssid not in app_module.wifi_handshakes: + app_module.wifi_handshakes.append(normalized_bssid) + + return jsonify({ + 'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped', + 'file_exists': True, + 'file_size': file_size, + 'file': capture_file, + 'handshake_found': handshake_found, + 'handshake_valid': handshake_valid, + 'handshake_checked': handshake_checked, + 'handshake_reason': handshake_reason + }) @wifi_bp.route('/pmkid/capture', methods=['POST']) @@ -1084,9 +1141,13 @@ def stream_wifi(): while True: try: - msg = app_module.wifi_queue.get(timeout=1) - last_keepalive = time.time() - yield format_sse(msg) + msg = app_module.wifi_queue.get(timeout=1) + last_keepalive = time.time() + try: + process_event('wifi', msg, msg.get('type')) + except Exception: + pass + yield format_sse(msg) except queue.Empty: now = time.time() if now - last_keepalive >= keepalive_interval: diff --git a/routes/wifi_v2.py b/routes/wifi_v2.py index 07dc6fb..ec9748e 100644 --- a/routes/wifi_v2.py +++ b/routes/wifi_v2.py @@ -16,14 +16,16 @@ from typing import Generator from flask import Blueprint, jsonify, request, Response -from utils.wifi import ( - get_wifi_scanner, - analyze_channels, - get_hidden_correlator, - SCAN_MODE_QUICK, - SCAN_MODE_DEEP, -) -from utils.sse import format_sse +from utils.wifi import ( + get_wifi_scanner, + analyze_channels, + get_hidden_correlator, + SCAN_MODE_QUICK, + SCAN_MODE_DEEP, +) +from utils.sse import format_sse +from utils.validation import validate_wifi_channel +from utils.event_pipeline import process_event logger = logging.getLogger(__name__) @@ -85,28 +87,44 @@ def 
start_deep_scan(): Requires monitor mode interface and root privileges. - Request body: - interface: Monitor mode interface (e.g., 'wlan0mon') - band: Band to scan ('2.4', '5', 'all') - channel: Optional specific channel to monitor + Request body: + interface: Monitor mode interface (e.g., 'wlan0mon') + band: Band to scan ('2.4', '5', 'all') + channel: Optional specific channel to monitor + channels: Optional list or comma-separated channels to monitor """ data = request.get_json() or {} interface = data.get('interface') band = data.get('band', 'all') - channel = data.get('channel') - - if channel: - try: - channel = int(channel) - except ValueError: - return jsonify({'error': 'Invalid channel'}), 400 + channel = data.get('channel') + channels = data.get('channels') + + channel_list = None + if channels: + if isinstance(channels, str): + channel_list = [c.strip() for c in channels.split(',') if c.strip()] + elif isinstance(channels, (list, tuple, set)): + channel_list = list(channels) + else: + channel_list = [channels] + try: + channel_list = [validate_wifi_channel(c) for c in channel_list] + except (TypeError, ValueError): + return jsonify({'error': 'Invalid channels'}), 400 + + if channel: + try: + channel = validate_wifi_channel(channel) + except ValueError: + return jsonify({'error': 'Invalid channel'}), 400 scanner = get_wifi_scanner() - success = scanner.start_deep_scan( - interface=interface, - band=band, - channel=channel, - ) + success = scanner.start_deep_scan( + interface=interface, + band=band, + channel=channel, + channels=channel_list, + ) if success: return jsonify({ @@ -388,10 +406,14 @@ def event_stream(): - keepalive: Periodic keepalive """ def generate() -> Generator[str, None, None]: - scanner = get_wifi_scanner() - - for event in scanner.get_event_stream(): - yield format_sse(event) + scanner = get_wifi_scanner() + + for event in scanner.get_event_stream(): + try: + process_event('wifi', event, event.get('type')) + except Exception: + pass + 
yield format_sse(event) response = Response(generate(), mimetype='text/event-stream') response.headers['Cache-Control'] = 'no-cache' diff --git a/static/css/index.css b/static/css/index.css index b68145c..ab517b2 100644 --- a/static/css/index.css +++ b/static/css/index.css @@ -4201,6 +4201,12 @@ header h1 .tagline { color: #000; } +.bt-detail-btn.active { + background: rgba(34, 197, 94, 0.2); + border-color: rgba(34, 197, 94, 0.6); + color: #9fffd1; +} + /* Selected device highlight */ .bt-device-row.selected { background: rgba(0, 212, 255, 0.1); @@ -4392,6 +4398,17 @@ header h1 .tagline { border: 1px solid rgba(139, 92, 246, 0.3); } +.bt-history-badge { + display: inline-block; + padding: 1px 4px; + border-radius: 3px; + font-size: 8px; + font-weight: 600; + letter-spacing: 0.2px; + background: rgba(34, 197, 94, 0.15); + color: #22c55e; +} + .bt-device-name { font-size: 13px; font-weight: 600; diff --git a/static/css/settings.css b/static/css/settings.css index 9641b2b..41c6618 100644 --- a/static/css/settings.css +++ b/static/css/settings.css @@ -163,6 +163,47 @@ color: var(--text-muted, #666); } +/* Settings Feed Lists */ +.settings-feed { + background: var(--bg-tertiary, #12121f); + border: 1px solid var(--border-color, #1a1a2e); + border-radius: 6px; + padding: 8px; + max-height: 240px; + overflow-y: auto; +} + +.settings-feed-item { + padding: 8px; + border-bottom: 1px solid rgba(255, 255, 255, 0.05); + font-size: 11px; +} + +.settings-feed-item:last-child { + border-bottom: none; +} + +.settings-feed-title { + display: flex; + justify-content: space-between; + align-items: center; + font-weight: 600; + color: var(--text-primary, #e0e0e0); + margin-bottom: 4px; +} + +.settings-feed-meta { + color: var(--text-muted, #666); + font-size: 10px; +} + +.settings-feed-empty { + color: var(--text-dim, #666); + text-align: center; + padding: 20px 10px; + font-size: 11px; +} + /* Toggle Switch */ .toggle-switch { position: relative; diff --git 
a/static/js/core/alerts.js b/static/js/core/alerts.js new file mode 100644 index 0000000..52dcb84 --- /dev/null +++ b/static/js/core/alerts.js @@ -0,0 +1,194 @@ +const AlertCenter = (function() { + 'use strict'; + + let alerts = []; + let rules = []; + let eventSource = null; + + const TRACKER_RULE_NAME = 'Tracker Detected'; + + function init() { + loadRules(); + loadFeed(); + connect(); + } + + function connect() { + if (eventSource) { + eventSource.close(); + } + eventSource = new EventSource('/alerts/stream'); + eventSource.onmessage = function(e) { + try { + const data = JSON.parse(e.data); + if (data.type === 'keepalive') return; + handleAlert(data); + } catch (err) { + console.error('[Alerts] SSE parse error', err); + } + }; + eventSource.onerror = function() { + console.warn('[Alerts] SSE connection error'); + }; + } + + function handleAlert(alert) { + alerts.unshift(alert); + alerts = alerts.slice(0, 50); + updateFeedUI(); + + if (typeof showNotification === 'function') { + const severity = (alert.severity || '').toLowerCase(); + if (['high', 'critical'].includes(severity)) { + showNotification(alert.title || 'Alert', alert.message || 'Alert triggered'); + } + } + } + + function updateFeedUI() { + const list = document.getElementById('alertsFeedList'); + const countEl = document.getElementById('alertsFeedCount'); + if (countEl) countEl.textContent = `(${alerts.length})`; + if (!list) return; + + if (alerts.length === 0) { + list.innerHTML = '
No alerts yet
'; + return; + } + + list.innerHTML = alerts.map(alert => { + const title = escapeHtml(alert.title || 'Alert'); + const message = escapeHtml(alert.message || ''); + const severity = escapeHtml(alert.severity || 'medium'); + const createdAt = alert.created_at ? new Date(alert.created_at).toLocaleString() : ''; + return ` +
+
+ ${title} + ${severity.toUpperCase()} +
+
${message}
+
${createdAt}
+
+ `; + }).join(''); + } + + function loadFeed() { + fetch('/alerts/events?limit=20') + .then(r => r.json()) + .then(data => { + if (data.status === 'success') { + alerts = data.events || []; + updateFeedUI(); + } + }) + .catch(err => console.error('[Alerts] Load feed failed', err)); + } + + function loadRules() { + fetch('/alerts/rules?all=1') + .then(r => r.json()) + .then(data => { + if (data.status === 'success') { + rules = data.rules || []; + } + }) + .catch(err => console.error('[Alerts] Load rules failed', err)); + } + + function enableTrackerAlerts() { + ensureTrackerRule(true); + } + + function disableTrackerAlerts() { + ensureTrackerRule(false); + } + + function ensureTrackerRule(enabled) { + loadRules(); + setTimeout(() => { + const existing = rules.find(r => r.name === TRACKER_RULE_NAME); + if (existing) { + fetch(`/alerts/rules/${existing.id}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ enabled }) + }).then(() => loadRules()); + } else if (enabled) { + fetch('/alerts/rules', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: TRACKER_RULE_NAME, + mode: 'bluetooth', + event_type: 'device_update', + match: { is_tracker: true }, + severity: 'high', + enabled: true, + notify: { webhook: true } + }) + }).then(() => loadRules()); + } + }, 150); + } + + function addBluetoothWatchlist(address, name) { + if (!address) return; + const existing = rules.find(r => r.mode === 'bluetooth' && r.match && r.match.address === address); + if (existing) { + return; + } + fetch('/alerts/rules', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + name: name ? 
`Watchlist ${name}` : `Watchlist ${address}`, + mode: 'bluetooth', + event_type: 'device_update', + match: { address: address }, + severity: 'medium', + enabled: true, + notify: { webhook: true } + }) + }).then(() => loadRules()); + } + + function removeBluetoothWatchlist(address) { + if (!address) return; + const existing = rules.find(r => r.mode === 'bluetooth' && r.match && r.match.address === address); + if (!existing) return; + fetch(`/alerts/rules/${existing.id}`, { method: 'DELETE' }) + .then(() => loadRules()); + } + + function isWatchlisted(address) { + return rules.some(r => r.mode === 'bluetooth' && r.match && r.match.address === address && r.enabled); + } + + function escapeHtml(str) { + if (!str) return ''; + return String(str) + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/"/g, '"') + .replace(/'/g, '''); + } + + return { + init, + loadFeed, + enableTrackerAlerts, + disableTrackerAlerts, + addBluetoothWatchlist, + removeBluetoothWatchlist, + isWatchlisted, + }; +})(); + +document.addEventListener('DOMContentLoaded', () => { + if (typeof AlertCenter !== 'undefined') { + AlertCenter.init(); + } +}); diff --git a/static/js/core/recordings.js b/static/js/core/recordings.js new file mode 100644 index 0000000..d4188b7 --- /dev/null +++ b/static/js/core/recordings.js @@ -0,0 +1,136 @@ +const RecordingUI = (function() { + 'use strict'; + + let recordings = []; + let active = []; + + function init() { + refresh(); + } + + function refresh() { + fetch('/recordings') + .then(r => r.json()) + .then(data => { + if (data.status !== 'success') return; + recordings = data.recordings || []; + active = data.active || []; + renderActive(); + renderRecordings(); + }) + .catch(err => console.error('[Recording] Load failed', err)); + } + + function start() { + const modeSelect = document.getElementById('recordingModeSelect'); + const labelInput = document.getElementById('recordingLabelInput'); + const mode = modeSelect ? 
modeSelect.value : ''; + const label = labelInput ? labelInput.value : ''; + if (!mode) return; + + fetch('/recordings/start', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ mode, label }) + }) + .then(r => r.json()) + .then(() => { + refresh(); + }) + .catch(err => console.error('[Recording] Start failed', err)); + } + + function stop() { + const modeSelect = document.getElementById('recordingModeSelect'); + const mode = modeSelect ? modeSelect.value : ''; + if (!mode) return; + + fetch('/recordings/stop', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ mode }) + }) + .then(r => r.json()) + .then(() => refresh()) + .catch(err => console.error('[Recording] Stop failed', err)); + } + + function stopById(sessionId) { + fetch('/recordings/stop', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ id: sessionId }) + }).then(() => refresh()); + } + + function renderActive() { + const container = document.getElementById('recordingActiveList'); + if (!container) return; + if (!active.length) { + container.innerHTML = '
No active recordings
'; + return; + } + container.innerHTML = active.map(session => { + return ` +
+
+ ${escapeHtml(session.mode)} + +
+
Started: ${new Date(session.started_at).toLocaleString()}
+
Events: ${session.event_count || 0}
+
+ `; + }).join(''); + } + + function renderRecordings() { + const container = document.getElementById('recordingList'); + if (!container) return; + if (!recordings.length) { + container.innerHTML = '
No recordings yet
'; + return; + } + container.innerHTML = recordings.map(rec => { + return ` +
+
+ ${escapeHtml(rec.mode)}${rec.label ? ` • ${escapeHtml(rec.label)}` : ''} + +
+
${new Date(rec.started_at).toLocaleString()}${rec.stopped_at ? ` → ${new Date(rec.stopped_at).toLocaleString()}` : ''}
+
Events: ${rec.event_count || 0} • ${(rec.size_bytes || 0) / 1024.0 > 0 ? (rec.size_bytes / 1024).toFixed(1) + ' KB' : '0 KB'}
+
+ `; + }).join(''); + } + + function download(sessionId) { + window.open(`/recordings/${sessionId}/download`, '_blank'); + } + + function escapeHtml(str) { + if (!str) return ''; + return String(str) + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/"/g, '"') + .replace(/'/g, '''); + } + + return { + init, + refresh, + start, + stop, + stopById, + download, + }; +})(); + +document.addEventListener('DOMContentLoaded', () => { + if (typeof RecordingUI !== 'undefined') { + RecordingUI.init(); + } +}); diff --git a/static/js/core/settings-manager.js b/static/js/core/settings-manager.js index efd61da..3d2cb8b 100644 --- a/static/js/core/settings-manager.js +++ b/static/js/core/settings-manager.js @@ -922,5 +922,13 @@ function switchSettingsTab(tabName) { loadUpdateStatus(); } else if (tabName === 'location') { loadObserverLocation(); + } else if (tabName === 'alerts') { + if (typeof AlertCenter !== 'undefined') { + AlertCenter.loadFeed(); + } + } else if (tabName === 'recording') { + if (typeof RecordingUI !== 'undefined') { + RecordingUI.refresh(); + } } } diff --git a/static/js/modes/bluetooth.js b/static/js/modes/bluetooth.js index 2972c0c..16aa152 100644 --- a/static/js/modes/bluetooth.js +++ b/static/js/modes/bluetooth.js @@ -366,7 +366,10 @@ const BluetoothMode = (function() { // Badges const badgesEl = document.getElementById('btDetailBadges'); let badgesHtml = `${protocol.toUpperCase()}`; - badgesHtml += `${device.in_baseline ? '✓ KNOWN' : '● NEW'}`; + badgesHtml += `${device.in_baseline ? '✓ KNOWN' : '● NEW'}`; + if (device.seen_before) { + badgesHtml += `SEEN BEFORE`; + } // Tracker badge if (device.is_tracker) { @@ -448,12 +451,14 @@ const BluetoothMode = (function() { ? minMax[0] + '/' + minMax[1] : '--'; - document.getElementById('btDetailFirstSeen').textContent = device.first_seen - ? new Date(device.first_seen).toLocaleTimeString() - : '--'; - document.getElementById('btDetailLastSeen').textContent = device.last_seen - ? 
new Date(device.last_seen).toLocaleTimeString() - : '--'; + document.getElementById('btDetailFirstSeen').textContent = device.first_seen + ? new Date(device.first_seen).toLocaleTimeString() + : '--'; + document.getElementById('btDetailLastSeen').textContent = device.last_seen + ? new Date(device.last_seen).toLocaleTimeString() + : '--'; + + updateWatchlistButton(device); // Services const servicesContainer = document.getElementById('btDetailServices'); @@ -465,13 +470,29 @@ const BluetoothMode = (function() { servicesContainer.style.display = 'none'; } - // Show content, hide placeholder - placeholder.style.display = 'none'; - content.style.display = 'block'; + // Show content, hide placeholder + placeholder.style.display = 'none'; + content.style.display = 'block'; // Highlight selected device in list highlightSelectedDevice(deviceId); - } + } + + /** + * Update watchlist button state + */ + function updateWatchlistButton(device) { + const btn = document.getElementById('btDetailWatchBtn'); + if (!btn) return; + if (typeof AlertCenter === 'undefined') { + btn.style.display = 'none'; + return; + } + btn.style.display = ''; + const watchlisted = AlertCenter.isWatchlisted(device.address); + btn.textContent = watchlisted ? 
'Watching' : 'Watchlist'; + btn.classList.toggle('active', watchlisted); + } /** * Clear device selection @@ -525,24 +546,43 @@ const BluetoothMode = (function() { /** * Copy selected device address to clipboard */ - function copyAddress() { - if (!selectedDeviceId) return; - const device = devices.get(selectedDeviceId); - if (!device) return; + function copyAddress() { + if (!selectedDeviceId) return; + const device = devices.get(selectedDeviceId); + if (!device) return; - navigator.clipboard.writeText(device.address).then(() => { - const btn = document.querySelector('.bt-detail-btn'); - if (btn) { - const originalText = btn.textContent; - btn.textContent = 'Copied!'; - btn.style.background = '#22c55e'; + navigator.clipboard.writeText(device.address).then(() => { + const btn = document.getElementById('btDetailCopyBtn'); + if (btn) { + const originalText = btn.textContent; + btn.textContent = 'Copied!'; + btn.style.background = '#22c55e'; setTimeout(() => { btn.textContent = originalText; btn.style.background = ''; }, 1500); } - }); - } + }); + } + + /** + * Toggle Bluetooth watchlist for selected device + */ + function toggleWatchlist() { + if (!selectedDeviceId) return; + const device = devices.get(selectedDeviceId); + if (!device || typeof AlertCenter === 'undefined') return; + + if (AlertCenter.isWatchlisted(device.address)) { + AlertCenter.removeBluetoothWatchlist(device.address); + showInfo('Removed from watchlist'); + } else { + AlertCenter.addBluetoothWatchlist(device.address, device.name || device.address); + showInfo('Added to watchlist'); + } + + setTimeout(() => updateWatchlistButton(device), 200); + } /** * Select a device - opens modal with details @@ -1090,10 +1130,11 @@ const BluetoothMode = (function() { const isNew = !inBaseline; const hasName = !!device.name; const isTracker = device.is_tracker === true; - const trackerType = device.tracker_type; - const trackerConfidence = device.tracker_confidence; - const riskScore = device.risk_score || 0; - 
const agentName = device._agent || 'Local'; + const trackerType = device.tracker_type; + const trackerConfidence = device.tracker_confidence; + const riskScore = device.risk_score || 0; + const agentName = device._agent || 'Local'; + const seenBefore = device.seen_before === true; // Calculate RSSI bar width (0-100%) // RSSI typically ranges from -100 (weak) to -30 (very strong) @@ -1145,8 +1186,9 @@ const BluetoothMode = (function() { // Build secondary info line let secondaryParts = [addr]; - if (mfr) secondaryParts.push(mfr); - secondaryParts.push('Seen ' + seenCount + '×'); + if (mfr) secondaryParts.push(mfr); + secondaryParts.push('Seen ' + seenCount + '×'); + if (seenBefore) secondaryParts.push('SEEN BEFORE'); // Add agent name if not Local if (agentName !== 'Local') { secondaryParts.push('' + escapeHtml(agentName) + ''); @@ -1358,9 +1400,10 @@ const BluetoothMode = (function() { setBaseline, clearBaseline, exportData, - selectDevice, - clearSelection, - copyAddress, + selectDevice, + clearSelection, + copyAddress, + toggleWatchlist, // Agent handling handleAgentChange, diff --git a/static/js/modes/listening-post.js b/static/js/modes/listening-post.js index e40e40b..bd8253c 100644 --- a/static/js/modes/listening-post.js +++ b/static/js/modes/listening-post.js @@ -3021,15 +3021,23 @@ let spectrumCanvas = null; let spectrumCtx = null; let waterfallStartFreq = 88; let waterfallEndFreq = 108; +let waterfallRowImage = null; +let waterfallPalette = null; +let lastWaterfallDraw = 0; +const WATERFALL_MIN_INTERVAL_MS = 80; function initWaterfallCanvas() { waterfallCanvas = document.getElementById('waterfallCanvas'); spectrumCanvas = document.getElementById('spectrumCanvas'); if (waterfallCanvas) waterfallCtx = waterfallCanvas.getContext('2d'); if (spectrumCanvas) spectrumCtx = spectrumCanvas.getContext('2d'); + if (waterfallCtx && waterfallCanvas) { + waterfallRowImage = waterfallCtx.createImageData(waterfallCanvas.width, 1); + if (!waterfallPalette) waterfallPalette 
= buildWaterfallPalette(); + } } -function dBmToColor(normalized) { +function dBmToRgb(normalized) { // Viridis-inspired: dark blue -> cyan -> green -> yellow const n = Math.max(0, Math.min(1, normalized)); let r, g, b; @@ -3054,7 +3062,15 @@ function dBmToColor(normalized) { g = Math.round(255 - t * 55); b = Math.round(20 - t * 20); } - return `rgb(${r},${g},${b})`; + return [r, g, b]; +} + +function buildWaterfallPalette() { + const palette = new Array(256); + for (let i = 0; i < 256; i++) { + palette[i] = dBmToRgb(i / 255); + } + return palette; } function drawWaterfallRow(bins) { @@ -3062,9 +3078,8 @@ function drawWaterfallRow(bins) { const w = waterfallCanvas.width; const h = waterfallCanvas.height; - // Scroll existing content down by 1 pixel - const imageData = waterfallCtx.getImageData(0, 0, w, h - 1); - waterfallCtx.putImageData(imageData, 0, 1); + // Scroll existing content down by 1 pixel (GPU-accelerated) + waterfallCtx.drawImage(waterfallCanvas, 0, 0, w, h - 1, 0, 1, w, h - 1); // Find min/max for normalization let minVal = Infinity, maxVal = -Infinity; @@ -3074,13 +3089,24 @@ function drawWaterfallRow(bins) { } const range = maxVal - minVal || 1; - // Draw new row at top - const binWidth = w / bins.length; - for (let i = 0; i < bins.length; i++) { - const normalized = (bins[i] - minVal) / range; - waterfallCtx.fillStyle = dBmToColor(normalized); - waterfallCtx.fillRect(Math.floor(i * binWidth), 0, Math.ceil(binWidth) + 1, 1); + // Draw new row at top using ImageData + if (!waterfallRowImage || waterfallRowImage.width !== w) { + waterfallRowImage = waterfallCtx.createImageData(w, 1); } + const rowData = waterfallRowImage.data; + const palette = waterfallPalette || buildWaterfallPalette(); + const binCount = bins.length; + for (let x = 0; x < w; x++) { + const idx = Math.min(binCount - 1, Math.floor((x / w) * binCount)); + const normalized = (bins[idx] - minVal) / range; + const color = palette[Math.max(0, Math.min(255, Math.floor(normalized * 255)))] 
|| [0, 0, 0]; + const offset = x * 4; + rowData[offset] = color[0]; + rowData[offset + 1] = color[1]; + rowData[offset + 2] = color[2]; + rowData[offset + 3] = 255; + } + waterfallCtx.putImageData(waterfallRowImage, 0, 0); } function drawSpectrumLine(bins, startFreq, endFreq) { @@ -3154,6 +3180,7 @@ function startWaterfall() { const binSize = parseInt(document.getElementById('waterfallBinSize')?.value || 10000); const gain = parseInt(document.getElementById('waterfallGain')?.value || 40); const device = typeof getSelectedDevice === 'function' ? getSelectedDevice() : 0; + const maxBins = document.getElementById('waterfallCanvas')?.width || 800; if (startFreq >= endFreq) { if (typeof showNotification === 'function') showNotification('Error', 'End frequency must be greater than start'); @@ -3166,7 +3193,14 @@ function startWaterfall() { fetch('/listening/waterfall/start', { method: 'POST', headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ start_freq: startFreq, end_freq: endFreq, bin_size: binSize, gain: gain, device: device }) + body: JSON.stringify({ + start_freq: startFreq, + end_freq: endFreq, + bin_size: binSize, + gain: gain, + device: device, + max_bins: maxBins, + }) }) .then(r => r.json()) .then(data => { @@ -3176,6 +3210,7 @@ function startWaterfall() { document.getElementById('stopWaterfallBtn').style.display = 'block'; const waterfallPanel = document.getElementById('waterfallPanel'); if (waterfallPanel) waterfallPanel.style.display = 'block'; + lastWaterfallDraw = 0; initWaterfallCanvas(); connectWaterfallSSE(); } else { @@ -3204,6 +3239,9 @@ function connectWaterfallSSE() { waterfallEventSource.onmessage = function(event) { const msg = JSON.parse(event.data); if (msg.type === 'waterfall_sweep') { + const now = Date.now(); + if (now - lastWaterfallDraw < WATERFALL_MIN_INTERVAL_MS) return; + lastWaterfallDraw = now; drawWaterfallRow(msg.bins); drawSpectrumLine(msg.bins, msg.start_freq, msg.end_freq); } diff --git 
a/static/js/modes/wifi.js b/static/js/modes/wifi.js index 026cfa8..53cf450 100644 --- a/static/js/modes/wifi.js +++ b/static/js/modes/wifi.js @@ -28,9 +28,9 @@ const WiFiMode = (function() { maxProbes: 1000, }; - // ========================================================================== - // Agent Support - // ========================================================================== + // ========================================================================== + // Agent Support + // ========================================================================== /** * Get the API base URL, routing through agent proxy if agent is selected. @@ -59,15 +59,49 @@ const WiFiMode = (function() { /** * Check for agent mode conflicts before starting WiFi scan. */ - function checkAgentConflicts() { - if (typeof currentAgent === 'undefined' || currentAgent === 'local') { - return true; - } - if (typeof checkAgentModeConflict === 'function') { - return checkAgentModeConflict('wifi'); - } - return true; - } + function checkAgentConflicts() { + if (typeof currentAgent === 'undefined' || currentAgent === 'local') { + return true; + } + if (typeof checkAgentModeConflict === 'function') { + return checkAgentModeConflict('wifi'); + } + return true; + } + + function getChannelPresetList(preset) { + switch (preset) { + case '2.4-common': + return '1,6,11'; + case '2.4-all': + return '1,2,3,4,5,6,7,8,9,10,11,12,13'; + case '5-low': + return '36,40,44,48'; + case '5-mid': + return '52,56,60,64'; + case '5-high': + return '149,153,157,161,165'; + default: + return ''; + } + } + + function buildChannelConfig() { + const preset = document.getElementById('wifiChannelPreset')?.value || ''; + const listInput = document.getElementById('wifiChannelList')?.value || ''; + const singleInput = document.getElementById('wifiChannel')?.value || ''; + + const listValue = listInput.trim(); + const presetValue = getChannelPresetList(preset); + + const channels = listValue || presetValue || ''; + const 
channel = channels ? null : (singleInput.trim() ? parseInt(singleInput.trim()) : null); + + return { + channels: channels || null, + channel: Number.isFinite(channel) ? channel : null, + }; + } // ========================================================================== // State @@ -461,10 +495,10 @@ const WiFiMode = (function() { setScanning(true, 'deep'); try { - const iface = elements.interfaceSelect?.value || null; - const band = document.getElementById('wifiBand')?.value || 'all'; - const channel = document.getElementById('wifiChannel')?.value || null; - const isAgentMode = typeof currentAgent !== 'undefined' && currentAgent !== 'local'; + const iface = elements.interfaceSelect?.value || null; + const band = document.getElementById('wifiBand')?.value || 'all'; + const channelConfig = buildChannelConfig(); + const isAgentMode = typeof currentAgent !== 'undefined' && currentAgent !== 'local'; let response; if (isAgentMode) { @@ -473,23 +507,25 @@ const WiFiMode = (function() { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ - interface: iface, - scan_type: 'deep', - band: band === 'abg' ? 'all' : band === 'bg' ? '2.4' : '5', - channel: channel ? parseInt(channel) : null, - }), - }); - } else { - response = await fetch(`${CONFIG.apiBase}/scan/start`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - interface: iface, - band: band === 'abg' ? 'all' : band === 'bg' ? '2.4' : '5', - channel: channel ? parseInt(channel) : null, - }), - }); - } + interface: iface, + scan_type: 'deep', + band: band === 'abg' ? 'all' : band === 'bg' ? '2.4' : '5', + channel: channelConfig.channel, + channels: channelConfig.channels, + }), + }); + } else { + response = await fetch(`${CONFIG.apiBase}/scan/start`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + interface: iface, + band: band === 'abg' ? 'all' : band === 'bg' ? 
'2.4' : '5', + channel: channelConfig.channel, + channels: channelConfig.channels, + }), + }); + } if (!response.ok) { const error = await response.json(); diff --git a/templates/index.html b/templates/index.html index 7934eef..3a56ebe 100644 --- a/templates/index.html +++ b/templates/index.html @@ -806,7 +806,8 @@ - + + @@ -6058,11 +6059,44 @@ : 'Monitor mode: Inactive'; } + function getWifiChannelPresetList(preset) { + switch (preset) { + case '2.4-common': + return '1,6,11'; + case '2.4-all': + return '1,2,3,4,5,6,7,8,9,10,11,12,13'; + case '5-low': + return '36,40,44,48'; + case '5-mid': + return '52,56,60,64'; + case '5-high': + return '149,153,157,161,165'; + default: + return ''; + } + } + + function buildWifiChannelConfig() { + const preset = document.getElementById('wifiChannelPreset')?.value || ''; + const listInput = document.getElementById('wifiChannelList')?.value || ''; + const singleInput = document.getElementById('wifiChannel')?.value || ''; + + const listValue = listInput.trim(); + const presetValue = getWifiChannelPresetList(preset); + const channels = listValue || presetValue || ''; + const channel = channels ? null : (singleInput.trim() ? parseInt(singleInput.trim()) : null); + + return { + channels: channels || null, + channel: Number.isFinite(channel) ? channel : null, + }; + } + // Start WiFi scan - auto-enables monitor mode if needed async function startWifiScan() { console.log('startWifiScan called'); const band = document.getElementById('wifiBand').value; - const channel = document.getElementById('wifiChannel').value; + const channelConfig = buildWifiChannelConfig(); // Auto-enable monitor mode if not already enabled if (!monitorInterface) { @@ -6124,7 +6158,8 @@ body: JSON.stringify({ interface: monitorInterface, band: band, - channel: channel || null + channel: channelConfig.channel, + channels: channelConfig.channels, }) }); const scanData = await scanResp.json(); @@ -6821,7 +6856,7 @@ if (data.handshake_found) { // Handshake captured! 
- statusSpan.textContent = '✓ HANDSHAKE CAPTURED!'; + statusSpan.textContent = '✓ VALID HANDSHAKE CAPTURED!'; statusSpan.style.color = 'var(--accent-green)'; handshakeCount++; document.getElementById('handshakeCount').textContent = handshakeCount; @@ -6854,7 +6889,11 @@ activeCapture.capturedFile = data.file; } else if (data.file_exists) { const sizeKB = (data.file_size / 1024).toFixed(1); - statusSpan.textContent = 'Capturing... (' + sizeKB + ' KB, ' + elapsedStr + ')'; + let extra = ''; + if (data.handshake_checked && data.handshake_valid === false) { + extra = data.handshake_reason ? ' • ' + data.handshake_reason : ' • No valid handshake yet'; + } + statusSpan.textContent = 'Capturing... (' + sizeKB + ' KB, ' + elapsedStr + ')' + extra; statusSpan.style.color = 'var(--accent-orange)'; } else if (data.status === 'stopped') { statusSpan.textContent = 'Capture stopped'; @@ -10905,6 +10944,13 @@ if (client.score >= 3) { addHighInterestDevice(client, 'wifi'); } + if (isRecordingBaseline) { + fetch('/tscm/feed/wifi', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(client) + }).catch(e => console.error('Baseline feed error:', e)); + } } } @@ -12331,6 +12377,11 @@ const id = item.bssid || item.mac || ''; return `${escapeHtml(name)} ${id ? `${escapeHtml(id)}` : ''}`; } + if (protocol === 'wifi_clients') { + const name = item.vendor || 'WiFi Client'; + const id = item.mac || item.address || ''; + return `${escapeHtml(name)} ${id ? 
`${escapeHtml(id)}` : ''}`; + } if (protocol === 'bluetooth') { const name = item.name || 'Unknown'; const id = item.mac || item.address || ''; @@ -12357,6 +12408,7 @@ const sections = [ { key: 'wifi', label: 'WiFi' }, + { key: 'wifi_clients', label: 'WiFi Clients' }, { key: 'bluetooth', label: 'Bluetooth' }, { key: 'rf', label: 'RF' }, ]; @@ -12759,7 +12811,7 @@ if (data.status === 'success') { document.getElementById('tscmBaselineStatus').textContent = - `Baseline saved: ${data.wifi_count} WiFi, ${data.bt_count} BT, ${data.rf_count} RF`; + `Baseline saved: ${data.wifi_count} WiFi, ${data.wifi_client_count || 0} Clients, ${data.bt_count} BT, ${data.rf_count} RF`; document.getElementById('tscmBaselineStatus').style.color = '#00ff88'; loadTscmBaselines(); } else { @@ -14574,6 +14626,9 @@ + + + diff --git a/templates/partials/modes/wifi.html b/templates/partials/modes/wifi.html index fa7d677..ba23ec6 100644 --- a/templates/partials/modes/wifi.html +++ b/templates/partials/modes/wifi.html @@ -69,7 +69,22 @@
- + + +
+
+ + +
+
+
diff --git a/templates/partials/settings-modal.html b/templates/partials/settings-modal.html index 3448563..b9c951b 100644 --- a/templates/partials/settings-modal.html +++ b/templates/partials/settings-modal.html @@ -15,6 +15,8 @@ + + @@ -280,6 +282,83 @@ + +
+
+
Alert Feed
+
+
No alerts yet
+
+
+ +
+
Quick Rules
+
+ + +
+
+ Use Bluetooth device details to add specific device watchlist alerts. +
+
+
+ + +
+
+
Start Recording
+
+
+ Mode + Record live events for a mode +
+ +
+
+
+ Label + Optional note for the session +
+ +
+
+ + +
+
+ +
+
Active Sessions
+
+
No active recordings
+
+
+ +
+
Recent Recordings
+
+
No recordings yet
+
+
+
+
diff --git a/utils/alerts.py b/utils/alerts.py new file mode 100644 index 0000000..1f52eed --- /dev/null +++ b/utils/alerts.py @@ -0,0 +1,443 @@ +"""Alerting engine for cross-mode events.""" + +from __future__ import annotations + +import json +import logging +import queue +import re +import threading +import time +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any, Generator + +from config import ALERT_WEBHOOK_URL, ALERT_WEBHOOK_TIMEOUT, ALERT_WEBHOOK_SECRET +from utils.database import get_db + +logger = logging.getLogger('intercept.alerts') + + +@dataclass +class AlertRule: + id: int + name: str + mode: str | None + event_type: str | None + match: dict + severity: str + enabled: bool + notify: dict + created_at: str | None = None + + +class AlertManager: + def __init__(self) -> None: + self._queue: queue.Queue = queue.Queue(maxsize=1000) + self._rules_cache: list[AlertRule] = [] + self._rules_loaded_at = 0.0 + self._cache_lock = threading.Lock() + + # ------------------------------------------------------------------ + # Rule management + # ------------------------------------------------------------------ + + def invalidate_cache(self) -> None: + with self._cache_lock: + self._rules_loaded_at = 0.0 + + def _load_rules(self) -> None: + with get_db() as conn: + cursor = conn.execute(''' + SELECT id, name, mode, event_type, match, severity, enabled, notify, created_at + FROM alert_rules + WHERE enabled = 1 + ORDER BY id ASC + ''') + rules: list[AlertRule] = [] + for row in cursor: + match = {} + notify = {} + try: + match = json.loads(row['match']) if row['match'] else {} + except json.JSONDecodeError: + match = {} + try: + notify = json.loads(row['notify']) if row['notify'] else {} + except json.JSONDecodeError: + notify = {} + rules.append(AlertRule( + id=row['id'], + name=row['name'], + mode=row['mode'], + event_type=row['event_type'], + match=match, + severity=row['severity'] or 'medium', + 
enabled=bool(row['enabled']), + notify=notify, + created_at=row['created_at'], + )) + with self._cache_lock: + self._rules_cache = rules + self._rules_loaded_at = time.time() + + def _get_rules(self) -> list[AlertRule]: + with self._cache_lock: + stale = (time.time() - self._rules_loaded_at) > 10 + if stale: + self._load_rules() + with self._cache_lock: + return list(self._rules_cache) + + def list_rules(self, include_disabled: bool = False) -> list[dict]: + with get_db() as conn: + if include_disabled: + cursor = conn.execute(''' + SELECT id, name, mode, event_type, match, severity, enabled, notify, created_at + FROM alert_rules + ORDER BY id DESC + ''') + else: + cursor = conn.execute(''' + SELECT id, name, mode, event_type, match, severity, enabled, notify, created_at + FROM alert_rules + WHERE enabled = 1 + ORDER BY id DESC + ''') + + return [ + { + 'id': row['id'], + 'name': row['name'], + 'mode': row['mode'], + 'event_type': row['event_type'], + 'match': json.loads(row['match']) if row['match'] else {}, + 'severity': row['severity'], + 'enabled': bool(row['enabled']), + 'notify': json.loads(row['notify']) if row['notify'] else {}, + 'created_at': row['created_at'], + } + for row in cursor + ] + + def add_rule(self, rule: dict) -> int: + with get_db() as conn: + cursor = conn.execute(''' + INSERT INTO alert_rules (name, mode, event_type, match, severity, enabled, notify) + VALUES (?, ?, ?, ?, ?, ?, ?) 
+ ''', ( + rule.get('name') or 'Alert Rule', + rule.get('mode'), + rule.get('event_type'), + json.dumps(rule.get('match') or {}), + rule.get('severity') or 'medium', + 1 if rule.get('enabled', True) else 0, + json.dumps(rule.get('notify') or {}), + )) + rule_id = cursor.lastrowid + self.invalidate_cache() + return int(rule_id) + + def update_rule(self, rule_id: int, updates: dict) -> bool: + fields = [] + params = [] + for key in ('name', 'mode', 'event_type', 'severity'): + if key in updates: + fields.append(f"{key} = ?") + params.append(updates[key]) + if 'enabled' in updates: + fields.append('enabled = ?') + params.append(1 if updates['enabled'] else 0) + if 'match' in updates: + fields.append('match = ?') + params.append(json.dumps(updates['match'] or {})) + if 'notify' in updates: + fields.append('notify = ?') + params.append(json.dumps(updates['notify'] or {})) + + if not fields: + return False + + params.append(rule_id) + with get_db() as conn: + cursor = conn.execute( + f"UPDATE alert_rules SET {', '.join(fields)} WHERE id = ?", + params + ) + updated = cursor.rowcount > 0 + + if updated: + self.invalidate_cache() + return updated + + def delete_rule(self, rule_id: int) -> bool: + with get_db() as conn: + cursor = conn.execute('DELETE FROM alert_rules WHERE id = ?', (rule_id,)) + deleted = cursor.rowcount > 0 + if deleted: + self.invalidate_cache() + return deleted + + def list_events(self, limit: int = 100, mode: str | None = None, severity: str | None = None) -> list[dict]: + query = 'SELECT id, rule_id, mode, event_type, severity, title, message, payload, created_at FROM alert_events' + clauses = [] + params: list[Any] = [] + if mode: + clauses.append('mode = ?') + params.append(mode) + if severity: + clauses.append('severity = ?') + params.append(severity) + if clauses: + query += ' WHERE ' + ' AND '.join(clauses) + query += ' ORDER BY id DESC LIMIT ?' 
+ params.append(limit) + + with get_db() as conn: + cursor = conn.execute(query, params) + events = [] + for row in cursor: + events.append({ + 'id': row['id'], + 'rule_id': row['rule_id'], + 'mode': row['mode'], + 'event_type': row['event_type'], + 'severity': row['severity'], + 'title': row['title'], + 'message': row['message'], + 'payload': json.loads(row['payload']) if row['payload'] else {}, + 'created_at': row['created_at'], + }) + return events + + # ------------------------------------------------------------------ + # Event processing + # ------------------------------------------------------------------ + + def process_event(self, mode: str, event: dict, event_type: str | None = None) -> None: + if not isinstance(event, dict): + return + + if event_type in ('keepalive', 'ping', 'status'): + return + + rules = self._get_rules() + if not rules: + return + + for rule in rules: + if rule.mode and rule.mode != mode: + continue + if rule.event_type and event_type and rule.event_type != event_type: + continue + if rule.event_type and not event_type: + continue + if not self._match_rule(rule.match, event): + continue + + title = rule.name or 'Alert' + message = self._build_message(rule, event, event_type) + payload = { + 'mode': mode, + 'event_type': event_type, + 'event': event, + 'rule': { + 'id': rule.id, + 'name': rule.name, + }, + } + event_id = self._store_event(rule.id, mode, event_type, rule.severity, title, message, payload) + alert_payload = { + 'id': event_id, + 'rule_id': rule.id, + 'mode': mode, + 'event_type': event_type, + 'severity': rule.severity, + 'title': title, + 'message': message, + 'payload': payload, + 'created_at': datetime.now(timezone.utc).isoformat(), + } + self._queue_event(alert_payload) + self._maybe_send_webhook(alert_payload, rule.notify) + + def _build_message(self, rule: AlertRule, event: dict, event_type: str | None) -> str: + if isinstance(rule.notify, dict) and rule.notify.get('message'): + return 
str(rule.notify.get('message')) + summary_bits = [] + if event_type: + summary_bits.append(event_type) + if 'name' in event: + summary_bits.append(str(event.get('name'))) + if 'ssid' in event: + summary_bits.append(str(event.get('ssid'))) + if 'bssid' in event: + summary_bits.append(str(event.get('bssid'))) + if 'address' in event: + summary_bits.append(str(event.get('address'))) + if 'mac' in event: + summary_bits.append(str(event.get('mac'))) + summary = ' | '.join(summary_bits) if summary_bits else 'Alert triggered' + return summary + + def _store_event( + self, + rule_id: int, + mode: str, + event_type: str | None, + severity: str, + title: str, + message: str, + payload: dict, + ) -> int: + with get_db() as conn: + cursor = conn.execute(''' + INSERT INTO alert_events (rule_id, mode, event_type, severity, title, message, payload) + VALUES (?, ?, ?, ?, ?, ?, ?) + ''', ( + rule_id, + mode, + event_type, + severity, + title, + message, + json.dumps(payload), + )) + return int(cursor.lastrowid) + + def _queue_event(self, alert_payload: dict) -> None: + try: + self._queue.put_nowait(alert_payload) + except queue.Full: + try: + self._queue.get_nowait() + self._queue.put_nowait(alert_payload) + except queue.Empty: + pass + + def _maybe_send_webhook(self, payload: dict, notify: dict) -> None: + if not ALERT_WEBHOOK_URL: + return + if isinstance(notify, dict) and notify.get('webhook') is False: + return + + try: + import urllib.request + req = urllib.request.Request( + ALERT_WEBHOOK_URL, + data=json.dumps(payload).encode('utf-8'), + headers={ + 'Content-Type': 'application/json', + 'User-Agent': 'Intercept-Alert', + 'X-Alert-Token': ALERT_WEBHOOK_SECRET or '', + }, + method='POST' + ) + with urllib.request.urlopen(req, timeout=ALERT_WEBHOOK_TIMEOUT) as _: + pass + except Exception as e: + logger.debug(f"Alert webhook failed: {e}") + + # ------------------------------------------------------------------ + # Matching + # 
------------------------------------------------------------------ + + def _match_rule(self, rule_match: dict, event: dict) -> bool: + if not rule_match: + return True + + for key, expected in rule_match.items(): + actual = self._extract_value(event, key) + if not self._match_value(actual, expected): + return False + return True + + def _extract_value(self, event: dict, key: str) -> Any: + if '.' not in key: + return event.get(key) + current: Any = event + for part in key.split('.'): + if isinstance(current, dict): + current = current.get(part) + else: + return None + return current + + def _match_value(self, actual: Any, expected: Any) -> bool: + if isinstance(expected, dict) and 'op' in expected: + op = expected.get('op') + value = expected.get('value') + return self._apply_op(op, actual, value) + + if isinstance(expected, list): + return actual in expected + + if isinstance(expected, str): + if actual is None: + return False + return str(actual).lower() == expected.lower() + + return actual == expected + + def _apply_op(self, op: str, actual: Any, value: Any) -> bool: + if op == 'exists': + return actual is not None + if op == 'eq': + return actual == value + if op == 'neq': + return actual != value + if op == 'gt': + return _safe_number(actual) is not None and _safe_number(actual) > _safe_number(value) + if op == 'gte': + return _safe_number(actual) is not None and _safe_number(actual) >= _safe_number(value) + if op == 'lt': + return _safe_number(actual) is not None and _safe_number(actual) < _safe_number(value) + if op == 'lte': + return _safe_number(actual) is not None and _safe_number(actual) <= _safe_number(value) + if op == 'in': + return actual in (value or []) + if op == 'contains': + if actual is None: + return False + if isinstance(actual, list): + return any(str(value).lower() in str(item).lower() for item in actual) + return str(value).lower() in str(actual).lower() + if op == 'regex': + if actual is None or value is None: + return False + try: + 
return re.search(str(value), str(actual)) is not None + except re.error: + return False + return False + + # ------------------------------------------------------------------ + # Streaming + # ------------------------------------------------------------------ + + def stream_events(self, timeout: float = 1.0) -> Generator[dict, None, None]: + while True: + try: + event = self._queue.get(timeout=timeout) + yield event + except queue.Empty: + yield {'type': 'keepalive'} + + +_alert_manager: AlertManager | None = None +_alert_lock = threading.Lock() + + +def get_alert_manager() -> AlertManager: + global _alert_manager + with _alert_lock: + if _alert_manager is None: + _alert_manager = AlertManager() + return _alert_manager + + +def _safe_number(value: Any) -> float | None: + try: + return float(value) + except (TypeError, ValueError): + return None diff --git a/utils/bluetooth/models.py b/utils/bluetooth/models.py index 932342a..2810819 100644 --- a/utils/bluetooth/models.py +++ b/utils/bluetooth/models.py @@ -148,9 +148,10 @@ class BTDeviceAggregate: is_strong_stable: bool = False has_random_address: bool = False - # Baseline tracking - in_baseline: bool = False - baseline_id: Optional[int] = None + # Baseline tracking + in_baseline: bool = False + baseline_id: Optional[int] = None + seen_before: bool = False # Tracker detection fields is_tracker: bool = False @@ -274,9 +275,10 @@ class BTDeviceAggregate: }, 'heuristic_flags': self.heuristic_flags, - # Baseline - 'in_baseline': self.in_baseline, - 'baseline_id': self.baseline_id, + # Baseline + 'in_baseline': self.in_baseline, + 'baseline_id': self.baseline_id, + 'seen_before': self.seen_before, # Tracker detection 'tracker': { @@ -325,10 +327,11 @@ class BTDeviceAggregate: 'last_seen': self.last_seen.isoformat(), 'age_seconds': self.age_seconds, 'seen_count': self.seen_count, - 'heuristic_flags': self.heuristic_flags, - 'in_baseline': self.in_baseline, - # Tracker info for list view - 'is_tracker': self.is_tracker, 
+ 'heuristic_flags': self.heuristic_flags, + 'in_baseline': self.in_baseline, + 'seen_before': self.seen_before, + # Tracker info for list view + 'is_tracker': self.is_tracker, 'tracker_type': self.tracker_type, 'tracker_name': self.tracker_name, 'tracker_confidence': self.tracker_confidence, diff --git a/utils/database.py b/utils/database.py index 6467c27..92b62cc 100644 --- a/utils/database.py +++ b/utils/database.py @@ -88,19 +88,65 @@ def init_db() -> None: ON signal_history(mode, device_id, timestamp) ''') - # Device correlation table - conn.execute(''' - CREATE TABLE IF NOT EXISTS device_correlations ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - wifi_mac TEXT, - bt_mac TEXT, - confidence REAL, - first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - metadata TEXT, - UNIQUE(wifi_mac, bt_mac) - ) - ''') + # Device correlation table + conn.execute(''' + CREATE TABLE IF NOT EXISTS device_correlations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + wifi_mac TEXT, + bt_mac TEXT, + confidence REAL, + first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + metadata TEXT, + UNIQUE(wifi_mac, bt_mac) + ) + ''') + + # Alert rules + conn.execute(''' + CREATE TABLE IF NOT EXISTS alert_rules ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + mode TEXT, + event_type TEXT, + match TEXT, + severity TEXT DEFAULT 'medium', + enabled BOOLEAN DEFAULT 1, + notify TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + ''') + + # Alert events + conn.execute(''' + CREATE TABLE IF NOT EXISTS alert_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + rule_id INTEGER, + mode TEXT, + event_type TEXT, + severity TEXT DEFAULT 'medium', + title TEXT, + message TEXT, + payload TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL + ) + ''') + + # Session recordings + conn.execute(''' + CREATE TABLE IF NOT EXISTS 
recording_sessions ( + id TEXT PRIMARY KEY, + mode TEXT NOT NULL, + label TEXT, + started_at TIMESTAMP NOT NULL, + stopped_at TIMESTAMP, + file_path TEXT NOT NULL, + event_count INTEGER DEFAULT 0, + size_bytes INTEGER DEFAULT 0, + metadata TEXT + ) + ''') # Users table for authentication conn.execute(''' @@ -131,20 +177,29 @@ def init_db() -> None: # ===================================================================== # TSCM Baselines - Environment snapshots for comparison - conn.execute(''' - CREATE TABLE IF NOT EXISTS tscm_baselines ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL, - location TEXT, - description TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - wifi_networks TEXT, - bt_devices TEXT, - rf_frequencies TEXT, - gps_coords TEXT, - is_active BOOLEAN DEFAULT 0 - ) - ''') + conn.execute(''' + CREATE TABLE IF NOT EXISTS tscm_baselines ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + location TEXT, + description TEXT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + wifi_networks TEXT, + wifi_clients TEXT, + bt_devices TEXT, + rf_frequencies TEXT, + gps_coords TEXT, + is_active BOOLEAN DEFAULT 0 + ) + ''') + + # Ensure new columns exist for older databases + try: + columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")} + if 'wifi_clients' not in columns: + conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT') + except Exception as e: + logger.debug(f"Schema update skipped for tscm_baselines: {e}") # TSCM Sweeps - Individual sweep sessions conn.execute(''' @@ -685,15 +740,16 @@ def get_correlations(min_confidence: float = 0.5) -> list[dict]: # TSCM Functions # ============================================================================= -def create_tscm_baseline( - name: str, - location: str | None = None, - description: str | None = None, - wifi_networks: list | None = None, - bt_devices: list | None = None, - rf_frequencies: list | None = None, - gps_coords: dict | 
None = None -) -> int: +def create_tscm_baseline( + name: str, + location: str | None = None, + description: str | None = None, + wifi_networks: list | None = None, + wifi_clients: list | None = None, + bt_devices: list | None = None, + rf_frequencies: list | None = None, + gps_coords: dict | None = None +) -> int: """ Create a new TSCM baseline. @@ -701,19 +757,20 @@ def create_tscm_baseline( The ID of the created baseline """ with get_db() as conn: - cursor = conn.execute(''' - INSERT INTO tscm_baselines - (name, location, description, wifi_networks, bt_devices, rf_frequencies, gps_coords) - VALUES (?, ?, ?, ?, ?, ?, ?) - ''', ( - name, - location, - description, - json.dumps(wifi_networks) if wifi_networks else None, - json.dumps(bt_devices) if bt_devices else None, - json.dumps(rf_frequencies) if rf_frequencies else None, - json.dumps(gps_coords) if gps_coords else None - )) + cursor = conn.execute(''' + INSERT INTO tscm_baselines + (name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
+ ''', ( + name, + location, + description, + json.dumps(wifi_networks) if wifi_networks else None, + json.dumps(wifi_clients) if wifi_clients else None, + json.dumps(bt_devices) if bt_devices else None, + json.dumps(rf_frequencies) if rf_frequencies else None, + json.dumps(gps_coords) if gps_coords else None + )) return cursor.lastrowid @@ -728,18 +785,19 @@ def get_tscm_baseline(baseline_id: int) -> dict | None: if row is None: return None - return { - 'id': row['id'], - 'name': row['name'], - 'location': row['location'], - 'description': row['description'], - 'created_at': row['created_at'], - 'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [], - 'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [], - 'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [], - 'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None, - 'is_active': bool(row['is_active']) - } + return { + 'id': row['id'], + 'name': row['name'], + 'location': row['location'], + 'description': row['description'], + 'created_at': row['created_at'], + 'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [], + 'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [], + 'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [], + 'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [], + 'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None, + 'is_active': bool(row['is_active']) + } def get_all_tscm_baselines() -> list[dict]: @@ -781,19 +839,23 @@ def set_active_tscm_baseline(baseline_id: int) -> bool: return cursor.rowcount > 0 -def update_tscm_baseline( - baseline_id: int, - wifi_networks: list | None = None, - bt_devices: list | None = None, - rf_frequencies: list | None = None -) -> bool: +def update_tscm_baseline( + baseline_id: int, + wifi_networks: list | None = None, + wifi_clients: list 
| None = None, + bt_devices: list | None = None, + rf_frequencies: list | None = None +) -> bool: """Update baseline device lists.""" updates = [] params = [] - if wifi_networks is not None: - updates.append('wifi_networks = ?') - params.append(json.dumps(wifi_networks)) + if wifi_networks is not None: + updates.append('wifi_networks = ?') + params.append(json.dumps(wifi_networks)) + if wifi_clients is not None: + updates.append('wifi_clients = ?') + params.append(json.dumps(wifi_clients)) if bt_devices is not None: updates.append('bt_devices = ?') params.append(json.dumps(bt_devices)) diff --git a/utils/event_pipeline.py b/utils/event_pipeline.py new file mode 100644 index 0000000..cbab8bb --- /dev/null +++ b/utils/event_pipeline.py @@ -0,0 +1,29 @@ +"""Shared event pipeline for alerts and recordings.""" + +from __future__ import annotations + +from typing import Any + +from utils.alerts import get_alert_manager +from utils.recording import get_recording_manager + +IGNORE_TYPES = {'keepalive', 'ping'} + + +def process_event(mode: str, event: dict | Any, event_type: str | None = None) -> None: + if event_type in IGNORE_TYPES: + return + if not isinstance(event, dict): + return + + try: + get_recording_manager().record_event(mode, event, event_type) + except Exception: + # Recording failures should never break streaming + pass + + try: + get_alert_manager().process_event(mode, event, event_type) + except Exception: + # Alert failures should never break streaming + pass diff --git a/utils/recording.py b/utils/recording.py new file mode 100644 index 0000000..dc8ca79 --- /dev/null +++ b/utils/recording.py @@ -0,0 +1,222 @@ +"""Session recording utilities for SSE/event streams.""" + +from __future__ import annotations + +import json +import logging +import threading +import uuid +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +from utils.database import get_db + +logger = 
logging.getLogger('intercept.recording') + +RECORDING_ROOT = Path(__file__).parent.parent / 'instance' / 'recordings' + + +@dataclass +class RecordingSession: + id: str + mode: str + label: str | None + file_path: Path + started_at: datetime + stopped_at: datetime | None = None + event_count: int = 0 + size_bytes: int = 0 + metadata: dict | None = None + + _file_handle: Any | None = None + _lock: threading.Lock = threading.Lock() + + def open(self) -> None: + self.file_path.parent.mkdir(parents=True, exist_ok=True) + self._file_handle = self.file_path.open('a', encoding='utf-8') + + def close(self) -> None: + if self._file_handle: + self._file_handle.flush() + self._file_handle.close() + self._file_handle = None + + def write_event(self, record: dict) -> None: + if not self._file_handle: + self.open() + line = json.dumps(record, ensure_ascii=True) + '\n' + with self._lock: + self._file_handle.write(line) + self._file_handle.flush() + self.event_count += 1 + self.size_bytes += len(line.encode('utf-8')) + + +class RecordingManager: + def __init__(self) -> None: + self._active_by_mode: dict[str, RecordingSession] = {} + self._active_by_id: dict[str, RecordingSession] = {} + self._lock = threading.Lock() + + def start_recording(self, mode: str, label: str | None = None, metadata: dict | None = None) -> RecordingSession: + with self._lock: + existing = self._active_by_mode.get(mode) + if existing: + return existing + + session_id = str(uuid.uuid4()) + started_at = datetime.now(timezone.utc) + filename = f"{mode}_{started_at.strftime('%Y%m%d_%H%M%S')}_{session_id}.jsonl" + file_path = RECORDING_ROOT / mode / filename + + session = RecordingSession( + id=session_id, + mode=mode, + label=label, + file_path=file_path, + started_at=started_at, + metadata=metadata or {}, + ) + session.open() + + self._active_by_mode[mode] = session + self._active_by_id[session_id] = session + + with get_db() as conn: + conn.execute(''' + INSERT INTO recording_sessions + (id, mode, label, 
started_at, file_path, event_count, size_bytes, metadata) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + ''', ( + session.id, + session.mode, + session.label, + session.started_at.isoformat(), + str(session.file_path), + session.event_count, + session.size_bytes, + json.dumps(session.metadata or {}), + )) + + return session + + def stop_recording(self, mode: str | None = None, session_id: str | None = None) -> RecordingSession | None: + with self._lock: + session = None + if session_id: + session = self._active_by_id.get(session_id) + elif mode: + session = self._active_by_mode.get(mode) + + if not session: + return None + + session.stopped_at = datetime.now(timezone.utc) + session.close() + + self._active_by_mode.pop(session.mode, None) + self._active_by_id.pop(session.id, None) + + with get_db() as conn: + conn.execute(''' + UPDATE recording_sessions + SET stopped_at = ?, event_count = ?, size_bytes = ? + WHERE id = ? + ''', ( + session.stopped_at.isoformat(), + session.event_count, + session.size_bytes, + session.id, + )) + + return session + + def record_event(self, mode: str, event: dict, event_type: str | None = None) -> None: + if event_type in ('keepalive', 'ping'): + return + session = self._active_by_mode.get(mode) + if not session: + return + record = { + 'timestamp': datetime.now(timezone.utc).isoformat(), + 'mode': mode, + 'event_type': event_type, + 'event': event, + } + try: + session.write_event(record) + except Exception as e: + logger.debug(f"Recording write failed: {e}") + + def list_recordings(self, limit: int = 50) -> list[dict]: + with get_db() as conn: + cursor = conn.execute(''' + SELECT id, mode, label, started_at, stopped_at, file_path, event_count, size_bytes, metadata + FROM recording_sessions + ORDER BY started_at DESC + LIMIT ? 
+ ''', (limit,)) + rows = [] + for row in cursor: + rows.append({ + 'id': row['id'], + 'mode': row['mode'], + 'label': row['label'], + 'started_at': row['started_at'], + 'stopped_at': row['stopped_at'], + 'file_path': row['file_path'], + 'event_count': row['event_count'], + 'size_bytes': row['size_bytes'], + 'metadata': json.loads(row['metadata']) if row['metadata'] else {}, + }) + return rows + + def get_recording(self, session_id: str) -> dict | None: + with get_db() as conn: + cursor = conn.execute(''' + SELECT id, mode, label, started_at, stopped_at, file_path, event_count, size_bytes, metadata + FROM recording_sessions + WHERE id = ? + ''', (session_id,)) + row = cursor.fetchone() + if not row: + return None + return { + 'id': row['id'], + 'mode': row['mode'], + 'label': row['label'], + 'started_at': row['started_at'], + 'stopped_at': row['stopped_at'], + 'file_path': row['file_path'], + 'event_count': row['event_count'], + 'size_bytes': row['size_bytes'], + 'metadata': json.loads(row['metadata']) if row['metadata'] else {}, + } + + def get_active(self) -> list[dict]: + with self._lock: + sessions = [] + for session in self._active_by_mode.values(): + sessions.append({ + 'id': session.id, + 'mode': session.mode, + 'label': session.label, + 'started_at': session.started_at.isoformat(), + 'event_count': session.event_count, + 'size_bytes': session.size_bytes, + }) + return sessions + + +_recording_manager: RecordingManager | None = None +_recording_lock = threading.Lock() + + +def get_recording_manager() -> RecordingManager: + global _recording_manager + with _recording_lock: + if _recording_manager is None: + _recording_manager = RecordingManager() + return _recording_manager diff --git a/utils/tscm/advanced.py b/utils/tscm/advanced.py index b36efd1..91d17cb 100644 --- a/utils/tscm/advanced.py +++ b/utils/tscm/advanced.py @@ -523,20 +523,22 @@ class BaselineDiff: } -def calculate_baseline_diff( - baseline: dict, - current_wifi: list[dict], - current_bt: 
list[dict],
-    current_rf: list[dict],
-    sweep_id: int
-) -> BaselineDiff:
+def calculate_baseline_diff(
+    baseline: dict,
+    current_wifi: list[dict],
+    current_wifi_clients: list[dict],
+    current_bt: list[dict],
+    current_rf: list[dict],
+    sweep_id: int
+) -> BaselineDiff:
     """
     Calculate comprehensive diff between baseline and current scan.
 
     Args:
         baseline: Baseline dict from database
         current_wifi: Current WiFi devices
-        current_bt: Current Bluetooth devices
+        current_wifi_clients: Current WiFi clients
+        current_bt: Current Bluetooth devices
         current_rf: Current RF signals
         sweep_id: Current sweep ID
 
@@ -564,11 +566,16 @@ def calculate_baseline_diff(
     diff.is_stale = diff.baseline_age_hours > 72
 
     # Build baseline lookup dicts
-    baseline_wifi = {
-        d.get('bssid', d.get('mac', '')).upper(): d
-        for d in baseline.get('wifi_networks', [])
-        if d.get('bssid') or d.get('mac')
-    }
+    baseline_wifi = {
+        d.get('bssid', d.get('mac', '')).upper(): d
+        for d in baseline.get('wifi_networks', [])
+        if d.get('bssid') or d.get('mac')
+    }
+    baseline_wifi_clients = {
+        d.get('mac', d.get('address', '')).upper(): d
+        for d in baseline.get('wifi_clients', [])
+        if d.get('mac') or d.get('address')
+    }
     baseline_bt = {
         d.get('mac', d.get('address', '')).upper(): d
         for d in baseline.get('bt_devices', [])
@@ -580,8 +587,11 @@ def calculate_baseline_diff(
         if d.get('frequency')
     }
 
-    # Compare WiFi
-    _compare_wifi(diff, baseline_wifi, current_wifi)
+    # Compare WiFi
+    _compare_wifi(diff, baseline_wifi, current_wifi)
+
+    # Compare WiFi clients
+    _compare_wifi_clients(diff, baseline_wifi_clients, current_wifi_clients)
 
     # Compare Bluetooth
     _compare_bluetooth(diff, baseline_bt, current_bt)
@@ -607,7 +617,48 @@ def calculate_baseline_diff(
     return diff
 
 
-def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
+def _compare_wifi_clients(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
+    """Compare WiFi clients between baseline and current."""
+    current_macs = {
+        d.get('mac', d.get('address', '')).upper(): d
+        for d in current
+        if d.get('mac') or d.get('address')
+    }
+
+    # Find new clients
+    for mac, device in current_macs.items():
+        if mac not in baseline:
+            name = device.get('vendor', 'WiFi Client')
+            diff.new_devices.append(DeviceChange(
+                identifier=mac,
+                protocol='wifi_client',
+                change_type='new',
+                description=f'New WiFi client: {name}',
+                expected=False,
+                details={
+                    'vendor': name,
+                    'rssi': device.get('rssi'),
+                    'associated_bssid': device.get('associated_bssid'),
+                }
+            ))
+
+    # Find missing clients
+    for mac, device in baseline.items():
+        if mac not in current_macs:
+            name = device.get('vendor', 'WiFi Client')
+            diff.missing_devices.append(DeviceChange(
+                identifier=mac,
+                protocol='wifi_client',
+                change_type='missing',
+                description=f'Missing WiFi client: {name}',
+                expected=True,
+                details={
+                    'vendor': name,
+                }
+            ))
+
+
+def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
     """Compare WiFi devices between baseline and current."""
     current_macs = {
         d.get('bssid', d.get('mac', '')).upper(): d
@@ -630,7 +681,7 @@ def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> No
                     'channel': device.get('channel'),
                     'rssi': device.get('power', device.get('signal')),
                 }
-            ))
+            ))
         else:
             # Check for changes
             baseline_dev = baseline[mac]
@@ -796,11 +847,12 @@ def _calculate_baseline_health(diff: BaselineDiff, baseline: dict) -> None:
         reasons.append(f"Baseline is {diff.baseline_age_hours:.0f} hours old")
 
     # Device churn penalty
-    total_baseline = (
-        len(baseline.get('wifi_networks', [])) +
-        len(baseline.get('bt_devices', [])) +
-        len(baseline.get('rf_frequencies', []))
-    )
+    total_baseline = (
+        len(baseline.get('wifi_networks', [])) +
+        len(baseline.get('wifi_clients', [])) +
+        len(baseline.get('bt_devices', [])) +
+        len(baseline.get('rf_frequencies', []))
+    )
 
     if total_baseline > 0:
         churn_rate = (diff.total_new + diff.total_missing) / total_baseline
diff --git 
a/utils/tscm/baseline.py b/utils/tscm/baseline.py index 4cb0462..facbd02 100644 --- a/utils/tscm/baseline.py +++ b/utils/tscm/baseline.py @@ -26,12 +26,13 @@ class BaselineRecorder: Records and manages TSCM environment baselines. """ - def __init__(self): - self.recording = False - self.current_baseline_id: int | None = None - self.wifi_networks: dict[str, dict] = {} # BSSID -> network info - self.bt_devices: dict[str, dict] = {} # MAC -> device info - self.rf_frequencies: dict[float, dict] = {} # Frequency -> signal info + def __init__(self): + self.recording = False + self.current_baseline_id: int | None = None + self.wifi_networks: dict[str, dict] = {} # BSSID -> network info + self.wifi_clients: dict[str, dict] = {} # MAC -> client info + self.bt_devices: dict[str, dict] = {} # MAC -> device info + self.rf_frequencies: dict[float, dict] = {} # Frequency -> signal info def start_recording( self, @@ -50,10 +51,11 @@ class BaselineRecorder: Returns: Baseline ID """ - self.recording = True - self.wifi_networks = {} - self.bt_devices = {} - self.rf_frequencies = {} + self.recording = True + self.wifi_networks = {} + self.wifi_clients = {} + self.bt_devices = {} + self.rf_frequencies = {} # Create baseline in database self.current_baseline_id = create_tscm_baseline( @@ -78,24 +80,27 @@ class BaselineRecorder: self.recording = False # Convert to lists for storage - wifi_list = list(self.wifi_networks.values()) - bt_list = list(self.bt_devices.values()) - rf_list = list(self.rf_frequencies.values()) + wifi_list = list(self.wifi_networks.values()) + wifi_client_list = list(self.wifi_clients.values()) + bt_list = list(self.bt_devices.values()) + rf_list = list(self.rf_frequencies.values()) # Update database - update_tscm_baseline( - self.current_baseline_id, - wifi_networks=wifi_list, - bt_devices=bt_list, - rf_frequencies=rf_list - ) + update_tscm_baseline( + self.current_baseline_id, + wifi_networks=wifi_list, + wifi_clients=wifi_client_list, + bt_devices=bt_list, + 
rf_frequencies=rf_list + ) - summary = { - 'baseline_id': self.current_baseline_id, - 'wifi_count': len(wifi_list), - 'bt_count': len(bt_list), - 'rf_count': len(rf_list), - } + summary = { + 'baseline_id': self.current_baseline_id, + 'wifi_count': len(wifi_list), + 'wifi_client_count': len(wifi_client_list), + 'bt_count': len(bt_list), + 'rf_count': len(rf_list), + } logger.info( f"Baseline recording complete: {summary['wifi_count']} WiFi, " @@ -135,8 +140,8 @@ class BaselineRecorder: 'last_seen': datetime.now().isoformat(), } - def add_bt_device(self, device: dict) -> None: - """Add a Bluetooth device to the current baseline.""" + def add_bt_device(self, device: dict) -> None: + """Add a Bluetooth device to the current baseline.""" if not self.recording: return @@ -150,7 +155,7 @@ class BaselineRecorder: 'rssi': device.get('rssi', self.bt_devices[mac].get('rssi')), }) else: - self.bt_devices[mac] = { + self.bt_devices[mac] = { 'mac': mac, 'name': device.get('name', ''), 'rssi': device.get('rssi', device.get('signal')), @@ -158,10 +163,37 @@ class BaselineRecorder: 'type': device.get('type', ''), 'first_seen': datetime.now().isoformat(), 'last_seen': datetime.now().isoformat(), - } - - def add_rf_signal(self, signal: dict) -> None: - """Add an RF signal to the current baseline.""" + } + + def add_wifi_client(self, client: dict) -> None: + """Add a WiFi client to the current baseline.""" + if not self.recording: + return + + mac = client.get('mac', client.get('address', '')).upper() + if not mac: + return + + if mac in self.wifi_clients: + self.wifi_clients[mac].update({ + 'last_seen': datetime.now().isoformat(), + 'rssi': client.get('rssi', self.wifi_clients[mac].get('rssi')), + 'associated_bssid': client.get('associated_bssid', self.wifi_clients[mac].get('associated_bssid')), + }) + else: + self.wifi_clients[mac] = { + 'mac': mac, + 'vendor': client.get('vendor', ''), + 'rssi': client.get('rssi'), + 'associated_bssid': client.get('associated_bssid'), + 
'probed_ssids': client.get('probed_ssids', []), + 'probe_count': client.get('probe_count', len(client.get('probed_ssids', []))), + 'first_seen': datetime.now().isoformat(), + 'last_seen': datetime.now().isoformat(), + } + + def add_rf_signal(self, signal: dict) -> None: + """Add an RF signal to the current baseline.""" if not self.recording: return @@ -191,15 +223,16 @@ class BaselineRecorder: 'hit_count': 1, } - def get_recording_status(self) -> dict: - """Get current recording status and counts.""" - return { - 'recording': self.recording, - 'baseline_id': self.current_baseline_id, - 'wifi_count': len(self.wifi_networks), - 'bt_count': len(self.bt_devices), - 'rf_count': len(self.rf_frequencies), - } + def get_recording_status(self) -> dict: + """Get current recording status and counts.""" + return { + 'recording': self.recording, + 'baseline_id': self.current_baseline_id, + 'wifi_count': len(self.wifi_networks), + 'wifi_client_count': len(self.wifi_clients), + 'bt_count': len(self.bt_devices), + 'rf_count': len(self.rf_frequencies), + } class BaselineComparator: @@ -220,11 +253,16 @@ class BaselineComparator: for d in baseline.get('wifi_networks', []) if d.get('bssid') or d.get('mac') } - self.baseline_bt = { - d.get('mac', d.get('address', '')).upper(): d - for d in baseline.get('bt_devices', []) - if d.get('mac') or d.get('address') - } + self.baseline_bt = { + d.get('mac', d.get('address', '')).upper(): d + for d in baseline.get('bt_devices', []) + if d.get('mac') or d.get('address') + } + self.baseline_wifi_clients = { + d.get('mac', d.get('address', '')).upper(): d + for d in baseline.get('wifi_clients', []) + if d.get('mac') or d.get('address') + } self.baseline_rf = { round(d.get('frequency', 0), 1): d for d in baseline.get('rf_frequencies', []) @@ -269,8 +307,8 @@ class BaselineComparator: 'matching_count': len(matching_devices), } - def compare_bluetooth(self, current_devices: list[dict]) -> dict: - """Compare current Bluetooth devices against 
baseline.""" + def compare_bluetooth(self, current_devices: list[dict]) -> dict: + """Compare current Bluetooth devices against baseline.""" current_macs = { d.get('mac', d.get('address', '')).upper(): d for d in current_devices @@ -291,14 +329,45 @@ class BaselineComparator: if mac not in current_macs: missing_devices.append(device) - return { - 'new': new_devices, - 'missing': missing_devices, - 'matching': matching_devices, - 'new_count': len(new_devices), - 'missing_count': len(missing_devices), - 'matching_count': len(matching_devices), - } + return { + 'new': new_devices, + 'missing': missing_devices, + 'matching': matching_devices, + 'new_count': len(new_devices), + 'missing_count': len(missing_devices), + 'matching_count': len(matching_devices), + } + + def compare_wifi_clients(self, current_devices: list[dict]) -> dict: + """Compare current WiFi clients against baseline.""" + current_macs = { + d.get('mac', d.get('address', '')).upper(): d + for d in current_devices + if d.get('mac') or d.get('address') + } + + new_devices = [] + missing_devices = [] + matching_devices = [] + + for mac, device in current_macs.items(): + if mac not in self.baseline_wifi_clients: + new_devices.append(device) + else: + matching_devices.append(device) + + for mac, device in self.baseline_wifi_clients.items(): + if mac not in current_macs: + missing_devices.append(device) + + return { + 'new': new_devices, + 'missing': missing_devices, + 'matching': matching_devices, + 'new_count': len(new_devices), + 'missing_count': len(missing_devices), + 'matching_count': len(matching_devices), + } def compare_rf(self, current_signals: list[dict]) -> dict: """Compare current RF signals against baseline.""" @@ -331,35 +400,42 @@ class BaselineComparator: 'matching_count': len(matching_signals), } - def compare_all( - self, - wifi_devices: list[dict] | None = None, - bt_devices: list[dict] | None = None, - rf_signals: list[dict] | None = None - ) -> dict: + def compare_all( + self, + 
wifi_devices: list[dict] | None = None, + wifi_clients: list[dict] | None = None, + bt_devices: list[dict] | None = None, + rf_signals: list[dict] | None = None + ) -> dict: """ Compare all current data against baseline. Returns: Dict with comparison results for each category """ - results = { - 'wifi': None, - 'bluetooth': None, - 'rf': None, - 'total_new': 0, - 'total_missing': 0, - } + results = { + 'wifi': None, + 'wifi_clients': None, + 'bluetooth': None, + 'rf': None, + 'total_new': 0, + 'total_missing': 0, + } - if wifi_devices is not None: - results['wifi'] = self.compare_wifi(wifi_devices) - results['total_new'] += results['wifi']['new_count'] - results['total_missing'] += results['wifi']['missing_count'] - - if bt_devices is not None: - results['bluetooth'] = self.compare_bluetooth(bt_devices) - results['total_new'] += results['bluetooth']['new_count'] - results['total_missing'] += results['bluetooth']['missing_count'] + if wifi_devices is not None: + results['wifi'] = self.compare_wifi(wifi_devices) + results['total_new'] += results['wifi']['new_count'] + results['total_missing'] += results['wifi']['missing_count'] + + if wifi_clients is not None: + results['wifi_clients'] = self.compare_wifi_clients(wifi_clients) + results['total_new'] += results['wifi_clients']['new_count'] + results['total_missing'] += results['wifi_clients']['missing_count'] + + if bt_devices is not None: + results['bluetooth'] = self.compare_bluetooth(bt_devices) + results['total_new'] += results['bluetooth']['new_count'] + results['total_missing'] += results['bluetooth']['missing_count'] if rf_signals is not None: results['rf'] = self.compare_rf(rf_signals) @@ -369,11 +445,12 @@ class BaselineComparator: return results -def get_comparison_for_active_baseline( - wifi_devices: list[dict] | None = None, - bt_devices: list[dict] | None = None, - rf_signals: list[dict] | None = None -) -> dict | None: +def get_comparison_for_active_baseline( + wifi_devices: list[dict] | None = None, + 
wifi_clients: list[dict] | None = None, + bt_devices: list[dict] | None = None, + rf_signals: list[dict] | None = None +) -> dict | None: """ Convenience function to compare against the active baseline. @@ -385,4 +462,4 @@ def get_comparison_for_active_baseline( return None comparator = BaselineComparator(baseline) - return comparator.compare_all(wifi_devices, bt_devices, rf_signals) + return comparator.compare_all(wifi_devices, wifi_clients, bt_devices, rf_signals) diff --git a/utils/tscm/detector.py b/utils/tscm/detector.py index 4245706..fa94518 100644 --- a/utils/tscm/detector.py +++ b/utils/tscm/detector.py @@ -113,14 +113,18 @@ class ThreatDetector: def _load_baseline(self, baseline: dict) -> None: """Load baseline device identifiers for comparison.""" - # WiFi networks and clients - for network in baseline.get('wifi_networks', []): - if 'bssid' in network: - self.baseline_wifi_macs.add(network['bssid'].upper()) - if 'clients' in network: - for client in network['clients']: - if 'mac' in client: - self.baseline_wifi_macs.add(client['mac'].upper()) + # WiFi networks and clients + for network in baseline.get('wifi_networks', []): + if 'bssid' in network: + self.baseline_wifi_macs.add(network['bssid'].upper()) + if 'clients' in network: + for client in network['clients']: + if 'mac' in client: + self.baseline_wifi_macs.add(client['mac'].upper()) + + for client in baseline.get('wifi_clients', []): + if 'mac' in client: + self.baseline_wifi_macs.add(client['mac'].upper()) # Bluetooth devices for device in baseline.get('bt_devices', []): diff --git a/utils/wifi/scanner.py b/utils/wifi/scanner.py index d9e6032..9951107 100644 --- a/utils/wifi/scanner.py +++ b/utils/wifi/scanner.py @@ -662,12 +662,13 @@ class UnifiedWiFiScanner: # Deep Scan (airodump-ng) # ========================================================================= - def start_deep_scan( - self, - interface: Optional[str] = None, - band: str = 'all', - channel: Optional[int] = None, - ) -> bool: + def 
start_deep_scan( + self, + interface: Optional[str] = None, + band: str = 'all', + channel: Optional[int] = None, + channels: Optional[list[int]] = None, + ) -> bool: """ Start continuous deep scan with airodump-ng. @@ -700,11 +701,11 @@ class UnifiedWiFiScanner: # Start airodump-ng in background thread self._deep_scan_stop_event.clear() - self._deep_scan_thread = threading.Thread( - target=self._run_deep_scan, - args=(iface, band, channel), - daemon=True, - ) + self._deep_scan_thread = threading.Thread( + target=self._run_deep_scan, + args=(iface, band, channel, channels), + daemon=True, + ) self._deep_scan_thread.start() self._status = WiFiScanStatus( @@ -766,8 +767,14 @@ class UnifiedWiFiScanner: return True - def _run_deep_scan(self, interface: str, band: str, channel: Optional[int]): - """Background thread for running airodump-ng.""" + def _run_deep_scan( + self, + interface: str, + band: str, + channel: Optional[int], + channels: Optional[list[int]], + ): + """Background thread for running airodump-ng.""" from .parsers.airodump import parse_airodump_csv import tempfile @@ -779,12 +786,14 @@ class UnifiedWiFiScanner: # Build command cmd = ['airodump-ng', '-w', output_prefix, '--output-format', 'csv'] - if channel: - cmd.extend(['-c', str(channel)]) - elif band == '2.4': - cmd.extend(['--band', 'bg']) - elif band == '5': - cmd.extend(['--band', 'a']) + if channels: + cmd.extend(['-c', ','.join(str(c) for c in channels)]) + elif channel: + cmd.extend(['-c', str(channel)]) + elif band == '2.4': + cmd.extend(['--band', 'bg']) + elif band == '5': + cmd.extend(['--band', 'a']) cmd.append(interface)