Add alerts/recording, WiFi/TSCM updates, optimize waterfall

This commit is contained in:
Smittix
2026-02-07 18:29:58 +00:00
parent 4bbc00b765
commit 86e4ba7e29
42 changed files with 2514 additions and 485 deletions

View File

@@ -27,8 +27,10 @@ def register_blueprints(app):
from .updater import updater_bp
from .sstv import sstv_bp
from .sstv_general import sstv_general_bp
from .dmr import dmr_bp
from .websdr import websdr_bp
from .dmr import dmr_bp
from .websdr import websdr_bp
from .alerts import alerts_bp
from .recordings import recordings_bp
app.register_blueprint(pager_bp)
app.register_blueprint(sensor_bp)
@@ -55,8 +57,10 @@ def register_blueprints(app):
app.register_blueprint(updater_bp) # GitHub update checking
app.register_blueprint(sstv_bp) # ISS SSTV decoder
app.register_blueprint(sstv_general_bp) # General terrestrial SSTV
app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice
app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR
app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice
app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR
app.register_blueprint(alerts_bp) # Cross-mode alerts
app.register_blueprint(recordings_bp) # Session recordings
# Initialize TSCM state with queue and lock from app
import app as app_module

View File

@@ -20,7 +20,8 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.logging import sensor_logger as logger
from utils.validation import validate_device_index, validate_gain, validate_ppm
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.constants import (
PROCESS_TERMINATE_TIMEOUT,
SSE_KEEPALIVE_INTERVAL,
@@ -391,9 +392,13 @@ def stream_acars() -> Response:
while True:
try:
msg = app_module.acars_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
yield format_sse(msg)
msg = app_module.acars_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('acars', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:

View File

@@ -43,6 +43,7 @@ from utils.validation import (
validate_rtl_tcp_host, validate_rtl_tcp_port
)
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.sdr import SDRFactory, SDRType
from utils.constants import (
ADSB_SBS_PORT,
@@ -843,6 +844,10 @@ def stream_adsb():
try:
msg = app_module.adsb_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('adsb', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()

View File

@@ -19,6 +19,7 @@ from config import SHARED_OBSERVER_LOCATION_ENABLED
from utils.logging import get_logger
from utils.validation import validate_device_index, validate_gain
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.sdr import SDRFactory, SDRType
from utils.constants import (
AIS_TCP_PORT,
@@ -484,6 +485,10 @@ def stream_ais():
try:
msg = app_module.ais_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('ais', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()

76
routes/alerts.py Normal file
View File

@@ -0,0 +1,76 @@
"""Alerting API endpoints."""
from __future__ import annotations
import queue
import time
from typing import Generator
from flask import Blueprint, Response, jsonify, request
from utils.alerts import get_alert_manager
from utils.sse import format_sse
alerts_bp = Blueprint('alerts', __name__, url_prefix='/alerts')
@alerts_bp.route('/rules', methods=['GET'])
def list_rules():
    """Return configured alert rules; pass ?all=1 to include disabled rules."""
    show_all = request.args.get('all') in ('1', 'true', 'yes')
    return jsonify({
        'status': 'success',
        'rules': get_alert_manager().list_rules(include_disabled=show_all),
    })
@alerts_bp.route('/rules', methods=['POST'])
def create_rule():
    """Create an alert rule from the JSON body; 'match' must be an object."""
    payload = request.get_json() or {}
    match_spec = payload.get('match', {})
    if not isinstance(match_spec, dict):
        return jsonify({'status': 'error', 'message': 'match must be a JSON object'}), 400
    new_id = get_alert_manager().add_rule(payload)
    return jsonify({'status': 'success', 'rule_id': new_id})
@alerts_bp.route('/rules/<int:rule_id>', methods=['PUT', 'PATCH'])
def update_rule(rule_id: int):
    """Apply updates from the JSON body to a rule; 404 when nothing changed."""
    payload = request.get_json() or {}
    if not get_alert_manager().update_rule(rule_id, payload):
        return jsonify({'status': 'error', 'message': 'Rule not found or no changes'}), 404
    return jsonify({'status': 'success'})
@alerts_bp.route('/rules/<int:rule_id>', methods=['DELETE'])
def delete_rule(rule_id: int):
    """Delete the rule with the given id; 404 when it does not exist."""
    if not get_alert_manager().delete_rule(rule_id):
        return jsonify({'status': 'error', 'message': 'Rule not found'}), 404
    return jsonify({'status': 'success'})
@alerts_bp.route('/events', methods=['GET'])
def list_events():
    """List recent alert events, optionally filtered by mode and severity."""
    events = get_alert_manager().list_events(
        limit=request.args.get('limit', default=100, type=int),
        mode=request.args.get('mode'),
        severity=request.args.get('severity'),
    )
    return jsonify({'status': 'success', 'events': events})
@alerts_bp.route('/stream', methods=['GET'])
def stream_alerts() -> Response:
    """SSE endpoint that streams alert events as they fire."""
    manager = get_alert_manager()

    def generate() -> Generator[str, None, None]:
        # Block up to 1s per event so the generator stays responsive.
        for evt in manager.stream_events(timeout=1.0):
            yield format_sse(evt)

    resp = Response(generate(), mimetype='text/event-stream')
    resp.headers['Cache-Control'] = 'no-cache'
    resp.headers['X-Accel-Buffering'] = 'no'
    resp.headers['Connection'] = 'keep-alive'
    return resp

View File

@@ -21,7 +21,8 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.logging import sensor_logger as logger
from utils.validation import validate_device_index, validate_gain, validate_ppm
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.constants import (
PROCESS_TERMINATE_TIMEOUT,
SSE_KEEPALIVE_INTERVAL,
@@ -1725,9 +1726,13 @@ def stream_aprs() -> Response:
while True:
try:
msg = app_module.aprs_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
yield format_sse(msg)
msg = app_module.aprs_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('aprs', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:

View File

@@ -18,10 +18,11 @@ from typing import Any, Generator
from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.dependencies import check_tool
from utils.logging import bluetooth_logger as logger
from utils.sse import format_sse
from utils.validation import validate_bluetooth_interface
from utils.dependencies import check_tool
from utils.logging import bluetooth_logger as logger
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.validation import validate_bluetooth_interface
from data.oui import OUI_DATABASE, load_oui_database, get_manufacturer
from data.patterns import AIRTAG_PREFIXES, TILE_PREFIXES, SAMSUNG_TRACKER
from utils.constants import (
@@ -561,9 +562,13 @@ def stream_bt():
while True:
try:
msg = app_module.bt_queue.get(timeout=1)
last_keepalive = time.time()
yield format_sse(msg)
msg = app_module.bt_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('bluetooth', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:

View File

@@ -7,32 +7,40 @@ aggregation, and heuristics.
from __future__ import annotations
import csv
import io
import json
import logging
import csv
import io
import json
import logging
import threading
import time
from datetime import datetime
from typing import Generator
from flask import Blueprint, Response, jsonify, request, session
from utils.bluetooth import (
BluetoothScanner,
BTDeviceAggregate,
get_bluetooth_scanner,
check_capabilities,
RANGE_UNKNOWN,
from utils.bluetooth import (
BluetoothScanner,
BTDeviceAggregate,
get_bluetooth_scanner,
check_capabilities,
RANGE_UNKNOWN,
TrackerType,
TrackerConfidence,
get_tracker_engine,
)
from utils.database import get_db
from utils.sse import format_sse
)
from utils.database import get_db
from utils.sse import format_sse
from utils.event_pipeline import process_event
logger = logging.getLogger('intercept.bluetooth_v2')
# Blueprint
bluetooth_v2_bp = Blueprint('bluetooth_v2', __name__, url_prefix='/api/bluetooth')
bluetooth_v2_bp = Blueprint('bluetooth_v2', __name__, url_prefix='/api/bluetooth')
# Seen-before tracking
_bt_seen_cache: set[str] = set()
_bt_session_seen: set[str] = set()
_bt_seen_lock = threading.Lock()
# =============================================================================
# DATABASE FUNCTIONS
@@ -164,13 +172,20 @@ def get_all_baselines() -> list[dict]:
return [dict(row) for row in cursor]
def save_observation_history(device: BTDeviceAggregate) -> None:
"""Save device observation to history."""
with get_db() as conn:
conn.execute('''
INSERT INTO bt_observation_history (device_id, rssi, seen_count)
VALUES (?, ?, ?)
''', (device.device_id, device.rssi_current, device.seen_count))
def save_observation_history(device: BTDeviceAggregate) -> None:
"""Save device observation to history."""
with get_db() as conn:
conn.execute('''
INSERT INTO bt_observation_history (device_id, rssi, seen_count)
VALUES (?, ?, ?)
''', (device.device_id, device.rssi_current, device.seen_count))
def load_seen_device_ids() -> set[str]:
    """Return every distinct device ID recorded in bt_observation_history."""
    query = 'SELECT DISTINCT device_id FROM bt_observation_history'
    with get_db() as conn:
        rows = conn.execute(query)
        return {row['device_id'] for row in rows}
# =============================================================================
@@ -191,7 +206,7 @@ def get_capabilities():
@bluetooth_v2_bp.route('/scan/start', methods=['POST'])
def start_scan():
def start_scan():
"""
Start Bluetooth scanning.
@@ -221,17 +236,42 @@ def start_scan():
# Get scanner instance
scanner = get_bluetooth_scanner(adapter_id)
# Check if already scanning
if scanner.is_scanning:
return jsonify({
'status': 'already_running',
'scan_status': scanner.get_status().to_dict()
})
# Initialize database tables if needed
init_bt_tables()
# Load active baseline if exists
# Initialize database tables if needed
init_bt_tables()
def _handle_seen_before(device: BTDeviceAggregate) -> None:
try:
with _bt_seen_lock:
device.seen_before = device.device_id in _bt_seen_cache
if device.device_id not in _bt_session_seen:
save_observation_history(device)
_bt_session_seen.add(device.device_id)
except Exception as e:
logger.debug(f"BT seen-before update failed: {e}")
# Setup seen-before callback
if scanner._on_device_updated is None:
scanner._on_device_updated = _handle_seen_before
# Ensure cache is initialized
with _bt_seen_lock:
if not _bt_seen_cache:
_bt_seen_cache.update(load_seen_device_ids())
# Check if already scanning
if scanner.is_scanning:
return jsonify({
'status': 'already_running',
'scan_status': scanner.get_status().to_dict()
})
# Refresh seen-before cache and reset session set for a new scan
with _bt_seen_lock:
_bt_seen_cache.clear()
_bt_seen_cache.update(load_seen_device_ids())
_bt_session_seen.clear()
# Load active baseline if exists
baseline_id = get_active_baseline_id()
if baseline_id:
device_ids = get_baseline_device_ids(baseline_id)
@@ -856,11 +896,15 @@ def stream_events():
else:
return event_type, event
def event_generator() -> Generator[str, None, None]:
"""Generate SSE events from scanner."""
for event in scanner.stream_events(timeout=1.0):
event_name, event_data = map_event_type(event)
yield format_sse(event_data, event=event_name)
def event_generator() -> Generator[str, None, None]:
"""Generate SSE events from scanner."""
for event in scanner.stream_events(timeout=1.0):
event_name, event_data = map_event_type(event)
try:
process_event('bluetooth', event_data, event_name)
except Exception:
pass
yield format_sse(event_data, event=event_name)
return Response(
event_generator(),

View File

@@ -18,6 +18,7 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.logging import get_logger
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.process import register_process, unregister_process
from utils.constants import (
SSE_QUEUE_TIMEOUT,
@@ -495,6 +496,10 @@ def stream_dmr() -> Response:
try:
msg = dmr_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('dmr', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()

View File

@@ -36,6 +36,7 @@ from utils.database import (
)
from utils.dsc.parser import parse_dsc_message
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.validation import validate_device_index, validate_gain
from utils.sdr import SDRFactory, SDRType
from utils.dependencies import get_tool_path
@@ -525,6 +526,10 @@ def stream() -> Response:
try:
msg = app_module.dsc_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('dsc', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()

View File

@@ -19,7 +19,8 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.logging import get_logger
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.constants import (
SSE_QUEUE_TIMEOUT,
SSE_KEEPALIVE_INTERVAL,
@@ -1180,9 +1181,13 @@ def stream_scanner_events() -> Response:
while True:
try:
msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
yield format_sse(msg)
msg = scanner_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('listening_scanner', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:
@@ -1514,14 +1519,15 @@ waterfall_thread: Optional[threading.Thread] = None
waterfall_running = False
waterfall_lock = threading.Lock()
waterfall_queue: queue.Queue = queue.Queue(maxsize=200)
waterfall_active_device: Optional[int] = None
waterfall_config = {
'start_freq': 88.0,
'end_freq': 108.0,
'bin_size': 10000,
'gain': 40,
'device': 0,
}
waterfall_active_device: Optional[int] = None
waterfall_config = {
'start_freq': 88.0,
'end_freq': 108.0,
'bin_size': 10000,
'gain': 40,
'device': 0,
'max_bins': 1024,
}
def _waterfall_loop():
@@ -1606,11 +1612,14 @@ def _waterfall_loop():
except ValueError:
continue
if all_bins:
msg = {
'type': 'waterfall_sweep',
'start_freq': sweep_start_hz / 1e6,
'end_freq': sweep_end_hz / 1e6,
if all_bins:
max_bins = int(waterfall_config.get('max_bins') or 0)
if max_bins > 0 and len(all_bins) > max_bins:
all_bins = _downsample_bins(all_bins, max_bins)
msg = {
'type': 'waterfall_sweep',
'start_freq': sweep_start_hz / 1e6,
'end_freq': sweep_end_hz / 1e6,
'bins': all_bins,
'timestamp': datetime.now().isoformat(),
}
@@ -1649,14 +1658,19 @@ def start_waterfall() -> Response:
data = request.json or {}
try:
waterfall_config['start_freq'] = float(data.get('start_freq', 88.0))
waterfall_config['end_freq'] = float(data.get('end_freq', 108.0))
waterfall_config['bin_size'] = int(data.get('bin_size', 10000))
waterfall_config['gain'] = int(data.get('gain', 40))
waterfall_config['device'] = int(data.get('device', 0))
except (ValueError, TypeError) as e:
return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400
try:
waterfall_config['start_freq'] = float(data.get('start_freq', 88.0))
waterfall_config['end_freq'] = float(data.get('end_freq', 108.0))
waterfall_config['bin_size'] = int(data.get('bin_size', 10000))
waterfall_config['gain'] = int(data.get('gain', 40))
waterfall_config['device'] = int(data.get('device', 0))
if data.get('max_bins') is not None:
max_bins = int(data.get('max_bins', waterfall_config['max_bins']))
if max_bins < 64 or max_bins > 4096:
return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400
waterfall_config['max_bins'] = max_bins
except (ValueError, TypeError) as e:
return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400
if waterfall_config['start_freq'] >= waterfall_config['end_freq']:
return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400
@@ -1711,10 +1725,14 @@ def stream_waterfall() -> Response:
def generate() -> Generator[str, None, None]:
last_keepalive = time.time()
while True:
try:
msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
yield format_sse(msg)
try:
msg = waterfall_queue.get(timeout=SSE_QUEUE_TIMEOUT)
last_keepalive = time.time()
try:
process_event('waterfall', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= SSE_KEEPALIVE_INTERVAL:
@@ -1725,3 +1743,20 @@ def stream_waterfall() -> Response:
response.headers['Cache-Control'] = 'no-cache'
response.headers['X-Accel-Buffering'] = 'no'
return response
def _downsample_bins(values: list[float], target: int) -> list[float]:
"""Downsample bins to a target length using simple averaging."""
if target <= 0 or len(values) <= target:
return values
out: list[float] = []
step = len(values) / target
for i in range(target):
start = int(i * step)
end = int((i + 1) * step)
if end <= start:
end = min(start + 1, len(values))
chunk = values[start:end]
if not chunk:
continue
out.append(sum(chunk) / len(chunk))
return out

View File

@@ -22,7 +22,8 @@ from utils.validation import (
validate_frequency, validate_device_index, validate_gain, validate_ppm,
validate_rtl_tcp_host, validate_rtl_tcp_port
)
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.process import safe_terminate, register_process, unregister_process
from utils.sdr import SDRFactory, SDRType, SDRValidationError
from utils.dependencies import get_tool_path
@@ -468,10 +469,14 @@ def stream() -> Response:
keepalive_interval = 30.0 # Send keepalive every 30 seconds instead of 1 second
while True:
try:
msg = app_module.output_queue.get(timeout=1)
last_keepalive = time.time()
yield format_sse(msg)
try:
msg = app_module.output_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('pager', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:

109
routes/recordings.py Normal file
View File

@@ -0,0 +1,109 @@
"""Session recording API endpoints."""
from __future__ import annotations
from pathlib import Path
from flask import Blueprint, jsonify, request, send_file
from utils.recording import get_recording_manager, RECORDING_ROOT
recordings_bp = Blueprint('recordings', __name__, url_prefix='/recordings')
@recordings_bp.route('/start', methods=['POST'])
def start_recording():
    """Start a new recording session for the given mode (required)."""
    payload = request.get_json() or {}
    mode = (payload.get('mode') or '').strip()
    if not mode:
        return jsonify({'status': 'error', 'message': 'mode is required'}), 400
    meta = payload.get('metadata')
    if not isinstance(meta, dict):
        meta = {}
    session = get_recording_manager().start_recording(
        mode=mode, label=payload.get('label'), metadata=meta
    )
    return jsonify({
        'status': 'success',
        'session': {
            'id': session.id,
            'mode': session.mode,
            'label': session.label,
            'started_at': session.started_at.isoformat(),
            'file_path': str(session.file_path),
        }
    })
@recordings_bp.route('/stop', methods=['POST'])
def stop_recording():
    """Stop an active recording, located by mode and/or session id."""
    payload = request.get_json() or {}
    session = get_recording_manager().stop_recording(
        mode=payload.get('mode'), session_id=payload.get('id')
    )
    if session is None:
        return jsonify({'status': 'error', 'message': 'No active recording found'}), 404
    stopped = session.stopped_at.isoformat() if session.stopped_at else None
    return jsonify({
        'status': 'success',
        'session': {
            'id': session.id,
            'mode': session.mode,
            'label': session.label,
            'started_at': session.started_at.isoformat(),
            'stopped_at': stopped,
            'event_count': session.event_count,
            'size_bytes': session.size_bytes,
            'file_path': str(session.file_path),
        }
    })
@recordings_bp.route('', methods=['GET'])
def list_recordings():
    """List stored recordings plus any currently-active sessions."""
    manager = get_recording_manager()
    max_items = request.args.get('limit', default=50, type=int)
    return jsonify({
        'status': 'success',
        'recordings': manager.list_recordings(limit=max_items),
        'active': manager.get_active(),
    })
@recordings_bp.route('/<session_id>', methods=['GET'])
def get_recording(session_id: str):
    """Return metadata for a single recording; 404 when unknown."""
    rec = get_recording_manager().get_recording(session_id)
    if rec is None or not rec:
        return jsonify({'status': 'error', 'message': 'Recording not found'}), 404
    return jsonify({'status': 'success', 'recording': rec})
@recordings_bp.route('/<session_id>/download', methods=['GET'])
def download_recording(session_id: str):
    """Download a recording file as NDJSON.

    Rejects any stored path that does not resolve to a location strictly
    beneath RECORDING_ROOT (path-traversal guard).
    """
    rec = get_recording_manager().get_recording(session_id)
    if not rec:
        return jsonify({'status': 'error', 'message': 'Recording not found'}), 404
    file_path = Path(rec['file_path'])
    try:
        # Resolve both sides so symlinks/'..' cannot escape the root.
        if RECORDING_ROOT.resolve() not in file_path.resolve().parents:
            return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400
    except Exception:
        return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400
    if not file_path.exists():
        return jsonify({'status': 'error', 'message': 'Recording file missing'}), 404
    return send_file(
        file_path,
        mimetype='application/x-ndjson',
        as_attachment=True,
        download_name=file_path.name,
    )

View File

@@ -18,6 +18,7 @@ from utils.validation import (
validate_frequency, validate_device_index, validate_gain, validate_ppm
)
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.process import safe_terminate, register_process, unregister_process
rtlamr_bp = Blueprint('rtlamr', __name__)
@@ -295,6 +296,10 @@ def stream_rtlamr() -> Response:
try:
msg = app_module.rtlamr_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('rtlamr', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()

View File

@@ -18,7 +18,8 @@ from utils.validation import (
validate_frequency, validate_device_index, validate_gain, validate_ppm,
validate_rtl_tcp_host, validate_rtl_tcp_port
)
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.process import safe_terminate, register_process, unregister_process
from utils.sdr import SDRFactory, SDRType
@@ -231,9 +232,13 @@ def stream_sensor() -> Response:
while True:
try:
msg = app_module.sensor_queue.get(timeout=1)
last_keepalive = time.time()
yield format_sse(msg)
msg = app_module.sensor_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('sensor', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:

View File

@@ -15,7 +15,8 @@ from flask import Blueprint, jsonify, request, Response, send_file
import app as app_module
from utils.logging import get_logger
from utils.sse import format_sse
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.sstv import (
get_sstv_decoder,
is_sstv_available,
@@ -398,10 +399,14 @@ def stream_progress():
keepalive_interval = 30.0
while True:
try:
progress = _sstv_queue.get(timeout=1)
last_keepalive = time.time()
yield format_sse(progress)
try:
progress = _sstv_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('sstv', progress, progress.get('type'))
except Exception:
pass
yield format_sse(progress)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:

View File

@@ -15,6 +15,7 @@ from flask import Blueprint, Response, jsonify, request, send_file
from utils.logging import get_logger
from utils.sse import format_sse
from utils.event_pipeline import process_event
from utils.sstv import (
DecodeProgress,
get_general_sstv_decoder,
@@ -274,6 +275,10 @@ def stream_progress():
try:
progress = _sstv_general_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('sstv_general', progress, progress.get('type'))
except Exception:
pass
yield format_sse(progress)
except queue.Empty:
now = time.time()

View File

@@ -60,6 +60,7 @@ from utils.tscm.device_identity import (
ingest_ble_dict,
ingest_wifi_dict,
)
from utils.event_pipeline import process_event
# Import unified Bluetooth scanner helper for TSCM integration
try:
@@ -627,6 +628,10 @@ def sweep_stream():
try:
if tscm_queue:
msg = tscm_queue.get(timeout=1)
try:
process_event('tscm', msg, msg.get('type'))
except Exception:
pass
yield f"data: {json.dumps(msg)}\n\n"
else:
time.sleep(1)
@@ -2023,6 +2028,7 @@ def _run_sweep(
comparator = BaselineComparator(baseline)
baseline_comparison = comparator.compare_all(
wifi_devices=list(all_wifi.values()),
wifi_clients=list(all_wifi_clients.values()),
bt_devices=list(all_bt.values()),
rf_signals=all_rf
)
@@ -2132,6 +2138,7 @@ def _run_sweep(
'total_new': baseline_comparison['total_new'],
'total_missing': baseline_comparison['total_missing'],
'wifi': baseline_comparison.get('wifi'),
'wifi_clients': baseline_comparison.get('wifi_clients'),
'bluetooth': baseline_comparison.get('bluetooth'),
'rf': baseline_comparison.get('rf'),
})
@@ -2297,6 +2304,7 @@ def compare_against_baseline():
Expects JSON body with:
- wifi_devices: list of WiFi devices (optional)
- wifi_clients: list of WiFi clients (optional)
- bt_devices: list of Bluetooth devices (optional)
- rf_signals: list of RF signals (optional)
@@ -2305,12 +2313,14 @@ def compare_against_baseline():
data = request.get_json() or {}
wifi_devices = data.get('wifi_devices')
wifi_clients = data.get('wifi_clients')
bt_devices = data.get('bt_devices')
rf_signals = data.get('rf_signals')
# Use the convenience function that gets active baseline
comparison = get_comparison_for_active_baseline(
wifi_devices=wifi_devices,
wifi_clients=wifi_clients,
bt_devices=bt_devices,
rf_signals=rf_signals
)
@@ -2404,7 +2414,10 @@ def feed_wifi():
"""Feed WiFi device data for baseline recording."""
data = request.get_json()
if data:
_baseline_recorder.add_wifi_device(data)
if data.get('is_client'):
_baseline_recorder.add_wifi_client(data)
else:
_baseline_recorder.add_wifi_device(data)
return jsonify({'status': 'success'})
@@ -3056,12 +3069,14 @@ def get_baseline_diff(baseline_id: int, sweep_id: int):
results = json.loads(results)
current_wifi = results.get('wifi_devices', [])
current_wifi_clients = results.get('wifi_clients', [])
current_bt = results.get('bt_devices', [])
current_rf = results.get('rf_signals', [])
diff = calculate_baseline_diff(
baseline=baseline,
current_wifi=current_wifi,
current_wifi_clients=current_wifi_clients,
current_bt=current_bt,
current_rf=current_rf,
sweep_id=sweep_id

View File

@@ -17,11 +17,12 @@ from flask import Blueprint, jsonify, request, Response
import app as app_module
from utils.dependencies import check_tool, get_tool_path
from utils.logging import wifi_logger as logger
from utils.process import is_valid_mac, is_valid_channel
from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface
from utils.sse import format_sse
from data.oui import get_manufacturer
from utils.logging import wifi_logger as logger
from utils.process import is_valid_mac, is_valid_channel
from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface
from utils.sse import format_sse
from utils.event_pipeline import process_event
from data.oui import get_manufacturer
from utils.constants import (
WIFI_TERMINATE_TIMEOUT,
PMKID_TERMINATE_TIMEOUT,
@@ -46,8 +47,33 @@ from utils.constants import (
wifi_bp = Blueprint('wifi', __name__, url_prefix='/wifi')
# PMKID process state
pmkid_process = None
pmkid_lock = threading.Lock()
pmkid_process = None
pmkid_lock = threading.Lock()
def _parse_channel_list(raw_channels: Any) -> list[int] | None:
    """Normalise user-supplied channel input into a deduplicated int list.

    Accepts a comma/whitespace-separated string, an iterable, or a single
    value. Returns None for empty input or an empty result. Invalid
    channels raise via validate_wifi_channel.
    """
    if raw_channels in (None, '', []):
        return None
    if isinstance(raw_channels, str):
        tokens = [t.strip() for t in re.split(r'[\s,]+', raw_channels) if t.strip()]
    elif isinstance(raw_channels, (list, tuple, set)):
        tokens = list(raw_channels)
    else:
        tokens = [raw_channels]
    ordered: list[int] = []
    accepted: set = set()
    for token in tokens:
        if token in (None, ''):
            continue
        validated = validate_wifi_channel(token)
        if validated not in accepted:
            accepted.add(validated)
            ordered.append(validated)
    return ordered or None
def detect_wifi_interfaces():
@@ -607,8 +633,9 @@ def start_wifi_scan():
return jsonify({'status': 'error', 'message': 'Scan already running'})
data = request.json
channel = data.get('channel')
band = data.get('band', 'abg')
channel = data.get('channel')
channels = data.get('channels')
band = data.get('band', 'abg')
# Use provided interface or fall back to stored monitor interface
interface = data.get('interface')
@@ -658,8 +685,17 @@ def start_wifi_scan():
interface
]
if channel:
cmd.extend(['-c', str(channel)])
channel_list = None
if channels:
try:
channel_list = _parse_channel_list(channels)
except ValueError as e:
return jsonify({'status': 'error', 'message': str(e)}), 400
if channel_list:
cmd.extend(['-c', ','.join(str(c) for c in channel_list)])
elif channel:
cmd.extend(['-c', str(channel)])
logger.info(f"Running: {' '.join(cmd)}")
@@ -851,32 +887,53 @@ def check_handshake_status():
return jsonify({'status': 'stopped', 'file_exists': False, 'handshake_found': False})
file_size = os.path.getsize(capture_file)
handshake_found = False
handshake_found = False
handshake_valid: bool | None = None
handshake_checked = False
handshake_reason: str | None = None
try:
if target_bssid and is_valid_mac(target_bssid):
aircrack_path = get_tool_path('aircrack-ng')
if aircrack_path:
result = subprocess.run(
[aircrack_path, '-a', '2', '-b', target_bssid, capture_file],
capture_output=True, text=True, timeout=10
)
output = result.stdout + result.stderr
if '1 handshake' in output or ('handshake' in output.lower() and 'wpa' in output.lower()):
if '0 handshake' not in output:
handshake_found = True
if target_bssid and is_valid_mac(target_bssid):
aircrack_path = get_tool_path('aircrack-ng')
if aircrack_path:
result = subprocess.run(
[aircrack_path, '-a', '2', '-b', target_bssid, capture_file],
capture_output=True, text=True, timeout=10
)
output = result.stdout + result.stderr
output_lower = output.lower()
handshake_checked = True
if 'no valid wpa handshakes found' in output_lower:
handshake_valid = False
handshake_reason = 'No valid WPA handshake found'
elif '0 handshake' in output_lower:
handshake_valid = False
elif '1 handshake' in output_lower or ('handshake' in output_lower and 'wpa' in output_lower):
handshake_valid = True
else:
handshake_valid = False
except subprocess.TimeoutExpired:
pass
except Exception as e:
logger.error(f"Error checking handshake: {e}")
return jsonify({
'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped',
'file_exists': True,
'file_size': file_size,
'file': capture_file,
'handshake_found': handshake_found
})
except Exception as e:
logger.error(f"Error checking handshake: {e}")
if handshake_valid:
handshake_found = True
normalized_bssid = target_bssid.upper() if target_bssid else None
if normalized_bssid and normalized_bssid not in app_module.wifi_handshakes:
app_module.wifi_handshakes.append(normalized_bssid)
return jsonify({
'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped',
'file_exists': True,
'file_size': file_size,
'file': capture_file,
'handshake_found': handshake_found,
'handshake_valid': handshake_valid,
'handshake_checked': handshake_checked,
'handshake_reason': handshake_reason
})
@wifi_bp.route('/pmkid/capture', methods=['POST'])
@@ -1084,9 +1141,13 @@ def stream_wifi():
while True:
try:
msg = app_module.wifi_queue.get(timeout=1)
last_keepalive = time.time()
yield format_sse(msg)
msg = app_module.wifi_queue.get(timeout=1)
last_keepalive = time.time()
try:
process_event('wifi', msg, msg.get('type'))
except Exception:
pass
yield format_sse(msg)
except queue.Empty:
now = time.time()
if now - last_keepalive >= keepalive_interval:

View File

@@ -16,14 +16,16 @@ from typing import Generator
from flask import Blueprint, jsonify, request, Response
from utils.wifi import (
get_wifi_scanner,
analyze_channels,
get_hidden_correlator,
SCAN_MODE_QUICK,
SCAN_MODE_DEEP,
)
from utils.sse import format_sse
from utils.wifi import (
get_wifi_scanner,
analyze_channels,
get_hidden_correlator,
SCAN_MODE_QUICK,
SCAN_MODE_DEEP,
)
from utils.sse import format_sse
from utils.validation import validate_wifi_channel
from utils.event_pipeline import process_event
logger = logging.getLogger(__name__)
@@ -85,28 +87,44 @@ def start_deep_scan():
Requires monitor mode interface and root privileges.
Request body:
interface: Monitor mode interface (e.g., 'wlan0mon')
band: Band to scan ('2.4', '5', 'all')
channel: Optional specific channel to monitor
Request body:
interface: Monitor mode interface (e.g., 'wlan0mon')
band: Band to scan ('2.4', '5', 'all')
channel: Optional specific channel to monitor
channels: Optional list or comma-separated channels to monitor
"""
data = request.get_json() or {}
interface = data.get('interface')
band = data.get('band', 'all')
channel = data.get('channel')
if channel:
try:
channel = int(channel)
except ValueError:
return jsonify({'error': 'Invalid channel'}), 400
channel = data.get('channel')
channels = data.get('channels')
channel_list = None
if channels:
if isinstance(channels, str):
channel_list = [c.strip() for c in channels.split(',') if c.strip()]
elif isinstance(channels, (list, tuple, set)):
channel_list = list(channels)
else:
channel_list = [channels]
try:
channel_list = [validate_wifi_channel(c) for c in channel_list]
except (TypeError, ValueError):
return jsonify({'error': 'Invalid channels'}), 400
if channel:
try:
channel = validate_wifi_channel(channel)
except ValueError:
return jsonify({'error': 'Invalid channel'}), 400
scanner = get_wifi_scanner()
success = scanner.start_deep_scan(
interface=interface,
band=band,
channel=channel,
)
success = scanner.start_deep_scan(
interface=interface,
band=band,
channel=channel,
channels=channel_list,
)
if success:
return jsonify({
@@ -388,10 +406,14 @@ def event_stream():
- keepalive: Periodic keepalive
"""
def generate() -> Generator[str, None, None]:
scanner = get_wifi_scanner()
for event in scanner.get_event_stream():
yield format_sse(event)
scanner = get_wifi_scanner()
for event in scanner.get_event_stream():
try:
process_event('wifi', event, event.get('type'))
except Exception:
pass
yield format_sse(event)
response = Response(generate(), mimetype='text/event-stream')
response.headers['Cache-Control'] = 'no-cache'