Add alerts/recording, WiFi/TSCM updates, optimize waterfall

This commit is contained in:
Smittix
2026-02-07 18:29:58 +00:00
parent 4bbc00b765
commit 86e4ba7e29
42 changed files with 2514 additions and 485 deletions

443
utils/alerts.py Normal file
View File

@@ -0,0 +1,443 @@
"""Alerting engine for cross-mode events."""
from __future__ import annotations
import json
import logging
import queue
import re
import threading
import time
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Any, Generator
from config import ALERT_WEBHOOK_URL, ALERT_WEBHOOK_TIMEOUT, ALERT_WEBHOOK_SECRET
from utils.database import get_db
logger = logging.getLogger('intercept.alerts')
@dataclass
class AlertRule:
    """In-memory form of one row from the ``alert_rules`` table."""
    id: int
    name: str
    # Restrict the rule to one stream mode; None acts as a wildcard
    # (see AlertManager.process_event).
    mode: str | None
    # Restrict the rule to one event type; None acts as a wildcard.
    event_type: str | None
    # Match conditions decoded from the JSON 'match' column.
    match: dict
    severity: str
    enabled: bool
    # Notification options decoded from the JSON 'notify' column.
    notify: dict
    created_at: str | None = None
class AlertManager:
def __init__(self) -> None:
    # Bounded queue feeding SSE streams; _queue_event drops the oldest
    # entry when it is full.
    self._queue: queue.Queue = queue.Queue(maxsize=1000)
    # Cached enabled rules plus load timestamp, guarded by _cache_lock.
    self._rules_cache: list[AlertRule] = []
    self._rules_loaded_at = 0.0
    self._cache_lock = threading.Lock()
# ------------------------------------------------------------------
# Rule management
# ------------------------------------------------------------------
def invalidate_cache(self) -> None:
    """Force the next _get_rules() call to reload rules from the DB."""
    with self._cache_lock:
        # A zero timestamp always fails the 10-second freshness check.
        self._rules_loaded_at = 0.0
def _load_rules(self) -> None:
    """Reload all enabled rules from the DB into the in-memory cache."""
    with get_db() as conn:
        cursor = conn.execute('''
            SELECT id, name, mode, event_type, match, severity, enabled, notify, created_at
            FROM alert_rules
            WHERE enabled = 1
            ORDER BY id ASC
        ''')
        rules: list[AlertRule] = []
        for row in cursor:
            match = {}
            notify = {}
            # Corrupt JSON in either column degrades to {} so one bad row
            # cannot break alert evaluation for every rule.
            try:
                match = json.loads(row['match']) if row['match'] else {}
            except json.JSONDecodeError:
                match = {}
            try:
                notify = json.loads(row['notify']) if row['notify'] else {}
            except json.JSONDecodeError:
                notify = {}
            rules.append(AlertRule(
                id=row['id'],
                name=row['name'],
                mode=row['mode'],
                event_type=row['event_type'],
                match=match,
                severity=row['severity'] or 'medium',
                enabled=bool(row['enabled']),
                notify=notify,
                created_at=row['created_at'],
            ))
    # Swap the cache in one locked step.
    with self._cache_lock:
        self._rules_cache = rules
        self._rules_loaded_at = time.time()
def _get_rules(self) -> list[AlertRule]:
    """Return a copy of the enabled rules, reloading when older than 10s."""
    with self._cache_lock:
        stale = (time.time() - self._rules_loaded_at) > 10
    if stale:
        # NOTE(review): two threads can both observe a stale cache and
        # reload concurrently; the duplicate DB read looks harmless since
        # _load_rules swaps the cache atomically -- confirm intended.
        self._load_rules()
    with self._cache_lock:
        # Copy so callers can iterate without holding the lock.
        return list(self._rules_cache)
def list_rules(self, include_disabled: bool = False) -> list[dict]:
    """Return alert rules as dicts with the JSON columns decoded.

    Args:
        include_disabled: when True, rules with enabled = 0 are included.

    Returns:
        Rules ordered newest-first (id DESC).

    Improvements: the two near-identical SELECT statements are collapsed
    into one with an optional WHERE clause, and malformed JSON in the
    match/notify columns degrades to {} (matching _load_rules) instead of
    raising json.JSONDecodeError out of the API.
    """
    def _decode(raw) -> dict:
        # Tolerant decode: a corrupt column must not break the rules API.
        if not raw:
            return {}
        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            return {}

    query = '''
        SELECT id, name, mode, event_type, match, severity, enabled, notify, created_at
        FROM alert_rules
    '''
    if not include_disabled:
        query += ' WHERE enabled = 1'
    query += ' ORDER BY id DESC'
    with get_db() as conn:
        return [
            {
                'id': row['id'],
                'name': row['name'],
                'mode': row['mode'],
                'event_type': row['event_type'],
                'match': _decode(row['match']),
                'severity': row['severity'],
                'enabled': bool(row['enabled']),
                'notify': _decode(row['notify']),
                'created_at': row['created_at'],
            }
            for row in conn.execute(query)
        ]
def add_rule(self, rule: dict) -> int:
    """Insert a new rule from a request-style dict.

    Missing name/severity fall back to 'Alert Rule' / 'medium'; enabled
    defaults to True. Returns the new row id.
    """
    with get_db() as conn:
        cursor = conn.execute('''
            INSERT INTO alert_rules (name, mode, event_type, match, severity, enabled, notify)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', (
            rule.get('name') or 'Alert Rule',
            rule.get('mode'),
            rule.get('event_type'),
            json.dumps(rule.get('match') or {}),
            rule.get('severity') or 'medium',
            1 if rule.get('enabled', True) else 0,
            json.dumps(rule.get('notify') or {}),
        ))
        rule_id = cursor.lastrowid
    # Make the new rule visible to process_event() immediately.
    self.invalidate_cache()
    return int(rule_id)
def update_rule(self, rule_id: int, updates: dict) -> bool:
    """Apply a partial update to one rule.

    Returns True when a row actually changed; False when *updates*
    contained no recognised keys or the id did not match.
    """
    fields = []
    params = []
    # Only these whitelisted column names ever reach the f-string SQL
    # below, so the dynamically built SET clause is not injectable.
    for key in ('name', 'mode', 'event_type', 'severity'):
        if key in updates:
            fields.append(f"{key} = ?")
            params.append(updates[key])
    if 'enabled' in updates:
        fields.append('enabled = ?')
        params.append(1 if updates['enabled'] else 0)
    if 'match' in updates:
        fields.append('match = ?')
        params.append(json.dumps(updates['match'] or {}))
    if 'notify' in updates:
        fields.append('notify = ?')
        params.append(json.dumps(updates['notify'] or {}))
    if not fields:
        return False
    params.append(rule_id)
    with get_db() as conn:
        cursor = conn.execute(
            f"UPDATE alert_rules SET {', '.join(fields)} WHERE id = ?",
            params
        )
        updated = cursor.rowcount > 0
    if updated:
        self.invalidate_cache()
    return updated
def delete_rule(self, rule_id: int) -> bool:
    """Delete a rule by id; returns True when a row was removed."""
    with get_db() as conn:
        cursor = conn.execute('DELETE FROM alert_rules WHERE id = ?', (rule_id,))
        deleted = cursor.rowcount > 0
    if deleted:
        self.invalidate_cache()
    return deleted
def list_events(self, limit: int = 100, mode: str | None = None, severity: str | None = None) -> list[dict]:
    """Return the newest triggered alert events, optionally filtered.

    Args:
        limit: maximum number of rows returned.
        mode: restrict results to one stream mode when given.
        severity: restrict results to one severity when given.
    """
    query = 'SELECT id, rule_id, mode, event_type, severity, title, message, payload, created_at FROM alert_events'
    clauses = []
    params: list[Any] = []
    if mode:
        clauses.append('mode = ?')
        params.append(mode)
    if severity:
        clauses.append('severity = ?')
        params.append(severity)
    if clauses:
        query += ' WHERE ' + ' AND '.join(clauses)
    # Placeholders only -- the filter values never enter the SQL text.
    query += ' ORDER BY id DESC LIMIT ?'
    params.append(limit)
    with get_db() as conn:
        cursor = conn.execute(query, params)
        events = []
        for row in cursor:
            events.append({
                'id': row['id'],
                'rule_id': row['rule_id'],
                'mode': row['mode'],
                'event_type': row['event_type'],
                'severity': row['severity'],
                'title': row['title'],
                'message': row['message'],
                'payload': json.loads(row['payload']) if row['payload'] else {},
                'created_at': row['created_at'],
            })
        return events
# ------------------------------------------------------------------
# Event processing
# ------------------------------------------------------------------
def process_event(self, mode: str, event: dict, event_type: str | None = None) -> None:
    """Evaluate one stream event against every enabled rule.

    For each matching rule the alert is persisted to alert_events,
    pushed onto the SSE queue, and optionally forwarded to the webhook.
    """
    if not isinstance(event, dict):
        return
    # Heartbeat/housekeeping events are never alert-worthy.
    if event_type in ('keepalive', 'ping', 'status'):
        return
    rules = self._get_rules()
    if not rules:
        return
    for rule in rules:
        # A rule's mode/event_type of None acts as a wildcard.
        if rule.mode and rule.mode != mode:
            continue
        if rule.event_type and event_type and rule.event_type != event_type:
            continue
        # An untyped event cannot satisfy a rule that demands a type.
        if rule.event_type and not event_type:
            continue
        if not self._match_rule(rule.match, event):
            continue
        title = rule.name or 'Alert'
        message = self._build_message(rule, event, event_type)
        payload = {
            'mode': mode,
            'event_type': event_type,
            'event': event,
            'rule': {
                'id': rule.id,
                'name': rule.name,
            },
        }
        event_id = self._store_event(rule.id, mode, event_type, rule.severity, title, message, payload)
        alert_payload = {
            'id': event_id,
            'rule_id': rule.id,
            'mode': mode,
            'event_type': event_type,
            'severity': rule.severity,
            'title': title,
            'message': message,
            'payload': payload,
            'created_at': datetime.now(timezone.utc).isoformat(),
        }
        self._queue_event(alert_payload)
        self._maybe_send_webhook(alert_payload, rule.notify)
def _build_message(self, rule: AlertRule, event: dict, event_type: str | None) -> str:
if isinstance(rule.notify, dict) and rule.notify.get('message'):
return str(rule.notify.get('message'))
summary_bits = []
if event_type:
summary_bits.append(event_type)
if 'name' in event:
summary_bits.append(str(event.get('name')))
if 'ssid' in event:
summary_bits.append(str(event.get('ssid')))
if 'bssid' in event:
summary_bits.append(str(event.get('bssid')))
if 'address' in event:
summary_bits.append(str(event.get('address')))
if 'mac' in event:
summary_bits.append(str(event.get('mac')))
summary = ' | '.join(summary_bits) if summary_bits else 'Alert triggered'
return summary
def _store_event(
    self,
    rule_id: int,
    mode: str,
    event_type: str | None,
    severity: str,
    title: str,
    message: str,
    payload: dict,
) -> int:
    """Persist one triggered alert to alert_events; returns the row id."""
    with get_db() as conn:
        cursor = conn.execute('''
            INSERT INTO alert_events (rule_id, mode, event_type, severity, title, message, payload)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', (
            rule_id,
            mode,
            event_type,
            severity,
            title,
            message,
            json.dumps(payload),
        ))
        return int(cursor.lastrowid)
def _queue_event(self, alert_payload: dict) -> None:
try:
self._queue.put_nowait(alert_payload)
except queue.Full:
try:
self._queue.get_nowait()
self._queue.put_nowait(alert_payload)
except queue.Empty:
pass
def _maybe_send_webhook(self, payload: dict, notify: dict) -> None:
    """POST the alert JSON to the configured webhook URL, best-effort."""
    if not ALERT_WEBHOOK_URL:
        return
    # A rule can opt out of webhooks with notify = {"webhook": false}.
    if isinstance(notify, dict) and notify.get('webhook') is False:
        return
    try:
        import urllib.request
        req = urllib.request.Request(
            ALERT_WEBHOOK_URL,
            data=json.dumps(payload).encode('utf-8'),
            headers={
                'Content-Type': 'application/json',
                'User-Agent': 'Intercept-Alert',
                # Shared-secret header lets the receiver authenticate us.
                'X-Alert-Token': ALERT_WEBHOOK_SECRET or '',
            },
            method='POST'
        )
        # Response body is ignored; only completion matters.
        with urllib.request.urlopen(req, timeout=ALERT_WEBHOOK_TIMEOUT) as _:
            pass
    except Exception as e:
        # Fire-and-forget: delivery failures are logged at debug level only.
        logger.debug(f"Alert webhook failed: {e}")
# ------------------------------------------------------------------
# Matching
# ------------------------------------------------------------------
def _match_rule(self, rule_match: dict, event: dict) -> bool:
if not rule_match:
return True
for key, expected in rule_match.items():
actual = self._extract_value(event, key)
if not self._match_value(actual, expected):
return False
return True
def _extract_value(self, event: dict, key: str) -> Any:
if '.' not in key:
return event.get(key)
current: Any = event
for part in key.split('.'):
if isinstance(current, dict):
current = current.get(part)
else:
return None
return current
def _match_value(self, actual: Any, expected: Any) -> bool:
if isinstance(expected, dict) and 'op' in expected:
op = expected.get('op')
value = expected.get('value')
return self._apply_op(op, actual, value)
if isinstance(expected, list):
return actual in expected
if isinstance(expected, str):
if actual is None:
return False
return str(actual).lower() == expected.lower()
return actual == expected
def _apply_op(self, op: str, actual: Any, value: Any) -> bool:
if op == 'exists':
return actual is not None
if op == 'eq':
return actual == value
if op == 'neq':
return actual != value
if op == 'gt':
return _safe_number(actual) is not None and _safe_number(actual) > _safe_number(value)
if op == 'gte':
return _safe_number(actual) is not None and _safe_number(actual) >= _safe_number(value)
if op == 'lt':
return _safe_number(actual) is not None and _safe_number(actual) < _safe_number(value)
if op == 'lte':
return _safe_number(actual) is not None and _safe_number(actual) <= _safe_number(value)
if op == 'in':
return actual in (value or [])
if op == 'contains':
if actual is None:
return False
if isinstance(actual, list):
return any(str(value).lower() in str(item).lower() for item in actual)
return str(value).lower() in str(actual).lower()
if op == 'regex':
if actual is None or value is None:
return False
try:
return re.search(str(value), str(actual)) is not None
except re.error:
return False
return False
# ------------------------------------------------------------------
# Streaming
# ------------------------------------------------------------------
def stream_events(self, timeout: float = 1.0) -> Generator[dict, None, None]:
    """Yield alert payloads forever for an SSE consumer.

    Blocks up to *timeout* seconds per iteration waiting for a queued
    alert; when nothing arrives it yields {'type': 'keepalive'} so the
    HTTP connection stays alive. This generator never terminates on its
    own -- the caller must stop iterating.
    """
    while True:
        try:
            event = self._queue.get(timeout=timeout)
            yield event
        except queue.Empty:
            yield {'type': 'keepalive'}
# Process-wide singleton so every request handler shares one rule cache
# and one SSE queue.
_alert_manager: AlertManager | None = None
_alert_lock = threading.Lock()


def get_alert_manager() -> AlertManager:
    """Return the lazily-created process-wide AlertManager."""
    global _alert_manager
    with _alert_lock:
        if _alert_manager is None:
            _alert_manager = AlertManager()
        return _alert_manager
def _safe_number(value: Any) -> float | None:
try:
return float(value)
except (TypeError, ValueError):
return None

View File

@@ -148,9 +148,10 @@ class BTDeviceAggregate:
is_strong_stable: bool = False
has_random_address: bool = False
# Baseline tracking
in_baseline: bool = False
baseline_id: Optional[int] = None
# Baseline tracking
in_baseline: bool = False
baseline_id: Optional[int] = None
seen_before: bool = False
# Tracker detection fields
is_tracker: bool = False
@@ -274,9 +275,10 @@ class BTDeviceAggregate:
},
'heuristic_flags': self.heuristic_flags,
# Baseline
'in_baseline': self.in_baseline,
'baseline_id': self.baseline_id,
# Baseline
'in_baseline': self.in_baseline,
'baseline_id': self.baseline_id,
'seen_before': self.seen_before,
# Tracker detection
'tracker': {
@@ -325,10 +327,11 @@ class BTDeviceAggregate:
'last_seen': self.last_seen.isoformat(),
'age_seconds': self.age_seconds,
'seen_count': self.seen_count,
'heuristic_flags': self.heuristic_flags,
'in_baseline': self.in_baseline,
# Tracker info for list view
'is_tracker': self.is_tracker,
'heuristic_flags': self.heuristic_flags,
'in_baseline': self.in_baseline,
'seen_before': self.seen_before,
# Tracker info for list view
'is_tracker': self.is_tracker,
'tracker_type': self.tracker_type,
'tracker_name': self.tracker_name,
'tracker_confidence': self.tracker_confidence,

View File

@@ -88,19 +88,65 @@ def init_db() -> None:
ON signal_history(mode, device_id, timestamp)
''')
# Device correlation table
conn.execute('''
CREATE TABLE IF NOT EXISTS device_correlations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
wifi_mac TEXT,
bt_mac TEXT,
confidence REAL,
first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
metadata TEXT,
UNIQUE(wifi_mac, bt_mac)
)
''')
# Device correlation table
conn.execute('''
CREATE TABLE IF NOT EXISTS device_correlations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
wifi_mac TEXT,
bt_mac TEXT,
confidence REAL,
first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
metadata TEXT,
UNIQUE(wifi_mac, bt_mac)
)
''')
# Alert rules
conn.execute('''
CREATE TABLE IF NOT EXISTS alert_rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
mode TEXT,
event_type TEXT,
match TEXT,
severity TEXT DEFAULT 'medium',
enabled BOOLEAN DEFAULT 1,
notify TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
''')
# Alert events
conn.execute('''
CREATE TABLE IF NOT EXISTS alert_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
rule_id INTEGER,
mode TEXT,
event_type TEXT,
severity TEXT DEFAULT 'medium',
title TEXT,
message TEXT,
payload TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
)
''')
# Session recordings
conn.execute('''
CREATE TABLE IF NOT EXISTS recording_sessions (
id TEXT PRIMARY KEY,
mode TEXT NOT NULL,
label TEXT,
started_at TIMESTAMP NOT NULL,
stopped_at TIMESTAMP,
file_path TEXT NOT NULL,
event_count INTEGER DEFAULT 0,
size_bytes INTEGER DEFAULT 0,
metadata TEXT
)
''')
# Users table for authentication
conn.execute('''
@@ -131,20 +177,29 @@ def init_db() -> None:
# =====================================================================
# TSCM Baselines - Environment snapshots for comparison
conn.execute('''
CREATE TABLE IF NOT EXISTS tscm_baselines (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
location TEXT,
description TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
wifi_networks TEXT,
bt_devices TEXT,
rf_frequencies TEXT,
gps_coords TEXT,
is_active BOOLEAN DEFAULT 0
)
''')
conn.execute('''
CREATE TABLE IF NOT EXISTS tscm_baselines (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
location TEXT,
description TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
wifi_networks TEXT,
wifi_clients TEXT,
bt_devices TEXT,
rf_frequencies TEXT,
gps_coords TEXT,
is_active BOOLEAN DEFAULT 0
)
''')
# Ensure new columns exist for older databases
try:
columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")}
if 'wifi_clients' not in columns:
conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT')
except Exception as e:
logger.debug(f"Schema update skipped for tscm_baselines: {e}")
# TSCM Sweeps - Individual sweep sessions
conn.execute('''
@@ -685,15 +740,16 @@ def get_correlations(min_confidence: float = 0.5) -> list[dict]:
# TSCM Functions
# =============================================================================
def create_tscm_baseline(
name: str,
location: str | None = None,
description: str | None = None,
wifi_networks: list | None = None,
bt_devices: list | None = None,
rf_frequencies: list | None = None,
gps_coords: dict | None = None
) -> int:
def create_tscm_baseline(
name: str,
location: str | None = None,
description: str | None = None,
wifi_networks: list | None = None,
wifi_clients: list | None = None,
bt_devices: list | None = None,
rf_frequencies: list | None = None,
gps_coords: dict | None = None
) -> int:
"""
Create a new TSCM baseline.
@@ -701,19 +757,20 @@ def create_tscm_baseline(
The ID of the created baseline
"""
with get_db() as conn:
cursor = conn.execute('''
INSERT INTO tscm_baselines
(name, location, description, wifi_networks, bt_devices, rf_frequencies, gps_coords)
VALUES (?, ?, ?, ?, ?, ?, ?)
''', (
name,
location,
description,
json.dumps(wifi_networks) if wifi_networks else None,
json.dumps(bt_devices) if bt_devices else None,
json.dumps(rf_frequencies) if rf_frequencies else None,
json.dumps(gps_coords) if gps_coords else None
))
cursor = conn.execute('''
INSERT INTO tscm_baselines
(name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
''', (
name,
location,
description,
json.dumps(wifi_networks) if wifi_networks else None,
json.dumps(wifi_clients) if wifi_clients else None,
json.dumps(bt_devices) if bt_devices else None,
json.dumps(rf_frequencies) if rf_frequencies else None,
json.dumps(gps_coords) if gps_coords else None
))
return cursor.lastrowid
@@ -728,18 +785,19 @@ def get_tscm_baseline(baseline_id: int) -> dict | None:
if row is None:
return None
return {
'id': row['id'],
'name': row['name'],
'location': row['location'],
'description': row['description'],
'created_at': row['created_at'],
'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
'is_active': bool(row['is_active'])
}
return {
'id': row['id'],
'name': row['name'],
'location': row['location'],
'description': row['description'],
'created_at': row['created_at'],
'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [],
'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
'is_active': bool(row['is_active'])
}
def get_all_tscm_baselines() -> list[dict]:
@@ -781,19 +839,23 @@ def set_active_tscm_baseline(baseline_id: int) -> bool:
return cursor.rowcount > 0
def update_tscm_baseline(
baseline_id: int,
wifi_networks: list | None = None,
bt_devices: list | None = None,
rf_frequencies: list | None = None
) -> bool:
def update_tscm_baseline(
baseline_id: int,
wifi_networks: list | None = None,
wifi_clients: list | None = None,
bt_devices: list | None = None,
rf_frequencies: list | None = None
) -> bool:
"""Update baseline device lists."""
updates = []
params = []
if wifi_networks is not None:
updates.append('wifi_networks = ?')
params.append(json.dumps(wifi_networks))
if wifi_networks is not None:
updates.append('wifi_networks = ?')
params.append(json.dumps(wifi_networks))
if wifi_clients is not None:
updates.append('wifi_clients = ?')
params.append(json.dumps(wifi_clients))
if bt_devices is not None:
updates.append('bt_devices = ?')
params.append(json.dumps(bt_devices))

29
utils/event_pipeline.py Normal file
View File

@@ -0,0 +1,29 @@
"""Shared event pipeline for alerts and recordings."""
from __future__ import annotations
from typing import Any
from utils.alerts import get_alert_manager
from utils.recording import get_recording_manager
# Housekeeping event types that carry no recordable/alertable content.
IGNORE_TYPES = {'keepalive', 'ping'}


def process_event(mode: str, event: dict | Any, event_type: str | None = None) -> None:
    """Fan one stream event out to the recording and alerting subsystems."""
    if event_type in IGNORE_TYPES or not isinstance(event, dict):
        return
    try:
        get_recording_manager().record_event(mode, event, event_type)
    except Exception:
        # Recording failures should never break streaming
        pass
    try:
        get_alert_manager().process_event(mode, event, event_type)
    except Exception:
        # Alert failures should never break streaming
        pass

222
utils/recording.py Normal file
View File

@@ -0,0 +1,222 @@
"""Session recording utilities for SSE/event streams."""
from __future__ import annotations
import json
import logging
import threading
import uuid
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
from utils.database import get_db
logger = logging.getLogger('intercept.recording')
RECORDING_ROOT = Path(__file__).parent.parent / 'instance' / 'recordings'
@dataclass
class RecordingSession:
    """One JSONL capture of a single mode's event stream."""
    id: str
    mode: str
    label: str | None
    file_path: Path
    started_at: datetime
    stopped_at: datetime | None = None
    event_count: int = 0
    size_bytes: int = 0
    metadata: dict | None = None
    # Lazily-opened append handle for the JSONL file.
    _file_handle: Any | None = field(default=None, repr=False)
    # Bug fix: the original declared `_lock: threading.Lock = threading.Lock()`,
    # a class-level dataclass default, so EVERY session instance shared one
    # lock object (serialising unrelated recordings). default_factory gives
    # each session its own lock.
    _lock: threading.Lock = field(default_factory=threading.Lock, repr=False)

    def open(self) -> None:
        """Create parent directories and open the file for appending."""
        self.file_path.parent.mkdir(parents=True, exist_ok=True)
        self._file_handle = self.file_path.open('a', encoding='utf-8')

    def close(self) -> None:
        """Flush and close the underlying file, if open."""
        if self._file_handle:
            self._file_handle.flush()
            self._file_handle.close()
            self._file_handle = None

    def write_event(self, record: dict) -> None:
        """Append one JSON line and update the event/size counters.

        Opens the file on first use; the flush keeps the file readable
        while the recording is still live.
        """
        if not self._file_handle:
            self.open()
        line = json.dumps(record, ensure_ascii=True) + '\n'
        with self._lock:
            self._file_handle.write(line)
            self._file_handle.flush()
            self.event_count += 1
            self.size_bytes += len(line.encode('utf-8'))
class RecordingManager:
    """Owns active RecordingSessions (at most one per mode) and their DB rows.

    Sessions are indexed both by mode and by session id; both maps are
    guarded by a single lock.
    """

    def __init__(self) -> None:
        self._active_by_mode: dict[str, RecordingSession] = {}
        self._active_by_id: dict[str, RecordingSession] = {}
        self._lock = threading.Lock()

    def start_recording(self, mode: str, label: str | None = None, metadata: dict | None = None) -> RecordingSession:
        """Start a recording for *mode*; idempotent while one is active.

        Returns:
            The newly created session, or the one already running for
            this mode.
        """
        with self._lock:
            existing = self._active_by_mode.get(mode)
            if existing:
                # Second start for the same mode returns the live session.
                return existing
            session_id = str(uuid.uuid4())
            started_at = datetime.now(timezone.utc)
            filename = f"{mode}_{started_at.strftime('%Y%m%d_%H%M%S')}_{session_id}.jsonl"
            file_path = RECORDING_ROOT / mode / filename
            session = RecordingSession(
                id=session_id,
                mode=mode,
                label=label,
                file_path=file_path,
                started_at=started_at,
                metadata=metadata or {},
            )
            session.open()
            self._active_by_mode[mode] = session
            self._active_by_id[session_id] = session
            # Persist the row up front so the session is listable while live.
            with get_db() as conn:
                conn.execute('''
                    INSERT INTO recording_sessions
                    (id, mode, label, started_at, file_path, event_count, size_bytes, metadata)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    session.id,
                    session.mode,
                    session.label,
                    session.started_at.isoformat(),
                    str(session.file_path),
                    session.event_count,
                    session.size_bytes,
                    json.dumps(session.metadata or {}),
                ))
            return session

    def stop_recording(self, mode: str | None = None, session_id: str | None = None) -> RecordingSession | None:
        """Stop a session selected by id (takes precedence) or by mode.

        Returns:
            The stopped session, or None when nothing matched.
        """
        with self._lock:
            session = None
            if session_id:
                session = self._active_by_id.get(session_id)
            elif mode:
                session = self._active_by_mode.get(mode)
            if not session:
                return None
            session.stopped_at = datetime.now(timezone.utc)
            session.close()
            self._active_by_mode.pop(session.mode, None)
            self._active_by_id.pop(session.id, None)
            # Write the final counters alongside the stop timestamp.
            with get_db() as conn:
                conn.execute('''
                    UPDATE recording_sessions
                    SET stopped_at = ?, event_count = ?, size_bytes = ?
                    WHERE id = ?
                ''', (
                    session.stopped_at.isoformat(),
                    session.event_count,
                    session.size_bytes,
                    session.id,
                ))
            return session

    def record_event(self, mode: str, event: dict, event_type: str | None = None) -> None:
        """Append one event to the active session for *mode*, if any."""
        # Heartbeats are noise; never record them.
        if event_type in ('keepalive', 'ping'):
            return
        # NOTE(review): this lookup is done without _lock, so an event can
        # race a concurrent stop_recording and be written to a just-closed
        # session -- confirm the best-effort behaviour is intended.
        session = self._active_by_mode.get(mode)
        if not session:
            return
        record = {
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'mode': mode,
            'event_type': event_type,
            'event': event,
        }
        try:
            session.write_event(record)
        except Exception as e:
            # Recording must never break the stream that feeds it.
            logger.debug(f"Recording write failed: {e}")

    def list_recordings(self, limit: int = 50) -> list[dict]:
        """Return the most recent recording rows, newest first."""
        with get_db() as conn:
            cursor = conn.execute('''
                SELECT id, mode, label, started_at, stopped_at, file_path, event_count, size_bytes, metadata
                FROM recording_sessions
                ORDER BY started_at DESC
                LIMIT ?
            ''', (limit,))
            rows = []
            for row in cursor:
                rows.append({
                    'id': row['id'],
                    'mode': row['mode'],
                    'label': row['label'],
                    'started_at': row['started_at'],
                    'stopped_at': row['stopped_at'],
                    'file_path': row['file_path'],
                    'event_count': row['event_count'],
                    'size_bytes': row['size_bytes'],
                    'metadata': json.loads(row['metadata']) if row['metadata'] else {},
                })
            return rows

    def get_recording(self, session_id: str) -> dict | None:
        """Fetch one recording row by id, or None when unknown."""
        with get_db() as conn:
            cursor = conn.execute('''
                SELECT id, mode, label, started_at, stopped_at, file_path, event_count, size_bytes, metadata
                FROM recording_sessions
                WHERE id = ?
            ''', (session_id,))
            row = cursor.fetchone()
            if not row:
                return None
            return {
                'id': row['id'],
                'mode': row['mode'],
                'label': row['label'],
                'started_at': row['started_at'],
                'stopped_at': row['stopped_at'],
                'file_path': row['file_path'],
                'event_count': row['event_count'],
                'size_bytes': row['size_bytes'],
                'metadata': json.loads(row['metadata']) if row['metadata'] else {},
            }

    def get_active(self) -> list[dict]:
        """Summarise every currently-active session with live counters."""
        with self._lock:
            sessions = []
            for session in self._active_by_mode.values():
                sessions.append({
                    'id': session.id,
                    'mode': session.mode,
                    'label': session.label,
                    'started_at': session.started_at.isoformat(),
                    'event_count': session.event_count,
                    'size_bytes': session.size_bytes,
                })
            return sessions
# Process-wide singleton so every stream shares the same session registry.
_recording_manager: RecordingManager | None = None
_recording_lock = threading.Lock()


def get_recording_manager() -> RecordingManager:
    """Return the lazily-created process-wide RecordingManager."""
    global _recording_manager
    with _recording_lock:
        if _recording_manager is None:
            _recording_manager = RecordingManager()
        return _recording_manager

View File

@@ -523,20 +523,22 @@ class BaselineDiff:
}
def calculate_baseline_diff(
baseline: dict,
current_wifi: list[dict],
current_bt: list[dict],
current_rf: list[dict],
sweep_id: int
) -> BaselineDiff:
def calculate_baseline_diff(
baseline: dict,
current_wifi: list[dict],
current_wifi_clients: list[dict],
current_bt: list[dict],
current_rf: list[dict],
sweep_id: int
) -> BaselineDiff:
"""
Calculate comprehensive diff between baseline and current scan.
Args:
baseline: Baseline dict from database
current_wifi: Current WiFi devices
current_bt: Current Bluetooth devices
current_wifi_clients: Current WiFi clients
current_bt: Current Bluetooth devices
current_rf: Current RF signals
sweep_id: Current sweep ID
@@ -564,11 +566,16 @@ def calculate_baseline_diff(
diff.is_stale = diff.baseline_age_hours > 72
# Build baseline lookup dicts
baseline_wifi = {
d.get('bssid', d.get('mac', '')).upper(): d
for d in baseline.get('wifi_networks', [])
if d.get('bssid') or d.get('mac')
}
baseline_wifi = {
d.get('bssid', d.get('mac', '')).upper(): d
for d in baseline.get('wifi_networks', [])
if d.get('bssid') or d.get('mac')
}
baseline_wifi_clients = {
d.get('mac', d.get('address', '')).upper(): d
for d in baseline.get('wifi_clients', [])
if d.get('mac') or d.get('address')
}
baseline_bt = {
d.get('mac', d.get('address', '')).upper(): d
for d in baseline.get('bt_devices', [])
@@ -580,8 +587,11 @@ def calculate_baseline_diff(
if d.get('frequency')
}
# Compare WiFi
_compare_wifi(diff, baseline_wifi, current_wifi)
# Compare WiFi
_compare_wifi(diff, baseline_wifi, current_wifi)
# Compare WiFi clients
_compare_wifi_clients(diff, baseline_wifi_clients, current_wifi_clients)
# Compare Bluetooth
_compare_bluetooth(diff, baseline_bt, current_bt)
@@ -607,7 +617,7 @@ def calculate_baseline_diff(
return diff
def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
"""Compare WiFi devices between baseline and current."""
current_macs = {
d.get('bssid', d.get('mac', '')).upper(): d
@@ -630,7 +640,48 @@ def _compare_wifi(diff: BaselineDiff, baseline: dict, current: list[dict]) -> No
'channel': device.get('channel'),
'rssi': device.get('power', device.get('signal')),
}
))
))
def _compare_wifi_clients(diff: BaselineDiff, baseline: dict, current: list[dict]) -> None:
"""Compare WiFi clients between baseline and current."""
current_macs = {
d.get('mac', d.get('address', '')).upper(): d
for d in current
if d.get('mac') or d.get('address')
}
# Find new clients
for mac, device in current_macs.items():
if mac not in baseline:
name = device.get('vendor', 'WiFi Client')
diff.new_devices.append(DeviceChange(
identifier=mac,
protocol='wifi_client',
change_type='new',
description=f'New WiFi client: {name}',
expected=False,
details={
'vendor': name,
'rssi': device.get('rssi'),
'associated_bssid': device.get('associated_bssid'),
}
))
# Find missing clients
for mac, device in baseline.items():
if mac not in current_macs:
name = device.get('vendor', 'WiFi Client')
diff.missing_devices.append(DeviceChange(
identifier=mac,
protocol='wifi_client',
change_type='missing',
description=f'Missing WiFi client: {name}',
expected=True,
details={
'vendor': name,
}
))
else:
# Check for changes
baseline_dev = baseline[mac]
@@ -796,11 +847,12 @@ def _calculate_baseline_health(diff: BaselineDiff, baseline: dict) -> None:
reasons.append(f"Baseline is {diff.baseline_age_hours:.0f} hours old")
# Device churn penalty
total_baseline = (
len(baseline.get('wifi_networks', [])) +
len(baseline.get('bt_devices', [])) +
len(baseline.get('rf_frequencies', []))
)
total_baseline = (
len(baseline.get('wifi_networks', [])) +
len(baseline.get('wifi_clients', [])) +
len(baseline.get('bt_devices', [])) +
len(baseline.get('rf_frequencies', []))
)
if total_baseline > 0:
churn_rate = (diff.total_new + diff.total_missing) / total_baseline

View File

@@ -26,12 +26,13 @@ class BaselineRecorder:
Records and manages TSCM environment baselines.
"""
def __init__(self):
self.recording = False
self.current_baseline_id: int | None = None
self.wifi_networks: dict[str, dict] = {} # BSSID -> network info
self.bt_devices: dict[str, dict] = {} # MAC -> device info
self.rf_frequencies: dict[float, dict] = {} # Frequency -> signal info
def __init__(self):
self.recording = False
self.current_baseline_id: int | None = None
self.wifi_networks: dict[str, dict] = {} # BSSID -> network info
self.wifi_clients: dict[str, dict] = {} # MAC -> client info
self.bt_devices: dict[str, dict] = {} # MAC -> device info
self.rf_frequencies: dict[float, dict] = {} # Frequency -> signal info
def start_recording(
self,
@@ -50,10 +51,11 @@ class BaselineRecorder:
Returns:
Baseline ID
"""
self.recording = True
self.wifi_networks = {}
self.bt_devices = {}
self.rf_frequencies = {}
self.recording = True
self.wifi_networks = {}
self.wifi_clients = {}
self.bt_devices = {}
self.rf_frequencies = {}
# Create baseline in database
self.current_baseline_id = create_tscm_baseline(
@@ -78,24 +80,27 @@ class BaselineRecorder:
self.recording = False
# Convert to lists for storage
wifi_list = list(self.wifi_networks.values())
bt_list = list(self.bt_devices.values())
rf_list = list(self.rf_frequencies.values())
wifi_list = list(self.wifi_networks.values())
wifi_client_list = list(self.wifi_clients.values())
bt_list = list(self.bt_devices.values())
rf_list = list(self.rf_frequencies.values())
# Update database
update_tscm_baseline(
self.current_baseline_id,
wifi_networks=wifi_list,
bt_devices=bt_list,
rf_frequencies=rf_list
)
update_tscm_baseline(
self.current_baseline_id,
wifi_networks=wifi_list,
wifi_clients=wifi_client_list,
bt_devices=bt_list,
rf_frequencies=rf_list
)
summary = {
'baseline_id': self.current_baseline_id,
'wifi_count': len(wifi_list),
'bt_count': len(bt_list),
'rf_count': len(rf_list),
}
summary = {
'baseline_id': self.current_baseline_id,
'wifi_count': len(wifi_list),
'wifi_client_count': len(wifi_client_list),
'bt_count': len(bt_list),
'rf_count': len(rf_list),
}
logger.info(
f"Baseline recording complete: {summary['wifi_count']} WiFi, "
@@ -135,8 +140,8 @@ class BaselineRecorder:
'last_seen': datetime.now().isoformat(),
}
def add_bt_device(self, device: dict) -> None:
"""Add a Bluetooth device to the current baseline."""
def add_bt_device(self, device: dict) -> None:
"""Add a Bluetooth device to the current baseline."""
if not self.recording:
return
@@ -150,7 +155,7 @@ class BaselineRecorder:
'rssi': device.get('rssi', self.bt_devices[mac].get('rssi')),
})
else:
self.bt_devices[mac] = {
self.bt_devices[mac] = {
'mac': mac,
'name': device.get('name', ''),
'rssi': device.get('rssi', device.get('signal')),
@@ -158,10 +163,37 @@ class BaselineRecorder:
'type': device.get('type', ''),
'first_seen': datetime.now().isoformat(),
'last_seen': datetime.now().isoformat(),
}
def add_rf_signal(self, signal: dict) -> None:
"""Add an RF signal to the current baseline."""
}
def add_wifi_client(self, client: dict) -> None:
    """Add or refresh a WiFi client observation in the current baseline.

    No-op unless a recording session is active or the record carries no
    usable MAC address.

    Args:
        client: Parsed client record. Identified by 'mac' (falling back to
            'address'); may also carry 'rssi', 'vendor', 'associated_bssid',
            'probed_ssids' and 'probe_count'.
    """
    if not self.recording:
        return
    # Normalise the identifier. Using `or` (not .get defaults) tolerates
    # records where 'mac'/'address' is present but explicitly None, which
    # would otherwise raise AttributeError on .upper().
    mac = (client.get('mac') or client.get('address') or '').upper()
    if not mac:
        return
    now = datetime.now().isoformat()
    if mac in self.wifi_clients:
        existing = self.wifi_clients[mac]
        # Refresh volatile fields only; preserve first_seen/probed history.
        existing.update({
            'last_seen': now,
            'rssi': client.get('rssi', existing.get('rssi')),
            'associated_bssid': client.get('associated_bssid', existing.get('associated_bssid')),
        })
    else:
        probed = client.get('probed_ssids', [])
        self.wifi_clients[mac] = {
            'mac': mac,
            'vendor': client.get('vendor', ''),
            'rssi': client.get('rssi'),
            'associated_bssid': client.get('associated_bssid'),
            'probed_ssids': probed,
            # Fall back to counting the probe list when no explicit count.
            'probe_count': client.get('probe_count', len(probed)),
            'first_seen': now,
            'last_seen': now,
        }
def add_rf_signal(self, signal: dict) -> None:
"""Add an RF signal to the current baseline."""
if not self.recording:
return
@@ -191,15 +223,16 @@ class BaselineRecorder:
'hit_count': 1,
}
def get_recording_status(self) -> dict:
    """Report whether a recording is active plus per-category entity counts."""
    status = {
        'recording': self.recording,
        'baseline_id': self.current_baseline_id,
    }
    counters = (
        ('wifi_count', self.wifi_networks),
        ('bt_count', self.bt_devices),
        ('rf_count', self.rf_frequencies),
    )
    for key, store in counters:
        status[key] = len(store)
    return status
def get_recording_status(self) -> dict:
    """Report whether a recording is active plus per-category entity counts."""
    status = {
        'recording': self.recording,
        'baseline_id': self.current_baseline_id,
    }
    counters = (
        ('wifi_count', self.wifi_networks),
        ('wifi_client_count', self.wifi_clients),
        ('bt_count', self.bt_devices),
        ('rf_count', self.rf_frequencies),
    )
    for key, store in counters:
        status[key] = len(store)
    return status
class BaselineComparator:
@@ -220,11 +253,16 @@ class BaselineComparator:
for d in baseline.get('wifi_networks', [])
if d.get('bssid') or d.get('mac')
}
self.baseline_bt = {
d.get('mac', d.get('address', '')).upper(): d
for d in baseline.get('bt_devices', [])
if d.get('mac') or d.get('address')
}
self.baseline_bt = {
d.get('mac', d.get('address', '')).upper(): d
for d in baseline.get('bt_devices', [])
if d.get('mac') or d.get('address')
}
self.baseline_wifi_clients = {
d.get('mac', d.get('address', '')).upper(): d
for d in baseline.get('wifi_clients', [])
if d.get('mac') or d.get('address')
}
self.baseline_rf = {
round(d.get('frequency', 0), 1): d
for d in baseline.get('rf_frequencies', [])
@@ -269,8 +307,8 @@ class BaselineComparator:
'matching_count': len(matching_devices),
}
def compare_bluetooth(self, current_devices: list[dict]) -> dict:
"""Compare current Bluetooth devices against baseline."""
def compare_bluetooth(self, current_devices: list[dict]) -> dict:
"""Compare current Bluetooth devices against baseline."""
current_macs = {
d.get('mac', d.get('address', '')).upper(): d
for d in current_devices
@@ -291,14 +329,45 @@ class BaselineComparator:
if mac not in current_macs:
missing_devices.append(device)
return {
'new': new_devices,
'missing': missing_devices,
'matching': matching_devices,
'new_count': len(new_devices),
'missing_count': len(missing_devices),
'matching_count': len(matching_devices),
}
return {
'new': new_devices,
'missing': missing_devices,
'matching': matching_devices,
'new_count': len(new_devices),
'missing_count': len(missing_devices),
'matching_count': len(matching_devices),
}
def compare_wifi_clients(self, current_devices: list[dict]) -> dict:
    """Compare currently observed WiFi clients against the baseline.

    Args:
        current_devices: Client records identified by 'mac' (falling back
            to 'address'); records with no usable identifier are ignored.

    Returns:
        Dict with 'new', 'missing' and 'matching' device lists plus the
        corresponding '*_count' totals.
    """
    current_macs: dict[str, dict] = {}
    for device in current_devices:
        # Use `or` so a record whose 'mac' key exists but is None does not
        # raise AttributeError on .upper() (.get defaults only apply to
        # missing keys).
        mac = (device.get('mac') or device.get('address') or '').upper()
        if mac:
            current_macs[mac] = device
    # Baseline keys are stored upper-cased, so direct membership tests work.
    new_devices = [d for mac, d in current_macs.items()
                   if mac not in self.baseline_wifi_clients]
    matching_devices = [d for mac, d in current_macs.items()
                        if mac in self.baseline_wifi_clients]
    missing_devices = [d for mac, d in self.baseline_wifi_clients.items()
                       if mac not in current_macs]
    return {
        'new': new_devices,
        'missing': missing_devices,
        'matching': matching_devices,
        'new_count': len(new_devices),
        'missing_count': len(missing_devices),
        'matching_count': len(matching_devices),
    }
def compare_rf(self, current_signals: list[dict]) -> dict:
"""Compare current RF signals against baseline."""
@@ -331,35 +400,42 @@ class BaselineComparator:
'matching_count': len(matching_signals),
}
def compare_all(
self,
wifi_devices: list[dict] | None = None,
bt_devices: list[dict] | None = None,
rf_signals: list[dict] | None = None
) -> dict:
def compare_all(
self,
wifi_devices: list[dict] | None = None,
wifi_clients: list[dict] | None = None,
bt_devices: list[dict] | None = None,
rf_signals: list[dict] | None = None
) -> dict:
"""
Compare all current data against baseline.
Returns:
Dict with comparison results for each category
"""
results = {
'wifi': None,
'bluetooth': None,
'rf': None,
'total_new': 0,
'total_missing': 0,
}
results = {
'wifi': None,
'wifi_clients': None,
'bluetooth': None,
'rf': None,
'total_new': 0,
'total_missing': 0,
}
if wifi_devices is not None:
results['wifi'] = self.compare_wifi(wifi_devices)
results['total_new'] += results['wifi']['new_count']
results['total_missing'] += results['wifi']['missing_count']
if bt_devices is not None:
results['bluetooth'] = self.compare_bluetooth(bt_devices)
results['total_new'] += results['bluetooth']['new_count']
results['total_missing'] += results['bluetooth']['missing_count']
if wifi_devices is not None:
results['wifi'] = self.compare_wifi(wifi_devices)
results['total_new'] += results['wifi']['new_count']
results['total_missing'] += results['wifi']['missing_count']
if wifi_clients is not None:
results['wifi_clients'] = self.compare_wifi_clients(wifi_clients)
results['total_new'] += results['wifi_clients']['new_count']
results['total_missing'] += results['wifi_clients']['missing_count']
if bt_devices is not None:
results['bluetooth'] = self.compare_bluetooth(bt_devices)
results['total_new'] += results['bluetooth']['new_count']
results['total_missing'] += results['bluetooth']['missing_count']
if rf_signals is not None:
results['rf'] = self.compare_rf(rf_signals)
@@ -369,11 +445,12 @@ class BaselineComparator:
return results
def get_comparison_for_active_baseline(
wifi_devices: list[dict] | None = None,
bt_devices: list[dict] | None = None,
rf_signals: list[dict] | None = None
) -> dict | None:
def get_comparison_for_active_baseline(
wifi_devices: list[dict] | None = None,
wifi_clients: list[dict] | None = None,
bt_devices: list[dict] | None = None,
rf_signals: list[dict] | None = None
) -> dict | None:
"""
Convenience function to compare against the active baseline.
@@ -385,4 +462,4 @@ def get_comparison_for_active_baseline(
return None
comparator = BaselineComparator(baseline)
return comparator.compare_all(wifi_devices, bt_devices, rf_signals)
return comparator.compare_all(wifi_devices, wifi_clients, bt_devices, rf_signals)

View File

@@ -113,14 +113,18 @@ class ThreatDetector:
def _load_baseline(self, baseline: dict) -> None:
"""Load baseline device identifiers for comparison."""
# WiFi networks and clients
for network in baseline.get('wifi_networks', []):
if 'bssid' in network:
self.baseline_wifi_macs.add(network['bssid'].upper())
if 'clients' in network:
for client in network['clients']:
if 'mac' in client:
self.baseline_wifi_macs.add(client['mac'].upper())
# WiFi networks and clients
for network in baseline.get('wifi_networks', []):
if 'bssid' in network:
self.baseline_wifi_macs.add(network['bssid'].upper())
if 'clients' in network:
for client in network['clients']:
if 'mac' in client:
self.baseline_wifi_macs.add(client['mac'].upper())
for client in baseline.get('wifi_clients', []):
if 'mac' in client:
self.baseline_wifi_macs.add(client['mac'].upper())
# Bluetooth devices
for device in baseline.get('bt_devices', []):

View File

@@ -662,12 +662,13 @@ class UnifiedWiFiScanner:
# Deep Scan (airodump-ng)
# =========================================================================
def start_deep_scan(
self,
interface: Optional[str] = None,
band: str = 'all',
channel: Optional[int] = None,
) -> bool:
def start_deep_scan(
self,
interface: Optional[str] = None,
band: str = 'all',
channel: Optional[int] = None,
channels: Optional[list[int]] = None,
) -> bool:
"""
Start continuous deep scan with airodump-ng.
@@ -700,11 +701,11 @@ class UnifiedWiFiScanner:
# Start airodump-ng in background thread
self._deep_scan_stop_event.clear()
self._deep_scan_thread = threading.Thread(
target=self._run_deep_scan,
args=(iface, band, channel),
daemon=True,
)
self._deep_scan_thread = threading.Thread(
target=self._run_deep_scan,
args=(iface, band, channel, channels),
daemon=True,
)
self._deep_scan_thread.start()
self._status = WiFiScanStatus(
@@ -766,8 +767,14 @@ class UnifiedWiFiScanner:
return True
def _run_deep_scan(self, interface: str, band: str, channel: Optional[int]):
"""Background thread for running airodump-ng."""
def _run_deep_scan(
self,
interface: str,
band: str,
channel: Optional[int],
channels: Optional[list[int]],
):
"""Background thread for running airodump-ng."""
from .parsers.airodump import parse_airodump_csv
import tempfile
@@ -779,12 +786,14 @@ class UnifiedWiFiScanner:
# Build command
cmd = ['airodump-ng', '-w', output_prefix, '--output-format', 'csv']
if channel:
cmd.extend(['-c', str(channel)])
elif band == '2.4':
cmd.extend(['--band', 'bg'])
elif band == '5':
cmd.extend(['--band', 'a'])
if channels:
cmd.extend(['-c', ','.join(str(c) for c in channels)])
elif channel:
cmd.extend(['-c', str(channel)])
elif band == '2.4':
cmd.extend(['--band', 'bg'])
elif band == '5':
cmd.extend(['--band', 'a'])
cmd.append(interface)