mirror of
https://github.com/smittix/intercept.git
synced 2026-04-24 06:40:00 -07:00
feat: Add cross-mode analytics dashboard with geofencing, correlations, and data export
Adds a unified analytics mode under the Security nav group that aggregates data across all signal modes. Includes emergency squawk alerting (7700/7600/7500), vertical rate anomaly detection, ACARS/VDL2-to-ADS-B flight correlation, geofence zones with enter/exit detection for aircraft/vessels/APRS stations, temporal pattern detection, RSSI history tracking, Meshtastic topology mapping, and JSON/CSV data export. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
148
utils/analytics.py
Normal file
148
utils/analytics.py
Normal file
@@ -0,0 +1,148 @@
|
||||
"""Cross-mode analytics: activity tracking, summaries, and emergency squawk detection."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from collections import deque
|
||||
from typing import Any
|
||||
|
||||
import app as app_module
|
||||
|
||||
|
||||
class ModeActivityTracker:
    """Track device counts per mode in a time-bucketed ring buffer for sparklines."""

    def __init__(self, max_buckets: int = 60, bucket_interval: float = 5.0):
        # One fixed-size deque of counts per mode name; appended at most
        # once per bucket_interval seconds.
        self._max_buckets = max_buckets
        self._bucket_interval = bucket_interval
        self._history: dict[str, deque] = {}
        self._last_record_time = 0.0

    def record(self) -> None:
        """Snapshot current per-mode counts, at most once per bucket interval."""
        now = time.time()
        if now - self._last_record_time < self._bucket_interval:
            return
        self._last_record_time = now

        for mode, count in _get_mode_counts().items():
            bucket = self._history.setdefault(mode, deque(maxlen=self._max_buckets))
            bucket.append(count)

    def get_sparkline(self, mode: str) -> list[int]:
        """Return the recorded count series for one mode (oldest first)."""
        self.record()
        return list(self._history.get(mode, []))

    def get_all_sparklines(self) -> dict[str, list[int]]:
        """Return the recorded count series for every tracked mode."""
        self.record()
        return {name: list(buckets) for name, buckets in self._history.items()}
|
||||
|
||||
|
||||
# Singleton
_tracker: ModeActivityTracker | None = None


def get_activity_tracker() -> ModeActivityTracker:
    """Return the shared ModeActivityTracker, creating it on first use."""
    global _tracker
    if _tracker is not None:
        return _tracker
    _tracker = ModeActivityTracker()
    return _tracker
|
||||
|
||||
|
||||
# Mode name -> app_module DataStore attribute holding that mode's entities.
_MODE_STORES = {
    'adsb': 'adsb_aircraft',
    'ais': 'ais_vessels',
    'wifi': 'wifi_networks',
    'bluetooth': 'bt_devices',
    'dsc': 'dsc_messages',
}


def _get_mode_counts() -> dict[str, int]:
    """Read current entity counts from app_module DataStores.

    A store that is missing or unreadable counts as 0, so the dashboard
    keeps working even when a mode has never been started.
    """
    counts: dict[str, int] = {}
    for mode, attr in _MODE_STORES.items():
        try:
            counts[mode] = len(getattr(app_module, attr))
        except Exception:
            counts[mode] = 0
    return counts
|
||||
|
||||
|
||||
def get_cross_mode_summary() -> dict[str, Any]:
    """Return entity counts for all active DataStores, plus Wi-Fi clients."""
    summary = _get_mode_counts()
    try:
        wifi_clients = len(app_module.wifi_clients)
    except Exception:
        # Wi-Fi client store may not exist yet; report zero.
        wifi_clients = 0
    summary['wifi_clients'] = wifi_clients
    return summary
|
||||
|
||||
|
||||
def get_mode_health() -> dict[str, dict]:
    """Check process refs and SDR status for each mode.

    Returns a dict keyed by mode name with ``{'running': bool}`` entries,
    plus an ``'sdr_devices'`` entry mapping device ids to their status
    (empty dict if the status call fails).
    """
    health: dict[str, dict] = {}

    # Mode name -> app_module attribute holding that mode's subprocess handle.
    process_map = {
        'pager': 'current_process',
        'sensor': 'sensor_process',
        'adsb': 'adsb_process',
        'ais': 'ais_process',
        'acars': 'acars_process',
        'vdl2': 'vdl2_process',
        'aprs': 'aprs_process',
        'wifi': 'wifi_process',
        'bluetooth': 'bt_process',
        'dsc': 'dsc_process',
    }

    for mode, attr in process_map.items():
        proc = getattr(app_module, attr, None)
        # Popen.poll() returns None while the subprocess is still running.
        # (Simplified from the redundant `if proc else False` conditional.)
        health[mode] = {'running': proc is not None and proc.poll() is None}

    try:
        sdr_status = app_module.get_sdr_device_status()
        health['sdr_devices'] = {str(k): v for k, v in sdr_status.items()}
    except Exception:
        health['sdr_devices'] = {}

    return health
|
||||
|
||||
|
||||
# Transponder codes that indicate an in-flight emergency.
EMERGENCY_SQUAWKS = {
    '7700': 'General Emergency',
    '7600': 'Comms Failure',
    '7500': 'Hijack',
}


def get_emergency_squawks() -> list[dict]:
    """Iterate adsb_aircraft DataStore for emergency squawk codes.

    A malformed aircraft record is skipped instead of aborting the whole
    scan (the previous single try/except dropped all remaining aircraft
    on the first bad record).
    """
    emergencies: list[dict] = []
    try:
        items = list(app_module.adsb_aircraft.items())
    except Exception:
        # Store unavailable; best-effort empty result.
        return emergencies

    for icao, aircraft in items:
        try:
            sq = str(aircraft.get('squawk', '')).strip()
            if sq not in EMERGENCY_SQUAWKS:
                continue
            emergencies.append({
                'icao': icao,
                'callsign': aircraft.get('callsign', ''),
                'squawk': sq,
                'meaning': EMERGENCY_SQUAWKS[sq],
                'altitude': aircraft.get('altitude'),
                'lat': aircraft.get('lat'),
                'lon': aircraft.get('lon'),
            })
        except Exception:
            # Skip this record only; keep scanning the rest.
            continue
    return emergencies
|
||||
84
utils/flight_correlator.py
Normal file
84
utils/flight_correlator.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Match ACARS/VDL2 messages to ADS-B aircraft by callsign."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from collections import deque
|
||||
|
||||
|
||||
class FlightCorrelator:
    """Correlate ACARS and VDL2 messages with ADS-B aircraft."""

    # Message fields that may carry an aircraft identity.
    _ID_FIELDS = ('flight', 'tail', 'reg', 'callsign', 'icao', 'addr')

    def __init__(self, max_messages: int = 1000):
        self._acars_messages: deque[dict] = deque(maxlen=max_messages)
        self._vdl2_messages: deque[dict] = deque(maxlen=max_messages)

    def add_acars_message(self, msg: dict) -> None:
        """Buffer an ACARS message, stamped with the time it was added."""
        self._acars_messages.append(dict(msg, _corr_time=time.time()))

    def add_vdl2_message(self, msg: dict) -> None:
        """Buffer a VDL2 message, stamped with the time it was added."""
        self._vdl2_messages.append(dict(msg, _corr_time=time.time()))

    def get_messages_for_aircraft(
        self, icao: str | None = None, callsign: str | None = None
    ) -> dict[str, list[dict]]:
        """Match ACARS/VDL2 messages by callsign, flight, or registration fields."""
        terms = {value.strip().upper() for value in (icao, callsign) if value}
        if not terms:
            return {'acars': [], 'vdl2': []}

        return {
            'acars': [self._clean_msg(m) for m in self._acars_messages
                      if self._msg_matches(m, terms)],
            'vdl2': [self._clean_msg(m) for m in self._vdl2_messages
                     if self._msg_matches(m, terms)],
        }

    @staticmethod
    def _msg_matches(msg: dict, terms: set[str]) -> bool:
        """True if any identifying field in msg matches one of terms."""
        return any(
            val and str(val).strip().upper() in terms
            for val in (msg.get(field) for field in FlightCorrelator._ID_FIELDS)
        )

    @staticmethod
    def _clean_msg(msg: dict) -> dict:
        """Return a copy of msg without internal '_corr_' bookkeeping keys."""
        return {key: value for key, value in msg.items()
                if not key.startswith('_corr_')}

    @property
    def acars_count(self) -> int:
        """Number of buffered ACARS messages."""
        return len(self._acars_messages)

    @property
    def vdl2_count(self) -> int:
        """Number of buffered VDL2 messages."""
        return len(self._vdl2_messages)
|
||||
|
||||
|
||||
# Singleton
_correlator: FlightCorrelator | None = None


def get_flight_correlator() -> FlightCorrelator:
    """Return the shared FlightCorrelator, creating it on first use."""
    global _correlator
    if _correlator is not None:
        return _correlator
    _correlator = FlightCorrelator()
    return _correlator
|
||||
126
utils/geofence.py
Normal file
126
utils/geofence.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""Geofence zones with haversine distance, enter/exit detection, and SQLite persistence."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
from typing import Any
|
||||
|
||||
from utils.database import get_db
|
||||
|
||||
|
||||
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Return the great-circle distance in meters between two lat/lon points."""
    earth_radius_m = 6_371_000
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    half_dphi = math.radians(lat2 - lat1) / 2
    half_dlam = math.radians(lon2 - lon1) / 2

    # Haversine term; atan2 form stays numerically stable for small angles.
    a = math.sin(half_dphi) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(half_dlam) ** 2
    return earth_radius_m * 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
|
||||
|
||||
|
||||
def _ensure_table() -> None:
    """Create the geofence_zones table if it doesn't exist (idempotent)."""
    ddl = '''
        CREATE TABLE IF NOT EXISTS geofence_zones (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            lat REAL NOT NULL,
            lon REAL NOT NULL,
            radius_m REAL NOT NULL,
            alert_on TEXT DEFAULT 'enter_exit',
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    '''
    with get_db() as conn:
        conn.execute(ddl)
|
||||
|
||||
|
||||
class GeofenceManager:
    """Manages geofence zones with enter/exit detection.

    Zones are persisted in SQLite; which entities are currently inside
    which zones is tracked in memory only.
    """

    def __init__(self):
        # entity_id -> set of zone ids the entity was inside at last check
        self._inside: dict[str, set[int]] = {}
        _ensure_table()

    def list_zones(self) -> list[dict]:
        """Return all zones as dicts, ordered by id."""
        with get_db() as conn:
            cursor = conn.execute(
                'SELECT id, name, lat, lon, radius_m, alert_on, created_at FROM geofence_zones ORDER BY id'
            )
            return [dict(row) for row in cursor]

    def add_zone(self, name: str, lat: float, lon: float, radius_m: float,
                 alert_on: str = 'enter_exit') -> int:
        """Insert a zone and return its new row id."""
        with get_db() as conn:
            cursor = conn.execute(
                'INSERT INTO geofence_zones (name, lat, lon, radius_m, alert_on) VALUES (?, ?, ?, ?, ?)',
                (name, lat, lon, radius_m, alert_on),
            )
            return cursor.lastrowid

    def delete_zone(self, zone_id: int) -> bool:
        """Delete a zone; returns True if a row was removed."""
        with get_db() as conn:
            cursor = conn.execute('DELETE FROM geofence_zones WHERE id = ?', (zone_id,))
            # Clean up in-memory inside-tracking for the removed zone
            for entity_zones in self._inside.values():
                entity_zones.discard(zone_id)
            return cursor.rowcount > 0

    @staticmethod
    def _make_event(event_type: str, zone: dict, entity_id: str, entity_type: str,
                    dist: float, lat: float, lon: float,
                    metadata: dict[str, Any] | None) -> dict:
        """Build one enter/exit event payload (metadata keys are merged last)."""
        return {
            'type': event_type,
            'zone_id': zone['id'],
            'zone_name': zone['name'],
            'entity_id': entity_id,
            'entity_type': entity_type,
            'distance_m': round(dist, 1),
            'lat': lat,
            'lon': lon,
            **(metadata or {}),
        }

    def check_position(self, entity_id: str, entity_type: str,
                       lat: float, lon: float,
                       metadata: dict[str, Any] | None = None) -> list[dict]:
        """Check entity position against all zones. Returns list of events.

        Emits 'geofence_enter' when the entity moves into a zone it was not
        in on the previous check, and 'geofence_exit' when it leaves one,
        honoring each zone's alert_on setting.
        """
        zones = self.list_zones()
        if not zones:
            return []

        events: list[dict] = []
        prev_inside = self._inside.get(entity_id, set())
        curr_inside: set[int] = set()

        for zone in zones:
            dist = haversine_distance(lat, lon, zone['lat'], zone['lon'])
            zid = zone['id']
            if dist <= zone['radius_m']:
                curr_inside.add(zid)
                if zid not in prev_inside and zone['alert_on'] in ('enter', 'enter_exit'):
                    events.append(self._make_event(
                        'geofence_enter', zone, entity_id, entity_type,
                        dist, lat, lon, metadata))
            elif zid in prev_inside and zone['alert_on'] in ('exit', 'enter_exit'):
                events.append(self._make_event(
                    'geofence_exit', zone, entity_id, entity_type,
                    dist, lat, lon, metadata))

        self._inside[entity_id] = curr_inside
        return events
|
||||
|
||||
|
||||
# Singleton
_manager: GeofenceManager | None = None


def get_geofence_manager() -> GeofenceManager:
    """Return the shared GeofenceManager, creating it on first use."""
    global _manager
    if _manager is not None:
        return _manager
    _manager = GeofenceManager()
    return _manager
|
||||
@@ -306,6 +306,9 @@ class MeshtasticClient:
|
||||
self._range_test_running: bool = False
|
||||
self._range_test_results: list[dict] = []
|
||||
|
||||
# Topology tracking: node_id -> {neighbors, hop_count, msg_count, last_seen}
|
||||
self._topology: dict[str, dict] = {}
|
||||
|
||||
    @property
    def is_running(self) -> bool:
        """True while the client is marked running (mirrors the internal ``_running`` flag)."""
        return self._running
|
||||
@@ -326,6 +329,35 @@ class MeshtasticClient:
|
||||
"""Set callback for received messages."""
|
||||
self._callback = callback
|
||||
|
||||
def record_message_route(self, from_node: str, to_node: str, hops: int | None = None) -> None:
|
||||
"""Record a message route for topology tracking."""
|
||||
now = datetime.now(timezone.utc).isoformat()
|
||||
for node_id in (from_node, to_node):
|
||||
if node_id not in self._topology:
|
||||
self._topology[node_id] = {
|
||||
'neighbors': set(),
|
||||
'hop_count': hops,
|
||||
'msg_count': 0,
|
||||
'last_seen': now,
|
||||
}
|
||||
entry = self._topology[node_id]
|
||||
entry['msg_count'] += 1
|
||||
entry['last_seen'] = now
|
||||
self._topology[from_node]['neighbors'].add(to_node)
|
||||
self._topology[to_node]['neighbors'].add(from_node)
|
||||
|
||||
def get_topology(self) -> dict:
|
||||
"""Return topology dict with serializable sets."""
|
||||
result = {}
|
||||
for node_id, data in self._topology.items():
|
||||
result[node_id] = {
|
||||
'neighbors': list(data.get('neighbors', set())),
|
||||
'hop_count': data.get('hop_count'),
|
||||
'msg_count': data.get('msg_count', 0),
|
||||
'last_seen': data.get('last_seen'),
|
||||
}
|
||||
return result
|
||||
|
||||
def connect(self, device: str | None = None, connection_type: str = 'serial',
|
||||
hostname: str | None = None) -> bool:
|
||||
"""
|
||||
@@ -463,6 +495,14 @@ class MeshtasticClient:
|
||||
# Track node from packet (always, even for filtered messages)
|
||||
self._track_node_from_packet(packet, decoded, portnum)
|
||||
|
||||
# Record topology route
|
||||
if from_num and to_num:
|
||||
self.record_message_route(
|
||||
self._format_node_id(from_num),
|
||||
self._format_node_id(to_num),
|
||||
packet.get('hopLimit'),
|
||||
)
|
||||
|
||||
# Parse traceroute responses
|
||||
if portnum == 'TRACEROUTE_APP':
|
||||
self._handle_traceroute_response(packet, decoded)
|
||||
|
||||
93
utils/temporal_patterns.py
Normal file
93
utils/temporal_patterns.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Periodic pattern detection via interval analysis."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
class TemporalPatternDetector:
    """Detect periodic patterns from event timestamps per device."""

    def __init__(self, max_timestamps: int = 200):
        # "mode:device_id" -> bounded list of event timestamps (append order)
        self._timestamps: dict[str, list[float]] = defaultdict(list)
        self._max_timestamps = max_timestamps

    def record_event(self, device_id: str, mode: str, timestamp: float | None = None) -> None:
        """Record an event for a device; uses the current time when no timestamp is given.

        Bug fix: the previous ``timestamp or time.time()`` discarded an
        explicit timestamp of 0.0 (falsy) and silently substituted "now".
        """
        key = f"{mode}:{device_id}"
        ts = time.time() if timestamp is None else timestamp
        buf = self._timestamps[key]
        buf.append(ts)
        # Trim in place so other references to the list stay valid.
        if len(buf) > self._max_timestamps:
            del buf[: len(buf) - self._max_timestamps]

    def detect_patterns(self, device_id: str, mode: str | None = None) -> dict | None:
        """Detect a periodic pattern for a device.

        Returns a dict with period_seconds, confidence, occurrences,
        device_id and mode for the first mode showing a pattern, or None.
        """
        if mode:
            keys = [f"{mode}:{device_id}"]
        else:
            keys = [k for k in self._timestamps if k.endswith(f":{device_id}")]

        for key in keys:
            result = self._analyze_intervals(self._timestamps.get(key, []))
            if result:
                result['device_id'] = device_id
                result['mode'] = key.split(':')[0]
                return result
        return None

    def _analyze_intervals(self, timestamps: list[float]) -> dict | None:
        """Return pattern stats if intervals cluster around a stable period, else None."""
        if len(timestamps) < 4:
            return None

        intervals = [b - a for a, b in zip(timestamps, timestamps[1:])]

        # Upper median interval is the candidate period.
        median = sorted(intervals)[len(intervals) // 2]
        if median < 1.0:
            # Sub-second periods are treated as noise.
            return None

        # Fraction of intervals within 20% of the candidate period.
        tolerance = median * 0.2
        matching = sum(1 for iv in intervals if abs(iv - median) <= tolerance)
        confidence = matching / len(intervals)
        if confidence < 0.5:
            return None

        return {
            'period_seconds': round(median, 1),
            'confidence': round(confidence, 3),
            'occurrences': len(timestamps),
        }

    def get_all_patterns(self) -> list[dict]:
        """Return all detected patterns across all devices (first matching mode per device)."""
        results = []
        seen: set[str] = set()
        for key in self._timestamps:
            mode, device_id = key.split(':', 1)
            if device_id in seen:
                continue
            pattern = self.detect_patterns(device_id, mode)
            if pattern:
                results.append(pattern)
            seen.add(device_id)
        return results
|
||||
|
||||
|
||||
# Singleton
_detector: TemporalPatternDetector | None = None


def get_pattern_detector() -> TemporalPatternDetector:
    """Return the shared TemporalPatternDetector, creating it on first use."""
    global _detector
    if _detector is not None:
        return _detector
    _detector = TemporalPatternDetector()
    return _detector
|
||||
Reference in New Issue
Block a user