Add distributed agent architecture for multi-node signal intelligence

Features:
- Standalone agent server (intercept_agent.py) for remote sensor nodes
- Controller API blueprint for agent management and data aggregation
- Push mechanism for agents to send data to controller
- Pull mechanism for controller to proxy requests to agents
- Multi-agent SSE stream for combined data view
- Agent management page at /controller/manage
- Agent selector dropdown in main UI
- GPS integration for location tagging
- API key authentication for secure agent communication
- Integration with Intercept's dependency checking system

New files:
- intercept_agent.py: Remote agent HTTP server
- intercept_agent.cfg: Agent configuration template
- routes/controller.py: Controller API endpoints
- utils/agent_client.py: HTTP client for agents
- utils/trilateration.py: Multi-agent position calculation
- static/js/core/agents.js: Frontend agent management
- templates/agents.html: Agent management page
- docs/DISTRIBUTED_AGENTS.md: System documentation

Modified:
- app.py: Register controller blueprint
- utils/database.py: Add agents and push_payloads tables
- templates/index.html: Add agent selector section
This commit is contained in:
cemaxecuter
2026-01-26 06:14:42 -05:00
parent ada6d5f1f1
commit f980e2e76d
20 changed files with 8809 additions and 19 deletions

281
utils/agent_client.py Normal file
View File

@@ -0,0 +1,281 @@
"""
HTTP client for communicating with remote Intercept agents.
"""
from __future__ import annotations
import logging
from typing import Any
import requests
logger = logging.getLogger('intercept.agent_client')
class AgentHTTPError(RuntimeError):
    """Error raised when an HTTP request to an agent fails.

    Attributes:
        status_code: HTTP status code returned by the agent, or None when
            the failure happened before a response was received.
    """

    def __init__(self, message: str, status_code: int | None = None):
        # Record the status code before delegating message handling upward.
        self.status_code = status_code
        super().__init__(message)
class AgentConnectionError(AgentHTTPError):
    """Raised when the agent cannot be reached (connection failure or timeout)."""
class AgentClient:
    """HTTP client for communicating with a remote Intercept agent.

    All requests share one error-translation path (`_request`): connection
    failures and timeouts become AgentConnectionError, HTTP-level failures
    become AgentHTTPError, and original exceptions are chained for debugging.
    """

    def __init__(
        self,
        base_url: str,
        api_key: str | None = None,
        timeout: float = 60.0
    ):
        """
        Initialize agent client.

        Args:
            base_url: Base URL of the agent (e.g., http://192.168.1.50:8020)
            api_key: Optional API key for authentication
            timeout: Request timeout in seconds
        """
        # Normalize so path joins never produce a double slash.
        self.base_url = base_url.rstrip('/')
        self.api_key = api_key
        self.timeout = timeout

    def _headers(self) -> dict:
        """Build request headers, adding the API key when configured."""
        headers = {'Content-Type': 'application/json'}
        if self.api_key:
            headers['X-API-Key'] = self.api_key
        return headers

    def _request(
        self,
        method: str,
        path: str,
        params: dict | None = None,
        json_body: dict | None = None
    ) -> dict:
        """
        Perform an HTTP request to the agent with shared error handling.

        Args:
            method: HTTP method name ('GET' or 'POST')
            path: URL path (e.g., /capabilities)
            params: Optional query parameters
            json_body: Optional JSON body (None sends no body)

        Returns:
            Parsed JSON response (empty dict for empty bodies)

        Raises:
            AgentHTTPError: On HTTP errors
            AgentConnectionError: If agent is unreachable
        """
        url = f"{self.base_url}{path}"
        try:
            response = requests.request(
                method,
                url,
                headers=self._headers(),
                params=params,
                json=json_body,
                timeout=self.timeout
            )
            response.raise_for_status()
            return response.json() if response.content else {}
        except requests.ConnectionError as e:
            # Note: connect timeouts subclass ConnectionError and land here.
            raise AgentConnectionError(
                f"Cannot connect to agent at {self.base_url}: {e}"
            ) from e
        except requests.Timeout as e:
            raise AgentConnectionError(
                f"Request to agent timed out after {self.timeout}s"
            ) from e
        except requests.HTTPError as e:
            raise AgentHTTPError(
                f"Agent returned error: {e.response.status_code}",
                status_code=e.response.status_code
            ) from e
        except requests.RequestException as e:
            raise AgentHTTPError(f"Request failed: {e}") from e

    def _get(self, path: str, params: dict | None = None) -> dict:
        """
        Perform GET request to agent.

        Args:
            path: URL path (e.g., /capabilities)
            params: Optional query parameters

        Returns:
            Parsed JSON response

        Raises:
            AgentHTTPError: On HTTP errors
            AgentConnectionError: If agent is unreachable
        """
        return self._request('GET', path, params=params)

    def _post(self, path: str, data: dict | None = None) -> dict:
        """
        Perform POST request to agent.

        Args:
            path: URL path (e.g., /sensor/start)
            data: Optional JSON body (defaults to an empty object)

        Returns:
            Parsed JSON response

        Raises:
            AgentHTTPError: On HTTP errors
            AgentConnectionError: If agent is unreachable
        """
        return self._request('POST', path, json_body=data or {})

    # =========================================================================
    # Capability & Status
    # =========================================================================

    def get_capabilities(self) -> dict:
        """
        Get agent capabilities (available modes, devices).

        Returns:
            Dict with 'modes' (mode -> bool), 'devices' (list), 'agent_version'
        """
        return self._get('/capabilities')

    def get_status(self) -> dict:
        """
        Get agent status.

        Returns:
            Dict with 'running_modes', 'uptime', 'push_enabled', etc.
        """
        return self._get('/status')

    def health_check(self) -> bool:
        """
        Check if agent is healthy.

        Returns:
            True if agent is reachable and healthy
        """
        try:
            result = self._get('/health')
            return result.get('status') == 'healthy'
        except (AgentHTTPError, AgentConnectionError):
            return False

    def get_config(self) -> dict:
        """Get agent configuration (non-sensitive fields)."""
        return self._get('/config')

    def update_config(self, **kwargs) -> dict:
        """
        Update agent configuration.

        Args:
            push_enabled: Enable/disable push mode
            push_interval: Push interval in seconds

        Returns:
            Updated config
        """
        return self._post('/config', kwargs)

    # =========================================================================
    # Mode Operations
    # =========================================================================

    def start_mode(self, mode: str, params: dict | None = None) -> dict:
        """
        Start a mode on the agent.

        Args:
            mode: Mode name (e.g., 'sensor', 'adsb', 'wifi')
            params: Mode-specific parameters

        Returns:
            Start result with 'status' field
        """
        return self._post(f'/{mode}/start', params or {})

    def stop_mode(self, mode: str) -> dict:
        """
        Stop a running mode on the agent.

        Args:
            mode: Mode name

        Returns:
            Stop result with 'status' field
        """
        return self._post(f'/{mode}/stop')

    def get_mode_status(self, mode: str) -> dict:
        """
        Get status of a specific mode.

        Args:
            mode: Mode name

        Returns:
            Mode status with 'running' field
        """
        return self._get(f'/{mode}/status')

    def get_mode_data(self, mode: str) -> dict:
        """
        Get current data snapshot for a mode.

        Args:
            mode: Mode name

        Returns:
            Data snapshot with 'data' field
        """
        return self._get(f'/{mode}/data')

    # =========================================================================
    # Convenience Methods
    # =========================================================================

    def refresh_metadata(self) -> dict:
        """
        Fetch comprehensive metadata from agent.

        Returns:
            Dict with capabilities, status, config, and a 'healthy' flag
            (False if any of the three requests failed).
        """
        metadata = {
            'capabilities': None,
            'status': None,
            'config': None,
            'healthy': False,
        }
        try:
            metadata['capabilities'] = self.get_capabilities()
            metadata['status'] = self.get_status()
            metadata['config'] = self.get_config()
            metadata['healthy'] = True
        except (AgentHTTPError, AgentConnectionError) as e:
            logger.warning(f"Failed to refresh agent metadata: {e}")
        return metadata

    def __repr__(self) -> str:
        return f"AgentClient({self.base_url})"
def create_client_from_agent(agent: dict) -> AgentClient:
    """
    Build an AgentClient from an agent database record.

    Args:
        agent: Agent dict from database; must contain 'base_url',
            'api_key' is optional.

    Returns:
        Configured AgentClient
    """
    key = agent.get('api_key')
    return AgentClient(agent['base_url'], api_key=key, timeout=60.0)

View File

@@ -385,6 +385,51 @@ def init_db() -> None:
ON dsc_alerts(source_mmsi, received_at)
''')
# =====================================================================
# Remote Agent Tables (for distributed/controller mode)
# =====================================================================
# Remote agents registry
conn.execute('''
CREATE TABLE IF NOT EXISTS agents (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT UNIQUE NOT NULL,
base_url TEXT NOT NULL,
description TEXT,
api_key TEXT,
capabilities TEXT,
interfaces TEXT,
gps_coords TEXT,
last_seen TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
is_active BOOLEAN DEFAULT 1
)
''')
# Push payloads received from remote agents
conn.execute('''
CREATE TABLE IF NOT EXISTS push_payloads (
id INTEGER PRIMARY KEY AUTOINCREMENT,
agent_id INTEGER NOT NULL,
scan_type TEXT NOT NULL,
interface TEXT,
payload TEXT NOT NULL,
received_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (agent_id) REFERENCES agents(id)
)
''')
# Indexes for agent tables
conn.execute('''
CREATE INDEX IF NOT EXISTS idx_agents_name
ON agents(name)
''')
conn.execute('''
CREATE INDEX IF NOT EXISTS idx_push_payloads_agent
ON push_payloads(agent_id, received_at)
''')
logger.info("Database initialized successfully")
@@ -1677,3 +1722,236 @@ def cleanup_old_dsc_alerts(max_age_days: int = 30) -> int:
AND received_at < datetime('now', ?)
''', (f'-{max_age_days} days',))
return cursor.rowcount
# =============================================================================
# Remote Agent Functions (for distributed/controller mode)
# =============================================================================
def create_agent(
    name: str,
    base_url: str,
    api_key: str | None = None,
    description: str | None = None,
    capabilities: dict | None = None,
    interfaces: dict | None = None,
    gps_coords: dict | None = None
) -> int:
    """
    Register a new remote agent.

    Dict-valued fields (capabilities, interfaces, gps_coords) are stored as
    JSON text; a trailing slash on base_url is stripped for consistency.

    Returns:
        The ID of the created agent
    """
    def _encode(value: dict | None) -> str | None:
        # Empty/None dicts are stored as SQL NULL rather than '{}'.
        return json.dumps(value) if value else None

    row = (
        name,
        base_url.rstrip('/'),
        api_key,
        description,
        _encode(capabilities),
        _encode(interfaces),
        _encode(gps_coords),
    )
    with get_db() as conn:
        cursor = conn.execute('''
            INSERT INTO agents
            (name, base_url, api_key, description, capabilities, interfaces, gps_coords)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', row)
        return cursor.lastrowid
def get_agent(agent_id: int) -> dict | None:
    """Fetch an agent by primary key; returns None when it does not exist."""
    with get_db() as conn:
        row = conn.execute(
            'SELECT * FROM agents WHERE id = ?', (agent_id,)
        ).fetchone()
    return _row_to_agent(row) if row else None
def get_agent_by_name(name: str) -> dict | None:
    """Fetch an agent by its unique name; returns None when not found."""
    with get_db() as conn:
        row = conn.execute(
            'SELECT * FROM agents WHERE name = ?', (name,)
        ).fetchone()
    return _row_to_agent(row) if row else None
def _row_to_agent(row) -> dict:
"""Convert database row to agent dict."""
return {
'id': row['id'],
'name': row['name'],
'base_url': row['base_url'],
'description': row['description'],
'api_key': row['api_key'],
'capabilities': json.loads(row['capabilities']) if row['capabilities'] else None,
'interfaces': json.loads(row['interfaces']) if row['interfaces'] else None,
'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
'last_seen': row['last_seen'],
'created_at': row['created_at'],
'is_active': bool(row['is_active'])
}
def list_agents(active_only: bool = True) -> list[dict]:
    """Return agents ordered by name, optionally restricted to active ones."""
    query = (
        'SELECT * FROM agents WHERE is_active = 1 ORDER BY name'
        if active_only
        else 'SELECT * FROM agents ORDER BY name'
    )
    with get_db() as conn:
        return [_row_to_agent(row) for row in conn.execute(query)]
def update_agent(
    agent_id: int,
    base_url: str | None = None,
    description: str | None = None,
    api_key: str | None = None,
    capabilities: dict | None = None,
    interfaces: dict | None = None,
    gps_coords: dict | None = None,
    is_active: bool | None = None,
    update_last_seen: bool = False
) -> bool:
    """Update selected fields of an agent record.

    Only arguments that are not None are written. Returns True when a row
    was actually modified, False when nothing was requested or no row matched.
    """
    assignments: list[str] = []
    values: list = []

    def _set(column: str, value) -> None:
        assignments.append(f'{column} = ?')
        values.append(value)

    if base_url is not None:
        _set('base_url', base_url.rstrip('/'))
    if description is not None:
        _set('description', description)
    if api_key is not None:
        _set('api_key', api_key)
    if capabilities is not None:
        _set('capabilities', json.dumps(capabilities))
    if interfaces is not None:
        _set('interfaces', json.dumps(interfaces))
    if gps_coords is not None:
        _set('gps_coords', json.dumps(gps_coords))
    if is_active is not None:
        _set('is_active', 1 if is_active else 0)
    if update_last_seen:
        # Timestamp comes from SQL, so no placeholder value is needed.
        assignments.append('last_seen = CURRENT_TIMESTAMP')
    if not assignments:
        return False
    values.append(agent_id)
    with get_db() as conn:
        cursor = conn.execute(
            f'UPDATE agents SET {", ".join(assignments)} WHERE id = ?',
            values
        )
        return cursor.rowcount > 0
def delete_agent(agent_id: int) -> bool:
    """Remove an agent and all of its stored push payloads.

    Returns True when the agent row existed and was deleted.
    """
    with get_db() as conn:
        # Child rows must go first to satisfy the foreign key constraint.
        conn.execute('DELETE FROM push_payloads WHERE agent_id = ?', (agent_id,))
        deleted = conn.execute('DELETE FROM agents WHERE id = ?', (agent_id,))
        return deleted.rowcount > 0
def store_push_payload(
    agent_id: int,
    scan_type: str,
    payload: dict,
    interface: str | None = None,
    received_at: str | None = None
) -> int:
    """
    Store a push payload from a remote agent.

    Args:
        agent_id: ID of the agent that sent the payload
        scan_type: Type of scan data (e.g. 'wifi', 'adsb')
        payload: JSON-serializable payload body
        interface: Optional capture interface name
        received_at: Optional explicit timestamp string; when omitted the
            database supplies CURRENT_TIMESTAMP

    Returns:
        The ID of the created payload record
    """
    with get_db() as conn:
        # COALESCE lets one statement cover both the explicit-timestamp and
        # default-timestamp cases (previously two duplicated INSERT branches).
        cursor = conn.execute('''
            INSERT INTO push_payloads (agent_id, scan_type, interface, payload, received_at)
            VALUES (?, ?, ?, ?, COALESCE(?, CURRENT_TIMESTAMP))
        ''', (agent_id, scan_type, interface, json.dumps(payload), received_at))
        # Any push also counts as proof of life for the agent.
        conn.execute(
            'UPDATE agents SET last_seen = CURRENT_TIMESTAMP WHERE id = ?',
            (agent_id,)
        )
        return cursor.lastrowid
def get_recent_payloads(
    agent_id: int | None = None,
    scan_type: str | None = None,
    limit: int = 100
) -> list[dict]:
    """Fetch recent push payloads (newest first), optionally filtered by
    agent and/or scan type."""
    filters: list[str] = []
    params: list = []
    if agent_id is not None:
        filters.append('p.agent_id = ?')
        params.append(agent_id)
    if scan_type is not None:
        filters.append('p.scan_type = ?')
        params.append(scan_type)
    where_clause = f'WHERE {" AND ".join(filters)}' if filters else ''
    params.append(limit)
    with get_db() as conn:
        cursor = conn.execute(f'''
            SELECT p.*, a.name as agent_name
            FROM push_payloads p
            JOIN agents a ON p.agent_id = a.id
            {where_clause}
            ORDER BY p.received_at DESC
            LIMIT ?
        ''', params)
        # Decode the JSON payload column while shaping each row.
        return [
            {
                'id': row['id'],
                'agent_id': row['agent_id'],
                'agent_name': row['agent_name'],
                'scan_type': row['scan_type'],
                'interface': row['interface'],
                'payload': json.loads(row['payload']),
                'received_at': row['received_at'],
            }
            for row in cursor
        ]
def cleanup_old_payloads(max_age_hours: int = 24) -> int:
    """Delete push payloads older than max_age_hours; returns rows removed."""
    cutoff_modifier = f'-{max_age_hours} hours'
    with get_db() as conn:
        result = conn.execute('''
            DELETE FROM push_payloads
            WHERE received_at < datetime('now', ?)
        ''', (cutoff_modifier,))
        return result.rowcount

572
utils/trilateration.py Normal file
View File

@@ -0,0 +1,572 @@
"""
Trilateration/Multilateration utilities for estimating device locations
from multiple agent observations using RSSI signal strength.
This module enables location estimation for devices that don't transmit
their own GPS coordinates (WiFi APs, Bluetooth devices, etc.) by using
signal strength measurements from multiple agents at known positions.
"""
from __future__ import annotations
import math
import logging
from dataclasses import dataclass, field
from typing import List, Tuple, Optional
from datetime import datetime, timezone
logger = logging.getLogger('intercept.trilateration')
# =============================================================================
# Data Classes
# =============================================================================
@dataclass
class AgentObservation:
    """A single observation of a device by an agent.

    Bundles the observer's identity and position with the measured signal
    strength, so multiple observations can be fed to the multilateration
    solver.
    """
    agent_name: str          # Identifier of the observing agent node
    agent_lat: float         # Observer latitude in decimal degrees (WGS84)
    agent_lon: float         # Observer longitude in decimal degrees (WGS84)
    rssi: float  # dBm
    # Defaults to "now" in UTC so callers may omit the capture time.
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    frequency_mhz: Optional[float] = None  # For frequency-dependent path loss
@dataclass
class LocationEstimate:
    """Estimated location of a device together with confidence metrics.

    Produced by the multilateration solver; `accuracy_meters` is an
    estimated error radius and `confidence` is normalized to [0, 1].
    """
    latitude: float
    longitude: float
    accuracy_meters: float  # Estimated accuracy radius
    confidence: float  # 0.0 to 1.0
    num_observations: int
    observations: List[AgentObservation] = field(default_factory=list)
    method: str = "multilateration"
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    def to_dict(self) -> dict:
        """Serialize the estimate to a JSON-friendly dictionary."""
        contributing_agents = [observation.agent_name for observation in self.observations]
        return {
            'latitude': self.latitude,
            'longitude': self.longitude,
            'accuracy_meters': self.accuracy_meters,
            'confidence': self.confidence,
            'num_observations': self.num_observations,
            'method': self.method,
            'timestamp': self.timestamp.isoformat(),
            'agents': contributing_agents
        }
# =============================================================================
# Path Loss Models
# =============================================================================
class PathLossModel:
    """
    Convert RSSI to estimated distance using a log-distance path loss model.

    The free-space path loss (FSPL) model is:
        FSPL(dB) = 20*log10(d) + 20*log10(f) - 147.55
    Rearranged for distance:
        d = 10^((RSSI_ref - RSSI) / (10 * n))
    Where:
        - n is the path loss exponent (2 for free space, 2.5-4 for indoor)
        - RSSI_ref is the RSSI at 1 meter reference distance
    """

    # Default parameters for different environments
    ENVIRONMENTS = {
        'free_space': {'n': 2.0, 'rssi_ref': -40},
        'outdoor': {'n': 2.5, 'rssi_ref': -45},
        'indoor': {'n': 3.0, 'rssi_ref': -50},
        'indoor_obstructed': {'n': 4.0, 'rssi_ref': -55},
    }

    # Frequency-specific reference RSSI adjustments (WiFi vs Bluetooth)
    FREQUENCY_ADJUSTMENTS = {
        2400: 0,   # 2.4 GHz WiFi/Bluetooth - baseline
        5000: -3,  # 5 GHz WiFi - weaker propagation
        900: +5,   # 900 MHz ISM - better propagation
        433: +8,   # 433 MHz sensors - even better
    }

    def __init__(
        self,
        environment: str = 'outdoor',
        path_loss_exponent: Optional[float] = None,
        reference_rssi: Optional[float] = None
    ):
        """
        Initialize path loss model.

        Args:
            environment: One of 'free_space', 'outdoor', 'indoor', 'indoor_obstructed'
            path_loss_exponent: Override the environment's default n value
            reference_rssi: Override the environment's default RSSI at 1m
        """
        # Unknown environment names silently fall back to 'outdoor'.
        defaults = self.ENVIRONMENTS.get(environment, self.ENVIRONMENTS['outdoor'])
        self.n = defaults['n'] if path_loss_exponent is None else path_loss_exponent
        self.rssi_ref = defaults['rssi_ref'] if reference_rssi is None else reference_rssi

    def _adjusted_reference(self, frequency_mhz: Optional[float]) -> float:
        """Return the 1 m reference RSSI, shifted for the closest known band."""
        if frequency_mhz:
            for band, offset in self.FREQUENCY_ADJUSTMENTS.items():
                # First band within 500 MHz wins (dict order is intentional).
                if abs(frequency_mhz - band) < 500:
                    return self.rssi_ref + offset
        return self.rssi_ref

    def rssi_to_distance(
        self,
        rssi: float,
        frequency_mhz: Optional[float] = None
    ) -> float:
        """
        Convert RSSI to estimated distance in meters.

        Args:
            rssi: Measured RSSI in dBm
            frequency_mhz: Signal frequency for adjustment (optional)

        Returns:
            Estimated distance in meters, clamped to [0.5, 10000]
        """
        reference = self._adjusted_reference(frequency_mhz)
        # Log-distance path loss model: d = 10^((RSSI_ref - RSSI) / (10 * n))
        try:
            estimated = math.pow(10, (reference - rssi) / (10.0 * self.n))
        except (ValueError, OverflowError):
            return 100.0  # Default fallback
        # Sanity bounds
        return max(0.5, min(estimated, 10000))

    def distance_to_rssi(
        self,
        distance: float,
        frequency_mhz: Optional[float] = None
    ) -> float:
        """
        Estimate RSSI at a given distance (inverse of rssi_to_distance).
        Useful for testing and validation.
        """
        # Clamp non-positive distances to the 0.5 m floor used by the model.
        effective_distance = distance if distance > 0 else 0.5
        reference = self._adjusted_reference(frequency_mhz)
        # RSSI = RSSI_ref - 10 * n * log10(d)
        return reference - (10.0 * self.n * math.log10(effective_distance))
# =============================================================================
# Geographic Utilities
# =============================================================================
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """
    Great-circle distance in meters between two WGS84 points.

    Uses the Haversine formula, which is numerically stable for the
    distances involved here.
    """
    earth_radius_m = 6371000
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    half_dphi = math.radians(lat2 - lat1) / 2
    half_dlambda = math.radians(lon2 - lon1) / 2
    sin_dphi = math.sin(half_dphi)
    sin_dlambda = math.sin(half_dlambda)
    a = sin_dphi * sin_dphi + math.cos(phi1) * math.cos(phi2) * sin_dlambda * sin_dlambda
    central_angle = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return earth_radius_m * central_angle
def meters_to_degrees(meters: float, latitude: float) -> Tuple[float, float]:
    """
    Convert a distance in meters to approximate degrees at a given latitude.

    Returns (lat_degrees, lon_degrees). Latitude degrees are roughly
    constant (~111 km each); longitude degrees shrink toward the poles.
    """
    meters_per_degree = 111000.0
    lat_degrees = meters / meters_per_degree
    lon_degrees = meters / (meters_per_degree * math.cos(math.radians(latitude)))
    return lat_degrees, lon_degrees
def offset_position(lat: float, lon: float, north_m: float, east_m: float) -> Tuple[float, float]:
    """
    Shift a GPS position by a number of meters north and east.

    Uses the flat-earth approximation (adequate for small offsets).
    Returns (new_lat, new_lon).
    """
    delta_lat = north_m / 111000.0
    delta_lon = east_m / (111000.0 * math.cos(math.radians(lat)))
    return lat + delta_lat, lon + delta_lon
# =============================================================================
# Trilateration Algorithm
# =============================================================================
class Trilateration:
    """
    Estimate device location using multilateration from multiple RSSI observations.
    Multilateration works by:
    1. Converting RSSI to estimated distance from each observer
    2. Finding the point that minimizes the sum of squared distance errors
    3. Using iterative refinement for better accuracy
    """
    def __init__(
        self,
        path_loss_model: Optional[PathLossModel] = None,
        min_observations: int = 2,
        max_iterations: int = 100,
        convergence_threshold: float = 0.1  # meters
    ):
        """
        Initialize trilateration engine.
        Args:
            path_loss_model: Model for RSSI to distance conversion
            min_observations: Minimum number of observations required
            max_iterations: Maximum iterations for refinement
            convergence_threshold: Stop when movement is less than this (meters)
        """
        self.path_loss = path_loss_model or PathLossModel()
        self.min_observations = min_observations
        self.max_iterations = max_iterations
        self.convergence_threshold = convergence_threshold
    def estimate_location(
        self,
        observations: List[AgentObservation]
    ) -> Optional[LocationEstimate]:
        """
        Estimate device location from multiple agent observations.

        Pipeline: validate coordinates -> RSSI-to-distance -> weighted
        centroid seed -> gradient-descent refinement -> accuracy/confidence
        scoring.

        Args:
            observations: List of observations from different agents
        Returns:
            LocationEstimate if successful, None if insufficient data
        """
        if len(observations) < self.min_observations:
            logger.debug(f"Insufficient observations: {len(observations)} < {self.min_observations}")
            return None
        # Filter out observations with invalid coordinates
        valid_obs = [
            obs for obs in observations
            if obs.agent_lat is not None and obs.agent_lon is not None
            and -90 <= obs.agent_lat <= 90 and -180 <= obs.agent_lon <= 180
        ]
        if len(valid_obs) < self.min_observations:
            return None
        # Convert RSSI to estimated distances
        distances = []
        for obs in valid_obs:
            dist = self.path_loss.rssi_to_distance(obs.rssi, obs.frequency_mhz)
            distances.append(dist)
        # Use weighted centroid as initial estimate
        # Weight by inverse distance (closer observations weighted more)
        weights = [1.0 / max(d, 1.0) for d in distances]
        total_weight = sum(weights)
        initial_lat = sum(obs.agent_lat * w for obs, w in zip(valid_obs, weights)) / total_weight
        initial_lon = sum(obs.agent_lon * w for obs, w in zip(valid_obs, weights)) / total_weight
        # Iterative refinement using gradient descent
        current_lat, current_lon = initial_lat, initial_lon
        for iteration in range(self.max_iterations):
            # Calculate gradient of error function
            # (error = sum over observations of (measured range - model range)^2)
            grad_lat = 0.0
            grad_lon = 0.0
            total_error = 0.0
            for obs, expected_dist in zip(valid_obs, distances):
                actual_dist = haversine_distance(
                    current_lat, current_lon,
                    obs.agent_lat, obs.agent_lon
                )
                error = actual_dist - expected_dist
                total_error += error ** 2
                if actual_dist > 0.1:  # Avoid division by zero
                    # Gradient components
                    lat_diff = current_lat - obs.agent_lat
                    lon_diff = current_lon - obs.agent_lon
                    # Scale factor for lat/lon to meters
                    lat_scale = 111000.0
                    lon_scale = 111000.0 * math.cos(math.radians(current_lat))
                    grad_lat += error * (lat_diff * lat_scale) / actual_dist
                    grad_lon += error * (lon_diff * lon_scale) / actual_dist
            # Adaptive learning rate based on error magnitude
            # NOTE(review): heuristic step size — shrinks as 1/(iteration+1)
            # and is capped at 0.5; not derived from a formal line search.
            rmse = math.sqrt(total_error / len(valid_obs))
            learning_rate = min(0.5, rmse / 1000.0) / (iteration + 1)
            # Update position (gradient is in meters; divide back to degrees)
            lat_delta = -learning_rate * grad_lat / 111000.0
            lon_delta = -learning_rate * grad_lon / (111000.0 * math.cos(math.radians(current_lat)))
            new_lat = current_lat + lat_delta
            new_lon = current_lon + lon_delta
            # Check convergence
            movement = haversine_distance(current_lat, current_lon, new_lat, new_lon)
            current_lat = new_lat
            current_lon = new_lon
            if movement < self.convergence_threshold:
                break
        # Calculate accuracy estimate (average distance error)
        total_error = 0.0
        for obs, expected_dist in zip(valid_obs, distances):
            actual_dist = haversine_distance(
                current_lat, current_lon,
                obs.agent_lat, obs.agent_lon
            )
            total_error += abs(actual_dist - expected_dist)
        avg_error = total_error / len(valid_obs)
        # Calculate confidence based on:
        # - Number of observations (more is better)
        # - Agreement between observations (lower error is better)
        # - RSSI strength (stronger signals are more reliable)
        obs_factor = min(1.0, len(valid_obs) / 4.0)  # Max confidence at 4+ observations
        error_factor = max(0.0, 1.0 - avg_error / 500.0)  # Decreases as error increases
        # Map the strongest RSSI from [-90, -40] dBm onto [0, 1].
        rssi_factor = min(1.0, max(0.0, (max(obs.rssi for obs in valid_obs) + 90) / 50.0))
        confidence = (obs_factor * 0.3 + error_factor * 0.5 + rssi_factor * 0.2)
        return LocationEstimate(
            latitude=current_lat,
            longitude=current_lon,
            accuracy_meters=avg_error * 1.5,  # Safety factor
            confidence=confidence,
            num_observations=len(valid_obs),
            observations=valid_obs,
            method="multilateration"
        )
# =============================================================================
# Device Location Tracker
# =============================================================================
class DeviceLocationTracker:
    """
    Track device locations over time using observations from multiple agents.

    Observations are aggregated per device (keyed by an identifier such as a
    MAC address) inside a rolling time window, and a location estimate is
    recomputed whenever new data arrives.
    """

    def __init__(
        self,
        trilateration: Optional[Trilateration] = None,
        observation_window_seconds: float = 60.0,
        min_observations: int = 2
    ):
        """
        Initialize device tracker.

        Args:
            trilateration: Trilateration engine to use
            observation_window_seconds: How long to keep observations
            min_observations: Minimum observations needed for location
        """
        self.trilateration = trilateration if trilateration is not None else Trilateration()
        self.observation_window = observation_window_seconds
        self.min_observations = min_observations
        # device_id -> list of AgentObservation
        self.observations: dict[str, List[AgentObservation]] = {}
        # device_id -> latest LocationEstimate
        self.locations: dict[str, LocationEstimate] = {}

    def add_observation(
        self,
        device_id: str,
        agent_name: str,
        agent_lat: float,
        agent_lon: float,
        rssi: float,
        frequency_mhz: Optional[float] = None,
        timestamp: Optional[datetime] = None
    ) -> Optional[LocationEstimate]:
        """
        Record an observation and attempt a location update.

        Args:
            device_id: Unique identifier for the device (MAC, BSSID, etc.)
            agent_name: Name of the observing agent
            agent_lat: Agent's GPS latitude
            agent_lon: Agent's GPS longitude
            rssi: Observed signal strength in dBm
            frequency_mhz: Signal frequency (optional)
            timestamp: Observation time (defaults to now)

        Returns:
            Updated LocationEstimate if enough data, None otherwise
        """
        observation = AgentObservation(
            agent_name=agent_name,
            agent_lat=agent_lat,
            agent_lon=agent_lon,
            rssi=rssi,
            frequency_mhz=frequency_mhz,
            timestamp=timestamp if timestamp is not None else datetime.now(timezone.utc)
        )
        self.observations.setdefault(device_id, []).append(observation)
        # Drop stale data, then recompute the estimate for this device.
        self._prune_observations(device_id)
        return self._update_location(device_id)

    def _prune_observations(self, device_id: str) -> None:
        """Drop observations that have aged out of the rolling window."""
        cutoff = datetime.now(timezone.utc).timestamp() - self.observation_window
        self.observations[device_id] = [
            observation for observation in self.observations[device_id]
            if observation.timestamp.timestamp() > cutoff
        ]

    def _update_location(self, device_id: str) -> Optional[LocationEstimate]:
        """Recompute the estimate using the freshest observation per agent."""
        latest_per_agent: dict[str, AgentObservation] = {}
        for observation in self.observations.get(device_id, []):
            best = latest_per_agent.get(observation.agent_name)
            if best is None or observation.timestamp > best.timestamp:
                latest_per_agent[observation.agent_name] = observation
        candidates = list(latest_per_agent.values())
        if len(candidates) < self.min_observations:
            return None
        estimate = self.trilateration.estimate_location(candidates)
        if estimate:
            self.locations[device_id] = estimate
        return estimate

    def get_location(self, device_id: str) -> Optional[LocationEstimate]:
        """Return the latest location estimate for a device, if any."""
        return self.locations.get(device_id)

    def get_all_locations(self) -> dict[str, LocationEstimate]:
        """Return a snapshot copy of all current location estimates."""
        return dict(self.locations)

    def get_devices_near(
        self,
        lat: float,
        lon: float,
        radius_meters: float
    ) -> List[Tuple[str, LocationEstimate]]:
        """Return (device_id, estimate) pairs within radius of the point."""
        return [
            (device_id, estimate)
            for device_id, estimate in self.locations.items()
            if haversine_distance(lat, lon, estimate.latitude, estimate.longitude) <= radius_meters
        ]

    def clear(self) -> None:
        """Forget all observations and location estimates."""
        self.observations.clear()
        self.locations.clear()
# =============================================================================
# Convenience Functions
# =============================================================================
def estimate_location_from_observations(
    observations: List[dict],
    environment: str = 'outdoor'
) -> Optional[dict]:
    """
    Convenience wrapper: estimate a location from raw observation dicts.

    Args:
        observations: List of dicts with keys:
            - agent_lat: float
            - agent_lon: float
            - rssi: float (dBm)
            - agent_name: str (optional)
            - frequency_mhz: float (optional)
        environment: Path loss environment ('outdoor', 'indoor', etc.)

    Returns:
        Location dict or None if insufficient data

    Example:
        observations = [
            {'agent_lat': 40.7128, 'agent_lon': -74.0060, 'rssi': -55, 'agent_name': 'node-1'},
            {'agent_lat': 40.7135, 'agent_lon': -74.0055, 'rssi': -70, 'agent_name': 'node-2'},
            {'agent_lat': 40.7120, 'agent_lon': -74.0050, 'rssi': -62, 'agent_name': 'node-3'},
        ]
        result = estimate_location_from_observations(observations)
        # result: {'latitude': 40.7130, 'longitude': -74.0056, 'accuracy_meters': 25, ...}
    """
    parsed = [
        AgentObservation(
            agent_name=entry.get('agent_name', 'unknown'),
            agent_lat=entry['agent_lat'],
            agent_lon=entry['agent_lon'],
            rssi=entry['rssi'],
            frequency_mhz=entry.get('frequency_mhz')
        )
        for entry in observations
    ]
    engine = Trilateration(path_loss_model=PathLossModel(environment=environment))
    result = engine.estimate_location(parsed)
    if result is None:
        return None
    return result.to_dict()