Add proximity radar visualization and signal history heatmap

Backend:
- Add device_key.py for stable device identification (identity > public MAC > fingerprint)
- Add distance.py with DistanceEstimator class (path-loss formula, EMA smoothing, confidence scoring)
- Add ring_buffer.py for time-windowed RSSI observation storage
- Extend BTDeviceAggregate with proximity_band, estimated_distance_m, distance_confidence, rssi_ema
- Add new API endpoints: /proximity/snapshot, /heatmap/data, /devices/<key>/timeseries
- Update TSCM integration to include new proximity fields

Frontend:
- Add proximity-radar.js: SVG radar with concentric rings, device dots positioned by distance
- Add timeline-heatmap.js: RSSI history grid with time buckets and color-coded signal strength
- Update bluetooth.js to initialize and feed data to new components
- Replace zone counters with radar visualization and zone summary
- Add proximity-viz.css for component styling

Tests:
- Add test_bluetooth_proximity.py with unit tests for device key stability, EMA smoothing,
  distance estimation, band classification, and ring buffer functionality

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Smittix
2026-01-21 19:25:33 +00:00
parent bd7c83b18c
commit 7957176e59
14 changed files with 2870 additions and 27 deletions

View File

@@ -8,12 +8,17 @@ device aggregation, RSSI statistics, and observable heuristics.
from .aggregator import DeviceAggregator
from .capability_check import check_capabilities, quick_adapter_check
from .constants import (
# Range bands
# Range bands (legacy)
RANGE_VERY_CLOSE,
RANGE_CLOSE,
RANGE_NEARBY,
RANGE_FAR,
RANGE_UNKNOWN,
# Proximity bands (new)
PROXIMITY_IMMEDIATE,
PROXIMITY_NEAR,
PROXIMITY_FAR,
PROXIMITY_UNKNOWN,
# Protocols
PROTOCOL_BLE,
PROTOCOL_CLASSIC,
@@ -25,8 +30,11 @@ from .constants import (
ADDRESS_TYPE_RPA,
ADDRESS_TYPE_NRPA,
)
from .device_key import generate_device_key, is_randomized_mac, extract_key_type
from .distance import DistanceEstimator, ProximityBand, get_distance_estimator
from .heuristics import HeuristicsEngine, evaluate_device_heuristics, evaluate_all_devices
from .models import BTDeviceAggregate, BTObservation, ScanStatus, SystemCapabilities
from .ring_buffer import RingBuffer, get_ring_buffer, reset_ring_buffer
from .scanner import BluetoothScanner, get_bluetooth_scanner, reset_bluetooth_scanner
__all__ = [
@@ -44,6 +52,21 @@ __all__ = [
# Aggregator
'DeviceAggregator',
# Device key generation
'generate_device_key',
'is_randomized_mac',
'extract_key_type',
# Distance estimation
'DistanceEstimator',
'ProximityBand',
'get_distance_estimator',
# Ring buffer
'RingBuffer',
'get_ring_buffer',
'reset_ring_buffer',
# Heuristics
'HeuristicsEngine',
'evaluate_device_heuristics',
@@ -53,15 +76,25 @@ __all__ = [
'check_capabilities',
'quick_adapter_check',
# Constants
# Constants - Range bands (legacy)
'RANGE_VERY_CLOSE',
'RANGE_CLOSE',
'RANGE_NEARBY',
'RANGE_FAR',
'RANGE_UNKNOWN',
# Constants - Proximity bands (new)
'PROXIMITY_IMMEDIATE',
'PROXIMITY_NEAR',
'PROXIMITY_FAR',
'PROXIMITY_UNKNOWN',
# Constants - Protocols
'PROTOCOL_BLE',
'PROTOCOL_CLASSIC',
'PROTOCOL_AUTO',
# Constants - Address types
'ADDRESS_TYPE_PUBLIC',
'ADDRESS_TYPE_RANDOM',
'ADDRESS_TYPE_RANDOM_STATIC',

View File

@@ -36,6 +36,9 @@ from .constants import (
PROTOCOL_CLASSIC,
)
from .models import BTObservation, BTDeviceAggregate
from .device_key import generate_device_key, is_randomized_mac
from .distance import DistanceEstimator, get_distance_estimator
from .ring_buffer import RingBuffer, get_ring_buffer
class DeviceAggregator:
@@ -53,6 +56,13 @@ class DeviceAggregator:
self._baseline_device_ids: set[str] = set()
self._baseline_set_time: Optional[datetime] = None
# Proximity estimation components
self._distance_estimator = get_distance_estimator()
self._ring_buffer = get_ring_buffer()
# Device key mapping (device_id -> device_key)
self._device_keys: dict[str, str] = {}
def ingest(self, observation: BTObservation) -> BTDeviceAggregate:
"""
Ingest a new observation and update the device aggregate.
@@ -119,6 +129,43 @@ class DeviceAggregator:
device.in_baseline = device_id in self._baseline_device_ids
device.is_new = not device.in_baseline and self._baseline_set_time is not None
# Generate stable device key
device_key = generate_device_key(
address=observation.address,
address_type=observation.address_type,
name=device.name,
manufacturer_id=device.manufacturer_id,
service_uuids=device.service_uuids if device.service_uuids else None,
)
device.device_key = device_key
self._device_keys[device_id] = device_key
# Check if randomized MAC
device.is_randomized_mac = is_randomized_mac(observation.address_type)
# Apply EMA smoothing to RSSI
if observation.rssi is not None:
device.rssi_ema = self._distance_estimator.apply_ema_smoothing(
current=observation.rssi,
prev_ema=device.rssi_ema,
)
# Get 60-second min/max
device.rssi_60s_min, device.rssi_60s_max = self._distance_estimator.get_rssi_60s_window(
device.rssi_samples,
window_seconds=60,
)
# Store in ring buffer for heatmap
self._ring_buffer.ingest(
device_key=device_key,
rssi=observation.rssi,
timestamp=observation.timestamp,
)
# Estimate distance and proximity band
self._update_proximity(device)
return device
def _infer_protocol(self, observation: BTObservation) -> str:
@@ -219,6 +266,31 @@ class DeviceAggregator:
device.range_band = RANGE_UNKNOWN
device.range_confidence = confidence * 0.5 # Reduced confidence for unknown
def _update_proximity(self, device: BTDeviceAggregate) -> None:
"""Update proximity estimation for a device."""
if device.rssi_ema is None:
device.proximity_band = 'unknown'
device.estimated_distance_m = None
device.distance_confidence = 0.0
return
# Estimate distance
distance, confidence = self._distance_estimator.estimate_distance(
rssi=device.rssi_ema,
tx_power=device.tx_power,
variance=device.rssi_variance,
)
device.estimated_distance_m = distance
device.distance_confidence = confidence
# Classify proximity band
band = self._distance_estimator.classify_proximity_band(
distance_m=distance,
rssi_ema=device.rssi_ema,
)
device.proximity_band = str(band)
def _merge_device_info(self, device: BTDeviceAggregate, observation: BTObservation) -> None:
"""Merge observation data into device aggregate (prefer non-None values)."""
# Name (prefer longer names as they're usually more complete)
@@ -345,3 +417,107 @@ class DeviceAggregator:
def has_baseline(self) -> bool:
"""Whether a baseline is set."""
return self._baseline_set_time is not None
@property
def ring_buffer(self) -> RingBuffer:
    """Read-only handle to the shared ring buffer of RSSI observations.

    Exposed so callers can query timeseries/heatmap data directly instead
    of going through the aggregator's wrapper methods.
    """
    return self._ring_buffer
def get_device_by_key(self, device_key: str) -> Optional[BTDeviceAggregate]:
    """Resolve a stable device key back to its device aggregate.

    Linear scan over the device-key map; returns None when nothing matches.
    """
    with self._lock:
        # First device_id whose mapped key matches wins.
        hits = (
            self._devices.get(dev_id)
            for dev_id, mapped in self._device_keys.items()
            if mapped == device_key
        )
        return next(hits, None)
def get_timeseries(
    self,
    device_key: str,
    window_minutes: int = 30,
    downsample_seconds: int = 10,
) -> list[dict]:
    """
    Fetch downsampled RSSI history for one device.

    Args:
        device_key: Stable device identifier.
        window_minutes: How far back to look, in minutes.
        downsample_seconds: Bucket size used for downsampling.

    Returns:
        List of {timestamp, rssi} dicts, oldest bucket first.
    """
    # Pure delegation: the ring buffer owns the raw observations.
    buf = self._ring_buffer
    return buf.get_timeseries(
        device_key=device_key,
        window_minutes=window_minutes,
        downsample_seconds=downsample_seconds,
    )
def get_heatmap_data(
    self,
    top_n: int = 20,
    window_minutes: int = 10,
    bucket_seconds: int = 10,
    sort_by: str = 'recency',
) -> dict:
    """
    Build the heatmap payload: per-device timeseries plus device metadata.

    Args:
        top_n: Number of devices to include.
        window_minutes: Time window.
        bucket_seconds: Bucket size for downsampling.
        sort_by: Sort method ('recency', 'strength', 'activity').

    Returns:
        Dict with 'window_minutes', 'bucket_seconds' and a 'devices' list.
    """
    # Ring buffer has its own lock; query it outside ours.
    timeseries = self._ring_buffer.get_all_timeseries(
        window_minutes=window_minutes,
        downsample_seconds=bucket_seconds,
        top_n=top_n,
        sort_by=sort_by,
    )
    result = {
        'window_minutes': window_minutes,
        'bucket_seconds': bucket_seconds,
        'devices': [],
    }
    with self._lock:
        # Fix: resolve device_key -> aggregate inline rather than calling
        # self.get_device_by_key(), which re-acquires self._lock (deadlock
        # if the lock is a non-reentrant threading.Lock) and is O(n) per
        # lookup. One reverse map keeps this O(n) overall.
        key_to_device = {}
        for device_id, key in self._device_keys.items():
            device = self._devices.get(device_id)
            if device is not None:
                key_to_device[key] = device
        for device_key, ts_data in timeseries.items():
            device_info = {
                'device_key': device_key,
                'timeseries': ts_data,
            }
            device = key_to_device.get(device_key)
            if device is not None:
                device_info.update({
                    'name': device.name,
                    'address': device.address,
                    'rssi_current': device.rssi_current,
                    # Fix: `is not None` so a legitimate 0.0 EMA survives.
                    'rssi_ema': round(device.rssi_ema, 1) if device.rssi_ema is not None else None,
                    'proximity_band': device.proximity_band,
                })
            else:
                # Device aged out of the aggregator but still has history.
                device_info.update({
                    'name': None,
                    'address': None,
                    'rssi_current': None,
                    'rssi_ema': None,
                    'proximity_band': 'unknown',
                })
            result['devices'].append(device_info)
    return result
def prune_ring_buffer(self) -> int:
    """Drop ring-buffer observations older than the retention window.

    Returns:
        Count of observations that were removed.
    """
    pruned = self._ring_buffer.prune_old()
    return pruned

View File

@@ -120,6 +120,63 @@ RANGE_NEARBY = 'nearby'
RANGE_FAR = 'far'
RANGE_UNKNOWN = 'unknown'
# =============================================================================
# PROXIMITY BANDS (new visualization system)
# =============================================================================
# NOTE(review): these labels must match the ProximityBand enum values in
# distance.py; both sides serialize to the same strings.
PROXIMITY_IMMEDIATE = 'immediate' # < 1m
PROXIMITY_NEAR = 'near' # 1-3m
PROXIMITY_FAR = 'far' # 3-10m
PROXIMITY_UNKNOWN = 'unknown' # insufficient data to classify
# RSSI thresholds for proximity band classification (dBm)
# NOTE(review): duplicated as RSSI_THRESHOLD_* defaults in distance.py — keep
# the two modules in sync when recalibrating.
PROXIMITY_RSSI_IMMEDIATE = -40 # >= -40 dBm -> immediate
PROXIMITY_RSSI_NEAR = -55 # >= -55 dBm -> near
PROXIMITY_RSSI_FAR = -75 # >= -75 dBm -> far
# =============================================================================
# DISTANCE ESTIMATION SETTINGS
# =============================================================================
# NOTE(review): distance.py defines its own DEFAULT_* copies of these values;
# changing one side without the other silently desynchronizes them.
# Path-loss exponent for indoor environments (typical range: 2-4)
DISTANCE_PATH_LOSS_EXPONENT = 2.5
# Reference RSSI at 1 meter (typical BLE value)
DISTANCE_RSSI_AT_1M = -59
# EMA smoothing alpha (higher = more responsive, lower = smoother)
DISTANCE_EMA_ALPHA = 0.3
# Variance thresholds for confidence scoring (dBm^2)
DISTANCE_LOW_VARIANCE = 25.0 # High confidence
DISTANCE_HIGH_VARIANCE = 100.0 # Low confidence
# =============================================================================
# RING BUFFER SETTINGS
# =============================================================================
# NOTE(review): ring_buffer.py carries matching DEFAULT_* fallbacks — keep in sync.
# Observation retention period (minutes)
RING_BUFFER_RETENTION_MINUTES = 30
# Minimum interval between observations per device (seconds)
RING_BUFFER_MIN_INTERVAL_SECONDS = 2.0
# Maximum observations stored per device
RING_BUFFER_MAX_OBSERVATIONS = 1000
# =============================================================================
# HEATMAP SETTINGS
# =============================================================================
# Default time window for heatmap (minutes)
HEATMAP_DEFAULT_WINDOW_MINUTES = 10
# Default bucket size for downsampling (seconds)
HEATMAP_DEFAULT_BUCKET_SECONDS = 10
# Maximum devices to show in heatmap
HEATMAP_MAX_DEVICES = 50
# =============================================================================
# COMMON MANUFACTURER IDS (OUI -> Name mapping for common vendors)
# =============================================================================

View File

@@ -0,0 +1,120 @@
"""
Stable device key generation for Bluetooth devices.
Generates consistent identifiers for devices even when MAC addresses rotate.
"""
from __future__ import annotations
import hashlib
from typing import Optional
from .constants import (
ADDRESS_TYPE_PUBLIC,
ADDRESS_TYPE_RANDOM_STATIC,
)
def generate_device_key(
    address: str,
    address_type: str,
    identity_address: Optional[str] = None,
    name: Optional[str] = None,
    manufacturer_id: Optional[int] = None,
    service_uuids: Optional[list[str]] = None,
) -> str:
    """
    Produce a stable key identifying a Bluetooth device across MAC rotation.

    Resolution order:
      1. ``id:{identity}``  - identity address resolved from an RPA via IRK.
      2. ``mac:{address}``  - public / static-random addresses are stable.
      3. ``fp:{hash}``      - fingerprint of advertised characteristics.

    Args:
        address: Over-the-air Bluetooth MAC address.
        address_type: Type of address (public, random, random_static, rpa, nrpa).
        identity_address: Resolved identity address, when known.
        name: Advertised device name, if any.
        manufacturer_id: Advertised manufacturer ID, if any.
        service_uuids: Advertised service UUIDs, if any.

    Returns:
        A stable device key string.
    """
    if identity_address:
        # A resolved identity is the strongest identifier available.
        return f"id:{identity_address.upper()}"
    if address_type in (ADDRESS_TYPE_PUBLIC, ADDRESS_TYPE_RANDOM_STATIC):
        # These address types do not rotate, so the MAC itself is stable.
        return f"mac:{address.upper()}"
    # Rotating address: derive a fingerprint from device characteristics.
    return _generate_fingerprint_key(address, name, manufacturer_id, service_uuids)


def _generate_fingerprint_key(
    address: str,
    name: Optional[str],
    manufacturer_id: Optional[int],
    service_uuids: Optional[list[str]],
) -> str:
    """
    Hash advertised characteristics into a key for rotating-MAC devices.

    NOTE(review): the fingerprint deliberately excludes the MAC, so two
    distinct devices advertising identical name/manufacturer/services will
    collide on one key — confirm this trade-off is intended.
    """
    parts: list[str] = []
    if name:
        # The advertised name is usually the most stable trait.
        parts.append(f"name:{name}")
    if manufacturer_id is not None:
        parts.append(f"mfr:{manufacturer_id}")
    if service_uuids:
        # Sort and dedupe so advertisement ordering cannot change the hash.
        parts.append(f"svc:{','.join(sorted(set(service_uuids)))}")
    if not parts:
        # Nothing to fingerprint — fall back to the (rotating) address.
        return f"mac:{address.upper()}"
    digest = hashlib.sha256("|".join(parts).encode()).hexdigest()
    return f"fp:{digest[:16]}"


def is_randomized_mac(address_type: str) -> bool:
    """
    Return True when *address_type* denotes a rotating/randomized MAC.

    Args:
        address_type: The address type string.
    """
    return address_type not in (ADDRESS_TYPE_PUBLIC, ADDRESS_TYPE_RANDOM_STATIC)


def extract_key_type(device_key: str) -> str:
    """
    Return the namespace prefix of a device key ('id', 'mac', or 'fp').

    A key without a ':' separator yields 'unknown'.
    """
    prefix, sep, _ = device_key.partition(':')
    return prefix if sep else 'unknown'

274
utils/bluetooth/distance.py Normal file
View File

@@ -0,0 +1,274 @@
"""
Distance estimation for Bluetooth devices.
Provides path-loss based distance calculation, band classification,
and EMA smoothing for RSSI values.
"""
from __future__ import annotations
from enum import Enum
from typing import Optional
class ProximityBand(str, Enum):
    """Coarse proximity classification for a detected device."""

    IMMEDIATE = 'immediate'  # < 1m
    NEAR = 'near'            # 1-3m
    FAR = 'far'              # 3-10m
    UNKNOWN = 'unknown'      # Cannot determine

    def __str__(self) -> str:
        # Bare value so str(band) serializes cleanly in API payloads.
        return self.value


# Calibration defaults.
# NOTE(review): constants.py carries DISTANCE_* / PROXIMITY_RSSI_* copies of
# these values — keep the two modules in sync when recalibrating.
# Default path-loss exponent for indoor environments
DEFAULT_PATH_LOSS_EXPONENT = 2.5
# RSSI thresholds for band classification (dBm)
RSSI_THRESHOLD_IMMEDIATE = -40  # >= -40 dBm
RSSI_THRESHOLD_NEAR = -55  # >= -55 dBm
RSSI_THRESHOLD_FAR = -75  # >= -75 dBm
# Default reference RSSI at 1 meter (typical BLE)
DEFAULT_RSSI_AT_1M = -59
# Default EMA alpha
DEFAULT_EMA_ALPHA = 0.3
# Variance thresholds for confidence scoring
LOW_VARIANCE_THRESHOLD = 25.0  # dBm^2
HIGH_VARIANCE_THRESHOLD = 100.0  # dBm^2


class DistanceEstimator:
    """
    Estimates distance to Bluetooth devices based on RSSI.

    Uses the log-distance path-loss formula when the advertised TX power is
    available and falls back to coarse RSSI-band midpoints otherwise.
    Confidence scores (0.0-1.0) are derived from RSSI variance: a stable
    signal earns more trust than a noisy one.
    """

    def __init__(
        self,
        path_loss_exponent: float = DEFAULT_PATH_LOSS_EXPONENT,
        rssi_at_1m: int = DEFAULT_RSSI_AT_1M,
        ema_alpha: float = DEFAULT_EMA_ALPHA,
    ):
        """
        Initialize the distance estimator.

        Args:
            path_loss_exponent: Path-loss exponent (n), typically 2-4 indoors.
            rssi_at_1m: Reference RSSI at 1 meter (dBm). Retained for
                calibration; estimation currently uses per-device TX power.
            ema_alpha: EMA smoothing factor (0-1); higher = more responsive.
        """
        self.path_loss_exponent = path_loss_exponent
        self.rssi_at_1m = rssi_at_1m
        self.ema_alpha = ema_alpha

    def estimate_distance(
        self,
        rssi: float,
        tx_power: Optional[int] = None,
        variance: Optional[float] = None,
    ) -> tuple[Optional[float], float]:
        """
        Estimate distance to a device based on RSSI.

        Args:
            rssi: Current RSSI value (dBm); must be negative to be valid.
            tx_power: Transmitted power at 1m (dBm), if advertised.
            variance: RSSI variance for confidence scoring.

        Returns:
            Tuple of (distance_m, confidence) where distance_m is None when
            the RSSI is missing or invalid, and confidence is 0.0-1.0.
        """
        # Fix: also reject rssi == 0 — zero/positive dBm readings are not
        # plausible Bluetooth measurements (previously only > 0 was rejected).
        if rssi is None or rssi >= 0:
            return None, 0.0
        # Base confidence reflects signal stability.
        base_confidence = self._calculate_variance_confidence(variance)
        if tx_power is not None:
            # Path-loss formula: d = 10^((tx_power - rssi) / (10 * n))
            distance = self._path_loss_distance(rssi, tx_power)
            # Advertised TX power makes the estimate more trustworthy.
            confidence = min(1.0, base_confidence * 1.2) if base_confidence > 0 else 0.6
            return distance, confidence
        else:
            # No TX power: fall back to coarse band-based estimation.
            distance = self._estimate_from_bands(rssi)
            confidence = base_confidence * 0.6 if base_confidence > 0 else 0.3
            return distance, confidence

    def _path_loss_distance(self, rssi: float, tx_power: int) -> float:
        """
        Calculate distance using the log-distance path-loss formula.

        Formula: d = 10^((tx_power - rssi) / (10 * n))

        Args:
            rssi: Current RSSI value.
            tx_power: Transmitted power at 1m.

        Returns:
            Estimated distance in meters, clamped to [0.1, 100.0].
        """
        exponent = (tx_power - rssi) / (10 * self.path_loss_exponent)
        distance = 10 ** exponent
        # Clamp to a physically sensible range for Bluetooth scanning.
        return max(0.1, min(100.0, distance))

    def _estimate_from_bands(self, rssi: float) -> float:
        """
        Rough distance estimate from RSSI bands when TX power is unavailable.

        Args:
            rssi: Current RSSI value.

        Returns:
            Approximate band-midpoint distance in meters.
        """
        if rssi >= RSSI_THRESHOLD_IMMEDIATE:
            return 0.5  # Immediate: ~0.5m
        elif rssi >= RSSI_THRESHOLD_NEAR:
            return 2.0  # Near: ~2m
        elif rssi >= RSSI_THRESHOLD_FAR:
            return 6.0  # Far: ~6m
        else:
            return 15.0  # Very far: ~15m

    def _calculate_variance_confidence(self, variance: Optional[float]) -> float:
        """
        Confidence from RSSI variance: lower variance = higher confidence.

        Args:
            variance: RSSI variance value.

        Returns:
            Confidence factor (0.0-1.0); 0.5 when variance is unknown.
        """
        if variance is None:
            return 0.5  # Unknown variance
        if variance <= LOW_VARIANCE_THRESHOLD:
            return 0.9  # High confidence - stable signal
        elif variance <= HIGH_VARIANCE_THRESHOLD:
            # Linear interpolation from 0.9 down to 0.4 across the range.
            ratio = (variance - LOW_VARIANCE_THRESHOLD) / (HIGH_VARIANCE_THRESHOLD - LOW_VARIANCE_THRESHOLD)
            return 0.9 - (ratio * 0.5)
        else:
            return 0.3  # Low confidence - unstable signal

    def classify_proximity_band(
        self,
        distance_m: Optional[float] = None,
        rssi_ema: Optional[float] = None,
    ) -> ProximityBand:
        """
        Classify a device into a proximity band.

        Prefers the distance estimate; falls back to smoothed RSSI; returns
        UNKNOWN when neither is usable.

        Args:
            distance_m: Estimated distance in meters.
            rssi_ema: Smoothed RSSI value.
        """
        if distance_m is not None:
            if distance_m < 1.0:
                return ProximityBand.IMMEDIATE
            elif distance_m < 3.0:
                return ProximityBand.NEAR
            elif distance_m < 10.0:
                return ProximityBand.FAR
            else:
                # Beyond 10m the estimate is too coarse to trust.
                return ProximityBand.UNKNOWN
        if rssi_ema is not None:
            if rssi_ema >= RSSI_THRESHOLD_IMMEDIATE:
                return ProximityBand.IMMEDIATE
            elif rssi_ema >= RSSI_THRESHOLD_NEAR:
                return ProximityBand.NEAR
            elif rssi_ema >= RSSI_THRESHOLD_FAR:
                return ProximityBand.FAR
        return ProximityBand.UNKNOWN

    def apply_ema_smoothing(
        self,
        current: int,
        prev_ema: Optional[float] = None,
        alpha: Optional[float] = None,
    ) -> float:
        """
        Apply exponential moving average smoothing to an RSSI value.

        Formula: new_ema = alpha * current + (1 - alpha) * prev_ema

        Args:
            current: Current RSSI value.
            prev_ema: Previous EMA value (None seeds the EMA with current).
            alpha: Smoothing factor (0-1); instance default when None.

        Returns:
            New EMA value.
        """
        if alpha is None:
            alpha = self.ema_alpha
        if prev_ema is None:
            return float(current)
        return alpha * current + (1 - alpha) * prev_ema

    def get_rssi_60s_window(
        self,
        rssi_samples: list[tuple],
        window_seconds: int = 60,
    ) -> tuple[Optional[int], Optional[int]]:
        """
        Get min/max RSSI from the last N seconds of samples.

        Args:
            rssi_samples: List of (timestamp, rssi) tuples.
            window_seconds: Window size in seconds.

        Returns:
            (min_rssi, max_rssi) or (None, None) when no recent samples.
        """
        from datetime import datetime, timedelta

        if not rssi_samples:
            return None, None
        # NOTE(review): compares against naive local datetime.now() — assumes
        # sample timestamps are naive local times; confirm upstream.
        cutoff = datetime.now() - timedelta(seconds=window_seconds)
        recent_rssi = [rssi for ts, rssi in rssi_samples if ts >= cutoff]
        if not recent_rssi:
            return None, None
        return min(recent_rssi), max(recent_rssi)


# Lazily-created module-level singleton shared across the app.
_default_estimator: Optional[DistanceEstimator] = None


def get_distance_estimator() -> DistanceEstimator:
    """Get or create the default distance estimator instance."""
    global _default_estimator
    if _default_estimator is None:
        _default_estimator = DistanceEstimator()
    return _default_estimator

View File

@@ -11,8 +11,13 @@ from typing import Optional
from .constants import (
MANUFACTURER_NAMES,
ADDRESS_TYPE_PUBLIC,
ADDRESS_TYPE_RANDOM,
ADDRESS_TYPE_RANDOM_STATIC,
ADDRESS_TYPE_RPA,
ADDRESS_TYPE_NRPA,
RANGE_UNKNOWN,
PROTOCOL_BLE,
PROXIMITY_UNKNOWN,
)
@@ -100,10 +105,21 @@ class BTDeviceAggregate:
rssi_variance: Optional[float] = None
rssi_confidence: float = 0.0 # 0.0-1.0
# Range band (very_close/close/nearby/far/unknown)
# Range band (very_close/close/nearby/far/unknown) - legacy
range_band: str = RANGE_UNKNOWN
range_confidence: float = 0.0
# Proximity band (new system: immediate/near/far/unknown)
device_key: Optional[str] = None
proximity_band: str = PROXIMITY_UNKNOWN
estimated_distance_m: Optional[float] = None
distance_confidence: float = 0.0
rssi_ema: Optional[float] = None
rssi_60s_min: Optional[int] = None
rssi_60s_max: Optional[int] = None
is_randomized_mac: bool = False
threat_tags: list[str] = field(default_factory=list)
# Device info (merged from observations)
name: Optional[str] = None
manufacturer_id: Optional[int] = None
@@ -193,10 +209,21 @@ class BTDeviceAggregate:
'rssi_confidence': round(self.rssi_confidence, 2),
'rssi_history': self.get_rssi_history(),
# Range
# Range (legacy)
'range_band': self.range_band,
'range_confidence': round(self.range_confidence, 2),
# Proximity (new system)
'device_key': self.device_key,
'proximity_band': self.proximity_band,
'estimated_distance_m': round(self.estimated_distance_m, 2) if self.estimated_distance_m else None,
'distance_confidence': round(self.distance_confidence, 2),
'rssi_ema': round(self.rssi_ema, 1) if self.rssi_ema else None,
'rssi_60s_min': self.rssi_60s_min,
'rssi_60s_max': self.rssi_60s_max,
'is_randomized_mac': self.is_randomized_mac,
'threat_tags': self.threat_tags,
# Device info
'name': self.name,
'manufacturer_id': self.manufacturer_id,
@@ -231,6 +258,7 @@ class BTDeviceAggregate:
"""Compact dictionary for list views."""
return {
'device_id': self.device_id,
'device_key': self.device_key,
'address': self.address,
'address_type': self.address_type,
'protocol': self.protocol,
@@ -238,7 +266,12 @@ class BTDeviceAggregate:
'manufacturer_name': self.manufacturer_name,
'rssi_current': self.rssi_current,
'rssi_median': round(self.rssi_median, 1) if self.rssi_median else None,
'rssi_ema': round(self.rssi_ema, 1) if self.rssi_ema else None,
'range_band': self.range_band,
'proximity_band': self.proximity_band,
'estimated_distance_m': round(self.estimated_distance_m, 2) if self.estimated_distance_m else None,
'distance_confidence': round(self.distance_confidence, 2),
'is_randomized_mac': self.is_randomized_mac,
'last_seen': self.last_seen.isoformat(),
'age_seconds': self.age_seconds,
'seen_count': self.seen_count,

View File

@@ -0,0 +1,335 @@
"""
Ring buffer for time-windowed Bluetooth observation storage.
Provides efficient storage of RSSI observations with rate limiting,
automatic pruning, and downsampling for visualization.
"""
from __future__ import annotations
import threading
from collections import deque
from datetime import datetime, timedelta
from typing import Optional
# Default configuration
# NOTE(review): constants.py defines matching RING_BUFFER_* values — keep
# the two sets of defaults in sync.
DEFAULT_RETENTION_MINUTES = 30
DEFAULT_MIN_INTERVAL_SECONDS = 2.0
DEFAULT_MAX_OBSERVATIONS_PER_DEVICE = 1000


class RingBuffer:
    """
    Time-windowed ring buffer for Bluetooth RSSI observations.

    Features:
    - Rate-limited ingestion (max 1 observation per device per interval)
    - Automatic pruning of old observations
    - Downsampling for efficient visualization
    - Thread-safe operations
    """

    def __init__(
        self,
        retention_minutes: int = DEFAULT_RETENTION_MINUTES,
        min_interval_seconds: float = DEFAULT_MIN_INTERVAL_SECONDS,
        max_observations_per_device: int = DEFAULT_MAX_OBSERVATIONS_PER_DEVICE,
    ):
        """
        Initialize the ring buffer.

        Args:
            retention_minutes: How long to keep observations (minutes).
            min_interval_seconds: Minimum time between observations per device.
            max_observations_per_device: Maximum observations stored per device.
        """
        self.retention_minutes = retention_minutes
        self.min_interval_seconds = min_interval_seconds
        self.max_observations_per_device = max_observations_per_device
        # device_key -> deque[(timestamp, rssi)], bounded per device
        self._observations: dict[str, deque[tuple[datetime, int]]] = {}
        # device_key -> timestamp of last accepted observation (rate limit)
        self._last_ingested: dict[str, datetime] = {}
        self._lock = threading.Lock()

    def ingest(
        self,
        device_key: str,
        rssi: Optional[int],
        timestamp: Optional[datetime] = None,
    ) -> bool:
        """
        Ingest an RSSI observation for a device.

        Rate-limited to prevent flooding from high-frequency advertisers.

        Args:
            device_key: Stable device identifier.
            rssi: RSSI value in dBm; observations without an RSSI are dropped.
            timestamp: Observation timestamp (defaults to now).

        Returns:
            True if the observation was stored, False if dropped.
        """
        # Fix: callers may pass observations with rssi=None; a stored None
        # would break the min/max/avg math in downsampling and stats.
        if rssi is None:
            return False
        if timestamp is None:
            timestamp = datetime.now()
        with self._lock:
            # Per-device rate limiting.
            last_time = self._last_ingested.get(device_key)
            if last_time is not None:
                if (timestamp - last_time).total_seconds() < self.min_interval_seconds:
                    return False
            if device_key not in self._observations:
                # maxlen makes the deque self-truncating per device.
                self._observations[device_key] = deque(
                    maxlen=self.max_observations_per_device
                )
            self._observations[device_key].append((timestamp, rssi))
            self._last_ingested[device_key] = timestamp
            return True

    def get_timeseries(
        self,
        device_key: str,
        window_minutes: Optional[int] = None,
        downsample_seconds: int = 10,
    ) -> list[dict]:
        """
        Get downsampled timeseries data for a device.

        Args:
            device_key: Device identifier.
            window_minutes: Time window (defaults to retention period).
            downsample_seconds: Bucket size for downsampling.

        Returns:
            List of dicts with 'timestamp' and 'rssi' keys, oldest first.
        """
        if window_minutes is None:
            window_minutes = self.retention_minutes
        cutoff = datetime.now() - timedelta(minutes=window_minutes)
        with self._lock:
            obs = self._observations.get(device_key)
            if not obs:
                return []
            return self._downsample(
                [(ts, rssi) for ts, rssi in obs if ts >= cutoff],
                downsample_seconds,
            )

    def get_all_timeseries(
        self,
        window_minutes: Optional[int] = None,
        downsample_seconds: int = 10,
        top_n: Optional[int] = None,
        sort_by: str = 'recency',
    ) -> dict[str, list[dict]]:
        """
        Get downsampled timeseries for all devices.

        Args:
            window_minutes: Time window.
            downsample_seconds: Bucket size for downsampling.
            top_n: Limit to top N devices.
            sort_by: Sort method ('recency', 'strength', 'activity').

        Returns:
            Dict mapping device_key to timeseries data, in sorted order.
        """
        if window_minutes is None:
            window_minutes = self.retention_minutes
        cutoff = datetime.now() - timedelta(minutes=window_minutes)
        with self._lock:
            # One pass: keep the filtered observations alongside the sort
            # keys so they are not re-filtered when building the result.
            candidates = []
            for device_key, obs in self._observations.items():
                recent = [(ts, rssi) for ts, rssi in obs if ts >= cutoff]
                if not recent:
                    continue
                last_seen = max(ts for ts, _ in recent)
                avg_rssi = sum(rssi for _, rssi in recent) / len(recent)
                candidates.append((device_key, last_seen, avg_rssi, len(recent), recent))
            if sort_by == 'strength':
                candidates.sort(key=lambda c: c[2], reverse=True)  # Higher RSSI first
            elif sort_by == 'activity':
                candidates.sort(key=lambda c: c[3], reverse=True)  # More observations first
            else:  # recency
                candidates.sort(key=lambda c: c[1], reverse=True)  # Most recent first
            if top_n is not None:
                candidates = candidates[:top_n]
            # Dict insertion order preserves the sort order for callers.
            return {
                device_key: self._downsample(recent, downsample_seconds)
                for device_key, _, _, _, recent in candidates
            }

    def _downsample(
        self,
        observations: list[tuple[datetime, int]],
        bucket_seconds: int,
    ) -> list[dict]:
        """
        Downsample observations into fixed time buckets (average RSSI each).

        Args:
            observations: List of (timestamp, rssi) tuples.
            bucket_seconds: Size of each bucket in seconds.

        Returns:
            List of dicts with 'timestamp' and 'rssi', oldest bucket first.
        """
        if not observations:
            return []
        # Fix: bucket on the epoch timeline instead of the seconds-of-minute
        # field. The old `ts.replace(second=...)` approach misaligned buckets
        # for sizes that don't divide 60, ignored minutes for sizes > 60,
        # and raised ZeroDivisionError for 0. Epoch flooring handles any
        # size and matches the old output for the 10s/60s defaults.
        step = max(1, int(bucket_seconds))
        buckets: dict[datetime, list[int]] = {}
        for ts, rssi in observations:
            epoch = int(ts.timestamp())
            bucket_ts = datetime.fromtimestamp(epoch - (epoch % step), tz=ts.tzinfo)
            buckets.setdefault(bucket_ts, []).append(rssi)
        return [
            {
                'timestamp': bucket_ts.isoformat(),
                'rssi': round(sum(values) / len(values), 1),
            }
            for bucket_ts, values in sorted(buckets.items())
        ]

    def prune_old(self) -> int:
        """
        Remove observations older than the retention period.

        Returns:
            Number of observations removed.
        """
        cutoff = datetime.now() - timedelta(minutes=self.retention_minutes)
        removed = 0
        with self._lock:
            empty_devices = []
            for device_key, obs in self._observations.items():
                initial_len = len(obs)
                # Entries are appended in arrival order, so the oldest
                # cluster at the left end of the deque.
                while obs and obs[0][0] < cutoff:
                    obs.popleft()
                removed += initial_len - len(obs)
                if not obs:
                    empty_devices.append(device_key)
            # Drop empty devices (also clears their rate-limit state).
            for device_key in empty_devices:
                del self._observations[device_key]
                self._last_ingested.pop(device_key, None)
        return removed

    def get_device_count(self) -> int:
        """Get number of devices with stored observations."""
        with self._lock:
            return len(self._observations)

    def get_observation_count(self, device_key: Optional[str] = None) -> int:
        """
        Get total observation count.

        Args:
            device_key: If specified, count only for this device.

        Returns:
            Number of stored observations.
        """
        with self._lock:
            if device_key:
                obs = self._observations.get(device_key)
                return len(obs) if obs else 0
            return sum(len(obs) for obs in self._observations.values())

    def clear(self) -> None:
        """Clear all stored observations and rate-limit state."""
        with self._lock:
            self._observations.clear()
            self._last_ingested.clear()

    def get_device_stats(self, device_key: str) -> Optional[dict]:
        """
        Get statistics for a specific device.

        Args:
            device_key: Device identifier.

        Returns:
            Dict with stats or None if device not found.
        """
        with self._lock:
            obs = self._observations.get(device_key)
            if not obs:
                return None
            rssi_values = [rssi for _, rssi in obs]
            timestamps = [ts for ts, _ in obs]
            return {
                'observation_count': len(obs),
                'first_observation': min(timestamps).isoformat(),
                'last_observation': max(timestamps).isoformat(),
                'rssi_min': min(rssi_values),
                'rssi_max': max(rssi_values),
                'rssi_avg': sum(rssi_values) / len(rssi_values),
            }


# Lazily-created module-level singleton shared across the app.
_ring_buffer: Optional[RingBuffer] = None


def get_ring_buffer() -> RingBuffer:
    """Get or create the shared ring buffer instance."""
    global _ring_buffer
    if _ring_buffer is None:
        _ring_buffer = RingBuffer()
    return _ring_buffer


def reset_ring_buffer() -> None:
    """Clear all stored data and discard the shared instance."""
    global _ring_buffer
    if _ring_buffer is not None:
        _ring_buffer.clear()
    _ring_buffer = None