mirror of
https://github.com/smittix/intercept.git
synced 2026-04-24 06:40:00 -07:00
Merge branch 'pr-124'
# Conflicts: # app.py # routes/__init__.py # utils/database.py
This commit is contained in:
69
app.py
69
app.py
@@ -39,6 +39,7 @@ from utils.constants import (
|
||||
MAX_VESSEL_AGE_SECONDS,
|
||||
MAX_DSC_MESSAGE_AGE_SECONDS,
|
||||
MAX_DEAUTH_ALERTS_AGE_SECONDS,
|
||||
MAX_GSM_AGE_SECONDS,
|
||||
QUEUE_MAX_SIZE,
|
||||
)
|
||||
import logging
|
||||
@@ -187,6 +188,16 @@ deauth_detector = None
|
||||
deauth_detector_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE)
|
||||
deauth_detector_lock = threading.Lock()
|
||||
|
||||
# GSM Spy
|
||||
gsm_spy_scanner_running = False # Flag: scanner thread active
|
||||
gsm_spy_livemon_process = None # For grgsm_livemon process
|
||||
gsm_spy_monitor_process = None # For tshark monitoring process
|
||||
gsm_spy_queue = queue.Queue(maxsize=QUEUE_MAX_SIZE)
|
||||
gsm_spy_lock = threading.Lock()
|
||||
gsm_spy_active_device = None
|
||||
gsm_spy_selected_arfcn = None
|
||||
gsm_spy_region = 'Americas' # Default band
|
||||
|
||||
# ============================================
|
||||
# GLOBAL STATE DICTIONARIES
|
||||
# ============================================
|
||||
@@ -219,6 +230,16 @@ dsc_messages = DataStore(max_age_seconds=MAX_DSC_MESSAGE_AGE_SECONDS, name='dsc_
|
||||
# Deauth alerts - using DataStore for automatic cleanup
|
||||
deauth_alerts = DataStore(max_age_seconds=MAX_DEAUTH_ALERTS_AGE_SECONDS, name='deauth_alerts')
|
||||
|
||||
# GSM Spy data stores
|
||||
gsm_spy_towers = DataStore(
|
||||
max_age_seconds=MAX_GSM_AGE_SECONDS,
|
||||
name='gsm_spy_towers'
|
||||
)
|
||||
gsm_spy_devices = DataStore(
|
||||
max_age_seconds=MAX_GSM_AGE_SECONDS,
|
||||
name='gsm_spy_devices'
|
||||
)
|
||||
|
||||
# Satellite state
|
||||
satellite_passes = [] # Predicted satellite passes (not auto-cleaned, calculated)
|
||||
|
||||
@@ -231,6 +252,8 @@ cleanup_manager.register(adsb_aircraft)
|
||||
cleanup_manager.register(ais_vessels)
|
||||
cleanup_manager.register(dsc_messages)
|
||||
cleanup_manager.register(deauth_alerts)
|
||||
cleanup_manager.register(gsm_spy_towers)
|
||||
cleanup_manager.register(gsm_spy_devices)
|
||||
|
||||
# ============================================
|
||||
# SDR DEVICE REGISTRY
|
||||
@@ -664,6 +687,8 @@ def kill_all() -> Response:
|
||||
global current_process, sensor_process, wifi_process, adsb_process, ais_process, acars_process
|
||||
global aprs_process, aprs_rtl_process, dsc_process, dsc_rtl_process, bt_process
|
||||
global dmr_process, dmr_rtl_process
|
||||
global gsm_spy_livemon_process, gsm_spy_monitor_process
|
||||
global gsm_spy_scanner_running, gsm_spy_active_device, gsm_spy_selected_arfcn, gsm_spy_region
|
||||
|
||||
# Import adsb and ais modules to reset their state
|
||||
from routes import adsb as adsb_module
|
||||
@@ -677,6 +702,7 @@ def kill_all() -> Response:
|
||||
'dump1090', 'acarsdec', 'direwolf', 'AIS-catcher',
|
||||
'hcitool', 'bluetoothctl', 'dsd',
|
||||
'rtl_tcp', 'rtl_power', 'rtlamr', 'ffmpeg',
|
||||
'grgsm_scanner', 'grgsm_livemon', 'tshark'
|
||||
]
|
||||
|
||||
for proc in processes_to_kill:
|
||||
@@ -745,6 +771,29 @@ def kill_all() -> Response:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Reset GSM Spy state
|
||||
with gsm_spy_lock:
|
||||
gsm_spy_scanner_running = False
|
||||
gsm_spy_active_device = None
|
||||
gsm_spy_selected_arfcn = None
|
||||
gsm_spy_region = 'Americas'
|
||||
|
||||
if gsm_spy_livemon_process:
|
||||
try:
|
||||
if safe_terminate(gsm_spy_livemon_process):
|
||||
killed.append('grgsm_livemon')
|
||||
except Exception:
|
||||
pass
|
||||
gsm_spy_livemon_process = None
|
||||
|
||||
if gsm_spy_monitor_process:
|
||||
try:
|
||||
if safe_terminate(gsm_spy_monitor_process):
|
||||
killed.append('tshark')
|
||||
except Exception:
|
||||
pass
|
||||
gsm_spy_monitor_process = None
|
||||
|
||||
# Clear SDR device registry
|
||||
with sdr_device_registry_lock:
|
||||
sdr_device_registry.clear()
|
||||
@@ -834,6 +883,26 @@ def main() -> None:
|
||||
from utils.database import init_db
|
||||
init_db()
|
||||
|
||||
# Register database cleanup functions
|
||||
from utils.database import (
|
||||
cleanup_old_gsm_signals,
|
||||
cleanup_old_gsm_tmsi_log,
|
||||
cleanup_old_gsm_velocity_log,
|
||||
cleanup_old_signal_history,
|
||||
cleanup_old_timeline_entries,
|
||||
cleanup_old_dsc_alerts,
|
||||
cleanup_old_payloads
|
||||
)
|
||||
# GSM cleanups: signals (60 days), TMSI log (24 hours), velocity (1 hour)
|
||||
# Interval multiplier: cleanup every N cycles (60s interval = 1 cleanup per hour at multiplier 60)
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_gsm_tmsi_log, interval_multiplier=60) # Every hour
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_gsm_velocity_log, interval_multiplier=60) # Every hour
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_gsm_signals, interval_multiplier=1440) # Every 24 hours
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_signal_history, interval_multiplier=1440) # Every 24 hours
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_timeline_entries, interval_multiplier=1440) # Every 24 hours
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_dsc_alerts, interval_multiplier=1440) # Every 24 hours
|
||||
cleanup_manager.register_db_cleanup(cleanup_old_payloads, interval_multiplier=1440) # Every 24 hours
|
||||
|
||||
# Start automatic cleanup of stale data entries
|
||||
cleanup_manager.start()
|
||||
|
||||
|
||||
@@ -218,6 +218,12 @@ ALERT_WEBHOOK_TIMEOUT = _get_env_int('ALERT_WEBHOOK_TIMEOUT', 5)
|
||||
ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin')
|
||||
ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin')
|
||||
|
||||
# GSM Spy settings
|
||||
GSM_OPENCELLID_API_KEY = _get_env('GSM_OPENCELLID_API_KEY', '')
|
||||
GSM_OPENCELLID_API_URL = _get_env('GSM_OPENCELLID_API_URL', 'https://opencellid.org/cell/get')
|
||||
GSM_API_DAILY_LIMIT = _get_env_int('GSM_API_DAILY_LIMIT', 1000)
|
||||
GSM_TA_METERS_PER_UNIT = _get_env_int('GSM_TA_METERS_PER_UNIT', 554)
|
||||
|
||||
def configure_logging() -> None:
|
||||
"""Configure application logging."""
|
||||
logging.basicConfig(
|
||||
|
||||
@@ -27,10 +27,11 @@ def register_blueprints(app):
|
||||
from .updater import updater_bp
|
||||
from .sstv import sstv_bp
|
||||
from .sstv_general import sstv_general_bp
|
||||
from .dmr import dmr_bp
|
||||
from .websdr import websdr_bp
|
||||
from .alerts import alerts_bp
|
||||
from .recordings import recordings_bp
|
||||
from .dmr import dmr_bp
|
||||
from .websdr import websdr_bp
|
||||
from .alerts import alerts_bp
|
||||
from .recordings import recordings_bp
|
||||
from .gsm_spy import gsm_spy_bp
|
||||
|
||||
app.register_blueprint(pager_bp)
|
||||
app.register_blueprint(sensor_bp)
|
||||
@@ -57,10 +58,11 @@ def register_blueprints(app):
|
||||
app.register_blueprint(updater_bp) # GitHub update checking
|
||||
app.register_blueprint(sstv_bp) # ISS SSTV decoder
|
||||
app.register_blueprint(sstv_general_bp) # General terrestrial SSTV
|
||||
app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice
|
||||
app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR
|
||||
app.register_blueprint(alerts_bp) # Cross-mode alerts
|
||||
app.register_blueprint(recordings_bp) # Session recordings
|
||||
app.register_blueprint(dmr_bp) # DMR / P25 / Digital Voice
|
||||
app.register_blueprint(websdr_bp) # HF/Shortwave WebSDR
|
||||
app.register_blueprint(alerts_bp) # Cross-mode alerts
|
||||
app.register_blueprint(recordings_bp) # Session recordings
|
||||
app.register_blueprint(gsm_spy_bp) # GSM cellular intelligence
|
||||
|
||||
# Initialize TSCM state with queue and lock from app
|
||||
import app as app_module
|
||||
|
||||
1511
routes/gsm_spy.py
Normal file
1511
routes/gsm_spy.py
Normal file
File diff suppressed because it is too large
Load Diff
127
setup.sh
127
setup.sh
@@ -694,6 +694,52 @@ install_macos_packages() {
|
||||
progress "Installing gpsd"
|
||||
brew_install gpsd
|
||||
|
||||
# gr-gsm for GSM Intelligence
|
||||
if ! cmd_exists grgsm_scanner; then
|
||||
echo
|
||||
info "gr-gsm provides GSM cellular signal decoding..."
|
||||
if ask_yes_no "Do you want to install gr-gsm?"; then
|
||||
progress "Installing gr-gsm"
|
||||
brew_install gnuradio
|
||||
(brew_install gr-gsm) || {
|
||||
warn "gr-gsm not available in Homebrew, attempting manual build..."
|
||||
# Manual build instructions
|
||||
if ask_yes_no "Attempt to build gr-gsm from source? (requires CMake and build tools)"; then
|
||||
info "Cloning gr-gsm repository..."
|
||||
git clone https://github.com/ptrkrysik/gr-gsm.git /tmp/gr-gsm
|
||||
cd /tmp/gr-gsm
|
||||
mkdir build && cd build
|
||||
cmake ..
|
||||
make -j$(sysctl -n hw.ncpu)
|
||||
sudo make install
|
||||
cd ~
|
||||
rm -rf /tmp/gr-gsm
|
||||
ok "gr-gsm installed successfully"
|
||||
else
|
||||
warn "Skipping gr-gsm source build. GSM Spy feature will not work."
|
||||
fi
|
||||
}
|
||||
else
|
||||
warn "Skipping gr-gsm installation. GSM Spy feature will not work."
|
||||
fi
|
||||
else
|
||||
ok "gr-gsm already installed"
|
||||
fi
|
||||
|
||||
# Wireshark (tshark) for packet analysis
|
||||
if ! cmd_exists tshark; then
|
||||
echo
|
||||
info "tshark is used for GSM packet parsing..."
|
||||
if ask_yes_no "Do you want to install tshark?"; then
|
||||
progress "Installing Wireshark (tshark)"
|
||||
brew_install wireshark
|
||||
else
|
||||
warn "Skipping tshark installation."
|
||||
fi
|
||||
else
|
||||
ok "tshark already installed"
|
||||
fi
|
||||
|
||||
progress "Installing Ubertooth tools (optional)"
|
||||
if ! cmd_exists ubertooth-btle; then
|
||||
echo
|
||||
@@ -1104,6 +1150,87 @@ install_debian_packages() {
|
||||
progress "Installing gpsd"
|
||||
apt_install gpsd gpsd-clients || true
|
||||
|
||||
# gr-gsm for GSM Intelligence
|
||||
if ! cmd_exists grgsm_scanner; then
|
||||
echo
|
||||
info "gr-gsm provides GSM cellular signal decoding..."
|
||||
if ask_yes_no "Do you want to install gr-gsm?"; then
|
||||
progress "Installing GNU Radio and gr-gsm"
|
||||
# Try to install gr-gsm directly from package repositories
|
||||
apt_install gnuradio gnuradio-dev gr-osmosdr gr-gsm || {
|
||||
warn "gr-gsm package not available in repositories. Attempting source build..."
|
||||
|
||||
# Fallback: Build from source
|
||||
progress "Building gr-gsm from source"
|
||||
apt_install git cmake libboost-all-dev libcppunit-dev swig \
|
||||
doxygen liblog4cpp5-dev python3-scipy python3-numpy \
|
||||
libvolk-dev libuhd-dev libfftw3-dev || true
|
||||
|
||||
info "Cloning gr-gsm repository..."
|
||||
if [ -d /tmp/gr-gsm ]; then
|
||||
rm -rf /tmp/gr-gsm
|
||||
fi
|
||||
|
||||
git clone https://github.com/ptrkrysik/gr-gsm.git /tmp/gr-gsm || {
|
||||
warn "Failed to clone gr-gsm repository. GSM Spy will not be available."
|
||||
return 0
|
||||
}
|
||||
|
||||
cd /tmp/gr-gsm
|
||||
mkdir -p build && cd build
|
||||
|
||||
# Try to find GNU Radio cmake files
|
||||
if [ -d /usr/lib/x86_64-linux-gnu/cmake/gnuradio ]; then
|
||||
export CMAKE_PREFIX_PATH="/usr/lib/x86_64-linux-gnu/cmake/gnuradio:$CMAKE_PREFIX_PATH"
|
||||
fi
|
||||
|
||||
info "Running CMake configuration..."
|
||||
if cmake .. 2>/dev/null; then
|
||||
info "Compiling gr-gsm (this may take several minutes)..."
|
||||
if make -j$(nproc) 2>/dev/null; then
|
||||
$SUDO make install
|
||||
$SUDO ldconfig
|
||||
cd ~
|
||||
rm -rf /tmp/gr-gsm
|
||||
ok "gr-gsm built and installed successfully"
|
||||
else
|
||||
warn "gr-gsm compilation failed. GSM Spy feature will not work."
|
||||
cd ~
|
||||
rm -rf /tmp/gr-gsm
|
||||
fi
|
||||
else
|
||||
warn "gr-gsm CMake configuration failed. GNU Radio 3.8+ may not be available."
|
||||
cd ~
|
||||
rm -rf /tmp/gr-gsm
|
||||
fi
|
||||
}
|
||||
|
||||
# Verify installation
|
||||
if cmd_exists grgsm_scanner; then
|
||||
ok "gr-gsm installed successfully"
|
||||
else
|
||||
warn "gr-gsm installation incomplete. GSM Spy feature will not work."
|
||||
fi
|
||||
else
|
||||
warn "Skipping gr-gsm installation."
|
||||
fi
|
||||
else
|
||||
ok "gr-gsm already installed"
|
||||
fi
|
||||
|
||||
# Wireshark (tshark)
|
||||
if ! cmd_exists tshark; then
|
||||
echo
|
||||
info "Installing tshark for GSM packet analysis..."
|
||||
apt_install tshark || true
|
||||
# Allow non-root capture
|
||||
$SUDO dpkg-reconfigure wireshark-common 2>/dev/null || true
|
||||
$SUDO usermod -a -G wireshark $USER 2>/dev/null || true
|
||||
ok "tshark installed. You may need to re-login for wireshark group permissions."
|
||||
else
|
||||
ok "tshark already installed"
|
||||
fi
|
||||
|
||||
progress "Installing Python packages"
|
||||
apt_install python3-venv python3-pip || true
|
||||
# Install Python packages via apt (more reliable than pip on modern Debian/Ubuntu)
|
||||
|
||||
2396
templates/gsm_spy_dashboard.html
Normal file
2396
templates/gsm_spy_dashboard.html
Normal file
File diff suppressed because it is too large
Load Diff
@@ -171,6 +171,10 @@
|
||||
<span class="mode-icon icon"><svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M3 18l2 2h14l2-2"/><path d="M5 18v-4a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2v4"/><path d="M12 12V6"/></svg></span>
|
||||
<span class="mode-name">Vessels</span>
|
||||
</a>
|
||||
<a href="/gsm_spy/dashboard" class="mode-card mode-card-sm" style="text-decoration: none;">
|
||||
<span class="mode-icon icon"><svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect x="5" y="2" width="14" height="20" rx="2" ry="2"/><line x1="12" y1="18" x2="12.01" y2="18"/><path d="M8 6h8M8 10h8M8 14h8"/></svg></span>
|
||||
<span class="mode-name">GSM SPY</span>
|
||||
</a>
|
||||
<button class="mode-card mode-card-sm" onclick="selectMode('aprs')">
|
||||
<span class="mode-icon icon"><svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M20 10c0 6-8 12-8 12s-8-6-8-12a8 8 0 0 1 16 0Z"/><circle cx="12" cy="10" r="3"/></svg></span>
|
||||
<span class="mode-name">APRS</span>
|
||||
|
||||
@@ -67,6 +67,7 @@
|
||||
{{ mode_item('rtlamr', 'Meters', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M22 12h-4l-3 9L9 3l-3 9H2"/></svg>') }}
|
||||
{{ mode_item('adsb', 'Aircraft', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M21 16v-2l-8-5V3.5a1.5 1.5 0 0 0-3 0V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z"/></svg>', '/adsb/dashboard') }}
|
||||
{{ mode_item('ais', 'Vessels', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M3 18l2 2h14l2-2"/><path d="M5 18v-4a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2v4"/><path d="M12 12V6"/><path d="M12 6l4 3"/></svg>', '/ais/dashboard') }}
|
||||
{{ mode_item('gsm', 'GSM SPY', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect x="5" y="2" width="14" height="20" rx="2" ry="2"/><line x1="12" y1="18" x2="12.01" y2="18"/><path d="M8 6h8M8 10h8M8 14h8"/></svg>', '/gsm_spy/dashboard') }}
|
||||
{{ mode_item('aprs', 'APRS', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M20 10c0 6-8 12-8 12s-8-6-8-12a8 8 0 0 1 16 0Z"/><circle cx="12" cy="10" r="3"/></svg>') }}
|
||||
{{ mode_item('listening', 'Listening Post', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect x="3" y="3" width="18" height="18" rx="2"/><path d="M3 9h18"/><path d="M9 21V9"/></svg>') }}
|
||||
{{ mode_item('spystations', 'Spy Stations', '<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path d="M4.9 19.1C1 15.2 1 8.8 4.9 4.9"/><path d="M7.8 16.2c-2.3-2.3-2.3-6.1 0-8.5"/><circle cx="12" cy="12" r="2"/><path d="M16.2 7.8c2.3 2.3 2.3 6.1 0 8.5"/><path d="M19.1 4.9C23 8.8 23 15.1 19.1 19"/></svg>') }}
|
||||
|
||||
302
tests/test_gsm_spy.py
Normal file
302
tests/test_gsm_spy.py
Normal file
@@ -0,0 +1,302 @@
|
||||
"""Unit tests for GSM Spy parsing and validation functions."""
|
||||
|
||||
import pytest
|
||||
from routes.gsm_spy import (
|
||||
parse_grgsm_scanner_output,
|
||||
parse_tshark_output,
|
||||
arfcn_to_frequency,
|
||||
validate_band_names,
|
||||
REGIONAL_BANDS
|
||||
)
|
||||
|
||||
|
||||
class TestParseGrgsmScannerOutput:
|
||||
"""Tests for parse_grgsm_scanner_output()."""
|
||||
|
||||
def test_valid_table_row(self):
|
||||
"""Test parsing a valid scanner output table row."""
|
||||
line = " 23 | 940.6 | 31245 | 1234 | 214 | 01 | -48"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
|
||||
assert result is not None
|
||||
assert result['type'] == 'tower'
|
||||
assert result['arfcn'] == 23
|
||||
assert result['frequency'] == 940.6
|
||||
assert result['cid'] == 31245
|
||||
assert result['lac'] == 1234
|
||||
assert result['mcc'] == 214
|
||||
assert result['mnc'] == 1
|
||||
assert result['signal_strength'] == -48.0
|
||||
assert 'timestamp' in result
|
||||
|
||||
def test_header_line(self):
|
||||
"""Test that header lines are skipped."""
|
||||
line = "ARFCN | Freq (MHz) | CID | LAC | MCC | MNC | Power (dB)"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_separator_line(self):
|
||||
"""Test that separator lines are skipped."""
|
||||
line = "--------------------------------------------------------------------"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_progress_line(self):
|
||||
"""Test that progress lines are skipped."""
|
||||
line = "Scanning: 50% complete"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_found_line(self):
|
||||
"""Test that 'Found X towers' lines are skipped."""
|
||||
line = "Found 5 towers"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_invalid_data(self):
|
||||
"""Test handling of invalid data."""
|
||||
line = " abc | xyz | invalid | data | bad | bad | bad"
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_empty_line(self):
|
||||
"""Test handling of empty lines."""
|
||||
result = parse_grgsm_scanner_output("")
|
||||
assert result is None
|
||||
|
||||
def test_partial_data(self):
|
||||
"""Test handling of incomplete table rows."""
|
||||
line = " 23 | 940.6 | 31245" # Missing fields
|
||||
result = parse_grgsm_scanner_output(line)
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestParseTsharkOutput:
|
||||
"""Tests for parse_tshark_output()."""
|
||||
|
||||
def test_valid_full_output(self):
|
||||
"""Test parsing tshark output with all fields."""
|
||||
line = "5\t0xABCD1234\t123456789012345\t1234\t31245"
|
||||
result = parse_tshark_output(line)
|
||||
|
||||
assert result is not None
|
||||
assert result['type'] == 'device'
|
||||
assert result['ta_value'] == 5
|
||||
assert result['tmsi'] == '0xABCD1234'
|
||||
assert result['imsi'] == '123456789012345'
|
||||
assert result['lac'] == 1234
|
||||
assert result['cid'] == 31245
|
||||
assert result['distance_meters'] == 5 * 554 # TA * 554 meters
|
||||
assert 'timestamp' in result
|
||||
|
||||
def test_missing_optional_fields(self):
|
||||
"""Test parsing with missing optional fields (empty tabs)."""
|
||||
line = "3\t\t\t1234\t31245"
|
||||
result = parse_tshark_output(line)
|
||||
|
||||
assert result is not None
|
||||
assert result['ta_value'] == 3
|
||||
assert result['tmsi'] is None
|
||||
assert result['imsi'] is None
|
||||
assert result['lac'] == 1234
|
||||
assert result['cid'] == 31245
|
||||
|
||||
def test_no_ta_value(self):
|
||||
"""Test parsing without TA value (empty field)."""
|
||||
# When TA is empty, int('') will fail, so the parse returns None
|
||||
# This is the current behavior - the function expects valid integers or valid empty handling
|
||||
line = "\t0xABCD1234\t123456789012345\t1234\t31245"
|
||||
result = parse_tshark_output(line)
|
||||
# Current implementation will fail to parse this due to int('') failing
|
||||
assert result is None
|
||||
|
||||
def test_invalid_line(self):
|
||||
"""Test handling of invalid tshark output."""
|
||||
line = "invalid data"
|
||||
result = parse_tshark_output(line)
|
||||
assert result is None
|
||||
|
||||
def test_empty_line(self):
|
||||
"""Test handling of empty lines."""
|
||||
result = parse_tshark_output("")
|
||||
assert result is None
|
||||
|
||||
def test_partial_fields(self):
|
||||
"""Test with fewer than 5 fields."""
|
||||
line = "5\t0xABCD1234" # Only 2 fields
|
||||
result = parse_tshark_output(line)
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestArfcnToFrequency:
|
||||
"""Tests for arfcn_to_frequency()."""
|
||||
|
||||
def test_gsm850_arfcn(self):
|
||||
"""Test ARFCN in GSM850 band."""
|
||||
# GSM850: ARFCN 128-251, 869-894 MHz
|
||||
arfcn = 128
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
assert freq == 869000000 # 869 MHz
|
||||
|
||||
arfcn = 251
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
assert freq == 893600000 # 893.6 MHz
|
||||
|
||||
def test_egsm900_arfcn(self):
|
||||
"""Test ARFCN in EGSM900 band."""
|
||||
# EGSM900: ARFCN 0-124, 925-960 MHz
|
||||
arfcn = 0
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
assert freq == 925000000 # 925 MHz
|
||||
|
||||
arfcn = 124
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
assert freq == 949800000 # 949.8 MHz
|
||||
|
||||
def test_dcs1800_arfcn(self):
|
||||
"""Test ARFCN in DCS1800 band."""
|
||||
# DCS1800: ARFCN 512-885, 1805-1880 MHz
|
||||
# Note: ARFCN 512 also exists in PCS1900 and will match that first
|
||||
# Use ARFCN 811+ which is only in DCS1800
|
||||
arfcn = 811 # Beyond PCS1900 range (512-810)
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
# 811 is ARFCN offset from 512, so freq = 1805MHz + (811-512)*200kHz
|
||||
expected = 1805000000 + (811 - 512) * 200000
|
||||
assert freq == expected
|
||||
|
||||
arfcn = 885
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
assert freq == 1879600000 # 1879.6 MHz
|
||||
|
||||
def test_pcs1900_arfcn(self):
|
||||
"""Test ARFCN in PCS1900 band."""
|
||||
# PCS1900: ARFCN 512-810, 1930-1990 MHz
|
||||
# Note: overlaps with DCS1800 ARFCN range, but different frequencies
|
||||
arfcn = 512
|
||||
freq = arfcn_to_frequency(arfcn)
|
||||
# Will match first band (DCS1800 in Europe config)
|
||||
assert freq > 0
|
||||
|
||||
def test_invalid_arfcn(self):
|
||||
"""Test ARFCN outside known ranges."""
|
||||
with pytest.raises(ValueError, match="not found in any known GSM band"):
|
||||
arfcn_to_frequency(9999)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
arfcn_to_frequency(-1)
|
||||
|
||||
def test_arfcn_200khz_spacing(self):
|
||||
"""Test that ARFCNs are 200kHz apart."""
|
||||
arfcn1 = 128
|
||||
arfcn2 = 129
|
||||
freq1 = arfcn_to_frequency(arfcn1)
|
||||
freq2 = arfcn_to_frequency(arfcn2)
|
||||
assert freq2 - freq1 == 200000 # 200 kHz
|
||||
|
||||
|
||||
class TestValidateBandNames:
|
||||
"""Tests for validate_band_names()."""
|
||||
|
||||
def test_valid_americas_bands(self):
|
||||
"""Test valid band names for Americas region."""
|
||||
bands = ['GSM850', 'PCS1900']
|
||||
result, error = validate_band_names(bands, 'Americas')
|
||||
assert result == bands
|
||||
assert error is None
|
||||
|
||||
def test_valid_europe_bands(self):
|
||||
"""Test valid band names for Europe region."""
|
||||
# Note: Europe uses EGSM900, not GSM900
|
||||
bands = ['EGSM900', 'DCS1800', 'GSM850', 'GSM800']
|
||||
result, error = validate_band_names(bands, 'Europe')
|
||||
assert result == bands
|
||||
assert error is None
|
||||
|
||||
def test_valid_asia_bands(self):
|
||||
"""Test valid band names for Asia region."""
|
||||
# Note: Asia uses EGSM900, not GSM900
|
||||
bands = ['EGSM900', 'DCS1800']
|
||||
result, error = validate_band_names(bands, 'Asia')
|
||||
assert result == bands
|
||||
assert error is None
|
||||
|
||||
def test_invalid_band_for_region(self):
|
||||
"""Test invalid band name for a region."""
|
||||
bands = ['GSM900', 'INVALID_BAND']
|
||||
result, error = validate_band_names(bands, 'Americas')
|
||||
assert result == []
|
||||
assert error is not None
|
||||
assert 'Invalid bands' in error
|
||||
assert 'INVALID_BAND' in error
|
||||
|
||||
def test_invalid_region(self):
|
||||
"""Test invalid region name."""
|
||||
bands = ['GSM900']
|
||||
result, error = validate_band_names(bands, 'InvalidRegion')
|
||||
assert result == []
|
||||
assert error is not None
|
||||
assert 'Invalid region' in error
|
||||
|
||||
def test_empty_bands_list(self):
|
||||
"""Test with empty bands list."""
|
||||
result, error = validate_band_names([], 'Americas')
|
||||
assert result == []
|
||||
assert error is None
|
||||
|
||||
def test_single_valid_band(self):
|
||||
"""Test with single valid band."""
|
||||
bands = ['GSM850']
|
||||
result, error = validate_band_names(bands, 'Americas')
|
||||
assert result == ['GSM850']
|
||||
assert error is None
|
||||
|
||||
def test_case_sensitive_band_names(self):
|
||||
"""Test that band names are case-sensitive."""
|
||||
bands = ['gsm850'] # lowercase
|
||||
result, error = validate_band_names(bands, 'Americas')
|
||||
assert result == []
|
||||
assert error is not None
|
||||
|
||||
def test_multiple_invalid_bands(self):
|
||||
"""Test with multiple invalid bands."""
|
||||
bands = ['INVALID1', 'GSM850', 'INVALID2']
|
||||
result, error = validate_band_names(bands, 'Americas')
|
||||
assert result == []
|
||||
assert error is not None
|
||||
assert 'INVALID1' in error
|
||||
assert 'INVALID2' in error
|
||||
|
||||
|
||||
class TestRegionalBandsConfig:
|
||||
"""Tests for REGIONAL_BANDS configuration."""
|
||||
|
||||
def test_all_regions_defined(self):
|
||||
"""Test that all expected regions are defined."""
|
||||
assert 'Americas' in REGIONAL_BANDS
|
||||
assert 'Europe' in REGIONAL_BANDS
|
||||
assert 'Asia' in REGIONAL_BANDS
|
||||
|
||||
def test_all_bands_have_required_fields(self):
|
||||
"""Test that all bands have required configuration fields."""
|
||||
for region, bands in REGIONAL_BANDS.items():
|
||||
for band_name, band_config in bands.items():
|
||||
assert 'start' in band_config
|
||||
assert 'end' in band_config
|
||||
assert 'arfcn_start' in band_config
|
||||
assert 'arfcn_end' in band_config
|
||||
|
||||
def test_frequency_ranges_valid(self):
|
||||
"""Test that frequency ranges are positive and start < end."""
|
||||
for region, bands in REGIONAL_BANDS.items():
|
||||
for band_name, band_config in bands.items():
|
||||
assert band_config['start'] > 0
|
||||
assert band_config['end'] > 0
|
||||
assert band_config['start'] < band_config['end']
|
||||
|
||||
def test_arfcn_ranges_valid(self):
|
||||
"""Test that ARFCN ranges are valid."""
|
||||
for region, bands in REGIONAL_BANDS.items():
|
||||
for band_name, band_config in bands.items():
|
||||
assert band_config['arfcn_start'] >= 0
|
||||
assert band_config['arfcn_end'] >= 0
|
||||
assert band_config['arfcn_start'] <= band_config['arfcn_end']
|
||||
@@ -142,7 +142,7 @@ class DataStore:
|
||||
|
||||
|
||||
class CleanupManager:
|
||||
"""Manages periodic cleanup of multiple data stores."""
|
||||
"""Manages periodic cleanup of multiple data stores and database tables."""
|
||||
|
||||
def __init__(self, interval: float = 60.0):
|
||||
"""
|
||||
@@ -152,9 +152,11 @@ class CleanupManager:
|
||||
interval: Cleanup interval in seconds
|
||||
"""
|
||||
self.stores: list[DataStore] = []
|
||||
self.db_cleanup_funcs: list[tuple[callable, int]] = [] # (func, interval_multiplier)
|
||||
self.interval = interval
|
||||
self._timer: threading.Timer | None = None
|
||||
self._running = False
|
||||
self._cleanup_count = 0
|
||||
self._lock = threading.Lock()
|
||||
|
||||
def register(self, store: DataStore) -> None:
|
||||
@@ -169,6 +171,17 @@ class CleanupManager:
|
||||
if store in self.stores:
|
||||
self.stores.remove(store)
|
||||
|
||||
def register_db_cleanup(self, func: callable, interval_multiplier: int = 60) -> None:
|
||||
"""
|
||||
Register a database cleanup function.
|
||||
|
||||
Args:
|
||||
func: Cleanup function to call (should return number of deleted rows)
|
||||
interval_multiplier: How many cleanup cycles to wait between calls (default: 60 = 1 hour if interval is 60s)
|
||||
"""
|
||||
with self._lock:
|
||||
self.db_cleanup_funcs.append((func, interval_multiplier))
|
||||
|
||||
def start(self) -> None:
|
||||
"""Start the cleanup timer."""
|
||||
with self._lock:
|
||||
@@ -194,11 +207,15 @@ class CleanupManager:
|
||||
self._timer.start()
|
||||
|
||||
def _run_cleanup(self) -> None:
|
||||
"""Run cleanup on all registered stores."""
|
||||
"""Run cleanup on all registered stores and database tables."""
|
||||
total_cleaned = 0
|
||||
|
||||
# Cleanup in-memory data stores
|
||||
with self._lock:
|
||||
stores = list(self.stores)
|
||||
db_funcs = list(self.db_cleanup_funcs)
|
||||
self._cleanup_count += 1
|
||||
current_count = self._cleanup_count
|
||||
|
||||
for store in stores:
|
||||
try:
|
||||
@@ -206,6 +223,17 @@ class CleanupManager:
|
||||
except Exception as e:
|
||||
logger.error(f"Error cleaning up {store.name}: {e}")
|
||||
|
||||
# Cleanup database tables (less frequently)
|
||||
for func, interval_multiplier in db_funcs:
|
||||
if current_count % interval_multiplier == 0:
|
||||
try:
|
||||
deleted = func()
|
||||
if deleted > 0:
|
||||
logger.info(f"Database cleanup: {func.__name__} removed {deleted} rows")
|
||||
total_cleaned += deleted
|
||||
except Exception as e:
|
||||
logger.error(f"Error in database cleanup {func.__name__}: {e}")
|
||||
|
||||
if total_cleaned > 0:
|
||||
logger.info(f"Cleanup complete: removed {total_cleaned} stale entries")
|
||||
|
||||
|
||||
@@ -274,3 +274,14 @@ MAX_DEAUTH_ALERTS_AGE_SECONDS = 300 # 5 minutes
|
||||
|
||||
# Deauth detector sniff timeout (seconds)
|
||||
DEAUTH_SNIFF_TIMEOUT = 0.5
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# GSM SPY (Cellular Intelligence)
|
||||
# =============================================================================
|
||||
|
||||
# Maximum age for GSM tower/device data in DataStore (seconds)
|
||||
MAX_GSM_AGE_SECONDS = 300 # 5 minutes
|
||||
|
||||
# Timing Advance conversion to meters
|
||||
GSM_TA_METERS_PER_UNIT = 554
|
||||
|
||||
@@ -88,65 +88,111 @@ def init_db() -> None:
|
||||
ON signal_history(mode, device_id, timestamp)
|
||||
''')
|
||||
|
||||
# Device correlation table
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS device_correlations (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
wifi_mac TEXT,
|
||||
bt_mac TEXT,
|
||||
confidence REAL,
|
||||
first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
metadata TEXT,
|
||||
UNIQUE(wifi_mac, bt_mac)
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert rules
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_rules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
match TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
enabled BOOLEAN DEFAULT 1,
|
||||
notify TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert events
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_events (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
rule_id INTEGER,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
title TEXT,
|
||||
message TEXT,
|
||||
payload TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
|
||||
)
|
||||
''')
|
||||
|
||||
# Session recordings
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS recording_sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
mode TEXT NOT NULL,
|
||||
label TEXT,
|
||||
started_at TIMESTAMP NOT NULL,
|
||||
stopped_at TIMESTAMP,
|
||||
file_path TEXT NOT NULL,
|
||||
event_count INTEGER DEFAULT 0,
|
||||
size_bytes INTEGER DEFAULT 0,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
# Device correlation table
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS device_correlations (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
wifi_mac TEXT,
|
||||
bt_mac TEXT,
|
||||
confidence REAL,
|
||||
first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
metadata TEXT,
|
||||
UNIQUE(wifi_mac, bt_mac)
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert rules
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_rules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
match TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
enabled BOOLEAN DEFAULT 1,
|
||||
notify TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert events
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_events (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
rule_id INTEGER,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
title TEXT,
|
||||
message TEXT,
|
||||
payload TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
|
||||
)
|
||||
''')
|
||||
|
||||
# Session recordings
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS recording_sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
mode TEXT NOT NULL,
|
||||
label TEXT,
|
||||
started_at TIMESTAMP NOT NULL,
|
||||
stopped_at TIMESTAMP,
|
||||
file_path TEXT NOT NULL,
|
||||
event_count INTEGER DEFAULT 0,
|
||||
size_bytes INTEGER DEFAULT 0,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert rules
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_rules (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
match TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
enabled BOOLEAN DEFAULT 1,
|
||||
notify TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
# Alert events
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS alert_events (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
rule_id INTEGER,
|
||||
mode TEXT,
|
||||
event_type TEXT,
|
||||
severity TEXT DEFAULT 'medium',
|
||||
title TEXT,
|
||||
message TEXT,
|
||||
payload TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (rule_id) REFERENCES alert_rules(id) ON DELETE SET NULL
|
||||
)
|
||||
''')
|
||||
|
||||
# Session recordings
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS recording_sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
mode TEXT NOT NULL,
|
||||
label TEXT,
|
||||
started_at TIMESTAMP NOT NULL,
|
||||
stopped_at TIMESTAMP,
|
||||
file_path TEXT NOT NULL,
|
||||
event_count INTEGER DEFAULT 0,
|
||||
size_bytes INTEGER DEFAULT 0,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
# Users table for authentication
|
||||
conn.execute('''
|
||||
@@ -177,29 +223,29 @@ def init_db() -> None:
|
||||
# =====================================================================
|
||||
|
||||
# TSCM Baselines - Environment snapshots for comparison
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS tscm_baselines (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
location TEXT,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
wifi_networks TEXT,
|
||||
wifi_clients TEXT,
|
||||
bt_devices TEXT,
|
||||
rf_frequencies TEXT,
|
||||
gps_coords TEXT,
|
||||
is_active BOOLEAN DEFAULT 0
|
||||
)
|
||||
''')
|
||||
|
||||
# Ensure new columns exist for older databases
|
||||
try:
|
||||
columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")}
|
||||
if 'wifi_clients' not in columns:
|
||||
conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT')
|
||||
except Exception as e:
|
||||
logger.debug(f"Schema update skipped for tscm_baselines: {e}")
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS tscm_baselines (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL,
|
||||
location TEXT,
|
||||
description TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
wifi_networks TEXT,
|
||||
wifi_clients TEXT,
|
||||
bt_devices TEXT,
|
||||
rf_frequencies TEXT,
|
||||
gps_coords TEXT,
|
||||
is_active BOOLEAN DEFAULT 0
|
||||
)
|
||||
''')
|
||||
|
||||
# Ensure new columns exist for older databases
|
||||
try:
|
||||
columns = {row['name'] for row in conn.execute("PRAGMA table_info(tscm_baselines)")}
|
||||
if 'wifi_clients' not in columns:
|
||||
conn.execute('ALTER TABLE tscm_baselines ADD COLUMN wifi_clients TEXT')
|
||||
except Exception as e:
|
||||
logger.debug(f"Schema update skipped for tscm_baselines: {e}")
|
||||
|
||||
# TSCM Sweeps - Individual sweep sessions
|
||||
conn.execute('''
|
||||
@@ -407,6 +453,134 @@ def init_db() -> None:
|
||||
ON tscm_cases(status, created_at)
|
||||
''')
|
||||
|
||||
# =====================================================================
|
||||
# GSM (Global System for Mobile) Intelligence Tables
|
||||
# =====================================================================
|
||||
|
||||
# gsm_cells - Known cell towers (OpenCellID cache)
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsm_cells (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
mcc INTEGER NOT NULL,
|
||||
mnc INTEGER NOT NULL,
|
||||
lac INTEGER NOT NULL,
|
||||
cid INTEGER NOT NULL,
|
||||
lat REAL,
|
||||
lon REAL,
|
||||
azimuth INTEGER,
|
||||
range_meters INTEGER,
|
||||
samples INTEGER,
|
||||
radio TEXT,
|
||||
operator TEXT,
|
||||
first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
last_verified TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
metadata TEXT,
|
||||
UNIQUE(mcc, mnc, lac, cid)
|
||||
)
|
||||
''')
|
||||
|
||||
# gsm_rogues - Detected rogue towers / IMSI catchers
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsm_rogues (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
arfcn INTEGER NOT NULL,
|
||||
mcc INTEGER,
|
||||
mnc INTEGER,
|
||||
lac INTEGER,
|
||||
cid INTEGER,
|
||||
signal_strength REAL,
|
||||
reason TEXT NOT NULL,
|
||||
threat_level TEXT DEFAULT 'medium',
|
||||
detected_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
location_lat REAL,
|
||||
location_lon REAL,
|
||||
acknowledged BOOLEAN DEFAULT 0,
|
||||
notes TEXT,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
# gsm_signals - 60-day archive of signal observations
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsm_signals (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
imsi TEXT,
|
||||
tmsi TEXT,
|
||||
mcc INTEGER,
|
||||
mnc INTEGER,
|
||||
lac INTEGER,
|
||||
cid INTEGER,
|
||||
ta_value INTEGER,
|
||||
signal_strength REAL,
|
||||
arfcn INTEGER,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
# gsm_tmsi_log - 24-hour raw pings for crowd density
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsm_tmsi_log (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
tmsi TEXT NOT NULL,
|
||||
lac INTEGER,
|
||||
cid INTEGER,
|
||||
ta_value INTEGER,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
''')
|
||||
|
||||
# gsm_velocity_log - 1-hour buffer for movement tracking
|
||||
conn.execute('''
|
||||
CREATE TABLE IF NOT EXISTS gsm_velocity_log (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
device_id TEXT NOT NULL,
|
||||
prev_ta INTEGER,
|
||||
curr_ta INTEGER,
|
||||
prev_cid INTEGER,
|
||||
curr_cid INTEGER,
|
||||
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
estimated_velocity REAL,
|
||||
metadata TEXT
|
||||
)
|
||||
''')
|
||||
|
||||
# GSM indexes for performance
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_cells_location
|
||||
ON gsm_cells(lat, lon)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_cells_identity
|
||||
ON gsm_cells(mcc, mnc, lac, cid)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_rogues_severity
|
||||
ON gsm_rogues(threat_level, detected_at)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_signals_cell_time
|
||||
ON gsm_signals(cid, lac, timestamp)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_signals_device
|
||||
ON gsm_signals(imsi, tmsi, timestamp)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_tmsi_log_time
|
||||
ON gsm_tmsi_log(timestamp)
|
||||
''')
|
||||
|
||||
conn.execute('''
|
||||
CREATE INDEX IF NOT EXISTS idx_gsm_velocity_log_device
|
||||
ON gsm_velocity_log(device_id, timestamp)
|
||||
''')
|
||||
|
||||
# =====================================================================
|
||||
# DSC (Digital Selective Calling) Tables
|
||||
# =====================================================================
|
||||
@@ -740,16 +914,16 @@ def get_correlations(min_confidence: float = 0.5) -> list[dict]:
|
||||
# TSCM Functions
|
||||
# =============================================================================
|
||||
|
||||
def create_tscm_baseline(
|
||||
name: str,
|
||||
location: str | None = None,
|
||||
description: str | None = None,
|
||||
wifi_networks: list | None = None,
|
||||
wifi_clients: list | None = None,
|
||||
bt_devices: list | None = None,
|
||||
rf_frequencies: list | None = None,
|
||||
gps_coords: dict | None = None
|
||||
) -> int:
|
||||
def create_tscm_baseline(
|
||||
name: str,
|
||||
location: str | None = None,
|
||||
description: str | None = None,
|
||||
wifi_networks: list | None = None,
|
||||
wifi_clients: list | None = None,
|
||||
bt_devices: list | None = None,
|
||||
rf_frequencies: list | None = None,
|
||||
gps_coords: dict | None = None
|
||||
) -> int:
|
||||
"""
|
||||
Create a new TSCM baseline.
|
||||
|
||||
@@ -757,20 +931,20 @@ def create_tscm_baseline(
|
||||
The ID of the created baseline
|
||||
"""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
INSERT INTO tscm_baselines
|
||||
(name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
''', (
|
||||
name,
|
||||
location,
|
||||
description,
|
||||
json.dumps(wifi_networks) if wifi_networks else None,
|
||||
json.dumps(wifi_clients) if wifi_clients else None,
|
||||
json.dumps(bt_devices) if bt_devices else None,
|
||||
json.dumps(rf_frequencies) if rf_frequencies else None,
|
||||
json.dumps(gps_coords) if gps_coords else None
|
||||
))
|
||||
cursor = conn.execute('''
|
||||
INSERT INTO tscm_baselines
|
||||
(name, location, description, wifi_networks, wifi_clients, bt_devices, rf_frequencies, gps_coords)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
''', (
|
||||
name,
|
||||
location,
|
||||
description,
|
||||
json.dumps(wifi_networks) if wifi_networks else None,
|
||||
json.dumps(wifi_clients) if wifi_clients else None,
|
||||
json.dumps(bt_devices) if bt_devices else None,
|
||||
json.dumps(rf_frequencies) if rf_frequencies else None,
|
||||
json.dumps(gps_coords) if gps_coords else None
|
||||
))
|
||||
return cursor.lastrowid
|
||||
|
||||
|
||||
@@ -785,19 +959,19 @@ def get_tscm_baseline(baseline_id: int) -> dict | None:
|
||||
if row is None:
|
||||
return None
|
||||
|
||||
return {
|
||||
'id': row['id'],
|
||||
'name': row['name'],
|
||||
'location': row['location'],
|
||||
'description': row['description'],
|
||||
'created_at': row['created_at'],
|
||||
'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
|
||||
'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [],
|
||||
'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
|
||||
'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
|
||||
'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
|
||||
'is_active': bool(row['is_active'])
|
||||
}
|
||||
return {
|
||||
'id': row['id'],
|
||||
'name': row['name'],
|
||||
'location': row['location'],
|
||||
'description': row['description'],
|
||||
'created_at': row['created_at'],
|
||||
'wifi_networks': json.loads(row['wifi_networks']) if row['wifi_networks'] else [],
|
||||
'wifi_clients': json.loads(row['wifi_clients']) if row['wifi_clients'] else [],
|
||||
'bt_devices': json.loads(row['bt_devices']) if row['bt_devices'] else [],
|
||||
'rf_frequencies': json.loads(row['rf_frequencies']) if row['rf_frequencies'] else [],
|
||||
'gps_coords': json.loads(row['gps_coords']) if row['gps_coords'] else None,
|
||||
'is_active': bool(row['is_active'])
|
||||
}
|
||||
|
||||
|
||||
def get_all_tscm_baselines() -> list[dict]:
|
||||
@@ -839,23 +1013,23 @@ def set_active_tscm_baseline(baseline_id: int) -> bool:
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
def update_tscm_baseline(
|
||||
baseline_id: int,
|
||||
wifi_networks: list | None = None,
|
||||
wifi_clients: list | None = None,
|
||||
bt_devices: list | None = None,
|
||||
rf_frequencies: list | None = None
|
||||
) -> bool:
|
||||
def update_tscm_baseline(
|
||||
baseline_id: int,
|
||||
wifi_networks: list | None = None,
|
||||
wifi_clients: list | None = None,
|
||||
bt_devices: list | None = None,
|
||||
rf_frequencies: list | None = None
|
||||
) -> bool:
|
||||
"""Update baseline device lists."""
|
||||
updates = []
|
||||
params = []
|
||||
|
||||
if wifi_networks is not None:
|
||||
updates.append('wifi_networks = ?')
|
||||
params.append(json.dumps(wifi_networks))
|
||||
if wifi_clients is not None:
|
||||
updates.append('wifi_clients = ?')
|
||||
params.append(json.dumps(wifi_clients))
|
||||
if wifi_networks is not None:
|
||||
updates.append('wifi_networks = ?')
|
||||
params.append(json.dumps(wifi_networks))
|
||||
if wifi_clients is not None:
|
||||
updates.append('wifi_clients = ?')
|
||||
params.append(json.dumps(wifi_clients))
|
||||
if bt_devices is not None:
|
||||
updates.append('bt_devices = ?')
|
||||
params.append(json.dumps(bt_devices))
|
||||
@@ -1267,127 +1441,127 @@ def get_all_known_devices(
|
||||
]
|
||||
|
||||
|
||||
def delete_known_device(identifier: str) -> bool:
|
||||
"""Remove a device from the known-good registry."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'DELETE FROM tscm_known_devices WHERE identifier = ?',
|
||||
(identifier.upper(),)
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TSCM Schedule Functions
|
||||
# =============================================================================
|
||||
|
||||
def create_tscm_schedule(
|
||||
name: str,
|
||||
cron_expression: str,
|
||||
sweep_type: str = 'standard',
|
||||
baseline_id: int | None = None,
|
||||
zone_name: str | None = None,
|
||||
enabled: bool = True,
|
||||
notify_on_threat: bool = True,
|
||||
notify_email: str | None = None,
|
||||
last_run: str | None = None,
|
||||
next_run: str | None = None,
|
||||
) -> int:
|
||||
"""Create a new TSCM sweep schedule."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
INSERT INTO tscm_schedules
|
||||
(name, baseline_id, zone_name, cron_expression, sweep_type,
|
||||
enabled, last_run, next_run, notify_on_threat, notify_email)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
''', (
|
||||
name,
|
||||
baseline_id,
|
||||
zone_name,
|
||||
cron_expression,
|
||||
sweep_type,
|
||||
1 if enabled else 0,
|
||||
last_run,
|
||||
next_run,
|
||||
1 if notify_on_threat else 0,
|
||||
notify_email,
|
||||
))
|
||||
return cursor.lastrowid
|
||||
|
||||
|
||||
def get_tscm_schedule(schedule_id: int) -> dict | None:
|
||||
"""Get a TSCM schedule by ID."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'SELECT * FROM tscm_schedules WHERE id = ?',
|
||||
(schedule_id,)
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
return dict(row) if row else None
|
||||
|
||||
|
||||
def get_all_tscm_schedules(
|
||||
enabled: bool | None = None,
|
||||
limit: int = 200
|
||||
) -> list[dict]:
|
||||
"""Get all TSCM schedules."""
|
||||
conditions = []
|
||||
params = []
|
||||
|
||||
if enabled is not None:
|
||||
conditions.append('enabled = ?')
|
||||
params.append(1 if enabled else 0)
|
||||
|
||||
where_clause = f'WHERE {" AND ".join(conditions)}' if conditions else ''
|
||||
params.append(limit)
|
||||
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(f'''
|
||||
SELECT * FROM tscm_schedules
|
||||
{where_clause}
|
||||
ORDER BY id DESC
|
||||
LIMIT ?
|
||||
''', params)
|
||||
return [dict(row) for row in cursor]
|
||||
|
||||
|
||||
def update_tscm_schedule(schedule_id: int, **fields) -> bool:
|
||||
"""Update a TSCM schedule."""
|
||||
if not fields:
|
||||
return False
|
||||
|
||||
updates = []
|
||||
params = []
|
||||
|
||||
for key, value in fields.items():
|
||||
updates.append(f'{key} = ?')
|
||||
params.append(value)
|
||||
|
||||
params.append(schedule_id)
|
||||
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
f'UPDATE tscm_schedules SET {", ".join(updates)} WHERE id = ?',
|
||||
params
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
def delete_tscm_schedule(schedule_id: int) -> bool:
|
||||
"""Delete a TSCM schedule."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'DELETE FROM tscm_schedules WHERE id = ?',
|
||||
(schedule_id,)
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
def is_known_good_device(identifier: str, location: str | None = None) -> dict | None:
|
||||
"""Check if a device is in the known-good registry for a location."""
|
||||
with get_db() as conn:
|
||||
if location:
|
||||
cursor = conn.execute('''
|
||||
def delete_known_device(identifier: str) -> bool:
|
||||
"""Remove a device from the known-good registry."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'DELETE FROM tscm_known_devices WHERE identifier = ?',
|
||||
(identifier.upper(),)
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TSCM Schedule Functions
|
||||
# =============================================================================
|
||||
|
||||
def create_tscm_schedule(
|
||||
name: str,
|
||||
cron_expression: str,
|
||||
sweep_type: str = 'standard',
|
||||
baseline_id: int | None = None,
|
||||
zone_name: str | None = None,
|
||||
enabled: bool = True,
|
||||
notify_on_threat: bool = True,
|
||||
notify_email: str | None = None,
|
||||
last_run: str | None = None,
|
||||
next_run: str | None = None,
|
||||
) -> int:
|
||||
"""Create a new TSCM sweep schedule."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
INSERT INTO tscm_schedules
|
||||
(name, baseline_id, zone_name, cron_expression, sweep_type,
|
||||
enabled, last_run, next_run, notify_on_threat, notify_email)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
''', (
|
||||
name,
|
||||
baseline_id,
|
||||
zone_name,
|
||||
cron_expression,
|
||||
sweep_type,
|
||||
1 if enabled else 0,
|
||||
last_run,
|
||||
next_run,
|
||||
1 if notify_on_threat else 0,
|
||||
notify_email,
|
||||
))
|
||||
return cursor.lastrowid
|
||||
|
||||
|
||||
def get_tscm_schedule(schedule_id: int) -> dict | None:
|
||||
"""Get a TSCM schedule by ID."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'SELECT * FROM tscm_schedules WHERE id = ?',
|
||||
(schedule_id,)
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
return dict(row) if row else None
|
||||
|
||||
|
||||
def get_all_tscm_schedules(
|
||||
enabled: bool | None = None,
|
||||
limit: int = 200
|
||||
) -> list[dict]:
|
||||
"""Get all TSCM schedules."""
|
||||
conditions = []
|
||||
params = []
|
||||
|
||||
if enabled is not None:
|
||||
conditions.append('enabled = ?')
|
||||
params.append(1 if enabled else 0)
|
||||
|
||||
where_clause = f'WHERE {" AND ".join(conditions)}' if conditions else ''
|
||||
params.append(limit)
|
||||
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(f'''
|
||||
SELECT * FROM tscm_schedules
|
||||
{where_clause}
|
||||
ORDER BY id DESC
|
||||
LIMIT ?
|
||||
''', params)
|
||||
return [dict(row) for row in cursor]
|
||||
|
||||
|
||||
def update_tscm_schedule(schedule_id: int, **fields) -> bool:
|
||||
"""Update a TSCM schedule."""
|
||||
if not fields:
|
||||
return False
|
||||
|
||||
updates = []
|
||||
params = []
|
||||
|
||||
for key, value in fields.items():
|
||||
updates.append(f'{key} = ?')
|
||||
params.append(value)
|
||||
|
||||
params.append(schedule_id)
|
||||
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
f'UPDATE tscm_schedules SET {", ".join(updates)} WHERE id = ?',
|
||||
params
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
def delete_tscm_schedule(schedule_id: int) -> bool:
|
||||
"""Delete a TSCM schedule."""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute(
|
||||
'DELETE FROM tscm_schedules WHERE id = ?',
|
||||
(schedule_id,)
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
|
||||
|
||||
def is_known_good_device(identifier: str, location: str | None = None) -> dict | None:
|
||||
"""Check if a device is in the known-good registry for a location."""
|
||||
with get_db() as conn:
|
||||
if location:
|
||||
cursor = conn.execute('''
|
||||
SELECT * FROM tscm_known_devices
|
||||
WHERE identifier = ? AND (location = ? OR scope = 'global')
|
||||
''', (identifier.upper(), location))
|
||||
@@ -2123,3 +2297,61 @@ def cleanup_old_payloads(max_age_hours: int = 24) -> int:
|
||||
WHERE received_at < datetime('now', ?)
|
||||
''', (f'-{max_age_hours} hours',))
|
||||
return cursor.rowcount
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# GSM Cleanup Functions
|
||||
# =============================================================================
|
||||
|
||||
def cleanup_old_gsm_signals(max_age_days: int = 60) -> int:
|
||||
"""
|
||||
Remove old GSM signal observations (60-day archive).
|
||||
|
||||
Args:
|
||||
max_age_days: Maximum age in days (default: 60)
|
||||
|
||||
Returns:
|
||||
Number of deleted entries
|
||||
"""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
DELETE FROM gsm_signals
|
||||
WHERE timestamp < datetime('now', ?)
|
||||
''', (f'-{max_age_days} days',))
|
||||
return cursor.rowcount
|
||||
|
||||
|
||||
def cleanup_old_gsm_tmsi_log(max_age_hours: int = 24) -> int:
|
||||
"""
|
||||
Remove old TMSI log entries (24-hour buffer for crowd density).
|
||||
|
||||
Args:
|
||||
max_age_hours: Maximum age in hours (default: 24)
|
||||
|
||||
Returns:
|
||||
Number of deleted entries
|
||||
"""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
DELETE FROM gsm_tmsi_log
|
||||
WHERE timestamp < datetime('now', ?)
|
||||
''', (f'-{max_age_hours} hours',))
|
||||
return cursor.rowcount
|
||||
|
||||
|
||||
def cleanup_old_gsm_velocity_log(max_age_hours: int = 1) -> int:
|
||||
"""
|
||||
Remove old velocity log entries (1-hour buffer for movement tracking).
|
||||
|
||||
Args:
|
||||
max_age_hours: Maximum age in hours (default: 1)
|
||||
|
||||
Returns:
|
||||
Number of deleted entries
|
||||
"""
|
||||
with get_db() as conn:
|
||||
cursor = conn.execute('''
|
||||
DELETE FROM gsm_velocity_log
|
||||
WHERE timestamp < datetime('now', ?)
|
||||
''', (f'-{max_age_hours} hours',))
|
||||
return cursor.rowcount
|
||||
|
||||
@@ -443,6 +443,38 @@ TOOL_DEPENDENCIES = {
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'gsm': {
|
||||
'name': 'GSM Intelligence',
|
||||
'tools': {
|
||||
'grgsm_scanner': {
|
||||
'required': True,
|
||||
'description': 'gr-gsm scanner for finding GSM towers',
|
||||
'install': {
|
||||
'apt': 'Build gr-gsm from source: https://github.com/ptrkrysik/gr-gsm',
|
||||
'brew': 'brew install gr-gsm (may require manual build)',
|
||||
'manual': 'https://github.com/ptrkrysik/gr-gsm'
|
||||
}
|
||||
},
|
||||
'grgsm_livemon': {
|
||||
'required': True,
|
||||
'description': 'gr-gsm live monitor for decoding GSM signals',
|
||||
'install': {
|
||||
'apt': 'Included with gr-gsm package',
|
||||
'brew': 'Included with gr-gsm',
|
||||
'manual': 'Included with gr-gsm'
|
||||
}
|
||||
},
|
||||
'tshark': {
|
||||
'required': True,
|
||||
'description': 'Wireshark CLI for parsing GSM packets',
|
||||
'install': {
|
||||
'apt': 'sudo apt-get install tshark',
|
||||
'brew': 'brew install wireshark',
|
||||
'manual': 'https://www.wireshark.org/download.html'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
200
utils/gsm_geocoding.py
Normal file
200
utils/gsm_geocoding.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""GSM Cell Tower Geocoding Service.
|
||||
|
||||
Provides hybrid cache-first geocoding with async API fallback for cell towers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import queue
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
import config
|
||||
from utils.database import get_db
|
||||
|
||||
logger = logging.getLogger('intercept.gsm_geocoding')
|
||||
|
||||
# Queue for pending geocoding requests
|
||||
_geocoding_queue = queue.Queue(maxsize=100)
|
||||
|
||||
|
||||
def lookup_cell_coordinates(mcc: int, mnc: int, lac: int, cid: int) -> dict[str, Any] | None:
|
||||
"""
|
||||
Lookup cell tower coordinates with cache-first strategy.
|
||||
|
||||
Strategy:
|
||||
1. Check gsm_cells table (cache) - fast synchronous lookup
|
||||
2. If not found, return None (caller decides whether to use API)
|
||||
|
||||
Args:
|
||||
mcc: Mobile Country Code
|
||||
mnc: Mobile Network Code
|
||||
lac: Location Area Code
|
||||
cid: Cell ID
|
||||
|
||||
Returns:
|
||||
dict with keys: lat, lon, source='cache', azimuth (optional),
|
||||
range_meters (optional), operator (optional), radio (optional)
|
||||
Returns None if not found in cache.
|
||||
"""
|
||||
try:
|
||||
with get_db() as conn:
|
||||
result = conn.execute('''
|
||||
SELECT lat, lon, azimuth, range_meters, operator, radio
|
||||
FROM gsm_cells
|
||||
WHERE mcc = ? AND mnc = ? AND lac = ? AND cid = ?
|
||||
''', (mcc, mnc, lac, cid)).fetchone()
|
||||
|
||||
if result:
|
||||
return {
|
||||
'lat': result['lat'],
|
||||
'lon': result['lon'],
|
||||
'source': 'cache',
|
||||
'azimuth': result['azimuth'],
|
||||
'range_meters': result['range_meters'],
|
||||
'operator': result['operator'],
|
||||
'radio': result['radio']
|
||||
}
|
||||
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error looking up coordinates from cache: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def lookup_cell_from_api(mcc: int, mnc: int, lac: int, cid: int) -> dict[str, Any] | None:
|
||||
"""
|
||||
Lookup cell tower from OpenCellID API and cache result.
|
||||
|
||||
Args:
|
||||
mcc: Mobile Country Code
|
||||
mnc: Mobile Network Code
|
||||
lac: Location Area Code
|
||||
cid: Cell ID
|
||||
|
||||
Returns:
|
||||
dict with keys: lat, lon, source='api', azimuth (optional),
|
||||
range_meters (optional), operator (optional), radio (optional)
|
||||
Returns None if API call fails or cell not found.
|
||||
"""
|
||||
try:
|
||||
api_url = config.GSM_OPENCELLID_API_URL
|
||||
params = {
|
||||
'key': config.GSM_OPENCELLID_API_KEY,
|
||||
'mcc': mcc,
|
||||
'mnc': mnc,
|
||||
'lac': lac,
|
||||
'cellid': cid,
|
||||
'format': 'json'
|
||||
}
|
||||
|
||||
response = requests.get(api_url, params=params, timeout=10)
|
||||
|
||||
if response.status_code == 200:
|
||||
cell_data = response.json()
|
||||
|
||||
# Cache the result
|
||||
with get_db() as conn:
|
||||
conn.execute('''
|
||||
INSERT OR REPLACE INTO gsm_cells
|
||||
(mcc, mnc, lac, cid, lat, lon, azimuth, range_meters, samples, radio, operator, last_verified)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
|
||||
''', (
|
||||
mcc, mnc, lac, cid,
|
||||
cell_data.get('lat'),
|
||||
cell_data.get('lon'),
|
||||
cell_data.get('azimuth'),
|
||||
cell_data.get('range'),
|
||||
cell_data.get('samples'),
|
||||
cell_data.get('radio'),
|
||||
cell_data.get('operator')
|
||||
))
|
||||
conn.commit()
|
||||
|
||||
logger.info(f"Cached cell tower from API: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
|
||||
|
||||
return {
|
||||
'lat': cell_data.get('lat'),
|
||||
'lon': cell_data.get('lon'),
|
||||
'source': 'api',
|
||||
'azimuth': cell_data.get('azimuth'),
|
||||
'range_meters': cell_data.get('range'),
|
||||
'operator': cell_data.get('operator'),
|
||||
'radio': cell_data.get('radio')
|
||||
}
|
||||
else:
|
||||
logger.warning(f"OpenCellID API returned {response.status_code} for MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calling OpenCellID API: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def enrich_tower_data(tower_data: dict[str, Any]) -> dict[str, Any]:
    """
    Attach coordinates to a tower record using a cache-first strategy.

    On a cache hit the coordinates (plus any optional metadata present in
    the cached row) are copied onto the record immediately.  On a miss the
    record is flagged as pending and a copy is handed to the background
    geocoding queue for an asynchronous API lookup.

    Args:
        tower_data: Dictionary containing at least mcc, mnc, lac and cid
            (alongside any other tower data).

    Returns:
        The same dict, augmented with:
        - lat, lon and source='cache' when the cell was found in the cache
        - status='pending' and source='unknown' when an API lookup is needed
    """
    identifiers = tuple(tower_data.get(key) for key in ('mcc', 'mnc', 'lac', 'cid'))

    # All four cell identifiers are mandatory for any lookup.
    if any(value is None for value in identifiers):
        logger.warning(f"Tower data missing required fields: {tower_data}")
        return tower_data

    mcc, mnc, lac, cid = identifiers

    cached = lookup_cell_coordinates(mcc, mnc, lac, cid)

    if cached is None:
        # Cache miss: flag as pending and hand off to the background
        # worker without blocking the caller.
        tower_data['status'] = 'pending'
        tower_data['source'] = 'unknown'
        try:
            _geocoding_queue.put_nowait(tower_data.copy())
            logger.debug(f"Queued tower for geocoding: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")
        except queue.Full:
            logger.warning("Geocoding queue full, dropping tower")
        return tower_data

    # Cache hit: copy coordinates straight onto the record.
    tower_data['lat'] = cached['lat']
    tower_data['lon'] = cached['lon']
    tower_data['source'] = 'cache'

    # Optional numeric metadata: copied whenever not None.
    for field in ('azimuth', 'range_meters'):
        if cached.get(field) is not None:
            tower_data[field] = cached[field]

    # Optional string metadata: copied only when truthy.
    for field in ('operator', 'radio'):
        if cached.get(field):
            tower_data[field] = cached[field]

    logger.debug(f"Cache hit for tower: MCC={mcc} MNC={mnc} LAC={lac} CID={cid}")

    return tower_data
|
||||
|
||||
|
||||
def get_geocoding_queue() -> queue.Queue:
    """Expose the module-level geocoding queue used by the background worker."""
    return _geocoding_queue
|
||||
Reference in New Issue
Block a user