From 90281b1535b1201e69b8e9928d62ad2ce2dbacba Mon Sep 17 00:00:00 2001 From: Smittix Date: Thu, 12 Mar 2026 20:49:08 +0000 Subject: [PATCH] fix(modes): deep-linked mode scripts fail when body not yet parsed MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ensureModeScript() used document.body.appendChild() to load lazy mode scripts, but the preload for ?mode= query params runs in <head> before <body> exists, causing all deep-linked modes to silently fail. Also fix cross-mode handoffs (BT→BT Locate, WiFi→WiFi Locate, Spy Stations→Waterfall) that assumed target module was already loaded. Co-Authored-By: Claude Opus 4.6 --- .dockerignore | 17 +- .env.example | 7 + .github/workflows/ci.yml | 25 + Dockerfile | 390 +- app.py | 88 +- config.py | 2 +- requirements.txt | 2 + routes/__init__.py | 11 + routes/acars.py | 28 +- routes/adsb.py | 56 +- routes/ais.py | 24 +- routes/alerts.py | 17 +- routes/aprs.py | 79 +- routes/bluetooth.py | 97 +- routes/bluetooth_v2.py | 7 +- routes/bt_locate.py | 150 +- routes/controller.py | 359 +- routes/correlation.py | 40 +- routes/dsc.py | 3 +- routes/gps.py | 87 +- routes/listening_post.py | 2346 ----------- routes/listening_post/__init__.py | 523 +++ routes/listening_post/audio.py | 502 +++ routes/listening_post/scanner.py | 824 ++++ routes/listening_post/tools.py | 91 + routes/listening_post/waterfall.py | 509 +++ routes/meshtastic.py | 35 +- routes/meteor_websocket.py | 4 +- routes/morse.py | 23 +- routes/offline.py | 50 +- routes/ook.py | 25 +- routes/pager.py | 29 +- routes/radiosonde.py | 42 +- routes/recordings.py | 71 +- routes/rtlamr.py | 22 +- routes/satellite.py | 21 +- routes/sensor.py | 19 +- routes/settings.py | 84 +- routes/signalid.py | 9 +- routes/space_weather.py | 5 +- routes/sstv.py | 19 +- routes/sstv_general.py | 75 +- routes/subghz.py | 305 +- routes/system.py | 7 +- routes/{tscm.py => tscm/__init__.py} | 5809 ++++++++------------------ routes/tscm/analysis.py | 1077 +++++ 
routes/tscm/baseline.py | 272 ++ routes/tscm/cases.py | 149 + routes/tscm/meeting.py | 205 + routes/tscm/schedules.py | 186 + routes/tscm/sweep.py | 434 ++ routes/updater.py | 26 +- routes/vdl2.py | 28 +- routes/weather_sat.py | 30 +- routes/websdr.py | 21 +- routes/wefax.py | 89 +- routes/wifi.py | 412 +- routes/wifi_v2.py | 110 +- start.sh | 17 +- static/css/core/components.css | 73 + static/css/core/variables.css | 6 +- static/css/index.css | 156 +- static/css/responsive.css | 33 +- static/js/modes/bluetooth.js | 44 +- static/js/modes/meshtastic.js | 32 +- static/js/modes/spy-stations.js | 19 +- static/js/modes/sstv.js | 29 +- static/js/modes/weather-satellite.js | 52 +- static/js/modes/websdr.js | 30 +- static/js/modes/wifi.js | 34 +- templates/adsb_dashboard.html | 70 +- templates/ais_dashboard.html | 69 +- templates/index.html | 322 +- templates/login.html | 1 + templates/partials/modes/wifi.html | 8 +- templates/partials/nav.html | 88 +- templates/satellite_dashboard.html | 33 +- tests/conftest.py | 112 + tests/test_satellite.py | 3 +- tests/test_wifi.py | 4 +- utils/database.py | 23 +- utils/process.py | 10 +- utils/responses.py | 37 + utils/sdr/base.py | 10 + utils/sdr/detection.py | 180 +- utils/sdr/rtlsdr.py | 3 +- utils/weather_sat_predict.py | 21 +- 87 files changed, 9128 insertions(+), 8368 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 routes/listening_post.py create mode 100644 routes/listening_post/__init__.py create mode 100644 routes/listening_post/audio.py create mode 100644 routes/listening_post/scanner.py create mode 100644 routes/listening_post/tools.py create mode 100644 routes/listening_post/waterfall.py rename routes/{tscm.py => tscm/__init__.py} (51%) create mode 100644 routes/tscm/analysis.py create mode 100644 routes/tscm/baseline.py create mode 100644 routes/tscm/cases.py create mode 100644 routes/tscm/meeting.py create mode 100644 routes/tscm/schedules.py create mode 100644 routes/tscm/sweep.py create mode 
100644 utils/responses.py diff --git a/.dockerignore b/.dockerignore index e92261e..659c5a3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,8 @@ -# Git +# Git & CI .git .gitignore +.github +.claude # Python __pycache__ @@ -29,6 +31,19 @@ tests/ .coverage htmlcov/ .mypy_cache/ +.ruff_cache +.DS_Store +tasks/ + +# Documentation +*.md + +# Runtime data (mounted as volume) +instance/ +data/ + +# Build scripts +build-multiarch.sh # Logs *.log diff --git a/.env.example b/.env.example index e78804b..d13466e 100644 --- a/.env.example +++ b/.env.example @@ -6,6 +6,13 @@ # Container timezone (e.g. America/New_York, Europe/London, Australia/Sydney) TZ=UTC +# Flask secret key (auto-generated if not set) +# INTERCEPT_SECRET_KEY=your-secret-key-here + +# Admin credentials (password auto-generated on first run if not set) +# INTERCEPT_ADMIN_USERNAME=admin +# INTERCEPT_ADMIN_PASSWORD=your-password-here + # Postgres password (default: intercept) INTERCEPT_ADSB_DB_PASSWORD=intercept diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..ad47f04 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,25 @@ +name: CI + +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.11' + - run: pip install ruff + - run: ruff check . 
+ + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.11' + - run: pip install -r requirements.txt + - run: pip install pytest + - run: pytest --tb=short -q diff --git a/Dockerfile b/Dockerfile index 9bd9406..a6b7201 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,194 @@ # INTERCEPT - Signal Intelligence Platform # Docker container for running the web interface +# Multi-stage build: builder compiles tools, runtime keeps only what's needed +############################################################################### +# Stage 1: Builder — compile all tools from source +############################################################################### +FROM python:3.11-slim AS builder + +WORKDIR /tmp/build + +# Install ALL build dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + git \ + pkg-config \ + cmake \ + librtlsdr-dev \ + libusb-1.0-0-dev \ + libncurses-dev \ + libsndfile1-dev \ + libgtk-3-dev \ + libasound2-dev \ + libsoapysdr-dev \ + libhackrf-dev \ + liblimesuite-dev \ + libfftw3-dev \ + libpng-dev \ + libtiff-dev \ + libjemalloc-dev \ + libvolk-dev \ + libnng-dev \ + libzstd-dev \ + libsqlite3-dev \ + libcurl4-openssl-dev \ + zlib1g-dev \ + libzmq3-dev \ + libpulse-dev \ + libfftw3-bin \ + liblapack-dev \ + libglib2.0-dev \ + libxml2-dev \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Create staging directory for all built artifacts +RUN mkdir -p /staging/usr/bin /staging/usr/local/bin /staging/usr/local/lib /staging/opt + +# Build dump1090 +RUN cd /tmp \ + && git clone --depth 1 https://github.com/flightaware/dump1090.git \ + && cd dump1090 \ + && sed -i 's/-Werror//g' Makefile \ + && make BLADERF=no RTLSDR=yes \ + && cp dump1090 /staging/usr/bin/dump1090-fa \ + && ln -s /usr/bin/dump1090-fa /staging/usr/bin/dump1090 \ + && rm -rf /tmp/dump1090 + +# Build AIS-catcher +RUN cd /tmp \ + && git clone 
https://github.com/jvde-github/AIS-catcher.git \ + && cd AIS-catcher \ + && mkdir build && cd build \ + && cmake .. \ + && make \ + && cp AIS-catcher /staging/usr/bin/AIS-catcher \ + && rm -rf /tmp/AIS-catcher + +# Build readsb +RUN cd /tmp \ + && git clone --depth 1 https://github.com/wiedehopf/readsb.git \ + && cd readsb \ + && make BLADERF=no PLUTOSDR=no SOAPYSDR=yes \ + && cp readsb /staging/usr/bin/readsb \ + && rm -rf /tmp/readsb + +# Build rx_tools +RUN cd /tmp \ + && git clone https://github.com/rxseger/rx_tools.git \ + && cd rx_tools \ + && mkdir build && cd build \ + && cmake .. \ + && make \ + && DESTDIR=/staging make install \ + && rm -rf /tmp/rx_tools + +# Build acarsdec +RUN cd /tmp \ + && git clone --depth 1 https://github.com/TLeconte/acarsdec.git \ + && cd acarsdec \ + && mkdir build && cd build \ + && cmake .. -Drtl=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5 \ + && make \ + && cp acarsdec /staging/usr/bin/acarsdec \ + && rm -rf /tmp/acarsdec + +# Build libacars (required by dumpvdl2) +RUN cd /tmp \ + && git clone --depth 1 https://github.com/szpajder/libacars.git \ + && cd libacars \ + && mkdir build && cd build \ + && cmake .. \ + && make \ + && make install \ + && ldconfig \ + && cp -a /usr/local/lib/libacars* /staging/usr/local/lib/ \ + && rm -rf /tmp/libacars + +# Build dumpvdl2 (VDL2 aircraft datalink decoder) +RUN cd /tmp \ + && git clone --depth 1 https://github.com/szpajder/dumpvdl2.git \ + && cd dumpvdl2 \ + && mkdir build && cd build \ + && cmake .. 
\ + && make \ + && cp src/dumpvdl2 /staging/usr/bin/dumpvdl2 \ + && rm -rf /tmp/dumpvdl2 + +# Build slowrx (SSTV decoder) — pinned to known-good commit +RUN cd /tmp \ + && git clone https://github.com/windytan/slowrx.git \ + && cd slowrx \ + && git checkout ca6d7012 \ + && make \ + && install -m 0755 slowrx /staging/usr/local/bin/slowrx \ + && rm -rf /tmp/slowrx + +# Build SatDump (weather satellite decoder - NOAA APT & Meteor LRPT) — pinned to v1.2.2 +RUN cd /tmp \ + && git clone --depth 1 --branch 1.2.2 https://github.com/SatDump/SatDump.git \ + && cd SatDump \ + && mkdir build && cd build \ + && cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_GUI=OFF -DCMAKE_INSTALL_LIBDIR=lib .. \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + # Ensure SatDump plugins are in the expected path (handles multiarch differences) + && mkdir -p /usr/local/lib/satdump/plugins \ + && if [ -z "$(ls /usr/local/lib/satdump/plugins/*.so 2>/dev/null)" ]; then \ + for dir in /usr/local/lib/*/satdump/plugins /usr/lib/*/satdump/plugins /usr/lib/satdump/plugins; do \ + if [ -d "$dir" ] && [ -n "$(ls "$dir"/*.so 2>/dev/null)" ]; then \ + ln -sf "$dir"/*.so /usr/local/lib/satdump/plugins/; \ + break; \ + fi; \ + done; \ + fi \ + # Copy SatDump install artifacts to staging + && cp -a /usr/local/bin/satdump /staging/usr/local/bin/ 2>/dev/null || true \ + && cp -a /usr/local/lib/libsatdump* /staging/usr/local/lib/ 2>/dev/null || true \ + && cp -a /usr/local/lib/satdump /staging/usr/local/lib/ 2>/dev/null || true \ + && cp -a /usr/local/share/satdump /staging/usr/local/share/ 2>/dev/null; mkdir -p /staging/usr/local/share \ + && cp -a /usr/local/share/satdump /staging/usr/local/share/ 2>/dev/null || true \ + && rm -rf /tmp/SatDump + +# Build hackrf CLI tools from source — avoids libhackrf0 version conflict +# between the 'hackrf' apt package and soapysdr-module-hackrf's newer libhackrf0 +RUN cd /tmp \ + && git clone --depth 1 https://github.com/greatscottgadgets/hackrf.git \ + && cd hackrf/host 
\ + && mkdir build && cd build \ + && cmake .. \ + && make \ + && make install \ + && ldconfig \ + && cp -a /usr/local/bin/hackrf_* /staging/usr/local/bin/ 2>/dev/null || true \ + && cp -a /usr/local/lib/libhackrf* /staging/usr/local/lib/ 2>/dev/null || true \ + && rm -rf /tmp/hackrf + +# Install radiosonde_auto_rx (weather balloon decoder) +RUN cd /tmp \ + && git clone --depth 1 https://github.com/projecthorus/radiosonde_auto_rx.git \ + && cd radiosonde_auto_rx/auto_rx \ + && pip install --no-cache-dir -r requirements.txt semver \ + && bash build.sh \ + && mkdir -p /staging/opt/radiosonde_auto_rx/auto_rx \ + && cp -r . /staging/opt/radiosonde_auto_rx/auto_rx/ \ + && chmod +x /staging/opt/radiosonde_auto_rx/auto_rx/auto_rx.py \ + && rm -rf /tmp/radiosonde_auto_rx + +# Build rtlamr (utility meter decoder - requires Go) +RUN cd /tmp \ + && curl -fsSL "https://go.dev/dl/go1.22.5.linux-$(dpkg --print-architecture).tar.gz" | tar -C /usr/local -xz \ + && export PATH="$PATH:/usr/local/go/bin" \ + && export GOPATH=/tmp/gopath \ + && go install github.com/bemasher/rtlamr@latest \ + && cp /tmp/gopath/bin/rtlamr /staging/usr/bin/rtlamr \ + && rm -rf /usr/local/go /tmp/gopath + +############################################################################### +# Stage 2: Runtime — lean image with only runtime dependencies +############################################################################### FROM python:3.11-slim LABEL maintainer="INTERCEPT Project" @@ -12,12 +200,10 @@ WORKDIR /app # Pre-accept tshark non-root capture prompt for non-interactive install RUN echo 'wireshark-common wireshark-common/install-setuid boolean true' | debconf-set-selections -# Install system dependencies for SDR tools +# Install ONLY runtime dependencies (no -dev packages, no build tools) RUN apt-get update && apt-get install -y --no-install-recommends \ # RTL-SDR tools rtl-sdr \ - librtlsdr-dev \ - libusb-1.0-0-dev \ # 433MHz decoder rtl-433 \ # Pager decoder @@ -43,7 +229,6 @@ RUN apt-get 
update && apt-get install -y --no-install-recommends \ # GPS support gpsd \ gpsd-clients \ - # Utilities # APRS direwolf \ # WiFi Extra @@ -62,192 +247,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ procps \ && rm -rf /var/lib/apt/lists/* -# Build dump1090-fa and acarsdec from source (packages not available in slim repos) -RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - git \ - pkg-config \ - cmake \ - libncurses-dev \ - libsndfile1-dev \ - # GTK is required for slowrx (SSTV decoder GUI dependency). - # Note: slowrx is kept for backwards compatibility, but the pure Python - # SSTV decoder in utils/sstv/ is now the primary implementation. - # GTK can be removed if slowrx is deprecated in future releases. - libgtk-3-dev \ - libasound2-dev \ - libsoapysdr-dev \ - libhackrf-dev \ - liblimesuite-dev \ - libfftw3-dev \ - libpng-dev \ - libtiff-dev \ - libjemalloc-dev \ - libvolk-dev \ - libnng-dev \ - libzstd-dev \ - libsqlite3-dev \ - libcurl4-openssl-dev \ - zlib1g-dev \ - libzmq3-dev \ - libpulse-dev \ - libfftw3-bin \ - liblapack-dev \ - libglib2.0-dev \ - libxml2-dev \ - # Build dump1090 - && cd /tmp \ - && git clone --depth 1 https://github.com/flightaware/dump1090.git \ - && cd dump1090 \ - && sed -i 's/-Werror//g' Makefile \ - && make BLADERF=no RTLSDR=yes \ - && cp dump1090 /usr/bin/dump1090-fa \ - && ln -s /usr/bin/dump1090-fa /usr/bin/dump1090 \ - && rm -rf /tmp/dump1090 \ - # Build AIS-catcher - && cd /tmp \ - && git clone https://github.com/jvde-github/AIS-catcher.git \ - && cd AIS-catcher \ - && mkdir build && cd build \ - && cmake .. 
\ - && make \ - && cp AIS-catcher /usr/bin/AIS-catcher \ - && cd /tmp \ - && rm -rf /tmp/AIS-catcher \ - # Build readsb - && cd /tmp \ - && git clone --depth 1 https://github.com/wiedehopf/readsb.git \ - && cd readsb \ - && make BLADERF=no PLUTOSDR=no SOAPYSDR=yes \ - && cp readsb /usr/bin/readsb \ - && cd /tmp \ - && rm -rf /tmp/readsb \ - # Build rx_tools - && cd /tmp \ - && git clone https://github.com/rxseger/rx_tools.git \ - && cd rx_tools \ - && mkdir build && cd build \ - && cmake .. \ - && make \ - && make install \ - && cd /tmp \ - && rm -rf /tmp/rx_tools \ - # Build acarsdec - && cd /tmp \ - && git clone --depth 1 https://github.com/TLeconte/acarsdec.git \ - && cd acarsdec \ - && mkdir build && cd build \ - && cmake .. -Drtl=ON -DCMAKE_POLICY_VERSION_MINIMUM=3.5 \ - && make \ - && cp acarsdec /usr/bin/acarsdec \ - && rm -rf /tmp/acarsdec \ - # Build libacars (required by dumpvdl2) - && cd /tmp \ - && git clone --depth 1 https://github.com/szpajder/libacars.git \ - && cd libacars \ - && mkdir build && cd build \ - && cmake .. \ - && make \ - && make install \ - && ldconfig \ - && rm -rf /tmp/libacars \ - # Build dumpvdl2 (VDL2 aircraft datalink decoder) - && cd /tmp \ - && git clone --depth 1 https://github.com/szpajder/dumpvdl2.git \ - && cd dumpvdl2 \ - && mkdir build && cd build \ - && cmake .. \ - && make \ - && cp src/dumpvdl2 /usr/bin/dumpvdl2 \ - && rm -rf /tmp/dumpvdl2 \ - # Build slowrx (SSTV decoder) — pinned to known-good commit - && cd /tmp \ - && git clone https://github.com/windytan/slowrx.git \ - && cd slowrx \ - && git checkout ca6d7012 \ - && make \ - && install -m 0755 slowrx /usr/local/bin/slowrx \ - && rm -rf /tmp/slowrx \ - # Build SatDump (weather satellite decoder - NOAA APT & Meteor LRPT) — pinned to v1.2.2 - && cd /tmp \ - && git clone --depth 1 --branch 1.2.2 https://github.com/SatDump/SatDump.git \ - && cd SatDump \ - && mkdir build && cd build \ - && cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_GUI=OFF -DCMAKE_INSTALL_LIBDIR=lib .. 
\ - && make -j$(nproc) \ - && make install \ - && ldconfig \ - # Ensure SatDump plugins are in the expected path (handles multiarch differences) - && mkdir -p /usr/local/lib/satdump/plugins \ - && if [ -z "$(ls /usr/local/lib/satdump/plugins/*.so 2>/dev/null)" ]; then \ - for dir in /usr/local/lib/*/satdump/plugins /usr/lib/*/satdump/plugins /usr/lib/satdump/plugins; do \ - if [ -d "$dir" ] && [ -n "$(ls "$dir"/*.so 2>/dev/null)" ]; then \ - ln -sf "$dir"/*.so /usr/local/lib/satdump/plugins/; \ - break; \ - fi; \ - done; \ - fi \ - && cd /tmp \ - && rm -rf /tmp/SatDump \ - # Build hackrf CLI tools from source — avoids libhackrf0 version conflict - # between the 'hackrf' apt package and soapysdr-module-hackrf's newer libhackrf0 - && cd /tmp \ - && git clone --depth 1 https://github.com/greatscottgadgets/hackrf.git \ - && cd hackrf/host \ - && mkdir build && cd build \ - && cmake .. \ - && make \ - && make install \ - && ldconfig \ - && rm -rf /tmp/hackrf \ - # Install radiosonde_auto_rx (weather balloon decoder) - && cd /tmp \ - && git clone --depth 1 https://github.com/projecthorus/radiosonde_auto_rx.git \ - && cd radiosonde_auto_rx/auto_rx \ - && pip install --no-cache-dir -r requirements.txt semver \ - && bash build.sh \ - && mkdir -p /opt/radiosonde_auto_rx/auto_rx \ - && cp -r . 
/opt/radiosonde_auto_rx/auto_rx/ \ - && chmod +x /opt/radiosonde_auto_rx/auto_rx/auto_rx.py \ - && cd /tmp \ - && rm -rf /tmp/radiosonde_auto_rx \ - # Build rtlamr (utility meter decoder - requires Go) - && cd /tmp \ - && curl -fsSL "https://go.dev/dl/go1.22.5.linux-$(dpkg --print-architecture).tar.gz" | tar -C /usr/local -xz \ - && export PATH="$PATH:/usr/local/go/bin" \ - && export GOPATH=/tmp/gopath \ - && go install github.com/bemasher/rtlamr@latest \ - && cp /tmp/gopath/bin/rtlamr /usr/bin/rtlamr \ - && rm -rf /usr/local/go /tmp/gopath \ - # Cleanup build tools to reduce image size - # libgtk-3-dev is explicitly removed; runtime GTK libs remain for slowrx - && apt-get remove -y \ - build-essential \ - git \ - pkg-config \ - cmake \ - libncurses-dev \ - libsndfile1-dev \ - libgtk-3-dev \ - libasound2-dev \ - libpng-dev \ - libtiff-dev \ - libjemalloc-dev \ - libvolk-dev \ - libnng-dev \ - libzstd-dev \ - libsoapysdr-dev \ - libhackrf-dev \ - liblimesuite-dev \ - libsqlite3-dev \ - libcurl4-openssl-dev \ - zlib1g-dev \ - libzmq3-dev \ - libpulse-dev \ - libfftw3-dev \ - liblapack-dev \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* +# Copy compiled binaries and libraries from builder stage +COPY --from=builder /staging/usr/bin/ /usr/bin/ +COPY --from=builder /staging/usr/local/bin/ /usr/local/bin/ +COPY --from=builder /staging/usr/local/lib/ /usr/local/lib/ +COPY --from=builder /staging/opt/ /opt/ + +# Copy radiosonde Python dependencies installed during builder stage +COPY --from=builder /usr/local/lib/python3.11/site-packages/ /usr/local/lib/python3.11/site-packages/ + +# Refresh shared library cache for custom-built libraries +RUN ldconfig # Copy requirements first for better caching COPY requirements.txt . 
diff --git a/app.py b/app.py index b1593a0..b1ed60c 100644 --- a/app.py +++ b/app.py @@ -22,6 +22,7 @@ import queue import threading import platform import subprocess +from pathlib import Path from typing import Any @@ -48,6 +49,16 @@ try: _has_limiter = True except ImportError: _has_limiter = False +try: + from flask_compress import Compress + _has_compress = True +except ImportError: + _has_compress = False +try: + from flask_wtf.csrf import CSRFProtect + _has_csrf = True +except ImportError: + _has_csrf = False # Track application start time for uptime calculation import time as _time _app_start_time = _time.time() @@ -55,7 +66,29 @@ logger = logging.getLogger('intercept.database') # Create Flask app app = Flask(__name__) -app.secret_key = "signals_intelligence_secret" # Required for flash messages +def _load_or_generate_secret_key(): + """Load secret key from env var or instance file, generating if needed.""" + env_key = os.environ.get('INTERCEPT_SECRET_KEY') + if env_key: + return env_key + key_path = Path('instance/secret.key') + if key_path.exists(): + return key_path.read_text().strip() + key_path.parent.mkdir(exist_ok=True) + key = os.urandom(32).hex() + key_path.write_text(key) + return key + +app.secret_key = _load_or_generate_secret_key() + +# Set up HTTP compression (gzip/brotli for HTML, CSS, JS, JSON) +if _has_compress: + Compress(app) +else: + logging.getLogger('intercept').warning( + "flask-compress not installed – HTTP compression disabled. " + "Install with: pip install flask-compress" + ) # Set up rate limiting if _has_limiter: @@ -77,6 +110,16 @@ else: return decorator limiter = _NoopLimiter() +# Set up CSRF protection +if _has_csrf: + csrf = CSRFProtect(app) +else: + logging.getLogger('intercept').warning( + "flask-wtf not installed – CSRF protection disabled. 
" + "Install with: pip install flask-wtf" + ) + csrf = None + # Disable Werkzeug debugger PIN (not needed for local development tool) os.environ['WERKZEUG_DEBUG_PIN'] = 'off' @@ -106,6 +149,12 @@ def add_security_headers(response): response.headers['Referrer-Policy'] = 'strict-origin-when-cross-origin' # Permissions policy (disable unnecessary features) response.headers['Permissions-Policy'] = 'geolocation=(self), microphone=()' + # Cache-Control for static assets + if request.path.startswith('/static/'): + if '/vendor/' in request.path: + response.headers['Cache-Control'] = 'public, max-age=604800' # 7 days for vendored libs + else: + response.headers['Cache-Control'] = 'public, max-age=86400' # 24h for app assets return response @@ -803,13 +852,43 @@ def _get_wifi_health() -> tuple[bool, int, int]: @app.route('/health') def health_check() -> Response: """Health check endpoint for monitoring.""" + import platform import time bt_active, bt_device_count = _get_bluetooth_health() wifi_active, wifi_network_count, wifi_client_count = _get_wifi_health() - return jsonify({ - 'status': 'healthy', + + # Database health check + db_ok = True + try: + from utils.database import get_connection + get_connection().execute('SELECT 1') + except Exception: + db_ok = False + + # SDR device count (cached, non-blocking) + sdr_count = 0 + try: + from utils.sdr.detection import get_cached_devices + cached = get_cached_devices() + if cached is not None: + sdr_count = len(cached) + except (ImportError, Exception): + pass + + overall_status = 'healthy' if db_ok else 'degraded' + status_code = 200 if db_ok else 503 + + response = jsonify({ + 'status': overall_status, 'version': VERSION, 'uptime_seconds': round(time.time() - _app_start_time, 2), + 'system': { + 'python_version': platform.python_version(), + 'platform': platform.platform(), + }, + 'database': db_ok, + 'sdr_devices': sdr_count, + 'rate_limiting': _has_limiter, 'processes': { 'pager': current_process is not None and 
(current_process.poll() is None if current_process else False), 'sensor': sensor_process is not None and (sensor_process.poll() is None if sensor_process else False), @@ -843,9 +922,12 @@ def health_check() -> Response: 'dsc_messages_count': len(dsc_messages), } }) + response.status_code = status_code + return response @app.route('/killall', methods=['POST']) +@(csrf.exempt if csrf else lambda f: f) def kill_all() -> Response: """Kill all decoder, WiFi, and Bluetooth processes.""" global current_process, sensor_process, wifi_process, adsb_process, ais_process, acars_process diff --git a/config.py b/config.py index e226a34..c34fd13 100644 --- a/config.py +++ b/config.py @@ -399,7 +399,7 @@ ALERT_WEBHOOK_TIMEOUT = _get_env_int('ALERT_WEBHOOK_TIMEOUT', 5) # Admin credentials ADMIN_USERNAME = _get_env('ADMIN_USERNAME', 'admin') -ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', 'admin') +ADMIN_PASSWORD = _get_env('ADMIN_PASSWORD', '') def configure_logging() -> None: diff --git a/requirements.txt b/requirements.txt index 1b72bb0..103c73c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,7 @@ # Core dependencies flask>=3.0.0 +flask-wtf>=1.2.0 +flask-compress>=1.15 flask-limiter>=2.5.4 requests>=2.28.0 Werkzeug>=3.1.5 diff --git a/routes/__init__.py b/routes/__init__.py index 82b297c..868d3d5 100644 --- a/routes/__init__.py +++ b/routes/__init__.py @@ -1,7 +1,13 @@ # Routes package - registers all blueprints with the Flask app + def register_blueprints(app): """Register all route blueprints with the Flask app.""" + # Import CSRF to exempt API blueprints (they use JSON, not form tokens) + try: + from app import csrf as _csrf + except ImportError: + _csrf = None from .acars import acars_bp from .adsb import adsb_bp from .ais import ais_bp @@ -84,6 +90,11 @@ def register_blueprints(app): app.register_blueprint(system_bp) # System health monitoring app.register_blueprint(ook_bp) # Generic OOK signal decoder + # Exempt all API blueprints from CSRF (they use JSON, not 
form tokens) + if _csrf: + for bp in app.blueprints.values(): + _csrf.exempt(bp) + # Initialize TSCM state with queue and lock from app import app as app_module if hasattr(app_module, 'tscm_queue') and hasattr(app_module, 'tscm_lock'): diff --git a/routes/acars.py b/routes/acars.py index f7141eb..8612d26 100644 --- a/routes/acars.py +++ b/routes/acars.py @@ -17,6 +17,7 @@ from typing import Any, Generator from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error import app as app_module from utils.acars_translator import translate_message from utils.constants import ( @@ -219,18 +220,12 @@ def start_acars() -> Response: with app_module.acars_lock: if app_module.acars_process and app_module.acars_process.poll() is None: - return jsonify({ - 'status': 'error', - 'message': 'ACARS decoder already running' - }), 409 + return api_error('ACARS decoder already running', 409) # Check for acarsdec acarsdec_path = find_acarsdec() if not acarsdec_path: - return jsonify({ - 'status': 'error', - 'message': 'acarsdec not found. Install with: sudo apt install acarsdec' - }), 400 + return api_error('acarsdec not found. 
Install with: sudo apt install acarsdec', 400) data = request.json or {} @@ -240,7 +235,7 @@ def start_acars() -> Response: gain = validate_gain(data.get('gain', '40')) ppm = validate_ppm(data.get('ppm', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Resolve SDR type for device selection sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -249,11 +244,7 @@ def start_acars() -> Response: device_int = int(device) error = app_module.claim_sdr_device(device_int, 'acars', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') acars_active_device = device_int acars_active_sdr_type = sdr_type_str @@ -372,7 +363,7 @@ def start_acars() -> Response: if stderr: error_msg += f': {stderr[:500]}' logger.error(error_msg) - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) app_module.acars_process = process register_process(process) @@ -399,7 +390,7 @@ def start_acars() -> Response: acars_active_device = None acars_active_sdr_type = None logger.error(f"Failed to start ACARS decoder: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @acars_bp.route('/stop', methods=['POST']) @@ -409,10 +400,7 @@ def stop_acars() -> Response: with app_module.acars_lock: if not app_module.acars_process: - return jsonify({ - 'status': 'error', - 'message': 'ACARS decoder not running' - }), 400 + return api_error('ACARS decoder not running', 400) try: app_module.acars_process.terminate() diff --git a/routes/adsb.py b/routes/adsb.py index 6a685d2..e1ea52f 100644 --- a/routes/adsb.py +++ b/routes/adsb.py @@ -17,6 +17,8 @@ from typing import Any, Generator from flask import Blueprint, Response, jsonify, make_response, render_template, request +from utils.responses import api_success, api_error + # psycopg2 is 
optional - only needed for PostgreSQL history persistence try: import psycopg2 @@ -866,7 +868,7 @@ def start_adsb(): gain = int(validate_gain(data.get('gain', '40'))) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Check for remote SBS connection (e.g., remote dump1090) remote_sbs_host = data.get('remote_sbs_host') @@ -878,7 +880,7 @@ def start_adsb(): remote_sbs_host = validate_rtl_tcp_host(remote_sbs_host) remote_sbs_port = validate_rtl_tcp_port(remote_sbs_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) remote_addr = f"{remote_sbs_host}:{remote_sbs_port}" logger.info(f"Connecting to remote dump1090 SBS at {remote_addr}") @@ -935,12 +937,12 @@ def start_adsb(): if sdr_type == SDRType.RTL_SDR: dump1090_path = find_dump1090() if not dump1090_path: - return jsonify({'status': 'error', 'message': 'dump1090 not found. Install dump1090/dump1090-fa or ensure it is in /usr/local/bin/'}) + return api_error('dump1090 not found. Install dump1090/dump1090-fa or ensure it is in /usr/local/bin/') else: # For LimeSDR/HackRF, check for readsb (dump1090 with SoapySDR support) dump1090_path = shutil.which('readsb') or find_dump1090() if not dump1090_path: - return jsonify({'status': 'error', 'message': f'readsb or dump1090 not found for {sdr_type.value}. Install readsb with SoapySDR support.'}) + return api_error(f'readsb or dump1090 not found for {sdr_type.value}. 
Install readsb with SoapySDR support.') # Kill any stale app-started process (use process group to ensure full cleanup) if app_module.adsb_process: @@ -1122,7 +1124,7 @@ def start_adsb(): app_module.release_sdr_device(device_int, sdr_type_str) adsb_active_device = None adsb_active_sdr_type = None - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @adsb_bp.route('/stop', methods=['POST']) @@ -1233,7 +1235,7 @@ def adsb_history(): def adsb_history_summary(): """Summary stats for ADS-B history window.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080) @@ -1256,14 +1258,14 @@ def adsb_history_summary(): return jsonify(row) except Exception as exc: logger.warning("ADS-B history summary failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) @adsb_bp.route('/history/aircraft') def adsb_history_aircraft(): """List latest aircraft snapshots for a time window.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080) @@ -1306,19 +1308,19 @@ def adsb_history_aircraft(): return jsonify({'aircraft': rows, 'count': len(rows)}) except Exception as exc: logger.warning("ADS-B history aircraft query failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) @adsb_bp.route('/history/timeline') def adsb_history_timeline(): """Timeline snapshots for a specific aircraft.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return 
jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() icao = (request.args.get('icao') or '').strip().upper() if not icao: - return jsonify({'error': 'icao is required'}), 400 + return api_error('icao is required', 400) since_minutes = _parse_int_param(request.args.get('since_minutes'), 1440, 1, 10080) limit = _parse_int_param(request.args.get('limit'), 2000, 1, 20000) @@ -1341,14 +1343,14 @@ def adsb_history_timeline(): return jsonify({'icao': icao, 'timeline': rows, 'count': len(rows)}) except Exception as exc: logger.warning("ADS-B history timeline query failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) @adsb_bp.route('/history/messages') def adsb_history_messages(): """Raw message history for a specific aircraft.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() icao = (request.args.get('icao') or '').strip().upper() @@ -1373,22 +1375,22 @@ def adsb_history_messages(): return jsonify({'icao': icao, 'messages': rows, 'count': len(rows)}) except Exception as exc: logger.warning("ADS-B history message query failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) @adsb_bp.route('/history/export') def adsb_history_export(): """Export ADS-B history data in CSV or JSON format.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() export_format = str(request.args.get('format') or 'csv').strip().lower() export_type = str(request.args.get('type') or 'all').strip().lower() if export_format not in {'csv', 'json'}: - return jsonify({'error': 
'format must be csv or json'}), 400 + return api_error('format must be csv or json', 400) if export_type not in {'messages', 'snapshots', 'sessions', 'all'}: - return jsonify({'error': 'type must be messages, snapshots, sessions, or all'}), 400 + return api_error('type must be messages, snapshots, sessions, or all', 400) scope, since_minutes, start, end = _parse_export_scope(request.args) icao = (request.args.get('icao') or '').strip().upper() @@ -1501,7 +1503,7 @@ def adsb_history_export(): sessions = cur.fetchall() except Exception as exc: logger.warning("ADS-B history export failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) exported_at = datetime.now(timezone.utc).isoformat() timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S') @@ -1559,13 +1561,13 @@ def adsb_history_export(): def adsb_history_prune(): """Delete ADS-B history for a selected time range or entire dataset.""" if not ADSB_HISTORY_ENABLED or not PSYCOPG2_AVAILABLE: - return jsonify({'error': 'ADS-B history is disabled'}), 503 + return api_error('ADS-B history is disabled', 503) _ensure_history_schema() payload = request.get_json(silent=True) or {} mode = str(payload.get('mode') or 'range').strip().lower() if mode not in {'range', 'all'}: - return jsonify({'error': 'mode must be range or all'}), 400 + return api_error('mode must be range or all', 400) try: with _get_history_connection() as conn: @@ -1587,11 +1589,11 @@ def adsb_history_prune(): start = _parse_iso_datetime(payload.get('start')) end = _parse_iso_datetime(payload.get('end')) if start is None or end is None: - return jsonify({'error': 'start and end ISO datetime values are required'}), 400 + return api_error('start and end ISO datetime values are required', 400) if end <= start: - return jsonify({'error': 'end must be after start'}), 400 + return api_error('end must be after start', 400) if end - start > timedelta(days=31): - return 
jsonify({'error': 'range cannot exceed 31 days'}), 400 + return api_error('range cannot exceed 31 days', 400) cur.execute( """ @@ -1623,7 +1625,7 @@ def adsb_history_prune(): }) except Exception as exc: logger.warning("ADS-B history prune failed: %s", exc) - return jsonify({'error': 'History database unavailable'}), 503 + return api_error('History database unavailable', 503) # ============================================ @@ -1668,7 +1670,7 @@ def aircraft_photo(registration: str): # Validate registration format (alphanumeric with dashes) if not registration or not all(c.isalnum() or c == '-' for c in registration): - return jsonify({'error': 'Invalid registration'}), 400 + return api_error('Invalid registration', 400) try: # Planespotters.net public API @@ -1701,7 +1703,7 @@ def aircraft_photo(registration: str): def get_aircraft_messages(icao: str): """Get correlated ACARS/VDL2 messages for an aircraft.""" if not icao or not all(c in '0123456789ABCDEFabcdef' for c in icao): - return jsonify({'status': 'error', 'message': 'Invalid ICAO'}), 400 + return api_error('Invalid ICAO', 400) aircraft = app_module.adsb_aircraft.get(icao.upper()) callsign = aircraft.get('callsign') if aircraft else None @@ -1722,4 +1724,4 @@ def get_aircraft_messages(icao: str): except Exception: pass - return jsonify({'status': 'success', 'icao': icao.upper(), **messages}) + return api_success(data={'icao': icao.upper(), **messages}) diff --git a/routes/ais.py b/routes/ais.py index 377300b..1a9f3ca 100644 --- a/routes/ais.py +++ b/routes/ais.py @@ -14,6 +14,7 @@ from typing import Generator from flask import Blueprint, jsonify, request, Response, render_template +from utils.responses import api_success, api_error import app as app_module from config import SHARED_OBSERVER_LOCATION_ENABLED from utils.logging import get_logger @@ -361,7 +362,7 @@ def start_ais(): with app_module.ais_lock: if ais_running: - return jsonify({'status': 'already_running', 'message': 'AIS tracking already active'}), 
409 + return api_error('AIS tracking already active', 409) data = request.json or {} @@ -370,15 +371,12 @@ def start_ais(): gain = int(validate_gain(data.get('gain', '40'))) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Find AIS-catcher ais_catcher_path = find_ais_catcher() if not ais_catcher_path: - return jsonify({ - 'status': 'error', - 'message': 'AIS-catcher not found. Install from https://github.com/jvde-github/AIS-catcher/releases' - }), 400 + return api_error('AIS-catcher not found. Install from https://github.com/jvde-github/AIS-catcher/releases', 400) # Get SDR type from request sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -406,11 +404,7 @@ def start_ais(): device_int = int(device) error = app_module.claim_sdr_device(device_int, 'ais', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') # Build command using SDR abstraction sdr_device = SDRFactory.create_default_device(sdr_type, index=device) @@ -455,7 +449,7 @@ def start_ais(): error_msg = 'AIS-catcher failed to start. Check SDR device connection.' 
if stderr_output: error_msg += f' Error: {stderr_output[:500]}' - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) ais_running = True ais_active_device = device @@ -475,7 +469,7 @@ def start_ais(): # Release device on failure app_module.release_sdr_device(device_int, sdr_type_str) logger.error(f"Failed to start AIS-catcher: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @ais_bp.route('/stop', methods=['POST']) @@ -535,7 +529,7 @@ def stream_ais(): def get_vessel_dsc(mmsi: str): """Get DSC messages associated with a vessel MMSI.""" if not mmsi or not mmsi.isdigit(): - return jsonify({'status': 'error', 'message': 'Invalid MMSI'}), 400 + return api_error('Invalid MMSI', 400) matches = [] try: @@ -545,7 +539,7 @@ def get_vessel_dsc(mmsi: str): except Exception: pass - return jsonify({'status': 'success', 'mmsi': mmsi, 'dsc_messages': matches}) + return api_success(data={'mmsi': mmsi, 'dsc_messages': matches}) @ais_bp.route('/dashboard') diff --git a/routes/alerts.py b/routes/alerts.py index 578d1bc..a1d414b 100644 --- a/routes/alerts.py +++ b/routes/alerts.py @@ -9,6 +9,7 @@ from typing import Generator from flask import Blueprint, Response, jsonify, request from utils.alerts import get_alert_manager +from utils.responses import api_success, api_error from utils.sse import format_sse alerts_bp = Blueprint('alerts', __name__, url_prefix='/alerts') @@ -18,18 +19,18 @@ alerts_bp = Blueprint('alerts', __name__, url_prefix='/alerts') def list_rules(): manager = get_alert_manager() include_disabled = request.args.get('all') in ('1', 'true', 'yes') - return jsonify({'status': 'success', 'rules': manager.list_rules(include_disabled=include_disabled)}) + return api_success(data={'rules': manager.list_rules(include_disabled=include_disabled)}) @alerts_bp.route('/rules', methods=['POST']) def create_rule(): data = request.get_json() or {} if not isinstance(data.get('match', {}), 
dict): - return jsonify({'status': 'error', 'message': 'match must be a JSON object'}), 400 + return api_error('match must be a JSON object', 400) manager = get_alert_manager() rule_id = manager.add_rule(data) - return jsonify({'status': 'success', 'rule_id': rule_id}) + return api_success(data={'rule_id': rule_id}) @alerts_bp.route('/rules/', methods=['PUT', 'PATCH']) @@ -38,8 +39,8 @@ def update_rule(rule_id: int): manager = get_alert_manager() ok = manager.update_rule(rule_id, data) if not ok: - return jsonify({'status': 'error', 'message': 'Rule not found or no changes'}), 404 - return jsonify({'status': 'success'}) + return api_error('Rule not found or no changes', 404) + return api_success() @alerts_bp.route('/rules/', methods=['DELETE']) @@ -47,8 +48,8 @@ def delete_rule(rule_id: int): manager = get_alert_manager() ok = manager.delete_rule(rule_id) if not ok: - return jsonify({'status': 'error', 'message': 'Rule not found'}), 404 - return jsonify({'status': 'success'}) + return api_error('Rule not found', 404) + return api_success() @alerts_bp.route('/events', methods=['GET']) @@ -58,7 +59,7 @@ def list_events(): mode = request.args.get('mode') severity = request.args.get('severity') events = manager.list_events(limit=limit, mode=mode, severity=severity) - return jsonify({'status': 'success', 'events': events}) + return api_success(data={'events': events}) @alerts_bp.route('/stream', methods=['GET']) diff --git a/routes/aprs.py b/routes/aprs.py index 4c40b02..09cc811 100644 --- a/routes/aprs.py +++ b/routes/aprs.py @@ -20,6 +20,7 @@ from typing import Any, Generator, Optional from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.logging import sensor_logger as logger from utils.validation import ( @@ -1651,8 +1652,7 @@ def aprs_data() -> Response: if app_module.aprs_process: running = app_module.aprs_process.poll() is None - return jsonify({ - 'status': 'success', + 
return api_success(data={ 'running': running, 'stations': list(aprs_stations.values()), 'count': len(aprs_stations), @@ -1670,20 +1670,14 @@ def start_aprs() -> Response: with app_module.aprs_lock: if app_module.aprs_process and app_module.aprs_process.poll() is None: - return jsonify({ - 'status': 'error', - 'message': 'APRS decoder already running' - }), 409 + return api_error('APRS decoder already running', 409) # Check for decoder (prefer direwolf, fallback to multimon-ng) direwolf_path = find_direwolf() multimon_path = find_multimon_ng() if not direwolf_path and not multimon_path: - return jsonify({ - 'status': 'error', - 'message': 'No APRS decoder found. Install direwolf or multimon-ng' - }), 400 + return api_error('No APRS decoder found. Install direwolf or multimon-ng', 400) data = request.json or {} @@ -1693,7 +1687,7 @@ def start_aprs() -> Response: gain = validate_gain(data.get('gain', '40')) ppm = validate_ppm(data.get('ppm', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Check for rtl_tcp (remote SDR) connection rtl_tcp_host = data.get('rtl_tcp_host') @@ -1707,26 +1701,16 @@ def start_aprs() -> Response: if sdr_type == SDRType.RTL_SDR: if find_rtl_fm() is None: - return jsonify({ - 'status': 'error', - 'message': 'rtl_fm not found. Install with: sudo apt install rtl-sdr' - }), 400 + return api_error('rtl_fm not found. Install with: sudo apt install rtl-sdr', 400) else: if find_rx_fm() is None: - return jsonify({ - 'status': 'error', - 'message': f'rx_fm not found. Install SoapySDR tools for {sdr_type.value}.' - }), 400 + return api_error(f'rx_fm not found. 
Install SoapySDR tools for {sdr_type.value}.', 400) # Reserve SDR device to prevent conflicts (skip for remote rtl_tcp) if not rtl_tcp_host: error = app_module.claim_sdr_device(device, 'aprs', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') aprs_active_device = device aprs_active_sdr_type = sdr_type_str @@ -1757,7 +1741,7 @@ def start_aprs() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f"Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}") else: @@ -1782,7 +1766,7 @@ def start_aprs() -> Response: app_module.release_sdr_device(aprs_active_device, aprs_active_sdr_type or 'rtlsdr') aprs_active_device = None aprs_active_sdr_type = None - return jsonify({'status': 'error', 'message': f'Failed to build SDR command: {e}'}), 500 + return api_error(f'Failed to build SDR command: {e}', 500) # Build decoder command if direwolf_path: @@ -1888,7 +1872,7 @@ def start_aprs() -> Response: app_module.release_sdr_device(aprs_active_device, aprs_active_sdr_type or 'rtlsdr') aprs_active_device = None aprs_active_sdr_type = None - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) if decoder_process.poll() is not None: # Decoder exited early - capture any output from PTY @@ -1916,7 +1900,7 @@ def start_aprs() -> Response: app_module.release_sdr_device(aprs_active_device, aprs_active_sdr_type or 'rtlsdr') aprs_active_device = None aprs_active_sdr_type = None - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) # Store references for status checks and cleanup app_module.aprs_process = 
decoder_process @@ -1946,7 +1930,7 @@ def start_aprs() -> Response: app_module.release_sdr_device(aprs_active_device, aprs_active_sdr_type or 'rtlsdr') aprs_active_device = None aprs_active_sdr_type = None - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @aprs_bp.route('/stop', methods=['POST']) @@ -1964,10 +1948,7 @@ def stop_aprs() -> Response: processes_to_stop.append(app_module.aprs_process) if not processes_to_stop: - return jsonify({ - 'status': 'error', - 'message': 'APRS decoder not running' - }), 400 + return api_error('APRS decoder not running', 400) for proc in processes_to_stop: try: @@ -2045,10 +2026,7 @@ def scan_aprs_spectrum() -> Response: """ rtl_power_path = find_rtl_power() if not rtl_power_path: - return jsonify({ - 'status': 'error', - 'message': 'rtl_power not found. Install with: sudo apt install rtl-sdr' - }), 400 + return api_error('rtl_power not found. Install with: sudo apt install rtl-sdr', 400) # Get parameters from JSON body or query args if request.is_json: @@ -2068,7 +2046,7 @@ def scan_aprs_spectrum() -> Response: gain = validate_gain(gain) duration = min(max(int(duration), 5), 60) # Clamp 5-60 seconds except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Get center frequency if frequency: @@ -2113,18 +2091,12 @@ def scan_aprs_spectrum() -> Response: if result.returncode != 0: error_msg = result.stderr[:200] if result.stderr else f'Exit code {result.returncode}' - return jsonify({ - 'status': 'error', - 'message': f'rtl_power failed: {error_msg}' - }), 500 + return api_error(f'rtl_power failed: {error_msg}', 500) # Parse rtl_power CSV output # Format: date, time, start_hz, end_hz, step_hz, samples, db1, db2, db3, ... 
if not os.path.exists(tmp_file): - return jsonify({ - 'status': 'error', - 'message': 'rtl_power did not produce output file' - }), 500 + return api_error('rtl_power did not produce output file', 500) bins = [] with open(tmp_file, 'r') as f: @@ -2144,10 +2116,7 @@ def scan_aprs_spectrum() -> Response: continue if not bins: - return jsonify({ - 'status': 'error', - 'message': 'No spectrum data collected. Check SDR connection and antenna.' - }), 500 + return api_error('No spectrum data collected. Check SDR connection and antenna.', 500) # Calculate statistics db_values = [b['db'] for b in bins] @@ -2177,8 +2146,7 @@ def scan_aprs_spectrum() -> Response: else: advice = "Good signal detected. Decoding should work well." - return jsonify({ - 'status': 'success', + return api_success(data={ 'scan_params': { 'center_freq_mhz': center_freq_mhz, 'start_freq_mhz': start_freq_mhz, @@ -2204,13 +2172,10 @@ def scan_aprs_spectrum() -> Response: }) except subprocess.TimeoutExpired: - return jsonify({ - 'status': 'error', - 'message': f'Spectrum scan timed out after {duration + 15} seconds' - }), 500 + return api_error(f'Spectrum scan timed out after {duration + 15} seconds', 500) except Exception as e: logger.error(f"Spectrum scan error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) finally: # Cleanup temp file try: diff --git a/routes/bluetooth.py b/routes/bluetooth.py index 9f48ab6..15de6b2 100644 --- a/routes/bluetooth.py +++ b/routes/bluetooth.py @@ -17,12 +17,13 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module -from utils.dependencies import check_tool -from utils.logging import bluetooth_logger as logger -from utils.sse import sse_stream_fanout -from utils.event_pipeline import process_event -from utils.validation import validate_bluetooth_interface +from utils.dependencies import check_tool +from 
utils.logging import bluetooth_logger as logger +from utils.sse import sse_stream_fanout +from utils.event_pipeline import process_event +from utils.validation import validate_bluetooth_interface from data.oui import OUI_DATABASE, load_oui_database, get_manufacturer from data.patterns import AIRTAG_PREFIXES, TILE_PREFIXES, SAMSUNG_TRACKER from utils.constants import ( @@ -39,6 +40,23 @@ from utils.constants import ( bluetooth_bp = Blueprint('bluetooth', __name__, url_prefix='/bt') +# --- v1 deprecation --- +# These endpoints are deprecated in favor of /api/bluetooth/*. +# Frontend still uses v1, so they remain active. +# Migration: switch frontend to v2 endpoints, then remove this file. +_v1_deprecation_logged = set() + + +@bluetooth_bp.after_request +def _add_deprecation_header(response): + """Add X-Deprecated header to all v1 Bluetooth responses.""" + response.headers['X-Deprecated'] = 'Use /api/bluetooth/* endpoints instead' + endpoint = request.endpoint or '' + if endpoint not in _v1_deprecation_logged: + _v1_deprecation_logged.add(endpoint) + logger.warning(f"Deprecated v1 Bluetooth endpoint called: {request.path} — migrate to /api/bluetooth/*") + return response + def classify_bt_device(name, device_class, services, manufacturer=None): """Classify Bluetooth device type based on available info.""" @@ -331,8 +349,8 @@ def reload_oui_database_route(): if new_db: OUI_DATABASE.clear() OUI_DATABASE.update(new_db) - return jsonify({'status': 'success', 'entries': len(OUI_DATABASE)}) - return jsonify({'status': 'error', 'message': 'Could not load oui_database.json'}) + return api_success(data={'entries': len(OUI_DATABASE)}) + return api_error('Could not load oui_database.json') @bluetooth_bp.route('/interfaces') @@ -359,7 +377,7 @@ def start_bt_scan(): with app_module.bt_lock: if app_module.bt_process: if app_module.bt_process.poll() is None: - return jsonify({'status': 'error', 'message': 'Scan already running'}) + return api_error('Scan already running') else: 
app_module.bt_process = None @@ -371,7 +389,7 @@ def start_bt_scan(): try: interface = validate_bluetooth_interface(data.get('interface', 'hci0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) app_module.bt_interface = interface app_module.bt_devices = {} @@ -413,14 +431,14 @@ def start_bt_scan(): os.write(master_fd, b'scan on\n') else: - return jsonify({'status': 'error', 'message': f'Unknown scan mode: {scan_mode}'}) + return api_error(f'Unknown scan mode: {scan_mode}') time.sleep(0.5) if app_module.bt_process.poll() is not None: stderr_output = app_module.bt_process.stderr.read().decode('utf-8', errors='replace').strip() app_module.bt_process = None - return jsonify({'status': 'error', 'message': stderr_output or 'Process failed to start'}) + return api_error(stderr_output or 'Process failed to start') thread = threading.Thread(target=stream_bt_scan, args=(app_module.bt_process, scan_mode)) thread.daemon = True @@ -430,9 +448,9 @@ def start_bt_scan(): return jsonify({'status': 'started', 'mode': scan_mode, 'interface': interface}) except FileNotFoundError as e: - return jsonify({'status': 'error', 'message': f'Tool not found: {e.filename}'}) + return api_error(f'Tool not found: {e.filename}') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @bluetooth_bp.route('/scan/stop', methods=['POST']) @@ -459,7 +477,7 @@ def reset_bt_adapter(): try: interface = validate_bluetooth_interface(data.get('interface', 'hci0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) with app_module.bt_lock: if app_module.bt_process: @@ -494,7 +512,7 @@ def reset_bt_adapter(): }) except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @bluetooth_bp.route('/enum', methods=['POST']) @@ -504,7 +522,7 @@ def enum_bt_services(): target_mac = 
data.get('mac') if not target_mac: - return jsonify({'status': 'error', 'message': 'Target MAC required'}) + return api_error('Target MAC required') try: result = subprocess.run( @@ -529,18 +547,17 @@ def enum_bt_services(): app_module.bt_services[target_mac] = services - return jsonify({ - 'status': 'success', + return api_success(data={ 'mac': target_mac, 'services': services }) except subprocess.TimeoutExpired: - return jsonify({'status': 'error', 'message': 'Connection timed out'}) + return api_error('Connection timed out') except FileNotFoundError: - return jsonify({'status': 'error', 'message': 'sdptool not found'}) + return api_error('sdptool not found') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @bluetooth_bp.route('/devices') @@ -553,23 +570,23 @@ def get_bt_devices(): }) -@bluetooth_bp.route('/stream') -def stream_bt(): - """SSE stream for Bluetooth events.""" - def _on_msg(msg: dict[str, Any]) -> None: - process_event('bluetooth', msg, msg.get('type')) - - response = Response( - sse_stream_fanout( - source_queue=app_module.bt_queue, - channel_key='bluetooth', - timeout=1.0, - keepalive_interval=30.0, - on_message=_on_msg, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' +@bluetooth_bp.route('/stream') +def stream_bt(): + """SSE stream for Bluetooth events.""" + def _on_msg(msg: dict[str, Any]) -> None: + process_event('bluetooth', msg, msg.get('type')) + + response = Response( + sse_stream_fanout( + source_queue=app_module.bt_queue, + channel_key='bluetooth', + timeout=1.0, + keepalive_interval=30.0, + on_message=_on_msg, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' return response diff --git a/routes/bluetooth_v2.py 
b/routes/bluetooth_v2.py index 8878539..f152baa 100644 --- a/routes/bluetooth_v2.py +++ b/routes/bluetooth_v2.py @@ -29,6 +29,7 @@ from utils.bluetooth import ( get_tracker_engine, ) from utils.database import get_db +from utils.responses import api_success, api_error from utils.sse import format_sse from utils.event_pipeline import process_event @@ -231,7 +232,7 @@ def start_scan(): # Validate mode valid_modes = ('auto', 'dbus', 'bleak', 'hcitool', 'bluetoothctl', 'ubertooth') if mode not in valid_modes: - return jsonify({'error': f'Invalid mode. Must be one of: {valid_modes}'}), 400 + return api_error(f'Invalid mode. Must be one of: {valid_modes}', 400) # Get scanner instance scanner = get_bluetooth_scanner(adapter_id) @@ -389,7 +390,7 @@ def get_device(device_id: str): device = scanner.get_device(device_id) if not device: - return jsonify({'error': 'Device not found'}), 404 + return api_error('Device not found', 404) return jsonify(device.to_dict()) @@ -529,7 +530,7 @@ def get_tracker_detail(device_id: str): device = scanner.get_device(device_id) if not device: - return jsonify({'error': 'Device not found'}), 404 + return api_error('Device not found', 404) # Get RSSI history for timeline rssi_history = device.get_rssi_history(max_points=100) diff --git a/routes/bt_locate.py b/routes/bt_locate.py index dda456d..b6e6dca 100644 --- a/routes/bt_locate.py +++ b/routes/bt_locate.py @@ -12,6 +12,7 @@ from collections.abc import Generator from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error from utils.bluetooth.irk_extractor import get_paired_irks from utils.bt_locate import ( Environment, @@ -33,18 +34,18 @@ def start_session(): """ Start a locate session. 
- Request JSON: - - mac_address: Target MAC address (optional) - - name_pattern: Target name substring (optional) - - irk_hex: Identity Resolving Key hex string (optional) - - device_id: Device ID from Bluetooth scanner (optional) - - device_key: Stable device key from Bluetooth scanner (optional) - - fingerprint_id: Payload fingerprint ID from Bluetooth scanner (optional) - - known_name: Hand-off device name (optional) - - known_manufacturer: Hand-off manufacturer (optional) - - last_known_rssi: Hand-off last RSSI (optional) - - environment: 'FREE_SPACE', 'OUTDOOR', 'INDOOR', 'CUSTOM' (default: OUTDOOR) - - custom_exponent: Path loss exponent for CUSTOM environment (optional) + Request JSON: + - mac_address: Target MAC address (optional) + - name_pattern: Target name substring (optional) + - irk_hex: Identity Resolving Key hex string (optional) + - device_id: Device ID from Bluetooth scanner (optional) + - device_key: Stable device key from Bluetooth scanner (optional) + - fingerprint_id: Payload fingerprint ID from Bluetooth scanner (optional) + - known_name: Hand-off device name (optional) + - known_manufacturer: Hand-off manufacturer (optional) + - last_known_rssi: Hand-off last RSSI (optional) + - environment: 'FREE_SPACE', 'OUTDOOR', 'INDOOR', 'CUSTOM' (default: OUTDOOR) + - custom_exponent: Path loss exponent for CUSTOM environment (optional) Returns: JSON with session status. 
@@ -52,47 +53,46 @@ def start_session(): data = request.get_json() or {} # Build target - target = LocateTarget( - mac_address=data.get('mac_address'), - name_pattern=data.get('name_pattern'), - irk_hex=data.get('irk_hex'), - device_id=data.get('device_id'), - device_key=data.get('device_key'), - fingerprint_id=data.get('fingerprint_id'), - known_name=data.get('known_name'), - known_manufacturer=data.get('known_manufacturer'), - last_known_rssi=data.get('last_known_rssi'), - ) + target = LocateTarget( + mac_address=data.get('mac_address'), + name_pattern=data.get('name_pattern'), + irk_hex=data.get('irk_hex'), + device_id=data.get('device_id'), + device_key=data.get('device_key'), + fingerprint_id=data.get('fingerprint_id'), + known_name=data.get('known_name'), + known_manufacturer=data.get('known_manufacturer'), + last_known_rssi=data.get('last_known_rssi'), + ) # At least one identifier required - if not any([ - target.mac_address, - target.name_pattern, - target.irk_hex, - target.device_id, - target.device_key, - target.fingerprint_id, - ]): - return jsonify({ - 'error': ( - 'At least one target identifier required ' - '(mac_address, name_pattern, irk_hex, device_id, device_key, or fingerprint_id)' - ) - }), 400 + if not any([ + target.mac_address, + target.name_pattern, + target.irk_hex, + target.device_id, + target.device_key, + target.fingerprint_id, + ]): + return api_error( + 'At least one target identifier required ' + '(mac_address, name_pattern, irk_hex, device_id, device_key, or fingerprint_id)', + 400 + ) # Parse environment env_str = data.get('environment', 'OUTDOOR').upper() try: environment = Environment[env_str] except KeyError: - return jsonify({'error': f'Invalid environment: {env_str}'}), 400 + return api_error(f'Invalid environment: {env_str}', 400) custom_exponent = data.get('custom_exponent') if custom_exponent is not None: try: custom_exponent = float(custom_exponent) except (ValueError, TypeError): - return jsonify({'error': 
'custom_exponent must be a number'}), 400 + return api_error('custom_exponent must be a number', 400) # Fallback coordinates when GPS is unavailable (from user settings) fallback_lat = None @@ -109,27 +109,21 @@ def start_session(): f"env={environment.name}, fallback=({fallback_lat}, {fallback_lon})" ) - try: - session = start_locate_session( - target, environment, custom_exponent, fallback_lat, fallback_lon - ) - except RuntimeError as exc: - logger.warning(f"Unable to start BT Locate session: {exc}") - return jsonify({ - 'status': 'error', - 'error': 'Bluetooth scanner could not be started. Check adapter permissions/capabilities.', - }), 503 - except Exception as exc: - logger.exception(f"Unexpected error starting BT Locate session: {exc}") - return jsonify({ - 'status': 'error', - 'error': 'Failed to start locate session', - }), 500 - - return jsonify({ - 'status': 'started', - 'session': session.get_status(), - }) + try: + session = start_locate_session( + target, environment, custom_exponent, fallback_lat, fallback_lon + ) + except RuntimeError as exc: + logger.warning(f"Unable to start BT Locate session: {exc}") + return api_error('Bluetooth scanner could not be started. 
Check adapter permissions/capabilities.', 503) + except Exception as exc: + logger.exception(f"Unexpected error starting BT Locate session: {exc}") + return api_error('Failed to start locate session', 500) + + return jsonify({ + 'status': 'started', + 'session': session.get_status(), + }) @bt_locate_bp.route('/stop', methods=['POST']) @@ -143,18 +137,18 @@ def stop_session(): return jsonify({'status': 'stopped'}) -@bt_locate_bp.route('/status', methods=['GET']) -def get_status(): - """Get locate session status.""" - session = get_locate_session() - if not session: +@bt_locate_bp.route('/status', methods=['GET']) +def get_status(): + """Get locate session status.""" + session = get_locate_session() + if not session: return jsonify({ - 'active': False, - 'target': None, - }) - - include_debug = str(request.args.get('debug', '')).lower() in ('1', 'true', 'yes') - return jsonify(session.get_status(include_debug=include_debug)) + 'active': False, + 'target': None, + }) + + include_debug = str(request.args.get('debug', '')).lower() in ('1', 'true', 'yes') + return jsonify(session.get_status(include_debug=include_debug)) @bt_locate_bp.route('/trail', methods=['GET']) @@ -216,15 +210,15 @@ def test_resolve_rpa(): address = data.get('address', '') if not irk_hex or not address: - return jsonify({'error': 'irk_hex and address are required'}), 400 + return api_error('irk_hex and address are required', 400) try: irk = bytes.fromhex(irk_hex) except ValueError: - return jsonify({'error': 'Invalid IRK hex string'}), 400 + return api_error('Invalid IRK hex string', 400) if len(irk) != 16: - return jsonify({'error': 'IRK must be exactly 16 bytes (32 hex characters)'}), 400 + return api_error('IRK must be exactly 16 bytes (32 hex characters)', 400) result = resolve_rpa(irk, address) return jsonify({ @@ -239,14 +233,14 @@ def set_environment(): """Update the environment on the active session.""" session = get_locate_session() if not session: - return jsonify({'error': 'no active 
session'}), 400 + return api_error('no active session', 400) data = request.get_json() or {} env_str = data.get('environment', '').upper() try: environment = Environment[env_str] except KeyError: - return jsonify({'error': f'Invalid environment: {env_str}'}), 400 + return api_error(f'Invalid environment: {env_str}', 400) custom_exponent = data.get('custom_exponent') if custom_exponent is not None: @@ -268,11 +262,11 @@ def debug_matching(): """Debug endpoint showing scanner devices and match results.""" session = get_locate_session() if not session: - return jsonify({'error': 'no session'}) + return api_error('no session') scanner = session._scanner if not scanner: - return jsonify({'error': 'no scanner'}) + return api_error('no scanner') devices = scanner.get_devices(max_age_seconds=30) return jsonify({ diff --git a/routes/controller.py b/routes/controller.py index eb46e92..5f94075 100644 --- a/routes/controller.py +++ b/routes/controller.py @@ -10,18 +10,19 @@ This blueprint provides: from __future__ import annotations -import json -import logging -import queue -import threading -import time -from datetime import datetime, timezone -from typing import Generator - -import requests - -from flask import Blueprint, jsonify, request, Response +import json +import logging +import queue +import threading +import time +from datetime import datetime, timezone +from typing import Generator +import requests + +from flask import Blueprint, jsonify, request, Response + +from utils.responses import api_success, api_error from utils.database import ( create_agent, get_agent, get_agent_by_name, list_agents, update_agent, delete_agent, store_push_payload, get_recent_payloads @@ -37,28 +38,28 @@ from utils.trilateration import ( logger = logging.getLogger('intercept.controller') -controller_bp = Blueprint('controller', __name__, url_prefix='/controller') - -# Multi-agent SSE fanout state (per-client queues). 
-_agent_stream_subscribers: set[queue.Queue] = set() -_agent_stream_subscribers_lock = threading.Lock() -_AGENT_STREAM_CLIENT_QUEUE_SIZE = 500 - - -def _broadcast_agent_data(payload: dict) -> None: - """Fan out an ingested payload to all active /controller/stream/all clients.""" - with _agent_stream_subscribers_lock: - subscribers = tuple(_agent_stream_subscribers) - - for subscriber in subscribers: - try: - subscriber.put_nowait(payload) - except queue.Full: - try: - subscriber.get_nowait() - subscriber.put_nowait(payload) - except (queue.Empty, queue.Full): - continue +controller_bp = Blueprint('controller', __name__, url_prefix='/controller') + +# Multi-agent SSE fanout state (per-client queues). +_agent_stream_subscribers: set[queue.Queue] = set() +_agent_stream_subscribers_lock = threading.Lock() +_AGENT_STREAM_CLIENT_QUEUE_SIZE = 500 + + +def _broadcast_agent_data(payload: dict) -> None: + """Fan out an ingested payload to all active /controller/stream/all clients.""" + with _agent_stream_subscribers_lock: + subscribers = tuple(_agent_stream_subscribers) + + for subscriber in subscribers: + try: + subscriber.put_nowait(payload) + except queue.Full: + try: + subscriber.get_nowait() + subscriber.put_nowait(payload) + except (queue.Empty, queue.Full): + continue # ============================================================================= @@ -108,28 +109,25 @@ def register_agent(): base_url = data.get('base_url', '').strip() if not name: - return jsonify({'status': 'error', 'message': 'Agent name is required'}), 400 + return api_error('Agent name is required', 400) if not base_url: - return jsonify({'status': 'error', 'message': 'Base URL is required'}), 400 + return api_error('Base URL is required', 400) # Validate URL format from urllib.parse import urlparse try: parsed = urlparse(base_url) if parsed.scheme not in ('http', 'https'): - return jsonify({'status': 'error', 'message': 'URL must start with http:// or https://'}), 400 + return api_error('URL must 
start with http:// or https://', 400) if not parsed.netloc: - return jsonify({'status': 'error', 'message': 'Invalid URL format'}), 400 + return api_error('Invalid URL format', 400) except Exception: - return jsonify({'status': 'error', 'message': 'Invalid URL format'}), 400 + return api_error('Invalid URL format', 400) # Check if agent already exists existing = get_agent_by_name(name) if existing: - return jsonify({ - 'status': 'error', - 'message': f'Agent with name "{name}" already exists' - }), 409 + return api_error(f'Agent with name "{name}" already exists', 409) # Try to connect and get capabilities api_key = data.get('api_key', '').strip() or None @@ -171,7 +169,7 @@ def register_agent(): except Exception as e: logger.exception("Failed to create agent") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @controller_bp.route('/agents/', methods=['GET']) @@ -179,7 +177,7 @@ def get_agent_detail(agent_id: int): """Get details of a specific agent.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) # Optionally refresh from agent refresh = request.args.get('refresh', 'false').lower() == 'true' @@ -215,7 +213,7 @@ def update_agent_detail(agent_id: int): """Update an agent's details.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) data = request.json or {} @@ -237,7 +235,7 @@ def remove_agent(agent_id: int): """Delete an agent.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) delete_agent(agent_id) return jsonify({'status': 'success', 'message': 'Agent deleted'}) @@ -248,7 +246,7 @@ def refresh_agent_metadata(agent_id: int): """Refresh an agent's capabilities and status.""" agent = 
get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) try: client = create_client_from_agent(agent) @@ -274,16 +272,10 @@ def refresh_agent_metadata(agent_id: int): 'metadata': metadata }) else: - return jsonify({ - 'status': 'error', - 'message': 'Agent is not reachable' - }), 503 + return api_error('Agent is not reachable', 503) except (AgentHTTPError, AgentConnectionError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Failed to reach agent: {e}' - }), 503 + return api_error(f'Failed to reach agent: {e}', 503) # ============================================================================= @@ -295,7 +287,7 @@ def get_agent_status(agent_id: int): """Get an agent's current status including running modes.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) try: client = create_client_from_agent(agent) @@ -307,10 +299,7 @@ def get_agent_status(agent_id: int): 'agent_status': status }) except (AgentHTTPError, AgentConnectionError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Failed to reach agent: {e}' - }), 503 + return api_error(f'Failed to reach agent: {e}', 503) @controller_bp.route('/agents/health', methods=['GET']) @@ -384,7 +373,7 @@ def proxy_start_mode(agent_id: int, mode: str): """Start a mode on a remote agent.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) params = request.json or {} @@ -403,15 +392,9 @@ def proxy_start_mode(agent_id: int, mode: str): }) except AgentConnectionError as e: - return jsonify({ - 'status': 'error', - 'message': f'Cannot connect to agent: {e}' - }), 503 + return api_error(f'Cannot connect to agent: {e}', 503) except AgentHTTPError as e: - return jsonify({ - 'status': 'error', - 
'message': f'Agent error: {e}' - }), 502 + return api_error(f'Agent error: {e}', 502) @controller_bp.route('/agents///stop', methods=['POST']) @@ -419,7 +402,7 @@ def proxy_stop_mode(agent_id: int, mode: str): """Stop a mode on a remote agent.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) try: client = create_client_from_agent(agent) @@ -435,15 +418,9 @@ def proxy_stop_mode(agent_id: int, mode: str): }) except AgentConnectionError as e: - return jsonify({ - 'status': 'error', - 'message': f'Cannot connect to agent: {e}' - }), 503 + return api_error(f'Cannot connect to agent: {e}', 503) except AgentHTTPError as e: - return jsonify({ - 'status': 'error', - 'message': f'Agent error: {e}' - }), 502 + return api_error(f'Agent error: {e}', 502) @controller_bp.route('/agents///status', methods=['GET']) @@ -451,7 +428,7 @@ def proxy_mode_status(agent_id: int, mode: str): """Get mode status from a remote agent.""" agent = get_agent(agent_id) if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + return api_error('Agent not found', 404) try: client = create_client_from_agent(agent) @@ -465,18 +442,15 @@ def proxy_mode_status(agent_id: int, mode: str): }) except (AgentHTTPError, AgentConnectionError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Agent error: {e}' - }), 502 + return api_error(f'Agent error: {e}', 502) -@controller_bp.route('/agents///data', methods=['GET']) -def proxy_mode_data(agent_id: int, mode: str): - """Get current data from a remote agent.""" - agent = get_agent(agent_id) - if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 +@controller_bp.route('/agents///data', methods=['GET']) +def proxy_mode_data(agent_id: int, mode: str): + """Get current data from a remote agent.""" + agent = get_agent(agent_id) + if not agent: + return api_error('Agent not found', 
404) try: client = create_client_from_agent(agent) @@ -494,60 +468,57 @@ def proxy_mode_data(agent_id: int, mode: str): 'data': result }) - except (AgentHTTPError, AgentConnectionError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Agent error: {e}' - }), 502 - - -@controller_bp.route('/agents///stream') -def proxy_mode_stream(agent_id: int, mode: str): - """Proxy SSE stream from a remote agent.""" - agent = get_agent(agent_id) - if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 - - client = create_client_from_agent(agent) - query = request.query_string.decode('utf-8') - url = f"{client.base_url}/{mode}/stream" - if query: - url = f"{url}?{query}" - - headers = {'Accept': 'text/event-stream'} - if agent.get('api_key'): - headers['X-API-Key'] = agent['api_key'] - - def generate() -> Generator[str, None, None]: - try: - with requests.get(url, headers=headers, stream=True, timeout=(5, 3600)) as resp: - resp.raise_for_status() - for chunk in resp.iter_content(chunk_size=1024): - if not chunk: - continue - yield chunk.decode('utf-8', errors='ignore') - except Exception as e: - logger.error(f"SSE proxy error for agent {agent_id}/{mode}: {e}") - yield format_sse({ - 'type': 'error', - 'message': str(e), - 'agent_id': agent_id, - 'mode': mode, - }) - - response = Response(generate(), mimetype='text/event-stream') - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' - return response - - -@controller_bp.route('/agents//wifi/monitor', methods=['POST']) -def proxy_wifi_monitor(agent_id: int): - """Toggle monitor mode on a remote agent's WiFi interface.""" - agent = get_agent(agent_id) - if not agent: - return jsonify({'status': 'error', 'message': 'Agent not found'}), 404 + except (AgentHTTPError, AgentConnectionError) as e: + return api_error(f'Agent error: {e}', 502) + + +@controller_bp.route('/agents///stream') +def 
proxy_mode_stream(agent_id: int, mode: str): + """Proxy SSE stream from a remote agent.""" + agent = get_agent(agent_id) + if not agent: + return api_error('Agent not found', 404) + + client = create_client_from_agent(agent) + query = request.query_string.decode('utf-8') + url = f"{client.base_url}/{mode}/stream" + if query: + url = f"{url}?{query}" + + headers = {'Accept': 'text/event-stream'} + if agent.get('api_key'): + headers['X-API-Key'] = agent['api_key'] + + def generate() -> Generator[str, None, None]: + try: + with requests.get(url, headers=headers, stream=True, timeout=(5, 3600)) as resp: + resp.raise_for_status() + for chunk in resp.iter_content(chunk_size=1024): + if not chunk: + continue + yield chunk.decode('utf-8', errors='ignore') + except Exception as e: + logger.error(f"SSE proxy error for agent {agent_id}/{mode}: {e}") + yield format_sse({ + 'type': 'error', + 'message': str(e), + 'agent_id': agent_id, + 'mode': mode, + }) + + response = Response(generate(), mimetype='text/event-stream') + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' + return response + + +@controller_bp.route('/agents//wifi/monitor', methods=['POST']) +def proxy_wifi_monitor(agent_id: int): + """Toggle monitor mode on a remote agent's WiFi interface.""" + agent = get_agent(agent_id) + if not agent: + return api_error('Agent not found', 404) data = request.json or {} @@ -582,15 +553,9 @@ def proxy_wifi_monitor(agent_id: int): }) except AgentConnectionError as e: - return jsonify({ - 'status': 'error', - 'message': f'Cannot connect to agent: {e}' - }), 503 + return api_error(f'Cannot connect to agent: {e}', 503) except AgentHTTPError as e: - return jsonify({ - 'status': 'error', - 'message': f'Agent error: {e}' - }), 502 + return api_error(f'Agent error: {e}', 502) # ============================================================================= @@ -616,23 +581,23 @@ def 
ingest_push_data(): """ data = request.json if not data: - return jsonify({'status': 'error', 'message': 'No data provided'}), 400 + return api_error('No data provided', 400) agent_name = data.get('agent_name') if not agent_name: - return jsonify({'status': 'error', 'message': 'agent_name required'}), 400 + return api_error('agent_name required', 400) # Find agent agent = get_agent_by_name(agent_name) if not agent: - return jsonify({'status': 'error', 'message': 'Unknown agent'}), 401 + return api_error('Unknown agent', 401) # Validate API key if configured if agent.get('api_key'): provided_key = request.headers.get('X-API-Key', '') if provided_key != agent['api_key']: logger.warning(f"Invalid API key from agent {agent_name}") - return jsonify({'status': 'error', 'message': 'Invalid API key'}), 401 + return api_error('Invalid API key', 401) # Store payload try: @@ -644,16 +609,16 @@ def ingest_push_data(): received_at=data.get('received_at') ) - # Emit to SSE stream (fanout to all connected clients) - _broadcast_agent_data({ - 'type': 'agent_data', - 'agent_id': agent['id'], - 'agent_name': agent_name, - 'scan_type': data.get('scan_type'), - 'interface': data.get('interface'), - 'payload': data.get('payload'), - 'received_at': data.get('received_at') or datetime.now(timezone.utc).isoformat() - }) + # Emit to SSE stream (fanout to all connected clients) + _broadcast_agent_data({ + 'type': 'agent_data', + 'agent_id': agent['id'], + 'agent_name': agent_name, + 'scan_type': data.get('scan_type'), + 'interface': data.get('interface'), + 'payload': data.get('payload'), + 'received_at': data.get('received_at') or datetime.now(timezone.utc).isoformat() + }) return jsonify({ 'status': 'accepted', @@ -662,7 +627,7 @@ def ingest_push_data(): except Exception as e: logger.exception("Failed to store push payload") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @controller_bp.route('/api/payloads', methods=['GET']) @@ -690,35 
+655,35 @@ def get_payloads(): # ============================================================================= @controller_bp.route('/stream/all') -def stream_all_agents(): +def stream_all_agents(): """ Combined SSE stream for data from all agents. This endpoint streams push data as it arrives from agents. Each message is tagged with agent_id and agent_name. """ - client_queue: queue.Queue = queue.Queue(maxsize=_AGENT_STREAM_CLIENT_QUEUE_SIZE) - with _agent_stream_subscribers_lock: - _agent_stream_subscribers.add(client_queue) - - def generate() -> Generator[str, None, None]: - last_keepalive = time.time() - keepalive_interval = 30.0 - - try: - while True: - try: - msg = client_queue.get(timeout=1.0) - last_keepalive = time.time() - yield format_sse(msg) - except queue.Empty: - now = time.time() - if now - last_keepalive >= keepalive_interval: - yield format_sse({'type': 'keepalive'}) - last_keepalive = now - finally: - with _agent_stream_subscribers_lock: - _agent_stream_subscribers.discard(client_queue) + client_queue: queue.Queue = queue.Queue(maxsize=_AGENT_STREAM_CLIENT_QUEUE_SIZE) + with _agent_stream_subscribers_lock: + _agent_stream_subscribers.add(client_queue) + + def generate() -> Generator[str, None, None]: + last_keepalive = time.time() + keepalive_interval = 30.0 + + try: + while True: + try: + msg = client_queue.get(timeout=1.0) + last_keepalive = time.time() + yield format_sse(msg) + except queue.Empty: + now = time.time() + if now - last_keepalive >= keepalive_interval: + yield format_sse({'type': 'keepalive'}) + last_keepalive = now + finally: + with _agent_stream_subscribers_lock: + _agent_stream_subscribers.discard(client_queue) response = Response(generate(), mimetype='text/event-stream') response.headers['Cache-Control'] = 'no-cache' @@ -783,7 +748,7 @@ def add_location_observation(): required = ['device_id', 'agent_name', 'agent_lat', 'agent_lon', 'rssi'] for field in required: if field not in data: - return jsonify({'status': 'error', 
'message': f'Missing required field: {field}'}), 400 + return api_error(f'Missing required field: {field}', 400) # Look up agent GPS from database if not provided agent_lat = data.get('agent_lat') @@ -797,10 +762,7 @@ def add_location_observation(): agent_lon = coords.get('lon') or coords.get('longitude') if agent_lat is None or agent_lon is None: - return jsonify({ - 'status': 'error', - 'message': 'Agent GPS coordinates required' - }), 400 + return api_error('Agent GPS coordinates required', 400) estimate = device_tracker.add_observation( device_id=data['device_id'], @@ -837,10 +799,7 @@ def estimate_location(): observations = data.get('observations', []) if len(observations) < 2: - return jsonify({ - 'status': 'error', - 'message': 'At least 2 observations required' - }), 400 + return api_error('At least 2 observations required', 400) environment = data.get('environment', 'outdoor') @@ -852,7 +811,7 @@ def estimate_location(): }) except Exception as e: logger.exception("Location estimation failed") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @controller_bp.route('/api/location/', methods=['GET']) @@ -904,7 +863,7 @@ def get_devices_near(): lon = float(request.args.get('lon', 0)) radius = float(request.args.get('radius', 100)) except (ValueError, TypeError): - return jsonify({'status': 'error', 'message': 'Invalid coordinates'}), 400 + return api_error('Invalid coordinates', 400) results = device_tracker.get_devices_near(lat, lon, radius) diff --git a/routes/correlation.py b/routes/correlation.py index 7869eb0..9097101 100644 --- a/routes/correlation.py +++ b/routes/correlation.py @@ -6,6 +6,7 @@ from flask import Blueprint, jsonify, request, Response import app as app_module from utils.correlation import get_correlations +from utils.responses import api_success, api_error from utils.logging import get_logger logger = get_logger('intercept.correlation') @@ -39,18 +40,14 @@ def get_device_correlations() -> 
Response: include_historical=include_historical ) - return jsonify({ - 'status': 'success', + return api_success(data={ 'correlations': correlations, 'wifi_count': len(wifi_devices), 'bt_count': len(bt_devices) }) except Exception as e: logger.error(f"Error calculating correlations: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) @correlation_bp.route('/analyze', methods=['POST']) @@ -67,10 +64,7 @@ def analyze_correlation() -> Response: bt_mac = data.get('bt_mac') if not wifi_mac or not bt_mac: - return jsonify({ - 'status': 'error', - 'message': 'wifi_mac and bt_mac are required' - }), 400 + return api_error('wifi_mac and bt_mac are required', 400) try: # Get device data @@ -81,16 +75,10 @@ def analyze_correlation() -> Response: bt_device = app_module.bt_devices.get(bt_mac) if not wifi_device: - return jsonify({ - 'status': 'error', - 'message': f'WiFi device {wifi_mac} not found' - }), 404 + return api_error(f'WiFi device {wifi_mac} not found', 404) if not bt_device: - return jsonify({ - 'status': 'error', - 'message': f'Bluetooth device {bt_mac} not found' - }), 404 + return api_error(f'Bluetooth device {bt_mac} not found', 404) # Calculate correlation for this specific pair correlations = get_correlations( @@ -101,19 +89,9 @@ def analyze_correlation() -> Response: ) if correlations: - return jsonify({ - 'status': 'success', - 'correlation': correlations[0] - }) + return api_success(data={'correlation': correlations[0]}) else: - return jsonify({ - 'status': 'success', - 'correlation': None, - 'message': 'No correlation detected between these devices' - }) + return api_success(data={'correlation': None}, message='No correlation detected between these devices') except Exception as e: logger.error(f"Error analyzing correlation: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) diff --git a/routes/dsc.py b/routes/dsc.py index 668e34c..4012674 100644 
--- a/routes/dsc.py +++ b/routes/dsc.py @@ -21,6 +21,7 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.constants import ( DSC_VHF_FREQUENCY_MHZ, @@ -380,7 +381,7 @@ def start_decoding() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f"Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}") else: diff --git a/routes/gps.py b/routes/gps.py index aab2b9e..23b1ce7 100644 --- a/routes/gps.py +++ b/routes/gps.py @@ -8,6 +8,7 @@ from collections.abc import Generator from flask import Blueprint, Response, jsonify +from utils.responses import api_success, api_error from utils.gps import ( GPSPosition, GPSSkyData, @@ -21,7 +22,7 @@ from utils.gps import ( stop_gpsd_daemon, ) from utils.logging import get_logger -from utils.sse import sse_stream_fanout +from utils.sse import sse_stream_fanout logger = get_logger('intercept.gps') @@ -65,17 +66,17 @@ def auto_connect_gps(): If gpsd is not running, attempts to detect GPS devices and start gpsd. Returns current status if already connected. """ - # Check if already running - reader = get_gps_reader() - if reader and reader.is_running: - # Ensure stream callbacks are attached for this process. - reader.add_callback(_position_callback) - reader.add_sky_callback(_sky_callback) - position = reader.position - sky = reader.sky - return jsonify({ - 'status': 'connected', - 'source': 'gpsd', + # Check if already running + reader = get_gps_reader() + if reader and reader.is_running: + # Ensure stream callbacks are attached for this process. 
+ reader.add_callback(_position_callback) + reader.add_sky_callback(_sky_callback) + position = reader.position + sky = reader.sky + return jsonify({ + 'status': 'connected', + 'source': 'gpsd', 'has_fix': position is not None, 'position': position.to_dict() if position else None, 'sky': sky.to_dict() if sky else None, @@ -207,22 +208,22 @@ def get_position(): }) -@gps_bp.route('/satellites') -def get_satellites(): - """Get current satellite sky view data.""" - reader = get_gps_reader() - - if not reader or not reader.is_running: - return jsonify({ - 'status': 'waiting', - 'running': False, - 'message': 'GPS client not running' - }) - - sky = reader.sky - if sky: - return jsonify({ - 'status': 'ok', +@gps_bp.route('/satellites') +def get_satellites(): + """Get current satellite sky view data.""" + reader = get_gps_reader() + + if not reader or not reader.is_running: + return jsonify({ + 'status': 'waiting', + 'running': False, + 'message': 'GPS client not running' + }) + + sky = reader.sky + if sky: + return jsonify({ + 'status': 'ok', 'sky': sky.to_dict() }) else: @@ -232,19 +233,19 @@ def get_satellites(): }) -@gps_bp.route('/stream') -def stream_gps(): - """SSE stream of GPS position and sky updates.""" - response = Response( - sse_stream_fanout( - source_queue=_gps_queue, - channel_key='gps', - timeout=1.0, - keepalive_interval=30.0, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' +@gps_bp.route('/stream') +def stream_gps(): + """SSE stream of GPS position and sky updates.""" + response = Response( + sse_stream_fanout( + source_queue=_gps_queue, + channel_key='gps', + timeout=1.0, + keepalive_interval=30.0, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' return response diff --git 
a/routes/listening_post.py b/routes/listening_post.py deleted file mode 100644 index f637ef9..0000000 --- a/routes/listening_post.py +++ /dev/null @@ -1,2346 +0,0 @@ -"""Receiver routes for radio monitoring and frequency scanning.""" - -from __future__ import annotations - -import json -import math -import os -import queue -import select -import signal -import shutil -import struct -import subprocess -import threading -import time -from datetime import datetime -from typing import Any, Dict, Generator, List, Optional - -from flask import Blueprint, jsonify, request, Response - -import app as app_module -from utils.logging import get_logger -from utils.sse import sse_stream_fanout -from utils.event_pipeline import process_event -from utils.constants import ( - SSE_QUEUE_TIMEOUT, - SSE_KEEPALIVE_INTERVAL, - PROCESS_TERMINATE_TIMEOUT, -) -from utils.sdr import SDRFactory, SDRType - -logger = get_logger('intercept.receiver') - -receiver_bp = Blueprint('receiver', __name__, url_prefix='/receiver') - -# ============================================ -# GLOBAL STATE -# ============================================ - -# Audio demodulation state -audio_process = None -audio_rtl_process = None -audio_lock = threading.Lock() -audio_start_lock = threading.Lock() -audio_running = False -audio_frequency = 0.0 -audio_modulation = 'fm' -audio_source = 'process' -audio_start_token = 0 - -# Scanner state -scanner_thread: Optional[threading.Thread] = None -scanner_running = False -scanner_lock = threading.Lock() -scanner_paused = False -scanner_current_freq = 0.0 -scanner_active_device: Optional[int] = None -scanner_active_sdr_type: str = 'rtlsdr' -receiver_active_device: Optional[int] = None -receiver_active_sdr_type: str = 'rtlsdr' -scanner_power_process: Optional[subprocess.Popen] = None -scanner_config = { - 'start_freq': 88.0, - 'end_freq': 108.0, - 'step': 0.1, - 'modulation': 'wfm', - 'squelch': 0, - 'dwell_time': 10.0, # Seconds to stay on active frequency - 'scan_delay': 0.1, # 
Seconds between frequency hops (keep low for fast scanning) - 'device': 0, - 'gain': 40, - 'bias_t': False, # Bias-T power for external LNA - 'sdr_type': 'rtlsdr', # SDR type: rtlsdr, hackrf, airspy, limesdr, sdrplay - 'scan_method': 'power', # power (rtl_power) or classic (rtl_fm hop) - 'snr_threshold': 8, -} - -# Activity log -activity_log: List[Dict] = [] -activity_log_lock = threading.Lock() -MAX_LOG_ENTRIES = 500 - -# SSE queue for scanner events -scanner_queue: queue.Queue = queue.Queue(maxsize=100) - - -# ============================================ -# HELPER FUNCTIONS -# ============================================ - -def find_rtl_fm() -> str | None: - """Find rtl_fm binary.""" - return shutil.which('rtl_fm') - - -def find_rtl_power() -> str | None: - """Find rtl_power binary.""" - return shutil.which('rtl_power') - - -def find_rx_fm() -> str | None: - """Find rx_fm binary (SoapySDR FM demodulator for HackRF/Airspy/LimeSDR).""" - return shutil.which('rx_fm') - - -def find_ffmpeg() -> str | None: - """Find ffmpeg for audio encoding.""" - return shutil.which('ffmpeg') - - -VALID_MODULATIONS = ['fm', 'wfm', 'am', 'usb', 'lsb'] - - -def normalize_modulation(value: str) -> str: - """Normalize and validate modulation string.""" - mod = str(value or '').lower().strip() - if mod not in VALID_MODULATIONS: - raise ValueError(f'Invalid modulation. 
Use: {", ".join(VALID_MODULATIONS)}') - return mod - - -def _rtl_fm_demod_mode(modulation: str) -> str: - """Map UI modulation names to rtl_fm demod tokens.""" - mod = str(modulation or '').lower().strip() - return 'wbfm' if mod == 'wfm' else mod - - -def _wav_header(sample_rate: int = 48000, bits_per_sample: int = 16, channels: int = 1) -> bytes: - """Create a streaming WAV header with unknown data length.""" - bytes_per_sample = bits_per_sample // 8 - byte_rate = sample_rate * channels * bytes_per_sample - block_align = channels * bytes_per_sample - return ( - b'RIFF' - + struct.pack(' MAX_LOG_ENTRIES: - activity_log.pop() - - # Also push to SSE queue - try: - scanner_queue.put_nowait({ - 'type': 'log', - 'entry': entry - }) - except queue.Full: - pass - - -# ============================================ -# SCANNER IMPLEMENTATION -# ============================================ - -def scanner_loop(): - """Main scanner loop - scans frequencies looking for signals.""" - global scanner_running, scanner_paused, scanner_current_freq, scanner_skip_signal - global audio_process, audio_rtl_process, audio_running, audio_frequency - - logger.info("Scanner thread started") - add_activity_log('scanner_start', scanner_config['start_freq'], - f"Scanning {scanner_config['start_freq']}-{scanner_config['end_freq']} MHz") - - rtl_fm_path = find_rtl_fm() - - if not rtl_fm_path: - logger.error("rtl_fm not found") - add_activity_log('error', 0, 'rtl_fm not found') - scanner_running = False - return - - current_freq = scanner_config['start_freq'] - last_signal_time = 0 - signal_detected = False - - try: - while scanner_running: - # Check if paused - if scanner_paused: - time.sleep(0.1) - continue - - # Read config values on each iteration (allows live updates) - step_mhz = scanner_config['step'] / 1000.0 - squelch = scanner_config['squelch'] - mod = scanner_config['modulation'] - gain = scanner_config['gain'] - device = scanner_config['device'] - - scanner_current_freq = current_freq - 
- # Notify clients of frequency change - try: - scanner_queue.put_nowait({ - 'type': 'freq_change', - 'frequency': current_freq, - 'scanning': not signal_detected, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - - # Start rtl_fm at this frequency - freq_hz = int(current_freq * 1e6) - - # Sample rates - if mod == 'wfm': - sample_rate = 170000 - resample_rate = 32000 - elif mod in ['usb', 'lsb']: - sample_rate = 12000 - resample_rate = 12000 - else: - sample_rate = 24000 - resample_rate = 24000 - - # Don't use squelch in rtl_fm - we want to analyze raw audio - rtl_cmd = [ - rtl_fm_path, - '-M', _rtl_fm_demod_mode(mod), - '-f', str(freq_hz), - '-s', str(sample_rate), - '-r', str(resample_rate), - '-g', str(gain), - '-d', str(device), - ] - # Add bias-t flag if enabled (for external LNA power) - if scanner_config.get('bias_t', False): - rtl_cmd.append('-T') - - try: - # Start rtl_fm - rtl_proc = subprocess.Popen( - rtl_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL - ) - - # Read audio data for analysis - audio_data = b'' - - # Read audio samples for a short period - sample_duration = 0.25 # 250ms - balance between speed and detection - bytes_needed = int(resample_rate * 2 * sample_duration) # 16-bit mono - - while len(audio_data) < bytes_needed and scanner_running: - chunk = rtl_proc.stdout.read(4096) - if not chunk: - break - audio_data += chunk - - # Clean up rtl_fm - rtl_proc.terminate() - try: - rtl_proc.wait(timeout=1) - except subprocess.TimeoutExpired: - rtl_proc.kill() - - # Analyze audio level - audio_detected = False - rms = 0 - threshold = 500 - if len(audio_data) > 100: - import struct - samples = struct.unpack(f'{len(audio_data)//2}h', audio_data) - # Calculate RMS level (root mean square) - rms = (sum(s*s for s in samples) / len(samples)) ** 0.5 - - # Threshold based on squelch setting - # Lower squelch = more sensitive (lower threshold) - # squelch 0 = very 
sensitive, squelch 100 = only strong signals - if mod == 'wfm': - # WFM: threshold 500-10000 based on squelch - threshold = 500 + (squelch * 95) - min_threshold = 1500 - else: - # AM/NFM: threshold 300-6500 based on squelch - threshold = 300 + (squelch * 62) - min_threshold = 900 - - effective_threshold = max(threshold, min_threshold) - audio_detected = rms > effective_threshold - - # Send level info to clients - try: - scanner_queue.put_nowait({ - 'type': 'scan_update', - 'frequency': current_freq, - 'level': int(rms), - 'threshold': int(effective_threshold) if 'effective_threshold' in dir() else 0, - 'detected': audio_detected, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - - if audio_detected and scanner_running: - if not signal_detected: - # New signal found! - signal_detected = True - last_signal_time = time.time() - add_activity_log('signal_found', current_freq, - f'Signal detected on {current_freq:.3f} MHz ({mod.upper()})') - logger.info(f"Signal found at {current_freq} MHz") - - # Start audio streaming for user - _start_audio_stream(current_freq, mod) - - try: - snr_db = round(10 * math.log10(rms / effective_threshold), 1) if rms > 0 and effective_threshold > 0 else 0.0 - scanner_queue.put_nowait({ - 'type': 'signal_found', - 'frequency': current_freq, - 'modulation': mod, - 'audio_streaming': True, - 'level': int(rms), - 'threshold': int(effective_threshold), - 'snr': snr_db, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - - # Check for skip signal - if scanner_skip_signal: - scanner_skip_signal = False - signal_detected = False - _stop_audio_stream() - try: - scanner_queue.put_nowait({ - 'type': 'signal_skipped', - 'frequency': current_freq - }) - except queue.Full: - pass - # Move to next frequency (step is in kHz, convert to MHz) - current_freq += step_mhz - if current_freq > scanner_config['end_freq']: 
- current_freq = scanner_config['start_freq'] - continue - - # Stay on this frequency (dwell) but check periodically - dwell_start = time.time() - while (time.time() - dwell_start) < scanner_config['dwell_time'] and scanner_running: - if scanner_skip_signal: - break - time.sleep(0.2) - - last_signal_time = time.time() - - # After dwell, move on to keep scanning - if scanner_running and not scanner_skip_signal: - signal_detected = False - _stop_audio_stream() - try: - scanner_queue.put_nowait({ - 'type': 'signal_lost', - 'frequency': current_freq, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - - current_freq += step_mhz - if current_freq > scanner_config['end_freq']: - current_freq = scanner_config['start_freq'] - add_activity_log('scan_cycle', current_freq, 'Scan cycle complete') - time.sleep(scanner_config['scan_delay']) - - else: - # No signal at this frequency - if signal_detected: - # Signal lost - duration = time.time() - last_signal_time + scanner_config['dwell_time'] - add_activity_log('signal_lost', current_freq, - f'Signal lost after {duration:.1f}s') - signal_detected = False - - # Stop audio - _stop_audio_stream() - - try: - scanner_queue.put_nowait({ - 'type': 'signal_lost', - 'frequency': current_freq - }) - except queue.Full: - pass - - # Move to next frequency (step is in kHz, convert to MHz) - current_freq += step_mhz - if current_freq > scanner_config['end_freq']: - current_freq = scanner_config['start_freq'] - add_activity_log('scan_cycle', current_freq, 'Scan cycle complete') - - time.sleep(scanner_config['scan_delay']) - - except Exception as e: - logger.error(f"Scanner error at {current_freq} MHz: {e}") - time.sleep(0.5) - - except Exception as e: - logger.error(f"Scanner loop error: {e}") - finally: - scanner_running = False - _stop_audio_stream() - add_activity_log('scanner_stop', scanner_current_freq, 'Scanner stopped') - logger.info("Scanner thread stopped") - - 
-def scanner_loop_power(): - """Power sweep scanner using rtl_power to detect peaks.""" - global scanner_running, scanner_paused, scanner_current_freq, scanner_power_process - - logger.info("Power sweep scanner thread started") - add_activity_log('scanner_start', scanner_config['start_freq'], - f"Power sweep {scanner_config['start_freq']}-{scanner_config['end_freq']} MHz") - - rtl_power_path = find_rtl_power() - if not rtl_power_path: - logger.error("rtl_power not found") - add_activity_log('error', 0, 'rtl_power not found') - scanner_running = False - return - - try: - while scanner_running: - if scanner_paused: - time.sleep(0.1) - continue - - start_mhz = scanner_config['start_freq'] - end_mhz = scanner_config['end_freq'] - step_khz = scanner_config['step'] - gain = scanner_config['gain'] - device = scanner_config['device'] - squelch = scanner_config['squelch'] - mod = scanner_config['modulation'] - - # Configure sweep - bin_hz = max(1000, int(step_khz * 1000)) - start_hz = int(start_mhz * 1e6) - end_hz = int(end_mhz * 1e6) - # Integration time per sweep (seconds) - integration = max(0.3, min(1.0, scanner_config.get('scan_delay', 0.5))) - - cmd = [ - rtl_power_path, - '-f', f'{start_hz}:{end_hz}:{bin_hz}', - '-i', f'{integration}', - '-1', - '-g', str(gain), - '-d', str(device), - ] - - try: - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) - scanner_power_process = proc - stdout, _ = proc.communicate(timeout=15) - except subprocess.TimeoutExpired: - proc.kill() - stdout = b'' - finally: - scanner_power_process = None - - if not scanner_running: - break - - if not stdout: - add_activity_log('error', start_mhz, 'Power sweep produced no data') - try: - scanner_queue.put_nowait({ - 'type': 'scan_update', - 'frequency': end_mhz, - 'level': 0, - 'threshold': int(float(scanner_config.get('snr_threshold', 12)) * 100), - 'detected': False, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - 
except queue.Full: - pass - time.sleep(0.2) - continue - - lines = stdout.decode(errors='ignore').splitlines() - segments = [] - for line in lines: - if not line or line.startswith('#'): - continue - - parts = [p.strip() for p in line.split(',')] - # Find start_hz token - start_idx = None - for i, tok in enumerate(parts): - try: - val = float(tok) - except ValueError: - continue - if val > 1e5: - start_idx = i - break - if start_idx is None or len(parts) < start_idx + 6: - continue - - try: - sweep_start = float(parts[start_idx]) - sweep_end = float(parts[start_idx + 1]) - sweep_bin = float(parts[start_idx + 2]) - raw_values = [] - for v in parts[start_idx + 3:]: - try: - raw_values.append(float(v)) - except ValueError: - continue - # rtl_power may include a samples field before the power list - if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]): - raw_values = raw_values[1:] - bin_values = raw_values - except ValueError: - continue - - if not bin_values: - continue - - segments.append((sweep_start, sweep_end, sweep_bin, bin_values)) - - if not segments: - add_activity_log('error', start_mhz, 'Power sweep bins missing') - try: - scanner_queue.put_nowait({ - 'type': 'scan_update', - 'frequency': end_mhz, - 'level': 0, - 'threshold': int(float(scanner_config.get('snr_threshold', 12)) * 100), - 'detected': False, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - time.sleep(0.2) - continue - - # Process segments in ascending frequency order to avoid backtracking in UI - segments.sort(key=lambda s: s[0]) - total_bins = sum(len(seg[3]) for seg in segments) - if total_bins <= 0: - time.sleep(0.2) - continue - segment_offset = 0 - - for sweep_start, sweep_end, sweep_bin, bin_values in segments: - # Noise floor (median) - sorted_vals = sorted(bin_values) - mid = len(sorted_vals) // 2 - noise_floor = sorted_vals[mid] - - # SNR threshold (dB) - snr_threshold = 
float(scanner_config.get('snr_threshold', 12)) - - # Emit progress updates (throttled) - emit_stride = max(1, len(bin_values) // 60) - for idx, val in enumerate(bin_values): - if idx % emit_stride != 0 and idx != len(bin_values) - 1: - continue - freq_hz = sweep_start + sweep_bin * idx - scanner_current_freq = freq_hz / 1e6 - snr = val - noise_floor - level = int(max(0, snr) * 100) - threshold = int(snr_threshold * 100) - progress = min(1.0, (segment_offset + idx) / max(1, total_bins - 1)) - try: - scanner_queue.put_nowait({ - 'type': 'scan_update', - 'frequency': scanner_current_freq, - 'level': level, - 'threshold': threshold, - 'detected': snr >= snr_threshold, - 'progress': progress, - 'range_start': scanner_config['start_freq'], - 'range_end': scanner_config['end_freq'] - }) - except queue.Full: - pass - segment_offset += len(bin_values) - - # Detect peaks (clusters above threshold) - peaks = [] - in_cluster = False - peak_idx = None - peak_val = None - for idx, val in enumerate(bin_values): - snr = val - noise_floor - if snr >= snr_threshold: - if not in_cluster: - in_cluster = True - peak_idx = idx - peak_val = val - else: - if val > peak_val: - peak_val = val - peak_idx = idx - else: - if in_cluster and peak_idx is not None: - peaks.append((peak_idx, peak_val)) - in_cluster = False - peak_idx = None - peak_val = None - if in_cluster and peak_idx is not None: - peaks.append((peak_idx, peak_val)) - - for idx, val in peaks: - freq_hz = sweep_start + sweep_bin * (idx + 0.5) - freq_mhz = freq_hz / 1e6 - snr = val - noise_floor - level = int(max(0, snr) * 100) - threshold = int(snr_threshold * 100) - add_activity_log('signal_found', freq_mhz, - f'Peak detected at {freq_mhz:.3f} MHz ({mod.upper()})') - try: - scanner_queue.put_nowait({ - 'type': 'signal_found', - 'frequency': freq_mhz, - 'modulation': mod, - 'audio_streaming': False, - 'level': level, - 'threshold': threshold, - 'snr': round(snr, 1), - 'range_start': scanner_config['start_freq'], - 'range_end': 
scanner_config['end_freq'] - }) - except queue.Full: - pass - - add_activity_log('scan_cycle', start_mhz, 'Power sweep complete') - time.sleep(max(0.1, scanner_config.get('scan_delay', 0.5))) - - except Exception as e: - logger.error(f"Power sweep scanner error: {e}") - finally: - scanner_running = False - add_activity_log('scanner_stop', scanner_current_freq, 'Scanner stopped') - logger.info("Power sweep scanner thread stopped") - - -def _start_audio_stream( - frequency: float, - modulation: str, - *, - device: int | None = None, - sdr_type: str | None = None, - gain: int | None = None, - squelch: int | None = None, - bias_t: bool | None = None, -): - """Start audio streaming at given frequency.""" - global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_modulation - - # Stop existing stream and snapshot config under lock - with audio_lock: - _stop_audio_stream_internal() - - ffmpeg_path = find_ffmpeg() - if not ffmpeg_path: - logger.error("ffmpeg not found") - return - - # Snapshot runtime tuning config so the spawned demod command cannot - # drift if shared scanner_config changes while startup is in-flight. 
- device_index = int(device if device is not None else scanner_config.get('device', 0)) - gain_value = int(gain if gain is not None else scanner_config.get('gain', 40)) - squelch_value = int(squelch if squelch is not None else scanner_config.get('squelch', 0)) - bias_t_enabled = bool(scanner_config.get('bias_t', False) if bias_t is None else bias_t) - sdr_type_str = str(sdr_type if sdr_type is not None else scanner_config.get('sdr_type', 'rtlsdr')).lower() - - # Build commands outside lock (no blocking I/O, just command construction) - try: - resolved_sdr_type = SDRType(sdr_type_str) - except ValueError: - resolved_sdr_type = SDRType.RTL_SDR - - # Set sample rates based on modulation - if modulation == 'wfm': - sample_rate = 170000 - resample_rate = 32000 - elif modulation in ['usb', 'lsb']: - sample_rate = 12000 - resample_rate = 12000 - else: - sample_rate = 24000 - resample_rate = 24000 - - # Build the SDR command based on device type - if resolved_sdr_type == SDRType.RTL_SDR: - rtl_fm_path = find_rtl_fm() - if not rtl_fm_path: - logger.error("rtl_fm not found") - return - - freq_hz = int(frequency * 1e6) - sdr_cmd = [ - rtl_fm_path, - '-M', _rtl_fm_demod_mode(modulation), - '-f', str(freq_hz), - '-s', str(sample_rate), - '-r', str(resample_rate), - '-g', str(gain_value), - '-d', str(device_index), - '-l', str(squelch_value), - ] - if bias_t_enabled: - sdr_cmd.append('-T') - else: - rx_fm_path = find_rx_fm() - if not rx_fm_path: - logger.error(f"rx_fm not found - required for {resolved_sdr_type.value}. 
Install SoapySDR utilities.") - return - - sdr_device = SDRFactory.create_default_device(resolved_sdr_type, index=device_index) - builder = SDRFactory.get_builder(resolved_sdr_type) - sdr_cmd = builder.build_fm_demod_command( - device=sdr_device, - frequency_mhz=frequency, - sample_rate=resample_rate, - gain=float(gain_value), - modulation=modulation, - squelch=squelch_value, - bias_t=bias_t_enabled, - ) - sdr_cmd[0] = rx_fm_path - - encoder_cmd = [ - ffmpeg_path, - '-hide_banner', - '-loglevel', 'error', - '-fflags', 'nobuffer', - '-flags', 'low_delay', - '-probesize', '32', - '-analyzeduration', '0', - '-f', 's16le', - '-ar', str(resample_rate), - '-ac', '1', - '-i', 'pipe:0', - '-acodec', 'pcm_s16le', - '-ar', '44100', - '-f', 'wav', - 'pipe:1' - ] - - # Retry loop outside lock — spawning + health check sleeps don't block - # other operations. audio_start_lock already serializes callers. - try: - rtl_stderr_log = '/tmp/rtl_fm_stderr.log' - ffmpeg_stderr_log = '/tmp/ffmpeg_stderr.log' - logger.info(f"Starting audio: {frequency} MHz, mod={modulation}, device={device_index}") - - new_rtl_proc = None - new_audio_proc = None - max_attempts = 3 - for attempt in range(max_attempts): - new_rtl_proc = None - new_audio_proc = None - rtl_err_handle = None - ffmpeg_err_handle = None - try: - rtl_err_handle = open(rtl_stderr_log, 'w') - ffmpeg_err_handle = open(ffmpeg_stderr_log, 'w') - new_rtl_proc = subprocess.Popen( - sdr_cmd, - stdout=subprocess.PIPE, - stderr=rtl_err_handle, - bufsize=0, - start_new_session=True - ) - new_audio_proc = subprocess.Popen( - encoder_cmd, - stdin=new_rtl_proc.stdout, - stdout=subprocess.PIPE, - stderr=ffmpeg_err_handle, - bufsize=0, - start_new_session=True - ) - if new_rtl_proc.stdout: - new_rtl_proc.stdout.close() - finally: - if rtl_err_handle: - rtl_err_handle.close() - if ffmpeg_err_handle: - ffmpeg_err_handle.close() - - # Brief delay to check if process started successfully - time.sleep(0.3) - - if (new_rtl_proc and 
new_rtl_proc.poll() is not None) or ( - new_audio_proc and new_audio_proc.poll() is not None - ): - rtl_stderr = '' - ffmpeg_stderr = '' - try: - with open(rtl_stderr_log, 'r') as f: - rtl_stderr = f.read().strip() - except Exception: - pass - try: - with open(ffmpeg_stderr_log, 'r') as f: - ffmpeg_stderr = f.read().strip() - except Exception: - pass - - if 'usb_claim_interface' in rtl_stderr and attempt < max_attempts - 1: - logger.warning(f"USB device busy (attempt {attempt + 1}/{max_attempts}), waiting for release...") - if new_audio_proc: - try: - new_audio_proc.terminate() - new_audio_proc.wait(timeout=0.5) - except Exception: - pass - if new_rtl_proc: - try: - new_rtl_proc.terminate() - new_rtl_proc.wait(timeout=0.5) - except Exception: - pass - time.sleep(1.0) - continue - - if new_audio_proc and new_audio_proc.poll() is None: - try: - new_audio_proc.terminate() - new_audio_proc.wait(timeout=0.5) - except Exception: - pass - if new_rtl_proc and new_rtl_proc.poll() is None: - try: - new_rtl_proc.terminate() - new_rtl_proc.wait(timeout=0.5) - except Exception: - pass - new_audio_proc = None - new_rtl_proc = None - - logger.error( - f"Audio pipeline exited immediately. 
rtl_fm stderr: {rtl_stderr}, ffmpeg stderr: {ffmpeg_stderr}" - ) - return - - # Pipeline started successfully - break - - # Verify pipeline is still alive, then install under lock - if ( - not new_audio_proc - or not new_rtl_proc - or new_audio_proc.poll() is not None - or new_rtl_proc.poll() is not None - ): - logger.warning("Audio pipeline did not remain alive after startup") - # Clean up failed processes - if new_audio_proc: - try: - new_audio_proc.terminate() - new_audio_proc.wait(timeout=0.5) - except Exception: - pass - if new_rtl_proc: - try: - new_rtl_proc.terminate() - new_rtl_proc.wait(timeout=0.5) - except Exception: - pass - return - - # Install processes under lock - with audio_lock: - audio_rtl_process = new_rtl_proc - audio_process = new_audio_proc - audio_running = True - audio_frequency = frequency - audio_modulation = modulation - logger.info(f"Audio stream started: {frequency} MHz ({modulation}) via {resolved_sdr_type.value}") - - except Exception as e: - logger.error(f"Failed to start audio stream: {e}") - - -def _stop_audio_stream(): - """Stop audio streaming.""" - with audio_lock: - _stop_audio_stream_internal() - - -def _stop_audio_stream_internal(): - """Internal stop (must hold lock).""" - global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_source - - # Set flag first to stop any streaming - audio_running = False - audio_frequency = 0.0 - previous_source = audio_source - audio_source = 'process' - - if previous_source == 'waterfall': - try: - from routes.waterfall_websocket import stop_shared_monitor_from_capture - - stop_shared_monitor_from_capture() - except Exception: - pass - - had_processes = audio_process is not None or audio_rtl_process is not None - - # Kill the pipeline processes and their groups - if audio_process: - try: - # Kill entire process group (SDR demod + ffmpeg) - try: - os.killpg(os.getpgid(audio_process.pid), signal.SIGKILL) - except (ProcessLookupError, PermissionError): - 
audio_process.kill() - audio_process.wait(timeout=0.5) - except Exception: - pass - - if audio_rtl_process: - try: - try: - os.killpg(os.getpgid(audio_rtl_process.pid), signal.SIGKILL) - except (ProcessLookupError, PermissionError): - audio_rtl_process.kill() - audio_rtl_process.wait(timeout=0.5) - except Exception: - pass - - audio_process = None - audio_rtl_process = None - - # Brief pause for SDR device USB interface to be released by kernel. - # The _start_audio_stream retry loop handles longer contention windows - # so only a minimal delay is needed here. - if had_processes: - time.sleep(0.15) - - -# ============================================ -# API ENDPOINTS -# ============================================ - -@receiver_bp.route('/tools') -def check_tools() -> Response: - """Check for required tools.""" - rtl_fm = find_rtl_fm() - rtl_power = find_rtl_power() - rx_fm = find_rx_fm() - ffmpeg = find_ffmpeg() - - # Determine which SDR types are supported - supported_sdr_types = [] - if rtl_fm: - supported_sdr_types.append('rtlsdr') - if rx_fm: - # rx_fm from SoapySDR supports these types - supported_sdr_types.extend(['hackrf', 'airspy', 'limesdr', 'sdrplay']) - - return jsonify({ - 'rtl_fm': rtl_fm is not None, - 'rtl_power': rtl_power is not None, - 'rx_fm': rx_fm is not None, - 'ffmpeg': ffmpeg is not None, - 'available': (rtl_fm is not None or rx_fm is not None) and ffmpeg is not None, - 'supported_sdr_types': supported_sdr_types - }) - - -@receiver_bp.route('/scanner/start', methods=['POST']) -def start_scanner() -> Response: - """Start the frequency scanner.""" - global scanner_thread, scanner_running, scanner_config, scanner_active_device, scanner_active_sdr_type, receiver_active_device, receiver_active_sdr_type - - with scanner_lock: - if scanner_running: - return jsonify({ - 'status': 'error', - 'message': 'Scanner already running' - }), 409 - - # Clear stale queue entries so UI updates immediately - try: - while True: - scanner_queue.get_nowait() - 
except queue.Empty: - pass - - data = request.json or {} - - # Update scanner config - try: - scanner_config['start_freq'] = float(data.get('start_freq', 88.0)) - scanner_config['end_freq'] = float(data.get('end_freq', 108.0)) - scanner_config['step'] = float(data.get('step', 0.1)) - scanner_config['modulation'] = normalize_modulation(data.get('modulation', 'wfm')) - scanner_config['squelch'] = int(data.get('squelch', 0)) - scanner_config['dwell_time'] = float(data.get('dwell_time', 3.0)) - scanner_config['scan_delay'] = float(data.get('scan_delay', 0.5)) - scanner_config['device'] = int(data.get('device', 0)) - scanner_config['gain'] = int(data.get('gain', 40)) - scanner_config['bias_t'] = bool(data.get('bias_t', False)) - scanner_config['sdr_type'] = str(data.get('sdr_type', 'rtlsdr')).lower() - scanner_config['scan_method'] = str(data.get('scan_method', '')).lower().strip() - if data.get('snr_threshold') is not None: - scanner_config['snr_threshold'] = float(data.get('snr_threshold')) - except (ValueError, TypeError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid parameter: {e}' - }), 400 - - # Validate - if scanner_config['start_freq'] >= scanner_config['end_freq']: - return jsonify({ - 'status': 'error', - 'message': 'start_freq must be less than end_freq' - }), 400 - - # Decide scan method - if not scanner_config['scan_method']: - scanner_config['scan_method'] = 'power' if find_rtl_power() else 'classic' - - sdr_type = scanner_config['sdr_type'] - - # Power scan only supports RTL-SDR for now - if scanner_config['scan_method'] == 'power': - if sdr_type != 'rtlsdr' or not find_rtl_power(): - scanner_config['scan_method'] = 'classic' - - # Check tools based on chosen method - if scanner_config['scan_method'] == 'power': - if not find_rtl_power(): - return jsonify({ - 'status': 'error', - 'message': 'rtl_power not found. Install rtl-sdr tools.' 
- }), 503 - # Release listening device if active - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - # Claim device for scanner - error = app_module.claim_sdr_device(scanner_config['device'], 'scanner', scanner_config['sdr_type']) - if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 - scanner_active_device = scanner_config['device'] - scanner_active_sdr_type = scanner_config['sdr_type'] - scanner_running = True - scanner_thread = threading.Thread(target=scanner_loop_power, daemon=True) - scanner_thread.start() - else: - if sdr_type == 'rtlsdr': - if not find_rtl_fm(): - return jsonify({ - 'status': 'error', - 'message': 'rtl_fm not found. Install rtl-sdr tools.' - }), 503 - else: - if not find_rx_fm(): - return jsonify({ - 'status': 'error', - 'message': f'rx_fm not found. Install SoapySDR utilities for {sdr_type}.' 
- }), 503 - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - error = app_module.claim_sdr_device(scanner_config['device'], 'scanner', scanner_config['sdr_type']) - if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 - scanner_active_device = scanner_config['device'] - scanner_active_sdr_type = scanner_config['sdr_type'] - - scanner_running = True - scanner_thread = threading.Thread(target=scanner_loop, daemon=True) - scanner_thread.start() - - return jsonify({ - 'status': 'started', - 'config': scanner_config - }) - - -@receiver_bp.route('/scanner/stop', methods=['POST']) -def stop_scanner() -> Response: - """Stop the frequency scanner.""" - global scanner_running, scanner_active_device, scanner_active_sdr_type, scanner_power_process - - scanner_running = False - _stop_audio_stream() - if scanner_power_process and scanner_power_process.poll() is None: - try: - scanner_power_process.terminate() - scanner_power_process.wait(timeout=1) - except Exception: - try: - scanner_power_process.kill() - except Exception: - pass - scanner_power_process = None - if scanner_active_device is not None: - app_module.release_sdr_device(scanner_active_device, scanner_active_sdr_type) - scanner_active_device = None - scanner_active_sdr_type = 'rtlsdr' - - return jsonify({'status': 'stopped'}) - - -@receiver_bp.route('/scanner/pause', methods=['POST']) -def pause_scanner() -> Response: - """Pause/resume the scanner.""" - global scanner_paused - - scanner_paused = not scanner_paused - - if scanner_paused: - add_activity_log('scanner_pause', scanner_current_freq, 'Scanner paused') - else: - add_activity_log('scanner_resume', scanner_current_freq, 'Scanner resumed') - - return jsonify({ - 'status': 'paused' if scanner_paused else 'resumed', - 'paused': scanner_paused - }) - - -# Flag to 
trigger skip from API -scanner_skip_signal = False - - -@receiver_bp.route('/scanner/skip', methods=['POST']) -def skip_signal() -> Response: - """Skip current signal and continue scanning.""" - global scanner_skip_signal - - if not scanner_running: - return jsonify({ - 'status': 'error', - 'message': 'Scanner not running' - }), 400 - - scanner_skip_signal = True - add_activity_log('signal_skip', scanner_current_freq, f'Skipped signal at {scanner_current_freq:.3f} MHz') - - return jsonify({ - 'status': 'skipped', - 'frequency': scanner_current_freq - }) - - -@receiver_bp.route('/scanner/config', methods=['POST']) -def update_scanner_config() -> Response: - """Update scanner config while running (step, squelch, gain, dwell).""" - data = request.json or {} - - updated = [] - - if 'step' in data: - scanner_config['step'] = float(data['step']) - updated.append(f"step={data['step']}kHz") - - if 'squelch' in data: - scanner_config['squelch'] = int(data['squelch']) - updated.append(f"squelch={data['squelch']}") - - if 'gain' in data: - scanner_config['gain'] = int(data['gain']) - updated.append(f"gain={data['gain']}") - - if 'dwell_time' in data: - scanner_config['dwell_time'] = int(data['dwell_time']) - updated.append(f"dwell={data['dwell_time']}s") - - if 'modulation' in data: - try: - scanner_config['modulation'] = normalize_modulation(data['modulation']) - updated.append(f"mod={data['modulation']}") - except (ValueError, TypeError) as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 400 - - if updated: - logger.info(f"Scanner config updated: {', '.join(updated)}") - - return jsonify({ - 'status': 'updated', - 'config': scanner_config - }) - - -@receiver_bp.route('/scanner/status') -def scanner_status() -> Response: - """Get scanner status.""" - return jsonify({ - 'running': scanner_running, - 'paused': scanner_paused, - 'current_freq': scanner_current_freq, - 'config': scanner_config, - 'audio_streaming': audio_running, - 'audio_frequency': 
audio_frequency - }) - - -@receiver_bp.route('/scanner/stream') -def stream_scanner_events() -> Response: - """SSE stream for scanner events.""" - def _on_msg(msg: dict[str, Any]) -> None: - process_event('receiver_scanner', msg, msg.get('type')) - - response = Response( - sse_stream_fanout( - source_queue=scanner_queue, - channel_key='receiver_scanner', - timeout=SSE_QUEUE_TIMEOUT, - keepalive_interval=SSE_KEEPALIVE_INTERVAL, - on_message=_on_msg, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - return response - - -@receiver_bp.route('/scanner/log') -def get_activity_log() -> Response: - """Get activity log.""" - limit = request.args.get('limit', 100, type=int) - with activity_log_lock: - return jsonify({ - 'log': activity_log[:limit], - 'total': len(activity_log) - }) - - -@receiver_bp.route('/scanner/log/clear', methods=['POST']) -def clear_activity_log() -> Response: - """Clear activity log.""" - with activity_log_lock: - activity_log.clear() - return jsonify({'status': 'cleared'}) - - -@receiver_bp.route('/presets') -def get_presets() -> Response: - """Get scanner presets.""" - presets = [ - {'name': 'FM Broadcast', 'start': 88.0, 'end': 108.0, 'step': 0.2, 'mod': 'wfm'}, - {'name': 'Air Band', 'start': 118.0, 'end': 137.0, 'step': 0.025, 'mod': 'am'}, - {'name': 'Marine VHF', 'start': 156.0, 'end': 163.0, 'step': 0.025, 'mod': 'fm'}, - {'name': 'Amateur 2m', 'start': 144.0, 'end': 148.0, 'step': 0.0125, 'mod': 'fm'}, - {'name': 'Amateur 70cm', 'start': 430.0, 'end': 440.0, 'step': 0.025, 'mod': 'fm'}, - {'name': 'PMR446', 'start': 446.0, 'end': 446.2, 'step': 0.0125, 'mod': 'fm'}, - {'name': 'FRS/GMRS', 'start': 462.5, 'end': 467.7, 'step': 0.025, 'mod': 'fm'}, - {'name': 'Weather Radio', 'start': 162.4, 'end': 162.55, 'step': 0.025, 'mod': 'fm'}, - ] - return jsonify({'presets': presets}) - - -# ============================================ -# MANUAL AUDIO ENDPOINTS 
(for direct listening) -# ============================================ - -@receiver_bp.route('/audio/start', methods=['POST']) -def start_audio() -> Response: - """Start audio at specific frequency (manual mode).""" - global scanner_running, scanner_active_device, scanner_active_sdr_type, receiver_active_device, receiver_active_sdr_type, scanner_power_process, scanner_thread - global audio_running, audio_frequency, audio_modulation, audio_source, audio_start_token - - data = request.json or {} - - try: - frequency = float(data.get('frequency', 0)) - modulation = normalize_modulation(data.get('modulation', 'wfm')) - squelch = int(data['squelch']) if data.get('squelch') is not None else 0 - gain = int(data['gain']) if data.get('gain') is not None else 40 - device = int(data['device']) if data.get('device') is not None else 0 - sdr_type = str(data.get('sdr_type', 'rtlsdr')).lower() - request_token_raw = data.get('request_token') - request_token = int(request_token_raw) if request_token_raw is not None else None - bias_t_raw = data.get('bias_t', scanner_config.get('bias_t', False)) - if isinstance(bias_t_raw, str): - bias_t = bias_t_raw.strip().lower() in {'1', 'true', 'yes', 'on'} - else: - bias_t = bool(bias_t_raw) - except (ValueError, TypeError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid parameter: {e}' - }), 400 - - if frequency <= 0: - return jsonify({ - 'status': 'error', - 'message': 'frequency is required' - }), 400 - - valid_sdr_types = ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay'] - if sdr_type not in valid_sdr_types: - return jsonify({ - 'status': 'error', - 'message': f'Invalid sdr_type. 
Use: {", ".join(valid_sdr_types)}' - }), 400 - - with audio_start_lock: - if request_token is not None: - if request_token < audio_start_token: - return jsonify({ - 'status': 'stale', - 'message': 'Superseded audio start request', - 'source': audio_source, - 'superseded': True, - 'current_token': audio_start_token, - }), 409 - audio_start_token = request_token - else: - audio_start_token += 1 - request_token = audio_start_token - - # Grab scanner refs inside lock, signal stop, clear state - need_scanner_teardown = False - scanner_thread_ref = None - scanner_proc_ref = None - if scanner_running: - scanner_running = False - if scanner_active_device is not None: - app_module.release_sdr_device(scanner_active_device, scanner_active_sdr_type) - scanner_active_device = None - scanner_active_sdr_type = 'rtlsdr' - scanner_thread_ref = scanner_thread - scanner_proc_ref = scanner_power_process - scanner_power_process = None - need_scanner_teardown = True - - # Update config for audio - scanner_config['squelch'] = squelch - scanner_config['gain'] = gain - scanner_config['device'] = device - scanner_config['sdr_type'] = sdr_type - scanner_config['bias_t'] = bias_t - - # Scanner teardown outside lock (blocking: thread join, process wait, pkill, sleep) - if need_scanner_teardown: - if scanner_thread_ref and scanner_thread_ref.is_alive(): - try: - scanner_thread_ref.join(timeout=2.0) - except Exception: - pass - if scanner_proc_ref and scanner_proc_ref.poll() is None: - try: - scanner_proc_ref.terminate() - scanner_proc_ref.wait(timeout=1) - except Exception: - try: - scanner_proc_ref.kill() - except Exception: - pass - try: - subprocess.run(['pkill', '-9', 'rtl_power'], capture_output=True, timeout=0.5) - except Exception: - pass - time.sleep(0.5) - - # Re-acquire lock for waterfall check and device claim - with audio_start_lock: - - # Preferred path: when waterfall WebSocket is active on the same SDR, - # derive monitor audio from that IQ stream instead of spawning rtl_fm. 
- try: - from routes.waterfall_websocket import ( - get_shared_capture_status, - start_shared_monitor_from_capture, - ) - - shared = get_shared_capture_status() - if shared.get('running') and shared.get('device') == device: - _stop_audio_stream() - ok, msg = start_shared_monitor_from_capture( - device=device, - frequency_mhz=frequency, - modulation=modulation, - squelch=squelch, - ) - if ok: - audio_running = True - audio_frequency = frequency - audio_modulation = modulation - audio_source = 'waterfall' - # Shared monitor uses the waterfall's existing SDR claim. - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - return jsonify({ - 'status': 'started', - 'frequency': frequency, - 'modulation': modulation, - 'source': 'waterfall', - 'request_token': request_token, - }) - logger.warning(f"Shared waterfall monitor unavailable: {msg}") - except Exception as e: - logger.debug(f"Shared waterfall monitor probe failed: {e}") - - # Stop waterfall if it's using the same SDR (SSE path) - if waterfall_running and waterfall_active_device == device: - _stop_waterfall_internal() - time.sleep(0.2) - - # Claim device for listening audio. The WebSocket waterfall handler - # may still be tearing down its IQ capture process (thread join + - # safe_terminate can take several seconds), so we retry with back-off - # to give the USB device time to be fully released. 
- if receiver_active_device is None or receiver_active_device != device: - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - - error = None - max_claim_attempts = 6 - for attempt in range(max_claim_attempts): - error = app_module.claim_sdr_device(device, 'receiver', sdr_type) - if not error: - break - if attempt < max_claim_attempts - 1: - logger.debug( - f"Device claim attempt {attempt + 1}/{max_claim_attempts} " - f"failed, retrying in 0.5s: {error}" - ) - time.sleep(0.5) - - if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 - receiver_active_device = device - receiver_active_sdr_type = sdr_type - - _start_audio_stream( - frequency, - modulation, - device=device, - sdr_type=sdr_type, - gain=gain, - squelch=squelch, - bias_t=bias_t, - ) - - if audio_running: - audio_source = 'process' - return jsonify({ - 'status': 'started', - 'frequency': audio_frequency, - 'modulation': audio_modulation, - 'source': 'process', - 'request_token': request_token, - }) - - # Avoid leaving a stale device claim after startup failure. - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - - start_error = '' - for log_path in ('/tmp/rtl_fm_stderr.log', '/tmp/ffmpeg_stderr.log'): - try: - with open(log_path, 'r') as handle: - content = handle.read().strip() - if content: - start_error = content.splitlines()[-1] - break - except Exception: - continue - - message = 'Failed to start audio. Check SDR device.' 
- if start_error: - message = f'Failed to start audio: {start_error}' - return jsonify({ - 'status': 'error', - 'message': message - }), 500 - - -@receiver_bp.route('/audio/stop', methods=['POST']) -def stop_audio() -> Response: - """Stop audio.""" - global receiver_active_device, receiver_active_sdr_type - _stop_audio_stream() - if receiver_active_device is not None: - app_module.release_sdr_device(receiver_active_device, receiver_active_sdr_type) - receiver_active_device = None - receiver_active_sdr_type = 'rtlsdr' - return jsonify({'status': 'stopped'}) - - -@receiver_bp.route('/audio/status') -def audio_status() -> Response: - """Get audio status.""" - running = audio_running - if audio_source == 'waterfall': - try: - from routes.waterfall_websocket import get_shared_capture_status - - shared = get_shared_capture_status() - running = bool(shared.get('running') and shared.get('monitor_enabled')) - except Exception: - running = False - - return jsonify({ - 'running': running, - 'frequency': audio_frequency, - 'modulation': audio_modulation, - 'source': audio_source, - }) - - -@receiver_bp.route('/audio/debug') -def audio_debug() -> Response: - """Get audio debug status and recent stderr logs.""" - rtl_log_path = '/tmp/rtl_fm_stderr.log' - ffmpeg_log_path = '/tmp/ffmpeg_stderr.log' - sample_path = '/tmp/audio_probe.bin' - - def _read_log(path: str) -> str: - try: - with open(path, 'r') as handle: - return handle.read().strip() - except Exception: - return '' - - shared = {} - if audio_source == 'waterfall': - try: - from routes.waterfall_websocket import get_shared_capture_status - - shared = get_shared_capture_status() - except Exception: - shared = {} - - return jsonify({ - 'running': audio_running, - 'frequency': audio_frequency, - 'modulation': audio_modulation, - 'source': audio_source, - 'sdr_type': scanner_config.get('sdr_type', 'rtlsdr'), - 'device': scanner_config.get('device', 0), - 'gain': scanner_config.get('gain', 0), - 'squelch': 
scanner_config.get('squelch', 0), - 'audio_process_alive': bool(audio_process and audio_process.poll() is None), - 'shared_capture': shared, - 'rtl_fm_stderr': _read_log(rtl_log_path), - 'ffmpeg_stderr': _read_log(ffmpeg_log_path), - 'audio_probe_bytes': os.path.getsize(sample_path) if os.path.exists(sample_path) else 0, - }) - - -@receiver_bp.route('/audio/probe') -def audio_probe() -> Response: - """Grab a small chunk of audio bytes from the pipeline for debugging.""" - global audio_process - - if audio_source == 'waterfall': - try: - from routes.waterfall_websocket import read_shared_monitor_audio_chunk - - data = read_shared_monitor_audio_chunk(timeout=2.0) - if not data: - return jsonify({'status': 'error', 'message': 'no shared audio data available'}), 504 - sample_path = '/tmp/audio_probe.bin' - with open(sample_path, 'wb') as handle: - handle.write(data) - return jsonify({'status': 'ok', 'bytes': len(data), 'source': 'waterfall'}) - except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}), 500 - - if not audio_process or not audio_process.stdout: - return jsonify({'status': 'error', 'message': 'audio process not running'}), 400 - - sample_path = '/tmp/audio_probe.bin' - size = 0 - try: - ready, _, _ = select.select([audio_process.stdout], [], [], 2.0) - if not ready: - return jsonify({'status': 'error', 'message': 'no data available'}), 504 - data = audio_process.stdout.read(4096) - if not data: - return jsonify({'status': 'error', 'message': 'no data read'}), 504 - with open(sample_path, 'wb') as handle: - handle.write(data) - size = len(data) - except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}), 500 - - return jsonify({'status': 'ok', 'bytes': size}) - - -@receiver_bp.route('/audio/stream') -def stream_audio() -> Response: - """Stream WAV audio.""" - request_token_raw = request.args.get('request_token') - request_token = None - if request_token_raw is not None: - try: - request_token = 
int(request_token_raw) - except (ValueError, TypeError): - request_token = None - - if request_token is not None and request_token < audio_start_token: - return Response(b'', mimetype='audio/wav', status=204) - - if audio_source == 'waterfall': - for _ in range(40): - if audio_running: - break - time.sleep(0.05) - - if not audio_running: - return Response(b'', mimetype='audio/wav', status=204) - - def generate_shared(): - global audio_running, audio_source - try: - from routes.waterfall_websocket import ( - get_shared_capture_status, - read_shared_monitor_audio_chunk, - ) - except Exception: - return - - # Browser expects an immediate WAV header. - yield _wav_header(sample_rate=48000) - inactive_since: float | None = None - - while audio_running and audio_source == 'waterfall': - if request_token is not None and request_token < audio_start_token: - break - chunk = read_shared_monitor_audio_chunk(timeout=1.0) - if chunk: - inactive_since = None - yield chunk - continue - shared = get_shared_capture_status() - if shared.get('running') and shared.get('monitor_enabled'): - inactive_since = None - continue - if inactive_since is None: - inactive_since = time.monotonic() - continue - if (time.monotonic() - inactive_since) < 4.0: - continue - if not shared.get('running') or not shared.get('monitor_enabled'): - audio_running = False - audio_source = 'process' - break - - return Response( - generate_shared(), - mimetype='audio/wav', - headers={ - 'Content-Type': 'audio/wav', - 'Cache-Control': 'no-cache, no-store', - 'X-Accel-Buffering': 'no', - 'Transfer-Encoding': 'chunked', - } - ) - - # Wait for audio process to be ready (up to 2 seconds). 
- for _ in range(40): - if audio_running and audio_process: - break - time.sleep(0.05) - - if not audio_running or not audio_process: - return Response(b'', mimetype='audio/wav', status=204) - - def generate(): - # Capture local reference to avoid race condition with stop - proc = audio_process - if not proc or not proc.stdout: - return - try: - # Drain stale audio that accumulated in the pipe buffer - # between pipeline start and stream connection. Keep the - # first chunk (contains WAV header) and discard the rest - # so the browser starts close to real-time. - header_chunk = None - while True: - ready, _, _ = select.select([proc.stdout], [], [], 0) - if not ready: - break - chunk = proc.stdout.read(8192) - if not chunk: - break - if header_chunk is None: - header_chunk = chunk - if header_chunk: - yield header_chunk - - # Stream real-time audio - first_chunk_deadline = time.time() + 20.0 - warned_wait = False - while audio_running and proc.poll() is None: - if request_token is not None and request_token < audio_start_token: - break - # Use select to avoid blocking forever - ready, _, _ = select.select([proc.stdout], [], [], 2.0) - if ready: - chunk = proc.stdout.read(8192) - if chunk: - warned_wait = False - yield chunk - else: - break - else: - # Keep connection open while demodulator settles. 
- if time.time() > first_chunk_deadline: - if not warned_wait: - logger.warning("Audio stream still waiting for first chunk") - warned_wait = True - continue - # Timeout - check if process died - if proc.poll() is not None: - break - except GeneratorExit: - pass - except Exception as e: - logger.error(f"Audio stream error: {e}") - - return Response( - generate(), - mimetype='audio/wav', - headers={ - 'Content-Type': 'audio/wav', - 'Cache-Control': 'no-cache, no-store', - 'X-Accel-Buffering': 'no', - 'Transfer-Encoding': 'chunked', - } - ) - - -# ============================================ -# SIGNAL IDENTIFICATION ENDPOINT -# ============================================ - -@receiver_bp.route('/signal/guess', methods=['POST']) -def guess_signal() -> Response: - """Identify a signal based on frequency, modulation, and other parameters.""" - data = request.json or {} - - freq_mhz = data.get('frequency_mhz') - if freq_mhz is None: - return jsonify({'status': 'error', 'message': 'frequency_mhz is required'}), 400 - - try: - freq_mhz = float(freq_mhz) - except (ValueError, TypeError): - return jsonify({'status': 'error', 'message': 'Invalid frequency_mhz'}), 400 - - if freq_mhz <= 0: - return jsonify({'status': 'error', 'message': 'frequency_mhz must be positive'}), 400 - - frequency_hz = int(freq_mhz * 1e6) - - modulation = data.get('modulation') - bandwidth_hz = data.get('bandwidth_hz') - if bandwidth_hz is not None: - try: - bandwidth_hz = int(bandwidth_hz) - except (ValueError, TypeError): - bandwidth_hz = None - - region = data.get('region', 'UK/EU') - - try: - from utils.signal_guess import guess_signal_type_dict - result = guess_signal_type_dict( - frequency_hz=frequency_hz, - modulation=modulation, - bandwidth_hz=bandwidth_hz, - region=region, - ) - return jsonify({'status': 'ok', **result}) - except Exception as e: - logger.error(f"Signal guess error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# 
============================================ -# WATERFALL / SPECTROGRAM ENDPOINTS -# ============================================ - -waterfall_process: Optional[subprocess.Popen] = None -waterfall_thread: Optional[threading.Thread] = None -waterfall_running = False -waterfall_lock = threading.Lock() -waterfall_queue: queue.Queue = queue.Queue(maxsize=200) -waterfall_active_device: Optional[int] = None -waterfall_active_sdr_type: str = 'rtlsdr' -waterfall_config = { - 'start_freq': 88.0, - 'end_freq': 108.0, - 'bin_size': 10000, - 'gain': 40, - 'device': 0, - 'max_bins': 1024, - 'interval': 0.4, -} - - -def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]: - """Parse a single rtl_power CSV line into bins.""" - if not line or line.startswith('#'): - return None, None, None, [] - - parts = [p.strip() for p in line.split(',')] - if len(parts) < 6: - return None, None, None, [] - - # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS) - timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0] - - start_idx = None - for i, tok in enumerate(parts): - try: - val = float(tok) - except ValueError: - continue - if val > 1e5: - start_idx = i - break - if start_idx is None or len(parts) < start_idx + 4: - return timestamp, None, None, [] - - try: - seg_start = float(parts[start_idx]) - seg_end = float(parts[start_idx + 1]) - raw_values = [] - for v in parts[start_idx + 3:]: - try: - raw_values.append(float(v)) - except ValueError: - continue - if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]): - raw_values = raw_values[1:] - return timestamp, seg_start, seg_end, raw_values - except ValueError: - return timestamp, None, None, [] - - -def _queue_waterfall_error(message: str) -> None: - """Push an error message onto the waterfall SSE queue.""" - try: - waterfall_queue.put_nowait({ - 'type': 'waterfall_error', - 'message': message, - 'timestamp': datetime.now().isoformat(), - }) - except 
queue.Full: - pass - - -def _waterfall_loop(): - """Continuous waterfall sweep loop emitting FFT data.""" - global waterfall_running, waterfall_process - - sdr_type_str = waterfall_config.get('sdr_type', 'rtlsdr') - try: - sdr_type = SDRType(sdr_type_str) - except ValueError: - sdr_type = SDRType.RTL_SDR - - if sdr_type == SDRType.RTL_SDR: - _waterfall_loop_rtl_power() - else: - _waterfall_loop_iq(sdr_type) - - -def _waterfall_loop_iq(sdr_type: SDRType): - """Waterfall loop using rx_sdr IQ capture + FFT for HackRF/SoapySDR devices.""" - global waterfall_running, waterfall_process - - start_freq = waterfall_config['start_freq'] - end_freq = waterfall_config['end_freq'] - gain = waterfall_config['gain'] - device = waterfall_config['device'] - interval = float(waterfall_config.get('interval', 0.4)) - - # Use center frequency and sample rate to cover the requested span - center_mhz = (start_freq + end_freq) / 2.0 - span_hz = (end_freq - start_freq) * 1e6 - # Pick a sample rate that covers the span (minimum 2 MHz for HackRF) - sample_rate = max(2000000, int(span_hz)) - # Cap to sensible maximum - sample_rate = min(sample_rate, 20000000) - - sdr_device = SDRFactory.create_default_device(sdr_type, index=device) - builder = SDRFactory.get_builder(sdr_type) - - cmd = builder.build_iq_capture_command( - device=sdr_device, - frequency_mhz=center_mhz, - sample_rate=sample_rate, - gain=float(gain), - ) - - fft_size = min(int(waterfall_config.get('max_bins') or 1024), 4096) - - try: - waterfall_process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - # Detect immediate startup failures - time.sleep(0.35) - if waterfall_process.poll() is not None: - stderr_text = '' - try: - if waterfall_process.stderr: - stderr_text = waterfall_process.stderr.read().decode('utf-8', errors='ignore').strip() - except Exception: - stderr_text = '' - msg = stderr_text or f'IQ capture exited early (code {waterfall_process.returncode})' - logger.error(f"Waterfall 
startup failed: {msg}") - _queue_waterfall_error(msg) - return - - if not waterfall_process.stdout: - _queue_waterfall_error('IQ capture stdout unavailable') - return - - # Read IQ samples and compute FFT - # CU8 format: interleaved unsigned 8-bit I/Q pairs - bytes_per_sample = 2 # 1 byte I + 1 byte Q - chunk_bytes = fft_size * bytes_per_sample - received_any = False - - while waterfall_running: - raw = waterfall_process.stdout.read(chunk_bytes) - if not raw or len(raw) < chunk_bytes: - if waterfall_process.poll() is not None: - break - continue - - received_any = True - - # Convert CU8 to complex float: center at 127.5 - iq = struct.unpack(f'{fft_size * 2}B', raw) - # Compute power spectrum via FFT - real_parts = [(iq[i * 2] - 127.5) / 127.5 for i in range(fft_size)] - imag_parts = [(iq[i * 2 + 1] - 127.5) / 127.5 for i in range(fft_size)] - - bins: list[float] = [] - try: - # Try numpy if available for efficient FFT - import numpy as np - samples = np.array(real_parts, dtype=np.float32) + 1j * np.array(imag_parts, dtype=np.float32) - # Apply Hann window - window = np.hanning(fft_size) - samples *= window - spectrum = np.fft.fftshift(np.fft.fft(samples)) - power_db = 10.0 * np.log10(np.abs(spectrum) ** 2 + 1e-10) - bins = power_db.tolist() - except ImportError: - # Fallback: compute magnitude without full FFT - # Just report raw magnitudes per sample as approximate power - for i in range(fft_size): - mag = math.sqrt(real_parts[i] ** 2 + imag_parts[i] ** 2) - power = 10.0 * math.log10(mag ** 2 + 1e-10) - bins.append(power) - - max_bins = int(waterfall_config.get('max_bins') or 0) - if max_bins > 0 and len(bins) > max_bins: - bins = _downsample_bins(bins, max_bins) - - msg = { - 'type': 'waterfall_sweep', - 'start_freq': start_freq, - 'end_freq': end_freq, - 'bins': bins, - 'timestamp': datetime.now().isoformat(), - } - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - try: - waterfall_queue.get_nowait() - except queue.Empty: - pass - try: - 
waterfall_queue.put_nowait(msg) - except queue.Full: - pass - - # Throttle to respect interval - time.sleep(interval) - - if waterfall_running and not received_any: - _queue_waterfall_error(f'No IQ data received from {sdr_type.value}') - - except Exception as e: - logger.error(f"Waterfall IQ loop error: {e}") - _queue_waterfall_error(f"Waterfall loop error: {e}") - finally: - waterfall_running = False - if waterfall_process and waterfall_process.poll() is None: - try: - waterfall_process.terminate() - waterfall_process.wait(timeout=1) - except Exception: - try: - waterfall_process.kill() - except Exception: - pass - waterfall_process = None - logger.info("Waterfall IQ loop stopped") - - -def _waterfall_loop_rtl_power(): - """Continuous rtl_power sweep loop emitting waterfall data.""" - global waterfall_running, waterfall_process - - rtl_power_path = find_rtl_power() - if not rtl_power_path: - logger.error("rtl_power not found for waterfall") - _queue_waterfall_error('rtl_power not found') - waterfall_running = False - return - - start_hz = int(waterfall_config['start_freq'] * 1e6) - end_hz = int(waterfall_config['end_freq'] * 1e6) - bin_hz = int(waterfall_config['bin_size']) - gain = waterfall_config['gain'] - device = waterfall_config['device'] - interval = float(waterfall_config.get('interval', 0.4)) - - cmd = [ - rtl_power_path, - '-f', f'{start_hz}:{end_hz}:{bin_hz}', - '-i', str(interval), - '-g', str(gain), - '-d', str(device), - ] - - try: - waterfall_process = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - bufsize=1, - text=True, - ) - - # Detect immediate startup failures (e.g. device busy / no device). 
- time.sleep(0.35) - if waterfall_process.poll() is not None: - stderr_text = '' - try: - if waterfall_process.stderr: - stderr_text = waterfall_process.stderr.read().strip() - except Exception: - stderr_text = '' - msg = stderr_text or f'rtl_power exited early (code {waterfall_process.returncode})' - logger.error(f"Waterfall startup failed: {msg}") - _queue_waterfall_error(msg) - return - - current_ts = None - all_bins: list[float] = [] - sweep_start_hz = start_hz - sweep_end_hz = end_hz - received_any = False - - if not waterfall_process.stdout: - _queue_waterfall_error('rtl_power stdout unavailable') - return - - for line in waterfall_process.stdout: - if not waterfall_running: - break - - ts, seg_start, seg_end, bins = _parse_rtl_power_line(line) - if ts is None or not bins: - continue - received_any = True - - if current_ts is None: - current_ts = ts - - if ts != current_ts and all_bins: - max_bins = int(waterfall_config.get('max_bins') or 0) - bins_to_send = all_bins - if max_bins > 0 and len(bins_to_send) > max_bins: - bins_to_send = _downsample_bins(bins_to_send, max_bins) - msg = { - 'type': 'waterfall_sweep', - 'start_freq': sweep_start_hz / 1e6, - 'end_freq': sweep_end_hz / 1e6, - 'bins': bins_to_send, - 'timestamp': datetime.now().isoformat(), - } - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - try: - waterfall_queue.get_nowait() - except queue.Empty: - pass - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - pass - - all_bins = [] - sweep_start_hz = start_hz - sweep_end_hz = end_hz - current_ts = ts - - all_bins.extend(bins) - if seg_start is not None: - sweep_start_hz = min(sweep_start_hz, seg_start) - if seg_end is not None: - sweep_end_hz = max(sweep_end_hz, seg_end) - - # Flush any remaining bins - if all_bins and waterfall_running: - max_bins = int(waterfall_config.get('max_bins') or 0) - bins_to_send = all_bins - if max_bins > 0 and len(bins_to_send) > max_bins: - bins_to_send = _downsample_bins(bins_to_send, max_bins) 
- msg = { - 'type': 'waterfall_sweep', - 'start_freq': sweep_start_hz / 1e6, - 'end_freq': sweep_end_hz / 1e6, - 'bins': bins_to_send, - 'timestamp': datetime.now().isoformat(), - } - try: - waterfall_queue.put_nowait(msg) - except queue.Full: - pass - - if waterfall_running and not received_any: - _queue_waterfall_error('No waterfall FFT data received from rtl_power') - - except Exception as e: - logger.error(f"Waterfall loop error: {e}") - _queue_waterfall_error(f"Waterfall loop error: {e}") - finally: - waterfall_running = False - if waterfall_process and waterfall_process.poll() is None: - try: - waterfall_process.terminate() - waterfall_process.wait(timeout=1) - except Exception: - try: - waterfall_process.kill() - except Exception: - pass - waterfall_process = None - logger.info("Waterfall loop stopped") - - -def _stop_waterfall_internal() -> None: - """Stop the waterfall display and release resources.""" - global waterfall_running, waterfall_process, waterfall_active_device, waterfall_active_sdr_type - - waterfall_running = False - if waterfall_process and waterfall_process.poll() is None: - try: - waterfall_process.terminate() - waterfall_process.wait(timeout=1) - except Exception: - try: - waterfall_process.kill() - except Exception: - pass - waterfall_process = None - - if waterfall_active_device is not None: - app_module.release_sdr_device(waterfall_active_device, waterfall_active_sdr_type) - waterfall_active_device = None - waterfall_active_sdr_type = 'rtlsdr' - - -@receiver_bp.route('/waterfall/start', methods=['POST']) -def start_waterfall() -> Response: - """Start the waterfall/spectrogram display.""" - global waterfall_thread, waterfall_running, waterfall_config, waterfall_active_device, waterfall_active_sdr_type - - with waterfall_lock: - if waterfall_running: - return jsonify({ - 'status': 'started', - 'already_running': True, - 'message': 'Waterfall already running', - 'config': waterfall_config, - }) - - data = request.json or {} - - # Determine 
SDR type - sdr_type_str = data.get('sdr_type', 'rtlsdr') - try: - sdr_type = SDRType(sdr_type_str) - except ValueError: - sdr_type = SDRType.RTL_SDR - sdr_type_str = sdr_type.value - - # RTL-SDR uses rtl_power; other types use rx_sdr via IQ capture - if sdr_type == SDRType.RTL_SDR: - if not find_rtl_power(): - return jsonify({'status': 'error', 'message': 'rtl_power not found'}), 503 - - try: - waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) - waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) - waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) - waterfall_config['gain'] = int(data.get('gain', 40)) - waterfall_config['device'] = int(data.get('device', 0)) - waterfall_config['sdr_type'] = sdr_type_str - if data.get('interval') is not None: - interval = float(data.get('interval', waterfall_config['interval'])) - if interval < 0.1 or interval > 5: - return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400 - waterfall_config['interval'] = interval - if data.get('max_bins') is not None: - max_bins = int(data.get('max_bins', waterfall_config['max_bins'])) - if max_bins < 64 or max_bins > 4096: - return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400 - waterfall_config['max_bins'] = max_bins - except (ValueError, TypeError) as e: - return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 - - if waterfall_config['start_freq'] >= waterfall_config['end_freq']: - return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400 - - # Clear stale queue - try: - while True: - waterfall_queue.get_nowait() - except queue.Empty: - pass - - # Claim SDR device - error = app_module.claim_sdr_device(waterfall_config['device'], 'waterfall', sdr_type_str) - if error: - return jsonify({'status': 'error', 'error_type': 'DEVICE_BUSY', 'message': error}), 409 - - waterfall_active_device = waterfall_config['device'] - 
waterfall_active_sdr_type = sdr_type_str - waterfall_running = True - waterfall_thread = threading.Thread(target=_waterfall_loop, daemon=True) - waterfall_thread.start() - - return jsonify({'status': 'started', 'config': waterfall_config}) - - -@receiver_bp.route('/waterfall/stop', methods=['POST']) -def stop_waterfall() -> Response: - """Stop the waterfall display.""" - _stop_waterfall_internal() - - return jsonify({'status': 'stopped'}) - - -@receiver_bp.route('/waterfall/stream') -def stream_waterfall() -> Response: - """SSE stream for waterfall data.""" - def _on_msg(msg: dict[str, Any]) -> None: - process_event('waterfall', msg, msg.get('type')) - - response = Response( - sse_stream_fanout( - source_queue=waterfall_queue, - channel_key='receiver_waterfall', - timeout=SSE_QUEUE_TIMEOUT, - keepalive_interval=SSE_KEEPALIVE_INTERVAL, - on_message=_on_msg, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - return response -def _downsample_bins(values: list[float], target: int) -> list[float]: - """Downsample bins to a target length using simple averaging.""" - if target <= 0 or len(values) <= target: - return values - - out: list[float] = [] - step = len(values) / target - for i in range(target): - start = int(i * step) - end = int((i + 1) * step) - if end <= start: - end = min(start + 1, len(values)) - chunk = values[start:end] - if not chunk: - continue - out.append(sum(chunk) / len(chunk)) - return out diff --git a/routes/listening_post/__init__.py b/routes/listening_post/__init__.py new file mode 100644 index 0000000..b7926b6 --- /dev/null +++ b/routes/listening_post/__init__.py @@ -0,0 +1,523 @@ +"""Receiver routes for radio monitoring and frequency scanning. 
+ +This package splits the listening post into sub-modules: + scanner - /scanner/*, /presets routes + audio - /audio/* routes + waterfall - /waterfall/* routes + tools - /tools, /signal/guess routes +""" + +from __future__ import annotations + +import os +import queue +import signal +import shutil +import struct +import subprocess +import threading +import time +from datetime import datetime +from typing import Dict, List, Optional + +from flask import Blueprint + +from utils.logging import get_logger +from utils.sse import sse_stream_fanout +from utils.event_pipeline import process_event +from utils.constants import ( + SSE_QUEUE_TIMEOUT, + SSE_KEEPALIVE_INTERVAL, + PROCESS_TERMINATE_TIMEOUT, +) +from utils.sdr import SDRFactory, SDRType + +logger = get_logger('intercept.receiver') + +receiver_bp = Blueprint('receiver', __name__, url_prefix='/receiver') + +# Deferred import to avoid circular import at module load time. +# app.py -> register_blueprints -> from .listening_post import receiver_bp +# must find receiver_bp already defined (above) before this import runs. 
+import app as app_module # noqa: E402 + +# ============================================ +# GLOBAL STATE +# ============================================ + +# Audio demodulation state +audio_process = None +audio_rtl_process = None +audio_lock = threading.Lock() +audio_start_lock = threading.Lock() +audio_running = False +audio_frequency = 0.0 +audio_modulation = 'fm' +audio_source = 'process' +audio_start_token = 0 + +# Scanner state +scanner_thread: Optional[threading.Thread] = None +scanner_running = False +scanner_lock = threading.Lock() +scanner_paused = False +scanner_current_freq = 0.0 +scanner_active_device: Optional[int] = None +scanner_active_sdr_type: str = 'rtlsdr' +receiver_active_device: Optional[int] = None +receiver_active_sdr_type: str = 'rtlsdr' +scanner_power_process: Optional[subprocess.Popen] = None +scanner_config = { + 'start_freq': 88.0, + 'end_freq': 108.0, + 'step': 0.1, + 'modulation': 'wfm', + 'squelch': 0, + 'dwell_time': 10.0, # Seconds to stay on active frequency + 'scan_delay': 0.1, # Seconds between frequency hops (keep low for fast scanning) + 'device': 0, + 'gain': 40, + 'bias_t': False, # Bias-T power for external LNA + 'sdr_type': 'rtlsdr', # SDR type: rtlsdr, hackrf, airspy, limesdr, sdrplay + 'scan_method': 'power', # power (rtl_power) or classic (rtl_fm hop) + 'snr_threshold': 8, +} + +# Activity log +activity_log: List[Dict] = [] +activity_log_lock = threading.Lock() +MAX_LOG_ENTRIES = 500 + +# SSE queue for scanner events +scanner_queue: queue.Queue = queue.Queue(maxsize=100) + +# Flag to trigger skip from API +scanner_skip_signal = False + +# Waterfall / spectrogram state +waterfall_process: Optional[subprocess.Popen] = None +waterfall_thread: Optional[threading.Thread] = None +waterfall_running = False +waterfall_lock = threading.Lock() +waterfall_queue: queue.Queue = queue.Queue(maxsize=200) +waterfall_active_device: Optional[int] = None +waterfall_active_sdr_type: str = 'rtlsdr' +waterfall_config = { + 'start_freq': 
88.0, + 'end_freq': 108.0, + 'bin_size': 10000, + 'gain': 40, + 'device': 0, + 'max_bins': 1024, + 'interval': 0.4, +} + + +# ============================================ +# HELPER FUNCTIONS (shared across sub-modules) +# ============================================ + +VALID_MODULATIONS = ['fm', 'wfm', 'am', 'usb', 'lsb'] + + +def find_rtl_fm() -> str | None: + """Find rtl_fm binary.""" + return shutil.which('rtl_fm') + + +def find_rtl_power() -> str | None: + """Find rtl_power binary.""" + return shutil.which('rtl_power') + + +def find_rx_fm() -> str | None: + """Find rx_fm binary (SoapySDR FM demodulator for HackRF/Airspy/LimeSDR).""" + return shutil.which('rx_fm') + + +def find_ffmpeg() -> str | None: + """Find ffmpeg for audio encoding.""" + return shutil.which('ffmpeg') + + +def normalize_modulation(value: str) -> str: + """Normalize and validate modulation string.""" + mod = str(value or '').lower().strip() + if mod not in VALID_MODULATIONS: + raise ValueError(f'Invalid modulation. 
Use: {", ".join(VALID_MODULATIONS)}') + return mod + + +def _rtl_fm_demod_mode(modulation: str) -> str: + """Map UI modulation names to rtl_fm demod tokens.""" + mod = str(modulation or '').lower().strip() + return 'wbfm' if mod == 'wfm' else mod + + +def _wav_header(sample_rate: int = 48000, bits_per_sample: int = 16, channels: int = 1) -> bytes: + """Create a streaming WAV header with unknown data length.""" + bytes_per_sample = bits_per_sample // 8 + byte_rate = sample_rate * channels * bytes_per_sample + block_align = channels * bytes_per_sample + return ( + b'RIFF' + + struct.pack(' MAX_LOG_ENTRIES: + activity_log.pop() + + # Also push to SSE queue + try: + scanner_queue.put_nowait({ + 'type': 'log', + 'entry': entry + }) + except queue.Full: + pass + + +def _start_audio_stream( + frequency: float, + modulation: str, + *, + device: int | None = None, + sdr_type: str | None = None, + gain: int | None = None, + squelch: int | None = None, + bias_t: bool | None = None, +): + """Start audio streaming at given frequency.""" + global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_modulation + + # Stop existing stream and snapshot config under lock + with audio_lock: + _stop_audio_stream_internal() + + ffmpeg_path = find_ffmpeg() + if not ffmpeg_path: + logger.error("ffmpeg not found") + return + + # Snapshot runtime tuning config so the spawned demod command cannot + # drift if shared scanner_config changes while startup is in-flight. 
+ device_index = int(device if device is not None else scanner_config.get('device', 0)) + gain_value = int(gain if gain is not None else scanner_config.get('gain', 40)) + squelch_value = int(squelch if squelch is not None else scanner_config.get('squelch', 0)) + bias_t_enabled = bool(scanner_config.get('bias_t', False) if bias_t is None else bias_t) + sdr_type_str = str(sdr_type if sdr_type is not None else scanner_config.get('sdr_type', 'rtlsdr')).lower() + + # Build commands outside lock (no blocking I/O, just command construction) + try: + resolved_sdr_type = SDRType(sdr_type_str) + except ValueError: + resolved_sdr_type = SDRType.RTL_SDR + + # Set sample rates based on modulation + if modulation == 'wfm': + sample_rate = 170000 + resample_rate = 32000 + elif modulation in ['usb', 'lsb']: + sample_rate = 12000 + resample_rate = 12000 + else: + sample_rate = 24000 + resample_rate = 24000 + + # Build the SDR command based on device type + if resolved_sdr_type == SDRType.RTL_SDR: + rtl_fm_path = find_rtl_fm() + if not rtl_fm_path: + logger.error("rtl_fm not found") + return + + freq_hz = int(frequency * 1e6) + sdr_cmd = [ + rtl_fm_path, + '-M', _rtl_fm_demod_mode(modulation), + '-f', str(freq_hz), + '-s', str(sample_rate), + '-r', str(resample_rate), + '-g', str(gain_value), + '-d', str(device_index), + '-l', str(squelch_value), + ] + if bias_t_enabled: + sdr_cmd.append('-T') + else: + rx_fm_path = find_rx_fm() + if not rx_fm_path: + logger.error(f"rx_fm not found - required for {resolved_sdr_type.value}. 
Install SoapySDR utilities.") + return + + sdr_device = SDRFactory.create_default_device(resolved_sdr_type, index=device_index) + builder = SDRFactory.get_builder(resolved_sdr_type) + sdr_cmd = builder.build_fm_demod_command( + device=sdr_device, + frequency_mhz=frequency, + sample_rate=resample_rate, + gain=float(gain_value), + modulation=modulation, + squelch=squelch_value, + bias_t=bias_t_enabled, + ) + sdr_cmd[0] = rx_fm_path + + encoder_cmd = [ + ffmpeg_path, + '-hide_banner', + '-loglevel', 'error', + '-fflags', 'nobuffer', + '-flags', 'low_delay', + '-probesize', '32', + '-analyzeduration', '0', + '-f', 's16le', + '-ar', str(resample_rate), + '-ac', '1', + '-i', 'pipe:0', + '-acodec', 'pcm_s16le', + '-ar', '44100', + '-f', 'wav', + 'pipe:1' + ] + + # Retry loop outside lock — spawning + health check sleeps don't block + # other operations. audio_start_lock already serializes callers. + try: + rtl_stderr_log = '/tmp/rtl_fm_stderr.log' + ffmpeg_stderr_log = '/tmp/ffmpeg_stderr.log' + logger.info(f"Starting audio: {frequency} MHz, mod={modulation}, device={device_index}") + + new_rtl_proc = None + new_audio_proc = None + max_attempts = 3 + for attempt in range(max_attempts): + new_rtl_proc = None + new_audio_proc = None + rtl_err_handle = None + ffmpeg_err_handle = None + try: + rtl_err_handle = open(rtl_stderr_log, 'w') + ffmpeg_err_handle = open(ffmpeg_stderr_log, 'w') + new_rtl_proc = subprocess.Popen( + sdr_cmd, + stdout=subprocess.PIPE, + stderr=rtl_err_handle, + bufsize=0, + start_new_session=True + ) + new_audio_proc = subprocess.Popen( + encoder_cmd, + stdin=new_rtl_proc.stdout, + stdout=subprocess.PIPE, + stderr=ffmpeg_err_handle, + bufsize=0, + start_new_session=True + ) + if new_rtl_proc.stdout: + new_rtl_proc.stdout.close() + finally: + if rtl_err_handle: + rtl_err_handle.close() + if ffmpeg_err_handle: + ffmpeg_err_handle.close() + + # Brief delay to check if process started successfully + time.sleep(0.3) + + if (new_rtl_proc and 
new_rtl_proc.poll() is not None) or ( + new_audio_proc and new_audio_proc.poll() is not None + ): + rtl_stderr = '' + ffmpeg_stderr = '' + try: + with open(rtl_stderr_log, 'r') as f: + rtl_stderr = f.read().strip() + except Exception: + pass + try: + with open(ffmpeg_stderr_log, 'r') as f: + ffmpeg_stderr = f.read().strip() + except Exception: + pass + + if 'usb_claim_interface' in rtl_stderr and attempt < max_attempts - 1: + logger.warning(f"USB device busy (attempt {attempt + 1}/{max_attempts}), waiting for release...") + if new_audio_proc: + try: + new_audio_proc.terminate() + new_audio_proc.wait(timeout=0.5) + except Exception: + pass + if new_rtl_proc: + try: + new_rtl_proc.terminate() + new_rtl_proc.wait(timeout=0.5) + except Exception: + pass + time.sleep(1.0) + continue + + if new_audio_proc and new_audio_proc.poll() is None: + try: + new_audio_proc.terminate() + new_audio_proc.wait(timeout=0.5) + except Exception: + pass + if new_rtl_proc and new_rtl_proc.poll() is None: + try: + new_rtl_proc.terminate() + new_rtl_proc.wait(timeout=0.5) + except Exception: + pass + new_audio_proc = None + new_rtl_proc = None + + logger.error( + f"Audio pipeline exited immediately. 
rtl_fm stderr: {rtl_stderr}, ffmpeg stderr: {ffmpeg_stderr}" + ) + return + + # Pipeline started successfully + break + + # Verify pipeline is still alive, then install under lock + if ( + not new_audio_proc + or not new_rtl_proc + or new_audio_proc.poll() is not None + or new_rtl_proc.poll() is not None + ): + logger.warning("Audio pipeline did not remain alive after startup") + # Clean up failed processes + if new_audio_proc: + try: + new_audio_proc.terminate() + new_audio_proc.wait(timeout=0.5) + except Exception: + pass + if new_rtl_proc: + try: + new_rtl_proc.terminate() + new_rtl_proc.wait(timeout=0.5) + except Exception: + pass + return + + # Install processes under lock + with audio_lock: + audio_rtl_process = new_rtl_proc + audio_process = new_audio_proc + audio_running = True + audio_frequency = frequency + audio_modulation = modulation + logger.info(f"Audio stream started: {frequency} MHz ({modulation}) via {resolved_sdr_type.value}") + + except Exception as e: + logger.error(f"Failed to start audio stream: {e}") + + +def _stop_audio_stream(): + """Stop audio streaming.""" + with audio_lock: + _stop_audio_stream_internal() + + +def _stop_audio_stream_internal(): + """Internal stop (must hold lock).""" + global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_source + + # Set flag first to stop any streaming + audio_running = False + audio_frequency = 0.0 + previous_source = audio_source + audio_source = 'process' + + if previous_source == 'waterfall': + try: + from routes.waterfall_websocket import stop_shared_monitor_from_capture + + stop_shared_monitor_from_capture() + except Exception: + pass + + had_processes = audio_process is not None or audio_rtl_process is not None + + # Kill the pipeline processes and their groups + if audio_process: + try: + # Kill entire process group (SDR demod + ffmpeg) + try: + os.killpg(os.getpgid(audio_process.pid), signal.SIGKILL) + except (ProcessLookupError, PermissionError): + 
audio_process.kill() + audio_process.wait(timeout=0.5) + except Exception: + pass + + if audio_rtl_process: + try: + try: + os.killpg(os.getpgid(audio_rtl_process.pid), signal.SIGKILL) + except (ProcessLookupError, PermissionError): + audio_rtl_process.kill() + audio_rtl_process.wait(timeout=0.5) + except Exception: + pass + + audio_process = None + audio_rtl_process = None + + # Brief pause for SDR device USB interface to be released by kernel. + # The _start_audio_stream retry loop handles longer contention windows + # so only a minimal delay is needed here. + if had_processes: + time.sleep(0.15) + + +def _stop_waterfall_internal() -> None: + """Stop the waterfall display and release resources.""" + global waterfall_running, waterfall_process, waterfall_active_device, waterfall_active_sdr_type + + waterfall_running = False + if waterfall_process and waterfall_process.poll() is None: + try: + waterfall_process.terminate() + waterfall_process.wait(timeout=1) + except Exception: + try: + waterfall_process.kill() + except Exception: + pass + waterfall_process = None + + if waterfall_active_device is not None: + app_module.release_sdr_device(waterfall_active_device, waterfall_active_sdr_type) + waterfall_active_device = None + waterfall_active_sdr_type = 'rtlsdr' + + +# ============================================ +# Import sub-modules to register routes on receiver_bp +# ============================================ +from . import scanner # noqa: E402, F401 +from . import audio # noqa: E402, F401 +from . import waterfall # noqa: E402, F401 +from . 
import tools # noqa: E402, F401 diff --git a/routes/listening_post/audio.py b/routes/listening_post/audio.py new file mode 100644 index 0000000..467bc57 --- /dev/null +++ b/routes/listening_post/audio.py @@ -0,0 +1,502 @@ +"""Audio routes for manual listening and audio streaming.""" + +from __future__ import annotations + +import os +import select +import subprocess +import time +from typing import Any + +from flask import jsonify, request, Response + +from . import ( + receiver_bp, + logger, + app_module, + scanner_config, + _wav_header, + _start_audio_stream, + _stop_audio_stream, + _stop_waterfall_internal, + normalize_modulation, +) +import routes.listening_post as _state + + +# ============================================ +# MANUAL AUDIO ENDPOINTS (for direct listening) +# ============================================ + +@receiver_bp.route('/audio/start', methods=['POST']) +def start_audio() -> Response: + """Start audio at specific frequency (manual mode).""" + data = request.json or {} + + try: + frequency = float(data.get('frequency', 0)) + modulation = normalize_modulation(data.get('modulation', 'wfm')) + squelch = int(data['squelch']) if data.get('squelch') is not None else 0 + gain = int(data['gain']) if data.get('gain') is not None else 40 + device = int(data['device']) if data.get('device') is not None else 0 + sdr_type = str(data.get('sdr_type', 'rtlsdr')).lower() + request_token_raw = data.get('request_token') + request_token = int(request_token_raw) if request_token_raw is not None else None + bias_t_raw = data.get('bias_t', scanner_config.get('bias_t', False)) + if isinstance(bias_t_raw, str): + bias_t = bias_t_raw.strip().lower() in {'1', 'true', 'yes', 'on'} + else: + bias_t = bool(bias_t_raw) + except (ValueError, TypeError) as e: + return jsonify({ + 'status': 'error', + 'message': f'Invalid parameter: {e}' + }), 400 + + if frequency <= 0: + return jsonify({ + 'status': 'error', + 'message': 'frequency is required' + }), 400 + + valid_sdr_types 
= ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay'] + if sdr_type not in valid_sdr_types: + return jsonify({ + 'status': 'error', + 'message': f'Invalid sdr_type. Use: {", ".join(valid_sdr_types)}' + }), 400 + + with _state.audio_start_lock: + if request_token is not None: + if request_token < _state.audio_start_token: + return jsonify({ + 'status': 'stale', + 'message': 'Superseded audio start request', + 'source': _state.audio_source, + 'superseded': True, + 'current_token': _state.audio_start_token, + }), 409 + _state.audio_start_token = request_token + else: + _state.audio_start_token += 1 + request_token = _state.audio_start_token + + # Grab scanner refs inside lock, signal stop, clear state + need_scanner_teardown = False + scanner_thread_ref = None + scanner_proc_ref = None + if _state.scanner_running: + _state.scanner_running = False + if _state.scanner_active_device is not None: + app_module.release_sdr_device(_state.scanner_active_device, _state.scanner_active_sdr_type) + _state.scanner_active_device = None + _state.scanner_active_sdr_type = 'rtlsdr' + scanner_thread_ref = _state.scanner_thread + scanner_proc_ref = _state.scanner_power_process + _state.scanner_power_process = None + need_scanner_teardown = True + + # Update config for audio + scanner_config['squelch'] = squelch + scanner_config['gain'] = gain + scanner_config['device'] = device + scanner_config['sdr_type'] = sdr_type + scanner_config['bias_t'] = bias_t + + # Scanner teardown outside lock (blocking: thread join, process wait, pkill, sleep) + if need_scanner_teardown: + if scanner_thread_ref and scanner_thread_ref.is_alive(): + try: + scanner_thread_ref.join(timeout=2.0) + except Exception: + pass + if scanner_proc_ref and scanner_proc_ref.poll() is None: + try: + scanner_proc_ref.terminate() + scanner_proc_ref.wait(timeout=1) + except Exception: + try: + scanner_proc_ref.kill() + except Exception: + pass + try: + subprocess.run(['pkill', '-9', 'rtl_power'], capture_output=True, 
timeout=0.5) + except Exception: + pass + time.sleep(0.5) + + # Re-acquire lock for waterfall check and device claim + with _state.audio_start_lock: + + # Preferred path: when waterfall WebSocket is active on the same SDR, + # derive monitor audio from that IQ stream instead of spawning rtl_fm. + try: + from routes.waterfall_websocket import ( + get_shared_capture_status, + start_shared_monitor_from_capture, + ) + + shared = get_shared_capture_status() + if shared.get('running') and shared.get('device') == device: + _stop_audio_stream() + ok, msg = start_shared_monitor_from_capture( + device=device, + frequency_mhz=frequency, + modulation=modulation, + squelch=squelch, + ) + if ok: + _state.audio_running = True + _state.audio_frequency = frequency + _state.audio_modulation = modulation + _state.audio_source = 'waterfall' + # Shared monitor uses the waterfall's existing SDR claim. + if _state.receiver_active_device is not None: + app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type) + _state.receiver_active_device = None + _state.receiver_active_sdr_type = 'rtlsdr' + return jsonify({ + 'status': 'started', + 'frequency': frequency, + 'modulation': modulation, + 'source': 'waterfall', + 'request_token': request_token, + }) + logger.warning(f"Shared waterfall monitor unavailable: {msg}") + except Exception as e: + logger.debug(f"Shared waterfall monitor probe failed: {e}") + + # Stop waterfall if it's using the same SDR (SSE path) + if _state.waterfall_running and _state.waterfall_active_device == device: + _stop_waterfall_internal() + time.sleep(0.2) + + # Claim device for listening audio. The WebSocket waterfall handler + # may still be tearing down its IQ capture process (thread join + + # safe_terminate can take several seconds), so we retry with back-off + # to give the USB device time to be fully released. 
+ if _state.receiver_active_device is None or _state.receiver_active_device != device: + if _state.receiver_active_device is not None: + app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type) + _state.receiver_active_device = None + _state.receiver_active_sdr_type = 'rtlsdr' + + error = None + max_claim_attempts = 6 + for attempt in range(max_claim_attempts): + error = app_module.claim_sdr_device(device, 'receiver', sdr_type) + if not error: + break + if attempt < max_claim_attempts - 1: + logger.debug( + f"Device claim attempt {attempt + 1}/{max_claim_attempts} " + f"failed, retrying in 0.5s: {error}" + ) + time.sleep(0.5) + + if error: + return jsonify({ + 'status': 'error', + 'error_type': 'DEVICE_BUSY', + 'message': error + }), 409 + _state.receiver_active_device = device + _state.receiver_active_sdr_type = sdr_type + + _start_audio_stream( + frequency, + modulation, + device=device, + sdr_type=sdr_type, + gain=gain, + squelch=squelch, + bias_t=bias_t, + ) + + if _state.audio_running: + _state.audio_source = 'process' + return jsonify({ + 'status': 'started', + 'frequency': _state.audio_frequency, + 'modulation': _state.audio_modulation, + 'source': 'process', + 'request_token': request_token, + }) + + # Avoid leaving a stale device claim after startup failure. + if _state.receiver_active_device is not None: + app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type) + _state.receiver_active_device = None + _state.receiver_active_sdr_type = 'rtlsdr' + + start_error = '' + for log_path in ('/tmp/rtl_fm_stderr.log', '/tmp/ffmpeg_stderr.log'): + try: + with open(log_path, 'r') as handle: + content = handle.read().strip() + if content: + start_error = content.splitlines()[-1] + break + except Exception: + continue + + message = 'Failed to start audio. Check SDR device.' 
+ if start_error: + message = f'Failed to start audio: {start_error}' + return jsonify({ + 'status': 'error', + 'message': message + }), 500 + + +@receiver_bp.route('/audio/stop', methods=['POST']) +def stop_audio() -> Response: + """Stop audio.""" + _stop_audio_stream() + if _state.receiver_active_device is not None: + app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type) + _state.receiver_active_device = None + _state.receiver_active_sdr_type = 'rtlsdr' + return jsonify({'status': 'stopped'}) + + +@receiver_bp.route('/audio/status') +def audio_status() -> Response: + """Get audio status.""" + running = _state.audio_running + if _state.audio_source == 'waterfall': + try: + from routes.waterfall_websocket import get_shared_capture_status + + shared = get_shared_capture_status() + running = bool(shared.get('running') and shared.get('monitor_enabled')) + except Exception: + running = False + + return jsonify({ + 'running': running, + 'frequency': _state.audio_frequency, + 'modulation': _state.audio_modulation, + 'source': _state.audio_source, + }) + + +@receiver_bp.route('/audio/debug') +def audio_debug() -> Response: + """Get audio debug status and recent stderr logs.""" + rtl_log_path = '/tmp/rtl_fm_stderr.log' + ffmpeg_log_path = '/tmp/ffmpeg_stderr.log' + sample_path = '/tmp/audio_probe.bin' + + def _read_log(path: str) -> str: + try: + with open(path, 'r') as handle: + return handle.read().strip() + except Exception: + return '' + + shared = {} + if _state.audio_source == 'waterfall': + try: + from routes.waterfall_websocket import get_shared_capture_status + + shared = get_shared_capture_status() + except Exception: + shared = {} + + return jsonify({ + 'running': _state.audio_running, + 'frequency': _state.audio_frequency, + 'modulation': _state.audio_modulation, + 'source': _state.audio_source, + 'sdr_type': scanner_config.get('sdr_type', 'rtlsdr'), + 'device': scanner_config.get('device', 0), + 'gain': 
scanner_config.get('gain', 0), + 'squelch': scanner_config.get('squelch', 0), + 'audio_process_alive': bool(_state.audio_process and _state.audio_process.poll() is None), + 'shared_capture': shared, + 'rtl_fm_stderr': _read_log(rtl_log_path), + 'ffmpeg_stderr': _read_log(ffmpeg_log_path), + 'audio_probe_bytes': os.path.getsize(sample_path) if os.path.exists(sample_path) else 0, + }) + + +@receiver_bp.route('/audio/probe') +def audio_probe() -> Response: + """Grab a small chunk of audio bytes from the pipeline for debugging.""" + if _state.audio_source == 'waterfall': + try: + from routes.waterfall_websocket import read_shared_monitor_audio_chunk + + data = read_shared_monitor_audio_chunk(timeout=2.0) + if not data: + return jsonify({'status': 'error', 'message': 'no shared audio data available'}), 504 + sample_path = '/tmp/audio_probe.bin' + with open(sample_path, 'wb') as handle: + handle.write(data) + return jsonify({'status': 'ok', 'bytes': len(data), 'source': 'waterfall'}) + except Exception as e: + return jsonify({'status': 'error', 'message': str(e)}), 500 + + if not _state.audio_process or not _state.audio_process.stdout: + return jsonify({'status': 'error', 'message': 'audio process not running'}), 400 + + sample_path = '/tmp/audio_probe.bin' + size = 0 + try: + ready, _, _ = select.select([_state.audio_process.stdout], [], [], 2.0) + if not ready: + return jsonify({'status': 'error', 'message': 'no data available'}), 504 + data = _state.audio_process.stdout.read(4096) + if not data: + return jsonify({'status': 'error', 'message': 'no data read'}), 504 + with open(sample_path, 'wb') as handle: + handle.write(data) + size = len(data) + except Exception as e: + return jsonify({'status': 'error', 'message': str(e)}), 500 + + return jsonify({'status': 'ok', 'bytes': size}) + + +@receiver_bp.route('/audio/stream') +def stream_audio() -> Response: + """Stream WAV audio.""" + request_token_raw = request.args.get('request_token') + request_token = None + if 
request_token_raw is not None: + try: + request_token = int(request_token_raw) + except (ValueError, TypeError): + request_token = None + + if request_token is not None and request_token < _state.audio_start_token: + return Response(b'', mimetype='audio/wav', status=204) + + if _state.audio_source == 'waterfall': + for _ in range(40): + if _state.audio_running: + break + time.sleep(0.05) + + if not _state.audio_running: + return Response(b'', mimetype='audio/wav', status=204) + + def generate_shared(): + try: + from routes.waterfall_websocket import ( + get_shared_capture_status, + read_shared_monitor_audio_chunk, + ) + except Exception: + return + + # Browser expects an immediate WAV header. + yield _wav_header(sample_rate=48000) + inactive_since: float | None = None + + while _state.audio_running and _state.audio_source == 'waterfall': + if request_token is not None and request_token < _state.audio_start_token: + break + chunk = read_shared_monitor_audio_chunk(timeout=1.0) + if chunk: + inactive_since = None + yield chunk + continue + shared = get_shared_capture_status() + if shared.get('running') and shared.get('monitor_enabled'): + inactive_since = None + continue + if inactive_since is None: + inactive_since = time.monotonic() + continue + if (time.monotonic() - inactive_since) < 4.0: + continue + if not shared.get('running') or not shared.get('monitor_enabled'): + _state.audio_running = False + _state.audio_source = 'process' + break + + return Response( + generate_shared(), + mimetype='audio/wav', + headers={ + 'Content-Type': 'audio/wav', + 'Cache-Control': 'no-cache, no-store', + 'X-Accel-Buffering': 'no', + 'Transfer-Encoding': 'chunked', + } + ) + + # Wait for audio process to be ready (up to 2 seconds). 
+ for _ in range(40): + if _state.audio_running and _state.audio_process: + break + time.sleep(0.05) + + if not _state.audio_running or not _state.audio_process: + return Response(b'', mimetype='audio/wav', status=204) + + def generate(): + # Capture local reference to avoid race condition with stop + proc = _state.audio_process + if not proc or not proc.stdout: + return + try: + # Drain stale audio that accumulated in the pipe buffer + # between pipeline start and stream connection. Keep the + # first chunk (contains WAV header) and discard the rest + # so the browser starts close to real-time. + header_chunk = None + while True: + ready, _, _ = select.select([proc.stdout], [], [], 0) + if not ready: + break + chunk = proc.stdout.read(8192) + if not chunk: + break + if header_chunk is None: + header_chunk = chunk + if header_chunk: + yield header_chunk + + # Stream real-time audio + first_chunk_deadline = time.time() + 20.0 + warned_wait = False + while _state.audio_running and proc.poll() is None: + if request_token is not None and request_token < _state.audio_start_token: + break + # Use select to avoid blocking forever + ready, _, _ = select.select([proc.stdout], [], [], 2.0) + if ready: + chunk = proc.stdout.read(8192) + if chunk: + warned_wait = False + yield chunk + else: + break + else: + # Keep connection open while demodulator settles. 
+ if time.time() > first_chunk_deadline: + if not warned_wait: + logger.warning("Audio stream still waiting for first chunk") + warned_wait = True + continue + # Timeout - check if process died + if proc.poll() is not None: + break + except GeneratorExit: + pass + except Exception as e: + logger.error(f"Audio stream error: {e}") + + return Response( + generate(), + mimetype='audio/wav', + headers={ + 'Content-Type': 'audio/wav', + 'Cache-Control': 'no-cache, no-store', + 'X-Accel-Buffering': 'no', + 'Transfer-Encoding': 'chunked', + } + ) diff --git a/routes/listening_post/scanner.py b/routes/listening_post/scanner.py new file mode 100644 index 0000000..61b8b3c --- /dev/null +++ b/routes/listening_post/scanner.py @@ -0,0 +1,824 @@ +"""Scanner routes and implementation for frequency scanning.""" + +from __future__ import annotations + +import math +import queue +import struct +import subprocess +import threading +import time +from typing import Any + +from flask import jsonify, request, Response + +from . 
def scanner_loop():
    """Main scanner loop - scans frequencies looking for signals.

    Steps across the configured range with rtl_fm, sampling ~250 ms of
    demodulated audio per frequency and comparing its RMS level against a
    squelch-derived threshold to decide whether a signal is present.  On
    detection it starts the shared audio stream, dwells, and publishes SSE
    events via scanner_queue.  Runs in a daemon thread until
    _state.scanner_running is cleared.
    """
    logger.info("Scanner thread started")
    add_activity_log('scanner_start', scanner_config['start_freq'],
                     f"Scanning {scanner_config['start_freq']}-{scanner_config['end_freq']} MHz")

    rtl_fm_path = find_rtl_fm()

    if not rtl_fm_path:
        logger.error("rtl_fm not found")
        add_activity_log('error', 0, 'rtl_fm not found')
        _state.scanner_running = False
        return

    current_freq = scanner_config['start_freq']
    last_signal_time = 0
    signal_detected = False

    try:
        while _state.scanner_running:
            # Check if paused
            if _state.scanner_paused:
                time.sleep(0.1)
                continue

            # Read config values on each iteration (allows live updates)
            step_mhz = scanner_config['step'] / 1000.0
            squelch = scanner_config['squelch']
            mod = scanner_config['modulation']
            gain = scanner_config['gain']
            device = scanner_config['device']

            _state.scanner_current_freq = current_freq

            # Notify clients of frequency change
            try:
                scanner_queue.put_nowait({
                    'type': 'freq_change',
                    'frequency': current_freq,
                    'scanning': not signal_detected,
                    'range_start': scanner_config['start_freq'],
                    'range_end': scanner_config['end_freq']
                })
            except queue.Full:
                pass

            # Start rtl_fm at this frequency
            freq_hz = int(current_freq * 1e6)

            # Sample rates per modulation (WFM needs the widest front end)
            if mod == 'wfm':
                sample_rate = 170000
                resample_rate = 32000
            elif mod in ['usb', 'lsb']:
                sample_rate = 12000
                resample_rate = 12000
            else:
                sample_rate = 24000
                resample_rate = 24000

            # Don't use squelch in rtl_fm - we want to analyze raw audio
            rtl_cmd = [
                rtl_fm_path,
                '-M', _rtl_fm_demod_mode(mod),
                '-f', str(freq_hz),
                '-s', str(sample_rate),
                '-r', str(resample_rate),
                '-g', str(gain),
                '-d', str(device),
            ]
            # Add bias-t flag if enabled (for external LNA power)
            if scanner_config.get('bias_t', False):
                rtl_cmd.append('-T')

            try:
                # Start rtl_fm
                rtl_proc = subprocess.Popen(
                    rtl_cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.DEVNULL
                )

                # Read audio data for analysis
                audio_data = b''

                # Read audio samples for a short period
                sample_duration = 0.25  # 250ms - balance between speed and detection
                bytes_needed = int(resample_rate * 2 * sample_duration)  # 16-bit mono

                while len(audio_data) < bytes_needed and _state.scanner_running:
                    chunk = rtl_proc.stdout.read(4096)
                    if not chunk:
                        break
                    audio_data += chunk

                # Clean up rtl_fm
                rtl_proc.terminate()
                try:
                    rtl_proc.wait(timeout=1)
                except subprocess.TimeoutExpired:
                    rtl_proc.kill()

                # Analyze audio level.
                # effective_threshold is always bound here (0 == "no data"),
                # replacing the old `'effective_threshold' in dir()` probe.
                audio_detected = False
                rms = 0
                threshold = 500
                effective_threshold = 0
                if len(audio_data) > 100:
                    samples = struct.unpack(f'{len(audio_data)//2}h', audio_data)
                    # Calculate RMS level (root mean square)
                    rms = (sum(s*s for s in samples) / len(samples)) ** 0.5

                    # Threshold based on squelch setting
                    # Lower squelch = more sensitive (lower threshold)
                    # squelch 0 = very sensitive, squelch 100 = only strong signals
                    if mod == 'wfm':
                        # WFM: threshold 500-10000 based on squelch
                        threshold = 500 + (squelch * 95)
                        min_threshold = 1500
                    else:
                        # AM/NFM: threshold 300-6500 based on squelch
                        threshold = 300 + (squelch * 62)
                        min_threshold = 900

                    effective_threshold = max(threshold, min_threshold)
                    audio_detected = rms > effective_threshold

                # Send level info to clients
                try:
                    scanner_queue.put_nowait({
                        'type': 'scan_update',
                        'frequency': current_freq,
                        'level': int(rms),
                        'threshold': int(effective_threshold),
                        'detected': audio_detected,
                        'range_start': scanner_config['start_freq'],
                        'range_end': scanner_config['end_freq']
                    })
                except queue.Full:
                    pass

                if audio_detected and _state.scanner_running:
                    if not signal_detected:
                        # New signal found!
                        signal_detected = True
                        last_signal_time = time.time()
                        add_activity_log('signal_found', current_freq,
                                         f'Signal detected on {current_freq:.3f} MHz ({mod.upper()})')
                        logger.info(f"Signal found at {current_freq} MHz")

                        # Start audio streaming for user
                        _start_audio_stream(current_freq, mod)

                        try:
                            snr_db = round(10 * math.log10(rms / effective_threshold), 1) if rms > 0 and effective_threshold > 0 else 0.0
                            scanner_queue.put_nowait({
                                'type': 'signal_found',
                                'frequency': current_freq,
                                'modulation': mod,
                                'audio_streaming': True,
                                'level': int(rms),
                                'threshold': int(effective_threshold),
                                'snr': snr_db,
                                'range_start': scanner_config['start_freq'],
                                'range_end': scanner_config['end_freq']
                            })
                        except queue.Full:
                            pass

                    # Check for skip signal
                    if _state.scanner_skip_signal:
                        _state.scanner_skip_signal = False
                        signal_detected = False
                        _stop_audio_stream()
                        try:
                            scanner_queue.put_nowait({
                                'type': 'signal_skipped',
                                'frequency': current_freq
                            })
                        except queue.Full:
                            pass
                        # Move to next frequency (step is in kHz, convert to MHz)
                        current_freq += step_mhz
                        if current_freq > scanner_config['end_freq']:
                            current_freq = scanner_config['start_freq']
                        continue

                    # Stay on this frequency (dwell) but check periodically
                    dwell_start = time.time()
                    while (time.time() - dwell_start) < scanner_config['dwell_time'] and _state.scanner_running:
                        if _state.scanner_skip_signal:
                            break
                        time.sleep(0.2)

                    last_signal_time = time.time()

                    # After dwell, move on to keep scanning
                    if _state.scanner_running and not _state.scanner_skip_signal:
                        signal_detected = False
                        _stop_audio_stream()
                        try:
                            scanner_queue.put_nowait({
                                'type': 'signal_lost',
                                'frequency': current_freq,
                                'range_start': scanner_config['start_freq'],
                                'range_end': scanner_config['end_freq']
                            })
                        except queue.Full:
                            pass

                        current_freq += step_mhz
                        if current_freq > scanner_config['end_freq']:
                            current_freq = scanner_config['start_freq']
                            add_activity_log('scan_cycle', current_freq, 'Scan cycle complete')
                        time.sleep(scanner_config['scan_delay'])

                else:
                    # No signal at this frequency
                    if signal_detected:
                        # Signal lost
                        duration = time.time() - last_signal_time + scanner_config['dwell_time']
                        add_activity_log('signal_lost', current_freq,
                                         f'Signal lost after {duration:.1f}s')
                        signal_detected = False

                        # Stop audio
                        _stop_audio_stream()

                        try:
                            scanner_queue.put_nowait({
                                'type': 'signal_lost',
                                'frequency': current_freq
                            })
                        except queue.Full:
                            pass

                    # Move to next frequency (step is in kHz, convert to MHz)
                    current_freq += step_mhz
                    if current_freq > scanner_config['end_freq']:
                        current_freq = scanner_config['start_freq']
                        add_activity_log('scan_cycle', current_freq, 'Scan cycle complete')

                    time.sleep(scanner_config['scan_delay'])

            except Exception as e:
                logger.error(f"Scanner error at {current_freq} MHz: {e}")
                time.sleep(0.5)

    except Exception as e:
        logger.error(f"Scanner loop error: {e}")
    finally:
        _state.scanner_running = False
        _stop_audio_stream()
        add_activity_log('scanner_stop', _state.scanner_current_freq, 'Scanner stopped')
        logger.info("Scanner thread stopped")
def scanner_loop_power():
    """Power sweep scanner using rtl_power to detect peaks.

    Repeatedly runs a single-shot rtl_power sweep over the configured
    range, estimates the noise floor per segment (median), emits throttled
    scan_update SSE events, and reports clusters of bins whose SNR exceeds
    scanner_config['snr_threshold'] as signal_found events.  Runs in a
    daemon thread until _state.scanner_running is cleared.
    """
    logger.info("Power sweep scanner thread started")
    add_activity_log('scanner_start', scanner_config['start_freq'],
                     f"Power sweep {scanner_config['start_freq']}-{scanner_config['end_freq']} MHz")

    rtl_power_path = find_rtl_power()
    if not rtl_power_path:
        logger.error("rtl_power not found")
        add_activity_log('error', 0, 'rtl_power not found')
        _state.scanner_running = False
        return

    try:
        while _state.scanner_running:
            if _state.scanner_paused:
                time.sleep(0.1)
                continue

            # Re-read config each sweep so UI changes take effect live.
            start_mhz = scanner_config['start_freq']
            end_mhz = scanner_config['end_freq']
            step_khz = scanner_config['step']
            gain = scanner_config['gain']
            device = scanner_config['device']
            squelch = scanner_config['squelch']  # NOTE: unused in power mode
            mod = scanner_config['modulation']

            # Configure sweep
            bin_hz = max(1000, int(step_khz * 1000))
            start_hz = int(start_mhz * 1e6)
            end_hz = int(end_mhz * 1e6)
            # Integration time per sweep (seconds)
            integration = max(0.3, min(1.0, scanner_config.get('scan_delay', 0.5)))

            # -1 = single-shot sweep; output is CSV on stdout.
            cmd = [
                rtl_power_path,
                '-f', f'{start_hz}:{end_hz}:{bin_hz}',
                '-i', f'{integration}',
                '-1',
                '-g', str(gain),
                '-d', str(device),
            ]

            try:
                proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                # Expose the process so stop_scanner() can terminate it.
                _state.scanner_power_process = proc
                stdout, _ = proc.communicate(timeout=15)
            except subprocess.TimeoutExpired:
                proc.kill()
                stdout = b''
            finally:
                _state.scanner_power_process = None

            if not _state.scanner_running:
                break

            if not stdout:
                add_activity_log('error', start_mhz, 'Power sweep produced no data')
                try:
                    scanner_queue.put_nowait({
                        'type': 'scan_update',
                        'frequency': end_mhz,
                        'level': 0,
                        'threshold': int(float(scanner_config.get('snr_threshold', 12)) * 100),
                        'detected': False,
                        'range_start': scanner_config['start_freq'],
                        'range_end': scanner_config['end_freq']
                    })
                except queue.Full:
                    pass
                time.sleep(0.2)
                continue

            # Parse rtl_power CSV: timestamp fields, then
            # start_hz, end_hz, bin_hz, [samples], dB values...
            lines = stdout.decode(errors='ignore').splitlines()
            segments = []
            for line in lines:
                if not line or line.startswith('#'):
                    continue

                parts = [p.strip() for p in line.split(',')]
                # Find start_hz token (first numeric field large enough to be Hz)
                start_idx = None
                for i, tok in enumerate(parts):
                    try:
                        val = float(tok)
                    except ValueError:
                        continue
                    if val > 1e5:
                        start_idx = i
                        break
                if start_idx is None or len(parts) < start_idx + 6:
                    continue

                try:
                    sweep_start = float(parts[start_idx])
                    sweep_end = float(parts[start_idx + 1])
                    sweep_bin = float(parts[start_idx + 2])
                    raw_values = []
                    for v in parts[start_idx + 3:]:
                        try:
                            raw_values.append(float(v))
                        except ValueError:
                            continue
                    # rtl_power may include a samples field before the power list
                    if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]):
                        raw_values = raw_values[1:]
                    bin_values = raw_values
                except ValueError:
                    continue

                if not bin_values:
                    continue

                segments.append((sweep_start, sweep_end, sweep_bin, bin_values))

            if not segments:
                add_activity_log('error', start_mhz, 'Power sweep bins missing')
                try:
                    scanner_queue.put_nowait({
                        'type': 'scan_update',
                        'frequency': end_mhz,
                        'level': 0,
                        'threshold': int(float(scanner_config.get('snr_threshold', 12)) * 100),
                        'detected': False,
                        'range_start': scanner_config['start_freq'],
                        'range_end': scanner_config['end_freq']
                    })
                except queue.Full:
                    pass
                time.sleep(0.2)
                continue

            # Process segments in ascending frequency order to avoid backtracking in UI
            segments.sort(key=lambda s: s[0])
            total_bins = sum(len(seg[3]) for seg in segments)
            if total_bins <= 0:
                time.sleep(0.2)
                continue
            segment_offset = 0

            for sweep_start, sweep_end, sweep_bin, bin_values in segments:
                # Noise floor (median)
                sorted_vals = sorted(bin_values)
                mid = len(sorted_vals) // 2
                noise_floor = sorted_vals[mid]

                # SNR threshold (dB)
                snr_threshold = float(scanner_config.get('snr_threshold', 12))

                # Emit progress updates (throttled to ~60 events per segment)
                emit_stride = max(1, len(bin_values) // 60)
                for idx, val in enumerate(bin_values):
                    if idx % emit_stride != 0 and idx != len(bin_values) - 1:
                        continue
                    freq_hz = sweep_start + sweep_bin * idx
                    _state.scanner_current_freq = freq_hz / 1e6
                    snr = val - noise_floor
                    # Levels/thresholds scaled x100 so the UI can treat dB as ints.
                    level = int(max(0, snr) * 100)
                    threshold = int(snr_threshold * 100)
                    progress = min(1.0, (segment_offset + idx) / max(1, total_bins - 1))
                    try:
                        scanner_queue.put_nowait({
                            'type': 'scan_update',
                            'frequency': _state.scanner_current_freq,
                            'level': level,
                            'threshold': threshold,
                            'detected': snr >= snr_threshold,
                            'progress': progress,
                            'range_start': scanner_config['start_freq'],
                            'range_end': scanner_config['end_freq']
                        })
                    except queue.Full:
                        pass
                segment_offset += len(bin_values)

                # Detect peaks (clusters above threshold): one peak per
                # contiguous run of bins with snr >= threshold.
                peaks = []
                in_cluster = False
                peak_idx = None
                peak_val = None
                for idx, val in enumerate(bin_values):
                    snr = val - noise_floor
                    if snr >= snr_threshold:
                        if not in_cluster:
                            in_cluster = True
                            peak_idx = idx
                            peak_val = val
                        else:
                            if val > peak_val:
                                peak_val = val
                                peak_idx = idx
                    else:
                        if in_cluster and peak_idx is not None:
                            peaks.append((peak_idx, peak_val))
                        in_cluster = False
                        peak_idx = None
                        peak_val = None
                if in_cluster and peak_idx is not None:
                    peaks.append((peak_idx, peak_val))

                for idx, val in peaks:
                    # +0.5: report the centre of the bin, not its lower edge.
                    freq_hz = sweep_start + sweep_bin * (idx + 0.5)
                    freq_mhz = freq_hz / 1e6
                    snr = val - noise_floor
                    level = int(max(0, snr) * 100)
                    threshold = int(snr_threshold * 100)
                    add_activity_log('signal_found', freq_mhz,
                                     f'Peak detected at {freq_mhz:.3f} MHz ({mod.upper()})')
                    try:
                        scanner_queue.put_nowait({
                            'type': 'signal_found',
                            'frequency': freq_mhz,
                            'modulation': mod,
                            'audio_streaming': False,
                            'level': level,
                            'threshold': threshold,
                            'snr': round(snr, 1),
                            'range_start': scanner_config['start_freq'],
                            'range_end': scanner_config['end_freq']
                        })
                    except queue.Full:
                        pass

            add_activity_log('scan_cycle', start_mhz, 'Power sweep complete')
            time.sleep(max(0.1, scanner_config.get('scan_delay', 0.5)))

    except Exception as e:
        logger.error(f"Power sweep scanner error: {e}")
    finally:
        _state.scanner_running = False
        add_activity_log('scanner_stop', _state.scanner_current_freq, 'Scanner stopped')
        logger.info("Power sweep scanner thread stopped")
# ============================================
# SCANNER API ENDPOINTS
# ============================================

@receiver_bp.route('/scanner/start', methods=['POST'])
def start_scanner() -> Response:
    """Start the frequency scanner.

    Parses JSON config, chooses 'power' (rtl_power sweep) or 'classic'
    (rtl_fm step-and-listen) mode, claims the SDR device, and launches the
    matching scanner thread.  Returns 409 if already running or the device
    is busy, 400 on bad parameters, 503 if required tools are missing.
    """
    # scanner_lock serialises concurrent start requests so only one
    # thread can win the running-check + claim sequence.
    with scanner_lock:
        if _state.scanner_running:
            return jsonify({
                'status': 'error',
                'message': 'Scanner already running'
            }), 409

        # Clear stale queue entries so UI updates immediately
        try:
            while True:
                scanner_queue.get_nowait()
        except queue.Empty:
            pass

        data = request.json or {}

        # Update scanner config
        try:
            scanner_config['start_freq'] = float(data.get('start_freq', 88.0))
            scanner_config['end_freq'] = float(data.get('end_freq', 108.0))
            scanner_config['step'] = float(data.get('step', 0.1))
            scanner_config['modulation'] = normalize_modulation(data.get('modulation', 'wfm'))
            scanner_config['squelch'] = int(data.get('squelch', 0))
            scanner_config['dwell_time'] = float(data.get('dwell_time', 3.0))
            scanner_config['scan_delay'] = float(data.get('scan_delay', 0.5))
            scanner_config['device'] = int(data.get('device', 0))
            scanner_config['gain'] = int(data.get('gain', 40))
            scanner_config['bias_t'] = bool(data.get('bias_t', False))
            scanner_config['sdr_type'] = str(data.get('sdr_type', 'rtlsdr')).lower()
            scanner_config['scan_method'] = str(data.get('scan_method', '')).lower().strip()
            if data.get('snr_threshold') is not None:
                scanner_config['snr_threshold'] = float(data.get('snr_threshold'))
        except (ValueError, TypeError) as e:
            return jsonify({
                'status': 'error',
                'message': f'Invalid parameter: {e}'
            }), 400

        # Validate
        if scanner_config['start_freq'] >= scanner_config['end_freq']:
            return jsonify({
                'status': 'error',
                'message': 'start_freq must be less than end_freq'
            }), 400

        # Decide scan method: prefer power sweep when rtl_power exists.
        if not scanner_config['scan_method']:
            scanner_config['scan_method'] = 'power' if find_rtl_power() else 'classic'

        sdr_type = scanner_config['sdr_type']

        # Power scan only supports RTL-SDR for now
        if scanner_config['scan_method'] == 'power':
            if sdr_type != 'rtlsdr' or not find_rtl_power():
                scanner_config['scan_method'] = 'classic'

        # Check tools based on chosen method
        if scanner_config['scan_method'] == 'power':
            if not find_rtl_power():
                return jsonify({
                    'status': 'error',
                    'message': 'rtl_power not found. Install rtl-sdr tools.'
                }), 503
            # Release listening device if active
            if _state.receiver_active_device is not None:
                app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type)
                _state.receiver_active_device = None
                _state.receiver_active_sdr_type = 'rtlsdr'
            # Claim device for scanner
            error = app_module.claim_sdr_device(scanner_config['device'], 'scanner', scanner_config['sdr_type'])
            if error:
                return jsonify({
                    'status': 'error',
                    'error_type': 'DEVICE_BUSY',
                    'message': error
                }), 409
            _state.scanner_active_device = scanner_config['device']
            _state.scanner_active_sdr_type = scanner_config['sdr_type']
            _state.scanner_running = True
            _state.scanner_thread = threading.Thread(target=scanner_loop_power, daemon=True)
            _state.scanner_thread.start()
        else:
            # Classic mode: rtl_fm for RTL-SDR, rx_fm (SoapySDR) otherwise.
            if sdr_type == 'rtlsdr':
                if not find_rtl_fm():
                    return jsonify({
                        'status': 'error',
                        'message': 'rtl_fm not found. Install rtl-sdr tools.'
                    }), 503
            else:
                if not find_rx_fm():
                    return jsonify({
                        'status': 'error',
                        'message': f'rx_fm not found. Install SoapySDR utilities for {sdr_type}.'
                    }), 503
            # Release listening device if active, then claim for scanner.
            if _state.receiver_active_device is not None:
                app_module.release_sdr_device(_state.receiver_active_device, _state.receiver_active_sdr_type)
                _state.receiver_active_device = None
                _state.receiver_active_sdr_type = 'rtlsdr'
            error = app_module.claim_sdr_device(scanner_config['device'], 'scanner', scanner_config['sdr_type'])
            if error:
                return jsonify({
                    'status': 'error',
                    'error_type': 'DEVICE_BUSY',
                    'message': error
                }), 409
            _state.scanner_active_device = scanner_config['device']
            _state.scanner_active_sdr_type = scanner_config['sdr_type']

            _state.scanner_running = True
            _state.scanner_thread = threading.Thread(target=scanner_loop, daemon=True)
            _state.scanner_thread.start()

        return jsonify({
            'status': 'started',
            'config': scanner_config
        })


@receiver_bp.route('/scanner/stop', methods=['POST'])
def stop_scanner() -> Response:
    """Stop the frequency scanner.

    Clears the run flag (scanner threads poll it), stops any audio
    stream, kills an in-flight rtl_power sweep, and releases the claimed
    SDR device.  Idempotent: safe to call when not running.
    """
    _state.scanner_running = False
    _stop_audio_stream()
    if _state.scanner_power_process and _state.scanner_power_process.poll() is None:
        try:
            _state.scanner_power_process.terminate()
            _state.scanner_power_process.wait(timeout=1)
        except Exception:
            # Fall back to SIGKILL if terminate fails or times out.
            try:
                _state.scanner_power_process.kill()
            except Exception:
                pass
    _state.scanner_power_process = None
    if _state.scanner_active_device is not None:
        app_module.release_sdr_device(_state.scanner_active_device, _state.scanner_active_sdr_type)
        _state.scanner_active_device = None
        _state.scanner_active_sdr_type = 'rtlsdr'

    return jsonify({'status': 'stopped'})
@receiver_bp.route('/scanner/skip', methods=['POST'])
def skip_signal() -> Response:
    """Skip current signal and continue scanning.

    Sets a flag polled by the scanner thread; the thread clears it after
    moving to the next frequency.  Returns 400 if no scan is active.
    """
    if not _state.scanner_running:
        return jsonify({
            'status': 'error',
            'message': 'Scanner not running'
        }), 400

    _state.scanner_skip_signal = True
    add_activity_log('signal_skip', _state.scanner_current_freq, f'Skipped signal at {_state.scanner_current_freq:.3f} MHz')

    return jsonify({
        'status': 'skipped',
        'frequency': _state.scanner_current_freq
    })


@receiver_bp.route('/scanner/config', methods=['POST'])
def update_scanner_config() -> Response:
    """Update scanner config while running (step, squelch, gain, dwell).

    Only keys present in the JSON body are updated; the scanner threads
    re-read scanner_config each iteration, so changes apply live.
    Returns 400 on any unparsable value instead of raising a 500.
    """
    data = request.json or {}

    updated = []

    try:
        if 'step' in data:
            scanner_config['step'] = float(data['step'])
            updated.append(f"step={data['step']}kHz")

        if 'squelch' in data:
            scanner_config['squelch'] = int(data['squelch'])
            updated.append(f"squelch={data['squelch']}")

        if 'gain' in data:
            scanner_config['gain'] = int(data['gain'])
            updated.append(f"gain={data['gain']}")

        if 'dwell_time' in data:
            # float, not int: preserves sub-second dwell values and matches
            # start_scanner's parsing of the same field.
            scanner_config['dwell_time'] = float(data['dwell_time'])
            updated.append(f"dwell={data['dwell_time']}s")
    except (ValueError, TypeError) as e:
        return jsonify({
            'status': 'error',
            'message': f'Invalid parameter: {e}'
        }), 400

    if 'modulation' in data:
        try:
            scanner_config['modulation'] = normalize_modulation(data['modulation'])
            updated.append(f"mod={data['modulation']}")
        except (ValueError, TypeError) as e:
            return jsonify({
                'status': 'error',
                'message': str(e)
            }), 400

    if updated:
        logger.info(f"Scanner config updated: {', '.join(updated)}")

    return jsonify({
        'status': 'updated',
        'config': scanner_config
    })
@receiver_bp.route('/scanner/stream')
def stream_scanner_events() -> Response:
    """SSE stream for scanner events."""
    def _fanout_hook(msg: dict[str, Any]) -> None:
        # Mirror every event into the correlation pipeline.
        process_event('receiver_scanner', msg, msg.get('type'))

    body = sse_stream_fanout(
        source_queue=scanner_queue,
        channel_key='receiver_scanner',
        timeout=SSE_QUEUE_TIMEOUT,
        keepalive_interval=SSE_KEEPALIVE_INTERVAL,
        on_message=_fanout_hook,
    )
    response = Response(body, mimetype='text/event-stream')
    response.headers['Cache-Control'] = 'no-cache'
    response.headers['X-Accel-Buffering'] = 'no'
    return response


@receiver_bp.route('/scanner/log')
def get_activity_log() -> Response:
    """Get activity log."""
    max_entries = request.args.get('limit', 100, type=int)
    with activity_log_lock:
        payload = {
            'log': activity_log[:max_entries],
            'total': len(activity_log),
        }
        return jsonify(payload)


@receiver_bp.route('/scanner/log/clear', methods=['POST'])
def clear_activity_log() -> Response:
    """Clear activity log."""
    with activity_log_lock:
        del activity_log[:]
    return jsonify({'status': 'cleared'})


@receiver_bp.route('/presets')
def get_presets() -> Response:
    """Get scanner presets."""
    # (name, start MHz, end MHz, step MHz, modulation)
    rows = [
        ('FM Broadcast', 88.0, 108.0, 0.2, 'wfm'),
        ('Air Band', 118.0, 137.0, 0.025, 'am'),
        ('Marine VHF', 156.0, 163.0, 0.025, 'fm'),
        ('Amateur 2m', 144.0, 148.0, 0.0125, 'fm'),
        ('Amateur 70cm', 430.0, 440.0, 0.025, 'fm'),
        ('PMR446', 446.0, 446.2, 0.0125, 'fm'),
        ('FRS/GMRS', 462.5, 467.7, 0.025, 'fm'),
        ('Weather Radio', 162.4, 162.55, 0.025, 'fm'),
    ]
    keys = ('name', 'start', 'end', 'step', 'mod')
    return jsonify({'presets': [dict(zip(keys, row)) for row in rows]})
@receiver_bp.route('/tools')
def check_tools() -> Response:
    """Check for required tools."""
    # Probe each external binary once; values are paths or None.
    rtl_fm_path = find_rtl_fm()
    rtl_power_path = find_rtl_power()
    rx_fm_path = find_rx_fm()
    ffmpeg_path = find_ffmpeg()

    # Determine which SDR types are supported
    sdr_types: list = []
    if rtl_fm_path:
        sdr_types.append('rtlsdr')
    if rx_fm_path:
        # rx_fm from SoapySDR supports these types
        sdr_types.extend(['hackrf', 'airspy', 'limesdr', 'sdrplay'])

    has_demod = rtl_fm_path is not None or rx_fm_path is not None
    return jsonify({
        'rtl_fm': rtl_fm_path is not None,
        'rtl_power': rtl_power_path is not None,
        'rx_fm': rx_fm_path is not None,
        'ffmpeg': ffmpeg_path is not None,
        'available': has_demod and ffmpeg_path is not None,
        'supported_sdr_types': sdr_types
    })
@receiver_bp.route('/signal/guess', methods=['POST'])
def guess_signal() -> Response:
    """Identify a signal based on frequency, modulation, and other parameters.

    Expects JSON with frequency_mhz (required, positive finite number) and
    optional modulation, bandwidth_hz and region hints.  Delegates to
    utils.signal_guess.  Returns 400 on bad input, 500 on guesser errors.
    """
    import math  # local: tools module has no other use for math

    data = request.json or {}

    freq_mhz = data.get('frequency_mhz')
    if freq_mhz is None:
        return jsonify({'status': 'error', 'message': 'frequency_mhz is required'}), 400

    try:
        freq_mhz = float(freq_mhz)
    except (ValueError, TypeError):
        return jsonify({'status': 'error', 'message': 'Invalid frequency_mhz'}), 400

    # NaN/inf pass a plain <= 0 check but would crash int() below, so
    # reject non-finite values explicitly.
    if not math.isfinite(freq_mhz):
        return jsonify({'status': 'error', 'message': 'Invalid frequency_mhz'}), 400

    if freq_mhz <= 0:
        return jsonify({'status': 'error', 'message': 'frequency_mhz must be positive'}), 400

    frequency_hz = int(freq_mhz * 1e6)

    modulation = data.get('modulation')
    bandwidth_hz = data.get('bandwidth_hz')
    if bandwidth_hz is not None:
        try:
            bandwidth_hz = int(bandwidth_hz)
        except (ValueError, TypeError):
            # Bandwidth is only a hint - drop it rather than fail the request.
            bandwidth_hz = None

    region = data.get('region', 'UK/EU')

    try:
        from utils.signal_guess import guess_signal_type_dict
        result = guess_signal_type_dict(
            frequency_hz=frequency_hz,
            modulation=modulation,
            bandwidth_hz=bandwidth_hz,
            region=region,
        )
        return jsonify({'status': 'ok', **result})
    except Exception as e:
        logger.error(f"Signal guess error: {e}")
        return jsonify({'status': 'error', 'message': str(e)}), 500
import ( + receiver_bp, + logger, + app_module, + _stop_waterfall_internal, + process_event, + sse_stream_fanout, + SSE_QUEUE_TIMEOUT, + SSE_KEEPALIVE_INTERVAL, + find_rtl_power, + SDRFactory, + SDRType, +) +import routes.listening_post as _state + + +# ============================================ +# WATERFALL HELPER FUNCTIONS +# ============================================ + +def _parse_rtl_power_line(line: str) -> tuple[str | None, float | None, float | None, list[float]]: + """Parse a single rtl_power CSV line into bins.""" + if not line or line.startswith('#'): + return None, None, None, [] + + parts = [p.strip() for p in line.split(',')] + if len(parts) < 6: + return None, None, None, [] + + # Timestamp in first two fields (YYYY-MM-DD, HH:MM:SS) + timestamp = f"{parts[0]} {parts[1]}" if len(parts) >= 2 else parts[0] + + start_idx = None + for i, tok in enumerate(parts): + try: + val = float(tok) + except ValueError: + continue + if val > 1e5: + start_idx = i + break + if start_idx is None or len(parts) < start_idx + 4: + return timestamp, None, None, [] + + try: + seg_start = float(parts[start_idx]) + seg_end = float(parts[start_idx + 1]) + raw_values = [] + for v in parts[start_idx + 3:]: + try: + raw_values.append(float(v)) + except ValueError: + continue + if raw_values and raw_values[0] >= 0 and any(val < 0 for val in raw_values[1:]): + raw_values = raw_values[1:] + return timestamp, seg_start, seg_end, raw_values + except ValueError: + return timestamp, None, None, [] + + +def _queue_waterfall_error(message: str) -> None: + """Push an error message onto the waterfall SSE queue.""" + try: + _state.waterfall_queue.put_nowait({ + 'type': 'waterfall_error', + 'message': message, + 'timestamp': datetime.now().isoformat(), + }) + except queue.Full: + pass + + +def _downsample_bins(values: list[float], target: int) -> list[float]: + """Downsample bins to a target length using simple averaging.""" + if target <= 0 or len(values) <= target: + return values + + 
out: list[float] = [] + step = len(values) / target + for i in range(target): + start = int(i * step) + end = int((i + 1) * step) + if end <= start: + end = min(start + 1, len(values)) + chunk = values[start:end] + if not chunk: + continue + out.append(sum(chunk) / len(chunk)) + return out + + +# ============================================ +# WATERFALL LOOP IMPLEMENTATIONS +# ============================================ + +def _waterfall_loop(): + """Continuous waterfall sweep loop emitting FFT data.""" + sdr_type_str = _state.waterfall_config.get('sdr_type', 'rtlsdr') + try: + sdr_type = SDRType(sdr_type_str) + except ValueError: + sdr_type = SDRType.RTL_SDR + + if sdr_type == SDRType.RTL_SDR: + _waterfall_loop_rtl_power() + else: + _waterfall_loop_iq(sdr_type) + + +def _waterfall_loop_iq(sdr_type: SDRType): + """Waterfall loop using rx_sdr IQ capture + FFT for HackRF/SoapySDR devices.""" + start_freq = _state.waterfall_config['start_freq'] + end_freq = _state.waterfall_config['end_freq'] + gain = _state.waterfall_config['gain'] + device = _state.waterfall_config['device'] + interval = float(_state.waterfall_config.get('interval', 0.4)) + + # Use center frequency and sample rate to cover the requested span + center_mhz = (start_freq + end_freq) / 2.0 + span_hz = (end_freq - start_freq) * 1e6 + # Pick a sample rate that covers the span (minimum 2 MHz for HackRF) + sample_rate = max(2000000, int(span_hz)) + # Cap to sensible maximum + sample_rate = min(sample_rate, 20000000) + + sdr_device = SDRFactory.create_default_device(sdr_type, index=device) + builder = SDRFactory.get_builder(sdr_type) + + cmd = builder.build_iq_capture_command( + device=sdr_device, + frequency_mhz=center_mhz, + sample_rate=sample_rate, + gain=float(gain), + ) + + fft_size = min(int(_state.waterfall_config.get('max_bins') or 1024), 4096) + + try: + _state.waterfall_process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + # Detect immediate startup 
failures + time.sleep(0.35) + if _state.waterfall_process.poll() is not None: + stderr_text = '' + try: + if _state.waterfall_process.stderr: + stderr_text = _state.waterfall_process.stderr.read().decode('utf-8', errors='ignore').strip() + except Exception: + stderr_text = '' + msg = stderr_text or f'IQ capture exited early (code {_state.waterfall_process.returncode})' + logger.error(f"Waterfall startup failed: {msg}") + _queue_waterfall_error(msg) + return + + if not _state.waterfall_process.stdout: + _queue_waterfall_error('IQ capture stdout unavailable') + return + + # Read IQ samples and compute FFT + # CU8 format: interleaved unsigned 8-bit I/Q pairs + bytes_per_sample = 2 # 1 byte I + 1 byte Q + chunk_bytes = fft_size * bytes_per_sample + received_any = False + + while _state.waterfall_running: + raw = _state.waterfall_process.stdout.read(chunk_bytes) + if not raw or len(raw) < chunk_bytes: + if _state.waterfall_process.poll() is not None: + break + continue + + received_any = True + + # Convert CU8 to complex float: center at 127.5 + iq = struct.unpack(f'{fft_size * 2}B', raw) + # Compute power spectrum via FFT + real_parts = [(iq[i * 2] - 127.5) / 127.5 for i in range(fft_size)] + imag_parts = [(iq[i * 2 + 1] - 127.5) / 127.5 for i in range(fft_size)] + + bins: list[float] = [] + try: + # Try numpy if available for efficient FFT + import numpy as np + samples = np.array(real_parts, dtype=np.float32) + 1j * np.array(imag_parts, dtype=np.float32) + # Apply Hann window + window = np.hanning(fft_size) + samples *= window + spectrum = np.fft.fftshift(np.fft.fft(samples)) + power_db = 10.0 * np.log10(np.abs(spectrum) ** 2 + 1e-10) + bins = power_db.tolist() + except ImportError: + # Fallback: compute magnitude without full FFT + # Just report raw magnitudes per sample as approximate power + for i in range(fft_size): + mag = math.sqrt(real_parts[i] ** 2 + imag_parts[i] ** 2) + power = 10.0 * math.log10(mag ** 2 + 1e-10) + bins.append(power) + + max_bins = 
int(_state.waterfall_config.get('max_bins') or 0) + if max_bins > 0 and len(bins) > max_bins: + bins = _downsample_bins(bins, max_bins) + + msg = { + 'type': 'waterfall_sweep', + 'start_freq': start_freq, + 'end_freq': end_freq, + 'bins': bins, + 'timestamp': datetime.now().isoformat(), + } + try: + _state.waterfall_queue.put_nowait(msg) + except queue.Full: + try: + _state.waterfall_queue.get_nowait() + except queue.Empty: + pass + try: + _state.waterfall_queue.put_nowait(msg) + except queue.Full: + pass + + # Throttle to respect interval + time.sleep(interval) + + if _state.waterfall_running and not received_any: + _queue_waterfall_error(f'No IQ data received from {sdr_type.value}') + + except Exception as e: + logger.error(f"Waterfall IQ loop error: {e}") + _queue_waterfall_error(f"Waterfall loop error: {e}") + finally: + _state.waterfall_running = False + if _state.waterfall_process and _state.waterfall_process.poll() is None: + try: + _state.waterfall_process.terminate() + _state.waterfall_process.wait(timeout=1) + except Exception: + try: + _state.waterfall_process.kill() + except Exception: + pass + _state.waterfall_process = None + logger.info("Waterfall IQ loop stopped") + + +def _waterfall_loop_rtl_power(): + """Continuous rtl_power sweep loop emitting waterfall data.""" + rtl_power_path = find_rtl_power() + if not rtl_power_path: + logger.error("rtl_power not found for waterfall") + _queue_waterfall_error('rtl_power not found') + _state.waterfall_running = False + return + + start_hz = int(_state.waterfall_config['start_freq'] * 1e6) + end_hz = int(_state.waterfall_config['end_freq'] * 1e6) + bin_hz = int(_state.waterfall_config['bin_size']) + gain = _state.waterfall_config['gain'] + device = _state.waterfall_config['device'] + interval = float(_state.waterfall_config.get('interval', 0.4)) + + cmd = [ + rtl_power_path, + '-f', f'{start_hz}:{end_hz}:{bin_hz}', + '-i', str(interval), + '-g', str(gain), + '-d', str(device), + ] + + try: + 
_state.waterfall_process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1, + text=True, + ) + + # Detect immediate startup failures (e.g. device busy / no device). + time.sleep(0.35) + if _state.waterfall_process.poll() is not None: + stderr_text = '' + try: + if _state.waterfall_process.stderr: + stderr_text = _state.waterfall_process.stderr.read().strip() + except Exception: + stderr_text = '' + msg = stderr_text or f'rtl_power exited early (code {_state.waterfall_process.returncode})' + logger.error(f"Waterfall startup failed: {msg}") + _queue_waterfall_error(msg) + return + + current_ts = None + all_bins: list[float] = [] + sweep_start_hz = start_hz + sweep_end_hz = end_hz + received_any = False + + if not _state.waterfall_process.stdout: + _queue_waterfall_error('rtl_power stdout unavailable') + return + + for line in _state.waterfall_process.stdout: + if not _state.waterfall_running: + break + + ts, seg_start, seg_end, bins = _parse_rtl_power_line(line) + if ts is None or not bins: + continue + received_any = True + + if current_ts is None: + current_ts = ts + + if ts != current_ts and all_bins: + max_bins = int(_state.waterfall_config.get('max_bins') or 0) + bins_to_send = all_bins + if max_bins > 0 and len(bins_to_send) > max_bins: + bins_to_send = _downsample_bins(bins_to_send, max_bins) + msg = { + 'type': 'waterfall_sweep', + 'start_freq': sweep_start_hz / 1e6, + 'end_freq': sweep_end_hz / 1e6, + 'bins': bins_to_send, + 'timestamp': datetime.now().isoformat(), + } + try: + _state.waterfall_queue.put_nowait(msg) + except queue.Full: + try: + _state.waterfall_queue.get_nowait() + except queue.Empty: + pass + try: + _state.waterfall_queue.put_nowait(msg) + except queue.Full: + pass + + all_bins = [] + sweep_start_hz = start_hz + sweep_end_hz = end_hz + current_ts = ts + + all_bins.extend(bins) + if seg_start is not None: + sweep_start_hz = min(sweep_start_hz, seg_start) + if seg_end is not None: + sweep_end_hz = 
max(sweep_end_hz, seg_end) + + # Flush any remaining bins + if all_bins and _state.waterfall_running: + max_bins = int(_state.waterfall_config.get('max_bins') or 0) + bins_to_send = all_bins + if max_bins > 0 and len(bins_to_send) > max_bins: + bins_to_send = _downsample_bins(bins_to_send, max_bins) + msg = { + 'type': 'waterfall_sweep', + 'start_freq': sweep_start_hz / 1e6, + 'end_freq': sweep_end_hz / 1e6, + 'bins': bins_to_send, + 'timestamp': datetime.now().isoformat(), + } + try: + _state.waterfall_queue.put_nowait(msg) + except queue.Full: + pass + + if _state.waterfall_running and not received_any: + _queue_waterfall_error('No waterfall FFT data received from rtl_power') + + except Exception as e: + logger.error(f"Waterfall loop error: {e}") + _queue_waterfall_error(f"Waterfall loop error: {e}") + finally: + _state.waterfall_running = False + if _state.waterfall_process and _state.waterfall_process.poll() is None: + try: + _state.waterfall_process.terminate() + _state.waterfall_process.wait(timeout=1) + except Exception: + try: + _state.waterfall_process.kill() + except Exception: + pass + _state.waterfall_process = None + logger.info("Waterfall loop stopped") + + +# ============================================ +# WATERFALL API ENDPOINTS +# ============================================ + +@receiver_bp.route('/waterfall/start', methods=['POST']) +def start_waterfall() -> Response: + """Start the waterfall/spectrogram display.""" + with _state.waterfall_lock: + if _state.waterfall_running: + return jsonify({ + 'status': 'started', + 'already_running': True, + 'message': 'Waterfall already running', + 'config': _state.waterfall_config, + }) + + data = request.json or {} + + # Determine SDR type + sdr_type_str = data.get('sdr_type', 'rtlsdr') + try: + sdr_type = SDRType(sdr_type_str) + except ValueError: + sdr_type = SDRType.RTL_SDR + sdr_type_str = sdr_type.value + + # RTL-SDR uses rtl_power; other types use rx_sdr via IQ capture + if sdr_type == 
SDRType.RTL_SDR: + if not find_rtl_power(): + return jsonify({'status': 'error', 'message': 'rtl_power not found'}), 503 + + try: + _state.waterfall_config['start_freq'] = float(data.get('start_freq', 88.0)) + _state.waterfall_config['end_freq'] = float(data.get('end_freq', 108.0)) + _state.waterfall_config['bin_size'] = int(data.get('bin_size', 10000)) + _state.waterfall_config['gain'] = int(data.get('gain', 40)) + _state.waterfall_config['device'] = int(data.get('device', 0)) + _state.waterfall_config['sdr_type'] = sdr_type_str + if data.get('interval') is not None: + interval = float(data.get('interval', _state.waterfall_config['interval'])) + if interval < 0.1 or interval > 5: + return jsonify({'status': 'error', 'message': 'interval must be between 0.1 and 5 seconds'}), 400 + _state.waterfall_config['interval'] = interval + if data.get('max_bins') is not None: + max_bins = int(data.get('max_bins', _state.waterfall_config['max_bins'])) + if max_bins < 64 or max_bins > 4096: + return jsonify({'status': 'error', 'message': 'max_bins must be between 64 and 4096'}), 400 + _state.waterfall_config['max_bins'] = max_bins + except (ValueError, TypeError) as e: + return jsonify({'status': 'error', 'message': f'Invalid parameter: {e}'}), 400 + + if _state.waterfall_config['start_freq'] >= _state.waterfall_config['end_freq']: + return jsonify({'status': 'error', 'message': 'start_freq must be less than end_freq'}), 400 + + # Clear stale queue + try: + while True: + _state.waterfall_queue.get_nowait() + except queue.Empty: + pass + + # Claim SDR device + error = app_module.claim_sdr_device(_state.waterfall_config['device'], 'waterfall', sdr_type_str) + if error: + return jsonify({'status': 'error', 'error_type': 'DEVICE_BUSY', 'message': error}), 409 + + _state.waterfall_active_device = _state.waterfall_config['device'] + _state.waterfall_active_sdr_type = sdr_type_str + _state.waterfall_running = True + _state.waterfall_thread = threading.Thread(target=_waterfall_loop, 
daemon=True) + _state.waterfall_thread.start() + + return jsonify({'status': 'started', 'config': _state.waterfall_config}) + + +@receiver_bp.route('/waterfall/stop', methods=['POST']) +def stop_waterfall() -> Response: + """Stop the waterfall display.""" + _stop_waterfall_internal() + + return jsonify({'status': 'stopped'}) + + +@receiver_bp.route('/waterfall/stream') +def stream_waterfall() -> Response: + """SSE stream for waterfall data.""" + def _on_msg(msg: dict[str, Any]) -> None: + process_event('waterfall', msg, msg.get('type')) + + response = Response( + sse_stream_fanout( + source_queue=_state.waterfall_queue, + channel_key='receiver_waterfall', + timeout=SSE_QUEUE_TIMEOUT, + keepalive_interval=SSE_KEEPALIVE_INTERVAL, + on_message=_on_msg, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + return response diff --git a/routes/meshtastic.py b/routes/meshtastic.py index 48f958b..79477ab 100644 --- a/routes/meshtastic.py +++ b/routes/meshtastic.py @@ -16,8 +16,9 @@ from typing import Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error from utils.logging import get_logger -from utils.sse import sse_stream_fanout +from utils.sse import sse_stream_fanout from utils.meshtastic import ( get_meshtastic_client, start_meshtastic, @@ -453,8 +454,8 @@ def get_messages(): }) -@meshtastic_bp.route('/stream') -def stream_messages(): +@meshtastic_bp.route('/stream') +def stream_messages(): """ SSE stream of Meshtastic messages. 
@@ -469,18 +470,18 @@ def stream_messages(): Returns: SSE stream (text/event-stream) """ - response = Response( - sse_stream_fanout( - source_queue=_mesh_queue, - channel_key='meshtastic', - timeout=1.0, - keepalive_interval=30.0, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' + response = Response( + sse_stream_fanout( + source_queue=_mesh_queue, + channel_key='meshtastic', + timeout=1.0, + keepalive_interval=30.0, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' return response @@ -1050,11 +1051,11 @@ def request_store_forward(): def mesh_topology(): """Return mesh network topology graph.""" if not is_meshtastic_available(): - return jsonify({'status': 'error', 'message': 'Meshtastic SDK not installed'}), 400 + return api_error('Meshtastic SDK not installed', 400) client = get_meshtastic_client() if not client or not client.is_running: - return jsonify({'status': 'error', 'message': 'Not connected'}), 400 + return api_error('Not connected', 400) return jsonify({ 'status': 'success', diff --git a/routes/meteor_websocket.py b/routes/meteor_websocket.py index 2f75466..c13ccb7 100644 --- a/routes/meteor_websocket.py +++ b/routes/meteor_websocket.py @@ -20,6 +20,8 @@ from typing import Any from flask import Blueprint, Flask, Response, jsonify, request +from utils.responses import api_success, api_error + try: from flask_sock import Sock WEBSOCKET_AVAILABLE = True @@ -170,7 +172,7 @@ def meteor_events_export(): """Export events as CSV or JSON.""" detector = _detector if not detector: - return jsonify({'error': 'No active session'}), 400 + return api_error('No active session', 400) fmt = request.args.get('format', 'json').lower() if fmt == 'csv': diff --git a/routes/morse.py b/routes/morse.py index 
5da43c9..a84b296 100644 --- a/routes/morse.py +++ b/routes/morse.py @@ -13,6 +13,7 @@ from typing import Any from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error import app as app_module from utils.event_pipeline import process_event from utils.logging import sensor_logger as logger @@ -252,7 +253,7 @@ def start_morse() -> Response: try: detect_mode = _validate_detect_mode(data.get('detect_mode', 'goertzel')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) freq_max = 1766.0 if detect_mode == 'envelope' else 30.0 try: @@ -261,7 +262,7 @@ def start_morse() -> Response: ppm = validate_ppm(data.get('ppm', '0')) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) try: tone_freq = _validate_tone_freq(data.get('tone_freq', '700')) @@ -277,7 +278,7 @@ def start_morse() -> Response: tone_lock = _bool_value(data.get('tone_lock', False), False) wpm_lock = _bool_value(data.get('wpm_lock', False), False) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -335,7 +336,7 @@ def start_morse() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) network_sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f"Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}") @@ -696,7 +697,7 @@ def start_morse() -> Response: morse_last_error = msg _set_state(MORSE_ERROR, msg) _set_state(MORSE_IDLE, 'Idle') - return jsonify({'status': 'error', 'message': msg}), 500 + return api_error(msg, 500) with app_module.morse_lock: 
app_module.morse_process = active_rtl_process @@ -740,7 +741,7 @@ def start_morse() -> Response: morse_last_error = f'Tool not found: {e.filename}' _set_state(MORSE_ERROR, morse_last_error) _set_state(MORSE_IDLE, 'Idle') - return jsonify({'status': 'error', 'message': morse_last_error}), 400 + return api_error(morse_last_error, 400) except Exception as e: _cleanup_attempt( @@ -758,7 +759,7 @@ def start_morse() -> Response: morse_last_error = str(e) _set_state(MORSE_ERROR, morse_last_error) _set_state(MORSE_IDLE, 'Idle') - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @morse_bp.route('/morse/stop', methods=['POST']) @@ -908,11 +909,11 @@ def calibrate_morse() -> Response: def decode_morse_file() -> Response: """Decode Morse from an uploaded WAV file.""" if 'audio' not in request.files: - return jsonify({'status': 'error', 'message': 'No audio file provided'}), 400 + return api_error('No audio file provided', 400) audio_file = request.files['audio'] if not audio_file.filename: - return jsonify({'status': 'error', 'message': 'No file selected'}), 400 + return api_error('No file selected', 400) # Parse optional tuning/decoder parameters from form fields. 
form = request.form or {} @@ -930,7 +931,7 @@ def decode_morse_file() -> Response: tone_lock = _bool_value(form.get('tone_lock', 'false'), False) wpm_lock = _bool_value(form.get('wpm_lock', 'false'), False) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as tmp: audio_file.save(tmp.name) @@ -968,7 +969,7 @@ def decode_morse_file() -> Response: }) except Exception as e: logger.error(f'Morse decode-file error: {e}') - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) finally: with contextlib.suppress(Exception): tmp_path.unlink(missing_ok=True) diff --git a/routes/offline.py b/routes/offline.py index 51326f7..6e8c4c0 100644 --- a/routes/offline.py +++ b/routes/offline.py @@ -4,19 +4,20 @@ Offline mode routes - Asset management and settings for offline operation. from flask import Blueprint, jsonify, request from utils.database import get_setting, set_setting +from utils.responses import api_success, api_error import os offline_bp = Blueprint('offline', __name__, url_prefix='/offline') # Default offline settings -OFFLINE_DEFAULTS = { - 'offline.enabled': False, - # Default to bundled assets/fonts to avoid third-party CDN privacy blocks. - 'offline.assets_source': 'local', - 'offline.fonts_source': 'local', - 'offline.tile_provider': 'cartodb_dark_cyan', - 'offline.tile_server_url': '' -} +OFFLINE_DEFAULTS = { + 'offline.enabled': False, + # Default to bundled assets/fonts to avoid third-party CDN privacy blocks. 
+ 'offline.assets_source': 'local', + 'offline.fonts_source': 'local', + 'offline.tile_provider': 'cartodb_dark_cyan', + 'offline.tile_server_url': '' +} # Asset paths to check ASSET_PATHS = { @@ -64,10 +65,7 @@ def get_offline_settings(): def get_settings(): """Get current offline settings.""" settings = get_offline_settings() - return jsonify({ - 'status': 'success', - 'settings': settings - }) + return api_success(data={'settings': settings}) @offline_bp.route('/settings', methods=['POST']) @@ -75,14 +73,14 @@ def save_setting(): """Save an offline setting.""" data = request.get_json() if not data or 'key' not in data or 'value' not in data: - return jsonify({'status': 'error', 'message': 'Missing key or value'}), 400 + return api_error('Missing key or value', 400) key = data['key'] value = data['value'] # Validate key is an allowed setting if key not in OFFLINE_DEFAULTS: - return jsonify({'status': 'error', 'message': f'Unknown setting: {key}'}), 400 + return api_error(f'Unknown setting: {key}', 400) # Validate value type matches default default_type = type(OFFLINE_DEFAULTS[key]) @@ -94,18 +92,11 @@ def save_setting(): else: value = default_type(value) except (ValueError, TypeError): - return jsonify({ - 'status': 'error', - 'message': f'Invalid value type for {key}' - }), 400 + return api_error(f'Invalid value type for {key}', 400) set_setting(key, value) - return jsonify({ - 'status': 'success', - 'key': key, - 'value': value - }) + return api_success(data={'key': key, 'value': value}) @offline_bp.route('/status', methods=['GET']) @@ -134,8 +125,7 @@ def get_status(): if not available: all_available = False - return jsonify({ - 'status': 'success', + return api_success(data={ 'all_available': all_available, 'assets': results, 'offline_enabled': get_setting('offline.enabled', False) @@ -147,11 +137,11 @@ def check_asset(): """Check if a specific asset file exists.""" path = request.args.get('path', '') if not path: - return jsonify({'status': 'error', 
'message': 'Missing path parameter'}), 400 + return api_error('Missing path parameter', 400) # Security: only allow checking within static/vendor if not path.startswith('/static/vendor/'): - return jsonify({'status': 'error', 'message': 'Invalid path'}), 400 + return api_error('Invalid path', 400) # Remove leading slash and construct full path relative_path = path.lstrip('/') @@ -160,8 +150,4 @@ def check_asset(): exists = os.path.exists(full_path) - return jsonify({ - 'status': 'success', - 'path': path, - 'exists': exists - }) + return api_success(data={'path': path, 'exists': exists}) diff --git a/routes/ook.py b/routes/ook.py index 5934417..79ca5a1 100644 --- a/routes/ook.py +++ b/routes/ook.py @@ -19,6 +19,7 @@ from flask import Blueprint, Response, jsonify, request import app as app_module from utils.event_pipeline import process_event +from utils.responses import api_success, api_error from utils.logging import sensor_logger as logger from utils.ook import ook_parser_thread from utils.process import register_process, safe_terminate, unregister_process @@ -69,7 +70,7 @@ def start_ook() -> Response: if app_module.ook_process.poll() is not None: cleanup_ook(emit_status=False) else: - return jsonify({'status': 'error', 'message': 'OOK decoder already running'}), 409 + return api_error('OOK decoder already running', 409) data = request.json or {} @@ -79,12 +80,12 @@ def start_ook() -> Response: ppm = validate_ppm(data.get('ppm', '0')) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) try: encoding = _validate_encoding(data.get('encoding', 'pwm')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # OOK flex decoder timing parameters (server-side range validation) try: @@ -95,11 +96,11 @@ def start_ook() -> Response: tolerance = validate_positive_int(data.get('tolerance', 
150), 'tolerance', max_val=50000) min_bits = validate_positive_int(data.get('min_bits', 8), 'min_bits', max_val=4096) except ValueError as e: - return jsonify({'status': 'error', 'message': f'Invalid timing parameter: {e}'}), 400 + return api_error(f'Invalid timing parameter: {e}', 400) if min_bits < 1: - return jsonify({'status': 'error', 'message': 'min_bits must be >= 1'}), 400 + return api_error('min_bits must be >= 1', 400) if short_pulse < 1 or long_pulse < 1: - return jsonify({'status': 'error', 'message': 'Pulse widths must be >= 1'}), 400 + return api_error('Pulse widths must be >= 1', 400) deduplicate = bool(data.get('deduplicate', False)) # Parse SDR type early — needed for device claim @@ -117,11 +118,7 @@ def start_ook() -> Response: device_int = int(device) error = app_module.claim_sdr_device(device_int, 'ook', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error, - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') ook_active_device = device_int ook_active_sdr_type = sdr_type_str @@ -136,7 +133,7 @@ def start_ook() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f'Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}') else: @@ -237,7 +234,7 @@ def start_ook() -> Response: app_module.release_sdr_device(ook_active_device, ook_active_sdr_type or 'rtlsdr') ook_active_device = None ook_active_sdr_type = None - return jsonify({'status': 'error', 'message': f'Tool not found: {e.filename}'}), 400 + return api_error(f'Tool not found: {e.filename}', 400) except Exception as e: try: @@ -251,7 +248,7 @@ def start_ook() -> Response: app_module.release_sdr_device(ook_active_device, ook_active_sdr_type or 'rtlsdr') 
ook_active_device = None ook_active_sdr_type = None - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) def _close_pipe(pipe_obj) -> None: diff --git a/routes/pager.py b/routes/pager.py index 3c53d7a..991a951 100644 --- a/routes/pager.py +++ b/routes/pager.py @@ -18,6 +18,7 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.logging import pager_logger as logger from utils.validation import ( @@ -275,7 +276,7 @@ def start_decoding() -> Response: with app_module.process_lock: if app_module.current_process: - return jsonify({'status': 'error', 'message': 'Already running'}), 409 + return api_error('Already running', 409) data = request.json or {} @@ -286,7 +287,7 @@ def start_decoding() -> Response: ppm = validate_ppm(data.get('ppm', '0')) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) squelch = data.get('squelch', '0') try: @@ -294,7 +295,7 @@ def start_decoding() -> Response: if not 0 <= squelch <= 1000: raise ValueError("Squelch must be between 0 and 1000") except (ValueError, TypeError): - return jsonify({'status': 'error', 'message': 'Invalid squelch value'}), 400 + return api_error('Invalid squelch value', 400) # Check for rtl_tcp (remote SDR) connection rtl_tcp_host = data.get('rtl_tcp_host') @@ -308,11 +309,7 @@ def start_decoding() -> Response: device_int = int(device) error = app_module.claim_sdr_device(device_int, 'pager', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') pager_active_device = device_int pager_active_sdr_type = sdr_type_str @@ -324,7 +321,7 @@ def start_decoding() -> Response: 
app_module.release_sdr_device(pager_active_device, pager_active_sdr_type or 'rtlsdr') pager_active_device = None pager_active_sdr_type = None - return jsonify({'status': 'error', 'message': 'Protocols must be a list'}), 400 + return api_error('Protocols must be a list', 400) protocols = [p for p in protocols if p in valid_protocols] if not protocols: protocols = valid_protocols @@ -360,7 +357,7 @@ def start_decoding() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f"Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}") @@ -385,7 +382,7 @@ def start_decoding() -> Response: multimon_path = get_tool_path('multimon-ng') if not multimon_path: - return jsonify({'status': 'error', 'message': 'multimon-ng not found'}), 400 + return api_error('multimon-ng not found', 400) multimon_cmd = [multimon_path, '-t', 'raw'] + decoders + ['-f', 'alpha', '-'] full_cmd = ' '.join(rtl_cmd) + ' | ' + ' '.join(multimon_cmd) @@ -466,7 +463,7 @@ def start_decoding() -> Response: app_module.release_sdr_device(pager_active_device, pager_active_sdr_type or 'rtlsdr') pager_active_device = None pager_active_sdr_type = None - return jsonify({'status': 'error', 'message': f'Tool not found: {e.filename}'}) + return api_error(f'Tool not found: {e.filename}') except Exception as e: # Kill orphaned rtl_fm process if it was started try: @@ -482,7 +479,7 @@ def start_decoding() -> Response: app_module.release_sdr_device(pager_active_device, pager_active_sdr_type or 'rtlsdr') pager_active_device = None pager_active_sdr_type = None - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @pager_bp.route('/stop', methods=['POST']) @@ -562,16 +559,16 @@ def toggle_logging() -> Response: is_in_logs = 
str(requested_path).startswith(str(logs_dir)) if not (is_in_cwd or is_in_logs): - return jsonify({'status': 'error', 'message': 'Invalid log file path'}), 400 + return api_error('Invalid log file path', 400) # Ensure it's not a directory if requested_path.is_dir(): - return jsonify({'status': 'error', 'message': 'Log file path must be a file, not a directory'}), 400 + return api_error('Log file path must be a file, not a directory', 400) app_module.log_file_path = str(requested_path) except (ValueError, OSError) as e: logger.warning(f"Invalid log file path: {e}") - return jsonify({'status': 'error', 'message': 'Invalid log file path'}), 400 + return api_error('Invalid log file path', 400) return jsonify({'logging': app_module.logging_enabled, 'log_file': app_module.log_file_path}) diff --git a/routes/radiosonde.py b/routes/radiosonde.py index 4c392ad..d09808c 100644 --- a/routes/radiosonde.py +++ b/routes/radiosonde.py @@ -20,6 +20,7 @@ from typing import Any from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error import app as app_module from utils.constants import ( MAX_RADIOSONDE_AGE_SECONDS, @@ -479,10 +480,7 @@ def start_radiosonde(): with app_module.radiosonde_lock: if radiosonde_running: - return jsonify({ - 'status': 'already_running', - 'message': 'Radiosonde tracking already active', - }), 409 + return api_error('Radiosonde tracking already active', 409) data = request.json or {} @@ -491,7 +489,7 @@ def start_radiosonde(): gain = float(validate_gain(data.get('gain', '40'))) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) freq_min = data.get('freq_min', 400.0) freq_max = data.get('freq_max', 406.0) @@ -503,7 +501,7 @@ def start_radiosonde(): if freq_min >= freq_max: raise ValueError("Min frequency must be less than max") except (ValueError, TypeError) as e: - return 
jsonify({'status': 'error', 'message': f'Invalid frequency range: {e}'}), 400 + return api_error(f'Invalid frequency range: {e}', 400) bias_t = data.get('bias_t', False) ppm = int(data.get('ppm', 0)) @@ -525,10 +523,7 @@ def start_radiosonde(): # Find auto_rx auto_rx_path = find_auto_rx() if not auto_rx_path: - return jsonify({ - 'status': 'error', - 'message': 'radiosonde_auto_rx not found. Install from https://github.com/projecthorus/radiosonde_auto_rx', - }), 400 + return api_error('radiosonde_auto_rx not found. Install from https://github.com/projecthorus/radiosonde_auto_rx', 400) # Get SDR type sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -552,11 +547,7 @@ def start_radiosonde(): device_int = int(device) error = app_module.claim_sdr_device(device_int, 'radiosonde', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error, - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') # Generate config try: @@ -574,7 +565,7 @@ def start_radiosonde(): except (OSError, RuntimeError) as e: app_module.release_sdr_device(device_int, sdr_type_str) logger.error(f"Failed to generate radiosonde config: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) # Build command - auto_rx -c expects the path to station.cfg cfg_abs = os.path.abspath(cfg_path) @@ -598,13 +589,11 @@ def start_radiosonde(): dep_error = dep_check.stderr.decode('utf-8', errors='ignore').strip() logger.error(f"radiosonde_auto_rx dependency check failed:\n{dep_error}") app_module.release_sdr_device(device_int, sdr_type_str) - return jsonify({ - 'status': 'error', - 'message': ( - 'radiosonde_auto_rx dependencies not satisfied. ' - f'Re-run setup.sh to install. Error: {dep_error[:500]}' - ), - }), 500 + return api_error( + 'radiosonde_auto_rx dependencies not satisfied. ' + f'Re-run setup.sh to install. 
Error: {dep_error[:500]}', + 500, + ) try: logger.info(f"Starting radiosonde_auto_rx: {' '.join(cmd)}") @@ -646,7 +635,7 @@ def start_radiosonde(): ) if stderr_output: error_msg += f' Error: {stderr_output[:500]}' - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) radiosonde_running = True radiosonde_active_device = device_int @@ -672,7 +661,7 @@ def start_radiosonde(): except Exception as e: app_module.release_sdr_device(device_int, sdr_type_str) logger.error(f"Failed to start radiosonde_auto_rx: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @radiosonde_bp.route('/stop', methods=['POST']) @@ -741,8 +730,7 @@ def stream_radiosonde(): def get_balloons(): """Get current balloon data.""" with _balloons_lock: - return jsonify({ - 'status': 'success', + return api_success(data={ 'count': len(radiosonde_balloons), 'balloons': dict(radiosonde_balloons), }) diff --git a/routes/recordings.py b/routes/recordings.py index 6eca285..b74446e 100644 --- a/routes/recordings.py +++ b/routes/recordings.py @@ -8,6 +8,7 @@ from pathlib import Path from flask import Blueprint, jsonify, request, send_file from utils.recording import get_recording_manager, RECORDING_ROOT +from utils.responses import api_success, api_error recordings_bp = Blueprint('recordings', __name__, url_prefix='/recordings') @@ -17,7 +18,7 @@ def start_recording(): data = request.get_json() or {} mode = (data.get('mode') or '').strip() if not mode: - return jsonify({'status': 'error', 'message': 'mode is required'}), 400 + return api_error('mode is required', 400) label = data.get('label') metadata = data.get('metadata') if isinstance(data.get('metadata'), dict) else {} @@ -25,16 +26,13 @@ def start_recording(): manager = get_recording_manager() session = manager.start_recording(mode=mode, label=label, metadata=metadata) - return jsonify({ - 'status': 'success', - 'session': { - 'id': session.id, - 'mode': 
session.mode, - 'label': session.label, - 'started_at': session.started_at.isoformat(), - 'file_path': str(session.file_path), - } - }) + return api_success(data={'session': { + 'id': session.id, + 'mode': session.mode, + 'label': session.label, + 'started_at': session.started_at.isoformat(), + 'file_path': str(session.file_path), + }}) @recordings_bp.route('/stop', methods=['POST']) @@ -46,29 +44,25 @@ def stop_recording(): manager = get_recording_manager() session = manager.stop_recording(mode=mode, session_id=session_id) if not session: - return jsonify({'status': 'error', 'message': 'No active recording found'}), 404 + return api_error('No active recording found', 404) - return jsonify({ - 'status': 'success', - 'session': { - 'id': session.id, - 'mode': session.mode, - 'label': session.label, - 'started_at': session.started_at.isoformat(), - 'stopped_at': session.stopped_at.isoformat() if session.stopped_at else None, - 'event_count': session.event_count, - 'size_bytes': session.size_bytes, - 'file_path': str(session.file_path), - } - }) + return api_success(data={'session': { + 'id': session.id, + 'mode': session.mode, + 'label': session.label, + 'started_at': session.started_at.isoformat(), + 'stopped_at': session.stopped_at.isoformat() if session.stopped_at else None, + 'event_count': session.event_count, + 'size_bytes': session.size_bytes, + 'file_path': str(session.file_path), + }}) @recordings_bp.route('', methods=['GET']) def list_recordings(): manager = get_recording_manager() limit = request.args.get('limit', default=50, type=int) - return jsonify({ - 'status': 'success', + return api_success(data={ 'recordings': manager.list_recordings(limit=limit), 'active': manager.get_active(), }) @@ -79,8 +73,8 @@ def get_recording(session_id: str): manager = get_recording_manager() rec = manager.get_recording(session_id) if not rec: - return jsonify({'status': 'error', 'message': 'Recording not found'}), 404 - return jsonify({'status': 'success', 'recording': 
rec}) + return api_error('Recording not found', 404) + return api_success(data={'recording': rec}) @recordings_bp.route('//download', methods=['GET']) @@ -88,19 +82,19 @@ def download_recording(session_id: str): manager = get_recording_manager() rec = manager.get_recording(session_id) if not rec: - return jsonify({'status': 'error', 'message': 'Recording not found'}), 404 + return api_error('Recording not found', 404) file_path = Path(rec['file_path']) try: resolved_root = RECORDING_ROOT.resolve() resolved_file = file_path.resolve() if resolved_root not in resolved_file.parents: - return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + return api_error('Invalid recording path', 400) except Exception: - return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + return api_error('Invalid recording path', 400) if not file_path.exists(): - return jsonify({'status': 'error', 'message': 'Recording file missing'}), 404 + return api_error('Recording file missing', 404) return send_file( file_path, @@ -116,19 +110,19 @@ def get_recording_events(session_id: str): manager = get_recording_manager() rec = manager.get_recording(session_id) if not rec: - return jsonify({'status': 'error', 'message': 'Recording not found'}), 404 + return api_error('Recording not found', 404) file_path = Path(rec['file_path']) try: resolved_root = RECORDING_ROOT.resolve() resolved_file = file_path.resolve() if resolved_root not in resolved_file.parents: - return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + return api_error('Invalid recording path', 400) except Exception: - return jsonify({'status': 'error', 'message': 'Invalid recording path'}), 400 + return api_error('Invalid recording path', 400) if not file_path.exists(): - return jsonify({'status': 'error', 'message': 'Recording file missing'}), 404 + return api_error('Recording file missing', 404) limit = max(1, min(5000, request.args.get('limit', default=500, type=int))) 
offset = max(0, request.args.get('offset', default=0, type=int)) @@ -150,8 +144,7 @@ def get_recording_events(session_id: str): except json.JSONDecodeError: continue - return jsonify({ - 'status': 'success', + return api_success(data={ 'recording': { 'id': rec['id'], 'mode': rec['mode'], diff --git a/routes/rtlamr.py b/routes/rtlamr.py index 311bda7..a930b12 100644 --- a/routes/rtlamr.py +++ b/routes/rtlamr.py @@ -12,6 +12,7 @@ from typing import Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.logging import sensor_logger as logger from utils.validation import ( @@ -102,16 +103,13 @@ def start_rtlamr() -> Response: with app_module.rtlamr_lock: if app_module.rtlamr_process: - return jsonify({'status': 'error', 'message': 'RTLAMR already running'}), 409 + return api_error('RTLAMR already running', 409) data = request.json or {} sdr_type_str = data.get('sdr_type', 'rtlsdr') if sdr_type_str != 'rtlsdr': - return jsonify({ - 'status': 'error', - 'message': f'{sdr_type_str.replace("_", " ").title()} is not yet supported for this mode. Please use an RTL-SDR device.' - }), 400 + return api_error(f'{sdr_type_str.replace("_", " ").title()} is not yet supported for this mode. 
Please use an RTL-SDR device.', 400) # Validate inputs try: @@ -120,17 +118,13 @@ def start_rtlamr() -> Response: ppm = validate_ppm(data.get('ppm', '0')) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Check if device is available device_int = int(device) error = app_module.claim_sdr_device(device_int, 'rtlamr', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') rtlamr_active_device = device_int rtlamr_active_sdr_type = sdr_type_str @@ -181,7 +175,7 @@ def start_rtlamr() -> Response: if rtlamr_active_device is not None: app_module.release_sdr_device(rtlamr_active_device, rtlamr_active_sdr_type) rtlamr_active_device = None - return jsonify({'status': 'error', 'message': f'Failed to start rtl_tcp: {e}'}), 500 + return api_error(f'Failed to start rtl_tcp: {e}', 500) # Wait for rtl_tcp to start outside lock if rtl_tcp_just_started: @@ -253,7 +247,7 @@ def start_rtlamr() -> Response: if rtlamr_active_device is not None: app_module.release_sdr_device(rtlamr_active_device, rtlamr_active_sdr_type) rtlamr_active_device = None - return jsonify({'status': 'error', 'message': 'rtlamr not found. Install from https://github.com/bemasher/rtlamr'}) + return api_error('rtlamr not found. 
Install from https://github.com/bemasher/rtlamr') except Exception as e: # If rtlamr fails, clean up rtl_tcp and release device with rtl_tcp_lock: @@ -264,7 +258,7 @@ def start_rtlamr() -> Response: if rtlamr_active_device is not None: app_module.release_sdr_device(rtlamr_active_device, rtlamr_active_sdr_type) rtlamr_active_device = None - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @rtlamr_bp.route('/stop_rtlamr', methods=['POST']) diff --git a/routes/satellite.py b/routes/satellite.py index 36e303e..dddd19f 100644 --- a/routes/satellite.py +++ b/routes/satellite.py @@ -13,6 +13,7 @@ import requests from flask import Blueprint, jsonify, request, render_template, Response +from utils.responses import api_success, api_error from config import SHARED_OBSERVER_LOCATION_ENABLED from data.satellites import TLE_SATELLITES @@ -206,7 +207,7 @@ def predict_passes(): hours = validate_hours(data.get('hours', 24)) min_el = validate_elevation(data.get('minEl', 10)) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) norad_to_name = { 25544: 'ISS', @@ -345,7 +346,7 @@ def get_satellite_position(): try: from skyfield.api import wgs84, EarthSatellite except ImportError: - return jsonify({'status': 'error', 'message': 'skyfield not installed'}), 503 + return api_error('skyfield not installed', 503) data = request.json or {} @@ -354,7 +355,7 @@ def get_satellite_position(): lat = validate_latitude(data.get('latitude', data.get('lat', 51.5074))) lon = validate_longitude(data.get('longitude', data.get('lon', -0.1278))) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sat_input = data.get('satellites', []) include_track = bool(data.get('includeTrack', True)) @@ -528,7 +529,7 @@ def update_tle(): }) except Exception as e: logger.error(f"Error updating TLE data: {e}") - return jsonify({'status': 'error', 'message': 
'TLE update failed'}) + return api_error('TLE update failed') @satellite_bp.route('/celestrak/') @@ -542,7 +543,7 @@ def fetch_celestrak(category): ] if category not in valid_categories: - return jsonify({'status': 'error', 'message': f'Invalid category. Valid: {valid_categories}'}) + return api_error(f'Invalid category. Valid: {valid_categories}') try: url = f'https://celestrak.org/NORAD/elements/gp.php?GROUP={category}&FORMAT=tle' @@ -583,7 +584,7 @@ def fetch_celestrak(category): except Exception as e: logger.error(f"Error fetching CelesTrak data: {e}") - return jsonify({'status': 'error', 'message': 'Failed to fetch satellite data'}) + return api_error('Failed to fetch satellite data') # ============================================================================= @@ -604,7 +605,7 @@ def add_tracked_satellites_endpoint(): global _tle_cache data = request.get_json(silent=True) if not data: - return jsonify({'status': 'error', 'message': 'No data provided'}), 400 + return api_error('No data provided', 400) # Accept a single satellite dict or a list sat_list = data if isinstance(data, list) else [data] @@ -667,12 +668,12 @@ def update_tracked_satellite_endpoint(norad_id): data = request.json or {} enabled = data.get('enabled') if enabled is None: - return jsonify({'status': 'error', 'message': 'Missing enabled field'}), 400 + return api_error('Missing enabled field', 400) ok = update_tracked_satellite(str(norad_id), bool(enabled)) if ok: return jsonify({'status': 'success'}) - return jsonify({'status': 'error', 'message': 'Satellite not found'}), 404 + return api_error('Satellite not found', 404) @satellite_bp.route('/tracked/', methods=['DELETE']) @@ -682,4 +683,4 @@ def delete_tracked_satellite_endpoint(norad_id): if ok: return jsonify({'status': 'success', 'message': msg}) status_code = 403 if 'builtin' in msg.lower() else 404 - return jsonify({'status': 'error', 'message': msg}), status_code + return api_error(msg, status_code) diff --git a/routes/sensor.py 
b/routes/sensor.py index 29026fa..c6cd707 100644 --- a/routes/sensor.py +++ b/routes/sensor.py @@ -13,6 +13,7 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.logging import sensor_logger as logger from utils.validation import ( @@ -165,7 +166,7 @@ def start_sensor() -> Response: with app_module.sensor_lock: if app_module.sensor_process: - return jsonify({'status': 'error', 'message': 'Sensor already running'}), 409 + return api_error('Sensor already running', 409) data = request.json or {} @@ -176,7 +177,7 @@ def start_sensor() -> Response: ppm = validate_ppm(data.get('ppm', '0')) device = validate_device_index(data.get('device', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Check for rtl_tcp (remote SDR) connection rtl_tcp_host = data.get('rtl_tcp_host') @@ -190,11 +191,7 @@ def start_sensor() -> Response: device_int = int(device) error = app_module.claim_sdr_device(device_int, 'sensor', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') sensor_active_device = device_int sensor_active_sdr_type = sdr_type_str @@ -217,7 +214,7 @@ def start_sensor() -> Response: rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) sdr_device = SDRFactory.create_network_device(rtl_tcp_host, rtl_tcp_port) logger.info(f"Using remote SDR: rtl_tcp://{rtl_tcp_host}:{rtl_tcp_port}") @@ -285,14 +282,14 @@ def start_sensor() -> Response: app_module.release_sdr_device(sensor_active_device, sensor_active_sdr_type or 'rtlsdr') sensor_active_device = None sensor_active_sdr_type = None - return 
jsonify({'status': 'error', 'message': 'rtl_433 not found. Install with: brew install rtl_433'}) + return api_error('rtl_433 not found. Install with: brew install rtl_433') except Exception as e: # Release device on failure if sensor_active_device is not None: app_module.release_sdr_device(sensor_active_device, sensor_active_sdr_type or 'rtlsdr') sensor_active_device = None sensor_active_sdr_type = None - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @sensor_bp.route('/stop_sensor', methods=['POST']) @@ -346,4 +343,4 @@ def get_rssi_history() -> Response: result = {} for key, entries in sensor_rssi_history.items(): result[key] = [{'t': round(t, 1), 'rssi': rssi} for t, rssi in entries] - return jsonify({'status': 'success', 'devices': result}) + return api_success(data={'devices': result}) diff --git a/routes/settings.py b/routes/settings.py index dd2c621..0b51f83 100644 --- a/routes/settings.py +++ b/routes/settings.py @@ -16,6 +16,7 @@ from utils.database import ( get_correlations, ) from utils.logging import get_logger +from utils.responses import api_error, api_success logger = get_logger('intercept.settings') @@ -27,16 +28,10 @@ def get_settings() -> Response: """Get all settings.""" try: settings = get_all_settings() - return jsonify({ - 'status': 'success', - 'settings': settings - }) + return api_success(data={'settings': settings}) except Exception as e: logger.error(f"Error getting settings: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) @settings_bp.route('', methods=['POST']) @@ -45,10 +40,7 @@ def save_settings() -> Response: data = request.json or {} if not data: - return jsonify({ - 'status': 'error', - 'message': 'No settings provided' - }), 400 + return api_error('No settings provided', 400) try: saved = [] @@ -60,16 +52,10 @@ def save_settings() -> Response: set_setting(key, value) saved.append(key) - return jsonify({ - 'status': 'success', - 'saved': 
saved - }) + return api_success(data={'saved': saved}) except Exception as e: logger.error(f"Error saving settings: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) @settings_bp.route('/', methods=['GET']) @@ -83,17 +69,10 @@ def get_single_setting(key: str) -> Response: 'key': key }), 404 - return jsonify({ - 'status': 'success', - 'key': key, - 'value': value - }) + return api_success(data={'key': key, 'value': value}) except Exception as e: logger.error(f"Error getting setting {key}: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) @settings_bp.route('/', methods=['PUT']) @@ -103,24 +82,14 @@ def update_single_setting(key: str) -> Response: value = data.get('value') if value is None and 'value' not in data: - return jsonify({ - 'status': 'error', - 'message': 'Value is required' - }), 400 + return api_error('Value is required', 400) try: set_setting(key, value) - return jsonify({ - 'status': 'success', - 'key': key, - 'value': value - }) + return api_success(data={'key': key, 'value': value}) except Exception as e: logger.error(f"Error updating setting {key}: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) @settings_bp.route('/', methods=['DELETE']) @@ -129,11 +98,7 @@ def delete_single_setting(key: str) -> Response: try: deleted = delete_setting(key) if deleted: - return jsonify({ - 'status': 'success', - 'key': key, - 'deleted': True - }) + return api_success(data={'key': key, 'deleted': True}) else: return jsonify({ 'status': 'not_found', @@ -141,10 +106,7 @@ def delete_single_setting(key: str) -> Response: }), 404 except Exception as e: logger.error(f"Error deleting setting {key}: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) # ============================================================================= @@ -158,16 +120,10 
@@ def get_device_correlations() -> Response: try: correlations = get_correlations(min_confidence) - return jsonify({ - 'status': 'success', - 'correlations': correlations - }) + return api_success(data={'correlations': correlations}) except Exception as e: logger.error(f"Error getting correlations: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 + return api_error(str(e), 500) # ============================================================================= @@ -229,17 +185,11 @@ def check_dvb_driver_status() -> Response: def blacklist_dvb_drivers() -> Response: """Blacklist DVB kernel drivers to prevent them from claiming RTL-SDR devices.""" if sys.platform != 'linux': - return jsonify({ - 'status': 'error', - 'message': 'This feature is only available on Linux' - }), 400 + return api_error('This feature is only available on Linux', 400) # Check if we have permission (need to be running as root or with sudo) if os.geteuid() != 0: - return jsonify({ - 'status': 'error', - 'message': 'Root privileges required. Run the app with sudo or manually run: sudo modprobe -r dvb_usb_rtl28xxu rtl2832_sdr rtl2832 r820t' - }), 403 + return api_error('Root privileges required. 
Run the app with sudo or manually run: sudo modprobe -r dvb_usb_rtl28xxu rtl2832_sdr rtl2832 r820t', 403) errors = [] successes = [] diff --git a/routes/signalid.py b/routes/signalid.py index 5935dab..205a32e 100644 --- a/routes/signalid.py +++ b/routes/signalid.py @@ -10,6 +10,7 @@ from typing import Any from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error from utils.logging import get_logger logger = get_logger('intercept.signalid') @@ -294,15 +295,15 @@ def sigidwiki_lookup() -> Response: freq_raw = payload.get('frequency_mhz') if freq_raw is None: - return jsonify({'status': 'error', 'message': 'frequency_mhz is required'}), 400 + return api_error('frequency_mhz is required', 400) try: frequency_mhz = float(freq_raw) except (TypeError, ValueError): - return jsonify({'status': 'error', 'message': 'Invalid frequency_mhz'}), 400 + return api_error('Invalid frequency_mhz', 400) if frequency_mhz <= 0: - return jsonify({'status': 'error', 'message': 'frequency_mhz must be positive'}), 400 + return api_error('frequency_mhz must be positive', 400) modulation = str(payload.get('modulation') or '').strip().upper() if modulation and len(modulation) > 16: @@ -331,7 +332,7 @@ def sigidwiki_lookup() -> Response: lookup = _lookup_sigidwiki_matches(frequency_mhz, modulation, limit) except Exception as exc: logger.error('SigID lookup failed: %s', exc) - return jsonify({'status': 'error', 'message': 'SigID lookup failed'}), 502 + return api_error('SigID lookup failed', 502) response_payload = { 'matches': lookup.get('matches', []), diff --git a/routes/space_weather.py b/routes/space_weather.py index 84a0308..3683076 100644 --- a/routes/space_weather.py +++ b/routes/space_weather.py @@ -13,6 +13,7 @@ from typing import Any from flask import Blueprint, Response, jsonify from utils.logging import get_logger +from utils.responses import api_success, api_error logger = get_logger('intercept.space_weather') @@ -289,7 +290,7 @@ def 
get_image(key: str): """Proxy and cache whitelisted space weather images.""" entry = IMAGE_WHITELIST.get(key) if not entry: - return jsonify({'error': 'Unknown image key'}), 404 + return api_error('Unknown image key', 404) cache_key = f'img_{key}' cached = _cache_get(cache_key) @@ -299,7 +300,7 @@ def get_image(key: str): img_data = _fetch_bytes(entry['url']) if img_data is None: - return jsonify({'error': 'Failed to fetch image'}), 502 + return api_error('Failed to fetch image', 502) _cache_set(cache_key, img_data, TTL_IMAGE) return Response(img_data, content_type=entry['content_type'], diff --git a/routes/sstv.py b/routes/sstv.py index 9fe4d6c..3db0243 100644 --- a/routes/sstv.py +++ b/routes/sstv.py @@ -14,6 +14,7 @@ from typing import Any from flask import Blueprint, jsonify, request, Response, send_file +from utils.responses import api_success, api_error import app as app_module from utils.logging import get_logger from utils.sse import sse_stream_fanout @@ -357,16 +358,16 @@ def get_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) # Find image in decoder's output directory image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) return send_file(image_path, mimetype='image/png') @@ -386,15 +387,15 @@ def download_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid 
filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) return send_file(image_path, mimetype='image/png', as_attachment=True, download_name=filename) @@ -414,15 +415,15 @@ def delete_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) if decoder.delete_image(filename): return jsonify({'status': 'ok'}) else: - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) @sstv_bp.route('/images', methods=['DELETE']) diff --git a/routes/sstv_general.py b/routes/sstv_general.py index a17fe7d..89902f7 100644 --- a/routes/sstv_general.py +++ b/routes/sstv_general.py @@ -13,6 +13,7 @@ from pathlib import Path from flask import Blueprint, Response, jsonify, request, send_file +from utils.responses import api_success, api_error import app as app_module from utils.logging import get_logger from utils.sse import sse_stream_fanout @@ -102,10 +103,7 @@ def start_decoder(): decoder = get_general_sstv_decoder() if decoder.decoder_available is None: - return jsonify({ - 'status': 'error', - 'message': 'SSTV decoder not available. Install numpy and Pillow: pip install numpy Pillow', - }), 400 + return api_error('SSTV decoder not available. 
Install numpy and Pillow: pip install numpy Pillow', 400) if decoder.is_running: return jsonify({ @@ -123,10 +121,7 @@ def start_decoder(): sdr_type_str = data.get('sdr_type', 'rtlsdr') if sdr_type_str != 'rtlsdr': - return jsonify({ - 'status': 'error', - 'message': f'{sdr_type_str.replace("_", " ").title()} is not yet supported for this mode. Please use an RTL-SDR device.' - }), 400 + return api_error(f'{sdr_type_str.replace("_", " ").title()} is not yet supported for this mode. Please use an RTL-SDR device.', 400) frequency = data.get('frequency') modulation = data.get('modulation') @@ -134,23 +129,14 @@ def start_decoder(): # Validate frequency if frequency is None: - return jsonify({ - 'status': 'error', - 'message': 'Frequency is required', - }), 400 + return api_error('Frequency is required', 400) try: frequency = float(frequency) if not (1 <= frequency <= 500): - return jsonify({ - 'status': 'error', - 'message': 'Frequency must be between 1-500 MHz (HF requires upconverter for RTL-SDR)', - }), 400 + return api_error('Frequency must be between 1-500 MHz (HF requires upconverter for RTL-SDR)', 400) except (TypeError, ValueError): - return jsonify({ - 'status': 'error', - 'message': 'Invalid frequency', - }), 400 + return api_error('Invalid frequency', 400) # Auto-detect modulation from frequency table if not specified if not modulation: @@ -158,21 +144,14 @@ def start_decoder(): # Validate modulation if modulation not in ('fm', 'usb', 'lsb'): - return jsonify({ - 'status': 'error', - 'message': 'Modulation must be fm, usb, or lsb', - }), 400 + return api_error('Modulation must be fm, usb, or lsb', 400) # Claim SDR device global _sstv_general_active_device, _sstv_general_active_sdr_type device_int = int(device_index) error = app_module.claim_sdr_device(device_int, 'sstv_general', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error, - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') 
# Set callback and start decoder.set_callback(_progress_callback) @@ -193,10 +172,7 @@ def start_decoder(): }) else: app_module.release_sdr_device(device_int, sdr_type_str) - return jsonify({ - 'status': 'error', - 'message': 'Failed to start decoder', - }), 500 + return api_error('Failed to start decoder', 500) @sstv_general_bp.route('/stop', methods=['POST']) @@ -237,15 +213,15 @@ def get_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) return send_file(image_path, mimetype='image/png') @@ -257,15 +233,15 @@ def download_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) return send_file(image_path, mimetype='image/png', as_attachment=True, download_name=filename) @@ -277,15 +253,15 @@ def delete_image(filename: str): # Security: only allow alphanumeric filenames with .png extension if not 
filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) if decoder.delete_image(filename): return jsonify({'status': 'ok'}) else: - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) @sstv_general_bp.route('/images', methods=['DELETE']) @@ -322,18 +298,12 @@ def stream_progress(): def decode_file(): """Decode SSTV from an uploaded audio file.""" if 'audio' not in request.files: - return jsonify({ - 'status': 'error', - 'message': 'No audio file provided', - }), 400 + return api_error('No audio file provided', 400) audio_file = request.files['audio'] if not audio_file.filename: - return jsonify({ - 'status': 'error', - 'message': 'No file selected', - }), 400 + return api_error('No file selected', 400) import tempfile with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as tmp: @@ -352,10 +322,7 @@ def decode_file(): except Exception as e: logger.error(f"Error decoding file: {e}") - return jsonify({ - 'status': 'error', - 'message': str(e), - }), 500 + return api_error(str(e), 500) finally: try: diff --git a/routes/subghz.py b/routes/subghz.py index 83fe45c..6f0c02c 100644 --- a/routes/subghz.py +++ b/routes/subghz.py @@ -10,10 +10,11 @@ import queue from flask import Blueprint, jsonify, request, Response, send_file -from utils.logging import get_logger -from utils.sse import sse_stream -from utils.subghz import get_subghz_manager -from utils.event_pipeline import process_event +from utils.responses import api_success, api_error +from utils.logging import get_logger +from utils.sse import sse_stream +from utils.subghz import get_subghz_manager +from utils.event_pipeline import process_event from 
utils.constants import ( SUBGHZ_FREQ_MIN_MHZ, SUBGHZ_FREQ_MAX_MHZ, @@ -33,14 +34,14 @@ subghz_bp = Blueprint('subghz', __name__, url_prefix='/subghz') _subghz_queue: queue.Queue = queue.Queue(maxsize=200) -def _event_callback(event: dict) -> None: - """Forward SubGhzManager events to the SSE queue.""" - try: - process_event('subghz', event, event.get('type')) - except Exception: - pass - try: - _subghz_queue.put_nowait(event) +def _event_callback(event: dict) -> None: + """Forward SubGhzManager events to the SSE queue.""" + try: + process_event('subghz', event, event.get('type')) + except Exception: + pass + try: + _subghz_queue.put_nowait(event) except queue.Full: try: _subghz_queue.get_nowait() @@ -76,44 +77,44 @@ def _validate_serial(data: dict) -> str | None: return None -def _validate_int(data: dict, key: str, default: int, min_val: int, max_val: int) -> int: - """Validate integer parameter with bounds clamping.""" +def _validate_int(data: dict, key: str, default: int, min_val: int, max_val: int) -> int: + """Validate integer parameter with bounds clamping.""" try: val = int(data.get(key, default)) return max(min_val, min(max_val, val)) except (ValueError, TypeError): - return default - - -def _validate_decode_profile(data: dict, default: str = 'weather') -> str: - profile = data.get('decode_profile', default) - if not isinstance(profile, str): - return default - profile = profile.strip().lower() - if profile in {'weather', 'all'}: - return profile - return default - - -def _validate_optional_float(data: dict, key: str) -> tuple[float | None, str | None]: - raw = data.get(key) - if raw is None or raw == '': - return None, None - try: - return float(raw), None - except (ValueError, TypeError): - return None, f'Invalid {key}' - - -def _validate_bool(data: dict, key: str, default: bool = False) -> bool: - raw = data.get(key, default) - if isinstance(raw, bool): - return raw - if isinstance(raw, (int, float)): - return bool(raw) - if isinstance(raw, str): - return 
raw.strip().lower() in {'1', 'true', 'yes', 'on', 'enabled'} - return default + return default + + +def _validate_decode_profile(data: dict, default: str = 'weather') -> str: + profile = data.get('decode_profile', default) + if not isinstance(profile, str): + return default + profile = profile.strip().lower() + if profile in {'weather', 'all'}: + return profile + return default + + +def _validate_optional_float(data: dict, key: str) -> tuple[float | None, str | None]: + raw = data.get(key) + if raw is None or raw == '': + return None, None + try: + return float(raw), None + except (ValueError, TypeError): + return None, f'Invalid {key}' + + +def _validate_bool(data: dict, key: str, default: bool = False) -> bool: + raw = data.get(key, default) + if isinstance(raw, bool): + return raw + if isinstance(raw, (int, float)): + return bool(raw) + if isinstance(raw, str): + return raw.strip().lower() in {'1', 'true', 'yes', 'on', 'enabled'} + return default # ------------------------------------------------------------------ @@ -136,34 +137,34 @@ def get_presets(): # ------------------------------------------------------------------ @subghz_bp.route('/receive/start', methods=['POST']) -def start_receive(): +def start_receive(): data = request.get_json(silent=True) or {} freq_hz, err = _validate_frequency_hz(data) if err: - return jsonify({'status': 'error', 'message': err}), 400 + return api_error(err, 400) - sample_rate = _validate_int(data, 'sample_rate', 2000000, 2000000, 20000000) - lna_gain = _validate_int(data, 'lna_gain', 32, 0, SUBGHZ_LNA_GAIN_MAX) - vga_gain = _validate_int(data, 'vga_gain', 20, 0, SUBGHZ_VGA_GAIN_MAX) - trigger_enabled = _validate_bool(data, 'trigger_enabled', False) - trigger_pre_ms = _validate_int(data, 'trigger_pre_ms', 350, 50, 5000) - trigger_post_ms = _validate_int(data, 'trigger_post_ms', 700, 100, 10000) - device_serial = _validate_serial(data) + sample_rate = _validate_int(data, 'sample_rate', 2000000, 2000000, 20000000) + lna_gain = 
_validate_int(data, 'lna_gain', 32, 0, SUBGHZ_LNA_GAIN_MAX) + vga_gain = _validate_int(data, 'vga_gain', 20, 0, SUBGHZ_VGA_GAIN_MAX) + trigger_enabled = _validate_bool(data, 'trigger_enabled', False) + trigger_pre_ms = _validate_int(data, 'trigger_pre_ms', 350, 50, 5000) + trigger_post_ms = _validate_int(data, 'trigger_post_ms', 700, 100, 10000) + device_serial = _validate_serial(data) manager = get_subghz_manager() manager.set_callback(_event_callback) - result = manager.start_receive( - frequency_hz=freq_hz, - sample_rate=sample_rate, - lna_gain=lna_gain, - vga_gain=vga_gain, - trigger_enabled=trigger_enabled, - trigger_pre_ms=trigger_pre_ms, - trigger_post_ms=trigger_post_ms, - device_serial=device_serial, - ) + result = manager.start_receive( + frequency_hz=freq_hz, + sample_rate=sample_rate, + lna_gain=lna_gain, + vga_gain=vga_gain, + trigger_enabled=trigger_enabled, + trigger_pre_ms=trigger_pre_ms, + trigger_post_ms=trigger_post_ms, + device_serial=device_serial, + ) status_code = 200 if result.get('status') != 'error' else 409 return jsonify(result), status_code @@ -186,25 +187,25 @@ def start_decode(): freq_hz, err = _validate_frequency_hz(data) if err: - return jsonify({'status': 'error', 'message': err}), 400 + return api_error(err, 400) - sample_rate = _validate_int(data, 'sample_rate', 2000000, 2000000, 20000000) - lna_gain = _validate_int(data, 'lna_gain', 32, 0, SUBGHZ_LNA_GAIN_MAX) - vga_gain = _validate_int(data, 'vga_gain', 20, 0, SUBGHZ_VGA_GAIN_MAX) - decode_profile = _validate_decode_profile(data) - device_serial = _validate_serial(data) + sample_rate = _validate_int(data, 'sample_rate', 2000000, 2000000, 20000000) + lna_gain = _validate_int(data, 'lna_gain', 32, 0, SUBGHZ_LNA_GAIN_MAX) + vga_gain = _validate_int(data, 'vga_gain', 20, 0, SUBGHZ_VGA_GAIN_MAX) + decode_profile = _validate_decode_profile(data) + device_serial = _validate_serial(data) manager = get_subghz_manager() manager.set_callback(_event_callback) result = manager.start_decode( 
frequency_hz=freq_hz, - sample_rate=sample_rate, - lna_gain=lna_gain, - vga_gain=vga_gain, - decode_profile=decode_profile, - device_serial=device_serial, - ) + sample_rate=sample_rate, + lna_gain=lna_gain, + vga_gain=vga_gain, + decode_profile=decode_profile, + device_serial=device_serial, + ) status_code = 200 if result.get('status') != 'error' else 409 return jsonify(result), status_code @@ -227,33 +228,33 @@ def start_transmit(): capture_id = data.get('capture_id') if not capture_id or not isinstance(capture_id, str): - return jsonify({'status': 'error', 'message': 'capture_id is required'}), 400 + return api_error('capture_id is required', 400) # Sanitize capture_id if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 + return api_error('Invalid capture_id', 400) - tx_gain = _validate_int(data, 'tx_gain', 20, 0, SUBGHZ_TX_VGA_GAIN_MAX) - max_duration = _validate_int(data, 'max_duration', 10, 1, SUBGHZ_TX_MAX_DURATION) - start_seconds, start_err = _validate_optional_float(data, 'start_seconds') - if start_err: - return jsonify({'status': 'error', 'message': start_err}), 400 - duration_seconds, duration_err = _validate_optional_float(data, 'duration_seconds') - if duration_err: - return jsonify({'status': 'error', 'message': duration_err}), 400 - device_serial = _validate_serial(data) + tx_gain = _validate_int(data, 'tx_gain', 20, 0, SUBGHZ_TX_VGA_GAIN_MAX) + max_duration = _validate_int(data, 'max_duration', 10, 1, SUBGHZ_TX_MAX_DURATION) + start_seconds, start_err = _validate_optional_float(data, 'start_seconds') + if start_err: + return api_error(start_err, 400) + duration_seconds, duration_err = _validate_optional_float(data, 'duration_seconds') + if duration_err: + return api_error(duration_err, 400) + device_serial = _validate_serial(data) manager = get_subghz_manager() manager.set_callback(_event_callback) result = manager.transmit( - capture_id=capture_id, - tx_gain=tx_gain, - max_duration=max_duration, 
- start_seconds=start_seconds, - duration_seconds=duration_seconds, - device_serial=device_serial, - ) + capture_id=capture_id, + tx_gain=tx_gain, + max_duration=max_duration, + start_seconds=start_seconds, + duration_seconds=duration_seconds, + device_serial=device_serial, + ) status_code = 200 if result.get('status') != 'error' else 400 return jsonify(result), status_code @@ -278,11 +279,11 @@ def start_sweep(): freq_start = float(data.get('freq_start_mhz', 300)) freq_end = float(data.get('freq_end_mhz', 928)) if freq_start >= freq_end: - return jsonify({'status': 'error', 'message': 'freq_start must be less than freq_end'}), 400 + return api_error('freq_start must be less than freq_end', 400) if freq_start < SUBGHZ_FREQ_MIN_MHZ or freq_end > SUBGHZ_FREQ_MAX_MHZ: - return jsonify({'status': 'error', 'message': f'Frequency range: {SUBGHZ_FREQ_MIN_MHZ}-{SUBGHZ_FREQ_MAX_MHZ} MHz'}), 400 + return api_error(f'Frequency range: {SUBGHZ_FREQ_MIN_MHZ}-{SUBGHZ_FREQ_MAX_MHZ} MHz', 400) except (ValueError, TypeError): - return jsonify({'status': 'error', 'message': 'Invalid frequency range'}), 400 + return api_error('Invalid frequency range', 400) bin_width = _validate_int(data, 'bin_width', 100000, 10000, 5000000) device_serial = _validate_serial(data) @@ -326,94 +327,94 @@ def list_captures(): @subghz_bp.route('/captures/') def get_capture(capture_id: str): if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 + return api_error('Invalid capture_id', 400) manager = get_subghz_manager() capture = manager.get_capture(capture_id) if not capture: - return jsonify({'status': 'error', 'message': 'Capture not found'}), 404 + return api_error('Capture not found', 404) return jsonify({'status': 'ok', 'capture': capture.to_dict()}) -@subghz_bp.route('/captures//download') -def download_capture(capture_id: str): - if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 
+@subghz_bp.route('/captures//download') +def download_capture(capture_id: str): + if not capture_id.isalnum(): + return api_error('Invalid capture_id', 400) manager = get_subghz_manager() path = manager.get_capture_path(capture_id) if not path: - return jsonify({'status': 'error', 'message': 'Capture not found'}), 404 + return api_error('Capture not found', 404) return send_file( path, mimetype='application/octet-stream', - as_attachment=True, - download_name=path.name, - ) - - -@subghz_bp.route('/captures//trim', methods=['POST']) -def trim_capture(capture_id: str): - if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 - - data = request.get_json(silent=True) or {} - start_seconds, start_err = _validate_optional_float(data, 'start_seconds') - if start_err: - return jsonify({'status': 'error', 'message': start_err}), 400 - duration_seconds, duration_err = _validate_optional_float(data, 'duration_seconds') - if duration_err: - return jsonify({'status': 'error', 'message': duration_err}), 400 - - label = data.get('label', '') - if label is None: - label = '' - if not isinstance(label, str) or len(label) > 100: - return jsonify({'status': 'error', 'message': 'Label must be a string (max 100 chars)'}), 400 - - manager = get_subghz_manager() - result = manager.trim_capture( - capture_id=capture_id, - start_seconds=start_seconds, - duration_seconds=duration_seconds, - label=label, - ) - - if result.get('status') == 'ok': - return jsonify(result), 200 - message = str(result.get('message') or 'Trim failed') - status_code = 404 if 'not found' in message.lower() else 400 - return jsonify(result), status_code - - -@subghz_bp.route('/captures/', methods=['DELETE']) -def delete_capture(capture_id: str): - if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 + as_attachment=True, + download_name=path.name, + ) + + +@subghz_bp.route('/captures//trim', methods=['POST']) +def 
trim_capture(capture_id: str): + if not capture_id.isalnum(): + return api_error('Invalid capture_id', 400) + + data = request.get_json(silent=True) or {} + start_seconds, start_err = _validate_optional_float(data, 'start_seconds') + if start_err: + return api_error(start_err, 400) + duration_seconds, duration_err = _validate_optional_float(data, 'duration_seconds') + if duration_err: + return api_error(duration_err, 400) + + label = data.get('label', '') + if label is None: + label = '' + if not isinstance(label, str) or len(label) > 100: + return api_error('Label must be a string (max 100 chars)', 400) + + manager = get_subghz_manager() + result = manager.trim_capture( + capture_id=capture_id, + start_seconds=start_seconds, + duration_seconds=duration_seconds, + label=label, + ) + + if result.get('status') == 'ok': + return jsonify(result), 200 + message = str(result.get('message') or 'Trim failed') + status_code = 404 if 'not found' in message.lower() else 400 + return jsonify(result), status_code + + +@subghz_bp.route('/captures/', methods=['DELETE']) +def delete_capture(capture_id: str): + if not capture_id.isalnum(): + return api_error('Invalid capture_id', 400) manager = get_subghz_manager() if manager.delete_capture(capture_id): return jsonify({'status': 'deleted', 'id': capture_id}) - return jsonify({'status': 'error', 'message': 'Capture not found'}), 404 + return api_error('Capture not found', 404) @subghz_bp.route('/captures/', methods=['PATCH']) def update_capture(capture_id: str): if not capture_id.isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid capture_id'}), 400 + return api_error('Invalid capture_id', 400) data = request.get_json(silent=True) or {} label = data.get('label', '') if not isinstance(label, str) or len(label) > 100: - return jsonify({'status': 'error', 'message': 'Label must be a string (max 100 chars)'}), 400 + return api_error('Label must be a string (max 100 chars)', 400) manager = get_subghz_manager() if 
manager.update_capture_label(capture_id, label): return jsonify({'status': 'updated', 'id': capture_id, 'label': label}) - return jsonify({'status': 'error', 'message': 'Capture not found'}), 404 + return api_error('Capture not found', 404) # ------------------------------------------------------------------ diff --git a/routes/system.py b/routes/system.py index 4a96aad..e40d1da 100644 --- a/routes/system.py +++ b/routes/system.py @@ -22,6 +22,7 @@ from flask import Blueprint, Response, jsonify, request from utils.constants import SSE_KEEPALIVE_INTERVAL, SSE_QUEUE_TIMEOUT from utils.logging import sensor_logger as logger +from utils.responses import api_success, api_error from utils.sse import sse_stream_fanout try: @@ -549,10 +550,10 @@ def get_weather() -> Response: lat, lon = loc.get('lat'), loc.get('lon') if lat is None or lon is None: - return jsonify({'error': 'No location available'}) + return api_error('No location available') if _requests is None: - return jsonify({'error': 'requests library not available'}) + return api_error('requests library not available') try: resp = _requests.get( @@ -580,4 +581,4 @@ def get_weather() -> Response: return jsonify(weather) except Exception as exc: logger.debug('Weather fetch failed: %s', exc) - return jsonify({'error': str(exc)}) + return api_error(str(exc)) diff --git a/routes/tscm.py b/routes/tscm/__init__.py similarity index 51% rename from routes/tscm.py rename to routes/tscm/__init__.py index dd3a862..3e87cd8 100644 --- a/routes/tscm.py +++ b/routes/tscm/__init__.py @@ -1,3985 +1,1824 @@ -""" -TSCM (Technical Surveillance Countermeasures) Routes - -Provides endpoints for counter-surveillance sweeps, baseline management, -threat detection, and reporting. 
-""" - -from __future__ import annotations - -import json -import logging -import queue -import threading -import time -from datetime import datetime, timedelta, timezone -from typing import Any - -from flask import Blueprint, Response, jsonify, request - -from data.tscm_frequencies import ( - SWEEP_PRESETS, - get_all_sweep_presets, - get_sweep_preset, -) -from utils.database import ( - add_device_timeline_entry, - add_tscm_threat, - acknowledge_tscm_threat, - cleanup_old_timeline_entries, - create_tscm_schedule, - create_tscm_sweep, - delete_tscm_baseline, - delete_tscm_schedule, - get_active_tscm_baseline, - get_all_tscm_baselines, - get_all_tscm_schedules, - get_tscm_baseline, - get_tscm_schedule, - get_tscm_sweep, - get_tscm_threat_summary, - get_tscm_threats, - set_active_tscm_baseline, - update_tscm_schedule, - update_tscm_sweep, -) -from utils.tscm.baseline import ( - BaselineComparator, - BaselineRecorder, - get_comparison_for_active_baseline, -) -from utils.tscm.correlation import ( - CorrelationEngine, - get_correlation_engine, - reset_correlation_engine, -) -from utils.tscm.detector import ThreatDetector -from utils.tscm.device_identity import ( - get_identity_engine, - reset_identity_engine, - ingest_ble_dict, - ingest_wifi_dict, -) -from utils.event_pipeline import process_event -from utils.sse import sse_stream_fanout - -# Import unified Bluetooth scanner helper for TSCM integration -try: - from routes.bluetooth_v2 import get_tscm_bluetooth_snapshot - _USE_UNIFIED_BT_SCANNER = True -except ImportError: - _USE_UNIFIED_BT_SCANNER = False - -logger = logging.getLogger('intercept.tscm') - -tscm_bp = Blueprint('tscm', __name__, url_prefix='/tscm') - -try: - from zoneinfo import ZoneInfo -except ImportError: # pragma: no cover - fallback for older Python - ZoneInfo = None - -# ============================================================================= -# Global State (will be initialized from app.py) -# 
============================================================================= - -# These will be set by app.py -tscm_queue: queue.Queue | None = None -tscm_lock: threading.Lock | None = None - -# Local state -_sweep_thread: threading.Thread | None = None -_sweep_running = False -_current_sweep_id: int | None = None -_baseline_recorder = BaselineRecorder() -_schedule_thread: threading.Thread | None = None -_schedule_running = False - - -def init_tscm_state(tscm_q: queue.Queue, lock: threading.Lock) -> None: - """Initialize TSCM state from app.py.""" - global tscm_queue, tscm_lock - tscm_queue = tscm_q - tscm_lock = lock - start_tscm_scheduler() - - -def _emit_event(event_type: str, data: dict) -> None: - """Emit an event to the SSE queue.""" - if tscm_queue: - try: - tscm_queue.put_nowait({ - 'type': event_type, - 'timestamp': datetime.now().isoformat(), - **data - }) - except queue.Full: - logger.warning("TSCM queue full, dropping event") - - -# ============================================================================= -# Schedule Helpers -# ============================================================================= - -def _get_schedule_timezone(zone_name: str | None) -> Any: - """Resolve schedule timezone from a zone name or fallback to local.""" - if zone_name and ZoneInfo: - try: - return ZoneInfo(zone_name) - except Exception: - logger.warning(f"Invalid timezone '{zone_name}', using local time") - return datetime.now().astimezone().tzinfo or timezone.utc - - -def _parse_cron_field(field: str, min_value: int, max_value: int) -> set[int]: - """Parse a single cron field into a set of valid integers.""" - field = field.strip() - if not field: - raise ValueError("Empty cron field") - - values: set[int] = set() - parts = field.split(',') - for part in parts: - part = part.strip() - if part == '*': - values.update(range(min_value, max_value + 1)) - continue - if part.startswith('*/'): - step = int(part[2:]) - if step <= 0: - raise ValueError("Invalid step value") 
- values.update(range(min_value, max_value + 1, step)) - continue - range_part = part - step = 1 - if '/' in part: - range_part, step_str = part.split('/', 1) - step = int(step_str) - if step <= 0: - raise ValueError("Invalid step value") - if '-' in range_part: - start_str, end_str = range_part.split('-', 1) - start = int(start_str) - end = int(end_str) - if start > end: - start, end = end, start - values.update(range(start, end + 1, step)) - else: - values.add(int(range_part)) - - return {v for v in values if min_value <= v <= max_value} - - -def _parse_cron_expression(expr: str) -> tuple[dict[str, set[int]], dict[str, bool]]: - """Parse a cron expression into value sets and wildcard flags.""" - fields = (expr or '').split() - if len(fields) != 5: - raise ValueError("Cron expression must have 5 fields") - - minute_field, hour_field, dom_field, month_field, dow_field = fields - - sets = { - 'minute': _parse_cron_field(minute_field, 0, 59), - 'hour': _parse_cron_field(hour_field, 0, 23), - 'dom': _parse_cron_field(dom_field, 1, 31), - 'month': _parse_cron_field(month_field, 1, 12), - 'dow': _parse_cron_field(dow_field, 0, 7), - } - - # Normalize Sunday (7 -> 0) - if 7 in sets['dow']: - sets['dow'].add(0) - sets['dow'].discard(7) - - wildcards = { - 'dom': dom_field.strip() == '*', - 'dow': dow_field.strip() == '*', - } - return sets, wildcards - - -def _cron_matches(dt: datetime, sets: dict[str, set[int]], wildcards: dict[str, bool]) -> bool: - """Check if a datetime matches cron sets.""" - if dt.minute not in sets['minute']: - return False - if dt.hour not in sets['hour']: - return False - if dt.month not in sets['month']: - return False - - dom_match = dt.day in sets['dom'] - # Cron DOW: Sunday=0 - cron_dow = (dt.weekday() + 1) % 7 - dow_match = cron_dow in sets['dow'] - - if wildcards['dom'] and wildcards['dow']: - return True - if wildcards['dom']: - return dow_match - if wildcards['dow']: - return dom_match - return dom_match or dow_match - - -def 
_next_run_from_cron(expr: str, after_dt: datetime) -> datetime | None: - """Calculate next run time from cron expression after a given datetime.""" - sets, wildcards = _parse_cron_expression(expr) - # Round to next minute - candidate = after_dt.replace(second=0, microsecond=0) + timedelta(minutes=1) - # Search up to 366 days ahead - for _ in range(366 * 24 * 60): - if _cron_matches(candidate, sets, wildcards): - return candidate - candidate += timedelta(minutes=1) - return None - - -def _parse_schedule_timestamp(value: Any) -> datetime | None: - """Parse stored schedule timestamp to aware datetime.""" - if not value: - return None - if isinstance(value, datetime): - return value if value.tzinfo else value.replace(tzinfo=timezone.utc) - try: - parsed = datetime.fromisoformat(str(value)) - return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc) - except Exception: - return None - - -def _schedule_loop() -> None: - """Background loop to trigger scheduled sweeps.""" - global _schedule_running - - while _schedule_running: - try: - schedules = get_all_tscm_schedules(enabled=True, limit=200) - now_utc = datetime.now(timezone.utc) - - for schedule in schedules: - schedule_id = schedule.get('id') - cron_expr = schedule.get('cron_expression') or '' - tz = _get_schedule_timezone(schedule.get('zone_name')) - now_local = datetime.now(tz) - - next_run = _parse_schedule_timestamp(schedule.get('next_run')) - - if not next_run: - try: - computed = _next_run_from_cron(cron_expr, now_local) - except Exception as e: - logger.error(f"Schedule {schedule_id} cron parse error: {e}") - continue - if computed: - update_tscm_schedule( - schedule_id, - next_run=computed.astimezone(timezone.utc).isoformat() - ) - continue - - if next_run <= now_utc: - if _sweep_running: - logger.info(f"Schedule {schedule_id} due but sweep running; skipping") - try: - computed = _next_run_from_cron(cron_expr, now_local) - except Exception as e: - logger.error(f"Schedule {schedule_id} cron parse 
error: {e}") - continue - if computed: - update_tscm_schedule( - schedule_id, - next_run=computed.astimezone(timezone.utc).isoformat() - ) - continue - - # Trigger sweep - result = _start_sweep_internal( - sweep_type=schedule.get('sweep_type') or 'standard', - baseline_id=schedule.get('baseline_id'), - wifi_enabled=True, - bt_enabled=True, - rf_enabled=True, - wifi_interface='', - bt_interface='', - sdr_device=None, - verbose_results=False - ) - - if result.get('status') == 'success': - try: - computed = _next_run_from_cron(cron_expr, now_local) - except Exception as e: - logger.error(f"Schedule {schedule_id} cron parse error: {e}") - computed = None - - update_tscm_schedule( - schedule_id, - last_run=now_utc.isoformat(), - next_run=computed.astimezone(timezone.utc).isoformat() if computed else None - ) - logger.info(f"Scheduled sweep started for schedule {schedule_id}") - else: - try: - computed = _next_run_from_cron(cron_expr, now_local) - except Exception as e: - logger.error(f"Schedule {schedule_id} cron parse error: {e}") - computed = None - if computed: - update_tscm_schedule( - schedule_id, - next_run=computed.astimezone(timezone.utc).isoformat() - ) - logger.warning(f"Scheduled sweep failed for schedule {schedule_id}: {result.get('message')}") - - except Exception as e: - logger.error(f"TSCM schedule loop error: {e}") - - time.sleep(30) - - -def start_tscm_scheduler() -> None: - """Start background scheduler thread for TSCM sweeps.""" - global _schedule_thread, _schedule_running - if _schedule_thread and _schedule_thread.is_alive(): - return - _schedule_running = True - _schedule_thread = threading.Thread(target=_schedule_loop, daemon=True) - _schedule_thread.start() - - -# ============================================================================= -# Sweep Endpoints -# ============================================================================= - -def _check_available_devices(wifi: bool, bt: bool, rf: bool) -> dict: - """Check which scanning devices are 
available.""" - import os - import platform - import shutil - import subprocess - - available = { - 'wifi': False, - 'bluetooth': False, - 'rf': False, - 'wifi_reason': 'Not checked', - 'bt_reason': 'Not checked', - 'rf_reason': 'Not checked', - } - - # Check WiFi - use the same scanner singleton that performs actual scans - if wifi: - try: - from utils.wifi.scanner import get_wifi_scanner - scanner = get_wifi_scanner() - interfaces = scanner._detect_interfaces() - if interfaces: - available['wifi'] = True - available['wifi_reason'] = f'WiFi available ({interfaces[0]["name"]})' - else: - available['wifi_reason'] = 'No wireless interfaces found' - except Exception as e: - available['wifi_reason'] = f'WiFi detection error: {e}' - - # Check Bluetooth - if bt: - if platform.system() == 'Darwin': - # macOS: Check for Bluetooth via system_profiler - try: - result = subprocess.run( - ['system_profiler', 'SPBluetoothDataType'], - capture_output=True, - text=True, - timeout=10 - ) - if 'Bluetooth' in result.stdout and result.returncode == 0: - available['bluetooth'] = True - available['bt_reason'] = 'macOS Bluetooth available' - else: - available['bt_reason'] = 'Bluetooth not available' - except (subprocess.TimeoutExpired, FileNotFoundError): - available['bt_reason'] = 'Cannot detect Bluetooth' - else: - # Linux: Check for Bluetooth tools - if shutil.which('bluetoothctl') or shutil.which('hcitool') or shutil.which('hciconfig'): - try: - result = subprocess.run( - ['hciconfig'], - capture_output=True, - text=True, - timeout=5 - ) - if 'hci' in result.stdout.lower(): - available['bluetooth'] = True - available['bt_reason'] = 'Bluetooth adapter detected' - else: - available['bt_reason'] = 'No Bluetooth adapters found' - except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError): - # Try bluetoothctl as fallback - try: - result = subprocess.run( - ['bluetoothctl', 'list'], - capture_output=True, - text=True, - timeout=5 - ) - if result.stdout.strip(): - 
available['bluetooth'] = True - available['bt_reason'] = 'Bluetooth adapter detected' - else: - # Check /sys for Bluetooth - try: - import glob - bt_devs = glob.glob('/sys/class/bluetooth/hci*') - if bt_devs: - available['bluetooth'] = True - available['bt_reason'] = 'Bluetooth adapter detected' - else: - available['bt_reason'] = 'No Bluetooth adapters found' - except Exception: - available['bt_reason'] = 'No Bluetooth adapters found' - except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError): - # Check /sys for Bluetooth - try: - import glob - bt_devs = glob.glob('/sys/class/bluetooth/hci*') - if bt_devs: - available['bluetooth'] = True - available['bt_reason'] = 'Bluetooth adapter detected' - else: - available['bt_reason'] = 'Cannot detect Bluetooth adapters' - except Exception: - available['bt_reason'] = 'Cannot detect Bluetooth adapters' - else: - # Fallback: check /sys even without tools - try: - import glob - bt_devs = glob.glob('/sys/class/bluetooth/hci*') - if bt_devs: - available['bluetooth'] = True - available['bt_reason'] = 'Bluetooth adapter detected (no scan tools)' - else: - available['bt_reason'] = 'Bluetooth tools not installed (bluez)' - except Exception: - available['bt_reason'] = 'Bluetooth tools not installed (bluez)' - - # Check RF/SDR - if rf: - try: - from utils.sdr import SDRFactory - devices = SDRFactory.detect_devices() - if devices: - available['rf'] = True - available['rf_reason'] = f'{len(devices)} SDR device(s) detected' - else: - available['rf_reason'] = 'No SDR devices found' - except ImportError: - available['rf_reason'] = 'SDR detection unavailable' - - return available - - -def _start_sweep_internal( - sweep_type: str, - baseline_id: int | None, - wifi_enabled: bool, - bt_enabled: bool, - rf_enabled: bool, - wifi_interface: str = '', - bt_interface: str = '', - sdr_device: int | None = None, - verbose_results: bool = False, -) -> dict: - """Start a TSCM sweep without request context.""" - global 
_sweep_running, _sweep_thread, _current_sweep_id - - if _sweep_running: - return {'status': 'error', 'message': 'Sweep already running', 'http_status': 409} - - # Check for available devices - devices = _check_available_devices(wifi_enabled, bt_enabled, rf_enabled) - - warnings = [] - if wifi_enabled and not devices['wifi']: - warnings.append(f"WiFi: {devices['wifi_reason']}") - if bt_enabled and not devices['bluetooth']: - warnings.append(f"Bluetooth: {devices['bt_reason']}") - if rf_enabled and not devices['rf']: - warnings.append(f"RF: {devices['rf_reason']}") - - # If no devices available at all, return error - if not any([devices['wifi'], devices['bluetooth'], devices['rf']]): - return { - 'status': 'error', - 'message': 'No scanning devices available', - 'details': warnings, - 'http_status': 400, - } - - # Create sweep record - _current_sweep_id = create_tscm_sweep( - sweep_type=sweep_type, - baseline_id=baseline_id, - wifi_enabled=wifi_enabled, - bt_enabled=bt_enabled, - rf_enabled=rf_enabled - ) - - _sweep_running = True - - # Start sweep thread - _sweep_thread = threading.Thread( - target=_run_sweep, - args=(sweep_type, baseline_id, wifi_enabled, bt_enabled, rf_enabled, - wifi_interface, bt_interface, sdr_device, verbose_results), - daemon=True - ) - _sweep_thread.start() - - logger.info(f"Started TSCM sweep: type={sweep_type}, id={_current_sweep_id}") - - return { - 'status': 'success', - 'message': 'Sweep started', - 'sweep_id': _current_sweep_id, - 'sweep_type': sweep_type, - 'warnings': warnings if warnings else None, - 'devices': { - 'wifi': devices['wifi'], - 'bluetooth': devices['bluetooth'], - 'rf': devices['rf'] - } - } - - -@tscm_bp.route('/status') -def tscm_status(): - """Check if any TSCM operation is currently running.""" - return jsonify({'running': _sweep_running}) - - -@tscm_bp.route('/sweep/start', methods=['POST']) -def start_sweep(): - """Start a TSCM sweep.""" - data = request.get_json() or {} - sweep_type = data.get('sweep_type', 
'standard') - baseline_id = data.get('baseline_id') - if baseline_id in ('', None): - baseline_id = None - wifi_enabled = data.get('wifi', True) - bt_enabled = data.get('bluetooth', True) - rf_enabled = data.get('rf', True) - verbose_results = bool(data.get('verbose_results', False)) - - # Get interface selections - wifi_interface = data.get('wifi_interface', '') - bt_interface = data.get('bt_interface', '') - sdr_device = data.get('sdr_device') - - result = _start_sweep_internal( - sweep_type=sweep_type, - baseline_id=baseline_id, - wifi_enabled=wifi_enabled, - bt_enabled=bt_enabled, - rf_enabled=rf_enabled, - wifi_interface=wifi_interface, - bt_interface=bt_interface, - sdr_device=sdr_device, - verbose_results=verbose_results, - ) - http_status = result.pop('http_status', 200) - return jsonify(result), http_status - - -@tscm_bp.route('/sweep/stop', methods=['POST']) -def stop_sweep(): - """Stop the current TSCM sweep.""" - global _sweep_running - - if not _sweep_running: - return jsonify({'status': 'error', 'message': 'No sweep running'}) - - _sweep_running = False - - if _current_sweep_id: - update_tscm_sweep(_current_sweep_id, status='aborted', completed=True) - - _emit_event('sweep_stopped', {'reason': 'user_requested'}) - - logger.info("TSCM sweep stopped by user") - - return jsonify({'status': 'success', 'message': 'Sweep stopped'}) - - -@tscm_bp.route('/sweep/status') -def sweep_status(): - """Get current sweep status.""" - status = { - 'running': _sweep_running, - 'sweep_id': _current_sweep_id, - } - - if _current_sweep_id: - sweep = get_tscm_sweep(_current_sweep_id) - if sweep: - status['sweep'] = sweep - - return jsonify(status) - - -@tscm_bp.route('/sweep/stream') -def sweep_stream(): - """SSE stream for real-time sweep updates.""" - def _on_msg(msg: dict[str, Any]) -> None: - process_event('tscm', msg, msg.get('type')) - - return Response( - sse_stream_fanout( - source_queue=tscm_queue, - channel_key='tscm', - timeout=1.0, - keepalive_interval=30.0, - 
on_message=_on_msg, - ), - mimetype='text/event-stream', - headers={ - 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', - 'X-Accel-Buffering': 'no' - } - ) - - -# ============================================================================= -# Schedule Endpoints -# ============================================================================= - -@tscm_bp.route('/schedules', methods=['GET']) -def list_schedules(): - """List all TSCM sweep schedules.""" - enabled_param = request.args.get('enabled') - enabled = None - if enabled_param is not None: - enabled = enabled_param.lower() in ('1', 'true', 'yes') - - schedules = get_all_tscm_schedules(enabled=enabled, limit=200) - return jsonify({ - 'status': 'success', - 'count': len(schedules), - 'schedules': schedules, - }) - - -@tscm_bp.route('/schedules', methods=['POST']) -def create_schedule(): - """Create a new sweep schedule.""" - data = request.get_json() or {} - name = (data.get('name') or '').strip() - cron_expression = (data.get('cron_expression') or '').strip() - sweep_type = data.get('sweep_type', 'standard') - baseline_id = data.get('baseline_id') - zone_name = data.get('zone_name') - enabled = bool(data.get('enabled', True)) - notify_on_threat = bool(data.get('notify_on_threat', True)) - notify_email = data.get('notify_email') - - if not name: - return jsonify({'status': 'error', 'message': 'Schedule name required'}), 400 - if not cron_expression: - return jsonify({'status': 'error', 'message': 'cron_expression required'}), 400 - - next_run = None - if enabled: - try: - tz = _get_schedule_timezone(zone_name) - next_local = _next_run_from_cron(cron_expression, datetime.now(tz)) - next_run = next_local.astimezone(timezone.utc).isoformat() if next_local else None - except Exception as e: - return jsonify({'status': 'error', 'message': f'Invalid cron: {e}'}), 400 - - schedule_id = create_tscm_schedule( - name=name, - cron_expression=cron_expression, - sweep_type=sweep_type, - baseline_id=baseline_id, - 
zone_name=zone_name, - enabled=enabled, - notify_on_threat=notify_on_threat, - notify_email=notify_email, - next_run=next_run, - ) - schedule = get_tscm_schedule(schedule_id) - return jsonify({ - 'status': 'success', - 'message': 'Schedule created', - 'schedule': schedule - }) - - -@tscm_bp.route('/schedules/', methods=['PUT', 'PATCH']) -def update_schedule(schedule_id: int): - """Update a sweep schedule.""" - schedule = get_tscm_schedule(schedule_id) - if not schedule: - return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 - - data = request.get_json() or {} - updates: dict[str, Any] = {} - - for key in ('name', 'cron_expression', 'sweep_type', 'baseline_id', 'zone_name', 'notify_email'): - if key in data: - updates[key] = data[key] - - if 'baseline_id' in updates and updates['baseline_id'] in ('', None): - updates['baseline_id'] = None - - if 'enabled' in data: - updates['enabled'] = 1 if data['enabled'] else 0 - if 'notify_on_threat' in data: - updates['notify_on_threat'] = 1 if data['notify_on_threat'] else 0 - - # Recalculate next_run when cron/zone/enabled changes - if any(k in updates for k in ('cron_expression', 'zone_name', 'enabled')): - if updates.get('enabled', schedule.get('enabled', 1)): - cron_expr = updates.get('cron_expression', schedule.get('cron_expression', '')) - zone_name = updates.get('zone_name', schedule.get('zone_name')) - try: - tz = _get_schedule_timezone(zone_name) - next_local = _next_run_from_cron(cron_expr, datetime.now(tz)) - updates['next_run'] = next_local.astimezone(timezone.utc).isoformat() if next_local else None - except Exception as e: - return jsonify({'status': 'error', 'message': f'Invalid cron: {e}'}), 400 - else: - updates['next_run'] = None - - if not updates: - return jsonify({'status': 'error', 'message': 'No updates provided'}), 400 - - update_tscm_schedule(schedule_id, **updates) - schedule = get_tscm_schedule(schedule_id) - return jsonify({'status': 'success', 'schedule': schedule}) - - 
-@tscm_bp.route('/schedules/', methods=['DELETE']) -def delete_schedule(schedule_id: int): - """Delete a sweep schedule.""" - success = delete_tscm_schedule(schedule_id) - if not success: - return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 - return jsonify({'status': 'success', 'message': 'Schedule deleted'}) - - -@tscm_bp.route('/schedules//run', methods=['POST']) -def run_schedule_now(schedule_id: int): - """Trigger a scheduled sweep immediately.""" - schedule = get_tscm_schedule(schedule_id) - if not schedule: - return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 - - result = _start_sweep_internal( - sweep_type=schedule.get('sweep_type') or 'standard', - baseline_id=schedule.get('baseline_id'), - wifi_enabled=True, - bt_enabled=True, - rf_enabled=True, - wifi_interface='', - bt_interface='', - sdr_device=None, - verbose_results=False, - ) - - if result.get('status') != 'success': - status_code = result.pop('http_status', 400) - return jsonify(result), status_code - - # Update schedule run timestamps - cron_expr = schedule.get('cron_expression') or '' - tz = _get_schedule_timezone(schedule.get('zone_name')) - now_utc = datetime.now(timezone.utc) - try: - next_local = _next_run_from_cron(cron_expr, datetime.now(tz)) - except Exception: - next_local = None - - update_tscm_schedule( - schedule_id, - last_run=now_utc.isoformat(), - next_run=next_local.astimezone(timezone.utc).isoformat() if next_local else None, - ) - - return jsonify(result) - - -@tscm_bp.route('/devices') -def get_tscm_devices(): - """Get available scanning devices for TSCM sweeps.""" - import platform - import shutil - import subprocess - - devices = { - 'wifi_interfaces': [], - 'bt_adapters': [], - 'sdr_devices': [] - } - - # Detect WiFi interfaces - if platform.system() == 'Darwin': # macOS - try: - result = subprocess.run( - ['networksetup', '-listallhardwareports'], - capture_output=True, text=True, timeout=5 - ) - lines = result.stdout.split('\n') 
- for i, line in enumerate(lines): - if 'Wi-Fi' in line or 'AirPort' in line: - # Get the hardware port name (e.g., "Wi-Fi") - port_name = line.replace('Hardware Port:', '').strip() - for j in range(i + 1, min(i + 3, len(lines))): - if 'Device:' in lines[j]: - device = lines[j].split('Device:')[1].strip() - devices['wifi_interfaces'].append({ - 'name': device, - 'display_name': f'{port_name} ({device})', - 'type': 'internal', - 'monitor_capable': False - }) - break - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - pass - else: # Linux - try: - result = subprocess.run( - ['iw', 'dev'], - capture_output=True, text=True, timeout=5 - ) - current_iface = None - for line in result.stdout.split('\n'): - line = line.strip() - if line.startswith('Interface'): - current_iface = line.split()[1] - elif current_iface and 'type' in line: - iface_type = line.split()[-1] - devices['wifi_interfaces'].append({ - 'name': current_iface, - 'display_name': f'Wireless ({current_iface}) - {iface_type}', - 'type': iface_type, - 'monitor_capable': True - }) - current_iface = None - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - # Fall back to iwconfig - try: - result = subprocess.run( - ['iwconfig'], - capture_output=True, text=True, timeout=5 - ) - for line in result.stdout.split('\n'): - if 'IEEE 802.11' in line: - iface = line.split()[0] - devices['wifi_interfaces'].append({ - 'name': iface, - 'display_name': f'Wireless ({iface})', - 'type': 'managed', - 'monitor_capable': True - }) - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - pass - - # Detect Bluetooth adapters - if platform.system() == 'Linux': - try: - result = subprocess.run( - ['hciconfig'], - capture_output=True, text=True, timeout=5 - ) - import re - blocks = re.split(r'(?=^hci\d+:)', result.stdout, flags=re.MULTILINE) - for idx, block in enumerate(blocks): - if block.strip(): - first_line = block.split('\n')[0] - 
match = re.match(r'(hci\d+):', first_line) - if match: - iface_name = match.group(1) - is_up = 'UP RUNNING' in block or '\tUP ' in block - devices['bt_adapters'].append({ - 'name': iface_name, - 'display_name': f'Bluetooth Adapter ({iface_name})', - 'type': 'hci', - 'status': 'up' if is_up else 'down' - }) - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - # Try bluetoothctl as fallback - try: - result = subprocess.run( - ['bluetoothctl', 'list'], - capture_output=True, text=True, timeout=5 - ) - for line in result.stdout.split('\n'): - if 'Controller' in line: - # Format: Controller XX:XX:XX:XX:XX:XX Name - parts = line.split() - if len(parts) >= 3: - addr = parts[1] - name = ' '.join(parts[2:]) if len(parts) > 2 else 'Bluetooth' - devices['bt_adapters'].append({ - 'name': addr, - 'display_name': f'{name} ({addr[-8:]})', - 'type': 'controller', - 'status': 'available' - }) - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - pass - elif platform.system() == 'Darwin': - # macOS has built-in Bluetooth - get more info via system_profiler - try: - result = subprocess.run( - ['system_profiler', 'SPBluetoothDataType'], - capture_output=True, text=True, timeout=10 - ) - # Extract controller info - bt_name = 'Built-in Bluetooth' - bt_addr = '' - for line in result.stdout.split('\n'): - if 'Address:' in line: - bt_addr = line.split('Address:')[1].strip() - break - devices['bt_adapters'].append({ - 'name': 'default', - 'display_name': f'{bt_name}' + (f' ({bt_addr[-8:]})' if bt_addr else ''), - 'type': 'macos', - 'status': 'available' - }) - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): - devices['bt_adapters'].append({ - 'name': 'default', - 'display_name': 'Built-in Bluetooth', - 'type': 'macos', - 'status': 'available' - }) - - # Detect SDR devices - try: - from utils.sdr import SDRFactory - sdr_list = SDRFactory.detect_devices() - for sdr in sdr_list: - # SDRDevice 
is a dataclass with attributes, not a dict - sdr_type_name = sdr.sdr_type.value if hasattr(sdr.sdr_type, 'value') else str(sdr.sdr_type) - # Create a friendly display name - display_name = sdr.name - if sdr.serial and sdr.serial not in ('N/A', 'Unknown'): - display_name = f'{sdr.name} (SN: {sdr.serial[-8:]})' - devices['sdr_devices'].append({ - 'index': sdr.index, - 'name': sdr.name, - 'display_name': display_name, - 'type': sdr_type_name, - 'serial': sdr.serial, - 'driver': sdr.driver - }) - except ImportError: - logger.debug("SDR module not available") - except Exception as e: - logger.warning(f"Error detecting SDR devices: {e}") - - # Check if running as root - import os - from flask import current_app - running_as_root = current_app.config.get('RUNNING_AS_ROOT', os.geteuid() == 0) - - warnings = [] - if not running_as_root: - warnings.append({ - 'type': 'privileges', - 'message': 'Not running as root. WiFi monitor mode and some Bluetooth features require sudo.', - 'action': 'Run with: sudo -E venv/bin/python intercept.py' - }) - - return jsonify({ - 'status': 'success', - 'devices': devices, - 'running_as_root': running_as_root, - 'warnings': warnings - }) - - -def _scan_wifi_networks(interface: str) -> list[dict]: - """ - Scan for WiFi networks using the unified WiFi scanner. - - This is a facade that maintains backwards compatibility with TSCM - while using the new unified scanner module. - - Automatically detects monitor mode interfaces and uses deep scan - (airodump-ng) when appropriate. - - Args: - interface: WiFi interface name (optional). 
- - Returns: - List of network dicts with: bssid, essid, power, channel, privacy - """ - try: - from utils.wifi import get_wifi_scanner - - scanner = get_wifi_scanner() - - # Check if interface is in monitor mode - is_monitor = False - if interface: - is_monitor = scanner._is_monitor_mode_interface(interface) - - if is_monitor: - # Use deep scan for monitor mode interfaces - logger.info(f"Interface {interface} is in monitor mode, using deep scan") - - # Check if airodump-ng is available - caps = scanner.check_capabilities() - if not caps.has_airodump_ng: - logger.warning("airodump-ng not available for monitor mode scanning") - return [] - - # Start a short deep scan - if not scanner.is_scanning: - scanner.start_deep_scan(interface=interface, band='all') - - # Wait briefly for some results - import time - time.sleep(5) - - # Get current access points - networks = [] - for ap in scanner.access_points: - networks.append(ap.to_legacy_dict()) - - logger.info(f"WiFi deep scan found {len(networks)} networks") - return networks - else: - # Use quick scan for managed mode interfaces - result = scanner.quick_scan(interface=interface, timeout=15) - - if result.error: - logger.warning(f"WiFi scan error: {result.error}") - - # Convert to legacy format for TSCM - networks = [] - for ap in result.access_points: - networks.append(ap.to_legacy_dict()) - - logger.info(f"WiFi scan found {len(networks)} networks") - return networks - - except ImportError as e: - logger.error(f"Failed to import wifi scanner: {e}") - return [] - except Exception as e: - logger.exception(f"WiFi scan failed: {e}") - return [] - - -def _scan_wifi_clients(interface: str) -> list[dict]: - """ - Get WiFi client observations from the unified WiFi scanner. - - Clients are only available when monitor-mode scanning is active. 
- """ - try: - from utils.wifi import get_wifi_scanner - - scanner = get_wifi_scanner() - if interface: - try: - if not scanner._is_monitor_mode_interface(interface): - return [] - except Exception: - return [] - - return [client.to_dict() for client in scanner.clients] - except ImportError as e: - logger.error(f"Failed to import wifi scanner: {e}") - return [] - except Exception as e: - logger.exception(f"WiFi client scan failed: {e}") - return [] - - -def _scan_bluetooth_devices(interface: str, duration: int = 10) -> list[dict]: - """ - Scan for Bluetooth devices with manufacturer data detection. - - Uses the BLE scanner module (bleak library) for proper manufacturer ID - detection, with fallback to system tools if bleak is unavailable. - """ - import platform - import os - import re - import shutil - import subprocess - - devices = [] - seen_macs = set() - - logger.info(f"Starting Bluetooth scan (duration={duration}s, interface={interface})") - - # Try the BLE scanner module first (uses bleak for proper manufacturer detection) - try: - from utils.tscm.ble_scanner import get_ble_scanner, scan_ble_devices - - logger.info("Using BLE scanner module with manufacturer detection") - ble_devices = scan_ble_devices(duration) - - for ble_dev in ble_devices: - mac = ble_dev.get('mac', '').upper() - if mac and mac not in seen_macs: - seen_macs.add(mac) - - device = { - 'mac': mac, - 'name': ble_dev.get('name', 'Unknown'), - 'rssi': ble_dev.get('rssi'), - 'type': 'ble', - 'manufacturer': ble_dev.get('manufacturer_name'), - 'manufacturer_id': ble_dev.get('manufacturer_id'), - 'is_tracker': ble_dev.get('is_tracker', False), - 'tracker_type': ble_dev.get('tracker_type'), - 'is_airtag': ble_dev.get('is_airtag', False), - 'is_tile': ble_dev.get('is_tile', False), - 'is_smarttag': ble_dev.get('is_smarttag', False), - 'is_espressif': ble_dev.get('is_espressif', False), - 'service_uuids': ble_dev.get('service_uuids', []), - } - devices.append(device) - - if devices: - 
logger.info(f"BLE scanner found {len(devices)} devices") - trackers = [d for d in devices if d.get('is_tracker')] - if trackers: - logger.info(f"Trackers detected: {[d.get('tracker_type') for d in trackers]}") - return devices - - except ImportError: - logger.warning("BLE scanner module not available, using fallback") - except Exception as e: - logger.warning(f"BLE scanner failed: {e}, using fallback") - - if platform.system() == 'Darwin': - # macOS: Use system_profiler for basic Bluetooth info - try: - result = subprocess.run( - ['system_profiler', 'SPBluetoothDataType', '-json'], - capture_output=True, text=True, timeout=15 - ) - import json - data = json.loads(result.stdout) - bt_data = data.get('SPBluetoothDataType', [{}])[0] - - # Get connected/paired devices - for section in ['device_connected', 'device_title']: - section_data = bt_data.get(section, {}) - if isinstance(section_data, dict): - for name, info in section_data.items(): - if isinstance(info, dict): - mac = info.get('device_address', '') - if mac and mac not in seen_macs: - seen_macs.add(mac) - devices.append({ - 'mac': mac.upper(), - 'name': name, - 'type': info.get('device_minorType', 'unknown'), - 'connected': section == 'device_connected' - }) - logger.info(f"macOS Bluetooth scan found {len(devices)} devices") - except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError, json.JSONDecodeError) as e: - logger.warning(f"macOS Bluetooth scan failed: {e}") - - else: - # Linux: Try multiple methods - iface = interface or 'hci0' - - # Method 1: Try hcitool scan (simpler, more reliable) - if shutil.which('hcitool'): - try: - logger.info("Trying hcitool scan...") - result = subprocess.run( - ['hcitool', '-i', iface, 'scan', '--flush'], - capture_output=True, text=True, timeout=duration + 5 - ) - for line in result.stdout.split('\n'): - line = line.strip() - if line and '\t' in line: - parts = line.split('\t') - if len(parts) >= 1 and ':' in parts[0]: - mac = parts[0].strip().upper() 
- name = parts[1].strip() if len(parts) > 1 else 'Unknown' - if mac not in seen_macs: - seen_macs.add(mac) - devices.append({'mac': mac, 'name': name}) - logger.info(f"hcitool scan found {len(devices)} classic BT devices") - except (subprocess.TimeoutExpired, subprocess.SubprocessError) as e: - logger.warning(f"hcitool scan failed: {e}") - - # Method 2: Try btmgmt for BLE devices - if shutil.which('btmgmt'): - try: - logger.info("Trying btmgmt find...") - result = subprocess.run( - ['btmgmt', 'find'], - capture_output=True, text=True, timeout=duration + 5 - ) - for line in result.stdout.split('\n'): - # Parse btmgmt output: "dev_found: XX:XX:XX:XX:XX:XX type LE..." - if 'dev_found' in line.lower() or ('type' in line.lower() and ':' in line): - mac_match = re.search( - r'([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:' - r'[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})', - line - ) - if mac_match: - mac = mac_match.group(1).upper() - if mac not in seen_macs: - seen_macs.add(mac) - # Try to extract name - name_match = re.search(r'name\s+(.+?)(?:\s|$)', line, re.I) - name = name_match.group(1) if name_match else 'Unknown BLE' - devices.append({ - 'mac': mac, - 'name': name, - 'type': 'ble' if 'le' in line.lower() else 'classic' - }) - logger.info(f"btmgmt found {len(devices)} total devices") - except (subprocess.TimeoutExpired, subprocess.SubprocessError) as e: - logger.warning(f"btmgmt find failed: {e}") - - # Method 3: Try bluetoothctl as last resort - if not devices and shutil.which('bluetoothctl'): - try: - import pty - import select - - logger.info("Trying bluetoothctl scan...") - master_fd, slave_fd = pty.openpty() - process = subprocess.Popen( - ['bluetoothctl'], - stdin=slave_fd, - stdout=slave_fd, - stderr=slave_fd, - close_fds=True - ) - os.close(slave_fd) - - # Start scanning - time.sleep(0.3) - os.write(master_fd, b'power on\n') - time.sleep(0.3) - os.write(master_fd, b'scan on\n') - - # Collect devices for specified duration - scan_end = time.time() + 
min(duration, 10) # Cap at 10 seconds - buffer = '' - - while time.time() < scan_end: - readable, _, _ = select.select([master_fd], [], [], 1.0) - if readable: - try: - data = os.read(master_fd, 4096) - if not data: - break - buffer += data.decode('utf-8', errors='replace') - - while '\n' in buffer: - line, buffer = buffer.split('\n', 1) - line = re.sub(r'\x1b\[[0-9;]*m', '', line).strip() - - if 'Device' in line: - match = re.search( - r'([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:' - r'[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})\s*(.*)', - line - ) - if match: - mac = match.group(1).upper() - name = match.group(2).strip() - # Remove RSSI from name if present - name = re.sub(r'\s*RSSI:\s*-?\d+\s*', '', name).strip() - - if mac not in seen_macs: - seen_macs.add(mac) - devices.append({ - 'mac': mac, - 'name': name or '[Unknown]' - }) - except OSError: - break - - # Stop scanning and cleanup - try: - os.write(master_fd, b'scan off\n') - time.sleep(0.2) - os.write(master_fd, b'quit\n') - except OSError: - pass - - process.terminate() - try: - process.wait(timeout=2) - except subprocess.TimeoutExpired: - process.kill() - - try: - os.close(master_fd) - except OSError: - pass - - logger.info(f"bluetoothctl scan found {len(devices)} devices") - - except (FileNotFoundError, subprocess.SubprocessError) as e: - logger.warning(f"bluetoothctl scan failed: {e}") - - return devices - - -def _scan_rf_signals( - sdr_device: int | None, - duration: int = 30, - stop_check: callable | None = None, - sweep_ranges: list[dict] | None = None -) -> list[dict]: - """ - Scan for RF signals using SDR (rtl_power or hackrf_sweep). 
- - Scans common surveillance frequency bands: - - 88-108 MHz: FM broadcast (potential FM bugs) - - 315 MHz: Common ISM band (wireless devices) - - 433 MHz: ISM band (European wireless devices, car keys) - - 868 MHz: European ISM band - - 915 MHz: US ISM band - - 1.2 GHz: Video transmitters - - 2.4 GHz: WiFi, Bluetooth, video transmitters - - Args: - sdr_device: SDR device index - duration: Scan duration per band - stop_check: Optional callable that returns True if scan should stop. - Defaults to checking module-level _sweep_running. - sweep_ranges: Optional preset ranges (MHz) from SWEEP_PRESETS. - """ - # Default stop check uses module-level _sweep_running - if stop_check is None: - stop_check = lambda: not _sweep_running - import os - import shutil - import subprocess - import tempfile - - signals = [] - - logger.info(f"Starting RF scan (device={sdr_device})") - - # Detect available SDR devices and sweep tools - rtl_power_path = shutil.which('rtl_power') - hackrf_sweep_path = shutil.which('hackrf_sweep') - - sdr_type = None - sweep_tool_path = None - - try: - from utils.sdr import SDRFactory - from utils.sdr.base import SDRType - devices = SDRFactory.detect_devices() - rtlsdr_available = any(d.sdr_type == SDRType.RTL_SDR for d in devices) - hackrf_available = any(d.sdr_type == SDRType.HACKRF for d in devices) - except ImportError: - rtlsdr_available = False - hackrf_available = False - - # Pick the best available SDR + sweep tool combo - if rtlsdr_available and rtl_power_path: - sdr_type = 'rtlsdr' - sweep_tool_path = rtl_power_path - logger.info(f"Using RTL-SDR with rtl_power at: {rtl_power_path}") - elif hackrf_available and hackrf_sweep_path: - sdr_type = 'hackrf' - sweep_tool_path = hackrf_sweep_path - logger.info(f"Using HackRF with hackrf_sweep at: {hackrf_sweep_path}") - elif rtl_power_path: - # Tool exists but no device detected — try anyway (detection may have failed) - sdr_type = 'rtlsdr' - sweep_tool_path = rtl_power_path - logger.info(f"No SDR 
detected but rtl_power found, attempting RTL-SDR scan") - elif hackrf_sweep_path: - sdr_type = 'hackrf' - sweep_tool_path = hackrf_sweep_path - logger.info(f"No SDR detected but hackrf_sweep found, attempting HackRF scan") - - if not sweep_tool_path: - logger.warning("No supported sweep tool found (rtl_power or hackrf_sweep)") - _emit_event('rf_status', { - 'status': 'error', - 'message': 'No SDR sweep tool installed. Install rtl-sdr (rtl_power) or HackRF (hackrf_sweep) for RF scanning.', - }) - return signals - - # Define frequency bands to scan (in Hz) - # Format: (start_freq, end_freq, bin_size, description) - scan_bands: list[tuple[int, int, int, str]] = [] - - if sweep_ranges: - for rng in sweep_ranges: - try: - start_mhz = float(rng.get('start', 0)) - end_mhz = float(rng.get('end', 0)) - step_mhz = float(rng.get('step', 0.1)) - name = rng.get('name') or f"{start_mhz:.1f}-{end_mhz:.1f} MHz" - if start_mhz > 0 and end_mhz > start_mhz: - bin_size = max(1000, int(step_mhz * 1_000_000)) - scan_bands.append(( - int(start_mhz * 1_000_000), - int(end_mhz * 1_000_000), - bin_size, - name - )) - except (TypeError, ValueError): - continue - - if not scan_bands: - # Fallback: focus on common bug frequencies - scan_bands = [ - (88000000, 108000000, 100000, 'FM Broadcast'), # FM bugs - (315000000, 316000000, 10000, '315 MHz ISM'), # US ISM - (433000000, 434000000, 10000, '433 MHz ISM'), # EU ISM - (868000000, 869000000, 10000, '868 MHz ISM'), # EU ISM - (902000000, 928000000, 100000, '915 MHz ISM'), # US ISM - (1200000000, 1300000000, 100000, '1.2 GHz Video'), # Video TX - (2400000000, 2500000000, 500000, '2.4 GHz ISM'), # WiFi/BT/Video - ] - - # Create temp file for output - with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as tmp: - tmp_path = tmp.name - - try: - # Build device argument - device_idx = sdr_device if sdr_device is not None else 0 - - # Scan each band and look for strong signals - for start_freq, end_freq, bin_size, band_name in 
scan_bands: - if stop_check(): - break - - logger.info(f"Scanning {band_name} ({start_freq/1e6:.1f}-{end_freq/1e6:.1f} MHz)") - - try: - # Build sweep command based on SDR type - if sdr_type == 'hackrf': - cmd = [ - sweep_tool_path, - '-f', f'{int(start_freq / 1e6)}:{int(end_freq / 1e6)}', - '-w', str(bin_size), - '-1', # Single sweep - ] - output_mode = 'stdout' - else: - cmd = [ - sweep_tool_path, - '-f', f'{start_freq}:{end_freq}:{bin_size}', - '-g', '40', # Gain - '-i', '1', # Integration interval (1 second) - '-1', # Single shot mode - '-c', '20%', # Crop 20% of edges - '-d', str(device_idx), - tmp_path, - ] - output_mode = 'file' - - logger.debug(f"Running: {' '.join(cmd)}") - - result = subprocess.run( - cmd, - capture_output=True, - text=True, - timeout=30 - ) - - if result.returncode != 0: - logger.warning(f"{os.path.basename(sweep_tool_path)} returned {result.returncode}: {result.stderr}") - - # For HackRF, write stdout CSV data to temp file for unified parsing - if output_mode == 'stdout' and result.stdout: - with open(tmp_path, 'w') as f: - f.write(result.stdout) - - # Parse the CSV output (same format for both rtl_power and hackrf_sweep) - if os.path.exists(tmp_path) and os.path.getsize(tmp_path) > 0: - with open(tmp_path, 'r') as f: - for line in f: - parts = line.strip().split(',') - if len(parts) >= 7: - try: - # CSV format: date, time, hz_low, hz_high, hz_step, samples, db_values... 
- hz_low = int(parts[2].strip()) - hz_high = int(parts[3].strip()) - hz_step = float(parts[4].strip()) - db_values = [float(x) for x in parts[6:] if x.strip()] - - # Find peaks above noise floor - noise_floor = sum(db_values) / len(db_values) if db_values else -100 - threshold = noise_floor + 6 # Signal must be 6dB above noise - - for idx, db in enumerate(db_values): - if db > threshold and db > -90: # Detect signals above -90dBm - freq_hz = hz_low + (idx * hz_step) - freq_mhz = freq_hz / 1000000 - - signals.append({ - 'frequency': freq_mhz, - 'frequency_hz': freq_hz, - 'power': db, - 'band': band_name, - 'noise_floor': noise_floor, - 'signal_strength': db - noise_floor - }) - except (ValueError, IndexError): - continue - - # Clear file for next band - open(tmp_path, 'w').close() - - except subprocess.TimeoutExpired: - logger.warning(f"RF scan timeout for band {band_name}") - except Exception as e: - logger.warning(f"RF scan error for band {band_name}: {e}") - - finally: - # Cleanup temp file - try: - os.unlink(tmp_path) - except OSError: - pass - - # Deduplicate nearby frequencies (within 100kHz) - if signals: - signals.sort(key=lambda x: x['frequency']) - deduped = [signals[0]] - for sig in signals[1:]: - if sig['frequency'] - deduped[-1]['frequency'] > 0.1: # 100 kHz - deduped.append(sig) - elif sig['power'] > deduped[-1]['power']: - deduped[-1] = sig # Keep stronger signal - signals = deduped - - logger.info(f"RF scan found {len(signals)} signals") - return signals - - -def _run_sweep( - sweep_type: str, - baseline_id: int | None, - wifi_enabled: bool, - bt_enabled: bool, - rf_enabled: bool, - wifi_interface: str = '', - bt_interface: str = '', - sdr_device: int | None = None, - verbose_results: bool = False -) -> None: - """ - Run the TSCM sweep in a background thread. - - This orchestrates data collection from WiFi, BT, and RF sources, - then analyzes results for threats using the correlation engine. 
- """ - global _sweep_running, _current_sweep_id - - try: - # Get baseline for comparison if specified - baseline = None - if baseline_id: - baseline = get_tscm_baseline(baseline_id) - - # Get sweep preset - preset = get_sweep_preset(sweep_type) or SWEEP_PRESETS.get('standard') - duration = preset.get('duration_seconds', 300) - - _emit_event('sweep_started', { - 'sweep_id': _current_sweep_id, - 'sweep_type': sweep_type, - 'duration': duration, - 'wifi': wifi_enabled, - 'bluetooth': bt_enabled, - 'rf': rf_enabled, - }) - - # Initialize detector and correlation engine - detector = ThreatDetector(baseline) - correlation = get_correlation_engine() - # Clear old profiles from previous sweeps (keep 24h history) - correlation.clear_old_profiles(24) - - # Initialize device identity engine for MAC-randomization resistant detection - identity_engine = get_identity_engine() - identity_engine.clear() # Start fresh for this sweep - from utils.tscm.advanced import get_timeline_manager - timeline_manager = get_timeline_manager() - try: - cleanup_old_timeline_entries(72) - except Exception as e: - logger.debug(f"TSCM timeline cleanup skipped: {e}") - - last_timeline_write: dict[str, float] = {} - timeline_bucket = getattr(timeline_manager, 'bucket_seconds', 30) - - def _maybe_store_timeline( - identifier: str, - protocol: str, - rssi: int | None = None, - channel: int | None = None, - frequency: float | None = None, - attributes: dict | None = None - ) -> None: - if not identifier: - return - - identifier_norm = identifier.upper() if isinstance(identifier, str) else str(identifier) - key = f"{protocol}:{identifier_norm}" - now_ts = time.time() - last_ts = last_timeline_write.get(key) - if last_ts and (now_ts - last_ts) < timeline_bucket: - return - - last_timeline_write[key] = now_ts - try: - add_device_timeline_entry( - device_identifier=identifier_norm, - protocol=protocol, - sweep_id=_current_sweep_id, - rssi=rssi, - channel=channel, - frequency=frequency, - 
attributes=attributes - ) - except Exception as e: - logger.debug(f"TSCM timeline store error: {e}") - - # Collect and analyze data - threats_found = 0 - severity_counts = {'critical': 0, 'high': 0, 'medium': 0, 'low': 0} - all_wifi = {} # Use dict for deduplication by BSSID - all_wifi_clients = {} # Use dict for deduplication by client MAC - all_bt = {} # Use dict for deduplication by MAC - all_rf = [] - - start_time = time.time() - last_wifi_scan = 0 - last_bt_scan = 0 - last_rf_scan = 0 - wifi_scan_interval = 15 # Scan WiFi every 15 seconds - bt_scan_interval = 20 # Scan Bluetooth every 20 seconds - rf_scan_interval = 30 # Scan RF every 30 seconds - - while _sweep_running and (time.time() - start_time) < duration: - current_time = time.time() - - # Perform WiFi scan - if wifi_enabled and (current_time - last_wifi_scan) >= wifi_scan_interval: - try: - wifi_networks = _scan_wifi_networks(wifi_interface) - last_wifi_scan = current_time - if not wifi_networks and not all_wifi: - logger.warning("TSCM WiFi scan returned 0 networks") - _emit_event('sweep_progress', { - 'progress': min(95, int(((current_time - start_time) / duration) * 100)), - 'status': f'Scanning WiFi... 
({len(wifi_networks)} found)', - 'wifi_count': len(all_wifi) + len([n for n in wifi_networks if n.get('bssid') and n.get('bssid') not in all_wifi]), - 'bt_count': len(all_bt), - 'rf_count': len(all_rf), - }) - for network in wifi_networks: - try: - bssid = network.get('bssid', '') - ssid = network.get('essid', network.get('ssid')) - try: - rssi_val = int(network.get('power', network.get('signal'))) - except (ValueError, TypeError): - rssi_val = None - if bssid: - try: - timeline_manager.add_observation( - identifier=bssid, - protocol='wifi', - rssi=rssi_val, - channel=network.get('channel'), - name=ssid, - attributes={'ssid': ssid, 'encryption': network.get('privacy')} - ) - except Exception as e: - logger.debug(f"WiFi timeline observation error: {e}") - _maybe_store_timeline( - identifier=bssid, - protocol='wifi', - rssi=rssi_val, - channel=network.get('channel'), - attributes={'ssid': ssid, 'encryption': network.get('privacy')} - ) - if bssid and bssid not in all_wifi: - all_wifi[bssid] = network - # Emit device event for frontend - is_threat = False - # Analyze for threats - threat = detector.analyze_wifi_device(network) - if threat: - _handle_threat(threat) - threats_found += 1 - is_threat = True - sev = threat.get('severity', 'low').lower() - if sev in severity_counts: - severity_counts[sev] += 1 - # Classify device and get correlation profile - classification = detector.classify_wifi_device(network) - profile = correlation.analyze_wifi_device(network) - - # Feed to identity engine for MAC-randomization resistant clustering - # Note: WiFi APs don't typically use randomized MACs, but clients do - try: - wifi_obs = { - 'timestamp': datetime.now().isoformat(), - 'src_mac': bssid, - 'bssid': bssid, - 'ssid': network.get('essid'), - 'rssi': network.get('power'), - 'channel': network.get('channel'), - 'encryption': network.get('privacy'), - 'frame_type': 'beacon', - } - ingest_wifi_dict(wifi_obs) - except Exception as e: - logger.debug(f"Identity engine WiFi ingest 
error: {e}") - - # Send device to frontend - _emit_event('wifi_device', { - 'bssid': bssid, - 'ssid': network.get('essid', 'Hidden'), - 'channel': network.get('channel', ''), - 'signal': network.get('power', ''), - 'security': network.get('privacy', ''), - 'vendor': network.get('vendor'), - 'is_threat': is_threat, - 'is_new': not classification.get('in_baseline', False), - 'classification': profile.risk_level.value, - 'reasons': classification.get('reasons', []), - 'score': profile.total_score, - 'score_modifier': profile.score_modifier, - 'known_device': profile.known_device, - 'known_device_name': profile.known_device_name, - 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], - 'recommended_action': profile.recommended_action, - }) - except Exception as e: - logger.error(f"WiFi device processing error for {network.get('bssid', '?')}: {e}") - - # WiFi clients (monitor mode only) - try: - wifi_clients = _scan_wifi_clients(wifi_interface) - for client in wifi_clients: - mac = (client.get('mac') or '').upper() - if not mac or mac in all_wifi_clients: - continue - all_wifi_clients[mac] = client - - rssi_val = client.get('rssi_current') - if rssi_val is None: - rssi_val = client.get('rssi_median') or client.get('rssi_ema') - - client_device = { - 'mac': mac, - 'vendor': client.get('vendor'), - 'name': client.get('vendor') or 'WiFi Client', - 'rssi': rssi_val, - 'associated_bssid': client.get('associated_bssid'), - 'probed_ssids': client.get('probed_ssids', []), - 'probe_count': client.get('probe_count', len(client.get('probed_ssids', []))), - 'is_client': True, - } - - try: - timeline_manager.add_observation( - identifier=mac, - protocol='wifi', - rssi=rssi_val, - name=client_device.get('vendor') or f'WiFi Client {mac[-5:]}', - attributes={'client': True, 'associated_bssid': client_device.get('associated_bssid')} - ) - except Exception as e: - logger.debug(f"WiFi client timeline observation error: {e}") - _maybe_store_timeline( - 
identifier=mac, - protocol='wifi', - rssi=rssi_val, - attributes={'client': True, 'associated_bssid': client_device.get('associated_bssid')} - ) - - profile = correlation.analyze_wifi_device(client_device) - client_device['classification'] = profile.risk_level.value - client_device['score'] = profile.total_score - client_device['score_modifier'] = profile.score_modifier - client_device['known_device'] = profile.known_device - client_device['known_device_name'] = profile.known_device_name - client_device['indicators'] = [ - {'type': i.type.value, 'desc': i.description} - for i in profile.indicators - ] - client_device['recommended_action'] = profile.recommended_action - - # Feed to identity engine for MAC-randomization resistant clustering - try: - wifi_obs = { - 'timestamp': datetime.now().isoformat(), - 'src_mac': mac, - 'bssid': client_device.get('associated_bssid'), - 'rssi': rssi_val, - 'frame_type': 'probe_request', - 'probed_ssids': client_device.get('probed_ssids', []), - } - ingest_wifi_dict(wifi_obs) - except Exception as e: - logger.debug(f"Identity engine WiFi client ingest error: {e}") - - _emit_event('wifi_client', client_device) - except Exception as e: - logger.debug(f"WiFi client scan error: {e}") - except Exception as e: - last_wifi_scan = current_time - logger.error(f"WiFi scan error: {e}") - - # Perform Bluetooth scan - if bt_enabled and (current_time - last_bt_scan) >= bt_scan_interval: - try: - # Use unified Bluetooth scanner if available - if _USE_UNIFIED_BT_SCANNER: - logger.info("TSCM: Using unified BT scanner for snapshot") - bt_devices = get_tscm_bluetooth_snapshot(duration=8) - logger.info(f"TSCM: Unified scanner returned {len(bt_devices)} devices") - else: - logger.info(f"TSCM: Using legacy BT scanner on {bt_interface}") - bt_devices = _scan_bluetooth_devices(bt_interface, duration=8) - logger.info(f"TSCM: Legacy scanner returned {len(bt_devices)} devices") - last_bt_scan = current_time - for device in bt_devices: - try: - mac = 
device.get('mac', '') - try: - rssi_val = int(device.get('rssi', device.get('signal'))) - except (ValueError, TypeError): - rssi_val = None - if mac: - try: - timeline_manager.add_observation( - identifier=mac, - protocol='bluetooth', - rssi=rssi_val, - name=device.get('name'), - attributes={'device_type': device.get('type')} - ) - except Exception as e: - logger.debug(f"BT timeline observation error: {e}") - _maybe_store_timeline( - identifier=mac, - protocol='bluetooth', - rssi=rssi_val, - attributes={'device_type': device.get('type')} - ) - if mac and mac not in all_bt: - all_bt[mac] = device - is_threat = False - # Analyze for threats - threat = detector.analyze_bt_device(device) - if threat: - _handle_threat(threat) - threats_found += 1 - is_threat = True - sev = threat.get('severity', 'low').lower() - if sev in severity_counts: - severity_counts[sev] += 1 - # Classify device and get correlation profile - classification = detector.classify_bt_device(device) - profile = correlation.analyze_bluetooth_device(device) - - # Feed to identity engine for MAC-randomization resistant clustering - try: - ble_obs = { - 'timestamp': datetime.now().isoformat(), - 'addr': mac, - 'rssi': device.get('rssi'), - 'manufacturer_id': device.get('manufacturer_id') or device.get('company_id'), - 'manufacturer_data': device.get('manufacturer_data'), - 'service_uuids': device.get('services', []), - 'local_name': device.get('name'), - } - ingest_ble_dict(ble_obs) - except Exception as e: - logger.debug(f"Identity engine BLE ingest error: {e}") - - # Send device to frontend - _emit_event('bt_device', { - 'mac': mac, - 'name': device.get('name', 'Unknown'), - 'device_type': device.get('type', ''), - 'rssi': device.get('rssi', ''), - 'manufacturer': device.get('manufacturer'), - 'tracker': device.get('tracker'), - 'tracker_type': device.get('tracker_type'), - 'is_threat': is_threat, - 'is_new': not classification.get('in_baseline', False), - 'classification': profile.risk_level.value, - 
'reasons': classification.get('reasons', []), - 'is_audio_capable': classification.get('is_audio_capable', False), - 'score': profile.total_score, - 'score_modifier': profile.score_modifier, - 'known_device': profile.known_device, - 'known_device_name': profile.known_device_name, - 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], - 'recommended_action': profile.recommended_action, - }) - except Exception as e: - logger.error(f"BT device processing error for {device.get('mac', '?')}: {e}") - except Exception as e: - last_bt_scan = current_time - import traceback - logger.error(f"Bluetooth scan error: {e}\n{traceback.format_exc()}") - - # Perform RF scan using SDR - if rf_enabled and (current_time - last_rf_scan) >= rf_scan_interval: - try: - _emit_event('sweep_progress', { - 'progress': min(100, int(((current_time - start_time) / duration) * 100)), - 'status': 'Scanning RF spectrum...', - 'wifi_count': len(all_wifi), - 'bt_count': len(all_bt), - 'rf_count': len(all_rf), - }) - # Try RF scan even if sdr_device is None (will use device 0) - rf_signals = _scan_rf_signals(sdr_device, sweep_ranges=preset.get('ranges')) - - # If no signals and this is first RF scan, send info event - if not rf_signals and last_rf_scan == 0: - _emit_event('rf_status', { - 'status': 'no_signals', - 'message': 'RF scan completed - no signals above threshold. 
This may be normal in a quiet RF environment.', - }) - - for signal in rf_signals: - freq_key = f"{signal['frequency']:.3f}" - try: - power_val = int(float(signal.get('power', signal.get('level')))) - except (ValueError, TypeError): - power_val = None - try: - timeline_manager.add_observation( - identifier=freq_key, - protocol='rf', - rssi=power_val, - frequency=signal.get('frequency'), - name=f"{freq_key} MHz", - attributes={'band': signal.get('band')} - ) - except Exception as e: - logger.debug(f"RF timeline observation error: {e}") - _maybe_store_timeline( - identifier=freq_key, - protocol='rf', - rssi=power_val, - frequency=signal.get('frequency'), - attributes={'band': signal.get('band')} - ) - if freq_key not in [f"{s['frequency']:.3f}" for s in all_rf]: - all_rf.append(signal) - is_threat = False - # Analyze RF signal for threats - threat = detector.analyze_rf_signal(signal) - if threat: - _handle_threat(threat) - threats_found += 1 - is_threat = True - sev = threat.get('severity', 'low').lower() - if sev in severity_counts: - severity_counts[sev] += 1 - # Classify signal and get correlation profile - classification = detector.classify_rf_signal(signal) - profile = correlation.analyze_rf_signal(signal) - # Send signal to frontend - _emit_event('rf_signal', { - 'frequency': signal['frequency'], - 'power': signal['power'], - 'band': signal['band'], - 'signal_strength': signal.get('signal_strength', 0), - 'is_threat': is_threat, - 'is_new': not classification.get('in_baseline', False), - 'classification': profile.risk_level.value, - 'reasons': classification.get('reasons', []), - 'score': profile.total_score, - 'score_modifier': profile.score_modifier, - 'known_device': profile.known_device, - 'known_device_name': profile.known_device_name, - 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], - 'recommended_action': profile.recommended_action, - }) - last_rf_scan = current_time - except Exception as e: - logger.error(f"RF 
scan error: {e}") - - # Update progress - elapsed = time.time() - start_time - progress = min(100, int((elapsed / duration) * 100)) - - _emit_event('sweep_progress', { - 'progress': progress, - 'elapsed': int(elapsed), - 'duration': duration, - 'wifi_count': len(all_wifi), - 'bt_count': len(all_bt), - 'rf_count': len(all_rf), - 'threats_found': threats_found, - 'severity_counts': severity_counts, - }) - - time.sleep(2) # Update every 2 seconds - - # Complete sweep (run even if stopped by user so correlations/clusters are computed) - if _current_sweep_id: - # Run cross-protocol correlation analysis - correlations = correlation.correlate_devices() - findings = correlation.get_all_findings() - - # Run baseline comparison if a baseline was provided - baseline_comparison = None - if baseline: - comparator = BaselineComparator(baseline) - baseline_comparison = comparator.compare_all( - wifi_devices=list(all_wifi.values()), - wifi_clients=list(all_wifi_clients.values()), - bt_devices=list(all_bt.values()), - rf_signals=all_rf - ) - logger.info( - f"Baseline comparison: {baseline_comparison['total_new']} new, " - f"{baseline_comparison['total_missing']} missing" - ) - - # Finalize identity engine and get MAC-randomization resistant clusters - identity_engine.finalize_all_sessions() - identity_summary = identity_engine.get_summary() - identity_clusters = [c.to_dict() for c in identity_engine.get_clusters()] - - if verbose_results: - wifi_payload = list(all_wifi.values()) - wifi_client_payload = list(all_wifi_clients.values()) - bt_payload = list(all_bt.values()) - rf_payload = list(all_rf) - else: - wifi_payload = [ - { - 'bssid': d.get('bssid') or d.get('mac'), - 'essid': d.get('essid') or d.get('ssid'), - 'ssid': d.get('ssid') or d.get('essid'), - 'channel': d.get('channel'), - 'power': d.get('power', d.get('signal')), - 'privacy': d.get('privacy', d.get('encryption')), - 'encryption': d.get('encryption', d.get('privacy')), - } - for d in all_wifi.values() - ] - 
wifi_client_payload = [] - for client in all_wifi_clients.values(): - mac = client.get('mac') or client.get('address') - if isinstance(mac, str): - mac = mac.upper() - probed_ssids = client.get('probed_ssids') or [] - rssi = client.get('rssi') - if rssi is None: - rssi = client.get('rssi_current') - if rssi is None: - rssi = client.get('rssi_median') - if rssi is None: - rssi = client.get('rssi_ema') - wifi_client_payload.append({ - 'mac': mac, - 'vendor': client.get('vendor'), - 'rssi': rssi, - 'associated_bssid': client.get('associated_bssid'), - 'is_associated': client.get('is_associated'), - 'probed_ssids': probed_ssids, - 'probe_count': client.get('probe_count', len(probed_ssids)), - }) - bt_payload = [ - { - 'mac': d.get('mac') or d.get('address'), - 'name': d.get('name'), - 'rssi': d.get('rssi'), - 'manufacturer': d.get('manufacturer', d.get('manufacturer_name')), - } - for d in all_bt.values() - ] - rf_payload = [ - { - 'frequency': s.get('frequency'), - 'power': s.get('power', s.get('level')), - 'modulation': s.get('modulation'), - 'band': s.get('band'), - } - for s in all_rf - ] - - update_tscm_sweep( - _current_sweep_id, - status='completed', - results={ - 'wifi_devices': wifi_payload, - 'wifi_clients': wifi_client_payload, - 'bt_devices': bt_payload, - 'rf_signals': rf_payload, - 'wifi_count': len(all_wifi), - 'wifi_client_count': len(all_wifi_clients), - 'bt_count': len(all_bt), - 'rf_count': len(all_rf), - 'severity_counts': severity_counts, - 'correlation_summary': findings.get('summary', {}), - 'identity_summary': identity_summary.get('statistics', {}), - 'baseline_comparison': baseline_comparison, - 'results_detail_level': 'full' if verbose_results else 'compact', - }, - threats_found=threats_found, - completed=True - ) - - # Emit correlation findings - _emit_event('correlation_findings', { - 'correlations': correlations, - 'high_interest_count': findings['summary'].get('high_interest', 0), - 'needs_review_count': 
findings['summary'].get('needs_review', 0), - }) - - # Emit baseline comparison if a baseline was used - if baseline_comparison: - _emit_event('baseline_comparison', { - 'baseline_id': baseline.get('id'), - 'baseline_name': baseline.get('name'), - 'total_new': baseline_comparison['total_new'], - 'total_missing': baseline_comparison['total_missing'], - 'wifi': baseline_comparison.get('wifi'), - 'wifi_clients': baseline_comparison.get('wifi_clients'), - 'bluetooth': baseline_comparison.get('bluetooth'), - 'rf': baseline_comparison.get('rf'), - }) - - # Emit device identity cluster findings (MAC-randomization resistant) - _emit_event('identity_clusters', { - 'total_clusters': identity_summary.get('statistics', {}).get('total_clusters', 0), - 'high_risk_count': identity_summary.get('statistics', {}).get('high_risk_count', 0), - 'medium_risk_count': identity_summary.get('statistics', {}).get('medium_risk_count', 0), - 'unique_fingerprints': identity_summary.get('statistics', {}).get('unique_fingerprints', 0), - 'clusters': identity_clusters, - }) - - _emit_event('sweep_completed', { - 'sweep_id': _current_sweep_id, - 'threats_found': threats_found, - 'wifi_count': len(all_wifi), - 'wifi_client_count': len(all_wifi_clients), - 'bt_count': len(all_bt), - 'rf_count': len(all_rf), - 'severity_counts': severity_counts, - 'high_interest_devices': findings['summary'].get('high_interest', 0), - 'needs_review_devices': findings['summary'].get('needs_review', 0), - 'correlations_found': len(correlations), - 'identity_clusters': identity_summary['statistics'].get('total_clusters', 0), - 'baseline_new_devices': baseline_comparison['total_new'] if baseline_comparison else 0, - 'baseline_missing_devices': baseline_comparison['total_missing'] if baseline_comparison else 0, - }) - - except Exception as e: - logger.error(f"Sweep error: {e}") - _emit_event('sweep_error', {'error': str(e)}) - if _current_sweep_id: - update_tscm_sweep(_current_sweep_id, status='error', completed=True) - - 
finally: - _sweep_running = False - - -def _handle_threat(threat: dict) -> None: - """Handle a detected threat.""" - if not _current_sweep_id: - return - - # Add to database - threat_id = add_tscm_threat( - sweep_id=_current_sweep_id, - threat_type=threat['threat_type'], - severity=threat['severity'], - source=threat['source'], - identifier=threat['identifier'], - name=threat.get('name'), - signal_strength=threat.get('signal_strength'), - frequency=threat.get('frequency'), - details=threat.get('details') - ) - - # Emit event - _emit_event('threat_detected', { - 'threat_id': threat_id, - **threat - }) - - logger.warning( - f"TSCM threat detected: {threat['threat_type']} - " - f"{threat['identifier']} ({threat['severity']})" - ) - - -# ============================================================================= -# Baseline Endpoints -# ============================================================================= - -@tscm_bp.route('/baseline/record', methods=['POST']) -def record_baseline(): - """Start recording a new baseline.""" - data = request.get_json() or {} - name = data.get('name', f'Baseline {datetime.now().strftime("%Y-%m-%d %H:%M")}') - location = data.get('location') - description = data.get('description') - - baseline_id = _baseline_recorder.start_recording(name, location, description) - - return jsonify({ - 'status': 'success', - 'message': 'Baseline recording started', - 'baseline_id': baseline_id - }) - - -@tscm_bp.route('/baseline/stop', methods=['POST']) -def stop_baseline(): - """Stop baseline recording.""" - result = _baseline_recorder.stop_recording() - - if 'error' in result: - return jsonify({'status': 'error', 'message': result['error']}) - - return jsonify({ - 'status': 'success', - 'message': 'Baseline recording complete', - **result - }) - - -@tscm_bp.route('/baseline/status') -def baseline_status(): - """Get baseline recording status.""" - return jsonify(_baseline_recorder.get_recording_status()) - - -@tscm_bp.route('/baselines') -def 
list_baselines(): - """List all baselines.""" - baselines = get_all_tscm_baselines() - return jsonify({'status': 'success', 'baselines': baselines}) - - -@tscm_bp.route('/baseline/') -def get_baseline(baseline_id: int): - """Get a specific baseline.""" - baseline = get_tscm_baseline(baseline_id) - if not baseline: - return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 - - return jsonify({'status': 'success', 'baseline': baseline}) - - -@tscm_bp.route('/baseline//activate', methods=['POST']) -def activate_baseline(baseline_id: int): - """Set a baseline as active.""" - success = set_active_tscm_baseline(baseline_id) - if not success: - return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 - - return jsonify({'status': 'success', 'message': 'Baseline activated'}) - - -@tscm_bp.route('/baseline/', methods=['DELETE']) -def remove_baseline(baseline_id: int): - """Delete a baseline.""" - success = delete_tscm_baseline(baseline_id) - if not success: - return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 - - return jsonify({'status': 'success', 'message': 'Baseline deleted'}) - - -@tscm_bp.route('/baseline/active') -def get_active_baseline(): - """Get the currently active baseline.""" - baseline = get_active_tscm_baseline() - if not baseline: - return jsonify({'status': 'success', 'baseline': None}) - - return jsonify({'status': 'success', 'baseline': baseline}) - - -@tscm_bp.route('/baseline/compare', methods=['POST']) -def compare_against_baseline(): - """ - Compare provided device data against the active baseline. - - Expects JSON body with: - - wifi_devices: list of WiFi devices (optional) - - wifi_clients: list of WiFi clients (optional) - - bt_devices: list of Bluetooth devices (optional) - - rf_signals: list of RF signals (optional) - - Returns comparison showing new, missing, and matching devices. 
- """ - data = request.get_json() or {} - - wifi_devices = data.get('wifi_devices') - wifi_clients = data.get('wifi_clients') - bt_devices = data.get('bt_devices') - rf_signals = data.get('rf_signals') - - # Use the convenience function that gets active baseline - comparison = get_comparison_for_active_baseline( - wifi_devices=wifi_devices, - wifi_clients=wifi_clients, - bt_devices=bt_devices, - rf_signals=rf_signals - ) - - if comparison is None: - return jsonify({ - 'status': 'error', - 'message': 'No active baseline set' - }), 400 - - return jsonify({ - 'status': 'success', - 'comparison': comparison - }) - - -# ============================================================================= -# Threat Endpoints -# ============================================================================= - -@tscm_bp.route('/threats') -def list_threats(): - """List threats with optional filters.""" - sweep_id = request.args.get('sweep_id', type=int) - severity = request.args.get('severity') - acknowledged = request.args.get('acknowledged') - limit = request.args.get('limit', 100, type=int) - - ack_filter = None - if acknowledged is not None: - ack_filter = acknowledged.lower() in ('true', '1', 'yes') - - threats = get_tscm_threats( - sweep_id=sweep_id, - severity=severity, - acknowledged=ack_filter, - limit=limit - ) - - return jsonify({'status': 'success', 'threats': threats}) - - -@tscm_bp.route('/threats/summary') -def threat_summary(): - """Get threat count summary by severity.""" - summary = get_tscm_threat_summary() - return jsonify({'status': 'success', 'summary': summary}) - - -@tscm_bp.route('/threats/', methods=['PUT']) -def update_threat(threat_id: int): - """Update a threat (acknowledge, add notes).""" - data = request.get_json() or {} - - if data.get('acknowledge'): - notes = data.get('notes') - success = acknowledge_tscm_threat(threat_id, notes) - if not success: - return jsonify({'status': 'error', 'message': 'Threat not found'}), 404 - - return jsonify({'status': 
'success', 'message': 'Threat updated'}) - - -# ============================================================================= -# Preset Endpoints -# ============================================================================= - -@tscm_bp.route('/presets') -def list_presets(): - """List available sweep presets.""" - presets = get_all_sweep_presets() - return jsonify({'status': 'success', 'presets': presets}) - - -@tscm_bp.route('/presets/') -def get_preset(preset_name: str): - """Get details for a specific preset.""" - preset = get_sweep_preset(preset_name) - if not preset: - return jsonify({'status': 'error', 'message': 'Preset not found'}), 404 - - return jsonify({'status': 'success', 'preset': preset}) - - -# ============================================================================= -# Data Feed Endpoints (for adding data during sweeps/baselines) -# ============================================================================= - -@tscm_bp.route('/feed/wifi', methods=['POST']) -def feed_wifi(): - """Feed WiFi device data for baseline recording.""" - data = request.get_json() - if data: - if data.get('is_client'): - _baseline_recorder.add_wifi_client(data) - else: - _baseline_recorder.add_wifi_device(data) - return jsonify({'status': 'success'}) - - -@tscm_bp.route('/feed/bluetooth', methods=['POST']) -def feed_bluetooth(): - """Feed Bluetooth device data for baseline recording.""" - data = request.get_json() - if data: - _baseline_recorder.add_bt_device(data) - return jsonify({'status': 'success'}) - - -@tscm_bp.route('/feed/rf', methods=['POST']) -def feed_rf(): - """Feed RF signal data for baseline recording.""" - data = request.get_json() - if data: - _baseline_recorder.add_rf_signal(data) - return jsonify({'status': 'success'}) - - -# ============================================================================= -# Correlation & Findings Endpoints -# ============================================================================= - -@tscm_bp.route('/findings') 
-def get_findings(): - """ - Get comprehensive TSCM findings from the correlation engine. - - Returns all device profiles organized by risk level, cross-protocol - correlations, and summary statistics with client-safe disclaimers. - """ - correlation = get_correlation_engine() - findings = correlation.get_all_findings() - - # Add client-safe disclaimer - findings['legal_disclaimer'] = ( - "DISCLAIMER: This TSCM screening system identifies wireless and RF anomalies " - "and indicators. Results represent potential items of interest, NOT confirmed " - "surveillance devices. No content has been intercepted or decoded. Findings " - "require professional analysis and verification. This tool does not prove " - "malicious intent or illegal activity." - ) - - return jsonify({ - 'status': 'success', - 'findings': findings - }) - - -@tscm_bp.route('/findings/high-interest') -def get_high_interest(): - """Get only high-interest devices (score >= 6).""" - correlation = get_correlation_engine() - high_interest = correlation.get_high_interest_devices() - - return jsonify({ - 'status': 'success', - 'count': len(high_interest), - 'devices': [d.to_dict() for d in high_interest], - 'disclaimer': ( - "High-interest classification indicates multiple indicators warrant " - "investigation. This does NOT confirm surveillance activity." - ) - }) - - -@tscm_bp.route('/findings/correlations') -def get_correlations(): - """Get cross-protocol correlation analysis.""" - correlation = get_correlation_engine() - correlations = correlation.correlate_devices() - - return jsonify({ - 'status': 'success', - 'count': len(correlations), - 'correlations': correlations, - 'explanation': ( - "Correlations identify devices across different protocols (Bluetooth, " - "WiFi, RF) that exhibit related behavior patterns. Cross-protocol " - "activity is one indicator among many in TSCM analysis." 
- ) - }) - - -@tscm_bp.route('/findings/device/') -def get_device_profile(identifier: str): - """Get detailed profile for a specific device.""" - correlation = get_correlation_engine() - - # Search all protocols for the identifier - for protocol in ['bluetooth', 'wifi', 'rf']: - key = f"{protocol}:{identifier}" - if key in correlation.device_profiles: - profile = correlation.device_profiles[key] - return jsonify({ - 'status': 'success', - 'profile': profile.to_dict() - }) - - return jsonify({ - 'status': 'error', - 'message': 'Device not found' - }), 404 - - -# ============================================================================= -# Meeting Window Endpoints (for time correlation) -# ============================================================================= - -@tscm_bp.route('/meeting/start', methods=['POST']) -def start_meeting(): - """ - Mark the start of a sensitive period (meeting, briefing, etc.). - - Devices detected during this window will receive additional scoring - for meeting-correlated activity. 
- """ - correlation = get_correlation_engine() - correlation.start_meeting_window() - - _emit_event('meeting_started', { - 'timestamp': datetime.now().isoformat(), - 'message': 'Sensitive period monitoring active' - }) - - return jsonify({ - 'status': 'success', - 'message': 'Meeting window started - devices detected now will be flagged' - }) - - -@tscm_bp.route('/meeting/end', methods=['POST']) -def end_meeting(): - """Mark the end of a sensitive period.""" - correlation = get_correlation_engine() - correlation.end_meeting_window() - - _emit_event('meeting_ended', { - 'timestamp': datetime.now().isoformat() - }) - - return jsonify({ - 'status': 'success', - 'message': 'Meeting window ended' - }) - - -@tscm_bp.route('/meeting/status') -def meeting_status(): - """Check if currently in a meeting window.""" - correlation = get_correlation_engine() - in_meeting = correlation.is_during_meeting() - - return jsonify({ - 'status': 'success', - 'in_meeting': in_meeting, - 'windows': [ - { - 'start': start.isoformat(), - 'end': end.isoformat() if end else None - } - for start, end in correlation.meeting_windows - ] - }) - - -# ============================================================================= -# Report Generation Endpoints -# ============================================================================= - -@tscm_bp.route('/report') -def generate_report(): - """ - Generate a comprehensive TSCM sweep report. - - Includes all findings, correlations, indicators, and recommended actions - in a client-presentable format with appropriate disclaimers. 
- """ - correlation = get_correlation_engine() - findings = correlation.get_all_findings() - - # Build the report structure - report = { - 'generated_at': datetime.now().isoformat(), - 'report_type': 'TSCM Wireless Surveillance Screening', - - 'executive_summary': { - 'total_devices_analyzed': findings['summary']['total_devices'], - 'high_interest_items': findings['summary']['high_interest'], - 'items_requiring_review': findings['summary']['needs_review'], - 'cross_protocol_correlations': findings['summary']['correlations_found'], - 'assessment': _generate_assessment(findings['summary']), - }, - - 'methodology': { - 'protocols_scanned': ['Bluetooth Low Energy', 'WiFi 802.11', 'RF Spectrum'], - 'analysis_techniques': [ - 'Device fingerprinting', - 'Signal stability analysis', - 'Cross-protocol correlation', - 'Time-based pattern detection', - 'Manufacturer identification', - ], - 'scoring_model': { - 'informational': '0-2 points - Known or expected devices', - 'needs_review': '3-5 points - Unusual devices requiring assessment', - 'high_interest': '6+ points - Multiple indicators warrant investigation', - } - }, - - 'findings': { - 'high_interest': findings['devices']['high_interest'], - 'needs_review': findings['devices']['needs_review'], - 'informational': findings['devices']['informational'], - }, - - 'correlations': findings['correlations'], - - 'disclaimers': { - 'legal': ( - "This report documents findings from a wireless and RF surveillance " - "screening. Results indicate anomalies and items of interest, NOT " - "confirmed surveillance devices. No communications content has been " - "intercepted, recorded, or decoded. This screening does not prove " - "malicious intent, illegal activity, or the presence of surveillance " - "equipment. All findings require professional verification." - ), - 'technical': ( - "Detection capabilities are limited by equipment sensitivity, " - "environmental factors, and the technical sophistication of any " - "potential devices. 
Absence of findings does NOT guarantee absence " - "of surveillance equipment." - ), - 'recommendations': ( - "High-interest items should be investigated by qualified TSCM " - "professionals using appropriate physical inspection techniques. " - "This electronic sweep is one component of comprehensive TSCM." - ) - } - } - - return jsonify({ - 'status': 'success', - 'report': report - }) - - -def _generate_assessment(summary: dict) -> str: - """Generate an assessment summary based on findings.""" - high = summary.get('high_interest', 0) - review = summary.get('needs_review', 0) - correlations = summary.get('correlations_found', 0) - - if high > 0 or correlations > 0: - return ( - f"ELEVATED CONCERN: {high} high-interest item(s) and " - f"{correlations} cross-protocol correlation(s) detected. " - "Professional TSCM inspection recommended." - ) - elif review > 3: - return ( - f"MODERATE CONCERN: {review} items requiring review. " - "Further analysis recommended to characterize unknown devices." - ) - elif review > 0: - return ( - f"LOW CONCERN: {review} item(s) flagged for review. " - "Likely benign but verification recommended." - ) - else: - return ( - "BASELINE ENVIRONMENT: No significant anomalies detected. " - "Environment appears consistent with expected wireless activity." - ) - - -# ============================================================================= -# Device Identity Endpoints (MAC-Randomization Resistant Detection) -# ============================================================================= - -@tscm_bp.route('/identity/ingest/ble', methods=['POST']) -def ingest_ble_observation(): - """ - Ingest a BLE observation for device identity clustering. - - This endpoint accepts BLE advertisement data and feeds it into the - MAC-randomization resistant device detection engine. 
- - Expected JSON payload: - { - "timestamp": "2024-01-01T12:00:00", // ISO format or omit for now - "addr": "AA:BB:CC:DD:EE:FF", // BLE address (may be randomized) - "addr_type": "rpa", // public/random_static/rpa/nrpa/unknown - "rssi": -65, // dBm - "tx_power": -10, // dBm (optional) - "adv_type": "ADV_IND", // Advertisement type - "manufacturer_id": 1234, // Company ID (optional) - "manufacturer_data": "0102030405", // Hex string (optional) - "service_uuids": ["uuid1", "uuid2"], // List of UUIDs (optional) - "local_name": "Device Name", // Advertised name (optional) - "appearance": 960, // BLE appearance (optional) - "packet_length": 31 // Total packet length (optional) - } - """ - try: - from utils.tscm.device_identity import ingest_ble_dict - - data = request.get_json() - if not data: - return jsonify({'status': 'error', 'message': 'No data provided'}), 400 - - session = ingest_ble_dict(data) - - return jsonify({ - 'status': 'success', - 'session_id': session.session_id, - 'observation_count': len(session.observations), - }) - - except Exception as e: - logger.error(f"BLE ingestion error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/ingest/wifi', methods=['POST']) -def ingest_wifi_observation(): - """ - Ingest a WiFi observation for device identity clustering. - - Expected JSON payload: - { - "timestamp": "2024-01-01T12:00:00", - "src_mac": "AA:BB:CC:DD:EE:FF", // Client MAC (may be randomized) - "dst_mac": "11:22:33:44:55:66", // Destination MAC - "bssid": "11:22:33:44:55:66", // AP BSSID - "ssid": "NetworkName", // SSID if available - "frame_type": "probe_request", // Frame type - "rssi": -70, // dBm - "channel": 6, // WiFi channel - "ht_capable": true, // 802.11n capable - "vht_capable": true, // 802.11ac capable - "he_capable": false, // 802.11ax capable - "supported_rates": [1, 2, 5.5, 11], // Supported rates - "vendor_ies": [["001122", 10]], // [(OUI, length), ...] 
- "probed_ssids": ["ssid1", "ssid2"] // For probe requests - } - """ - try: - from utils.tscm.device_identity import ingest_wifi_dict - - data = request.get_json() - if not data: - return jsonify({'status': 'error', 'message': 'No data provided'}), 400 - - session = ingest_wifi_dict(data) - - return jsonify({ - 'status': 'success', - 'session_id': session.session_id, - 'observation_count': len(session.observations), - }) - - except Exception as e: - logger.error(f"WiFi ingestion error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/ingest/batch', methods=['POST']) -def ingest_batch_observations(): - """ - Ingest multiple observations in a single request. - - Expected JSON payload: - { - "ble": [, ...], - "wifi": [, ...] - } - """ - try: - from utils.tscm.device_identity import ingest_ble_dict, ingest_wifi_dict - - data = request.get_json() - if not data: - return jsonify({'status': 'error', 'message': 'No data provided'}), 400 - - ble_count = 0 - wifi_count = 0 - - for ble_obs in data.get('ble', []): - ingest_ble_dict(ble_obs) - ble_count += 1 - - for wifi_obs in data.get('wifi', []): - ingest_wifi_dict(wifi_obs) - wifi_count += 1 - - return jsonify({ - 'status': 'success', - 'ble_ingested': ble_count, - 'wifi_ingested': wifi_count, - }) - - except Exception as e: - logger.error(f"Batch ingestion error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/clusters') -def get_device_clusters(): - """ - Get all device clusters (probable physical device identities). 
- - Query parameters: - - min_confidence: Minimum cluster confidence (0-1, default 0) - - protocol: Filter by protocol ('ble' or 'wifi') - - risk_level: Filter by risk level ('high', 'medium', 'low', 'informational') - """ - try: - from utils.tscm.device_identity import get_identity_engine - - engine = get_identity_engine() - min_conf = request.args.get('min_confidence', 0, type=float) - protocol = request.args.get('protocol') - risk_filter = request.args.get('risk_level') - - clusters = engine.get_clusters(min_confidence=min_conf) - - if protocol: - clusters = [c for c in clusters if c.protocol == protocol] - - if risk_filter: - clusters = [c for c in clusters if c.risk_level.value == risk_filter] - - return jsonify({ - 'status': 'success', - 'count': len(clusters), - 'clusters': [c.to_dict() for c in clusters], - 'disclaimer': ( - "Clusters represent PROBABLE device identities based on passive " - "fingerprinting. Results are statistical correlations, not " - "confirmed matches. False positives/negatives are expected." - ) - }) - - except Exception as e: - logger.error(f"Get clusters error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/clusters/high-risk') -def get_high_risk_clusters(): - """Get device clusters with HIGH risk level.""" - try: - from utils.tscm.device_identity import get_identity_engine - - engine = get_identity_engine() - clusters = engine.get_high_risk_clusters() - - return jsonify({ - 'status': 'success', - 'count': len(clusters), - 'clusters': [c.to_dict() for c in clusters], - 'disclaimer': ( - "High-risk classification indicates multiple behavioral indicators " - "consistent with potential surveillance devices. This does NOT " - "confirm surveillance activity. Professional verification required." 
- ) - }) - - except Exception as e: - logger.error(f"Get high-risk clusters error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/summary') -def get_identity_summary(): - """ - Get summary of device identity analysis. - - Returns statistics, cluster counts by risk level, and monitoring period. - """ - try: - from utils.tscm.device_identity import get_identity_engine - - engine = get_identity_engine() - summary = engine.get_summary() - - return jsonify({ - 'status': 'success', - 'summary': summary - }) - - except Exception as e: - logger.error(f"Get identity summary error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/finalize', methods=['POST']) -def finalize_identity_sessions(): - """ - Finalize all active sessions and complete clustering. - - Call this at the end of a monitoring period to ensure all observations - are properly clustered and assessed. - """ - try: - from utils.tscm.device_identity import get_identity_engine - - engine = get_identity_engine() - engine.finalize_all_sessions() - summary = engine.get_summary() - - return jsonify({ - 'status': 'success', - 'message': 'All sessions finalized', - 'summary': summary - }) - - except Exception as e: - logger.error(f"Finalize sessions error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/reset', methods=['POST']) -def reset_identity_engine(): - """ - Reset the device identity engine. - - Clears all sessions, clusters, and monitoring state. 
- """ - try: - from utils.tscm.device_identity import reset_identity_engine as reset_engine - - reset_engine() - - return jsonify({ - 'status': 'success', - 'message': 'Device identity engine reset' - }) - - except Exception as e: - logger.error(f"Reset identity engine error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/identity/cluster/') -def get_cluster_detail(cluster_id: str): - """Get detailed information for a specific cluster.""" - try: - from utils.tscm.device_identity import get_identity_engine - - engine = get_identity_engine() - - if cluster_id not in engine.clusters: - return jsonify({ - 'status': 'error', - 'message': 'Cluster not found' - }), 404 - - cluster = engine.clusters[cluster_id] - - return jsonify({ - 'status': 'success', - 'cluster': cluster.to_dict() - }) - - except Exception as e: - logger.error(f"Get cluster detail error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# Capabilities & Coverage Endpoints -# ============================================================================= - -@tscm_bp.route('/capabilities') -def get_capabilities(): - """ - Get current system capabilities for TSCM sweeping. - - Returns what the system CAN and CANNOT detect based on OS, - privileges, adapters, and SDR hardware. 
- """ - try: - from utils.tscm.advanced import detect_sweep_capabilities - - wifi_interface = request.args.get('wifi_interface', '') - bt_adapter = request.args.get('bt_adapter', '') - - caps = detect_sweep_capabilities( - wifi_interface=wifi_interface, - bt_adapter=bt_adapter - ) - - return jsonify({ - 'status': 'success', - 'capabilities': caps.to_dict() - }) - - except Exception as e: - logger.error(f"Get capabilities error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/sweep//capabilities') -def get_sweep_stored_capabilities(sweep_id: int): - """Get stored capabilities for a specific sweep.""" - from utils.database import get_sweep_capabilities - - caps = get_sweep_capabilities(sweep_id) - if not caps: - return jsonify({'status': 'error', 'message': 'No capabilities stored for this sweep'}), 404 - - return jsonify({ - 'status': 'success', - 'capabilities': caps - }) - - -# ============================================================================= -# Baseline Diff & Health Endpoints -# ============================================================================= - -@tscm_bp.route('/baseline/diff//') -def get_baseline_diff(baseline_id: int, sweep_id: int): - """ - Get comprehensive diff between a baseline and a sweep. - - Shows new devices, missing devices, changed characteristics, - and baseline health assessment. 
- """ - try: - from utils.tscm.advanced import calculate_baseline_diff - - baseline = get_tscm_baseline(baseline_id) - if not baseline: - return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 - - sweep = get_tscm_sweep(sweep_id) - if not sweep: - return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 - - # Get current devices from sweep results - results = sweep.get('results', {}) - if isinstance(results, str): - import json - results = json.loads(results) - - current_wifi = results.get('wifi_devices', []) - current_wifi_clients = results.get('wifi_clients', []) - current_bt = results.get('bt_devices', []) - current_rf = results.get('rf_signals', []) - - diff = calculate_baseline_diff( - baseline=baseline, - current_wifi=current_wifi, - current_wifi_clients=current_wifi_clients, - current_bt=current_bt, - current_rf=current_rf, - sweep_id=sweep_id - ) - - return jsonify({ - 'status': 'success', - 'diff': diff.to_dict() - }) - - except Exception as e: - logger.error(f"Get baseline diff error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/baseline//health') -def get_baseline_health(baseline_id: int): - """Get health assessment for a baseline.""" - try: - from utils.tscm.advanced import BaselineHealth - from datetime import datetime - - baseline = get_tscm_baseline(baseline_id) - if not baseline: - return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 - - # Calculate age - created_at = baseline.get('created_at') - age_hours = 0 - if created_at: - if isinstance(created_at, str): - created = datetime.fromisoformat(created_at.replace('Z', '+00:00')) - age_hours = (datetime.now() - created.replace(tzinfo=None)).total_seconds() / 3600 - elif isinstance(created_at, datetime): - age_hours = (datetime.now() - created_at).total_seconds() / 3600 - - # Count devices - total_devices = ( - len(baseline.get('wifi_networks', [])) + - len(baseline.get('bt_devices', [])) + - 
len(baseline.get('rf_frequencies', [])) - ) - - # Determine health - health = 'healthy' - score = 1.0 - reasons = [] - - if age_hours > 168: - health = 'stale' - score = 0.3 - reasons.append(f'Baseline is {age_hours:.0f} hours old (over 1 week)') - elif age_hours > 72: - health = 'noisy' - score = 0.6 - reasons.append(f'Baseline is {age_hours:.0f} hours old (over 3 days)') - - if total_devices < 3: - score -= 0.2 - reasons.append(f'Baseline has few devices ({total_devices})') - if health == 'healthy': - health = 'noisy' - - return jsonify({ - 'status': 'success', - 'health': { - 'status': health, - 'score': round(max(0, score), 2), - 'age_hours': round(age_hours, 1), - 'total_devices': total_devices, - 'reasons': reasons, - } - }) - - except Exception as e: - logger.error(f"Get baseline health error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# Device Timeline Endpoints -# ============================================================================= - -@tscm_bp.route('/device//timeline') -def get_device_timeline_endpoint(identifier: str): - """ - Get timeline of observations for a device. - - Shows behavior over time including RSSI stability, presence, - and meeting window correlation. 
- """ - try: - from utils.tscm.advanced import get_timeline_manager - from utils.database import get_device_timeline - - protocol = request.args.get('protocol', 'bluetooth') - since_hours = request.args.get('since_hours', 24, type=int) - - # Try in-memory timeline first - manager = get_timeline_manager() - timeline = manager.get_timeline(identifier, protocol) - - # Also get stored timeline from database - stored = get_device_timeline(identifier, since_hours=since_hours) - - result = { - 'identifier': identifier, - 'protocol': protocol, - 'observations': stored, - } - - if timeline: - result['metrics'] = { - 'first_seen': timeline.first_seen.isoformat() if timeline.first_seen else None, - 'last_seen': timeline.last_seen.isoformat() if timeline.last_seen else None, - 'total_observations': timeline.total_observations, - 'presence_ratio': round(timeline.presence_ratio, 2), - } - result['signal'] = { - 'rssi_min': timeline.rssi_min, - 'rssi_max': timeline.rssi_max, - 'rssi_mean': round(timeline.rssi_mean, 1) if timeline.rssi_mean else None, - 'stability': round(timeline.rssi_stability, 2), - } - result['movement'] = { - 'appears_stationary': timeline.appears_stationary, - 'pattern': timeline.movement_pattern, - } - result['meeting_correlation'] = { - 'correlated': timeline.meeting_correlated, - 'observations_during_meeting': timeline.meeting_observations, - } - - return jsonify({ - 'status': 'success', - 'timeline': result - }) - - except Exception as e: - logger.error(f"Get device timeline error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/timelines') -def get_all_device_timelines(): - """Get all device timelines.""" - try: - from utils.tscm.advanced import get_timeline_manager - - manager = get_timeline_manager() - timelines = manager.get_all_timelines() - - return jsonify({ - 'status': 'success', - 'count': len(timelines), - 'timelines': [t.to_dict() for t in timelines] - }) - - except Exception as e: - logger.error(f"Get 
all timelines error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# Known-Good Registry (Whitelist) Endpoints -# ============================================================================= - -@tscm_bp.route('/known-devices', methods=['GET']) -def list_known_devices(): - """List all known-good devices.""" - from utils.database import get_all_known_devices - - location = request.args.get('location') - scope = request.args.get('scope') - - devices = get_all_known_devices(location=location, scope=scope) - - return jsonify({ - 'status': 'success', - 'count': len(devices), - 'devices': devices - }) - - -@tscm_bp.route('/known-devices', methods=['POST']) -def add_known_device_endpoint(): - """ - Add a device to the known-good registry. - - Known devices remain visible but receive reduced risk scores. - They are NOT suppressed from reports (preserves audit trail). - """ - from utils.database import add_known_device - - data = request.get_json() or {} - - identifier = data.get('identifier') - protocol = data.get('protocol') - - if not identifier or not protocol: - return jsonify({ - 'status': 'error', - 'message': 'identifier and protocol are required' - }), 400 - - device_id = add_known_device( - identifier=identifier, - protocol=protocol, - name=data.get('name'), - description=data.get('description'), - location=data.get('location'), - scope=data.get('scope', 'global'), - added_by=data.get('added_by'), - score_modifier=data.get('score_modifier', -2), - metadata=data.get('metadata') - ) - - return jsonify({ - 'status': 'success', - 'message': 'Device added to known-good registry', - 'device_id': device_id - }) - - -@tscm_bp.route('/known-devices/', methods=['GET']) -def get_known_device_endpoint(identifier: str): - """Get a known device by identifier.""" - from utils.database import get_known_device - - device = get_known_device(identifier) - if not device: - 
return jsonify({'status': 'error', 'message': 'Device not found'}), 404 - - return jsonify({ - 'status': 'success', - 'device': device - }) - - -@tscm_bp.route('/known-devices/', methods=['DELETE']) -def delete_known_device_endpoint(identifier: str): - """Remove a device from the known-good registry.""" - from utils.database import delete_known_device - - success = delete_known_device(identifier) - if not success: - return jsonify({'status': 'error', 'message': 'Device not found'}), 404 - - return jsonify({ - 'status': 'success', - 'message': 'Device removed from known-good registry' - }) - - -@tscm_bp.route('/known-devices/check/') -def check_known_device(identifier: str): - """Check if a device is in the known-good registry.""" - from utils.database import is_known_good_device - - location = request.args.get('location') - result = is_known_good_device(identifier, location=location) - - return jsonify({ - 'status': 'success', - 'is_known': result is not None, - 'details': result - }) - - -# ============================================================================= -# Case Management Endpoints -# ============================================================================= - -@tscm_bp.route('/cases', methods=['GET']) -def list_cases(): - """List all TSCM cases.""" - from utils.database import get_all_tscm_cases - - status = request.args.get('status') - limit = request.args.get('limit', 50, type=int) - - cases = get_all_tscm_cases(status=status, limit=limit) - - return jsonify({ - 'status': 'success', - 'count': len(cases), - 'cases': cases - }) - - -@tscm_bp.route('/cases', methods=['POST']) -def create_case(): - """Create a new TSCM case.""" - from utils.database import create_tscm_case - - data = request.get_json() or {} - - name = data.get('name') - if not name: - return jsonify({'status': 'error', 'message': 'name is required'}), 400 - - case_id = create_tscm_case( - name=name, - description=data.get('description'), - location=data.get('location'), - 
priority=data.get('priority', 'normal'), - created_by=data.get('created_by'), - metadata=data.get('metadata') - ) - - return jsonify({ - 'status': 'success', - 'message': 'Case created', - 'case_id': case_id - }) - - -@tscm_bp.route('/cases/', methods=['GET']) -def get_case(case_id: int): - """Get a TSCM case with all linked sweeps, threats, and notes.""" - from utils.database import get_tscm_case - - case = get_tscm_case(case_id) - if not case: - return jsonify({'status': 'error', 'message': 'Case not found'}), 404 - - return jsonify({ - 'status': 'success', - 'case': case - }) - - -@tscm_bp.route('/cases/', methods=['PUT']) -def update_case(case_id: int): - """Update a TSCM case.""" - from utils.database import update_tscm_case - - data = request.get_json() or {} - - success = update_tscm_case( - case_id=case_id, - status=data.get('status'), - priority=data.get('priority'), - assigned_to=data.get('assigned_to'), - notes=data.get('notes') - ) - - if not success: - return jsonify({'status': 'error', 'message': 'Case not found'}), 404 - - return jsonify({ - 'status': 'success', - 'message': 'Case updated' - }) - - -@tscm_bp.route('/cases//sweeps/', methods=['POST']) -def link_sweep_to_case(case_id: int, sweep_id: int): - """Link a sweep to a case.""" - from utils.database import add_sweep_to_case - - success = add_sweep_to_case(case_id, sweep_id) - - return jsonify({ - 'status': 'success' if success else 'error', - 'message': 'Sweep linked to case' if success else 'Already linked or not found' - }) - - -@tscm_bp.route('/cases//threats/', methods=['POST']) -def link_threat_to_case(case_id: int, threat_id: int): - """Link a threat to a case.""" - from utils.database import add_threat_to_case - - success = add_threat_to_case(case_id, threat_id) - - return jsonify({ - 'status': 'success' if success else 'error', - 'message': 'Threat linked to case' if success else 'Already linked or not found' - }) - - -@tscm_bp.route('/cases//notes', methods=['POST']) -def 
add_note_to_case(case_id: int): - """Add a note to a case.""" - from utils.database import add_case_note - - data = request.get_json() or {} - - content = data.get('content') - if not content: - return jsonify({'status': 'error', 'message': 'content is required'}), 400 - - note_id = add_case_note( - case_id=case_id, - content=content, - note_type=data.get('note_type', 'general'), - created_by=data.get('created_by') - ) - - return jsonify({ - 'status': 'success', - 'message': 'Note added', - 'note_id': note_id - }) - - -# ============================================================================= -# Meeting Window Enhanced Endpoints -# ============================================================================= - -@tscm_bp.route('/meeting/start-tracked', methods=['POST']) -def start_tracked_meeting(): - """ - Start a tracked meeting window with database persistence. - - Tracks devices first seen during meeting and behavior changes. - """ - from utils.database import start_meeting_window - from utils.tscm.advanced import get_timeline_manager - - data = request.get_json() or {} - - meeting_id = start_meeting_window( - sweep_id=_current_sweep_id, - name=data.get('name'), - location=data.get('location'), - notes=data.get('notes') - ) - - # Start meeting in correlation engine - correlation = get_correlation_engine() - correlation.start_meeting_window() - - # Start in timeline manager - manager = get_timeline_manager() - manager.start_meeting_window() - - _emit_event('meeting_started', { - 'meeting_id': meeting_id, - 'timestamp': datetime.now().isoformat(), - 'name': data.get('name'), - }) - - return jsonify({ - 'status': 'success', - 'message': 'Tracked meeting window started', - 'meeting_id': meeting_id - }) - - -@tscm_bp.route('/meeting//end', methods=['POST']) -def end_tracked_meeting(meeting_id: int): - """End a tracked meeting window.""" - from utils.database import end_meeting_window - from utils.tscm.advanced import get_timeline_manager - - success = 
end_meeting_window(meeting_id) - if not success: - return jsonify({'status': 'error', 'message': 'Meeting not found or already ended'}), 404 - - # End in correlation engine - correlation = get_correlation_engine() - correlation.end_meeting_window() - - # End in timeline manager - manager = get_timeline_manager() - manager.end_meeting_window() - - _emit_event('meeting_ended', { - 'meeting_id': meeting_id, - 'timestamp': datetime.now().isoformat() - }) - - return jsonify({ - 'status': 'success', - 'message': 'Meeting window ended' - }) - - -@tscm_bp.route('/meeting//summary') -def get_meeting_summary_endpoint(meeting_id: int): - """Get detailed summary of device activity during a meeting.""" - try: - from utils.database import get_meeting_windows - from utils.tscm.advanced import generate_meeting_summary, get_timeline_manager - - # Get meeting window - windows = get_meeting_windows(_current_sweep_id or 0) - meeting = None - for w in windows: - if w.get('id') == meeting_id: - meeting = w - break - - if not meeting: - return jsonify({'status': 'error', 'message': 'Meeting not found'}), 404 - - # Get timelines and profiles - manager = get_timeline_manager() - timelines = manager.get_all_timelines() - - correlation = get_correlation_engine() - profiles = [p.to_dict() for p in correlation.device_profiles.values()] - - summary = generate_meeting_summary(meeting, timelines, profiles) - - return jsonify({ - 'status': 'success', - 'summary': summary.to_dict() - }) - - except Exception as e: - logger.error(f"Get meeting summary error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/meeting/active') -def get_active_meeting(): - """Get currently active meeting window.""" - from utils.database import get_active_meeting_window - - meeting = get_active_meeting_window(_current_sweep_id) - - return jsonify({ - 'status': 'success', - 'meeting': meeting, - 'is_active': meeting is not None - }) - - -# 
============================================================================= -# PDF Report & Technical Annex Endpoints -# ============================================================================= - -@tscm_bp.route('/report/pdf') -def get_pdf_report(): - """ - Generate client-safe PDF report. - - Contains executive summary, findings by risk tier, meeting window - summary, and mandatory disclaimers. - """ - try: - from utils.tscm.reports import generate_report, get_pdf_report - from utils.tscm.advanced import detect_sweep_capabilities, get_timeline_manager - - sweep_id = request.args.get('sweep_id', _current_sweep_id, type=int) - if not sweep_id: - return jsonify({'status': 'error', 'message': 'No sweep specified'}), 400 - - sweep = get_tscm_sweep(sweep_id) - if not sweep: - return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 - - # Get data for report - correlation = get_correlation_engine() - profiles = [p.to_dict() for p in correlation.device_profiles.values()] - caps = detect_sweep_capabilities().to_dict() - - manager = get_timeline_manager() - timelines = [t.to_dict() for t in manager.get_all_timelines()] - - # Generate report - report = generate_report( - sweep_id=sweep_id, - sweep_data=sweep, - device_profiles=profiles, - capabilities=caps, - timelines=timelines - ) - - pdf_content = get_pdf_report(report) - - return Response( - pdf_content, - mimetype='text/plain', - headers={ - 'Content-Disposition': f'attachment; filename=tscm_report_{sweep_id}.txt' - } - ) - - except Exception as e: - logger.error(f"Generate PDF report error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/report/annex') -def get_technical_annex(): - """ - Generate technical annex (JSON + CSV). - - Contains device timelines, all indicators, and detailed data - for audit purposes. No packet data included. 
- """ - try: - from utils.tscm.reports import generate_report, get_json_annex, get_csv_annex - from utils.tscm.advanced import detect_sweep_capabilities, get_timeline_manager - - sweep_id = request.args.get('sweep_id', _current_sweep_id, type=int) - format_type = request.args.get('format', 'json') - - if not sweep_id: - return jsonify({'status': 'error', 'message': 'No sweep specified'}), 400 - - sweep = get_tscm_sweep(sweep_id) - if not sweep: - return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 - - # Get data for report - correlation = get_correlation_engine() - profiles = [p.to_dict() for p in correlation.device_profiles.values()] - caps = detect_sweep_capabilities().to_dict() - - manager = get_timeline_manager() - timelines = [t.to_dict() for t in manager.get_all_timelines()] - - # Generate report - report = generate_report( - sweep_id=sweep_id, - sweep_data=sweep, - device_profiles=profiles, - capabilities=caps, - timelines=timelines - ) - - if format_type == 'csv': - csv_content = get_csv_annex(report) - return Response( - csv_content, - mimetype='text/csv', - headers={ - 'Content-Disposition': f'attachment; filename=tscm_annex_{sweep_id}.csv' - } - ) - else: - annex = get_json_annex(report) - return jsonify({ - 'status': 'success', - 'annex': annex - }) - - except Exception as e: - logger.error(f"Generate technical annex error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# WiFi Advanced Indicators Endpoints -# ============================================================================= - -@tscm_bp.route('/wifi/advanced-indicators') -def get_wifi_advanced_indicators(): - """ - Get advanced WiFi indicators (Evil Twin, Probes, Deauth). - - These indicators require analysis of WiFi patterns. - Some features require monitor mode. 
- """ - try: - from utils.tscm.advanced import get_wifi_detector - - detector = get_wifi_detector() - - return jsonify({ - 'status': 'success', - 'indicators': detector.get_all_indicators(), - 'unavailable_features': detector.get_unavailable_features(), - 'disclaimer': ( - "All indicators represent pattern detections, NOT confirmed attacks. " - "Further investigation is required." - ) - }) - - except Exception as e: - logger.error(f"Get WiFi indicators error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/wifi/analyze-network', methods=['POST']) -def analyze_wifi_network(): - """ - Analyze a WiFi network for evil twin patterns. - - Compares against known networks to detect SSID spoofing. - """ - try: - from utils.tscm.advanced import get_wifi_detector - - data = request.get_json() or {} - detector = get_wifi_detector() - - # Set known networks from baseline if available - baseline = get_active_tscm_baseline() - if baseline: - detector.set_known_networks(baseline.get('wifi_networks', [])) - - indicators = detector.analyze_network(data) - - return jsonify({ - 'status': 'success', - 'indicators': [i.to_dict() for i in indicators] - }) - - except Exception as e: - logger.error(f"Analyze WiFi network error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# Bluetooth Risk Explainability Endpoints -# ============================================================================= - -@tscm_bp.route('/bluetooth//explain') -def explain_bluetooth_risk(identifier: str): - """ - Get human-readable risk explanation for a BLE device. - - Includes proximity estimate, tracker explanation, and - recommended actions. 
- """ - try: - from utils.tscm.advanced import generate_ble_risk_explanation - - # Get device from correlation engine - correlation = get_correlation_engine() - profile = None - key = f"bluetooth:{identifier.upper()}" - if key in correlation.device_profiles: - profile = correlation.device_profiles[key].to_dict() - - # Try to find device info - device = {'mac': identifier} - if profile: - device['name'] = profile.get('name') - device['rssi'] = profile.get('rssi_samples', [None])[-1] if profile.get('rssi_samples') else None - - # Check meeting status - is_meeting = correlation.is_during_meeting() - - explanation = generate_ble_risk_explanation(device, profile, is_meeting) - - return jsonify({ - 'status': 'success', - 'explanation': explanation.to_dict() - }) - - except Exception as e: - logger.error(f"Explain BLE risk error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/bluetooth//proximity') -def get_bluetooth_proximity(identifier: str): - """Get proximity estimate for a BLE device.""" - try: - from utils.tscm.advanced import estimate_ble_proximity - - rssi = request.args.get('rssi', type=int) - if rssi is None: - # Try to get from correlation engine - correlation = get_correlation_engine() - key = f"bluetooth:{identifier.upper()}" - if key in correlation.device_profiles: - profile = correlation.device_profiles[key] - if profile.rssi_samples: - rssi = profile.rssi_samples[-1] - - if rssi is None: - return jsonify({ - 'status': 'error', - 'message': 'RSSI value required' - }), 400 - - proximity, explanation, distance = estimate_ble_proximity(rssi) - - return jsonify({ - 'status': 'success', - 'proximity': { - 'estimate': proximity.value, - 'explanation': explanation, - 'estimated_distance': distance, - 'rssi_used': rssi, - }, - 'disclaimer': ( - "Proximity estimates are approximate and affected by " - "environment, obstacles, and device characteristics." 
- ) - }) - - except Exception as e: - logger.error(f"Get BLE proximity error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -# ============================================================================= -# Operator Playbook Endpoints -# ============================================================================= - -@tscm_bp.route('/playbooks') -def list_playbooks(): - """List all available operator playbooks.""" - try: - from utils.tscm.advanced import PLAYBOOKS - - # Return as array with id field for JavaScript compatibility - playbooks_list = [] - for pid, pb in PLAYBOOKS.items(): - pb_dict = pb.to_dict() - pb_dict['id'] = pid - pb_dict['name'] = pb_dict.get('title', pid) - pb_dict['category'] = pb_dict.get('risk_level', 'general') - playbooks_list.append(pb_dict) - - return jsonify({ - 'status': 'success', - 'playbooks': playbooks_list - }) - - except Exception as e: - logger.error(f"List playbooks error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/playbooks/') -def get_playbook(playbook_id: str): - """Get a specific playbook.""" - try: - from utils.tscm.advanced import PLAYBOOKS - - if playbook_id not in PLAYBOOKS: - return jsonify({'status': 'error', 'message': 'Playbook not found'}), 404 - - return jsonify({ - 'status': 'success', - 'playbook': PLAYBOOKS[playbook_id].to_dict() - }) - - except Exception as e: - logger.error(f"Get playbook error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 - - -@tscm_bp.route('/findings//playbook') -def get_finding_playbook(identifier: str): - """Get recommended playbook for a specific finding.""" - try: - from utils.tscm.advanced import get_playbook_for_finding - - # Get profile - correlation = get_correlation_engine() - profile = None - - for protocol in ['bluetooth', 'wifi', 'rf']: - key = f"{protocol}:{identifier.upper()}" - if key in correlation.device_profiles: - profile = correlation.device_profiles[key].to_dict() - break - 
- if not profile: - return jsonify({'status': 'error', 'message': 'Finding not found'}), 404 - - playbook = get_playbook_for_finding( - risk_level=profile.get('risk_level', 'informational'), - indicators=profile.get('indicators', []) - ) - - return jsonify({ - 'status': 'success', - 'playbook': playbook.to_dict(), - 'suggested_next_steps': [ - f"Step {s.step_number}: {s.action}" - for s in playbook.steps[:3] - ] - }) - - except Exception as e: - logger.error(f"Get finding playbook error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 +""" +TSCM (Technical Surveillance Countermeasures) Routes Package + +Provides endpoints for counter-surveillance sweeps, baseline management, +threat detection, and reporting. +""" + +from __future__ import annotations + +import json +import logging +import queue +import threading +import time +from datetime import datetime, timedelta, timezone +from typing import Any + +from flask import Blueprint, Response, jsonify, request + +from data.tscm_frequencies import ( + SWEEP_PRESETS, + get_all_sweep_presets, + get_sweep_preset, +) +from utils.database import ( + add_device_timeline_entry, + add_tscm_threat, + acknowledge_tscm_threat, + cleanup_old_timeline_entries, + create_tscm_schedule, + create_tscm_sweep, + delete_tscm_baseline, + delete_tscm_schedule, + get_active_tscm_baseline, + get_all_tscm_baselines, + get_all_tscm_schedules, + get_tscm_baseline, + get_tscm_schedule, + get_tscm_sweep, + get_tscm_threat_summary, + get_tscm_threats, + set_active_tscm_baseline, + update_tscm_schedule, + update_tscm_sweep, +) +from utils.tscm.baseline import ( + BaselineComparator, + BaselineRecorder, + get_comparison_for_active_baseline, +) +from utils.tscm.correlation import ( + CorrelationEngine, + get_correlation_engine, + reset_correlation_engine, +) +from utils.tscm.detector import ThreatDetector +from utils.tscm.device_identity import ( + get_identity_engine, + reset_identity_engine, + ingest_ble_dict, + ingest_wifi_dict, 
+) +from utils.event_pipeline import process_event +from utils.sse import sse_stream_fanout + +# Import unified Bluetooth scanner helper for TSCM integration +try: + from routes.bluetooth_v2 import get_tscm_bluetooth_snapshot + _USE_UNIFIED_BT_SCANNER = True +except ImportError: + _USE_UNIFIED_BT_SCANNER = False + +logger = logging.getLogger('intercept.tscm') + +tscm_bp = Blueprint('tscm', __name__, url_prefix='/tscm') + +try: + from zoneinfo import ZoneInfo +except ImportError: # pragma: no cover - fallback for older Python + ZoneInfo = None + +# ============================================================================= +# Global State (will be initialized from app.py) +# ============================================================================= + +# These will be set by app.py +tscm_queue: queue.Queue | None = None +tscm_lock: threading.Lock | None = None + +# Local state +_sweep_thread: threading.Thread | None = None +_sweep_running = False +_current_sweep_id: int | None = None +_baseline_recorder = BaselineRecorder() +_schedule_thread: threading.Thread | None = None +_schedule_running = False + + +def init_tscm_state(tscm_q: queue.Queue, lock: threading.Lock) -> None: + """Initialize TSCM state from app.py.""" + global tscm_queue, tscm_lock + tscm_queue = tscm_q + tscm_lock = lock + start_tscm_scheduler() + + +def _emit_event(event_type: str, data: dict) -> None: + """Emit an event to the SSE queue.""" + if tscm_queue: + try: + tscm_queue.put_nowait({ + 'type': event_type, + 'timestamp': datetime.now().isoformat(), + **data + }) + except queue.Full: + logger.warning("TSCM queue full, dropping event") + + +# ============================================================================= +# Schedule Helpers +# ============================================================================= + +def _get_schedule_timezone(zone_name: str | None) -> Any: + """Resolve schedule timezone from a zone name or fallback to local.""" + if zone_name and ZoneInfo: + try: + 
return ZoneInfo(zone_name) + except Exception: + logger.warning(f"Invalid timezone '{zone_name}', using local time") + return datetime.now().astimezone().tzinfo or timezone.utc + + +def _parse_cron_field(field: str, min_value: int, max_value: int) -> set[int]: + """Parse a single cron field into a set of valid integers.""" + field = field.strip() + if not field: + raise ValueError("Empty cron field") + + values: set[int] = set() + parts = field.split(',') + for part in parts: + part = part.strip() + if part == '*': + values.update(range(min_value, max_value + 1)) + continue + if part.startswith('*/'): + step = int(part[2:]) + if step <= 0: + raise ValueError("Invalid step value") + values.update(range(min_value, max_value + 1, step)) + continue + range_part = part + step = 1 + if '/' in part: + range_part, step_str = part.split('/', 1) + step = int(step_str) + if step <= 0: + raise ValueError("Invalid step value") + if '-' in range_part: + start_str, end_str = range_part.split('-', 1) + start = int(start_str) + end = int(end_str) + if start > end: + start, end = end, start + values.update(range(start, end + 1, step)) + else: + values.add(int(range_part)) + + return {v for v in values if min_value <= v <= max_value} + + +def _parse_cron_expression(expr: str) -> tuple[dict[str, set[int]], dict[str, bool]]: + """Parse a cron expression into value sets and wildcard flags.""" + fields = (expr or '').split() + if len(fields) != 5: + raise ValueError("Cron expression must have 5 fields") + + minute_field, hour_field, dom_field, month_field, dow_field = fields + + sets = { + 'minute': _parse_cron_field(minute_field, 0, 59), + 'hour': _parse_cron_field(hour_field, 0, 23), + 'dom': _parse_cron_field(dom_field, 1, 31), + 'month': _parse_cron_field(month_field, 1, 12), + 'dow': _parse_cron_field(dow_field, 0, 7), + } + + # Normalize Sunday (7 -> 0) + if 7 in sets['dow']: + sets['dow'].add(0) + sets['dow'].discard(7) + + wildcards = { + 'dom': dom_field.strip() == '*', + 'dow': 
dow_field.strip() == '*', + } + return sets, wildcards + + +def _cron_matches(dt: datetime, sets: dict[str, set[int]], wildcards: dict[str, bool]) -> bool: + """Check if a datetime matches cron sets.""" + if dt.minute not in sets['minute']: + return False + if dt.hour not in sets['hour']: + return False + if dt.month not in sets['month']: + return False + + dom_match = dt.day in sets['dom'] + # Cron DOW: Sunday=0 + cron_dow = (dt.weekday() + 1) % 7 + dow_match = cron_dow in sets['dow'] + + if wildcards['dom'] and wildcards['dow']: + return True + if wildcards['dom']: + return dow_match + if wildcards['dow']: + return dom_match + return dom_match or dow_match + + +def _next_run_from_cron(expr: str, after_dt: datetime) -> datetime | None: + """Calculate next run time from cron expression after a given datetime.""" + sets, wildcards = _parse_cron_expression(expr) + # Round to next minute + candidate = after_dt.replace(second=0, microsecond=0) + timedelta(minutes=1) + # Search up to 366 days ahead + for _ in range(366 * 24 * 60): + if _cron_matches(candidate, sets, wildcards): + return candidate + candidate += timedelta(minutes=1) + return None + + +def _parse_schedule_timestamp(value: Any) -> datetime | None: + """Parse stored schedule timestamp to aware datetime.""" + if not value: + return None + if isinstance(value, datetime): + return value if value.tzinfo else value.replace(tzinfo=timezone.utc) + try: + parsed = datetime.fromisoformat(str(value)) + return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc) + except Exception: + return None + + +def _schedule_loop() -> None: + """Background loop to trigger scheduled sweeps.""" + global _schedule_running + + while _schedule_running: + try: + schedules = get_all_tscm_schedules(enabled=True, limit=200) + now_utc = datetime.now(timezone.utc) + + for schedule in schedules: + schedule_id = schedule.get('id') + cron_expr = schedule.get('cron_expression') or '' + tz = 
_get_schedule_timezone(schedule.get('zone_name')) + now_local = datetime.now(tz) + + next_run = _parse_schedule_timestamp(schedule.get('next_run')) + + if not next_run: + try: + computed = _next_run_from_cron(cron_expr, now_local) + except Exception as e: + logger.error(f"Schedule {schedule_id} cron parse error: {e}") + continue + if computed: + update_tscm_schedule( + schedule_id, + next_run=computed.astimezone(timezone.utc).isoformat() + ) + continue + + if next_run <= now_utc: + if _sweep_running: + logger.info(f"Schedule {schedule_id} due but sweep running; skipping") + try: + computed = _next_run_from_cron(cron_expr, now_local) + except Exception as e: + logger.error(f"Schedule {schedule_id} cron parse error: {e}") + continue + if computed: + update_tscm_schedule( + schedule_id, + next_run=computed.astimezone(timezone.utc).isoformat() + ) + continue + + # Trigger sweep + result = _start_sweep_internal( + sweep_type=schedule.get('sweep_type') or 'standard', + baseline_id=schedule.get('baseline_id'), + wifi_enabled=True, + bt_enabled=True, + rf_enabled=True, + wifi_interface='', + bt_interface='', + sdr_device=None, + verbose_results=False + ) + + if result.get('status') == 'success': + try: + computed = _next_run_from_cron(cron_expr, now_local) + except Exception as e: + logger.error(f"Schedule {schedule_id} cron parse error: {e}") + computed = None + + update_tscm_schedule( + schedule_id, + last_run=now_utc.isoformat(), + next_run=computed.astimezone(timezone.utc).isoformat() if computed else None + ) + logger.info(f"Scheduled sweep started for schedule {schedule_id}") + else: + try: + computed = _next_run_from_cron(cron_expr, now_local) + except Exception as e: + logger.error(f"Schedule {schedule_id} cron parse error: {e}") + computed = None + if computed: + update_tscm_schedule( + schedule_id, + next_run=computed.astimezone(timezone.utc).isoformat() + ) + logger.warning(f"Scheduled sweep failed for schedule {schedule_id}: {result.get('message')}") + + except 
Exception as e: + logger.error(f"TSCM schedule loop error: {e}") + + time.sleep(30) + + +def start_tscm_scheduler() -> None: + """Start background scheduler thread for TSCM sweeps.""" + global _schedule_thread, _schedule_running + if _schedule_thread and _schedule_thread.is_alive(): + return + _schedule_running = True + _schedule_thread = threading.Thread(target=_schedule_loop, daemon=True) + _schedule_thread.start() + + +# ============================================================================= +# Sweep Helpers (used by sweep routes and schedule loop) +# ============================================================================= + +def _check_available_devices(wifi: bool, bt: bool, rf: bool) -> dict: + """Check which scanning devices are available.""" + import os + import platform + import shutil + import subprocess + + available = { + 'wifi': False, + 'bluetooth': False, + 'rf': False, + 'wifi_reason': 'Not checked', + 'bt_reason': 'Not checked', + 'rf_reason': 'Not checked', + } + + # Check WiFi - use the same scanner singleton that performs actual scans + if wifi: + try: + from utils.wifi.scanner import get_wifi_scanner + scanner = get_wifi_scanner() + interfaces = scanner._detect_interfaces() + if interfaces: + available['wifi'] = True + available['wifi_reason'] = f'WiFi available ({interfaces[0]["name"]})' + else: + available['wifi_reason'] = 'No wireless interfaces found' + except Exception as e: + available['wifi_reason'] = f'WiFi detection error: {e}' + + # Check Bluetooth + if bt: + if platform.system() == 'Darwin': + # macOS: Check for Bluetooth via system_profiler + try: + result = subprocess.run( + ['system_profiler', 'SPBluetoothDataType'], + capture_output=True, + text=True, + timeout=10 + ) + if 'Bluetooth' in result.stdout and result.returncode == 0: + available['bluetooth'] = True + available['bt_reason'] = 'macOS Bluetooth available' + else: + available['bt_reason'] = 'Bluetooth not available' + except (subprocess.TimeoutExpired, 
FileNotFoundError): + available['bt_reason'] = 'Cannot detect Bluetooth' + else: + # Linux: Check for Bluetooth tools + if shutil.which('bluetoothctl') or shutil.which('hcitool') or shutil.which('hciconfig'): + try: + result = subprocess.run( + ['hciconfig'], + capture_output=True, + text=True, + timeout=5 + ) + if 'hci' in result.stdout.lower(): + available['bluetooth'] = True + available['bt_reason'] = 'Bluetooth adapter detected' + else: + available['bt_reason'] = 'No Bluetooth adapters found' + except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError): + # Try bluetoothctl as fallback + try: + result = subprocess.run( + ['bluetoothctl', 'list'], + capture_output=True, + text=True, + timeout=5 + ) + if result.stdout.strip(): + available['bluetooth'] = True + available['bt_reason'] = 'Bluetooth adapter detected' + else: + # Check /sys for Bluetooth + try: + import glob + bt_devs = glob.glob('/sys/class/bluetooth/hci*') + if bt_devs: + available['bluetooth'] = True + available['bt_reason'] = 'Bluetooth adapter detected' + else: + available['bt_reason'] = 'No Bluetooth adapters found' + except Exception: + available['bt_reason'] = 'No Bluetooth adapters found' + except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError): + # Check /sys for Bluetooth + try: + import glob + bt_devs = glob.glob('/sys/class/bluetooth/hci*') + if bt_devs: + available['bluetooth'] = True + available['bt_reason'] = 'Bluetooth adapter detected' + else: + available['bt_reason'] = 'Cannot detect Bluetooth adapters' + except Exception: + available['bt_reason'] = 'Cannot detect Bluetooth adapters' + else: + # Fallback: check /sys even without tools + try: + import glob + bt_devs = glob.glob('/sys/class/bluetooth/hci*') + if bt_devs: + available['bluetooth'] = True + available['bt_reason'] = 'Bluetooth adapter detected (no scan tools)' + else: + available['bt_reason'] = 'Bluetooth tools not installed (bluez)' + except Exception: + 
available['bt_reason'] = 'Bluetooth tools not installed (bluez)' + + # Check RF/SDR + if rf: + try: + from utils.sdr import SDRFactory + devices = SDRFactory.detect_devices() + if devices: + available['rf'] = True + available['rf_reason'] = f'{len(devices)} SDR device(s) detected' + else: + available['rf_reason'] = 'No SDR devices found' + except ImportError: + available['rf_reason'] = 'SDR detection unavailable' + + return available + + +def _start_sweep_internal( + sweep_type: str, + baseline_id: int | None, + wifi_enabled: bool, + bt_enabled: bool, + rf_enabled: bool, + wifi_interface: str = '', + bt_interface: str = '', + sdr_device: int | None = None, + verbose_results: bool = False, +) -> dict: + """Start a TSCM sweep without request context.""" + global _sweep_running, _sweep_thread, _current_sweep_id + + if _sweep_running: + return {'status': 'error', 'message': 'Sweep already running', 'http_status': 409} + + # Check for available devices + devices = _check_available_devices(wifi_enabled, bt_enabled, rf_enabled) + + warnings = [] + if wifi_enabled and not devices['wifi']: + warnings.append(f"WiFi: {devices['wifi_reason']}") + if bt_enabled and not devices['bluetooth']: + warnings.append(f"Bluetooth: {devices['bt_reason']}") + if rf_enabled and not devices['rf']: + warnings.append(f"RF: {devices['rf_reason']}") + + # If no devices available at all, return error + if not any([devices['wifi'], devices['bluetooth'], devices['rf']]): + return { + 'status': 'error', + 'message': 'No scanning devices available', + 'details': warnings, + 'http_status': 400, + } + + # Create sweep record + _current_sweep_id = create_tscm_sweep( + sweep_type=sweep_type, + baseline_id=baseline_id, + wifi_enabled=wifi_enabled, + bt_enabled=bt_enabled, + rf_enabled=rf_enabled + ) + + _sweep_running = True + + # Start sweep thread + _sweep_thread = threading.Thread( + target=_run_sweep, + args=(sweep_type, baseline_id, wifi_enabled, bt_enabled, rf_enabled, + wifi_interface, 
bt_interface, sdr_device, verbose_results), + daemon=True + ) + _sweep_thread.start() + + logger.info(f"Started TSCM sweep: type={sweep_type}, id={_current_sweep_id}") + + return { + 'status': 'success', + 'message': 'Sweep started', + 'sweep_id': _current_sweep_id, + 'sweep_type': sweep_type, + 'warnings': warnings if warnings else None, + 'devices': { + 'wifi': devices['wifi'], + 'bluetooth': devices['bluetooth'], + 'rf': devices['rf'] + } + } + + +def _scan_wifi_networks(interface: str) -> list[dict]: + """ + Scan for WiFi networks using the unified WiFi scanner. + + This is a facade that maintains backwards compatibility with TSCM + while using the new unified scanner module. + + Automatically detects monitor mode interfaces and uses deep scan + (airodump-ng) when appropriate. + + Args: + interface: WiFi interface name (optional). + + Returns: + List of network dicts with: bssid, essid, power, channel, privacy + """ + try: + from utils.wifi import get_wifi_scanner + + scanner = get_wifi_scanner() + + # Check if interface is in monitor mode + is_monitor = False + if interface: + is_monitor = scanner._is_monitor_mode_interface(interface) + + if is_monitor: + # Use deep scan for monitor mode interfaces + logger.info(f"Interface {interface} is in monitor mode, using deep scan") + + # Check if airodump-ng is available + caps = scanner.check_capabilities() + if not caps.has_airodump_ng: + logger.warning("airodump-ng not available for monitor mode scanning") + return [] + + # Start a short deep scan + if not scanner.is_scanning: + scanner.start_deep_scan(interface=interface, band='all') + + # Wait briefly for some results + import time + time.sleep(5) + + # Get current access points + networks = [] + for ap in scanner.access_points: + networks.append(ap.to_legacy_dict()) + + logger.info(f"WiFi deep scan found {len(networks)} networks") + return networks + else: + # Use quick scan for managed mode interfaces + result = scanner.quick_scan(interface=interface, 
timeout=15) + + if result.error: + logger.warning(f"WiFi scan error: {result.error}") + + # Convert to legacy format for TSCM + networks = [] + for ap in result.access_points: + networks.append(ap.to_legacy_dict()) + + logger.info(f"WiFi scan found {len(networks)} networks") + return networks + + except ImportError as e: + logger.error(f"Failed to import wifi scanner: {e}") + return [] + except Exception as e: + logger.exception(f"WiFi scan failed: {e}") + return [] + + +def _scan_wifi_clients(interface: str) -> list[dict]: + """ + Get WiFi client observations from the unified WiFi scanner. + + Clients are only available when monitor-mode scanning is active. + """ + try: + from utils.wifi import get_wifi_scanner + + scanner = get_wifi_scanner() + if interface: + try: + if not scanner._is_monitor_mode_interface(interface): + return [] + except Exception: + return [] + + return [client.to_dict() for client in scanner.clients] + except ImportError as e: + logger.error(f"Failed to import wifi scanner: {e}") + return [] + except Exception as e: + logger.exception(f"WiFi client scan failed: {e}") + return [] + + +def _scan_bluetooth_devices(interface: str, duration: int = 10) -> list[dict]: + """ + Scan for Bluetooth devices with manufacturer data detection. + + Uses the BLE scanner module (bleak library) for proper manufacturer ID + detection, with fallback to system tools if bleak is unavailable. 
+ """ + import platform + import os + import re + import shutil + import subprocess + + devices = [] + seen_macs = set() + + logger.info(f"Starting Bluetooth scan (duration={duration}s, interface={interface})") + + # Try the BLE scanner module first (uses bleak for proper manufacturer detection) + try: + from utils.tscm.ble_scanner import get_ble_scanner, scan_ble_devices + + logger.info("Using BLE scanner module with manufacturer detection") + ble_devices = scan_ble_devices(duration) + + for ble_dev in ble_devices: + mac = ble_dev.get('mac', '').upper() + if mac and mac not in seen_macs: + seen_macs.add(mac) + + device = { + 'mac': mac, + 'name': ble_dev.get('name', 'Unknown'), + 'rssi': ble_dev.get('rssi'), + 'type': 'ble', + 'manufacturer': ble_dev.get('manufacturer_name'), + 'manufacturer_id': ble_dev.get('manufacturer_id'), + 'is_tracker': ble_dev.get('is_tracker', False), + 'tracker_type': ble_dev.get('tracker_type'), + 'is_airtag': ble_dev.get('is_airtag', False), + 'is_tile': ble_dev.get('is_tile', False), + 'is_smarttag': ble_dev.get('is_smarttag', False), + 'is_espressif': ble_dev.get('is_espressif', False), + 'service_uuids': ble_dev.get('service_uuids', []), + } + devices.append(device) + + if devices: + logger.info(f"BLE scanner found {len(devices)} devices") + trackers = [d for d in devices if d.get('is_tracker')] + if trackers: + logger.info(f"Trackers detected: {[d.get('tracker_type') for d in trackers]}") + return devices + + except ImportError: + logger.warning("BLE scanner module not available, using fallback") + except Exception as e: + logger.warning(f"BLE scanner failed: {e}, using fallback") + + if platform.system() == 'Darwin': + # macOS: Use system_profiler for basic Bluetooth info + try: + result = subprocess.run( + ['system_profiler', 'SPBluetoothDataType', '-json'], + capture_output=True, text=True, timeout=15 + ) + import json + data = json.loads(result.stdout) + bt_data = data.get('SPBluetoothDataType', [{}])[0] + + # Get 
connected/paired devices + for section in ['device_connected', 'device_title']: + section_data = bt_data.get(section, {}) + if isinstance(section_data, dict): + for name, info in section_data.items(): + if isinstance(info, dict): + mac = info.get('device_address', '') + if mac and mac not in seen_macs: + seen_macs.add(mac) + devices.append({ + 'mac': mac.upper(), + 'name': name, + 'type': info.get('device_minorType', 'unknown'), + 'connected': section == 'device_connected' + }) + logger.info(f"macOS Bluetooth scan found {len(devices)} devices") + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError, json.JSONDecodeError) as e: + logger.warning(f"macOS Bluetooth scan failed: {e}") + + else: + # Linux: Try multiple methods + iface = interface or 'hci0' + + # Method 1: Try hcitool scan (simpler, more reliable) + if shutil.which('hcitool'): + try: + logger.info("Trying hcitool scan...") + result = subprocess.run( + ['hcitool', '-i', iface, 'scan', '--flush'], + capture_output=True, text=True, timeout=duration + 5 + ) + for line in result.stdout.split('\n'): + line = line.strip() + if line and '\t' in line: + parts = line.split('\t') + if len(parts) >= 1 and ':' in parts[0]: + mac = parts[0].strip().upper() + name = parts[1].strip() if len(parts) > 1 else 'Unknown' + if mac not in seen_macs: + seen_macs.add(mac) + devices.append({'mac': mac, 'name': name}) + logger.info(f"hcitool scan found {len(devices)} classic BT devices") + except (subprocess.TimeoutExpired, subprocess.SubprocessError) as e: + logger.warning(f"hcitool scan failed: {e}") + + # Method 2: Try btmgmt for BLE devices + if shutil.which('btmgmt'): + try: + logger.info("Trying btmgmt find...") + result = subprocess.run( + ['btmgmt', 'find'], + capture_output=True, text=True, timeout=duration + 5 + ) + for line in result.stdout.split('\n'): + # Parse btmgmt output: "dev_found: XX:XX:XX:XX:XX:XX type LE..." 
+ if 'dev_found' in line.lower() or ('type' in line.lower() and ':' in line): + mac_match = re.search( + r'([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:' + r'[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})', + line + ) + if mac_match: + mac = mac_match.group(1).upper() + if mac not in seen_macs: + seen_macs.add(mac) + # Try to extract name + name_match = re.search(r'name\s+(.+?)(?:\s|$)', line, re.I) + name = name_match.group(1) if name_match else 'Unknown BLE' + devices.append({ + 'mac': mac, + 'name': name, + 'type': 'ble' if 'le' in line.lower() else 'classic' + }) + logger.info(f"btmgmt found {len(devices)} total devices") + except (subprocess.TimeoutExpired, subprocess.SubprocessError) as e: + logger.warning(f"btmgmt find failed: {e}") + + # Method 3: Try bluetoothctl as last resort + if not devices and shutil.which('bluetoothctl'): + try: + import pty + import select + + logger.info("Trying bluetoothctl scan...") + master_fd, slave_fd = pty.openpty() + process = subprocess.Popen( + ['bluetoothctl'], + stdin=slave_fd, + stdout=slave_fd, + stderr=slave_fd, + close_fds=True + ) + os.close(slave_fd) + + # Start scanning + time.sleep(0.3) + os.write(master_fd, b'power on\n') + time.sleep(0.3) + os.write(master_fd, b'scan on\n') + + # Collect devices for specified duration + scan_end = time.time() + min(duration, 10) # Cap at 10 seconds + buffer = '' + + while time.time() < scan_end: + readable, _, _ = select.select([master_fd], [], [], 1.0) + if readable: + try: + data = os.read(master_fd, 4096) + if not data: + break + buffer += data.decode('utf-8', errors='replace') + + while '\n' in buffer: + line, buffer = buffer.split('\n', 1) + line = re.sub(r'\x1b\[[0-9;]*m', '', line).strip() + + if 'Device' in line: + match = re.search( + r'([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:' + r'[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})\s*(.*)', + line + ) + if match: + mac = match.group(1).upper() + name = match.group(2).strip() + # Remove RSSI from name if present + name 
= re.sub(r'\s*RSSI:\s*-?\d+\s*', '', name).strip() + + if mac not in seen_macs: + seen_macs.add(mac) + devices.append({ + 'mac': mac, + 'name': name or '[Unknown]' + }) + except OSError: + break + + # Stop scanning and cleanup + try: + os.write(master_fd, b'scan off\n') + time.sleep(0.2) + os.write(master_fd, b'quit\n') + except OSError: + pass + + process.terminate() + try: + process.wait(timeout=2) + except subprocess.TimeoutExpired: + process.kill() + + try: + os.close(master_fd) + except OSError: + pass + + logger.info(f"bluetoothctl scan found {len(devices)} devices") + + except (FileNotFoundError, subprocess.SubprocessError) as e: + logger.warning(f"bluetoothctl scan failed: {e}") + + return devices + + +def _scan_rf_signals( + sdr_device: int | None, + duration: int = 30, + stop_check: callable | None = None, + sweep_ranges: list[dict] | None = None +) -> list[dict]: + """ + Scan for RF signals using SDR (rtl_power or hackrf_sweep). + + Scans common surveillance frequency bands: + - 88-108 MHz: FM broadcast (potential FM bugs) + - 315 MHz: Common ISM band (wireless devices) + - 433 MHz: ISM band (European wireless devices, car keys) + - 868 MHz: European ISM band + - 915 MHz: US ISM band + - 1.2 GHz: Video transmitters + - 2.4 GHz: WiFi, Bluetooth, video transmitters + + Args: + sdr_device: SDR device index + duration: Scan duration per band + stop_check: Optional callable that returns True if scan should stop. + Defaults to checking module-level _sweep_running. + sweep_ranges: Optional preset ranges (MHz) from SWEEP_PRESETS. 
+ """ + # Default stop check uses module-level _sweep_running + if stop_check is None: + stop_check = lambda: not _sweep_running + import os + import shutil + import subprocess + import tempfile + + signals = [] + + logger.info(f"Starting RF scan (device={sdr_device})") + + # Detect available SDR devices and sweep tools + rtl_power_path = shutil.which('rtl_power') + hackrf_sweep_path = shutil.which('hackrf_sweep') + + sdr_type = None + sweep_tool_path = None + + try: + from utils.sdr import SDRFactory + from utils.sdr.base import SDRType + devices = SDRFactory.detect_devices() + rtlsdr_available = any(d.sdr_type == SDRType.RTL_SDR for d in devices) + hackrf_available = any(d.sdr_type == SDRType.HACKRF for d in devices) + except ImportError: + rtlsdr_available = False + hackrf_available = False + + # Pick the best available SDR + sweep tool combo + if rtlsdr_available and rtl_power_path: + sdr_type = 'rtlsdr' + sweep_tool_path = rtl_power_path + logger.info(f"Using RTL-SDR with rtl_power at: {rtl_power_path}") + elif hackrf_available and hackrf_sweep_path: + sdr_type = 'hackrf' + sweep_tool_path = hackrf_sweep_path + logger.info(f"Using HackRF with hackrf_sweep at: {hackrf_sweep_path}") + elif rtl_power_path: + # Tool exists but no device detected — try anyway (detection may have failed) + sdr_type = 'rtlsdr' + sweep_tool_path = rtl_power_path + logger.info(f"No SDR detected but rtl_power found, attempting RTL-SDR scan") + elif hackrf_sweep_path: + sdr_type = 'hackrf' + sweep_tool_path = hackrf_sweep_path + logger.info(f"No SDR detected but hackrf_sweep found, attempting HackRF scan") + + if not sweep_tool_path: + logger.warning("No supported sweep tool found (rtl_power or hackrf_sweep)") + _emit_event('rf_status', { + 'status': 'error', + 'message': 'No SDR sweep tool installed. 
Install rtl-sdr (rtl_power) or HackRF (hackrf_sweep) for RF scanning.', + }) + return signals + + # Define frequency bands to scan (in Hz) + # Format: (start_freq, end_freq, bin_size, description) + scan_bands: list[tuple[int, int, int, str]] = [] + + if sweep_ranges: + for rng in sweep_ranges: + try: + start_mhz = float(rng.get('start', 0)) + end_mhz = float(rng.get('end', 0)) + step_mhz = float(rng.get('step', 0.1)) + name = rng.get('name') or f"{start_mhz:.1f}-{end_mhz:.1f} MHz" + if start_mhz > 0 and end_mhz > start_mhz: + bin_size = max(1000, int(step_mhz * 1_000_000)) + scan_bands.append(( + int(start_mhz * 1_000_000), + int(end_mhz * 1_000_000), + bin_size, + name + )) + except (TypeError, ValueError): + continue + + if not scan_bands: + # Fallback: focus on common bug frequencies + scan_bands = [ + (88000000, 108000000, 100000, 'FM Broadcast'), # FM bugs + (315000000, 316000000, 10000, '315 MHz ISM'), # US ISM + (433000000, 434000000, 10000, '433 MHz ISM'), # EU ISM + (868000000, 869000000, 10000, '868 MHz ISM'), # EU ISM + (902000000, 928000000, 100000, '915 MHz ISM'), # US ISM + (1200000000, 1300000000, 100000, '1.2 GHz Video'), # Video TX + (2400000000, 2500000000, 500000, '2.4 GHz ISM'), # WiFi/BT/Video + ] + + # Create temp file for output + with tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False) as tmp: + tmp_path = tmp.name + + try: + # Build device argument + device_idx = sdr_device if sdr_device is not None else 0 + + # Scan each band and look for strong signals + for start_freq, end_freq, bin_size, band_name in scan_bands: + if stop_check(): + break + + logger.info(f"Scanning {band_name} ({start_freq/1e6:.1f}-{end_freq/1e6:.1f} MHz)") + + try: + # Build sweep command based on SDR type + if sdr_type == 'hackrf': + cmd = [ + sweep_tool_path, + '-f', f'{int(start_freq / 1e6)}:{int(end_freq / 1e6)}', + '-w', str(bin_size), + '-1', # Single sweep + ] + output_mode = 'stdout' + else: + cmd = [ + sweep_tool_path, + '-f', 
f'{start_freq}:{end_freq}:{bin_size}', + '-g', '40', # Gain + '-i', '1', # Integration interval (1 second) + '-1', # Single shot mode + '-c', '20%', # Crop 20% of edges + '-d', str(device_idx), + tmp_path, + ] + output_mode = 'file' + + logger.debug(f"Running: {' '.join(cmd)}") + + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=30 + ) + + if result.returncode != 0: + logger.warning(f"{os.path.basename(sweep_tool_path)} returned {result.returncode}: {result.stderr}") + + # For HackRF, write stdout CSV data to temp file for unified parsing + if output_mode == 'stdout' and result.stdout: + with open(tmp_path, 'w') as f: + f.write(result.stdout) + + # Parse the CSV output (same format for both rtl_power and hackrf_sweep) + if os.path.exists(tmp_path) and os.path.getsize(tmp_path) > 0: + with open(tmp_path, 'r') as f: + for line in f: + parts = line.strip().split(',') + if len(parts) >= 7: + try: + # CSV format: date, time, hz_low, hz_high, hz_step, samples, db_values... 
+ hz_low = int(parts[2].strip()) + hz_high = int(parts[3].strip()) + hz_step = float(parts[4].strip()) + db_values = [float(x) for x in parts[6:] if x.strip()] + + # Find peaks above noise floor + noise_floor = sum(db_values) / len(db_values) if db_values else -100 + threshold = noise_floor + 6 # Signal must be 6dB above noise + + for idx, db in enumerate(db_values): + if db > threshold and db > -90: # Detect signals above -90dBm + freq_hz = hz_low + (idx * hz_step) + freq_mhz = freq_hz / 1000000 + + signals.append({ + 'frequency': freq_mhz, + 'frequency_hz': freq_hz, + 'power': db, + 'band': band_name, + 'noise_floor': noise_floor, + 'signal_strength': db - noise_floor + }) + except (ValueError, IndexError): + continue + + # Clear file for next band + open(tmp_path, 'w').close() + + except subprocess.TimeoutExpired: + logger.warning(f"RF scan timeout for band {band_name}") + except Exception as e: + logger.warning(f"RF scan error for band {band_name}: {e}") + + finally: + # Cleanup temp file + try: + os.unlink(tmp_path) + except OSError: + pass + + # Deduplicate nearby frequencies (within 100kHz) + if signals: + signals.sort(key=lambda x: x['frequency']) + deduped = [signals[0]] + for sig in signals[1:]: + if sig['frequency'] - deduped[-1]['frequency'] > 0.1: # 100 kHz + deduped.append(sig) + elif sig['power'] > deduped[-1]['power']: + deduped[-1] = sig # Keep stronger signal + signals = deduped + + logger.info(f"RF scan found {len(signals)} signals") + return signals + + +def _run_sweep( + sweep_type: str, + baseline_id: int | None, + wifi_enabled: bool, + bt_enabled: bool, + rf_enabled: bool, + wifi_interface: str = '', + bt_interface: str = '', + sdr_device: int | None = None, + verbose_results: bool = False +) -> None: + """ + Run the TSCM sweep in a background thread. + + This orchestrates data collection from WiFi, BT, and RF sources, + then analyzes results for threats using the correlation engine. 
+ """ + global _sweep_running, _current_sweep_id + + try: + # Get baseline for comparison if specified + baseline = None + if baseline_id: + baseline = get_tscm_baseline(baseline_id) + + # Get sweep preset + preset = get_sweep_preset(sweep_type) or SWEEP_PRESETS.get('standard') + duration = preset.get('duration_seconds', 300) + + _emit_event('sweep_started', { + 'sweep_id': _current_sweep_id, + 'sweep_type': sweep_type, + 'duration': duration, + 'wifi': wifi_enabled, + 'bluetooth': bt_enabled, + 'rf': rf_enabled, + }) + + # Initialize detector and correlation engine + detector = ThreatDetector(baseline) + correlation = get_correlation_engine() + # Clear old profiles from previous sweeps (keep 24h history) + correlation.clear_old_profiles(24) + + # Initialize device identity engine for MAC-randomization resistant detection + identity_engine = get_identity_engine() + identity_engine.clear() # Start fresh for this sweep + from utils.tscm.advanced import get_timeline_manager + timeline_manager = get_timeline_manager() + try: + cleanup_old_timeline_entries(72) + except Exception as e: + logger.debug(f"TSCM timeline cleanup skipped: {e}") + + last_timeline_write: dict[str, float] = {} + timeline_bucket = getattr(timeline_manager, 'bucket_seconds', 30) + + def _maybe_store_timeline( + identifier: str, + protocol: str, + rssi: int | None = None, + channel: int | None = None, + frequency: float | None = None, + attributes: dict | None = None + ) -> None: + if not identifier: + return + + identifier_norm = identifier.upper() if isinstance(identifier, str) else str(identifier) + key = f"{protocol}:{identifier_norm}" + now_ts = time.time() + last_ts = last_timeline_write.get(key) + if last_ts and (now_ts - last_ts) < timeline_bucket: + return + + last_timeline_write[key] = now_ts + try: + add_device_timeline_entry( + device_identifier=identifier_norm, + protocol=protocol, + sweep_id=_current_sweep_id, + rssi=rssi, + channel=channel, + frequency=frequency, + 
attributes=attributes + ) + except Exception as e: + logger.debug(f"TSCM timeline store error: {e}") + + # Collect and analyze data + threats_found = 0 + severity_counts = {'critical': 0, 'high': 0, 'medium': 0, 'low': 0} + all_wifi = {} # Use dict for deduplication by BSSID + all_wifi_clients = {} # Use dict for deduplication by client MAC + all_bt = {} # Use dict for deduplication by MAC + all_rf = [] + + start_time = time.time() + last_wifi_scan = 0 + last_bt_scan = 0 + last_rf_scan = 0 + wifi_scan_interval = 15 # Scan WiFi every 15 seconds + bt_scan_interval = 20 # Scan Bluetooth every 20 seconds + rf_scan_interval = 30 # Scan RF every 30 seconds + + while _sweep_running and (time.time() - start_time) < duration: + current_time = time.time() + + # Perform WiFi scan + if wifi_enabled and (current_time - last_wifi_scan) >= wifi_scan_interval: + try: + wifi_networks = _scan_wifi_networks(wifi_interface) + last_wifi_scan = current_time + if not wifi_networks and not all_wifi: + logger.warning("TSCM WiFi scan returned 0 networks") + _emit_event('sweep_progress', { + 'progress': min(95, int(((current_time - start_time) / duration) * 100)), + 'status': f'Scanning WiFi... 
({len(wifi_networks)} found)', + 'wifi_count': len(all_wifi) + len([n for n in wifi_networks if n.get('bssid') and n.get('bssid') not in all_wifi]), + 'bt_count': len(all_bt), + 'rf_count': len(all_rf), + }) + for network in wifi_networks: + try: + bssid = network.get('bssid', '') + ssid = network.get('essid', network.get('ssid')) + try: + rssi_val = int(network.get('power', network.get('signal'))) + except (ValueError, TypeError): + rssi_val = None + if bssid: + try: + timeline_manager.add_observation( + identifier=bssid, + protocol='wifi', + rssi=rssi_val, + channel=network.get('channel'), + name=ssid, + attributes={'ssid': ssid, 'encryption': network.get('privacy')} + ) + except Exception as e: + logger.debug(f"WiFi timeline observation error: {e}") + _maybe_store_timeline( + identifier=bssid, + protocol='wifi', + rssi=rssi_val, + channel=network.get('channel'), + attributes={'ssid': ssid, 'encryption': network.get('privacy')} + ) + if bssid and bssid not in all_wifi: + all_wifi[bssid] = network + # Emit device event for frontend + is_threat = False + # Analyze for threats + threat = detector.analyze_wifi_device(network) + if threat: + _handle_threat(threat) + threats_found += 1 + is_threat = True + sev = threat.get('severity', 'low').lower() + if sev in severity_counts: + severity_counts[sev] += 1 + # Classify device and get correlation profile + classification = detector.classify_wifi_device(network) + profile = correlation.analyze_wifi_device(network) + + # Feed to identity engine for MAC-randomization resistant clustering + # Note: WiFi APs don't typically use randomized MACs, but clients do + try: + wifi_obs = { + 'timestamp': datetime.now().isoformat(), + 'src_mac': bssid, + 'bssid': bssid, + 'ssid': network.get('essid'), + 'rssi': network.get('power'), + 'channel': network.get('channel'), + 'encryption': network.get('privacy'), + 'frame_type': 'beacon', + } + ingest_wifi_dict(wifi_obs) + except Exception as e: + logger.debug(f"Identity engine WiFi ingest 
error: {e}") + + # Send device to frontend + _emit_event('wifi_device', { + 'bssid': bssid, + 'ssid': network.get('essid', 'Hidden'), + 'channel': network.get('channel', ''), + 'signal': network.get('power', ''), + 'security': network.get('privacy', ''), + 'vendor': network.get('vendor'), + 'is_threat': is_threat, + 'is_new': not classification.get('in_baseline', False), + 'classification': profile.risk_level.value, + 'reasons': classification.get('reasons', []), + 'score': profile.total_score, + 'score_modifier': profile.score_modifier, + 'known_device': profile.known_device, + 'known_device_name': profile.known_device_name, + 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], + 'recommended_action': profile.recommended_action, + }) + except Exception as e: + logger.error(f"WiFi device processing error for {network.get('bssid', '?')}: {e}") + + # WiFi clients (monitor mode only) + try: + wifi_clients = _scan_wifi_clients(wifi_interface) + for client in wifi_clients: + mac = (client.get('mac') or '').upper() + if not mac or mac in all_wifi_clients: + continue + all_wifi_clients[mac] = client + + rssi_val = client.get('rssi_current') + if rssi_val is None: + rssi_val = client.get('rssi_median') or client.get('rssi_ema') + + client_device = { + 'mac': mac, + 'vendor': client.get('vendor'), + 'name': client.get('vendor') or 'WiFi Client', + 'rssi': rssi_val, + 'associated_bssid': client.get('associated_bssid'), + 'probed_ssids': client.get('probed_ssids', []), + 'probe_count': client.get('probe_count', len(client.get('probed_ssids', []))), + 'is_client': True, + } + + try: + timeline_manager.add_observation( + identifier=mac, + protocol='wifi', + rssi=rssi_val, + name=client_device.get('vendor') or f'WiFi Client {mac[-5:]}', + attributes={'client': True, 'associated_bssid': client_device.get('associated_bssid')} + ) + except Exception as e: + logger.debug(f"WiFi client timeline observation error: {e}") + _maybe_store_timeline( + 
identifier=mac, + protocol='wifi', + rssi=rssi_val, + attributes={'client': True, 'associated_bssid': client_device.get('associated_bssid')} + ) + + profile = correlation.analyze_wifi_device(client_device) + client_device['classification'] = profile.risk_level.value + client_device['score'] = profile.total_score + client_device['score_modifier'] = profile.score_modifier + client_device['known_device'] = profile.known_device + client_device['known_device_name'] = profile.known_device_name + client_device['indicators'] = [ + {'type': i.type.value, 'desc': i.description} + for i in profile.indicators + ] + client_device['recommended_action'] = profile.recommended_action + + # Feed to identity engine for MAC-randomization resistant clustering + try: + wifi_obs = { + 'timestamp': datetime.now().isoformat(), + 'src_mac': mac, + 'bssid': client_device.get('associated_bssid'), + 'rssi': rssi_val, + 'frame_type': 'probe_request', + 'probed_ssids': client_device.get('probed_ssids', []), + } + ingest_wifi_dict(wifi_obs) + except Exception as e: + logger.debug(f"Identity engine WiFi client ingest error: {e}") + + _emit_event('wifi_client', client_device) + except Exception as e: + logger.debug(f"WiFi client scan error: {e}") + except Exception as e: + last_wifi_scan = current_time + logger.error(f"WiFi scan error: {e}") + + # Perform Bluetooth scan + if bt_enabled and (current_time - last_bt_scan) >= bt_scan_interval: + try: + # Use unified Bluetooth scanner if available + if _USE_UNIFIED_BT_SCANNER: + logger.info("TSCM: Using unified BT scanner for snapshot") + bt_devices = get_tscm_bluetooth_snapshot(duration=8) + logger.info(f"TSCM: Unified scanner returned {len(bt_devices)} devices") + else: + logger.info(f"TSCM: Using legacy BT scanner on {bt_interface}") + bt_devices = _scan_bluetooth_devices(bt_interface, duration=8) + logger.info(f"TSCM: Legacy scanner returned {len(bt_devices)} devices") + last_bt_scan = current_time + for device in bt_devices: + try: + mac = 
device.get('mac', '') + try: + rssi_val = int(device.get('rssi', device.get('signal'))) + except (ValueError, TypeError): + rssi_val = None + if mac: + try: + timeline_manager.add_observation( + identifier=mac, + protocol='bluetooth', + rssi=rssi_val, + name=device.get('name'), + attributes={'device_type': device.get('type')} + ) + except Exception as e: + logger.debug(f"BT timeline observation error: {e}") + _maybe_store_timeline( + identifier=mac, + protocol='bluetooth', + rssi=rssi_val, + attributes={'device_type': device.get('type')} + ) + if mac and mac not in all_bt: + all_bt[mac] = device + is_threat = False + # Analyze for threats + threat = detector.analyze_bt_device(device) + if threat: + _handle_threat(threat) + threats_found += 1 + is_threat = True + sev = threat.get('severity', 'low').lower() + if sev in severity_counts: + severity_counts[sev] += 1 + # Classify device and get correlation profile + classification = detector.classify_bt_device(device) + profile = correlation.analyze_bluetooth_device(device) + + # Feed to identity engine for MAC-randomization resistant clustering + try: + ble_obs = { + 'timestamp': datetime.now().isoformat(), + 'addr': mac, + 'rssi': device.get('rssi'), + 'manufacturer_id': device.get('manufacturer_id') or device.get('company_id'), + 'manufacturer_data': device.get('manufacturer_data'), + 'service_uuids': device.get('services', []), + 'local_name': device.get('name'), + } + ingest_ble_dict(ble_obs) + except Exception as e: + logger.debug(f"Identity engine BLE ingest error: {e}") + + # Send device to frontend + _emit_event('bt_device', { + 'mac': mac, + 'name': device.get('name', 'Unknown'), + 'device_type': device.get('type', ''), + 'rssi': device.get('rssi', ''), + 'manufacturer': device.get('manufacturer'), + 'tracker': device.get('tracker'), + 'tracker_type': device.get('tracker_type'), + 'is_threat': is_threat, + 'is_new': not classification.get('in_baseline', False), + 'classification': profile.risk_level.value, + 
'reasons': classification.get('reasons', []), + 'is_audio_capable': classification.get('is_audio_capable', False), + 'score': profile.total_score, + 'score_modifier': profile.score_modifier, + 'known_device': profile.known_device, + 'known_device_name': profile.known_device_name, + 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], + 'recommended_action': profile.recommended_action, + }) + except Exception as e: + logger.error(f"BT device processing error for {device.get('mac', '?')}: {e}") + except Exception as e: + last_bt_scan = current_time + import traceback + logger.error(f"Bluetooth scan error: {e}\n{traceback.format_exc()}") + + # Perform RF scan using SDR + if rf_enabled and (current_time - last_rf_scan) >= rf_scan_interval: + try: + _emit_event('sweep_progress', { + 'progress': min(100, int(((current_time - start_time) / duration) * 100)), + 'status': 'Scanning RF spectrum...', + 'wifi_count': len(all_wifi), + 'bt_count': len(all_bt), + 'rf_count': len(all_rf), + }) + # Try RF scan even if sdr_device is None (will use device 0) + rf_signals = _scan_rf_signals(sdr_device, sweep_ranges=preset.get('ranges')) + + # If no signals and this is first RF scan, send info event + if not rf_signals and last_rf_scan == 0: + _emit_event('rf_status', { + 'status': 'no_signals', + 'message': 'RF scan completed - no signals above threshold. 
This may be normal in a quiet RF environment.', + }) + + for signal in rf_signals: + freq_key = f"{signal['frequency']:.3f}" + try: + power_val = int(float(signal.get('power', signal.get('level')))) + except (ValueError, TypeError): + power_val = None + try: + timeline_manager.add_observation( + identifier=freq_key, + protocol='rf', + rssi=power_val, + frequency=signal.get('frequency'), + name=f"{freq_key} MHz", + attributes={'band': signal.get('band')} + ) + except Exception as e: + logger.debug(f"RF timeline observation error: {e}") + _maybe_store_timeline( + identifier=freq_key, + protocol='rf', + rssi=power_val, + frequency=signal.get('frequency'), + attributes={'band': signal.get('band')} + ) + if freq_key not in [f"{s['frequency']:.3f}" for s in all_rf]: + all_rf.append(signal) + is_threat = False + # Analyze RF signal for threats + threat = detector.analyze_rf_signal(signal) + if threat: + _handle_threat(threat) + threats_found += 1 + is_threat = True + sev = threat.get('severity', 'low').lower() + if sev in severity_counts: + severity_counts[sev] += 1 + # Classify signal and get correlation profile + classification = detector.classify_rf_signal(signal) + profile = correlation.analyze_rf_signal(signal) + # Send signal to frontend + _emit_event('rf_signal', { + 'frequency': signal['frequency'], + 'power': signal['power'], + 'band': signal['band'], + 'signal_strength': signal.get('signal_strength', 0), + 'is_threat': is_threat, + 'is_new': not classification.get('in_baseline', False), + 'classification': profile.risk_level.value, + 'reasons': classification.get('reasons', []), + 'score': profile.total_score, + 'score_modifier': profile.score_modifier, + 'known_device': profile.known_device, + 'known_device_name': profile.known_device_name, + 'indicators': [{'type': i.type.value, 'desc': i.description} for i in profile.indicators], + 'recommended_action': profile.recommended_action, + }) + last_rf_scan = current_time + except Exception as e: + logger.error(f"RF 
scan error: {e}") + + # Update progress + elapsed = time.time() - start_time + progress = min(100, int((elapsed / duration) * 100)) + + _emit_event('sweep_progress', { + 'progress': progress, + 'elapsed': int(elapsed), + 'duration': duration, + 'wifi_count': len(all_wifi), + 'bt_count': len(all_bt), + 'rf_count': len(all_rf), + 'threats_found': threats_found, + 'severity_counts': severity_counts, + }) + + time.sleep(2) # Update every 2 seconds + + # Complete sweep (run even if stopped by user so correlations/clusters are computed) + if _current_sweep_id: + # Run cross-protocol correlation analysis + correlations = correlation.correlate_devices() + findings = correlation.get_all_findings() + + # Run baseline comparison if a baseline was provided + baseline_comparison = None + if baseline: + comparator = BaselineComparator(baseline) + baseline_comparison = comparator.compare_all( + wifi_devices=list(all_wifi.values()), + wifi_clients=list(all_wifi_clients.values()), + bt_devices=list(all_bt.values()), + rf_signals=all_rf + ) + logger.info( + f"Baseline comparison: {baseline_comparison['total_new']} new, " + f"{baseline_comparison['total_missing']} missing" + ) + + # Finalize identity engine and get MAC-randomization resistant clusters + identity_engine.finalize_all_sessions() + identity_summary = identity_engine.get_summary() + identity_clusters = [c.to_dict() for c in identity_engine.get_clusters()] + + if verbose_results: + wifi_payload = list(all_wifi.values()) + wifi_client_payload = list(all_wifi_clients.values()) + bt_payload = list(all_bt.values()) + rf_payload = list(all_rf) + else: + wifi_payload = [ + { + 'bssid': d.get('bssid') or d.get('mac'), + 'essid': d.get('essid') or d.get('ssid'), + 'ssid': d.get('ssid') or d.get('essid'), + 'channel': d.get('channel'), + 'power': d.get('power', d.get('signal')), + 'privacy': d.get('privacy', d.get('encryption')), + 'encryption': d.get('encryption', d.get('privacy')), + } + for d in all_wifi.values() + ] + 
wifi_client_payload = [] + for client in all_wifi_clients.values(): + mac = client.get('mac') or client.get('address') + if isinstance(mac, str): + mac = mac.upper() + probed_ssids = client.get('probed_ssids') or [] + rssi = client.get('rssi') + if rssi is None: + rssi = client.get('rssi_current') + if rssi is None: + rssi = client.get('rssi_median') + if rssi is None: + rssi = client.get('rssi_ema') + wifi_client_payload.append({ + 'mac': mac, + 'vendor': client.get('vendor'), + 'rssi': rssi, + 'associated_bssid': client.get('associated_bssid'), + 'is_associated': client.get('is_associated'), + 'probed_ssids': probed_ssids, + 'probe_count': client.get('probe_count', len(probed_ssids)), + }) + bt_payload = [ + { + 'mac': d.get('mac') or d.get('address'), + 'name': d.get('name'), + 'rssi': d.get('rssi'), + 'manufacturer': d.get('manufacturer', d.get('manufacturer_name')), + } + for d in all_bt.values() + ] + rf_payload = [ + { + 'frequency': s.get('frequency'), + 'power': s.get('power', s.get('level')), + 'modulation': s.get('modulation'), + 'band': s.get('band'), + } + for s in all_rf + ] + + update_tscm_sweep( + _current_sweep_id, + status='completed', + results={ + 'wifi_devices': wifi_payload, + 'wifi_clients': wifi_client_payload, + 'bt_devices': bt_payload, + 'rf_signals': rf_payload, + 'wifi_count': len(all_wifi), + 'wifi_client_count': len(all_wifi_clients), + 'bt_count': len(all_bt), + 'rf_count': len(all_rf), + 'severity_counts': severity_counts, + 'correlation_summary': findings.get('summary', {}), + 'identity_summary': identity_summary.get('statistics', {}), + 'baseline_comparison': baseline_comparison, + 'results_detail_level': 'full' if verbose_results else 'compact', + }, + threats_found=threats_found, + completed=True + ) + + # Emit correlation findings + _emit_event('correlation_findings', { + 'correlations': correlations, + 'high_interest_count': findings['summary'].get('high_interest', 0), + 'needs_review_count': 
findings['summary'].get('needs_review', 0), + }) + + # Emit baseline comparison if a baseline was used + if baseline_comparison: + _emit_event('baseline_comparison', { + 'baseline_id': baseline.get('id'), + 'baseline_name': baseline.get('name'), + 'total_new': baseline_comparison['total_new'], + 'total_missing': baseline_comparison['total_missing'], + 'wifi': baseline_comparison.get('wifi'), + 'wifi_clients': baseline_comparison.get('wifi_clients'), + 'bluetooth': baseline_comparison.get('bluetooth'), + 'rf': baseline_comparison.get('rf'), + }) + + # Emit device identity cluster findings (MAC-randomization resistant) + _emit_event('identity_clusters', { + 'total_clusters': identity_summary.get('statistics', {}).get('total_clusters', 0), + 'high_risk_count': identity_summary.get('statistics', {}).get('high_risk_count', 0), + 'medium_risk_count': identity_summary.get('statistics', {}).get('medium_risk_count', 0), + 'unique_fingerprints': identity_summary.get('statistics', {}).get('unique_fingerprints', 0), + 'clusters': identity_clusters, + }) + + _emit_event('sweep_completed', { + 'sweep_id': _current_sweep_id, + 'threats_found': threats_found, + 'wifi_count': len(all_wifi), + 'wifi_client_count': len(all_wifi_clients), + 'bt_count': len(all_bt), + 'rf_count': len(all_rf), + 'severity_counts': severity_counts, + 'high_interest_devices': findings['summary'].get('high_interest', 0), + 'needs_review_devices': findings['summary'].get('needs_review', 0), + 'correlations_found': len(correlations), + 'identity_clusters': identity_summary['statistics'].get('total_clusters', 0), + 'baseline_new_devices': baseline_comparison['total_new'] if baseline_comparison else 0, + 'baseline_missing_devices': baseline_comparison['total_missing'] if baseline_comparison else 0, + }) + + except Exception as e: + logger.error(f"Sweep error: {e}") + _emit_event('sweep_error', {'error': str(e)}) + if _current_sweep_id: + update_tscm_sweep(_current_sweep_id, status='error', completed=True) + + 
finally: + _sweep_running = False + + +def _handle_threat(threat: dict) -> None: + """Handle a detected threat.""" + if not _current_sweep_id: + return + + # Add to database + threat_id = add_tscm_threat( + sweep_id=_current_sweep_id, + threat_type=threat['threat_type'], + severity=threat['severity'], + source=threat['source'], + identifier=threat['identifier'], + name=threat.get('name'), + signal_strength=threat.get('signal_strength'), + frequency=threat.get('frequency'), + details=threat.get('details') + ) + + # Emit event + _emit_event('threat_detected', { + 'threat_id': threat_id, + **threat + }) + + logger.warning( + f"TSCM threat detected: {threat['threat_type']} - " + f"{threat['identifier']} ({threat['severity']})" + ) + + +def _generate_assessment(summary: dict) -> str: + """Generate an assessment summary based on findings.""" + high = summary.get('high_interest', 0) + review = summary.get('needs_review', 0) + correlations = summary.get('correlations_found', 0) + + if high > 0 or correlations > 0: + return ( + f"ELEVATED CONCERN: {high} high-interest item(s) and " + f"{correlations} cross-protocol correlation(s) detected. " + "Professional TSCM inspection recommended." + ) + elif review > 3: + return ( + f"MODERATE CONCERN: {review} items requiring review. " + "Further analysis recommended to characterize unknown devices." + ) + elif review > 0: + return ( + f"LOW CONCERN: {review} item(s) flagged for review. " + "Likely benign but verification recommended." + ) + else: + return ( + "BASELINE ENVIRONMENT: No significant anomalies detected. " + "Environment appears consistent with expected wireless activity." 
+ ) + + +# ============================================================================= +# Import sub-modules to register routes on tscm_bp +# ============================================================================= +from routes.tscm import sweep # noqa: E402, F401 +from routes.tscm import baseline # noqa: E402, F401 +from routes.tscm import cases # noqa: E402, F401 +from routes.tscm import meeting # noqa: E402, F401 +from routes.tscm import analysis # noqa: E402, F401 +from routes.tscm import schedules # noqa: E402, F401 diff --git a/routes/tscm/analysis.py b/routes/tscm/analysis.py new file mode 100644 index 0000000..bca27d3 --- /dev/null +++ b/routes/tscm/analysis.py @@ -0,0 +1,1077 @@ +""" +TSCM Analysis Routes + +Handles /threats/*, /report/*, /wifi/*, /bluetooth/*, /playbooks/*, +/findings/*, /identity/*, /known-devices/*, /device/*/timeline, +and /timelines endpoints. +""" + +from __future__ import annotations + +import logging +from datetime import datetime + +from flask import Response, jsonify, request + +from routes.tscm import ( + _current_sweep_id, + _generate_assessment, + tscm_bp, +) +from utils.database import ( + acknowledge_tscm_threat, + get_active_tscm_baseline, + get_tscm_sweep, + get_tscm_threat_summary, + get_tscm_threats, +) +from utils.tscm.correlation import get_correlation_engine + +logger = logging.getLogger('intercept.tscm') + + +# ============================================================================= +# Threat Endpoints +# ============================================================================= + +@tscm_bp.route('/threats') +def list_threats(): + """List threats with optional filters.""" + sweep_id = request.args.get('sweep_id', type=int) + severity = request.args.get('severity') + acknowledged = request.args.get('acknowledged') + limit = request.args.get('limit', 100, type=int) + + ack_filter = None + if acknowledged is not None: + ack_filter = acknowledged.lower() in ('true', '1', 'yes') + + threats = 
get_tscm_threats( + sweep_id=sweep_id, + severity=severity, + acknowledged=ack_filter, + limit=limit + ) + + return jsonify({'status': 'success', 'threats': threats}) + + +@tscm_bp.route('/threats/summary') +def threat_summary(): + """Get threat count summary by severity.""" + summary = get_tscm_threat_summary() + return jsonify({'status': 'success', 'summary': summary}) + + +@tscm_bp.route('/threats/', methods=['PUT']) +def update_threat(threat_id: int): + """Update a threat (acknowledge, add notes).""" + data = request.get_json() or {} + + if data.get('acknowledge'): + notes = data.get('notes') + success = acknowledge_tscm_threat(threat_id, notes) + if not success: + return jsonify({'status': 'error', 'message': 'Threat not found'}), 404 + + return jsonify({'status': 'success', 'message': 'Threat updated'}) + + +# ============================================================================= +# Correlation & Findings Endpoints +# ============================================================================= + +@tscm_bp.route('/findings') +def get_findings(): + """ + Get comprehensive TSCM findings from the correlation engine. + + Returns all device profiles organized by risk level, cross-protocol + correlations, and summary statistics with client-safe disclaimers. + """ + correlation = get_correlation_engine() + findings = correlation.get_all_findings() + + # Add client-safe disclaimer + findings['legal_disclaimer'] = ( + "DISCLAIMER: This TSCM screening system identifies wireless and RF anomalies " + "and indicators. Results represent potential items of interest, NOT confirmed " + "surveillance devices. No content has been intercepted or decoded. Findings " + "require professional analysis and verification. This tool does not prove " + "malicious intent or illegal activity." 
+ ) + + return jsonify({ + 'status': 'success', + 'findings': findings + }) + + +@tscm_bp.route('/findings/high-interest') +def get_high_interest(): + """Get only high-interest devices (score >= 6).""" + correlation = get_correlation_engine() + high_interest = correlation.get_high_interest_devices() + + return jsonify({ + 'status': 'success', + 'count': len(high_interest), + 'devices': [d.to_dict() for d in high_interest], + 'disclaimer': ( + "High-interest classification indicates multiple indicators warrant " + "investigation. This does NOT confirm surveillance activity." + ) + }) + + +@tscm_bp.route('/findings/correlations') +def get_correlations(): + """Get cross-protocol correlation analysis.""" + correlation = get_correlation_engine() + correlations = correlation.correlate_devices() + + return jsonify({ + 'status': 'success', + 'count': len(correlations), + 'correlations': correlations, + 'explanation': ( + "Correlations identify devices across different protocols (Bluetooth, " + "WiFi, RF) that exhibit related behavior patterns. Cross-protocol " + "activity is one indicator among many in TSCM analysis." + ) + }) + + +@tscm_bp.route('/findings/device/') +def get_device_profile(identifier: str): + """Get detailed profile for a specific device.""" + correlation = get_correlation_engine() + + # Search all protocols for the identifier + for protocol in ['bluetooth', 'wifi', 'rf']: + key = f"{protocol}:{identifier}" + if key in correlation.device_profiles: + profile = correlation.device_profiles[key] + return jsonify({ + 'status': 'success', + 'profile': profile.to_dict() + }) + + return jsonify({ + 'status': 'error', + 'message': 'Device not found' + }), 404 + + +# ============================================================================= +# Report Generation Endpoints +# ============================================================================= + +@tscm_bp.route('/report') +def generate_report(): + """ + Generate a comprehensive TSCM sweep report. 
+ + Includes all findings, correlations, indicators, and recommended actions + in a client-presentable format with appropriate disclaimers. + """ + correlation = get_correlation_engine() + findings = correlation.get_all_findings() + + # Build the report structure + report = { + 'generated_at': datetime.now().isoformat(), + 'report_type': 'TSCM Wireless Surveillance Screening', + + 'executive_summary': { + 'total_devices_analyzed': findings['summary']['total_devices'], + 'high_interest_items': findings['summary']['high_interest'], + 'items_requiring_review': findings['summary']['needs_review'], + 'cross_protocol_correlations': findings['summary']['correlations_found'], + 'assessment': _generate_assessment(findings['summary']), + }, + + 'methodology': { + 'protocols_scanned': ['Bluetooth Low Energy', 'WiFi 802.11', 'RF Spectrum'], + 'analysis_techniques': [ + 'Device fingerprinting', + 'Signal stability analysis', + 'Cross-protocol correlation', + 'Time-based pattern detection', + 'Manufacturer identification', + ], + 'scoring_model': { + 'informational': '0-2 points - Known or expected devices', + 'needs_review': '3-5 points - Unusual devices requiring assessment', + 'high_interest': '6+ points - Multiple indicators warrant investigation', + } + }, + + 'findings': { + 'high_interest': findings['devices']['high_interest'], + 'needs_review': findings['devices']['needs_review'], + 'informational': findings['devices']['informational'], + }, + + 'correlations': findings['correlations'], + + 'disclaimers': { + 'legal': ( + "This report documents findings from a wireless and RF surveillance " + "screening. Results indicate anomalies and items of interest, NOT " + "confirmed surveillance devices. No communications content has been " + "intercepted, recorded, or decoded. This screening does not prove " + "malicious intent, illegal activity, or the presence of surveillance " + "equipment. All findings require professional verification." 
+ ), + 'technical': ( + "Detection capabilities are limited by equipment sensitivity, " + "environmental factors, and the technical sophistication of any " + "potential devices. Absence of findings does NOT guarantee absence " + "of surveillance equipment." + ), + 'recommendations': ( + "High-interest items should be investigated by qualified TSCM " + "professionals using appropriate physical inspection techniques. " + "This electronic sweep is one component of comprehensive TSCM." + ) + } + } + + return jsonify({ + 'status': 'success', + 'report': report + }) + + +@tscm_bp.route('/report/pdf') +def get_pdf_report(): + """ + Generate client-safe PDF report. + + Contains executive summary, findings by risk tier, meeting window + summary, and mandatory disclaimers. + """ + try: + from utils.tscm.reports import generate_report, get_pdf_report + from utils.tscm.advanced import detect_sweep_capabilities, get_timeline_manager + from routes.tscm import _current_sweep_id + + sweep_id = request.args.get('sweep_id', _current_sweep_id, type=int) + if not sweep_id: + return jsonify({'status': 'error', 'message': 'No sweep specified'}), 400 + + sweep = get_tscm_sweep(sweep_id) + if not sweep: + return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 + + # Get data for report + correlation = get_correlation_engine() + profiles = [p.to_dict() for p in correlation.device_profiles.values()] + caps = detect_sweep_capabilities().to_dict() + + manager = get_timeline_manager() + timelines = [t.to_dict() for t in manager.get_all_timelines()] + + # Generate report + report = generate_report( + sweep_id=sweep_id, + sweep_data=sweep, + device_profiles=profiles, + capabilities=caps, + timelines=timelines + ) + + pdf_content = get_pdf_report(report) + + return Response( + pdf_content, + mimetype='text/plain', + headers={ + 'Content-Disposition': f'attachment; filename=tscm_report_{sweep_id}.txt' + } + ) + + except Exception as e: + logger.error(f"Generate PDF report error: 
{e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/report/annex') +def get_technical_annex(): + """ + Generate technical annex (JSON + CSV). + + Contains device timelines, all indicators, and detailed data + for audit purposes. No packet data included. + """ + try: + from utils.tscm.reports import generate_report, get_json_annex, get_csv_annex + from utils.tscm.advanced import detect_sweep_capabilities, get_timeline_manager + from routes.tscm import _current_sweep_id + + sweep_id = request.args.get('sweep_id', _current_sweep_id, type=int) + format_type = request.args.get('format', 'json') + + if not sweep_id: + return jsonify({'status': 'error', 'message': 'No sweep specified'}), 400 + + sweep = get_tscm_sweep(sweep_id) + if not sweep: + return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 + + # Get data for report + correlation = get_correlation_engine() + profiles = [p.to_dict() for p in correlation.device_profiles.values()] + caps = detect_sweep_capabilities().to_dict() + + manager = get_timeline_manager() + timelines = [t.to_dict() for t in manager.get_all_timelines()] + + # Generate report + report = generate_report( + sweep_id=sweep_id, + sweep_data=sweep, + device_profiles=profiles, + capabilities=caps, + timelines=timelines + ) + + if format_type == 'csv': + csv_content = get_csv_annex(report) + return Response( + csv_content, + mimetype='text/csv', + headers={ + 'Content-Disposition': f'attachment; filename=tscm_annex_{sweep_id}.csv' + } + ) + else: + annex = get_json_annex(report) + return jsonify({ + 'status': 'success', + 'annex': annex + }) + + except Exception as e: + logger.error(f"Generate technical annex error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# WiFi Advanced Indicators Endpoints +# ============================================================================= + 
+@tscm_bp.route('/wifi/advanced-indicators') +def get_wifi_advanced_indicators(): + """ + Get advanced WiFi indicators (Evil Twin, Probes, Deauth). + + These indicators require analysis of WiFi patterns. + Some features require monitor mode. + """ + try: + from utils.tscm.advanced import get_wifi_detector + + detector = get_wifi_detector() + + return jsonify({ + 'status': 'success', + 'indicators': detector.get_all_indicators(), + 'unavailable_features': detector.get_unavailable_features(), + 'disclaimer': ( + "All indicators represent pattern detections, NOT confirmed attacks. " + "Further investigation is required." + ) + }) + + except Exception as e: + logger.error(f"Get WiFi indicators error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/wifi/analyze-network', methods=['POST']) +def analyze_wifi_network(): + """ + Analyze a WiFi network for evil twin patterns. + + Compares against known networks to detect SSID spoofing. + """ + try: + from utils.tscm.advanced import get_wifi_detector + + data = request.get_json() or {} + detector = get_wifi_detector() + + # Set known networks from baseline if available + baseline = get_active_tscm_baseline() + if baseline: + detector.set_known_networks(baseline.get('wifi_networks', [])) + + indicators = detector.analyze_network(data) + + return jsonify({ + 'status': 'success', + 'indicators': [i.to_dict() for i in indicators] + }) + + except Exception as e: + logger.error(f"Analyze WiFi network error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# Bluetooth Risk Explainability Endpoints +# ============================================================================= + +@tscm_bp.route('/bluetooth//explain') +def explain_bluetooth_risk(identifier: str): + """ + Get human-readable risk explanation for a BLE device. 
+ + Includes proximity estimate, tracker explanation, and + recommended actions. + """ + try: + from utils.tscm.advanced import generate_ble_risk_explanation + + # Get device from correlation engine + correlation = get_correlation_engine() + profile = None + key = f"bluetooth:{identifier.upper()}" + if key in correlation.device_profiles: + profile = correlation.device_profiles[key].to_dict() + + # Try to find device info + device = {'mac': identifier} + if profile: + device['name'] = profile.get('name') + device['rssi'] = profile.get('rssi_samples', [None])[-1] if profile.get('rssi_samples') else None + + # Check meeting status + is_meeting = correlation.is_during_meeting() + + explanation = generate_ble_risk_explanation(device, profile, is_meeting) + + return jsonify({ + 'status': 'success', + 'explanation': explanation.to_dict() + }) + + except Exception as e: + logger.error(f"Explain BLE risk error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/bluetooth//proximity') +def get_bluetooth_proximity(identifier: str): + """Get proximity estimate for a BLE device.""" + try: + from utils.tscm.advanced import estimate_ble_proximity + + rssi = request.args.get('rssi', type=int) + if rssi is None: + # Try to get from correlation engine + correlation = get_correlation_engine() + key = f"bluetooth:{identifier.upper()}" + if key in correlation.device_profiles: + profile = correlation.device_profiles[key] + if profile.rssi_samples: + rssi = profile.rssi_samples[-1] + + if rssi is None: + return jsonify({ + 'status': 'error', + 'message': 'RSSI value required' + }), 400 + + proximity, explanation, distance = estimate_ble_proximity(rssi) + + return jsonify({ + 'status': 'success', + 'proximity': { + 'estimate': proximity.value, + 'explanation': explanation, + 'estimated_distance': distance, + 'rssi_used': rssi, + }, + 'disclaimer': ( + "Proximity estimates are approximate and affected by " + "environment, obstacles, and device 
characteristics." + ) + }) + + except Exception as e: + logger.error(f"Get BLE proximity error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# Operator Playbook Endpoints +# ============================================================================= + +@tscm_bp.route('/playbooks') +def list_playbooks(): + """List all available operator playbooks.""" + try: + from utils.tscm.advanced import PLAYBOOKS + + # Return as array with id field for JavaScript compatibility + playbooks_list = [] + for pid, pb in PLAYBOOKS.items(): + pb_dict = pb.to_dict() + pb_dict['id'] = pid + pb_dict['name'] = pb_dict.get('title', pid) + pb_dict['category'] = pb_dict.get('risk_level', 'general') + playbooks_list.append(pb_dict) + + return jsonify({ + 'status': 'success', + 'playbooks': playbooks_list + }) + + except Exception as e: + logger.error(f"List playbooks error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/playbooks/') +def get_playbook(playbook_id: str): + """Get a specific playbook.""" + try: + from utils.tscm.advanced import PLAYBOOKS + + if playbook_id not in PLAYBOOKS: + return jsonify({'status': 'error', 'message': 'Playbook not found'}), 404 + + return jsonify({ + 'status': 'success', + 'playbook': PLAYBOOKS[playbook_id].to_dict() + }) + + except Exception as e: + logger.error(f"Get playbook error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/findings//playbook') +def get_finding_playbook(identifier: str): + """Get recommended playbook for a specific finding.""" + try: + from utils.tscm.advanced import get_playbook_for_finding + + # Get profile + correlation = get_correlation_engine() + profile = None + + for protocol in ['bluetooth', 'wifi', 'rf']: + key = f"{protocol}:{identifier.upper()}" + if key in correlation.device_profiles: + profile = 
correlation.device_profiles[key].to_dict() + break + + if not profile: + return jsonify({'status': 'error', 'message': 'Finding not found'}), 404 + + playbook = get_playbook_for_finding( + risk_level=profile.get('risk_level', 'informational'), + indicators=profile.get('indicators', []) + ) + + return jsonify({ + 'status': 'success', + 'playbook': playbook.to_dict(), + 'suggested_next_steps': [ + f"Step {s.step_number}: {s.action}" + for s in playbook.steps[:3] + ] + }) + + except Exception as e: + logger.error(f"Get finding playbook error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# Device Identity Endpoints (MAC-Randomization Resistant Detection) +# ============================================================================= + +@tscm_bp.route('/identity/ingest/ble', methods=['POST']) +def ingest_ble_observation(): + """ + Ingest a BLE observation for device identity clustering. + + This endpoint accepts BLE advertisement data and feeds it into the + MAC-randomization resistant device detection engine. 
+ + Expected JSON payload: + { + "timestamp": "2024-01-01T12:00:00", // ISO format or omit for now + "addr": "AA:BB:CC:DD:EE:FF", // BLE address (may be randomized) + "addr_type": "rpa", // public/random_static/rpa/nrpa/unknown + "rssi": -65, // dBm + "tx_power": -10, // dBm (optional) + "adv_type": "ADV_IND", // Advertisement type + "manufacturer_id": 1234, // Company ID (optional) + "manufacturer_data": "0102030405", // Hex string (optional) + "service_uuids": ["uuid1", "uuid2"], // List of UUIDs (optional) + "local_name": "Device Name", // Advertised name (optional) + "appearance": 960, // BLE appearance (optional) + "packet_length": 31 // Total packet length (optional) + } + """ + try: + from utils.tscm.device_identity import ingest_ble_dict + + data = request.get_json() + if not data: + return jsonify({'status': 'error', 'message': 'No data provided'}), 400 + + session = ingest_ble_dict(data) + + return jsonify({ + 'status': 'success', + 'session_id': session.session_id, + 'observation_count': len(session.observations), + }) + + except Exception as e: + logger.error(f"BLE ingestion error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/ingest/wifi', methods=['POST']) +def ingest_wifi_observation(): + """ + Ingest a WiFi observation for device identity clustering. + + Expected JSON payload: + { + "timestamp": "2024-01-01T12:00:00", + "src_mac": "AA:BB:CC:DD:EE:FF", // Client MAC (may be randomized) + "dst_mac": "11:22:33:44:55:66", // Destination MAC + "bssid": "11:22:33:44:55:66", // AP BSSID + "ssid": "NetworkName", // SSID if available + "frame_type": "probe_request", // Frame type + "rssi": -70, // dBm + "channel": 6, // WiFi channel + "ht_capable": true, // 802.11n capable + "vht_capable": true, // 802.11ac capable + "he_capable": false, // 802.11ax capable + "supported_rates": [1, 2, 5.5, 11], // Supported rates + "vendor_ies": [["001122", 10]], // [(OUI, length), ...] 
+ "probed_ssids": ["ssid1", "ssid2"] // For probe requests + } + """ + try: + from utils.tscm.device_identity import ingest_wifi_dict + + data = request.get_json() + if not data: + return jsonify({'status': 'error', 'message': 'No data provided'}), 400 + + session = ingest_wifi_dict(data) + + return jsonify({ + 'status': 'success', + 'session_id': session.session_id, + 'observation_count': len(session.observations), + }) + + except Exception as e: + logger.error(f"WiFi ingestion error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/ingest/batch', methods=['POST']) +def ingest_batch_observations(): + """ + Ingest multiple observations in a single request. + + Expected JSON payload: + { + "ble": [, ...], + "wifi": [, ...] + } + """ + try: + from utils.tscm.device_identity import ingest_ble_dict, ingest_wifi_dict + + data = request.get_json() + if not data: + return jsonify({'status': 'error', 'message': 'No data provided'}), 400 + + ble_count = 0 + wifi_count = 0 + + for ble_obs in data.get('ble', []): + ingest_ble_dict(ble_obs) + ble_count += 1 + + for wifi_obs in data.get('wifi', []): + ingest_wifi_dict(wifi_obs) + wifi_count += 1 + + return jsonify({ + 'status': 'success', + 'ble_ingested': ble_count, + 'wifi_ingested': wifi_count, + }) + + except Exception as e: + logger.error(f"Batch ingestion error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/clusters') +def get_device_clusters(): + """ + Get all device clusters (probable physical device identities). 
+ + Query parameters: + - min_confidence: Minimum cluster confidence (0-1, default 0) + - protocol: Filter by protocol ('ble' or 'wifi') + - risk_level: Filter by risk level ('high', 'medium', 'low', 'informational') + """ + try: + from utils.tscm.device_identity import get_identity_engine + + engine = get_identity_engine() + min_conf = request.args.get('min_confidence', 0, type=float) + protocol = request.args.get('protocol') + risk_filter = request.args.get('risk_level') + + clusters = engine.get_clusters(min_confidence=min_conf) + + if protocol: + clusters = [c for c in clusters if c.protocol == protocol] + + if risk_filter: + clusters = [c for c in clusters if c.risk_level.value == risk_filter] + + return jsonify({ + 'status': 'success', + 'count': len(clusters), + 'clusters': [c.to_dict() for c in clusters], + 'disclaimer': ( + "Clusters represent PROBABLE device identities based on passive " + "fingerprinting. Results are statistical correlations, not " + "confirmed matches. False positives/negatives are expected." + ) + }) + + except Exception as e: + logger.error(f"Get clusters error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/clusters/high-risk') +def get_high_risk_clusters(): + """Get device clusters with HIGH risk level.""" + try: + from utils.tscm.device_identity import get_identity_engine + + engine = get_identity_engine() + clusters = engine.get_high_risk_clusters() + + return jsonify({ + 'status': 'success', + 'count': len(clusters), + 'clusters': [c.to_dict() for c in clusters], + 'disclaimer': ( + "High-risk classification indicates multiple behavioral indicators " + "consistent with potential surveillance devices. This does NOT " + "confirm surveillance activity. Professional verification required." 
+ ) + }) + + except Exception as e: + logger.error(f"Get high-risk clusters error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/summary') +def get_identity_summary(): + """ + Get summary of device identity analysis. + + Returns statistics, cluster counts by risk level, and monitoring period. + """ + try: + from utils.tscm.device_identity import get_identity_engine + + engine = get_identity_engine() + summary = engine.get_summary() + + return jsonify({ + 'status': 'success', + 'summary': summary + }) + + except Exception as e: + logger.error(f"Get identity summary error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/finalize', methods=['POST']) +def finalize_identity_sessions(): + """ + Finalize all active sessions and complete clustering. + + Call this at the end of a monitoring period to ensure all observations + are properly clustered and assessed. + """ + try: + from utils.tscm.device_identity import get_identity_engine + + engine = get_identity_engine() + engine.finalize_all_sessions() + summary = engine.get_summary() + + return jsonify({ + 'status': 'success', + 'message': 'All sessions finalized', + 'summary': summary + }) + + except Exception as e: + logger.error(f"Finalize sessions error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/reset', methods=['POST']) +def reset_identity_engine(): + """ + Reset the device identity engine. + + Clears all sessions, clusters, and monitoring state. 
+ """ + try: + from utils.tscm.device_identity import reset_identity_engine as reset_engine + + reset_engine() + + return jsonify({ + 'status': 'success', + 'message': 'Device identity engine reset' + }) + + except Exception as e: + logger.error(f"Reset identity engine error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/identity/cluster/') +def get_cluster_detail(cluster_id: str): + """Get detailed information for a specific cluster.""" + try: + from utils.tscm.device_identity import get_identity_engine + + engine = get_identity_engine() + + if cluster_id not in engine.clusters: + return jsonify({ + 'status': 'error', + 'message': 'Cluster not found' + }), 404 + + cluster = engine.clusters[cluster_id] + + return jsonify({ + 'status': 'success', + 'cluster': cluster.to_dict() + }) + + except Exception as e: + logger.error(f"Get cluster detail error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# Device Timeline Endpoints +# ============================================================================= + +@tscm_bp.route('/device//timeline') +def get_device_timeline_endpoint(identifier: str): + """ + Get timeline of observations for a device. + + Shows behavior over time including RSSI stability, presence, + and meeting window correlation. 
+ """ + try: + from utils.tscm.advanced import get_timeline_manager + from utils.database import get_device_timeline + + protocol = request.args.get('protocol', 'bluetooth') + since_hours = request.args.get('since_hours', 24, type=int) + + # Try in-memory timeline first + manager = get_timeline_manager() + timeline = manager.get_timeline(identifier, protocol) + + # Also get stored timeline from database + stored = get_device_timeline(identifier, since_hours=since_hours) + + result = { + 'identifier': identifier, + 'protocol': protocol, + 'observations': stored, + } + + if timeline: + result['metrics'] = { + 'first_seen': timeline.first_seen.isoformat() if timeline.first_seen else None, + 'last_seen': timeline.last_seen.isoformat() if timeline.last_seen else None, + 'total_observations': timeline.total_observations, + 'presence_ratio': round(timeline.presence_ratio, 2), + } + result['signal'] = { + 'rssi_min': timeline.rssi_min, + 'rssi_max': timeline.rssi_max, + 'rssi_mean': round(timeline.rssi_mean, 1) if timeline.rssi_mean else None, + 'stability': round(timeline.rssi_stability, 2), + } + result['movement'] = { + 'appears_stationary': timeline.appears_stationary, + 'pattern': timeline.movement_pattern, + } + result['meeting_correlation'] = { + 'correlated': timeline.meeting_correlated, + 'observations_during_meeting': timeline.meeting_observations, + } + + return jsonify({ + 'status': 'success', + 'timeline': result + }) + + except Exception as e: + logger.error(f"Get device timeline error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/timelines') +def get_all_device_timelines(): + """Get all device timelines.""" + try: + from utils.tscm.advanced import get_timeline_manager + + manager = get_timeline_manager() + timelines = manager.get_all_timelines() + + return jsonify({ + 'status': 'success', + 'count': len(timelines), + 'timelines': [t.to_dict() for t in timelines] + }) + + except Exception as e: + logger.error(f"Get 
all timelines error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +# ============================================================================= +# Known-Good Registry (Whitelist) Endpoints +# ============================================================================= + +@tscm_bp.route('/known-devices', methods=['GET']) +def list_known_devices(): + """List all known-good devices.""" + from utils.database import get_all_known_devices + + location = request.args.get('location') + scope = request.args.get('scope') + + devices = get_all_known_devices(location=location, scope=scope) + + return jsonify({ + 'status': 'success', + 'count': len(devices), + 'devices': devices + }) + + +@tscm_bp.route('/known-devices', methods=['POST']) +def add_known_device_endpoint(): + """ + Add a device to the known-good registry. + + Known devices remain visible but receive reduced risk scores. + They are NOT suppressed from reports (preserves audit trail). + """ + from utils.database import add_known_device + + data = request.get_json() or {} + + identifier = data.get('identifier') + protocol = data.get('protocol') + + if not identifier or not protocol: + return jsonify({ + 'status': 'error', + 'message': 'identifier and protocol are required' + }), 400 + + device_id = add_known_device( + identifier=identifier, + protocol=protocol, + name=data.get('name'), + description=data.get('description'), + location=data.get('location'), + scope=data.get('scope', 'global'), + added_by=data.get('added_by'), + score_modifier=data.get('score_modifier', -2), + metadata=data.get('metadata') + ) + + return jsonify({ + 'status': 'success', + 'message': 'Device added to known-good registry', + 'device_id': device_id + }) + + +@tscm_bp.route('/known-devices/', methods=['GET']) +def get_known_device_endpoint(identifier: str): + """Get a known device by identifier.""" + from utils.database import get_known_device + + device = get_known_device(identifier) + if not device: + 
return jsonify({'status': 'error', 'message': 'Device not found'}), 404 + + return jsonify({ + 'status': 'success', + 'device': device + }) + + +@tscm_bp.route('/known-devices/', methods=['DELETE']) +def delete_known_device_endpoint(identifier: str): + """Remove a device from the known-good registry.""" + from utils.database import delete_known_device + + success = delete_known_device(identifier) + if not success: + return jsonify({'status': 'error', 'message': 'Device not found'}), 404 + + return jsonify({ + 'status': 'success', + 'message': 'Device removed from known-good registry' + }) + + +@tscm_bp.route('/known-devices/check/') +def check_known_device(identifier: str): + """Check if a device is in the known-good registry.""" + from utils.database import is_known_good_device + + location = request.args.get('location') + result = is_known_good_device(identifier, location=location) + + return jsonify({ + 'status': 'success', + 'is_known': result is not None, + 'details': result + }) diff --git a/routes/tscm/baseline.py b/routes/tscm/baseline.py new file mode 100644 index 0000000..d5c5104 --- /dev/null +++ b/routes/tscm/baseline.py @@ -0,0 +1,272 @@ +""" +TSCM Baseline Routes + +Handles /baseline/*, /baselines endpoints. 
+""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime + +from flask import jsonify, request + +from routes.tscm import ( + _baseline_recorder, + tscm_bp, +) +from utils.database import ( + delete_tscm_baseline, + get_active_tscm_baseline, + get_all_tscm_baselines, + get_tscm_baseline, + get_tscm_sweep, + set_active_tscm_baseline, +) +from utils.tscm.baseline import ( + BaselineComparator, + get_comparison_for_active_baseline, +) + +logger = logging.getLogger('intercept.tscm') + + +@tscm_bp.route('/baseline/record', methods=['POST']) +def record_baseline(): + """Start recording a new baseline.""" + data = request.get_json() or {} + name = data.get('name', f'Baseline {datetime.now().strftime("%Y-%m-%d %H:%M")}') + location = data.get('location') + description = data.get('description') + + baseline_id = _baseline_recorder.start_recording(name, location, description) + + return jsonify({ + 'status': 'success', + 'message': 'Baseline recording started', + 'baseline_id': baseline_id + }) + + +@tscm_bp.route('/baseline/stop', methods=['POST']) +def stop_baseline(): + """Stop baseline recording.""" + result = _baseline_recorder.stop_recording() + + if 'error' in result: + return jsonify({'status': 'error', 'message': result['error']}) + + return jsonify({ + 'status': 'success', + 'message': 'Baseline recording complete', + **result + }) + + +@tscm_bp.route('/baseline/status') +def baseline_status(): + """Get baseline recording status.""" + return jsonify(_baseline_recorder.get_recording_status()) + + +@tscm_bp.route('/baselines') +def list_baselines(): + """List all baselines.""" + baselines = get_all_tscm_baselines() + return jsonify({'status': 'success', 'baselines': baselines}) + + +@tscm_bp.route('/baseline/') +def get_baseline(baseline_id: int): + """Get a specific baseline.""" + baseline = get_tscm_baseline(baseline_id) + if not baseline: + return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 + + 
return jsonify({'status': 'success', 'baseline': baseline}) + + +@tscm_bp.route('/baseline//activate', methods=['POST']) +def activate_baseline(baseline_id: int): + """Set a baseline as active.""" + success = set_active_tscm_baseline(baseline_id) + if not success: + return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 + + return jsonify({'status': 'success', 'message': 'Baseline activated'}) + + +@tscm_bp.route('/baseline/', methods=['DELETE']) +def remove_baseline(baseline_id: int): + """Delete a baseline.""" + success = delete_tscm_baseline(baseline_id) + if not success: + return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 + + return jsonify({'status': 'success', 'message': 'Baseline deleted'}) + + +@tscm_bp.route('/baseline/active') +def get_active_baseline(): + """Get the currently active baseline.""" + baseline = get_active_tscm_baseline() + if not baseline: + return jsonify({'status': 'success', 'baseline': None}) + + return jsonify({'status': 'success', 'baseline': baseline}) + + +@tscm_bp.route('/baseline/compare', methods=['POST']) +def compare_against_baseline(): + """ + Compare provided device data against the active baseline. + + Expects JSON body with: + - wifi_devices: list of WiFi devices (optional) + - wifi_clients: list of WiFi clients (optional) + - bt_devices: list of Bluetooth devices (optional) + - rf_signals: list of RF signals (optional) + + Returns comparison showing new, missing, and matching devices. 
+ """ + data = request.get_json() or {} + + wifi_devices = data.get('wifi_devices') + wifi_clients = data.get('wifi_clients') + bt_devices = data.get('bt_devices') + rf_signals = data.get('rf_signals') + + # Use the convenience function that gets active baseline + comparison = get_comparison_for_active_baseline( + wifi_devices=wifi_devices, + wifi_clients=wifi_clients, + bt_devices=bt_devices, + rf_signals=rf_signals + ) + + if comparison is None: + return jsonify({ + 'status': 'error', + 'message': 'No active baseline set' + }), 400 + + return jsonify({ + 'status': 'success', + 'comparison': comparison + }) + + +# ============================================================================= +# Baseline Diff & Health Endpoints +# ============================================================================= + +@tscm_bp.route('/baseline/diff//') +def get_baseline_diff(baseline_id: int, sweep_id: int): + """ + Get comprehensive diff between a baseline and a sweep. + + Shows new devices, missing devices, changed characteristics, + and baseline health assessment. 
+ """ + try: + from utils.tscm.advanced import calculate_baseline_diff + + baseline = get_tscm_baseline(baseline_id) + if not baseline: + return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 + + sweep = get_tscm_sweep(sweep_id) + if not sweep: + return jsonify({'status': 'error', 'message': 'Sweep not found'}), 404 + + # Get current devices from sweep results + results = sweep.get('results', {}) + if isinstance(results, str): + results = json.loads(results) + + current_wifi = results.get('wifi_devices', []) + current_wifi_clients = results.get('wifi_clients', []) + current_bt = results.get('bt_devices', []) + current_rf = results.get('rf_signals', []) + + diff = calculate_baseline_diff( + baseline=baseline, + current_wifi=current_wifi, + current_wifi_clients=current_wifi_clients, + current_bt=current_bt, + current_rf=current_rf, + sweep_id=sweep_id + ) + + return jsonify({ + 'status': 'success', + 'diff': diff.to_dict() + }) + + except Exception as e: + logger.error(f"Get baseline diff error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/baseline//health') +def get_baseline_health(baseline_id: int): + """Get health assessment for a baseline.""" + try: + from utils.tscm.advanced import BaselineHealth + + baseline = get_tscm_baseline(baseline_id) + if not baseline: + return jsonify({'status': 'error', 'message': 'Baseline not found'}), 404 + + # Calculate age + created_at = baseline.get('created_at') + age_hours = 0 + if created_at: + if isinstance(created_at, str): + created = datetime.fromisoformat(created_at.replace('Z', '+00:00')) + age_hours = (datetime.now() - created.replace(tzinfo=None)).total_seconds() / 3600 + elif isinstance(created_at, datetime): + age_hours = (datetime.now() - created_at).total_seconds() / 3600 + + # Count devices + total_devices = ( + len(baseline.get('wifi_networks', [])) + + len(baseline.get('bt_devices', [])) + + len(baseline.get('rf_frequencies', [])) + ) + + # 
Determine health + health = 'healthy' + score = 1.0 + reasons = [] + + if age_hours > 168: + health = 'stale' + score = 0.3 + reasons.append(f'Baseline is {age_hours:.0f} hours old (over 1 week)') + elif age_hours > 72: + health = 'noisy' + score = 0.6 + reasons.append(f'Baseline is {age_hours:.0f} hours old (over 3 days)') + + if total_devices < 3: + score -= 0.2 + reasons.append(f'Baseline has few devices ({total_devices})') + if health == 'healthy': + health = 'noisy' + + return jsonify({ + 'status': 'success', + 'health': { + 'status': health, + 'score': round(max(0, score), 2), + 'age_hours': round(age_hours, 1), + 'total_devices': total_devices, + 'reasons': reasons, + } + }) + + except Exception as e: + logger.error(f"Get baseline health error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 diff --git a/routes/tscm/cases.py b/routes/tscm/cases.py new file mode 100644 index 0000000..59c14af --- /dev/null +++ b/routes/tscm/cases.py @@ -0,0 +1,149 @@ +""" +TSCM Case Management Routes + +Handles /cases/* endpoints. 
+""" + +from __future__ import annotations + +import logging + +from flask import jsonify, request + +from routes.tscm import tscm_bp + +logger = logging.getLogger('intercept.tscm') + + +@tscm_bp.route('/cases', methods=['GET']) +def list_cases(): + """List all TSCM cases.""" + from utils.database import get_all_tscm_cases + + status = request.args.get('status') + limit = request.args.get('limit', 50, type=int) + + cases = get_all_tscm_cases(status=status, limit=limit) + + return jsonify({ + 'status': 'success', + 'count': len(cases), + 'cases': cases + }) + + +@tscm_bp.route('/cases', methods=['POST']) +def create_case(): + """Create a new TSCM case.""" + from utils.database import create_tscm_case + + data = request.get_json() or {} + + name = data.get('name') + if not name: + return jsonify({'status': 'error', 'message': 'name is required'}), 400 + + case_id = create_tscm_case( + name=name, + description=data.get('description'), + location=data.get('location'), + priority=data.get('priority', 'normal'), + created_by=data.get('created_by'), + metadata=data.get('metadata') + ) + + return jsonify({ + 'status': 'success', + 'message': 'Case created', + 'case_id': case_id + }) + + +@tscm_bp.route('/cases/', methods=['GET']) +def get_case(case_id: int): + """Get a TSCM case with all linked sweeps, threats, and notes.""" + from utils.database import get_tscm_case + + case = get_tscm_case(case_id) + if not case: + return jsonify({'status': 'error', 'message': 'Case not found'}), 404 + + return jsonify({ + 'status': 'success', + 'case': case + }) + + +@tscm_bp.route('/cases/', methods=['PUT']) +def update_case(case_id: int): + """Update a TSCM case.""" + from utils.database import update_tscm_case + + data = request.get_json() or {} + + success = update_tscm_case( + case_id=case_id, + status=data.get('status'), + priority=data.get('priority'), + assigned_to=data.get('assigned_to'), + notes=data.get('notes') + ) + + if not success: + return jsonify({'status': 'error', 
'message': 'Case not found'}), 404 + + return jsonify({ + 'status': 'success', + 'message': 'Case updated' + }) + + +@tscm_bp.route('/cases//sweeps/', methods=['POST']) +def link_sweep_to_case(case_id: int, sweep_id: int): + """Link a sweep to a case.""" + from utils.database import add_sweep_to_case + + success = add_sweep_to_case(case_id, sweep_id) + + return jsonify({ + 'status': 'success' if success else 'error', + 'message': 'Sweep linked to case' if success else 'Already linked or not found' + }) + + +@tscm_bp.route('/cases//threats/', methods=['POST']) +def link_threat_to_case(case_id: int, threat_id: int): + """Link a threat to a case.""" + from utils.database import add_threat_to_case + + success = add_threat_to_case(case_id, threat_id) + + return jsonify({ + 'status': 'success' if success else 'error', + 'message': 'Threat linked to case' if success else 'Already linked or not found' + }) + + +@tscm_bp.route('/cases//notes', methods=['POST']) +def add_note_to_case(case_id: int): + """Add a note to a case.""" + from utils.database import add_case_note + + data = request.get_json() or {} + + content = data.get('content') + if not content: + return jsonify({'status': 'error', 'message': 'content is required'}), 400 + + note_id = add_case_note( + case_id=case_id, + content=content, + note_type=data.get('note_type', 'general'), + created_by=data.get('created_by') + ) + + return jsonify({ + 'status': 'success', + 'message': 'Note added', + 'note_id': note_id + }) diff --git a/routes/tscm/meeting.py b/routes/tscm/meeting.py new file mode 100644 index 0000000..7bccf8e --- /dev/null +++ b/routes/tscm/meeting.py @@ -0,0 +1,205 @@ +""" +TSCM Meeting Window Routes + +Handles /meeting/* endpoints for time correlation during sensitive periods. 
+""" + +from __future__ import annotations + +import logging +from datetime import datetime + +from flask import jsonify, request + +from routes.tscm import ( + _current_sweep_id, + _emit_event, + tscm_bp, +) +from utils.tscm.correlation import get_correlation_engine + +logger = logging.getLogger('intercept.tscm') + + +@tscm_bp.route('/meeting/start', methods=['POST']) +def start_meeting(): + """ + Mark the start of a sensitive period (meeting, briefing, etc.). + + Devices detected during this window will receive additional scoring + for meeting-correlated activity. + """ + correlation = get_correlation_engine() + correlation.start_meeting_window() + + _emit_event('meeting_started', { + 'timestamp': datetime.now().isoformat(), + 'message': 'Sensitive period monitoring active' + }) + + return jsonify({ + 'status': 'success', + 'message': 'Meeting window started - devices detected now will be flagged' + }) + + +@tscm_bp.route('/meeting/end', methods=['POST']) +def end_meeting(): + """Mark the end of a sensitive period.""" + correlation = get_correlation_engine() + correlation.end_meeting_window() + + _emit_event('meeting_ended', { + 'timestamp': datetime.now().isoformat() + }) + + return jsonify({ + 'status': 'success', + 'message': 'Meeting window ended' + }) + + +@tscm_bp.route('/meeting/status') +def meeting_status(): + """Check if currently in a meeting window.""" + correlation = get_correlation_engine() + in_meeting = correlation.is_during_meeting() + + return jsonify({ + 'status': 'success', + 'in_meeting': in_meeting, + 'windows': [ + { + 'start': start.isoformat(), + 'end': end.isoformat() if end else None + } + for start, end in correlation.meeting_windows + ] + }) + + +# ============================================================================= +# Meeting Window Enhanced Endpoints +# ============================================================================= + +@tscm_bp.route('/meeting/start-tracked', methods=['POST']) +def start_tracked_meeting(): + 
""" + Start a tracked meeting window with database persistence. + + Tracks devices first seen during meeting and behavior changes. + """ + from utils.database import start_meeting_window + from utils.tscm.advanced import get_timeline_manager + from routes.tscm import _current_sweep_id + + data = request.get_json() or {} + + meeting_id = start_meeting_window( + sweep_id=_current_sweep_id, + name=data.get('name'), + location=data.get('location'), + notes=data.get('notes') + ) + + # Start meeting in correlation engine + correlation = get_correlation_engine() + correlation.start_meeting_window() + + # Start in timeline manager + manager = get_timeline_manager() + manager.start_meeting_window() + + _emit_event('meeting_started', { + 'meeting_id': meeting_id, + 'timestamp': datetime.now().isoformat(), + 'name': data.get('name'), + }) + + return jsonify({ + 'status': 'success', + 'message': 'Tracked meeting window started', + 'meeting_id': meeting_id + }) + + +@tscm_bp.route('/meeting//end', methods=['POST']) +def end_tracked_meeting(meeting_id: int): + """End a tracked meeting window.""" + from utils.database import end_meeting_window + from utils.tscm.advanced import get_timeline_manager + + success = end_meeting_window(meeting_id) + if not success: + return jsonify({'status': 'error', 'message': 'Meeting not found or already ended'}), 404 + + # End in correlation engine + correlation = get_correlation_engine() + correlation.end_meeting_window() + + # End in timeline manager + manager = get_timeline_manager() + manager.end_meeting_window() + + _emit_event('meeting_ended', { + 'meeting_id': meeting_id, + 'timestamp': datetime.now().isoformat() + }) + + return jsonify({ + 'status': 'success', + 'message': 'Meeting window ended' + }) + + +@tscm_bp.route('/meeting//summary') +def get_meeting_summary_endpoint(meeting_id: int): + """Get detailed summary of device activity during a meeting.""" + try: + from utils.database import get_meeting_windows + from utils.tscm.advanced 
import generate_meeting_summary, get_timeline_manager + from routes.tscm import _current_sweep_id + + # Get meeting window + windows = get_meeting_windows(_current_sweep_id or 0) + meeting = None + for w in windows: + if w.get('id') == meeting_id: + meeting = w + break + + if not meeting: + return jsonify({'status': 'error', 'message': 'Meeting not found'}), 404 + + # Get timelines and profiles + manager = get_timeline_manager() + timelines = manager.get_all_timelines() + + correlation = get_correlation_engine() + profiles = [p.to_dict() for p in correlation.device_profiles.values()] + + summary = generate_meeting_summary(meeting, timelines, profiles) + + return jsonify({ + 'status': 'success', + 'summary': summary.to_dict() + }) + + except Exception as e: + logger.error(f"Get meeting summary error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/meeting/active') +def get_active_meeting(): + """Get currently active meeting window.""" + from utils.database import get_active_meeting_window + from routes.tscm import _current_sweep_id + + meeting = get_active_meeting_window(_current_sweep_id) + + return jsonify({ + 'status': 'success', + 'meeting': meeting, + 'is_active': meeting is not None + }) diff --git a/routes/tscm/schedules.py b/routes/tscm/schedules.py new file mode 100644 index 0000000..c29267a --- /dev/null +++ b/routes/tscm/schedules.py @@ -0,0 +1,186 @@ +""" +TSCM Schedule Routes + +Handles /schedules/* endpoints for automated sweep scheduling. 
+""" + +from __future__ import annotations + +import logging +from datetime import datetime, timezone +from typing import Any + +from flask import jsonify, request + +from routes.tscm import ( + _get_schedule_timezone, + _next_run_from_cron, + _start_sweep_internal, + _sweep_running, + tscm_bp, +) +from utils.database import ( + create_tscm_schedule, + delete_tscm_schedule, + get_all_tscm_schedules, + get_tscm_schedule, + update_tscm_schedule, +) + +logger = logging.getLogger('intercept.tscm') + + +@tscm_bp.route('/schedules', methods=['GET']) +def list_schedules(): + """List all TSCM sweep schedules.""" + enabled_param = request.args.get('enabled') + enabled = None + if enabled_param is not None: + enabled = enabled_param.lower() in ('1', 'true', 'yes') + + schedules = get_all_tscm_schedules(enabled=enabled, limit=200) + return jsonify({ + 'status': 'success', + 'count': len(schedules), + 'schedules': schedules, + }) + + +@tscm_bp.route('/schedules', methods=['POST']) +def create_schedule(): + """Create a new sweep schedule.""" + data = request.get_json() or {} + name = (data.get('name') or '').strip() + cron_expression = (data.get('cron_expression') or '').strip() + sweep_type = data.get('sweep_type', 'standard') + baseline_id = data.get('baseline_id') + zone_name = data.get('zone_name') + enabled = bool(data.get('enabled', True)) + notify_on_threat = bool(data.get('notify_on_threat', True)) + notify_email = data.get('notify_email') + + if not name: + return jsonify({'status': 'error', 'message': 'Schedule name required'}), 400 + if not cron_expression: + return jsonify({'status': 'error', 'message': 'cron_expression required'}), 400 + + next_run = None + if enabled: + try: + tz = _get_schedule_timezone(zone_name) + next_local = _next_run_from_cron(cron_expression, datetime.now(tz)) + next_run = next_local.astimezone(timezone.utc).isoformat() if next_local else None + except Exception as e: + return jsonify({'status': 'error', 'message': f'Invalid cron: {e}'}), 
400 + + schedule_id = create_tscm_schedule( + name=name, + cron_expression=cron_expression, + sweep_type=sweep_type, + baseline_id=baseline_id, + zone_name=zone_name, + enabled=enabled, + notify_on_threat=notify_on_threat, + notify_email=notify_email, + next_run=next_run, + ) + schedule = get_tscm_schedule(schedule_id) + return jsonify({ + 'status': 'success', + 'message': 'Schedule created', + 'schedule': schedule + }) + + +@tscm_bp.route('/schedules/', methods=['PUT', 'PATCH']) +def update_schedule(schedule_id: int): + """Update a sweep schedule.""" + schedule = get_tscm_schedule(schedule_id) + if not schedule: + return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 + + data = request.get_json() or {} + updates: dict[str, Any] = {} + + for key in ('name', 'cron_expression', 'sweep_type', 'baseline_id', 'zone_name', 'notify_email'): + if key in data: + updates[key] = data[key] + + if 'baseline_id' in updates and updates['baseline_id'] in ('', None): + updates['baseline_id'] = None + + if 'enabled' in data: + updates['enabled'] = 1 if data['enabled'] else 0 + if 'notify_on_threat' in data: + updates['notify_on_threat'] = 1 if data['notify_on_threat'] else 0 + + # Recalculate next_run when cron/zone/enabled changes + if any(k in updates for k in ('cron_expression', 'zone_name', 'enabled')): + if updates.get('enabled', schedule.get('enabled', 1)): + cron_expr = updates.get('cron_expression', schedule.get('cron_expression', '')) + zone_name = updates.get('zone_name', schedule.get('zone_name')) + try: + tz = _get_schedule_timezone(zone_name) + next_local = _next_run_from_cron(cron_expr, datetime.now(tz)) + updates['next_run'] = next_local.astimezone(timezone.utc).isoformat() if next_local else None + except Exception as e: + return jsonify({'status': 'error', 'message': f'Invalid cron: {e}'}), 400 + else: + updates['next_run'] = None + + if not updates: + return jsonify({'status': 'error', 'message': 'No updates provided'}), 400 + + 
update_tscm_schedule(schedule_id, **updates) + schedule = get_tscm_schedule(schedule_id) + return jsonify({'status': 'success', 'schedule': schedule}) + + +@tscm_bp.route('/schedules/', methods=['DELETE']) +def delete_schedule(schedule_id: int): + """Delete a sweep schedule.""" + success = delete_tscm_schedule(schedule_id) + if not success: + return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 + return jsonify({'status': 'success', 'message': 'Schedule deleted'}) + + +@tscm_bp.route('/schedules//run', methods=['POST']) +def run_schedule_now(schedule_id: int): + """Trigger a scheduled sweep immediately.""" + schedule = get_tscm_schedule(schedule_id) + if not schedule: + return jsonify({'status': 'error', 'message': 'Schedule not found'}), 404 + + result = _start_sweep_internal( + sweep_type=schedule.get('sweep_type') or 'standard', + baseline_id=schedule.get('baseline_id'), + wifi_enabled=True, + bt_enabled=True, + rf_enabled=True, + wifi_interface='', + bt_interface='', + sdr_device=None, + verbose_results=False, + ) + + if result.get('status') != 'success': + status_code = result.pop('http_status', 400) + return jsonify(result), status_code + + # Update schedule run timestamps + cron_expr = schedule.get('cron_expression') or '' + tz = _get_schedule_timezone(schedule.get('zone_name')) + now_utc = datetime.now(timezone.utc) + try: + next_local = _next_run_from_cron(cron_expr, datetime.now(tz)) + except Exception: + next_local = None + + update_tscm_schedule( + schedule_id, + last_run=now_utc.isoformat(), + next_run=next_local.astimezone(timezone.utc).isoformat() if next_local else None, + ) + + return jsonify(result) diff --git a/routes/tscm/sweep.py b/routes/tscm/sweep.py new file mode 100644 index 0000000..88a8c91 --- /dev/null +++ b/routes/tscm/sweep.py @@ -0,0 +1,434 @@ +""" +TSCM Sweep Routes + +Handles /sweep/*, /status, /devices, /presets/*, /feed/*, +/capabilities, and /sweep//capabilities endpoints. 
+""" + +from __future__ import annotations + +import json +import logging +import os +import platform +import re +import shutil +import subprocess +from typing import Any + +from flask import Response, jsonify, request + +from routes.tscm import ( + _current_sweep_id, + _emit_event, + _start_sweep_internal, + _sweep_running, + tscm_bp, + tscm_queue, + _baseline_recorder, +) +from data.tscm_frequencies import get_all_sweep_presets, get_sweep_preset +from utils.database import get_tscm_sweep, update_tscm_sweep +from utils.event_pipeline import process_event +from utils.sse import sse_stream_fanout + +logger = logging.getLogger('intercept.tscm') + + +@tscm_bp.route('/status') +def tscm_status(): + """Check if any TSCM operation is currently running.""" + from routes.tscm import _sweep_running + return jsonify({'running': _sweep_running}) + + +@tscm_bp.route('/sweep/start', methods=['POST']) +def start_sweep(): + """Start a TSCM sweep.""" + data = request.get_json() or {} + sweep_type = data.get('sweep_type', 'standard') + baseline_id = data.get('baseline_id') + if baseline_id in ('', None): + baseline_id = None + wifi_enabled = data.get('wifi', True) + bt_enabled = data.get('bluetooth', True) + rf_enabled = data.get('rf', True) + verbose_results = bool(data.get('verbose_results', False)) + + # Get interface selections + wifi_interface = data.get('wifi_interface', '') + bt_interface = data.get('bt_interface', '') + sdr_device = data.get('sdr_device') + + result = _start_sweep_internal( + sweep_type=sweep_type, + baseline_id=baseline_id, + wifi_enabled=wifi_enabled, + bt_enabled=bt_enabled, + rf_enabled=rf_enabled, + wifi_interface=wifi_interface, + bt_interface=bt_interface, + sdr_device=sdr_device, + verbose_results=verbose_results, + ) + http_status = result.pop('http_status', 200) + return jsonify(result), http_status + + +@tscm_bp.route('/sweep/stop', methods=['POST']) +def stop_sweep(): + """Stop the current TSCM sweep.""" + import routes.tscm as _tscm_pkg + + if 
not _tscm_pkg._sweep_running: + return jsonify({'status': 'error', 'message': 'No sweep running'}) + + _tscm_pkg._sweep_running = False + + if _tscm_pkg._current_sweep_id: + update_tscm_sweep(_tscm_pkg._current_sweep_id, status='aborted', completed=True) + + _emit_event('sweep_stopped', {'reason': 'user_requested'}) + + logger.info("TSCM sweep stopped by user") + + return jsonify({'status': 'success', 'message': 'Sweep stopped'}) + + +@tscm_bp.route('/sweep/status') +def sweep_status(): + """Get current sweep status.""" + from routes.tscm import _sweep_running, _current_sweep_id + + status = { + 'running': _sweep_running, + 'sweep_id': _current_sweep_id, + } + + if _current_sweep_id: + sweep = get_tscm_sweep(_current_sweep_id) + if sweep: + status['sweep'] = sweep + + return jsonify(status) + + +@tscm_bp.route('/sweep/stream') +def sweep_stream(): + """SSE stream for real-time sweep updates.""" + from routes.tscm import tscm_queue + + def _on_msg(msg: dict[str, Any]) -> None: + process_event('tscm', msg, msg.get('type')) + + return Response( + sse_stream_fanout( + source_queue=tscm_queue, + channel_key='tscm', + timeout=1.0, + keepalive_interval=30.0, + on_message=_on_msg, + ), + mimetype='text/event-stream', + headers={ + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + 'X-Accel-Buffering': 'no' + } + ) + + +@tscm_bp.route('/devices') +def get_tscm_devices(): + """Get available scanning devices for TSCM sweeps.""" + devices = { + 'wifi_interfaces': [], + 'bt_adapters': [], + 'sdr_devices': [] + } + + # Detect WiFi interfaces + if platform.system() == 'Darwin': # macOS + try: + result = subprocess.run( + ['networksetup', '-listallhardwareports'], + capture_output=True, text=True, timeout=5 + ) + lines = result.stdout.split('\n') + for i, line in enumerate(lines): + if 'Wi-Fi' in line or 'AirPort' in line: + # Get the hardware port name (e.g., "Wi-Fi") + port_name = line.replace('Hardware Port:', '').strip() + for j in range(i + 1, min(i + 3, 
len(lines))): + if 'Device:' in lines[j]: + device = lines[j].split('Device:')[1].strip() + devices['wifi_interfaces'].append({ + 'name': device, + 'display_name': f'{port_name} ({device})', + 'type': 'internal', + 'monitor_capable': False + }) + break + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + pass + else: # Linux + try: + result = subprocess.run( + ['iw', 'dev'], + capture_output=True, text=True, timeout=5 + ) + current_iface = None + for line in result.stdout.split('\n'): + line = line.strip() + if line.startswith('Interface'): + current_iface = line.split()[1] + elif current_iface and 'type' in line: + iface_type = line.split()[-1] + devices['wifi_interfaces'].append({ + 'name': current_iface, + 'display_name': f'Wireless ({current_iface}) - {iface_type}', + 'type': iface_type, + 'monitor_capable': True + }) + current_iface = None + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + # Fall back to iwconfig + try: + result = subprocess.run( + ['iwconfig'], + capture_output=True, text=True, timeout=5 + ) + for line in result.stdout.split('\n'): + if 'IEEE 802.11' in line: + iface = line.split()[0] + devices['wifi_interfaces'].append({ + 'name': iface, + 'display_name': f'Wireless ({iface})', + 'type': 'managed', + 'monitor_capable': True + }) + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + pass + + # Detect Bluetooth adapters + if platform.system() == 'Linux': + try: + result = subprocess.run( + ['hciconfig'], + capture_output=True, text=True, timeout=5 + ) + blocks = re.split(r'(?=^hci\d+:)', result.stdout, flags=re.MULTILINE) + for idx, block in enumerate(blocks): + if block.strip(): + first_line = block.split('\n')[0] + match = re.match(r'(hci\d+):', first_line) + if match: + iface_name = match.group(1) + is_up = 'UP RUNNING' in block or '\tUP ' in block + devices['bt_adapters'].append({ + 'name': iface_name, + 'display_name': f'Bluetooth 
Adapter ({iface_name})', + 'type': 'hci', + 'status': 'up' if is_up else 'down' + }) + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + # Try bluetoothctl as fallback + try: + result = subprocess.run( + ['bluetoothctl', 'list'], + capture_output=True, text=True, timeout=5 + ) + for line in result.stdout.split('\n'): + if 'Controller' in line: + # Format: Controller XX:XX:XX:XX:XX:XX Name + parts = line.split() + if len(parts) >= 3: + addr = parts[1] + name = ' '.join(parts[2:]) if len(parts) > 2 else 'Bluetooth' + devices['bt_adapters'].append({ + 'name': addr, + 'display_name': f'{name} ({addr[-8:]})', + 'type': 'controller', + 'status': 'available' + }) + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + pass + elif platform.system() == 'Darwin': + # macOS has built-in Bluetooth - get more info via system_profiler + try: + result = subprocess.run( + ['system_profiler', 'SPBluetoothDataType'], + capture_output=True, text=True, timeout=10 + ) + # Extract controller info + bt_name = 'Built-in Bluetooth' + bt_addr = '' + for line in result.stdout.split('\n'): + if 'Address:' in line: + bt_addr = line.split('Address:')[1].strip() + break + devices['bt_adapters'].append({ + 'name': 'default', + 'display_name': f'{bt_name}' + (f' ({bt_addr[-8:]})' if bt_addr else ''), + 'type': 'macos', + 'status': 'available' + }) + except (FileNotFoundError, subprocess.TimeoutExpired, subprocess.SubprocessError): + devices['bt_adapters'].append({ + 'name': 'default', + 'display_name': 'Built-in Bluetooth', + 'type': 'macos', + 'status': 'available' + }) + + # Detect SDR devices + try: + from utils.sdr import SDRFactory + sdr_list = SDRFactory.detect_devices() + for sdr in sdr_list: + # SDRDevice is a dataclass with attributes, not a dict + sdr_type_name = sdr.sdr_type.value if hasattr(sdr.sdr_type, 'value') else str(sdr.sdr_type) + # Create a friendly display name + display_name = sdr.name + if sdr.serial and 
sdr.serial not in ('N/A', 'Unknown'): + display_name = f'{sdr.name} (SN: {sdr.serial[-8:]})' + devices['sdr_devices'].append({ + 'index': sdr.index, + 'name': sdr.name, + 'display_name': display_name, + 'type': sdr_type_name, + 'serial': sdr.serial, + 'driver': sdr.driver + }) + except ImportError: + logger.debug("SDR module not available") + except Exception as e: + logger.warning(f"Error detecting SDR devices: {e}") + + # Check if running as root + from flask import current_app + running_as_root = current_app.config.get('RUNNING_AS_ROOT', os.geteuid() == 0) + + warnings = [] + if not running_as_root: + warnings.append({ + 'type': 'privileges', + 'message': 'Not running as root. WiFi monitor mode and some Bluetooth features require sudo.', + 'action': 'Run with: sudo -E venv/bin/python intercept.py' + }) + + return jsonify({ + 'status': 'success', + 'devices': devices, + 'running_as_root': running_as_root, + 'warnings': warnings + }) + + +# ============================================================================= +# Preset Endpoints +# ============================================================================= + +@tscm_bp.route('/presets') +def list_presets(): + """List available sweep presets.""" + presets = get_all_sweep_presets() + return jsonify({'status': 'success', 'presets': presets}) + + +@tscm_bp.route('/presets/') +def get_preset(preset_name: str): + """Get details for a specific preset.""" + preset = get_sweep_preset(preset_name) + if not preset: + return jsonify({'status': 'error', 'message': 'Preset not found'}), 404 + + return jsonify({'status': 'success', 'preset': preset}) + + +# ============================================================================= +# Data Feed Endpoints (for adding data during sweeps/baselines) +# ============================================================================= + +@tscm_bp.route('/feed/wifi', methods=['POST']) +def feed_wifi(): + """Feed WiFi device data for baseline recording.""" + from routes.tscm 
import _baseline_recorder + + data = request.get_json() + if data: + if data.get('is_client'): + _baseline_recorder.add_wifi_client(data) + else: + _baseline_recorder.add_wifi_device(data) + return jsonify({'status': 'success'}) + + +@tscm_bp.route('/feed/bluetooth', methods=['POST']) +def feed_bluetooth(): + """Feed Bluetooth device data for baseline recording.""" + from routes.tscm import _baseline_recorder + + data = request.get_json() + if data: + _baseline_recorder.add_bt_device(data) + return jsonify({'status': 'success'}) + + +@tscm_bp.route('/feed/rf', methods=['POST']) +def feed_rf(): + """Feed RF signal data for baseline recording.""" + from routes.tscm import _baseline_recorder + + data = request.get_json() + if data: + _baseline_recorder.add_rf_signal(data) + return jsonify({'status': 'success'}) + + +# ============================================================================= +# Capabilities & Coverage Endpoints +# ============================================================================= + +@tscm_bp.route('/capabilities') +def get_capabilities(): + """ + Get current system capabilities for TSCM sweeping. + + Returns what the system CAN and CANNOT detect based on OS, + privileges, adapters, and SDR hardware. 
+ """ + try: + from utils.tscm.advanced import detect_sweep_capabilities + + wifi_interface = request.args.get('wifi_interface', '') + bt_adapter = request.args.get('bt_adapter', '') + + caps = detect_sweep_capabilities( + wifi_interface=wifi_interface, + bt_adapter=bt_adapter + ) + + return jsonify({ + 'status': 'success', + 'capabilities': caps.to_dict() + }) + + except Exception as e: + logger.error(f"Get capabilities error: {e}") + return jsonify({'status': 'error', 'message': str(e)}), 500 + + +@tscm_bp.route('/sweep//capabilities') +def get_sweep_stored_capabilities(sweep_id: int): + """Get stored capabilities for a specific sweep.""" + from utils.database import get_sweep_capabilities + + caps = get_sweep_capabilities(sweep_id) + if not caps: + return jsonify({'status': 'error', 'message': 'No capabilities stored for this sweep'}), 404 + + return jsonify({ + 'status': 'success', + 'capabilities': caps + }) diff --git a/routes/updater.py b/routes/updater.py index 2583905..285471a 100644 --- a/routes/updater.py +++ b/routes/updater.py @@ -4,6 +4,7 @@ from __future__ import annotations from flask import Blueprint, Response, jsonify, request +from utils.responses import api_success, api_error from utils.logging import get_logger from utils.updater import ( check_for_updates, @@ -39,10 +40,7 @@ def check_updates() -> Response: return jsonify(result) except Exception as e: logger.error(f"Error checking for updates: {e}") - return jsonify({ - 'success': False, - 'error': str(e) - }), 500 + return api_error(str(e), 500) @updater_bp.route('/status', methods=['GET']) @@ -61,10 +59,7 @@ def update_status() -> Response: return jsonify(result) except Exception as e: logger.error(f"Error getting update status: {e}") - return jsonify({ - 'success': False, - 'error': str(e) - }), 500 + return api_error(str(e), 500) @updater_bp.route('/update', methods=['POST']) @@ -100,10 +95,7 @@ def do_update() -> Response: except Exception as e: logger.error(f"Error performing update: 
{e}") - return jsonify({ - 'success': False, - 'error': str(e) - }), 500 + return api_error(str(e), 500) @updater_bp.route('/dismiss', methods=['POST']) @@ -124,20 +116,14 @@ def dismiss_notification() -> Response: version = data.get('version') if not version: - return jsonify({ - 'success': False, - 'error': 'Version is required' - }), 400 + return api_error('Version is required', 400) try: result = dismiss_update(version) return jsonify(result) except Exception as e: logger.error(f"Error dismissing update: {e}") - return jsonify({ - 'success': False, - 'error': str(e) - }), 500 + return api_error(str(e), 500) @updater_bp.route('/restart', methods=['POST']) diff --git a/routes/vdl2.py b/routes/vdl2.py index 853ebbe..d6dd0f5 100644 --- a/routes/vdl2.py +++ b/routes/vdl2.py @@ -18,6 +18,7 @@ from typing import Any, Generator from flask import Blueprint, Response, jsonify, request import app as app_module +from utils.responses import api_success, api_error from utils.acars_translator import translate_message from utils.constants import ( PROCESS_START_WAIT, @@ -181,18 +182,12 @@ def start_vdl2() -> Response: with app_module.vdl2_lock: if app_module.vdl2_process and app_module.vdl2_process.poll() is None: - return jsonify({ - 'status': 'error', - 'message': 'VDL2 decoder already running' - }), 409 + return api_error('VDL2 decoder already running', 409) # Check for dumpvdl2 dumpvdl2_path = find_dumpvdl2() if not dumpvdl2_path: - return jsonify({ - 'status': 'error', - 'message': 'dumpvdl2 not found. Install from: https://github.com/szpajder/dumpvdl2' - }), 400 + return api_error('dumpvdl2 not found. 
Install from: https://github.com/szpajder/dumpvdl2', 400) data = request.json or {} @@ -202,7 +197,7 @@ def start_vdl2() -> Response: gain = validate_gain(data.get('gain', '40')) ppm = validate_ppm(data.get('ppm', '0')) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Resolve SDR type for device selection sdr_type_str = data.get('sdr_type', 'rtlsdr') @@ -215,11 +210,7 @@ def start_vdl2() -> Response: device_int = int(device) error = app_module.claim_sdr_device(device_int, 'vdl2', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') vdl2_active_device = device_int vdl2_active_sdr_type = sdr_type_str @@ -312,7 +303,7 @@ def start_vdl2() -> Response: if stderr: error_msg += f': {stderr[:500]}' logger.error(error_msg) - return jsonify({'status': 'error', 'message': error_msg}), 500 + return api_error(error_msg, 500) app_module.vdl2_process = process register_process(process) @@ -339,7 +330,7 @@ def start_vdl2() -> Response: vdl2_active_device = None vdl2_active_sdr_type = None logger.error(f"Failed to start VDL2 decoder: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @vdl2_bp.route('/stop', methods=['POST']) @@ -349,10 +340,7 @@ def stop_vdl2() -> Response: with app_module.vdl2_lock: if not app_module.vdl2_process: - return jsonify({ - 'status': 'error', - 'message': 'VDL2 decoder not running' - }), 400 + return api_error('VDL2 decoder not running', 400) try: app_module.vdl2_process.terminate() diff --git a/routes/weather_sat.py b/routes/weather_sat.py index 74e801c..b3085b4 100644 --- a/routes/weather_sat.py +++ b/routes/weather_sat.py @@ -10,6 +10,7 @@ import queue from flask import Blueprint, jsonify, request, Response, send_file +from utils.responses import api_success, api_error from utils.logging import get_logger 
from utils.sse import sse_stream from utils.validation import validate_device_index, validate_gain, validate_latitude, validate_longitude, validate_elevation, validate_rtl_tcp_host, validate_rtl_tcp_port @@ -174,7 +175,7 @@ def start_capture(): rtl_tcp_host = validate_rtl_tcp_host(rtl_tcp_host) rtl_tcp_port = validate_rtl_tcp_port(rtl_tcp_port) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) # Claim SDR device (skip for remote rtl_tcp) if not rtl_tcp_host: @@ -182,11 +183,7 @@ def start_capture(): import app as app_module error = app_module.claim_sdr_device(device_index, 'weather_sat', sdr_type_str) if error: - return jsonify({ - 'status': 'error', - 'error_type': 'DEVICE_BUSY', - 'message': error, - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') except ImportError: pass @@ -417,15 +414,15 @@ def get_image(filename: str): # Security: only allow safe filenames if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not (filename.endswith('.png') or filename.endswith('.jpg') or filename.endswith('.jpeg')): - return jsonify({'status': 'error', 'message': 'Only PNG/JPG files supported'}), 400 + return api_error('Only PNG/JPG files supported', 400) image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) mimetype = 'image/png' if filename.endswith('.png') else 'image/jpeg' return send_file(image_path, mimetype=mimetype) @@ -444,12 +441,12 @@ def delete_image(filename: str): decoder = get_weather_sat_decoder() if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if 
decoder.delete_image(filename): return jsonify({'status': 'deleted', 'filename': filename}) else: - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) @weather_sat_bp.route('/images', methods=['DELETE']) @@ -500,17 +497,14 @@ def get_passes(): raw_lon = request.args.get('longitude') if raw_lat is None or raw_lon is None: - return jsonify({ - 'status': 'error', - 'message': 'latitude and longitude parameters required' - }), 400 + return api_error('latitude and longitude parameters required', 400) try: lat = validate_latitude(raw_lat) lon = validate_longitude(raw_lon) except ValueError as e: logger.warning('Invalid coordinates in get_passes: %s', e) - return jsonify({'status': 'error', 'message': 'Invalid coordinates'}), 400 + return api_error('Invalid coordinates', 400) hours = max(1, min(request.args.get('hours', 24, type=int), 72)) min_elevation = max(0, min(request.args.get('min_elevation', 15, type=float), 90)) @@ -668,10 +662,10 @@ def skip_pass(pass_id: str): from utils.weather_sat_scheduler import get_weather_sat_scheduler if not pass_id.replace('_', '').replace('-', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid pass ID'}), 400 + return api_error('Invalid pass ID', 400) scheduler = get_weather_sat_scheduler() if scheduler.skip_pass(pass_id): return jsonify({'status': 'skipped', 'pass_id': pass_id}) else: - return jsonify({'status': 'error', 'message': 'Pass not found or already processed'}), 404 + return api_error('Pass not found or already processed', 404) diff --git a/routes/websdr.py b/routes/websdr.py index a93528c..a7e762b 100644 --- a/routes/websdr.py +++ b/routes/websdr.py @@ -13,6 +13,8 @@ from typing import Optional from flask import Blueprint, Flask, jsonify, request, Response +from utils.responses import api_success, api_error + try: from flask_sock import Sock WEBSOCKET_AVAILABLE = True @@ -226,8 +228,7 @@ def list_receivers() -> Response: if r.get('freq_lo', 
0) <= freq_khz <= r.get('freq_hi', 30000) ] - return jsonify({ - 'status': 'success', + return api_success(data={ 'receivers': filtered[:100], 'total': len(filtered), 'cached_total': len(receivers), @@ -242,7 +243,7 @@ def nearest_receivers() -> Response: freq_khz = request.args.get('freq_khz', type=float) if lat is None or lon is None: - return jsonify({'status': 'error', 'message': 'lat and lon are required'}), 400 + return api_error('lat and lon are required', 400) receivers = get_receivers() @@ -264,10 +265,7 @@ def nearest_receivers() -> Response: with_distance.sort(key=lambda x: x['distance_km']) - return jsonify({ - 'status': 'success', - 'receivers': with_distance[:10], - }) + return api_success(data={'receivers': with_distance[:10]}) @websdr_bp.route('/spy-station//receivers') @@ -276,7 +274,7 @@ def spy_station_receivers(station_id: str) -> Response: try: from routes.spy_stations import STATIONS except ImportError: - return jsonify({'status': 'error', 'message': 'Spy stations module not available'}), 503 + return api_error('Spy stations module not available', 503) # Find the station station = None @@ -286,7 +284,7 @@ def spy_station_receivers(station_id: str) -> Response: break if not station: - return jsonify({'status': 'error', 'message': 'Station not found'}), 404 + return api_error('Station not found', 404) # Get primary frequency freq_khz = None @@ -298,7 +296,7 @@ def spy_station_receivers(station_id: str) -> Response: freq_khz = station['frequencies'][0].get('freq_khz') if freq_khz is None: - return jsonify({'status': 'error', 'message': 'No frequency found for station'}), 404 + return api_error('No frequency found for station', 404) receivers = get_receivers() @@ -308,8 +306,7 @@ def spy_station_receivers(station_id: str) -> Response: if r.get('freq_lo', 0) <= freq_khz <= r.get('freq_hi', 30000) and r.get('available', True) ] - return jsonify({ - 'status': 'success', + return api_success(data={ 'station': { 'id': station['id'], 'name': 
station.get('name', ''), diff --git a/routes/wefax.py b/routes/wefax.py index 401672a..f534e85 100644 --- a/routes/wefax.py +++ b/routes/wefax.py @@ -10,6 +10,7 @@ import queue from flask import Blueprint, Response, jsonify, request, send_file +from utils.responses import api_success, api_error import app as app_module from utils.logging import get_logger from utils.sdr import SDRType @@ -109,10 +110,7 @@ def start_decoder(): # Validate frequency (required) frequency_khz = data.get('frequency_khz') if frequency_khz is None: - return jsonify({ - 'status': 'error', - 'message': 'frequency_khz is required', - }), 400 + return api_error('frequency_khz is required', 400) try: frequency_khz = float(frequency_khz) @@ -120,10 +118,7 @@ def start_decoder(): freq_mhz = frequency_khz / 1000.0 validate_frequency(freq_mhz, min_mhz=2.0, max_mhz=30.0) except (TypeError, ValueError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid frequency: {e}', - }), 400 + return api_error(f'Invalid frequency: {e}', 400) station = str(data.get('station', '')).strip() device_index = data.get('device', 0) @@ -152,34 +147,21 @@ def start_decoder(): tuned_mhz = tuned_frequency_khz / 1000.0 validate_frequency(tuned_mhz, min_mhz=2.0, max_mhz=30.0) except ValueError as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid frequency settings: {e}', - }), 400 + return api_error(f'Invalid frequency settings: {e}', 400) # Validate IOC and LPM if ioc not in (288, 576): - return jsonify({ - 'status': 'error', - 'message': 'IOC must be 288 or 576', - }), 400 + return api_error('IOC must be 288 or 576', 400) if lpm not in (60, 120): - return jsonify({ - 'status': 'error', - 'message': 'LPM must be 60 or 120', - }), 400 + return api_error('LPM must be 60 or 120', 400) # Claim SDR device global wefax_active_device, wefax_active_sdr_type device_int = int(device_index) error = app_module.claim_sdr_device(device_int, 'wefax', sdr_type_str) if error: - return jsonify({ - 'status': 
'error', - 'error_type': 'DEVICE_BUSY', - 'message': error, - }), 409 + return api_error(error, 409, error_type='DEVICE_BUSY') # Set callback and start decoder.set_callback(_progress_callback) @@ -213,10 +195,7 @@ def start_decoder(): }) else: app_module.release_sdr_device(device_int, sdr_type_str) - return jsonify({ - 'status': 'error', - 'message': 'Failed to start decoder', - }), 500 + return api_error('Failed to start decoder', 500) @wefax_bp.route('/stop', methods=['POST']) @@ -275,14 +254,14 @@ def get_image(filename: str): decoder = get_wefax_decoder() if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) image_path = decoder._output_dir / filename if not image_path.exists(): - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) return send_file(image_path, mimetype='image/png') @@ -293,15 +272,15 @@ def delete_image(filename: str): decoder = get_wefax_decoder() if not filename.replace('_', '').replace('-', '').replace('.', '').isalnum(): - return jsonify({'status': 'error', 'message': 'Invalid filename'}), 400 + return api_error('Invalid filename', 400) if not filename.endswith('.png'): - return jsonify({'status': 'error', 'message': 'Only PNG files supported'}), 400 + return api_error('Only PNG files supported', 400) if decoder.delete_image(filename): return jsonify({'status': 'ok'}) else: - return jsonify({'status': 'error', 'message': 'Image not found'}), 404 + return api_error('Image not found', 404) @wefax_bp.route('/images', methods=['DELETE']) @@ -354,27 +333,18 @@ def enable_schedule(): station = str(data.get('station', '')).strip() if not station: - return jsonify({ - 'status': 
'error', - 'message': 'station is required', - }), 400 + return api_error('station is required', 400) frequency_khz = data.get('frequency_khz') if frequency_khz is None: - return jsonify({ - 'status': 'error', - 'message': 'frequency_khz is required', - }), 400 + return api_error('frequency_khz is required', 400) try: frequency_khz = float(frequency_khz) freq_mhz = frequency_khz / 1000.0 validate_frequency(freq_mhz, min_mhz=2.0, max_mhz=30.0) except (TypeError, ValueError) as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid frequency: {e}', - }), 400 + return api_error(f'Invalid frequency: {e}', 400) device = int(data.get('device', 0)) gain = float(data.get('gain', 40.0)) @@ -396,10 +366,7 @@ def enable_schedule(): tuned_mhz = tuned_frequency_khz / 1000.0 validate_frequency(tuned_mhz, min_mhz=2.0, max_mhz=30.0) except ValueError as e: - return jsonify({ - 'status': 'error', - 'message': f'Invalid frequency settings: {e}', - }), 400 + return api_error(f'Invalid frequency settings: {e}', 400) scheduler = get_wefax_scheduler() scheduler.set_callbacks(_progress_callback, _scheduler_event_callback) @@ -416,10 +383,7 @@ def enable_schedule(): ) except Exception: logger.exception("Failed to enable WeFax scheduler") - return jsonify({ - 'status': 'error', - 'message': 'Failed to enable scheduler', - }), 500 + return api_error('Failed to enable scheduler', 500) return jsonify({ 'status': 'ok', @@ -473,19 +437,13 @@ def skip_broadcast(broadcast_id: str): from utils.wefax_scheduler import get_wefax_scheduler if not broadcast_id.replace('_', '').replace('-', '').isalnum(): - return jsonify({ - 'status': 'error', - 'message': 'Invalid broadcast ID', - }), 400 + return api_error('Invalid broadcast ID', 400) scheduler = get_wefax_scheduler() if scheduler.skip_broadcast(broadcast_id): return jsonify({'status': 'skipped', 'broadcast_id': broadcast_id}) else: - return jsonify({ - 'status': 'error', - 'message': 'Broadcast not found or already processed', - }), 404 + 
return api_error('Broadcast not found or already processed', 404) @wefax_bp.route('/stations') @@ -504,10 +462,7 @@ def station_detail(callsign: str): """Get station detail including current schedule info.""" station = get_station(callsign) if not station: - return jsonify({ - 'status': 'error', - 'message': f'Station {callsign} not found', - }), 404 + return api_error(f'Station {callsign} not found', 404) current = get_current_broadcasts(callsign) diff --git a/routes/wifi.py b/routes/wifi.py index 38f85a6..61fbbbb 100644 --- a/routes/wifi.py +++ b/routes/wifi.py @@ -15,14 +15,15 @@ from typing import Any, Generator from flask import Blueprint, jsonify, request, Response +from utils.responses import api_success, api_error import app as app_module from utils.dependencies import check_tool, get_tool_path -from utils.logging import wifi_logger as logger -from utils.process import is_valid_mac, is_valid_channel -from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface -from utils.sse import format_sse, sse_stream_fanout -from utils.event_pipeline import process_event -from data.oui import get_manufacturer +from utils.logging import wifi_logger as logger +from utils.process import is_valid_mac, is_valid_channel +from utils.validation import validate_wifi_channel, validate_mac_address, validate_network_interface +from utils.sse import format_sse, sse_stream_fanout +from utils.event_pipeline import process_event +from data.oui import get_manufacturer from utils.constants import ( WIFI_TERMINATE_TIMEOUT, PMKID_TERMINATE_TIMEOUT, @@ -46,34 +47,52 @@ from utils.constants import ( wifi_bp = Blueprint('wifi', __name__, url_prefix='/wifi') +# --- v1 deprecation --- +# These endpoints are deprecated in favor of /wifi/v2/*. +# Frontend still uses v1, so they remain active. +# Migration: switch frontend to v2 endpoints, then remove this file. 
+_v1_deprecation_logged = set() + + +@wifi_bp.after_request +def _add_deprecation_header(response): + """Add X-Deprecated header to all v1 WiFi responses.""" + response.headers['X-Deprecated'] = 'Use /wifi/v2/* endpoints instead' + endpoint = request.endpoint or '' + if endpoint not in _v1_deprecation_logged: + _v1_deprecation_logged.add(endpoint) + logger.warning(f"Deprecated v1 WiFi endpoint called: {request.path} — migrate to /wifi/v2/*") + return response + + # PMKID process state -pmkid_process = None -pmkid_lock = threading.Lock() - - -def _parse_channel_list(raw_channels: Any) -> list[int] | None: - """Parse a channel list from string/list input.""" - if raw_channels in (None, '', []): - return None - - if isinstance(raw_channels, str): - parts = [p.strip() for p in re.split(r'[\s,]+', raw_channels) if p.strip()] - elif isinstance(raw_channels, (list, tuple, set)): - parts = list(raw_channels) - else: - parts = [raw_channels] - - channels: list[int] = [] - seen = set() - for part in parts: - if part in (None, ''): - continue - ch = validate_wifi_channel(part) - if ch not in seen: - channels.append(ch) - seen.add(ch) - - return channels or None +pmkid_process = None +pmkid_lock = threading.Lock() + + +def _parse_channel_list(raw_channels: Any) -> list[int] | None: + """Parse a channel list from string/list input.""" + if raw_channels in (None, '', []): + return None + + if isinstance(raw_channels, str): + parts = [p.strip() for p in re.split(r'[\s,]+', raw_channels) if p.strip()] + elif isinstance(raw_channels, (list, tuple, set)): + parts = list(raw_channels) + else: + parts = [raw_channels] + + channels: list[int] = [] + seen = set() + for part in parts: + if part in (None, ''): + continue + ch = validate_wifi_channel(part) + if ch not in seen: + channels.append(ch) + seen.add(ch) + + return channels or None def detect_wifi_interfaces(): @@ -455,7 +474,7 @@ def toggle_monitor_mode(): try: interface = validate_network_interface(data.get('interface')) except 
ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) if action == 'start': if check_tool('airmon-ng'): @@ -575,20 +594,17 @@ def toggle_monitor_mode(): all_wireless = [f for f in os.listdir('/sys/class/net') if os.path.exists(f'/sys/class/net/{f}/wireless') or 'mon' in f or f.startswith('wl')] logger.error(f"Monitor interface not found. Tried: {monitor_iface}. Available: {all_wireless}") - return jsonify({ - 'status': 'error', - 'message': f'Monitor interface not created. airmon-ng output: {output[:500]}. Available interfaces: {all_wireless}' - }) + return api_error(f'Monitor interface not created. airmon-ng output: {output[:500]}. Available interfaces: {all_wireless}') app_module.wifi_monitor_interface = monitor_iface app_module.wifi_queue.put({'type': 'info', 'text': f'Monitor mode enabled on {app_module.wifi_monitor_interface}'}) logger.info(f"Monitor mode enabled on {monitor_iface}") - return jsonify({'status': 'success', 'monitor_interface': app_module.wifi_monitor_interface}) + return api_success(data={'monitor_interface': app_module.wifi_monitor_interface}) except Exception as e: import traceback logger.error(f"Error enabling monitor mode: {e}", exc_info=True) - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) elif check_tool('iw'): try: @@ -596,11 +612,11 @@ def toggle_monitor_mode(): subprocess.run(['iw', interface, 'set', 'monitor', 'control'], capture_output=True) subprocess.run(['ip', 'link', 'set', interface, 'up'], capture_output=True) app_module.wifi_monitor_interface = interface - return jsonify({'status': 'success', 'monitor_interface': interface}) + return api_success(data={'monitor_interface': interface}) except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) else: - return jsonify({'status': 'error', 'message': 'No monitor mode tools available.'}) + return api_error('No monitor mode tools available.') 
else: # stop if check_tool('airmon-ng'): @@ -609,20 +625,20 @@ def toggle_monitor_mode(): subprocess.run([airmon_path, 'stop', app_module.wifi_monitor_interface or interface], capture_output=True, text=True, timeout=15) app_module.wifi_monitor_interface = None - return jsonify({'status': 'success', 'message': 'Monitor mode disabled'}) + return api_success(message='Monitor mode disabled') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) elif check_tool('iw'): try: subprocess.run(['ip', 'link', 'set', interface, 'down'], capture_output=True) subprocess.run(['iw', interface, 'set', 'type', 'managed'], capture_output=True) subprocess.run(['ip', 'link', 'set', interface, 'up'], capture_output=True) app_module.wifi_monitor_interface = None - return jsonify({'status': 'success', 'message': 'Monitor mode disabled'}) + return api_success(message='Monitor mode disabled') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) - return jsonify({'status': 'error', 'message': 'Unknown action'}) + return api_error('Unknown action') @wifi_bp.route('/scan/start', methods=['POST']) @@ -630,12 +646,12 @@ def start_wifi_scan(): """Start WiFi scanning with airodump-ng.""" with app_module.wifi_lock: if app_module.wifi_process: - return jsonify({'status': 'error', 'message': 'Scan already running'}) + return api_error('Scan already running') data = request.json - channel = data.get('channel') - channels = data.get('channels') - band = data.get('band', 'abg') + channel = data.get('channel') + channels = data.get('channels') + band = data.get('band', 'abg') # Use provided interface or fall back to stored monitor interface interface = data.get('interface') @@ -643,21 +659,18 @@ def start_wifi_scan(): try: interface = validate_network_interface(interface) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) else: 
interface = app_module.wifi_monitor_interface if not interface: - return jsonify({'status': 'error', 'message': 'No monitor interface available.'}) + return api_error('No monitor interface available.') # Verify interface exists if not os.path.exists(f'/sys/class/net/{interface}'): all_wireless = [f for f in os.listdir('/sys/class/net') if os.path.exists(f'/sys/class/net/{f}/wireless') or 'mon' in f or f.startswith('wl')] - return jsonify({ - 'status': 'error', - 'message': f'Interface "{interface}" does not exist. Available: {all_wireless}' - }) + return api_error(f'Interface "{interface}" does not exist. Available: {all_wireless}') app_module.wifi_networks = {} app_module.wifi_clients = {} @@ -685,17 +698,17 @@ def start_wifi_scan(): interface ] - channel_list = None - if channels: - try: - channel_list = _parse_channel_list(channels) - except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 - - if channel_list: - cmd.extend(['-c', ','.join(str(c) for c in channel_list)]) - elif channel: - cmd.extend(['-c', str(channel)]) + channel_list = None + if channels: + try: + channel_list = _parse_channel_list(channels) + except ValueError as e: + return api_error(str(e), 400) + + if channel_list: + cmd.extend(['-c', ','.join(str(c) for c in channel_list)]) + elif channel: + cmd.extend(['-c', str(channel)]) logger.info(f"Running: {' '.join(cmd)}") @@ -723,7 +736,7 @@ def start_wifi_scan(): error_msg = 'Permission denied. Try running with sudo.' 
logger.error(f"airodump-ng failed for interface '{interface}': {error_msg}") - return jsonify({'status': 'error', 'message': error_msg, 'interface': interface}) + return api_error(error_msg) thread = threading.Thread(target=stream_airodump_output, args=(app_module.wifi_process, csv_path)) thread.daemon = True @@ -734,9 +747,9 @@ def start_wifi_scan(): return jsonify({'status': 'started', 'interface': interface}) except FileNotFoundError: - return jsonify({'status': 'error', 'message': 'airodump-ng not found.'}) + return api_error('airodump-ng not found.') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @wifi_bp.route('/scan/stop', methods=['POST']) @@ -768,18 +781,18 @@ def send_deauth(): try: interface = validate_network_interface(interface) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) else: interface = app_module.wifi_monitor_interface if not target_bssid: - return jsonify({'status': 'error', 'message': 'Target BSSID required'}) + return api_error('Target BSSID required') if not is_valid_mac(target_bssid): - return jsonify({'status': 'error', 'message': 'Invalid BSSID format'}) + return api_error('Invalid BSSID format') if not is_valid_mac(target_client): - return jsonify({'status': 'error', 'message': 'Invalid client MAC format'}) + return api_error('Invalid client MAC format') try: count = int(count) @@ -789,10 +802,10 @@ def send_deauth(): count = 5 if not interface: - return jsonify({'status': 'error', 'message': 'No monitor interface'}) + return api_error('No monitor interface') if not check_tool('aireplay-ng'): - return jsonify({'status': 'error', 'message': 'aireplay-ng not found'}) + return api_error('aireplay-ng not found') try: aireplay_path = get_tool_path('aireplay-ng') @@ -809,14 +822,14 @@ def send_deauth(): result = subprocess.run(cmd, capture_output=True, text=True, timeout=30) if result.returncode == 0: - return 
jsonify({'status': 'success', 'message': f'Sent {count} deauth packets'}) + return api_success(message=f'Sent {count} deauth packets') else: - return jsonify({'status': 'error', 'message': result.stderr}) + return api_error(result.stderr) except subprocess.TimeoutExpired: - return jsonify({'status': 'success', 'message': 'Deauth sent (timed out)'}) + return api_success(message='Deauth sent (timed out)') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @wifi_bp.route('/handshake/capture', methods=['POST']) @@ -832,22 +845,22 @@ def capture_handshake(): try: interface = validate_network_interface(interface) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) else: interface = app_module.wifi_monitor_interface if not target_bssid or not channel: - return jsonify({'status': 'error', 'message': 'BSSID and channel required'}) + return api_error('BSSID and channel required') if not is_valid_mac(target_bssid): - return jsonify({'status': 'error', 'message': 'Invalid BSSID format'}) + return api_error('Invalid BSSID format') if not is_valid_channel(channel): - return jsonify({'status': 'error', 'message': 'Invalid channel'}) + return api_error('Invalid channel') with app_module.wifi_lock: if app_module.wifi_process: - return jsonify({'status': 'error', 'message': 'Scan already running.'}) + return api_error('Scan already running.') capture_path = f'/tmp/intercept_handshake_{target_bssid.replace(":", "")}' @@ -866,7 +879,7 @@ def capture_handshake(): app_module.wifi_queue.put({'type': 'info', 'text': f'Capturing handshakes for {target_bssid}'}) return jsonify({'status': 'started', 'capture_file': capture_path + '-01.cap'}) except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @wifi_bp.route('/handshake/status', methods=['POST']) @@ -877,7 +890,7 @@ def check_handshake_status(): target_bssid = 
data.get('bssid', '') if not capture_file.startswith('/tmp/intercept_handshake_') or '..' in capture_file: - return jsonify({'status': 'error', 'message': 'Invalid capture file path'}) + return api_error('Invalid capture file path') if not os.path.exists(capture_file): with app_module.wifi_lock: @@ -887,53 +900,53 @@ def check_handshake_status(): return jsonify({'status': 'stopped', 'file_exists': False, 'handshake_found': False}) file_size = os.path.getsize(capture_file) - handshake_found = False - handshake_valid: bool | None = None - handshake_checked = False - handshake_reason: str | None = None + handshake_found = False + handshake_valid: bool | None = None + handshake_checked = False + handshake_reason: str | None = None try: - if target_bssid and is_valid_mac(target_bssid): - aircrack_path = get_tool_path('aircrack-ng') - if aircrack_path: - result = subprocess.run( - [aircrack_path, '-a', '2', '-b', target_bssid, capture_file], - capture_output=True, text=True, timeout=10 - ) - output = result.stdout + result.stderr - output_lower = output.lower() - handshake_checked = True - - if 'no valid wpa handshakes found' in output_lower: - handshake_valid = False - handshake_reason = 'No valid WPA handshake found' - elif '0 handshake' in output_lower: - handshake_valid = False - elif '1 handshake' in output_lower or ('handshake' in output_lower and 'wpa' in output_lower): - handshake_valid = True - else: - handshake_valid = False + if target_bssid and is_valid_mac(target_bssid): + aircrack_path = get_tool_path('aircrack-ng') + if aircrack_path: + result = subprocess.run( + [aircrack_path, '-a', '2', '-b', target_bssid, capture_file], + capture_output=True, text=True, timeout=10 + ) + output = result.stdout + result.stderr + output_lower = output.lower() + handshake_checked = True + + if 'no valid wpa handshakes found' in output_lower: + handshake_valid = False + handshake_reason = 'No valid WPA handshake found' + elif '0 handshake' in output_lower: + handshake_valid 
= False + elif '1 handshake' in output_lower or ('handshake' in output_lower and 'wpa' in output_lower): + handshake_valid = True + else: + handshake_valid = False except subprocess.TimeoutExpired: pass - except Exception as e: - logger.error(f"Error checking handshake: {e}") - - if handshake_valid: - handshake_found = True - normalized_bssid = target_bssid.upper() if target_bssid else None - if normalized_bssid and normalized_bssid not in app_module.wifi_handshakes: - app_module.wifi_handshakes.append(normalized_bssid) - - return jsonify({ - 'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped', - 'file_exists': True, - 'file_size': file_size, - 'file': capture_file, - 'handshake_found': handshake_found, - 'handshake_valid': handshake_valid, - 'handshake_checked': handshake_checked, - 'handshake_reason': handshake_reason - }) + except Exception as e: + logger.error(f"Error checking handshake: {e}") + + if handshake_valid: + handshake_found = True + normalized_bssid = target_bssid.upper() if target_bssid else None + if normalized_bssid and normalized_bssid not in app_module.wifi_handshakes: + app_module.wifi_handshakes.append(normalized_bssid) + + return jsonify({ + 'status': 'running' if app_module.wifi_process and app_module.wifi_process.poll() is None else 'stopped', + 'file_exists': True, + 'file_size': file_size, + 'file': capture_file, + 'handshake_found': handshake_found, + 'handshake_valid': handshake_valid, + 'handshake_checked': handshake_checked, + 'handshake_reason': handshake_reason + }) @wifi_bp.route('/pmkid/capture', methods=['POST']) @@ -951,19 +964,19 @@ def capture_pmkid(): try: interface = validate_network_interface(interface) except ValueError as e: - return jsonify({'status': 'error', 'message': str(e)}), 400 + return api_error(str(e), 400) else: interface = app_module.wifi_monitor_interface if not target_bssid: - return jsonify({'status': 'error', 'message': 'BSSID required'}) + return 
api_error('BSSID required') if not is_valid_mac(target_bssid): - return jsonify({'status': 'error', 'message': 'Invalid BSSID format'}) + return api_error('Invalid BSSID format') with pmkid_lock: if pmkid_process and pmkid_process.poll() is None: - return jsonify({'status': 'error', 'message': 'PMKID capture already running'}) + return api_error('PMKID capture already running') capture_path = f'/tmp/intercept_pmkid_{target_bssid.replace(":", "")}.pcapng' filter_file = f'/tmp/pmkid_filter_{target_bssid.replace(":", "")}' @@ -986,9 +999,9 @@ def capture_pmkid(): pmkid_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return jsonify({'status': 'started', 'file': capture_path}) except FileNotFoundError: - return jsonify({'status': 'error', 'message': 'hcxdumptool not found.'}) + return api_error('hcxdumptool not found.') except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) + return api_error(str(e)) @wifi_bp.route('/pmkid/status', methods=['POST']) @@ -998,7 +1011,7 @@ def check_pmkid_status(): capture_file = data.get('file', '') if not capture_file.startswith('/tmp/intercept_pmkid_') or '..' in capture_file: - return jsonify({'status': 'error', 'message': 'Invalid capture file path'}) + return api_error('Invalid capture file path') if not os.path.exists(capture_file): return jsonify({'pmkid_found': False, 'file_exists': False}) @@ -1054,23 +1067,23 @@ def crack_handshake(): # Validate paths to prevent path traversal if not capture_file.startswith('/tmp/intercept_handshake_') or '..' in capture_file: - return jsonify({'status': 'error', 'message': 'Invalid capture file path'}), 400 + return api_error('Invalid capture file path', 400) if '..' 
in wordlist: - return jsonify({'status': 'error', 'message': 'Invalid wordlist path'}), 400 + return api_error('Invalid wordlist path', 400) if not os.path.exists(capture_file): - return jsonify({'status': 'error', 'message': 'Capture file not found'}), 404 + return api_error('Capture file not found', 404) if not os.path.exists(wordlist): - return jsonify({'status': 'error', 'message': 'Wordlist file not found'}), 404 + return api_error('Wordlist file not found', 404) if target_bssid and not is_valid_mac(target_bssid): - return jsonify({'status': 'error', 'message': 'Invalid BSSID format'}), 400 + return api_error('Invalid BSSID format', 400) aircrack_path = get_tool_path('aircrack-ng') if not aircrack_path: - return jsonify({'status': 'error', 'message': 'aircrack-ng not found'}), 500 + return api_error('aircrack-ng not found', 500) try: cmd = [aircrack_path, '-a', '2', '-w', wordlist] @@ -1099,8 +1112,7 @@ def crack_handshake(): if match: password = match.group(1) logger.info(f"Password cracked for {target_bssid}: {password}") - return jsonify({ - 'status': 'success', + return api_success(data={ 'password': password, 'bssid': target_bssid }) @@ -1118,7 +1130,7 @@ def crack_handshake(): }) except Exception as e: logger.error(f"Crack error: {e}") - return jsonify({'status': 'error', 'message': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/networks') @@ -1132,26 +1144,26 @@ def get_wifi_networks(): }) -@wifi_bp.route('/stream') -def stream_wifi(): - """SSE stream for WiFi events.""" - def _on_msg(msg: dict[str, Any]) -> None: - process_event('wifi', msg, msg.get('type')) - - response = Response( - sse_stream_fanout( - source_queue=app_module.wifi_queue, - channel_key='wifi', - timeout=1.0, - keepalive_interval=30.0, - on_message=_on_msg, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' - return response 
+@wifi_bp.route('/stream') +def stream_wifi(): + """SSE stream for WiFi events.""" + def _on_msg(msg: dict[str, Any]) -> None: + process_event('wifi', msg, msg.get('type')) + + response = Response( + sse_stream_fanout( + source_queue=app_module.wifi_queue, + channel_key='wifi', + timeout=1.0, + keepalive_interval=30.0, + on_message=_on_msg, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' + return response # ============================================================================= @@ -1189,7 +1201,7 @@ def get_v2_capabilities(): }) except Exception as e: logger.exception("Error checking capabilities") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/scan/quick', methods=['POST']) @@ -1220,7 +1232,7 @@ def v2_quick_scan(): }) except Exception as e: logger.exception("Error in quick scan") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/scan/start', methods=['POST']) @@ -1239,10 +1251,10 @@ def v2_start_scan(): return jsonify({'status': 'started'}) else: status = scanner.get_status() - return jsonify({'error': status.error or 'Failed to start scan'}), 400 + return api_error(status.error or 'Failed to start scan', 400) except Exception as e: logger.exception("Error starting deep scan") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/scan/stop', methods=['POST']) @@ -1254,7 +1266,7 @@ def v2_stop_scan(): return jsonify({'status': 'stopped'}) except Exception as e: logger.exception("Error stopping scan") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/scan/status') @@ -1274,7 +1286,7 @@ def v2_scan_status(): }) except Exception as e: logger.exception("Error getting scan status") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) 
@wifi_bp.route('/v2/networks') @@ -1289,7 +1301,7 @@ def v2_get_networks(): }) except Exception as e: logger.exception("Error getting networks") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/clients') @@ -1326,7 +1338,7 @@ def v2_get_clients(): }) except Exception as e: logger.exception("Error getting clients") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/probes') @@ -1341,7 +1353,7 @@ def v2_get_probes(): }) except Exception as e: logger.exception("Error getting probes") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/channels') @@ -1357,7 +1369,7 @@ def v2_get_channels(): }) except Exception as e: logger.exception("Error getting channel stats") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/stream') @@ -1448,11 +1460,11 @@ def v2_export(): return response else: - return jsonify({'error': f'Unknown format: {format_type}'}), 400 + return api_error(f'Unknown format: {format_type}', 400) except Exception as e: logger.exception("Error exporting data") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/baseline/set', methods=['POST']) @@ -1464,7 +1476,7 @@ def v2_set_baseline(): return jsonify({'status': 'baseline_set', 'count': len(scanner._baseline_networks)}) except Exception as e: logger.exception("Error setting baseline") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/baseline/clear', methods=['POST']) @@ -1476,7 +1488,7 @@ def v2_clear_baseline(): return jsonify({'status': 'baseline_cleared'}) except Exception as e: logger.exception("Error clearing baseline") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/clear', methods=['POST']) @@ -1488,7 +1500,7 @@ def v2_clear_data(): return jsonify({'status': 'cleared'}) except Exception as e: 
logger.exception("Error clearing data") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) # ============================================================================= @@ -1535,11 +1547,11 @@ def v2_deauth_status(): }) except Exception as e: logger.exception("Error getting deauth status") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) -@wifi_bp.route('/v2/deauth/stream') -def v2_deauth_stream(): +@wifi_bp.route('/v2/deauth/stream') +def v2_deauth_stream(): """ SSE stream for real-time deauth alerts. @@ -1550,18 +1562,18 @@ def v2_deauth_stream(): - deauth_error: An error occurred - keepalive: Periodic keepalive """ - response = Response( - sse_stream_fanout( - source_queue=app_module.deauth_detector_queue, - channel_key='wifi_deauth', - timeout=SSE_QUEUE_TIMEOUT, - keepalive_interval=SSE_KEEPALIVE_INTERVAL, - ), - mimetype='text/event-stream', - ) - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' - response.headers['Connection'] = 'keep-alive' + response = Response( + sse_stream_fanout( + source_queue=app_module.deauth_detector_queue, + channel_key='wifi_deauth', + timeout=SSE_QUEUE_TIMEOUT, + keepalive_interval=SSE_KEEPALIVE_INTERVAL, + ), + mimetype='text/event-stream', + ) + response.headers['Cache-Control'] = 'no-cache' + response.headers['X-Accel-Buffering'] = 'no' + response.headers['Connection'] = 'keep-alive' return response @@ -1600,7 +1612,7 @@ def v2_deauth_alerts(): }) except Exception as e: logger.exception("Error getting deauth alerts") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) @wifi_bp.route('/v2/deauth/clear', methods=['POST']) @@ -1620,4 +1632,4 @@ def v2_deauth_clear(): return jsonify({'status': 'cleared'}) except Exception as e: logger.exception("Error clearing deauth alerts") - return jsonify({'error': str(e)}), 500 + return api_error(str(e), 500) diff --git a/routes/wifi_v2.py b/routes/wifi_v2.py index ec9748e..9ab04bc 
100644 --- a/routes/wifi_v2.py +++ b/routes/wifi_v2.py @@ -16,16 +16,17 @@ from typing import Generator from flask import Blueprint, jsonify, request, Response -from utils.wifi import ( - get_wifi_scanner, - analyze_channels, - get_hidden_correlator, - SCAN_MODE_QUICK, - SCAN_MODE_DEEP, -) -from utils.sse import format_sse -from utils.validation import validate_wifi_channel -from utils.event_pipeline import process_event +from utils.wifi import ( + get_wifi_scanner, + analyze_channels, + get_hidden_correlator, + SCAN_MODE_QUICK, + SCAN_MODE_DEEP, +) +from utils.responses import api_success, api_error +from utils.sse import format_sse +from utils.validation import validate_wifi_channel +from utils.event_pipeline import process_event logger = logging.getLogger(__name__) @@ -87,44 +88,44 @@ def start_deep_scan(): Requires monitor mode interface and root privileges. - Request body: - interface: Monitor mode interface (e.g., 'wlan0mon') - band: Band to scan ('2.4', '5', 'all') - channel: Optional specific channel to monitor - channels: Optional list or comma-separated channels to monitor + Request body: + interface: Monitor mode interface (e.g., 'wlan0mon') + band: Band to scan ('2.4', '5', 'all') + channel: Optional specific channel to monitor + channels: Optional list or comma-separated channels to monitor """ data = request.get_json() or {} interface = data.get('interface') band = data.get('band', 'all') - channel = data.get('channel') - channels = data.get('channels') - - channel_list = None - if channels: - if isinstance(channels, str): - channel_list = [c.strip() for c in channels.split(',') if c.strip()] - elif isinstance(channels, (list, tuple, set)): - channel_list = list(channels) - else: - channel_list = [channels] - try: - channel_list = [validate_wifi_channel(c) for c in channel_list] - except (TypeError, ValueError): - return jsonify({'error': 'Invalid channels'}), 400 - - if channel: - try: - channel = validate_wifi_channel(channel) - except ValueError: - 
return jsonify({'error': 'Invalid channel'}), 400 + channel = data.get('channel') + channels = data.get('channels') + + channel_list = None + if channels: + if isinstance(channels, str): + channel_list = [c.strip() for c in channels.split(',') if c.strip()] + elif isinstance(channels, (list, tuple, set)): + channel_list = list(channels) + else: + channel_list = [channels] + try: + channel_list = [validate_wifi_channel(c) for c in channel_list] + except (TypeError, ValueError): + return api_error('Invalid channels', 400) + + if channel: + try: + channel = validate_wifi_channel(channel) + except ValueError: + return api_error('Invalid channel', 400) scanner = get_wifi_scanner() - success = scanner.start_deep_scan( - interface=interface, - band=band, - channel=channel, - channels=channel_list, - ) + success = scanner.start_deep_scan( + interface=interface, + band=band, + channel=channel, + channels=channel_list, + ) if success: return jsonify({ @@ -133,10 +134,7 @@ def start_deep_scan(): 'interface': interface or scanner._capabilities.monitor_interface, }) else: - return jsonify({ - 'status': 'error', - 'error': scanner._status.error, - }), 400 + return api_error(scanner._status.error or 'Scan failed', 400) @wifi_v2_bp.route('/scan/stop', methods=['POST']) @@ -235,7 +233,7 @@ def get_network(bssid): if network: return jsonify(network.to_dict()) else: - return jsonify({'error': 'Network not found'}), 404 + return api_error('Network not found', 404) @wifi_v2_bp.route('/clients', methods=['GET']) @@ -282,7 +280,7 @@ def get_client(mac): if client: return jsonify(client.to_dict()) else: - return jsonify({'error': 'Client not found'}), 404 + return api_error('Client not found', 404) @wifi_v2_bp.route('/probes', methods=['GET']) @@ -406,14 +404,14 @@ def event_stream(): - keepalive: Periodic keepalive """ def generate() -> Generator[str, None, None]: - scanner = get_wifi_scanner() - - for event in scanner.get_event_stream(): - try: - process_event('wifi', event, 
event.get('type')) - except Exception: - pass - yield format_sse(event) + scanner = get_wifi_scanner() + + for event in scanner.get_event_stream(): + try: + process_event('wifi', event, event.get('type')) + except Exception: + pass + yield format_sse(event) response = Response(generate(), mimetype='text/event-stream') response.headers['Cache-Control'] = 'no-cache' diff --git a/start.sh b/start.sh index 2ff6bdf..2f8e625 100755 --- a/start.sh +++ b/start.sh @@ -86,6 +86,11 @@ done export INTERCEPT_HOST="$HOST" export INTERCEPT_PORT="$PORT" +# ── macOS: allow fork() after ObjC initialisation (gunicorn + gevent) ──── +if [[ "$(uname)" == "Darwin" ]]; then + export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES +fi + # ── Fix ownership of user data dirs when run via sudo ──────────────────────── # When invoked via sudo the server process runs as root, so every file it # creates (configs, logs, database) ends up owned by root. On the *next* @@ -152,7 +157,17 @@ fi # ── Resolve LAN address for display ────────────────────────────────────────── if [[ "$HOST" == "0.0.0.0" ]]; then - LAN_IP=$(hostname -I 2>/dev/null | awk '{print $1}') + LAN_IP=$(hostname -I 2>/dev/null | awk '{print $1}' || true) + # hostname -I on macOS fails or returns empty — try macOS methods + if [[ -z "$LAN_IP" ]]; then + LAN_IP=$(ipconfig getifaddr en0 2>/dev/null || true) + fi + if [[ -z "$LAN_IP" ]]; then + LAN_IP=$(ipconfig getifaddr en1 2>/dev/null || true) + fi + if [[ -z "$LAN_IP" ]]; then + LAN_IP=$(ifconfig 2>/dev/null | grep "inet " | grep -v 127.0.0.1 | head -1 | awk '{print $2}' || true) + fi LAN_IP="${LAN_IP:-localhost}" else LAN_IP="$HOST" diff --git a/static/css/core/components.css b/static/css/core/components.css index f6ba3ed..ef4dc7f 100644 --- a/static/css/core/components.css +++ b/static/css/core/components.css @@ -84,6 +84,18 @@ border-color: var(--accent-red-hover); } +.btn-danger-outline { + background: transparent; + color: var(--accent-red); + border-color: var(--accent-red); +} + 
+.btn-danger-outline:hover:not(:disabled) { + background: var(--accent-red-dim); + color: var(--accent-red); + border-color: var(--accent-red); +} + .btn-success { background: var(--accent-green); color: var(--text-inverse); @@ -878,6 +890,67 @@ textarea:focus { filter: grayscale(30%); } +/* ============================================ + MODAL CLOSE BUTTON + ============================================ */ +.modal-close-btn { + position: absolute; + top: 8px; + right: 8px; + display: inline-flex; + align-items: center; + justify-content: center; + width: 28px; + height: 28px; + background: transparent; + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + color: var(--text-secondary); + cursor: pointer; + font-size: 16px; + line-height: 1; + transition: all var(--transition-fast); +} +.modal-close-btn:hover { background: var(--bg-elevated); color: var(--text-primary); } +.modal-close-btn:focus-visible { outline: 2px solid var(--border-focus); outline-offset: 2px; } + +/* Aliases for existing modal-specific close classes */ +.settings-close, +.help-close, +.wifi-detail-close, +.bt-modal-close, +.tscm-modal-close, +.signal-details-modal-close { + position: absolute; + top: 8px; + right: 8px; + display: inline-flex; + align-items: center; + justify-content: center; + width: 28px; + height: 28px; + background: transparent; + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + color: var(--text-secondary); + cursor: pointer; + font-size: 16px; + line-height: 1; + transition: all var(--transition-fast); +} +.settings-close:hover, +.help-close:hover, +.wifi-detail-close:hover, +.bt-modal-close:hover, +.tscm-modal-close:hover, +.signal-details-modal-close:hover { background: var(--bg-elevated); color: var(--text-primary); } +.settings-close:focus-visible, +.help-close:focus-visible, +.wifi-detail-close:focus-visible, +.bt-modal-close:focus-visible, +.tscm-modal-close:focus-visible, +.signal-details-modal-close:focus-visible { 
outline: 2px solid var(--border-focus); outline-offset: 2px; } + /* ============================================ CONFIRMATION MODAL ============================================ */ diff --git a/static/css/core/variables.css b/static/css/core/variables.css index cd20962..850953a 100644 --- a/static/css/core/variables.css +++ b/static/css/core/variables.css @@ -160,7 +160,7 @@ /* ============================================ LAYOUT ============================================ */ - --header-height: 60px; + --header-height: 48px; --nav-height: 44px; --sidebar-width: 280px; --stats-strip-height: 36px; @@ -224,8 +224,8 @@ --text-primary: #122034; --text-secondary: #3a4a5f; - --text-dim: #6b7c93; - --text-muted: #aab6c8; + --text-dim: #566a7f; + --text-muted: #7a8a9e; --text-inverse: #f4f7fb; --border-color: #d1d9e6; diff --git a/static/css/index.css b/static/css/index.css index e5580af..56954b2 100644 --- a/static/css/index.css +++ b/static/css/index.css @@ -212,10 +212,6 @@ body { } .welcome-settings-btn { - position: absolute; - top: 12px; - right: 12px; - z-index: 2; background: none; border: none; cursor: pointer; @@ -223,6 +219,8 @@ body { border-radius: 6px; color: var(--text-dim, rgba(255, 255, 255, 0.3)); transition: color 0.2s, background 0.2s; + margin-left: auto; + flex-shrink: 0; } .welcome-settings-btn:hover { @@ -251,10 +249,9 @@ body { .welcome-header { display: flex; align-items: center; - justify-content: center; - gap: 20px; - margin-bottom: 30px; - padding-bottom: 20px; + gap: 14px; + margin-bottom: 24px; + padding-bottom: 16px; border-bottom: 1px solid var(--border-color); } @@ -300,38 +297,37 @@ body { } } -.welcome-title-block { - text-align: left; -} - .welcome-title { font-family: var(--font-mono); - font-size: 2.5rem; + font-size: 1.6rem; font-weight: 700; color: var(--text-primary); letter-spacing: 0.2em; margin: 0; text-shadow: 0 0 20px rgba(0, 212, 255, 0.3); + white-space: nowrap; } .welcome-tagline { font-family: var(--font-mono); - font-size: 
0.9rem; + font-size: 0.8rem; color: var(--accent-cyan); - letter-spacing: 0.15em; - margin: 4px 0 0 0; + letter-spacing: 0.1em; + margin: 0; + opacity: 0.7; + white-space: nowrap; } .welcome-version { display: inline-block; font-family: var(--font-mono); - font-size: 0.65rem; + font-size: 0.6rem; color: var(--bg-primary); background: var(--accent-cyan); padding: 2px 8px; border-radius: 3px; letter-spacing: 0.05em; - margin-top: 8px; + flex-shrink: 0; } /* Welcome Content Grid */ @@ -572,6 +568,21 @@ body { margin: 0; } +.welcome-footer-credit { + display: inline-block; + font-family: var(--font-mono); + font-size: 0.65rem; + color: var(--text-dim); + text-decoration: none; + letter-spacing: 0.08em; + margin-top: 6px; + transition: color 0.2s; +} + +.welcome-footer-credit:hover { + color: var(--accent-cyan); +} + /* Welcome Scanline */ .welcome-scanline { position: absolute; @@ -606,12 +617,9 @@ body { } .welcome-header { - flex-direction: column; - text-align: center; -} - -.welcome-title-block { - text-align: center; + flex-wrap: wrap; + justify-content: center; + gap: 8px 14px; } .mode-grid { @@ -639,12 +647,7 @@ body { } .welcome-header { - flex-direction: row; - text-align: left; - } - - .welcome-title-block { - text-align: left; + flex-wrap: nowrap; } .mode-grid-compact { @@ -664,28 +667,27 @@ body { header { background: var(--bg-secondary); - padding: 10px 12px; + padding: 0 16px; display: flex; align-items: center; - justify-content: flex-start; - gap: 10px; + justify-content: space-between; border-bottom: 1px solid var(--border-color); position: relative; - min-height: 52px; + height: 48px; } -@media (min-width: 768px) { - header { - justify-content: center; - padding: 12px 20px; - } +.header-left { + display: flex; + align-items: center; + gap: 10px; + min-width: 0; } -@media (min-width: 1024px) { - header { - text-align: center; - display: block; - } +.header-right { + display: flex; + align-items: center; + gap: 12px; + flex-shrink: 0; } header::before { 
@@ -709,14 +711,13 @@ header h1 { font-weight: 600; letter-spacing: 0.15em; margin: 0; - display: inline; - vertical-align: middle; + white-space: nowrap; } .logo { - display: inline-block; - vertical-align: middle; - margin-right: 8px; + display: flex; + align-items: center; + flex-shrink: 0; } .logo svg { @@ -917,7 +918,7 @@ header h1 { left: 0; margin-top: 4px; min-width: 180px; - background: var(--bg-secondary); + background: #101823; border: 1px solid var(--border-color); border-radius: 6px; box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4); @@ -1088,19 +1089,7 @@ header h1 { border: 1px solid var(--border-color); } -header p { - color: var(--text-secondary); - font-size: 11px; - letter-spacing: 0.1em; - text-transform: uppercase; - margin: 4px 0 8px 0; -} - -header p.subtitle { - font-size: 10px; - color: var(--text-secondary); - margin: 0 0 8px 0; -} +/* subtitle removed — compact header */ header h1 .tagline { font-weight: 400; @@ -1204,15 +1193,14 @@ header h1 .tagline { display: inline-flex; align-items: center; gap: 6px; - padding: 4px 12px; + padding: 4px 14px; background: var(--accent-cyan); color: var(--bg-primary); border-radius: 4px; - font-size: 10px; - font-weight: 600; + font-size: 11px; + font-weight: 700; text-transform: uppercase; letter-spacing: 1px; - margin-left: 10px; } .active-mode-indicator .pulse-dot { @@ -1631,10 +1619,9 @@ header h1 .tagline { .section.collapsed h3 { border-bottom: none; - margin-bottom: 0 !important; + margin: 0 !important; min-height: 0 !important; - padding-top: 10px !important; - padding-bottom: 10px !important; + padding: 10px 12px !important; } .section.collapsed h3::after { @@ -1643,7 +1630,7 @@ header h1 .tagline { } .section.collapsed { - padding-bottom: 0 !important; + padding: 0 !important; min-height: 0; } @@ -2433,6 +2420,19 @@ header h1 .tagline { font-style: italic; } +.mode-content h3 { + font-size: 11px; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.08em; + color: var(--text-secondary); 
+ margin: 0 0 10px 0; + padding: 0; + display: flex; + align-items: center; + gap: 6px; +} + .mode-content { display: none; } @@ -7261,6 +7261,12 @@ body[data-mode="tscm"] { box-shadow: var(--visual-glow-soft), inset 0 1px 0 rgba(255, 255, 255, 0.03); } +.mode-nav-dropdown-menu { + background: linear-gradient(180deg, #162130 0%, #0e1621 100%); + border-color: rgba(74, 163, 255, 0.22); + box-shadow: 0 8px 24px rgba(0, 0, 0, 0.5), 0 0 0 1px rgba(74, 163, 255, 0.1); +} + .run-state-strip { margin: 8px var(--top-rail-gutter) 0; border-color: rgba(74, 163, 255, 0.3); @@ -7312,7 +7318,7 @@ body[data-mode="tscm"] { } .section h3 { - background: linear-gradient(180deg, rgba(28, 44, 63, 0.88) 0%, rgba(20, 31, 44, 0.9) 100%); + background: linear-gradient(180deg, #1c2c3f 0%, #141f2c 100%); border-bottom-color: rgba(74, 163, 255, 0.2); } @@ -7419,6 +7425,10 @@ body[data-mode="tscm"] { box-shadow: 0 10px 24px rgba(18, 40, 66, 0.08), inset 0 1px 0 rgba(255, 255, 255, 0.7); } +[data-theme="light"] .mode-nav-dropdown-menu { + background: #e9eef5; +} + [data-theme="light"] .run-state-strip { background: linear-gradient(180deg, rgba(245, 248, 253, 0.97) 0%, rgba(238, 243, 250, 0.98) 100%); border-color: rgba(31, 95, 168, 0.18); @@ -7445,7 +7455,7 @@ body[data-mode="tscm"] { } [data-theme="light"] .section h3 { - background: linear-gradient(180deg, rgba(235, 241, 250, 0.92) 0%, rgba(228, 236, 248, 0.94) 100%); + background: linear-gradient(180deg, #ebf1fa 0%, #e4ecf8 100%); border-bottom-color: rgba(31, 95, 168, 0.14); } diff --git a/static/css/responsive.css b/static/css/responsive.css index d45f4d1..de99fa3 100644 --- a/static/css/responsive.css +++ b/static/css/responsive.css @@ -25,20 +25,20 @@ --font-2xl: clamp(24px, 6vw, 40px); /* Header height for calculations */ - --header-height: 52px; + --header-height: 48px; --nav-height: 44px; } @media (min-width: 768px) { :root { - --header-height: 60px; + --header-height: 48px; --nav-height: 48px; } } @media (min-width: 1024px) { :root 
{ - --header-height: 96px; + --header-height: 48px; --nav-height: 0px; } } @@ -632,16 +632,17 @@ font-size: 16px; } - .app-shell header h1 .tagline, - .app-shell header h1 .version-badge { + .app-shell header h1 .tagline { display: none; } - .app-shell header .subtitle { - font-size: 10px; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; + .app-shell header .version-badge { + display: none; + } + + .app-shell header .active-mode-indicator { + font-size: 9px; + padding: 3px 8px; } .app-shell header .logo svg { @@ -691,18 +692,18 @@ } .app-shell .welcome-header { - flex-direction: column; - text-align: center; - gap: 10px; + flex-wrap: wrap; + justify-content: center; + gap: 6px 10px; } .app-shell .welcome-logo svg { - width: 50px; - height: 50px; + width: 40px; + height: 40px; } .app-shell .welcome-title { - font-size: 24px; + font-size: 1.2rem; } .app-shell .welcome-content { diff --git a/static/js/modes/bluetooth.js b/static/js/modes/bluetooth.js index bad84ae..da54180 100644 --- a/static/js/modes/bluetooth.js +++ b/static/js/modes/bluetooth.js @@ -895,6 +895,7 @@ const BluetoothMode = (function() { const isAgentMode = typeof currentAgent !== 'undefined' && currentAgent !== 'local'; + if (startBtn) startBtn.classList.add('btn-loading'); try { let response; if (isAgentMode) { @@ -943,6 +944,8 @@ const BluetoothMode = (function() { reportActionableError('Start Bluetooth Scan', err, { onRetry: () => startScan() }); + } finally { + if (startBtn) startBtn.classList.remove('btn-loading'); } } @@ -1738,21 +1741,34 @@ const BluetoothMode = (function() { } function doLocateHandoff(device) { - console.log('[BT] doLocateHandoff, BtLocate defined:', typeof BtLocate !== 'undefined'); + const payload = { + device_id: device.device_id, + device_key: device.device_key || null, + mac_address: device.address, + address_type: device.address_type || null, + irk_hex: device.irk_hex || null, + known_name: device.name || null, + known_manufacturer: 
device.manufacturer_name || null, + last_known_rssi: device.rssi_current, + tx_power: device.tx_power || null, + appearance_name: device.appearance_name || null, + fingerprint_id: device.fingerprint_id || device.fingerprint?.id || null, + mac_cluster_count: device.mac_cluster_count || 0 + }; + + // If BtLocate is already loaded, hand off directly if (typeof BtLocate !== 'undefined') { - BtLocate.handoff({ - device_id: device.device_id, - device_key: device.device_key || null, - mac_address: device.address, - address_type: device.address_type || null, - irk_hex: device.irk_hex || null, - known_name: device.name || null, - known_manufacturer: device.manufacturer_name || null, - last_known_rssi: device.rssi_current, - tx_power: device.tx_power || null, - appearance_name: device.appearance_name || null, - fingerprint_id: device.fingerprint_id || device.fingerprint?.id || null, - mac_cluster_count: device.mac_cluster_count || 0 + BtLocate.handoff(payload); + return; + } + + // Switch to bt_locate mode first — this loads the script, styles, + // and initializes the module. Then hand off the device data. 
+ if (typeof switchMode === 'function') { + switchMode('bt_locate').then(function() { + if (typeof BtLocate !== 'undefined') { + BtLocate.handoff(payload); + } }); } } diff --git a/static/js/modes/meshtastic.js b/static/js/modes/meshtastic.js index 80d40c6..e017ad1 100644 --- a/static/js/modes/meshtastic.js +++ b/static/js/modes/meshtastic.js @@ -110,19 +110,27 @@ const Meshtastic = (function() { meshMap = L.map('meshMap').setView([defaultLat, defaultLon], 4); window.meshMap = meshMap; - // Use settings manager for tile layer (allows runtime changes) + // Add fallback tiles immediately so the map is visible instantly + const fallbackTiles = L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { + attribution: '© OSM © CARTO', + maxZoom: 19, + subdomains: 'abcd', + className: 'tile-layer-cyan' + }).addTo(meshMap); + + // Upgrade tiles in background via Settings (with timeout fallback) if (typeof Settings !== 'undefined') { - // Wait for settings to load from server before applying tiles - await Settings.init(); - Settings.createTileLayer().addTo(meshMap); - Settings.registerMap(meshMap); - } else { - L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { - attribution: '© OSM © CARTO', - maxZoom: 19, - subdomains: 'abcd', - className: 'tile-layer-cyan' - }).addTo(meshMap); + try { + await Promise.race([ + Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + meshMap.removeLayer(fallbackTiles); + Settings.createTileLayer().addTo(meshMap); + Settings.registerMap(meshMap); + } catch (e) { + console.warn('Meshtastic: Settings init failed/timed out, using fallback tiles:', e); + } } // Handle resize diff --git a/static/js/modes/spy-stations.js b/static/js/modes/spy-stations.js index 09b4955..254f60c 100644 --- a/static/js/modes/spy-stations.js +++ b/static/js/modes/spy-stations.js @@ -280,18 +280,19 @@ const SpyStations = (function() 
{ showNotification('Tuning to ' + stationName, formatFrequency(freqKhz) + ' (' + tuneMode.toUpperCase() + ')'); } - // Switch to spectrum waterfall mode and tune after mode init. - if (typeof switchMode === 'function') { - switchMode('waterfall'); - } else if (typeof selectMode === 'function') { - selectMode('waterfall'); - } - - setTimeout(() => { + // Switch to spectrum waterfall mode and tune after init completes. + const doTune = () => { if (typeof Waterfall !== 'undefined' && typeof Waterfall.quickTune === 'function') { Waterfall.quickTune(freqMhz, tuneMode); } - }, 220); + }; + + if (typeof switchMode === 'function') { + switchMode('waterfall').then(doTune); + } else if (typeof selectMode === 'function') { + selectMode('waterfall'); + setTimeout(doTune, 300); + } } /** diff --git a/static/js/modes/sstv.js b/static/js/modes/sstv.js index 4e4a461..ca8e34a 100644 --- a/static/js/modes/sstv.js +++ b/static/js/modes/sstv.js @@ -215,18 +215,25 @@ const SSTV = (function() { }); window.issMap = issMap; - // Add tile layer using settings manager if available + // Add fallback tiles immediately so the map is visible instantly + const fallbackTiles = L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + maxZoom: 19, + className: 'tile-layer-cyan' + }).addTo(issMap); + + // Upgrade tiles in background via Settings (with timeout fallback) if (typeof Settings !== 'undefined') { - // Wait for settings to load from server before applying tiles - await Settings.init(); - Settings.createTileLayer().addTo(issMap); - Settings.registerMap(issMap); - } else { - // Fallback to dark theme tiles - L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { - maxZoom: 19, - className: 'tile-layer-cyan' - }).addTo(issMap); + try { + await Promise.race([ + Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + issMap.removeLayer(fallbackTiles); + 
Settings.createTileLayer().addTo(issMap); + Settings.registerMap(issMap); + } catch (e) { + console.warn('SSTV: Settings init failed/timed out, using fallback tiles:', e); + } } // Create ISS icon diff --git a/static/js/modes/weather-satellite.js b/static/js/modes/weather-satellite.js index 39fcdd6..14dc0d4 100644 --- a/static/js/modes/weather-satellite.js +++ b/static/js/modes/weather-satellite.js @@ -252,6 +252,8 @@ const WeatherSat = (function() { addConsoleEntry('Starting capture...', 'info'); updateStatusUI('connecting', 'Starting...'); + const startBtn = document.getElementById('weatherSatStartBtn'); + if (startBtn) startBtn.classList.add('btn-loading'); try { const config = { satellite, @@ -295,6 +297,8 @@ const WeatherSat = (function() { onRetry: () => start() }); updateStatusUI('idle', 'Error'); + } finally { + if (startBtn) startBtn.classList.remove('btn-loading'); } } @@ -445,6 +449,8 @@ const WeatherSat = (function() { }; eventSource.onerror = () => { + // Close the failed connection first to avoid leaking it + stopStream(); setTimeout(() => { if (isRunning || schedulerEnabled) startStream(); }, 3000); @@ -887,18 +893,28 @@ const WeatherSat = (function() { preferCanvas: true, }); + // Add fallback tiles immediately so the map is visible instantly + const fallbackTiles = L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + subdomains: 'abcd', + maxZoom: 18, + noWrap: false, + crossOrigin: true, + className: 'tile-layer-cyan', + }).addTo(groundMap); + + // Upgrade tiles in background via Settings (with timeout fallback) if (typeof Settings !== 'undefined' && Settings.createTileLayer) { - await Settings.init(); - Settings.createTileLayer().addTo(groundMap); - Settings.registerMap(groundMap); - } else { - L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { - subdomains: 'abcd', - maxZoom: 18, - noWrap: false, - crossOrigin: true, - className: 'tile-layer-cyan', - }).addTo(groundMap); + try { + await 
Promise.race([ + Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + groundMap.removeLayer(fallbackTiles); + Settings.createTileLayer().addTo(groundMap); + Settings.registerMap(groundMap); + } catch (e) { + console.warn('WeatherSat: Settings init failed/timed out, using fallback tiles:', e); + } } groundGridLayer = L.layerGroup().addTo(groundMap); @@ -1874,10 +1890,24 @@ const WeatherSat = (function() { } } + /** + * Unconditionally tear down the SSE stream on mode switch so we don't + * leak browser connections. The server-side capture/scheduler keeps + * running independently — the stream will reconnect on next init(). + */ + function destroy() { + if (countdownInterval) { + clearInterval(countdownInterval); + countdownInterval = null; + } + stopStream(); + } + // Public API return { init, suspend, + destroy, start, stop, startPass, diff --git a/static/js/modes/websdr.js b/static/js/modes/websdr.js index f99a6ea..2fb58f2 100644 --- a/static/js/modes/websdr.js +++ b/static/js/modes/websdr.js @@ -314,17 +314,27 @@ async function initWebsdrLeaflet(mapEl) { maxBoundsViscosity: 1.0, }); + // Add fallback tiles immediately so the map is visible instantly + const fallbackTiles = L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + attribution: '© OpenStreetMap contributors © CARTO', + subdomains: 'abcd', + maxZoom: 19, + className: 'tile-layer-cyan', + }).addTo(websdrMap); + + // Upgrade tiles in background via Settings (with timeout fallback) if (typeof Settings !== 'undefined' && Settings.createTileLayer) { - await Settings.init(); - Settings.createTileLayer().addTo(websdrMap); - Settings.registerMap(websdrMap); - } else { - L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { - attribution: '© OpenStreetMap contributors © CARTO', - subdomains: 'abcd', - maxZoom: 19, - className: 'tile-layer-cyan', - }).addTo(websdrMap); + try { + await Promise.race([ 
+ Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + websdrMap.removeLayer(fallbackTiles); + Settings.createTileLayer().addTo(websdrMap); + Settings.registerMap(websdrMap); + } catch (e) { + console.warn('WebSDR: Settings init failed/timed out, using fallback tiles:', e); + } } mapEl.style.background = '#1a1d29'; diff --git a/static/js/modes/wifi.js b/static/js/modes/wifi.js index 01c5861..787ea66 100644 --- a/static/js/modes/wifi.js +++ b/static/js/modes/wifi.js @@ -247,6 +247,8 @@ const WiFiMode = (function() { // ========================================================================== async function checkCapabilities() { + const capBtn = document.getElementById('wifiQuickScanBtn'); + if (capBtn) capBtn.classList.add('btn-loading'); try { const isAgentMode = typeof currentAgent !== 'undefined' && currentAgent !== 'local'; let response; @@ -291,6 +293,8 @@ const WiFiMode = (function() { } catch (error) { console.error('[WiFiMode] Capability check failed:', error); showCapabilityError('Failed to check WiFi capabilities'); + } finally { + if (capBtn) capBtn.classList.remove('btn-loading'); } } @@ -386,18 +390,40 @@ const WiFiMode = (function() { if (elements.scanModeDeep) { elements.scanModeDeep.addEventListener('click', () => setScanMode('deep')); } + // Arrow key navigation between tabs + const tabContainer = document.querySelector('.wifi-scan-mode-tabs'); + if (tabContainer) { + tabContainer.addEventListener('keydown', (e) => { + const tabs = Array.from(tabContainer.querySelectorAll('[role="tab"]')); + const idx = tabs.indexOf(document.activeElement); + if (idx === -1) return; + if (e.key === 'ArrowRight' || e.key === 'ArrowDown') { + e.preventDefault(); + const next = tabs[(idx + 1) % tabs.length]; + next.focus(); + next.click(); + } else if (e.key === 'ArrowLeft' || e.key === 'ArrowUp') { + e.preventDefault(); + const prev = tabs[(idx - 1 + tabs.length) % tabs.length]; + prev.focus(); + 
prev.click(); + } + }); + } listenersBound.scanTabs = true; } function setScanMode(mode) { scanMode = mode; - // Update tab UI + // Update tab UI and ARIA states if (elements.scanModeQuick) { elements.scanModeQuick.classList.toggle('active', mode === 'quick'); + elements.scanModeQuick.setAttribute('aria-selected', mode === 'quick' ? 'true' : 'false'); } if (elements.scanModeDeep) { elements.scanModeDeep.classList.toggle('active', mode === 'deep'); + elements.scanModeDeep.setAttribute('aria-selected', mode === 'deep' ? 'true' : 'false'); } console.log('[WiFiMode] Scan mode set to:', mode); @@ -416,6 +442,7 @@ const WiFiMode = (function() { } console.log('[WiFiMode] Starting quick scan...'); + if (elements.quickScanBtn) elements.quickScanBtn.classList.add('btn-loading'); setScanning(true, 'quick'); try { @@ -496,6 +523,8 @@ const WiFiMode = (function() { console.error('[WiFiMode] Quick scan error:', error); showError(error.message + '. Try using Deep Scan instead.'); setScanning(false); + } finally { + if (elements.quickScanBtn) elements.quickScanBtn.classList.remove('btn-loading'); } } @@ -508,6 +537,7 @@ const WiFiMode = (function() { } console.log('[WiFiMode] Starting deep scan...'); + if (elements.deepScanBtn) elements.deepScanBtn.classList.add('btn-loading'); setScanning(true, 'deep'); try { @@ -569,6 +599,8 @@ const WiFiMode = (function() { console.error('[WiFiMode] Deep scan error:', error); showError(error.message); setScanning(false); + } finally { + if (elements.deepScanBtn) elements.deepScanBtn.classList.remove('btn-loading'); } } diff --git a/templates/adsb_dashboard.html b/templates/adsb_dashboard.html index dbd08c6..9e6c6da 100644 --- a/templates/adsb_dashboard.html +++ b/templates/adsb_dashboard.html @@ -4,32 +4,45 @@ AIRCRAFT RADAR // INTERCEPT - See the Invisible + + {% if offline_settings.assets_source != 'local' %} + + {% endif %} + {% if offline_settings.fonts_source != 'local' %} + + + {% endif %} + {% if offline_settings.fonts_source == 'local' 
%} {% else %} {% endif %} - + {% if offline_settings.assets_source == 'local' %} - {% else %} - {% endif %} - + + - + {% if offline_settings.assets_source == 'local' %} + + {% else %} + + {% endif %} +
@@ -1716,6 +1729,12 @@ ACARS: ${r.statistics.acarsMessages} messages`; // ============================================ // INITIALIZATION // ============================================ + // Clean up SSE connections on page unload to prevent orphaned streams + window.addEventListener('pagehide', function() { + if (eventSource) { eventSource.close(); eventSource = null; } + if (gpsEventSource) { gpsEventSource.close(); gpsEventSource = null; } + }); + document.addEventListener('DOMContentLoaded', () => { // Initialize observer location input fields from saved location const obsLatInput = document.getElementById('obsLat'); @@ -2078,6 +2097,10 @@ sudo make install } async function initMap() { + // Guard against double initialization (e.g. bfcache restore) + const container = document.getElementById('radarMap'); + if (!container || container._leaflet_id) return; + radarMap = L.map('radarMap', { center: [observerLocation.lat, observerLocation.lon], zoom: 7, @@ -2087,19 +2110,14 @@ sudo make install // Use settings manager for tile layer (allows runtime changes) window.radarMap = radarMap; - if (typeof Settings !== 'undefined') { - // Wait for settings to load from server before applying tiles - await Settings.init(); - Settings.createTileLayer().addTo(radarMap); - Settings.registerMap(radarMap); - } else { - L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { - attribution: '© OSM © CARTO', - maxZoom: 19, - subdomains: 'abcd', - className: 'tile-layer-cyan' - }).addTo(radarMap); - } + + // Add fallback tiles immediately so the map is never blank + const fallbackTiles = L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { + attribution: '© OSM © CARTO', + maxZoom: 19, + subdomains: 'abcd', + className: 'tile-layer-cyan' + }).addTo(radarMap); // Draw range rings after map is ready setTimeout(() => drawRangeRings(), 100); @@ -2113,6 +2131,21 @@ sudo make install setTimeout(() => { if (radarMap) 
radarMap.invalidateSize(); }, 500); + + // Upgrade tiles via Settings in the background (non-blocking) + if (typeof Settings !== 'undefined') { + try { + await Promise.race([ + Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + radarMap.removeLayer(fallbackTiles); + Settings.createTileLayer().addTo(radarMap); + Settings.registerMap(radarMap); + } catch (e) { + console.warn('Settings init failed/timed out, using fallback tiles:', e); + } + } } // Handle window resize for map (especially important on mobile) @@ -5917,5 +5950,6 @@ sudo make install } } + diff --git a/templates/ais_dashboard.html b/templates/ais_dashboard.html index bc9db96..0ae4acb 100644 --- a/templates/ais_dashboard.html +++ b/templates/ais_dashboard.html @@ -4,31 +4,44 @@ VESSEL RADAR // INTERCEPT - See the Invisible + + {% if offline_settings.assets_source != 'local' %} + + {% endif %} + {% if offline_settings.fonts_source != 'local' %} + + + {% endif %} + {% if offline_settings.fonts_source == 'local' %} {% else %} {% endif %} - + {% if offline_settings.assets_source == 'local' %} - {% else %} - {% endif %} - + + - + {% if offline_settings.assets_source == 'local' %} + + {% else %} + + {% endif %} + @@ -393,6 +406,10 @@ // Initialize map async function initMap() { + // Guard against double initialization (e.g. 
bfcache restore) + const container = document.getElementById('vesselMap'); + if (!container || container._leaflet_id) return; + if (observerLocation) { document.getElementById('obsLat').value = observerLocation.lat; document.getElementById('obsLon').value = observerLocation.lon; @@ -406,18 +423,29 @@ // Use settings manager for tile layer (allows runtime changes) window.vesselMap = vesselMap; + + // Add fallback tile layer immediately so the map is never blank + const fallbackTiles = L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { + attribution: '© OSM © CARTO', + maxZoom: 19, + subdomains: 'abcd', + className: 'tile-layer-cyan' + }).addTo(vesselMap); + + // Then try to upgrade tiles via Settings (non-blocking) if (typeof Settings !== 'undefined') { - // Wait for settings to load from server before applying tiles - await Settings.init(); - Settings.createTileLayer().addTo(vesselMap); - Settings.registerMap(vesselMap); - } else { - L.tileLayer('https://cartodb-basemaps-{s}.global.ssl.fastly.net/dark_all/{z}/{x}/{y}.png', { - attribution: '© OSM © CARTO', - maxZoom: 19, - subdomains: 'abcd', - className: 'tile-layer-cyan' - }).addTo(vesselMap); + try { + await Promise.race([ + Settings.init(), + new Promise((_, reject) => setTimeout(() => reject(new Error('Settings timeout')), 5000)) + ]); + vesselMap.removeLayer(fallbackTiles); + Settings.createTileLayer().addTo(vesselMap); + Settings.registerMap(vesselMap); + } catch (e) { + console.warn('Settings init failed/timed out, using fallback tiles:', e); + // fallback tiles already added above + } } // Add observer marker @@ -547,7 +575,7 @@ } } - function startTracking() { + async function startTracking() { const device = document.getElementById('aisDeviceSelect').value; const gain = document.getElementById('aisGain').value; @@ -1502,6 +1530,13 @@ // Auto-connect to gpsd if available autoConnectGps(); }); + + // Clean up SSE connections on page unload to prevent orphaned 
streams + window.addEventListener('pagehide', function() { + if (eventSource) { eventSource.close(); eventSource = null; } + if (dscEventSource) { dscEventSource.close(); dscEventSource = null; } + if (gpsEventSource) { gpsEventSource.close(); gpsEventSource = null; } + }); diff --git a/templates/index.html b/templates/index.html index 9599930..f156887 100644 --- a/templates/index.html +++ b/templates/index.html @@ -11,6 +11,16 @@ + + {% if offline_settings.assets_source != 'local' %} + + + {% endif %} + {% if offline_settings.fonts_source != 'local' %} + + + {% endif %} + - {% if offline_settings.fonts_source == 'local' %} {% else %} {% endif %} - + {% if offline_settings.assets_source == 'local' %} - - {% else %} - - {% endif %} - - {% if offline_settings.assets_source == 'local' %} - - {% else %} - - {% endif %} - - + @@ -70,6 +68,21 @@ + + + {% if offline_settings.assets_source == 'local' %} + + + {% else %} + + + {% endif %} + {% if offline_settings.assets_source == 'local' %} + + {% else %} + + {% endif %} + + @@ -203,16 +271,10 @@
-
-
-

iNTERCEPT

-

// See the Invisible

- v{{ version }} -
+

iNTERCEPT

+

// See the Invisible

+ v{{ version }} +
@@ -407,6 +473,7 @@
@@ -490,42 +557,44 @@
- - - -

iNTERCEPT // See the Invisible v{{ version - }}

-

Signal Intelligence & Counter Surveillance Platform PAGER

- +
+ + + +

iNTERCEPT // See the Invisible

+
+
+ PAGER + v{{ version }} +
@@ -894,7 +963,7 @@ Network Name 00:00:00:00:00:00
- @@ -1346,7 +1415,7 @@ -
+