diff --git a/routes/listening_post.py b/routes/listening_post.py
index c2282f8..1ae8428 100644
--- a/routes/listening_post.py
+++ b/routes/listening_post.py
@@ -38,15 +38,15 @@ receiver_bp = Blueprint('receiver', __name__, url_prefix='/receiver')
# ============================================
# Audio demodulation state
-audio_process = None
-audio_rtl_process = None
-audio_lock = threading.Lock()
-audio_start_lock = threading.Lock()
-audio_running = False
-audio_frequency = 0.0
-audio_modulation = 'fm'
-audio_source = 'process'
-audio_start_token = 0
+audio_process = None
+audio_rtl_process = None
+audio_lock = threading.Lock()
+audio_start_lock = threading.Lock()
+audio_running = False
+audio_frequency = 0.0
+audio_modulation = 'fm'
+audio_source = 'process'
+audio_start_token = 0
# Scanner state
scanner_thread: Optional[threading.Thread] = None
@@ -665,238 +665,244 @@ def scanner_loop_power():
logger.info("Power sweep scanner thread stopped")
-def _start_audio_stream(
- frequency: float,
- modulation: str,
- *,
- device: int | None = None,
- sdr_type: str | None = None,
- gain: int | None = None,
- squelch: int | None = None,
- bias_t: bool | None = None,
-):
- """Start audio streaming at given frequency."""
- global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_modulation
-
- with audio_lock:
- # Stop any existing stream
- _stop_audio_stream_internal()
+def _start_audio_stream(
+ frequency: float,
+ modulation: str,
+ *,
+ device: int | None = None,
+ sdr_type: str | None = None,
+ gain: int | None = None,
+ squelch: int | None = None,
+ bias_t: bool | None = None,
+):
+ """Start audio streaming at given frequency."""
+ global audio_process, audio_rtl_process, audio_running, audio_frequency, audio_modulation
+
+    # Stop any existing stream under lock; config is snapshotted just below
+ with audio_lock:
+ _stop_audio_stream_internal()
ffmpeg_path = find_ffmpeg()
if not ffmpeg_path:
logger.error("ffmpeg not found")
return
- # Snapshot runtime tuning config so the spawned demod command cannot
- # drift if shared scanner_config changes while startup is in-flight.
- device_index = int(device if device is not None else scanner_config.get('device', 0))
- gain_value = int(gain if gain is not None else scanner_config.get('gain', 40))
- squelch_value = int(squelch if squelch is not None else scanner_config.get('squelch', 0))
- bias_t_enabled = bool(scanner_config.get('bias_t', False) if bias_t is None else bias_t)
- sdr_type_str = str(sdr_type if sdr_type is not None else scanner_config.get('sdr_type', 'rtlsdr')).lower()
-
- # Determine SDR type and build appropriate command
- try:
- sdr_type = SDRType(sdr_type_str)
- except ValueError:
- sdr_type = SDRType.RTL_SDR
+ # Snapshot runtime tuning config so the spawned demod command cannot
+ # drift if shared scanner_config changes while startup is in-flight.
+ device_index = int(device if device is not None else scanner_config.get('device', 0))
+ gain_value = int(gain if gain is not None else scanner_config.get('gain', 40))
+ squelch_value = int(squelch if squelch is not None else scanner_config.get('squelch', 0))
+ bias_t_enabled = bool(scanner_config.get('bias_t', False) if bias_t is None else bias_t)
+ sdr_type_str = str(sdr_type if sdr_type is not None else scanner_config.get('sdr_type', 'rtlsdr')).lower()
- # Set sample rates based on modulation
- if modulation == 'wfm':
- sample_rate = 170000
- resample_rate = 32000
- elif modulation in ['usb', 'lsb']:
- sample_rate = 12000
- resample_rate = 12000
- else:
- sample_rate = 24000
- resample_rate = 24000
+ # Build commands outside lock (no blocking I/O, just command construction)
+ try:
+ resolved_sdr_type = SDRType(sdr_type_str)
+ except ValueError:
+ resolved_sdr_type = SDRType.RTL_SDR
- # Build the SDR command based on device type
- if sdr_type == SDRType.RTL_SDR:
- # Use rtl_fm for RTL-SDR devices
- rtl_fm_path = find_rtl_fm()
- if not rtl_fm_path:
- logger.error("rtl_fm not found")
- return
+ # Set sample rates based on modulation
+ if modulation == 'wfm':
+ sample_rate = 170000
+ resample_rate = 32000
+ elif modulation in ['usb', 'lsb']:
+ sample_rate = 12000
+ resample_rate = 12000
+ else:
+ sample_rate = 24000
+ resample_rate = 24000
- freq_hz = int(frequency * 1e6)
- sdr_cmd = [
- rtl_fm_path,
- '-M', _rtl_fm_demod_mode(modulation),
- '-f', str(freq_hz),
- '-s', str(sample_rate),
- '-r', str(resample_rate),
- '-g', str(gain_value),
- '-d', str(device_index),
- '-l', str(squelch_value),
- ]
- if bias_t_enabled:
- sdr_cmd.append('-T')
- # Omit explicit filename: rtl_fm defaults to stdout.
- # (Some builds intermittently stall when '-' is passed explicitly.)
- else:
- # Use SDR abstraction layer for HackRF, Airspy, LimeSDR, SDRPlay
- rx_fm_path = find_rx_fm()
- if not rx_fm_path:
- logger.error(f"rx_fm not found - required for {sdr_type.value}. Install SoapySDR utilities.")
- return
-
- # Create device and get command builder
- sdr_device = SDRFactory.create_default_device(sdr_type, index=device_index)
- builder = SDRFactory.get_builder(sdr_type)
-
- # Build FM demod command
- sdr_cmd = builder.build_fm_demod_command(
- device=sdr_device,
- frequency_mhz=frequency,
- sample_rate=resample_rate,
- gain=float(gain_value),
- modulation=modulation,
- squelch=squelch_value,
- bias_t=bias_t_enabled,
- )
- # Ensure we use the found rx_fm path
- sdr_cmd[0] = rx_fm_path
+ # Build the SDR command based on device type
+ if resolved_sdr_type == SDRType.RTL_SDR:
+ rtl_fm_path = find_rtl_fm()
+ if not rtl_fm_path:
+ logger.error("rtl_fm not found")
+ return
- encoder_cmd = [
- ffmpeg_path,
- '-hide_banner',
- '-loglevel', 'error',
- '-fflags', 'nobuffer',
- '-flags', 'low_delay',
- '-probesize', '32',
- '-analyzeduration', '0',
- '-f', 's16le',
- '-ar', str(resample_rate),
- '-ac', '1',
- '-i', 'pipe:0',
- '-acodec', 'pcm_s16le',
- '-ar', '44100',
- '-f', 'wav',
- 'pipe:1'
+ freq_hz = int(frequency * 1e6)
+ sdr_cmd = [
+ rtl_fm_path,
+ '-M', _rtl_fm_demod_mode(modulation),
+ '-f', str(freq_hz),
+ '-s', str(sample_rate),
+ '-r', str(resample_rate),
+ '-g', str(gain_value),
+ '-d', str(device_index),
+ '-l', str(squelch_value),
]
+ if bias_t_enabled:
+ sdr_cmd.append('-T')
+ else:
+ rx_fm_path = find_rx_fm()
+ if not rx_fm_path:
+ logger.error(f"rx_fm not found - required for {resolved_sdr_type.value}. Install SoapySDR utilities.")
+ return
- try:
- # Use subprocess piping for reliable streaming.
- # Log stderr to temp files for error diagnosis.
- rtl_stderr_log = '/tmp/rtl_fm_stderr.log'
- ffmpeg_stderr_log = '/tmp/ffmpeg_stderr.log'
- logger.info(f"Starting audio: {frequency} MHz, mod={modulation}, device={device_index}")
+ sdr_device = SDRFactory.create_default_device(resolved_sdr_type, index=device_index)
+ builder = SDRFactory.get_builder(resolved_sdr_type)
+ sdr_cmd = builder.build_fm_demod_command(
+ device=sdr_device,
+ frequency_mhz=frequency,
+ sample_rate=resample_rate,
+ gain=float(gain_value),
+ modulation=modulation,
+ squelch=squelch_value,
+ bias_t=bias_t_enabled,
+ )
+ sdr_cmd[0] = rx_fm_path
- # Retry loop for USB device contention (device may not be
- # released immediately after a previous process exits)
- max_attempts = 3
- for attempt in range(max_attempts):
- audio_rtl_process = None
- audio_process = None
- rtl_err_handle = None
- ffmpeg_err_handle = None
- try:
- rtl_err_handle = open(rtl_stderr_log, 'w')
- ffmpeg_err_handle = open(ffmpeg_stderr_log, 'w')
- audio_rtl_process = subprocess.Popen(
- sdr_cmd,
- stdout=subprocess.PIPE,
- stderr=rtl_err_handle,
- bufsize=0,
- start_new_session=True # Create new process group for clean shutdown
- )
- audio_process = subprocess.Popen(
- encoder_cmd,
- stdin=audio_rtl_process.stdout,
- stdout=subprocess.PIPE,
- stderr=ffmpeg_err_handle,
- bufsize=0,
- start_new_session=True # Create new process group for clean shutdown
- )
- if audio_rtl_process.stdout:
- audio_rtl_process.stdout.close()
- finally:
- if rtl_err_handle:
- rtl_err_handle.close()
- if ffmpeg_err_handle:
- ffmpeg_err_handle.close()
+ encoder_cmd = [
+ ffmpeg_path,
+ '-hide_banner',
+ '-loglevel', 'error',
+ '-fflags', 'nobuffer',
+ '-flags', 'low_delay',
+ '-probesize', '32',
+ '-analyzeduration', '0',
+ '-f', 's16le',
+ '-ar', str(resample_rate),
+ '-ac', '1',
+ '-i', 'pipe:0',
+ '-acodec', 'pcm_s16le',
+ '-ar', '44100',
+ '-f', 'wav',
+ 'pipe:1'
+ ]
- # Brief delay to check if process started successfully
- time.sleep(0.3)
+ # Retry loop outside lock — spawning + health check sleeps don't block
+ # other operations. audio_start_lock already serializes callers.
+ try:
+ rtl_stderr_log = '/tmp/rtl_fm_stderr.log'
+ ffmpeg_stderr_log = '/tmp/ffmpeg_stderr.log'
+ logger.info(f"Starting audio: {frequency} MHz, mod={modulation}, device={device_index}")
- if (audio_rtl_process and audio_rtl_process.poll() is not None) or (
- audio_process and audio_process.poll() is not None
- ):
- # Read stderr from temp files
- rtl_stderr = ''
- ffmpeg_stderr = ''
- try:
- with open(rtl_stderr_log, 'r') as f:
- rtl_stderr = f.read().strip()
- except Exception:
- pass
- try:
- with open(ffmpeg_stderr_log, 'r') as f:
- ffmpeg_stderr = f.read().strip()
- except Exception:
- pass
+ new_rtl_proc = None
+ new_audio_proc = None
+ max_attempts = 3
+ for attempt in range(max_attempts):
+ new_rtl_proc = None
+ new_audio_proc = None
+ rtl_err_handle = None
+ ffmpeg_err_handle = None
+ try:
+ rtl_err_handle = open(rtl_stderr_log, 'w')
+ ffmpeg_err_handle = open(ffmpeg_stderr_log, 'w')
+ new_rtl_proc = subprocess.Popen(
+ sdr_cmd,
+ stdout=subprocess.PIPE,
+ stderr=rtl_err_handle,
+ bufsize=0,
+ start_new_session=True
+ )
+ new_audio_proc = subprocess.Popen(
+ encoder_cmd,
+ stdin=new_rtl_proc.stdout,
+ stdout=subprocess.PIPE,
+ stderr=ffmpeg_err_handle,
+ bufsize=0,
+ start_new_session=True
+ )
+ if new_rtl_proc.stdout:
+ new_rtl_proc.stdout.close()
+ finally:
+ if rtl_err_handle:
+ rtl_err_handle.close()
+ if ffmpeg_err_handle:
+ ffmpeg_err_handle.close()
- if 'usb_claim_interface' in rtl_stderr and attempt < max_attempts - 1:
- logger.warning(f"USB device busy (attempt {attempt + 1}/{max_attempts}), waiting for release...")
- if audio_process:
- try:
- audio_process.terminate()
- audio_process.wait(timeout=0.5)
- except Exception:
- pass
- if audio_rtl_process:
- try:
- audio_rtl_process.terminate()
- audio_rtl_process.wait(timeout=0.5)
- except Exception:
- pass
- time.sleep(1.0)
- continue
+ # Brief delay to check if process started successfully
+ time.sleep(0.3)
- if audio_process and audio_process.poll() is None:
- try:
- audio_process.terminate()
- audio_process.wait(timeout=0.5)
- except Exception:
- pass
- if audio_rtl_process and audio_rtl_process.poll() is None:
- try:
- audio_rtl_process.terminate()
- audio_rtl_process.wait(timeout=0.5)
- except Exception:
- pass
- audio_process = None
- audio_rtl_process = None
-
- logger.error(
- f"Audio pipeline exited immediately. rtl_fm stderr: {rtl_stderr}, ffmpeg stderr: {ffmpeg_stderr}"
- )
- return
-
- # Pipeline started successfully
- break
-
- # Keep monitor startup tolerant: some demod chains can take
- # several seconds before producing stream bytes.
- if (
- not audio_process
- or not audio_rtl_process
- or audio_process.poll() is not None
- or audio_rtl_process.poll() is not None
+ if (new_rtl_proc and new_rtl_proc.poll() is not None) or (
+ new_audio_proc and new_audio_proc.poll() is not None
):
- logger.warning("Audio pipeline did not remain alive after startup")
- _stop_audio_stream_internal()
+ rtl_stderr = ''
+ ffmpeg_stderr = ''
+ try:
+ with open(rtl_stderr_log, 'r') as f:
+ rtl_stderr = f.read().strip()
+ except Exception:
+ pass
+ try:
+ with open(ffmpeg_stderr_log, 'r') as f:
+ ffmpeg_stderr = f.read().strip()
+ except Exception:
+ pass
+
+ if 'usb_claim_interface' in rtl_stderr and attempt < max_attempts - 1:
+ logger.warning(f"USB device busy (attempt {attempt + 1}/{max_attempts}), waiting for release...")
+ if new_audio_proc:
+ try:
+ new_audio_proc.terminate()
+ new_audio_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ if new_rtl_proc:
+ try:
+ new_rtl_proc.terminate()
+ new_rtl_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ time.sleep(1.0)
+ continue
+
+ if new_audio_proc and new_audio_proc.poll() is None:
+ try:
+ new_audio_proc.terminate()
+ new_audio_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ if new_rtl_proc and new_rtl_proc.poll() is None:
+ try:
+ new_rtl_proc.terminate()
+ new_rtl_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ new_audio_proc = None
+ new_rtl_proc = None
+
+ logger.error(
+ f"Audio pipeline exited immediately. rtl_fm stderr: {rtl_stderr}, ffmpeg stderr: {ffmpeg_stderr}"
+ )
return
+ # Pipeline started successfully
+ break
+
+ # Verify pipeline is still alive, then install under lock
+ if (
+ not new_audio_proc
+ or not new_rtl_proc
+ or new_audio_proc.poll() is not None
+ or new_rtl_proc.poll() is not None
+ ):
+ logger.warning("Audio pipeline did not remain alive after startup")
+ # Clean up failed processes
+ if new_audio_proc:
+ try:
+ new_audio_proc.terminate()
+ new_audio_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ if new_rtl_proc:
+ try:
+ new_rtl_proc.terminate()
+ new_rtl_proc.wait(timeout=0.5)
+ except Exception:
+ pass
+ return
+
+ # Install processes under lock
+ with audio_lock:
+ audio_rtl_process = new_rtl_proc
+ audio_process = new_audio_proc
audio_running = True
audio_frequency = frequency
audio_modulation = modulation
- logger.info(f"Audio stream started: {frequency} MHz ({modulation}) via {sdr_type.value}")
+ logger.info(f"Audio stream started: {frequency} MHz ({modulation}) via {resolved_sdr_type.value}")
- except Exception as e:
- logger.error(f"Failed to start audio stream: {e}")
+ except Exception as e:
+ logger.error(f"Failed to start audio stream: {e}")
def _stop_audio_stream():
@@ -1287,211 +1293,223 @@ def get_presets() -> Response:
# MANUAL AUDIO ENDPOINTS (for direct listening)
# ============================================
-@receiver_bp.route('/audio/start', methods=['POST'])
-def start_audio() -> Response:
- """Start audio at specific frequency (manual mode)."""
- global scanner_running, scanner_active_device, receiver_active_device, scanner_power_process, scanner_thread
- global audio_running, audio_frequency, audio_modulation, audio_source, audio_start_token
-
- data = request.json or {}
-
- try:
- frequency = float(data.get('frequency', 0))
- modulation = normalize_modulation(data.get('modulation', 'wfm'))
- squelch = int(data.get('squelch', 0))
- gain = int(data.get('gain', 40))
- device = int(data.get('device', 0))
- sdr_type = str(data.get('sdr_type', 'rtlsdr')).lower()
- request_token_raw = data.get('request_token')
- request_token = int(request_token_raw) if request_token_raw is not None else None
- bias_t_raw = data.get('bias_t', scanner_config.get('bias_t', False))
- if isinstance(bias_t_raw, str):
- bias_t = bias_t_raw.strip().lower() in {'1', 'true', 'yes', 'on'}
- else:
- bias_t = bool(bias_t_raw)
- except (ValueError, TypeError) as e:
- return jsonify({
- 'status': 'error',
- 'message': f'Invalid parameter: {e}'
- }), 400
-
- if frequency <= 0:
- return jsonify({
- 'status': 'error',
- 'message': 'frequency is required'
- }), 400
-
- valid_sdr_types = ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay']
- if sdr_type not in valid_sdr_types:
- return jsonify({
- 'status': 'error',
- 'message': f'Invalid sdr_type. Use: {", ".join(valid_sdr_types)}'
- }), 400
-
- with audio_start_lock:
- if request_token is not None:
- if request_token < audio_start_token:
- return jsonify({
- 'status': 'stale',
- 'message': 'Superseded audio start request',
- 'source': audio_source,
- 'superseded': True,
- 'current_token': audio_start_token,
- }), 409
- audio_start_token = request_token
- else:
- audio_start_token += 1
- request_token = audio_start_token
-
- # Stop scanner if running
- if scanner_running:
- scanner_running = False
- if scanner_active_device is not None:
- app_module.release_sdr_device(scanner_active_device)
- scanner_active_device = None
- if scanner_thread and scanner_thread.is_alive():
- try:
- scanner_thread.join(timeout=2.0)
- except Exception:
- pass
- if scanner_power_process and scanner_power_process.poll() is None:
- try:
- scanner_power_process.terminate()
- scanner_power_process.wait(timeout=1)
- except Exception:
- try:
- scanner_power_process.kill()
- except Exception:
- pass
- scanner_power_process = None
- try:
- subprocess.run(['pkill', '-9', 'rtl_power'], capture_output=True, timeout=0.5)
- except Exception:
- pass
- time.sleep(0.5)
-
- # Update config for audio
- scanner_config['squelch'] = squelch
- scanner_config['gain'] = gain
- scanner_config['device'] = device
- scanner_config['sdr_type'] = sdr_type
- scanner_config['bias_t'] = bias_t
-
- # Preferred path: when waterfall WebSocket is active on the same SDR,
- # derive monitor audio from that IQ stream instead of spawning rtl_fm.
- try:
- from routes.waterfall_websocket import (
- get_shared_capture_status,
- start_shared_monitor_from_capture,
- )
-
- shared = get_shared_capture_status()
- if shared.get('running') and shared.get('device') == device:
- _stop_audio_stream()
- ok, msg = start_shared_monitor_from_capture(
- device=device,
- frequency_mhz=frequency,
- modulation=modulation,
- squelch=squelch,
- )
- if ok:
- audio_running = True
- audio_frequency = frequency
- audio_modulation = modulation
- audio_source = 'waterfall'
- # Shared monitor uses the waterfall's existing SDR claim.
- if receiver_active_device is not None:
- app_module.release_sdr_device(receiver_active_device)
- receiver_active_device = None
- return jsonify({
- 'status': 'started',
- 'frequency': frequency,
- 'modulation': modulation,
- 'source': 'waterfall',
- 'request_token': request_token,
- })
- logger.warning(f"Shared waterfall monitor unavailable: {msg}")
- except Exception as e:
- logger.debug(f"Shared waterfall monitor probe failed: {e}")
-
- # Stop waterfall if it's using the same SDR (SSE path)
- if waterfall_running and waterfall_active_device == device:
- _stop_waterfall_internal()
- time.sleep(0.2)
-
- # Claim device for listening audio. The WebSocket waterfall handler
- # may still be tearing down its IQ capture process (thread join +
- # safe_terminate can take several seconds), so we retry with back-off
- # to give the USB device time to be fully released.
- if receiver_active_device is None or receiver_active_device != device:
- if receiver_active_device is not None:
- app_module.release_sdr_device(receiver_active_device)
- receiver_active_device = None
-
- error = None
- max_claim_attempts = 6
- for attempt in range(max_claim_attempts):
- error = app_module.claim_sdr_device(device, 'receiver')
- if not error:
- break
- if attempt < max_claim_attempts - 1:
- logger.debug(
- f"Device claim attempt {attempt + 1}/{max_claim_attempts} "
- f"failed, retrying in 0.5s: {error}"
- )
- time.sleep(0.5)
-
- if error:
- return jsonify({
- 'status': 'error',
- 'error_type': 'DEVICE_BUSY',
- 'message': error
- }), 409
- receiver_active_device = device
-
- _start_audio_stream(
- frequency,
- modulation,
- device=device,
- sdr_type=sdr_type,
- gain=gain,
- squelch=squelch,
- bias_t=bias_t,
- )
-
- if audio_running:
- audio_source = 'process'
- return jsonify({
- 'status': 'started',
- 'frequency': audio_frequency,
- 'modulation': audio_modulation,
- 'source': 'process',
- 'request_token': request_token,
- })
-
- # Avoid leaving a stale device claim after startup failure.
- if receiver_active_device is not None:
- app_module.release_sdr_device(receiver_active_device)
- receiver_active_device = None
-
- start_error = ''
- for log_path in ('/tmp/rtl_fm_stderr.log', '/tmp/ffmpeg_stderr.log'):
- try:
- with open(log_path, 'r') as handle:
- content = handle.read().strip()
- if content:
- start_error = content.splitlines()[-1]
- break
- except Exception:
- continue
-
- message = 'Failed to start audio. Check SDR device.'
- if start_error:
- message = f'Failed to start audio: {start_error}'
- return jsonify({
- 'status': 'error',
- 'message': message
- }), 500
+@receiver_bp.route('/audio/start', methods=['POST'])
+def start_audio() -> Response:
+ """Start audio at specific frequency (manual mode)."""
+ global scanner_running, scanner_active_device, receiver_active_device, scanner_power_process, scanner_thread
+ global audio_running, audio_frequency, audio_modulation, audio_source, audio_start_token
+
+ data = request.json or {}
+
+ try:
+ frequency = float(data.get('frequency', 0))
+ modulation = normalize_modulation(data.get('modulation', 'wfm'))
+ squelch = int(data.get('squelch', 0))
+ gain = int(data.get('gain', 40))
+ device = int(data.get('device', 0))
+ sdr_type = str(data.get('sdr_type', 'rtlsdr')).lower()
+ request_token_raw = data.get('request_token')
+ request_token = int(request_token_raw) if request_token_raw is not None else None
+ bias_t_raw = data.get('bias_t', scanner_config.get('bias_t', False))
+ if isinstance(bias_t_raw, str):
+ bias_t = bias_t_raw.strip().lower() in {'1', 'true', 'yes', 'on'}
+ else:
+ bias_t = bool(bias_t_raw)
+ except (ValueError, TypeError) as e:
+ return jsonify({
+ 'status': 'error',
+ 'message': f'Invalid parameter: {e}'
+ }), 400
+
+ if frequency <= 0:
+ return jsonify({
+ 'status': 'error',
+ 'message': 'frequency is required'
+ }), 400
+
+ valid_sdr_types = ['rtlsdr', 'hackrf', 'airspy', 'limesdr', 'sdrplay']
+ if sdr_type not in valid_sdr_types:
+ return jsonify({
+ 'status': 'error',
+ 'message': f'Invalid sdr_type. Use: {", ".join(valid_sdr_types)}'
+ }), 400
+
+ with audio_start_lock:
+ if request_token is not None:
+ if request_token < audio_start_token:
+ return jsonify({
+ 'status': 'stale',
+ 'message': 'Superseded audio start request',
+ 'source': audio_source,
+ 'superseded': True,
+ 'current_token': audio_start_token,
+ }), 409
+ audio_start_token = request_token
+ else:
+ audio_start_token += 1
+ request_token = audio_start_token
+
+ # Grab scanner refs inside lock, signal stop, clear state
+ need_scanner_teardown = False
+ scanner_thread_ref = None
+ scanner_proc_ref = None
+ if scanner_running:
+ scanner_running = False
+ if scanner_active_device is not None:
+ app_module.release_sdr_device(scanner_active_device)
+ scanner_active_device = None
+ scanner_thread_ref = scanner_thread
+ scanner_proc_ref = scanner_power_process
+ scanner_power_process = None
+ need_scanner_teardown = True
+
+ # Update config for audio
+ scanner_config['squelch'] = squelch
+ scanner_config['gain'] = gain
+ scanner_config['device'] = device
+ scanner_config['sdr_type'] = sdr_type
+ scanner_config['bias_t'] = bias_t
+
+ # Scanner teardown outside lock (blocking: thread join, process wait, pkill, sleep)
+ if need_scanner_teardown:
+ if scanner_thread_ref and scanner_thread_ref.is_alive():
+ try:
+ scanner_thread_ref.join(timeout=2.0)
+ except Exception:
+ pass
+ if scanner_proc_ref and scanner_proc_ref.poll() is None:
+ try:
+ scanner_proc_ref.terminate()
+ scanner_proc_ref.wait(timeout=1)
+ except Exception:
+ try:
+ scanner_proc_ref.kill()
+ except Exception:
+ pass
+ try:
+ subprocess.run(['pkill', '-9', 'rtl_power'], capture_output=True, timeout=0.5)
+ except Exception:
+ pass
+ time.sleep(0.5)
+
+    # Re-acquire lock for the rest of startup: waterfall probe, device claim, stream start
+ with audio_start_lock:
+
+ # Preferred path: when waterfall WebSocket is active on the same SDR,
+ # derive monitor audio from that IQ stream instead of spawning rtl_fm.
+ try:
+ from routes.waterfall_websocket import (
+ get_shared_capture_status,
+ start_shared_monitor_from_capture,
+ )
+
+ shared = get_shared_capture_status()
+ if shared.get('running') and shared.get('device') == device:
+ _stop_audio_stream()
+ ok, msg = start_shared_monitor_from_capture(
+ device=device,
+ frequency_mhz=frequency,
+ modulation=modulation,
+ squelch=squelch,
+ )
+ if ok:
+ audio_running = True
+ audio_frequency = frequency
+ audio_modulation = modulation
+ audio_source = 'waterfall'
+ # Shared monitor uses the waterfall's existing SDR claim.
+ if receiver_active_device is not None:
+ app_module.release_sdr_device(receiver_active_device)
+ receiver_active_device = None
+ return jsonify({
+ 'status': 'started',
+ 'frequency': frequency,
+ 'modulation': modulation,
+ 'source': 'waterfall',
+ 'request_token': request_token,
+ })
+ logger.warning(f"Shared waterfall monitor unavailable: {msg}")
+ except Exception as e:
+ logger.debug(f"Shared waterfall monitor probe failed: {e}")
+
+ # Stop waterfall if it's using the same SDR (SSE path)
+ if waterfall_running and waterfall_active_device == device:
+ _stop_waterfall_internal()
+ time.sleep(0.2)
+
+ # Claim device for listening audio. The WebSocket waterfall handler
+ # may still be tearing down its IQ capture process (thread join +
+ # safe_terminate can take several seconds), so we retry with back-off
+ # to give the USB device time to be fully released.
+ if receiver_active_device is None or receiver_active_device != device:
+ if receiver_active_device is not None:
+ app_module.release_sdr_device(receiver_active_device)
+ receiver_active_device = None
+
+ error = None
+ max_claim_attempts = 6
+ for attempt in range(max_claim_attempts):
+ error = app_module.claim_sdr_device(device, 'receiver')
+ if not error:
+ break
+ if attempt < max_claim_attempts - 1:
+ logger.debug(
+ f"Device claim attempt {attempt + 1}/{max_claim_attempts} "
+ f"failed, retrying in 0.5s: {error}"
+ )
+ time.sleep(0.5)
+
+ if error:
+ return jsonify({
+ 'status': 'error',
+ 'error_type': 'DEVICE_BUSY',
+ 'message': error
+ }), 409
+ receiver_active_device = device
+
+ _start_audio_stream(
+ frequency,
+ modulation,
+ device=device,
+ sdr_type=sdr_type,
+ gain=gain,
+ squelch=squelch,
+ bias_t=bias_t,
+ )
+
+ if audio_running:
+ audio_source = 'process'
+ return jsonify({
+ 'status': 'started',
+ 'frequency': audio_frequency,
+ 'modulation': audio_modulation,
+ 'source': 'process',
+ 'request_token': request_token,
+ })
+
+ # Avoid leaving a stale device claim after startup failure.
+ if receiver_active_device is not None:
+ app_module.release_sdr_device(receiver_active_device)
+ receiver_active_device = None
+
+ start_error = ''
+ for log_path in ('/tmp/rtl_fm_stderr.log', '/tmp/ffmpeg_stderr.log'):
+ try:
+ with open(log_path, 'r') as handle:
+ content = handle.read().strip()
+ if content:
+ start_error = content.splitlines()[-1]
+ break
+ except Exception:
+ continue
+
+ message = 'Failed to start audio. Check SDR device.'
+ if start_error:
+ message = f'Failed to start audio: {start_error}'
+ return jsonify({
+ 'status': 'error',
+ 'message': message
+ }), 500
@receiver_bp.route('/audio/stop', methods=['POST'])
@@ -1606,64 +1624,64 @@ def audio_probe() -> Response:
return jsonify({'status': 'ok', 'bytes': size})
-@receiver_bp.route('/audio/stream')
-def stream_audio() -> Response:
- """Stream WAV audio."""
- request_token_raw = request.args.get('request_token')
- request_token = None
- if request_token_raw is not None:
- try:
- request_token = int(request_token_raw)
- except (ValueError, TypeError):
- request_token = None
-
- if request_token is not None and request_token < audio_start_token:
- return Response(b'', mimetype='audio/wav', status=204)
-
- if audio_source == 'waterfall':
- for _ in range(40):
- if audio_running:
- break
- time.sleep(0.05)
+@receiver_bp.route('/audio/stream')
+def stream_audio() -> Response:
+ """Stream WAV audio."""
+ request_token_raw = request.args.get('request_token')
+ request_token = None
+ if request_token_raw is not None:
+ try:
+ request_token = int(request_token_raw)
+ except (ValueError, TypeError):
+ request_token = None
+
+ if request_token is not None and request_token < audio_start_token:
+ return Response(b'', mimetype='audio/wav', status=204)
+
+ if audio_source == 'waterfall':
+ for _ in range(40):
+ if audio_running:
+ break
+ time.sleep(0.05)
if not audio_running:
return Response(b'', mimetype='audio/wav', status=204)
- def generate_shared():
- global audio_running, audio_source
- try:
- from routes.waterfall_websocket import (
- get_shared_capture_status,
+ def generate_shared():
+ global audio_running, audio_source
+ try:
+ from routes.waterfall_websocket import (
+ get_shared_capture_status,
read_shared_monitor_audio_chunk,
)
except Exception:
return
- # Browser expects an immediate WAV header.
- yield _wav_header(sample_rate=48000)
- inactive_since: float | None = None
-
- while audio_running and audio_source == 'waterfall':
- if request_token is not None and request_token < audio_start_token:
- break
- chunk = read_shared_monitor_audio_chunk(timeout=1.0)
- if chunk:
- inactive_since = None
- yield chunk
- continue
- shared = get_shared_capture_status()
- if shared.get('running') and shared.get('monitor_enabled'):
- inactive_since = None
- continue
- if inactive_since is None:
- inactive_since = time.monotonic()
- continue
- if (time.monotonic() - inactive_since) < 4.0:
- continue
- if not shared.get('running') or not shared.get('monitor_enabled'):
- audio_running = False
- audio_source = 'process'
- break
+ # Browser expects an immediate WAV header.
+ yield _wav_header(sample_rate=48000)
+ inactive_since: float | None = None
+
+ while audio_running and audio_source == 'waterfall':
+ if request_token is not None and request_token < audio_start_token:
+ break
+ chunk = read_shared_monitor_audio_chunk(timeout=1.0)
+ if chunk:
+ inactive_since = None
+ yield chunk
+ continue
+ shared = get_shared_capture_status()
+ if shared.get('running') and shared.get('monitor_enabled'):
+ inactive_since = None
+ continue
+ if inactive_since is None:
+ inactive_since = time.monotonic()
+ continue
+ if (time.monotonic() - inactive_since) < 4.0:
+ continue
+ if not shared.get('running') or not shared.get('monitor_enabled'):
+ audio_running = False
+ audio_source = 'process'
+ break
return Response(
generate_shared(),
@@ -1685,11 +1703,11 @@ def stream_audio() -> Response:
if not audio_running or not audio_process:
return Response(b'', mimetype='audio/wav', status=204)
- def generate():
- # Capture local reference to avoid race condition with stop
- proc = audio_process
- if not proc or not proc.stdout:
- return
+ def generate():
+ # Capture local reference to avoid race condition with stop
+ proc = audio_process
+ if not proc or not proc.stdout:
+ return
try:
# Drain stale audio that accumulated in the pipe buffer
# between pipeline start and stream connection. Keep the
@@ -1708,17 +1726,17 @@ def stream_audio() -> Response:
if header_chunk:
yield header_chunk
- # Stream real-time audio
- first_chunk_deadline = time.time() + 20.0
- warned_wait = False
- while audio_running and proc.poll() is None:
- if request_token is not None and request_token < audio_start_token:
- break
- # Use select to avoid blocking forever
- ready, _, _ = select.select([proc.stdout], [], [], 2.0)
- if ready:
- chunk = proc.stdout.read(8192)
- if chunk:
+ # Stream real-time audio
+ first_chunk_deadline = time.time() + 20.0
+ warned_wait = False
+ while audio_running and proc.poll() is None:
+ if request_token is not None and request_token < audio_start_token:
+ break
+ # Use select to avoid blocking forever
+ ready, _, _ = select.select([proc.stdout], [], [], 2.0)
+ if ready:
+ chunk = proc.stdout.read(8192)
+ if chunk:
warned_wait = False
yield chunk
else:
diff --git a/routes/rtlamr.py b/routes/rtlamr.py
index 96bdc44..3acb291 100644
--- a/routes/rtlamr.py
+++ b/routes/rtlamr.py
@@ -138,36 +138,34 @@ def start_rtlamr() -> Response:
output_format = data.get('format', 'json')
# Start rtl_tcp first
+ rtl_tcp_just_started = False
+ rtl_tcp_cmd_str = ''
with rtl_tcp_lock:
if not rtl_tcp_process:
logger.info("Starting rtl_tcp server...")
try:
rtl_tcp_cmd = ['rtl_tcp', '-a', '0.0.0.0']
-
+
# Add device index if not 0
if device and device != '0':
rtl_tcp_cmd.extend(['-d', str(device)])
-
+
# Add gain if not auto
if gain and gain != '0':
rtl_tcp_cmd.extend(['-g', str(gain)])
-
+
# Add PPM correction if not 0
if ppm and ppm != '0':
rtl_tcp_cmd.extend(['-p', str(ppm)])
-
+
rtl_tcp_process = subprocess.Popen(
rtl_tcp_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
register_process(rtl_tcp_process)
-
- # Wait a moment for rtl_tcp to start
- time.sleep(3)
-
- logger.info(f"rtl_tcp started: {' '.join(rtl_tcp_cmd)}")
- app_module.rtlamr_queue.put({'type': 'info', 'text': f'rtl_tcp: {" ".join(rtl_tcp_cmd)}'})
+ rtl_tcp_just_started = True
+ rtl_tcp_cmd_str = ' '.join(rtl_tcp_cmd)
except Exception as e:
logger.error(f"Failed to start rtl_tcp: {e}")
# Release SDR device on rtl_tcp failure
@@ -176,6 +174,12 @@ def start_rtlamr() -> Response:
rtlamr_active_device = None
return jsonify({'status': 'error', 'message': f'Failed to start rtl_tcp: {e}'}), 500
+ # Wait for rtl_tcp to start outside lock
+ if rtl_tcp_just_started:
+ time.sleep(3)
+ logger.info(f"rtl_tcp started: {rtl_tcp_cmd_str}")
+ app_module.rtlamr_queue.put({'type': 'info', 'text': f'rtl_tcp: {rtl_tcp_cmd_str}'})
+
# Build rtlamr command
cmd = [
'rtlamr',
@@ -258,25 +262,34 @@ def start_rtlamr() -> Response:
def stop_rtlamr() -> Response:
global rtl_tcp_process, rtlamr_active_device
+ # Grab process refs inside locks, clear state, then terminate outside
+ rtlamr_proc = None
with app_module.rtlamr_lock:
if app_module.rtlamr_process:
- app_module.rtlamr_process.terminate()
- try:
- app_module.rtlamr_process.wait(timeout=2)
- except subprocess.TimeoutExpired:
- app_module.rtlamr_process.kill()
+ rtlamr_proc = app_module.rtlamr_process
app_module.rtlamr_process = None
+ if rtlamr_proc:
+ rtlamr_proc.terminate()
+ try:
+ rtlamr_proc.wait(timeout=2)
+ except subprocess.TimeoutExpired:
+ rtlamr_proc.kill()
+
# Also stop rtl_tcp
+ tcp_proc = None
with rtl_tcp_lock:
if rtl_tcp_process:
- rtl_tcp_process.terminate()
- try:
- rtl_tcp_process.wait(timeout=2)
- except subprocess.TimeoutExpired:
- rtl_tcp_process.kill()
+ tcp_proc = rtl_tcp_process
rtl_tcp_process = None
- logger.info("rtl_tcp stopped")
+
+ if tcp_proc:
+ tcp_proc.terminate()
+ try:
+ tcp_proc.wait(timeout=2)
+ except subprocess.TimeoutExpired:
+ tcp_proc.kill()
+ logger.info("rtl_tcp stopped")
# Release device from registry
if rtlamr_active_device is not None:
diff --git a/routes/tscm.py b/routes/tscm.py
index 4f60b42..dd3a862 100644
--- a/routes/tscm.py
+++ b/routes/tscm.py
@@ -1345,7 +1345,7 @@ def _scan_rf_signals(
sweep_ranges: list[dict] | None = None
) -> list[dict]:
"""
- Scan for RF signals using SDR (rtl_power).
+ Scan for RF signals using SDR (rtl_power or hackrf_sweep).
Scans common surveillance frequency bands:
- 88-108 MHz: FM broadcast (potential FM bugs)
@@ -1375,39 +1375,50 @@ def _scan_rf_signals(
logger.info(f"Starting RF scan (device={sdr_device})")
+ # Detect available SDR devices and sweep tools
rtl_power_path = shutil.which('rtl_power')
- if not rtl_power_path:
- logger.warning("rtl_power not found in PATH, RF scanning unavailable")
+ hackrf_sweep_path = shutil.which('hackrf_sweep')
+
+ sdr_type = None
+ sweep_tool_path = None
+
+ try:
+ from utils.sdr import SDRFactory
+ from utils.sdr.base import SDRType
+ devices = SDRFactory.detect_devices()
+ rtlsdr_available = any(d.sdr_type == SDRType.RTL_SDR for d in devices)
+ hackrf_available = any(d.sdr_type == SDRType.HACKRF for d in devices)
+ except ImportError:
+ rtlsdr_available = False
+ hackrf_available = False
+
+ # Pick the best available SDR + sweep tool combo
+ if rtlsdr_available and rtl_power_path:
+ sdr_type = 'rtlsdr'
+ sweep_tool_path = rtl_power_path
+ logger.info(f"Using RTL-SDR with rtl_power at: {rtl_power_path}")
+ elif hackrf_available and hackrf_sweep_path:
+ sdr_type = 'hackrf'
+ sweep_tool_path = hackrf_sweep_path
+ logger.info(f"Using HackRF with hackrf_sweep at: {hackrf_sweep_path}")
+ elif rtl_power_path:
+ # Tool exists but no device detected — try anyway (detection may have failed)
+ sdr_type = 'rtlsdr'
+ sweep_tool_path = rtl_power_path
+ logger.info(f"No SDR detected but rtl_power found, attempting RTL-SDR scan")
+ elif hackrf_sweep_path:
+ sdr_type = 'hackrf'
+ sweep_tool_path = hackrf_sweep_path
+ logger.info(f"No SDR detected but hackrf_sweep found, attempting HackRF scan")
+
+ if not sweep_tool_path:
+ logger.warning("No supported sweep tool found (rtl_power or hackrf_sweep)")
_emit_event('rf_status', {
'status': 'error',
- 'message': 'rtl_power not installed. Install rtl-sdr package for RF scanning.',
+ 'message': 'No SDR sweep tool installed. Install rtl-sdr (rtl_power) or HackRF (hackrf_sweep) for RF scanning.',
})
return signals
- logger.info(f"Found rtl_power at: {rtl_power_path}")
-
- # Test if RTL-SDR device is accessible
- rtl_test_path = shutil.which('rtl_test')
- if rtl_test_path:
- try:
- test_result = subprocess.run(
- [rtl_test_path, '-t'],
- capture_output=True,
- text=True,
- timeout=5
- )
- if 'No supported devices found' in test_result.stderr or test_result.returncode != 0:
- logger.warning("No RTL-SDR device found")
- _emit_event('rf_status', {
- 'status': 'error',
- 'message': 'No RTL-SDR device connected. Connect an RTL-SDR dongle for RF scanning.',
- })
- return signals
- except subprocess.TimeoutExpired:
- pass # Device might be busy, continue anyway
- except Exception as e:
- logger.debug(f"rtl_test check failed: {e}")
-
# Define frequency bands to scan (in Hz)
# Format: (start_freq, end_freq, bin_size, description)
scan_bands: list[tuple[int, int, int, str]] = []
@@ -1448,7 +1459,7 @@ def _scan_rf_signals(
try:
# Build device argument
- device_arg = ['-d', str(sdr_device if sdr_device is not None else 0)]
+ device_idx = sdr_device if sdr_device is not None else 0
# Scan each band and look for strong signals
for start_freq, end_freq, bin_size, band_name in scan_bands:
@@ -1458,15 +1469,27 @@ def _scan_rf_signals(
logger.info(f"Scanning {band_name} ({start_freq/1e6:.1f}-{end_freq/1e6:.1f} MHz)")
try:
- # Run rtl_power for a quick sweep of this band
- cmd = [
- rtl_power_path,
- '-f', f'{start_freq}:{end_freq}:{bin_size}',
- '-g', '40', # Gain
- '-i', '1', # Integration interval (1 second)
- '-1', # Single shot mode
- '-c', '20%', # Crop 20% of edges
- ] + device_arg + [tmp_path]
+ # Build sweep command based on SDR type
+ if sdr_type == 'hackrf':
+ cmd = [
+ sweep_tool_path,
+ '-f', f'{int(start_freq / 1e6)}:{int(end_freq / 1e6)}',
+ '-w', str(bin_size),
+ '-1', # Single sweep
+ ]
+ output_mode = 'stdout'
+ else:
+ cmd = [
+ sweep_tool_path,
+ '-f', f'{start_freq}:{end_freq}:{bin_size}',
+ '-g', '40', # Gain
+ '-i', '1', # Integration interval (1 second)
+ '-1', # Single shot mode
+ '-c', '20%', # Crop 20% of edges
+ '-d', str(device_idx),
+ tmp_path,
+ ]
+ output_mode = 'file'
logger.debug(f"Running: {' '.join(cmd)}")
@@ -1478,9 +1501,14 @@ def _scan_rf_signals(
)
if result.returncode != 0:
- logger.warning(f"rtl_power returned {result.returncode}: {result.stderr}")
+ logger.warning(f"{os.path.basename(sweep_tool_path)} returned {result.returncode}: {result.stderr}")
- # Parse the CSV output
+ # For HackRF, write stdout CSV data to temp file for unified parsing
+ if output_mode == 'stdout' and result.stdout:
+ with open(tmp_path, 'w') as f:
+ f.write(result.stdout)
+
+ # Parse the CSV output (same format for both rtl_power and hackrf_sweep)
if os.path.exists(tmp_path) and os.path.getsize(tmp_path) > 0:
with open(tmp_path, 'r') as f:
for line in f:
@@ -1488,13 +1516,12 @@ def _scan_rf_signals(
if len(parts) >= 7:
try:
# CSV format: date, time, hz_low, hz_high, hz_step, samples, db_values...
- hz_low = int(parts[2])
- hz_high = int(parts[3])
- hz_step = float(parts[4])
+ hz_low = int(parts[2].strip())
+ hz_high = int(parts[3].strip())
+ hz_step = float(parts[4].strip())
db_values = [float(x) for x in parts[6:] if x.strip()]
# Find peaks above noise floor
- # RTL-SDR dongles have higher noise figures, so use permissive thresholds
noise_floor = sum(db_values) / len(db_values) if db_values else -100
threshold = noise_floor + 6 # Signal must be 6dB above noise
diff --git a/static/js/modes/weather-satellite.js b/static/js/modes/weather-satellite.js
index b1c7eee..1416604 100644
--- a/static/js/modes/weather-satellite.js
+++ b/static/js/modes/weather-satellite.js
@@ -3,14 +3,14 @@
* NOAA APT and Meteor LRPT decoder interface with auto-scheduler,
* polar plot, styled real-world map, countdown, and timeline.
*/
-
-const WeatherSat = (function() {
- // State
- let isRunning = false;
- let eventSource = null;
- let images = [];
- let passes = [];
- let selectedPassIndex = -1;
+
+const WeatherSat = (function() {
+ // State
+ let isRunning = false;
+ let eventSource = null;
+ let images = [];
+ let passes = [];
+ let selectedPassIndex = -1;
let currentSatellite = null;
let countdownInterval = null;
let schedulerEnabled = false;
@@ -21,22 +21,22 @@ const WeatherSat = (function() {
let satCrosshairMarker = null;
let observerMarker = null;
let consoleEntries = [];
- let consoleCollapsed = false;
- let currentPhase = 'idle';
+ let consoleCollapsed = false;
+ let currentPhase = 'idle';
let consoleAutoHideTimer = null;
let currentModalFilename = null;
let locationListenersAttached = false;
-
- /**
- * Initialize the Weather Satellite mode
- */
+
+ /**
+ * Initialize the Weather Satellite mode
+ */
function init() {
checkStatus();
loadImages();
loadLocationInputs();
loadPasses();
- startCountdownTimer();
- checkSchedulerStatus();
+ startCountdownTimer();
+ checkSchedulerStatus();
initGroundMap();
}
@@ -78,42 +78,42 @@ const WeatherSat = (function() {
*/
function loadLocationInputs() {
const latInput = document.getElementById('wxsatObsLat');
- const lonInput = document.getElementById('wxsatObsLon');
-
- let storedLat = localStorage.getItem('observerLat');
- let storedLon = localStorage.getItem('observerLon');
- if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
- const shared = ObserverLocation.getShared();
- storedLat = shared.lat.toString();
- storedLon = shared.lon.toString();
- }
-
- if (latInput && storedLat) latInput.value = storedLat;
- if (lonInput && storedLon) lonInput.value = storedLon;
-
- // Only attach listeners once — re-calling init() on mode switch must not
- // accumulate duplicate listeners that fire loadPasses() multiple times.
- if (!locationListenersAttached) {
- if (latInput) latInput.addEventListener('change', saveLocationFromInputs);
- if (lonInput) lonInput.addEventListener('change', saveLocationFromInputs);
- locationListenersAttached = true;
- }
- }
-
- /**
- * Save location from inputs and refresh passes
- */
- function saveLocationFromInputs() {
- const latInput = document.getElementById('wxsatObsLat');
- const lonInput = document.getElementById('wxsatObsLon');
-
- const lat = parseFloat(latInput?.value);
- const lon = parseFloat(lonInput?.value);
-
- if (!isNaN(lat) && lat >= -90 && lat <= 90 &&
- !isNaN(lon) && lon >= -180 && lon <= 180) {
- if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
- ObserverLocation.setShared({ lat, lon });
+ const lonInput = document.getElementById('wxsatObsLon');
+
+ let storedLat = localStorage.getItem('observerLat');
+ let storedLon = localStorage.getItem('observerLon');
+ if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
+ const shared = ObserverLocation.getShared();
+ storedLat = shared.lat.toString();
+ storedLon = shared.lon.toString();
+ }
+
+ if (latInput && storedLat) latInput.value = storedLat;
+ if (lonInput && storedLon) lonInput.value = storedLon;
+
+ // Only attach listeners once — re-calling init() on mode switch must not
+ // accumulate duplicate listeners that fire loadPasses() multiple times.
+ if (!locationListenersAttached) {
+ if (latInput) latInput.addEventListener('change', saveLocationFromInputs);
+ if (lonInput) lonInput.addEventListener('change', saveLocationFromInputs);
+ locationListenersAttached = true;
+ }
+ }
+
+ /**
+ * Save location from inputs and refresh passes
+ */
+ function saveLocationFromInputs() {
+ const latInput = document.getElementById('wxsatObsLat');
+ const lonInput = document.getElementById('wxsatObsLon');
+
+ const lat = parseFloat(latInput?.value);
+ const lon = parseFloat(lonInput?.value);
+
+ if (!isNaN(lat) && lat >= -90 && lat <= 90 &&
+ !isNaN(lon) && lon >= -180 && lon <= 180) {
+ if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
+ ObserverLocation.setShared({ lat, lon });
} else {
localStorage.setItem('observerLat', lat.toString());
localStorage.setItem('observerLon', lon.toString());
@@ -122,419 +122,422 @@ const WeatherSat = (function() {
centerGroundMapOnObserver(1);
}
}
-
- /**
- * Use GPS for location
- */
- function useGPS(btn) {
- if (!navigator.geolocation) {
- showNotification('Weather Sat', 'GPS not available in this browser');
- return;
- }
-
- const originalText = btn.innerHTML;
- btn.innerHTML = '...';
- btn.disabled = true;
-
- navigator.geolocation.getCurrentPosition(
- (pos) => {
- const latInput = document.getElementById('wxsatObsLat');
- const lonInput = document.getElementById('wxsatObsLon');
-
- const lat = pos.coords.latitude.toFixed(4);
- const lon = pos.coords.longitude.toFixed(4);
-
- if (latInput) latInput.value = lat;
- if (lonInput) lonInput.value = lon;
-
- if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
- ObserverLocation.setShared({ lat: parseFloat(lat), lon: parseFloat(lon) });
- } else {
- localStorage.setItem('observerLat', lat);
- localStorage.setItem('observerLon', lon);
- }
-
+
+ /**
+ * Use GPS for location
+ */
+ function useGPS(btn) {
+ if (!navigator.geolocation) {
+ showNotification('Weather Sat', 'GPS not available in this browser');
+ return;
+ }
+
+ const originalText = btn.innerHTML;
+ btn.innerHTML = '...';
+ btn.disabled = true;
+
+ navigator.geolocation.getCurrentPosition(
+ (pos) => {
+ const latInput = document.getElementById('wxsatObsLat');
+ const lonInput = document.getElementById('wxsatObsLon');
+
+ const lat = pos.coords.latitude.toFixed(4);
+ const lon = pos.coords.longitude.toFixed(4);
+
+ if (latInput) latInput.value = lat;
+ if (lonInput) lonInput.value = lon;
+
+ if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
+ ObserverLocation.setShared({ lat: parseFloat(lat), lon: parseFloat(lon) });
+ } else {
+ localStorage.setItem('observerLat', lat);
+ localStorage.setItem('observerLon', lon);
+ }
+
btn.innerHTML = originalText;
btn.disabled = false;
showNotification('Weather Sat', 'Location updated');
loadPasses();
centerGroundMapOnObserver(1);
},
- (err) => {
- btn.innerHTML = originalText;
- btn.disabled = false;
- showNotification('Weather Sat', 'Failed to get location');
- },
- { enableHighAccuracy: true, timeout: 10000 }
- );
- }
-
- /**
- * Check decoder status
- */
- async function checkStatus() {
- try {
- const response = await fetch('/weather-sat/status');
- const data = await response.json();
-
- if (!data.available) {
- updateStatusUI('unavailable', 'SatDump not installed');
- return;
- }
-
- if (data.running) {
- isRunning = true;
- currentSatellite = data.satellite;
- updateStatusUI('capturing', `Capturing ${data.satellite}...`);
- startStream();
- } else {
- updateStatusUI('idle', 'Idle');
- }
- } catch (err) {
- console.error('Failed to check weather sat status:', err);
- }
- }
-
- /**
- * Start capture
- */
- async function start() {
- const satSelect = document.getElementById('weatherSatSelect');
- const gainInput = document.getElementById('weatherSatGain');
- const biasTInput = document.getElementById('weatherSatBiasT');
- const deviceSelect = document.getElementById('deviceSelect');
-
- const satellite = satSelect?.value || 'METEOR-M2-3';
- const gain = parseFloat(gainInput?.value || '40');
- const biasT = biasTInput?.checked || false;
- const device = parseInt(deviceSelect?.value || '0', 10);
-
- clearConsole();
- showConsole(true);
- updatePhaseIndicator('tuning');
- addConsoleEntry('Starting capture...', 'info');
- updateStatusUI('connecting', 'Starting...');
-
- try {
- const response = await fetch('/weather-sat/start', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- satellite,
- device,
- gain,
- bias_t: biasT,
- })
- });
-
- const data = await response.json();
-
- if (data.status === 'started' || data.status === 'already_running') {
- isRunning = true;
- currentSatellite = data.satellite || satellite;
- updateStatusUI('capturing', `${data.satellite} ${data.frequency} MHz`);
- updateFreqDisplay(data.frequency, data.mode);
- startStream();
- showNotification('Weather Sat', `Capturing ${data.satellite} on ${data.frequency} MHz`);
- } else {
- updateStatusUI('idle', 'Start failed');
- showNotification('Weather Sat', data.message || 'Failed to start');
- }
- } catch (err) {
- console.error('Failed to start weather sat:', err);
- updateStatusUI('idle', 'Error');
- showNotification('Weather Sat', 'Connection error');
- }
- }
-
- /**
- * Start capture for a specific pass
- */
- function startPass(satellite) {
- const satSelect = document.getElementById('weatherSatSelect');
- if (satSelect) {
- satSelect.value = satellite;
- }
- start();
- }
-
- /**
- * Stop capture
- */
- async function stop() {
- try {
- await fetch('/weather-sat/stop', { method: 'POST' });
- isRunning = false;
- stopStream();
- updateStatusUI('idle', 'Stopped');
- showNotification('Weather Sat', 'Capture stopped');
- } catch (err) {
- console.error('Failed to stop weather sat:', err);
- }
- }
-
- /**
- * Start test decode from a pre-recorded file
- */
- async function testDecode() {
- const satSelect = document.getElementById('wxsatTestSatSelect');
- const fileInput = document.getElementById('wxsatTestFilePath');
- const rateSelect = document.getElementById('wxsatTestSampleRate');
-
- const satellite = satSelect?.value || 'METEOR-M2-3';
- const inputFile = (fileInput?.value || '').trim();
- const sampleRate = parseInt(rateSelect?.value || '1000000', 10);
-
- if (!inputFile) {
- showNotification('Weather Sat', 'Enter a file path');
- return;
- }
-
- clearConsole();
- showConsole(true);
- updatePhaseIndicator('decoding');
- addConsoleEntry(`Test decode: ${inputFile}`, 'info');
- updateStatusUI('connecting', 'Starting file decode...');
-
- try {
- const response = await fetch('/weather-sat/test-decode', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- satellite,
- input_file: inputFile,
- sample_rate: sampleRate,
- })
- });
-
- const data = await response.json();
-
- if (data.status === 'started' || data.status === 'already_running') {
- isRunning = true;
- currentSatellite = data.satellite || satellite;
- updateStatusUI('decoding', `Decoding ${data.satellite} from file`);
- updateFreqDisplay(data.frequency, data.mode);
- startStream();
- showNotification('Weather Sat', `Decoding ${data.satellite} from file`);
- } else {
- updateStatusUI('idle', 'Decode failed');
- showNotification('Weather Sat', data.message || 'Failed to start decode');
- addConsoleEntry(data.message || 'Failed to start decode', 'error');
- }
- } catch (err) {
- console.error('Failed to start test decode:', err);
- updateStatusUI('idle', 'Error');
- showNotification('Weather Sat', 'Connection error');
- }
- }
-
- /**
- * Update status UI
- */
- function updateStatusUI(status, text) {
- const dot = document.getElementById('wxsatStripDot');
- const statusText = document.getElementById('wxsatStripStatus');
- const startBtn = document.getElementById('wxsatStartBtn');
- const stopBtn = document.getElementById('wxsatStopBtn');
-
- if (dot) {
- dot.className = 'wxsat-strip-dot';
- if (status === 'capturing') dot.classList.add('capturing');
- else if (status === 'decoding') dot.classList.add('decoding');
- }
-
- if (statusText) statusText.textContent = text || status;
-
- if (startBtn && stopBtn) {
- if (status === 'capturing' || status === 'decoding') {
- startBtn.style.display = 'none';
- stopBtn.style.display = 'inline-block';
- } else {
- startBtn.style.display = 'inline-block';
- stopBtn.style.display = 'none';
- }
- }
- }
-
- /**
- * Update frequency display in strip
- */
- function updateFreqDisplay(freq, mode) {
- const freqEl = document.getElementById('wxsatStripFreq');
- const modeEl = document.getElementById('wxsatStripMode');
- if (freqEl) freqEl.textContent = freq || '--';
- if (modeEl) modeEl.textContent = mode || '--';
- }
-
- /**
- * Start SSE stream
- */
- function startStream() {
- if (eventSource) eventSource.close();
-
- eventSource = new EventSource('/weather-sat/stream');
-
- eventSource.onmessage = (e) => {
- try {
- const data = JSON.parse(e.data);
- if (data.type === 'weather_sat_progress') {
- handleProgress(data);
- } else if (data.type && data.type.startsWith('schedule_')) {
- handleSchedulerSSE(data);
- }
- } catch (err) {
- console.error('Failed to parse SSE:', err);
- }
- };
-
- eventSource.onerror = () => {
- setTimeout(() => {
- if (isRunning || schedulerEnabled) startStream();
- }, 3000);
- };
- }
-
- /**
- * Stop SSE stream
- */
- function stopStream() {
- if (eventSource) {
- eventSource.close();
- eventSource = null;
- }
- }
-
- /**
- * Handle progress update
- */
- function handleProgress(data) {
- const captureStatus = document.getElementById('wxsatCaptureStatus');
- const captureMsg = document.getElementById('wxsatCaptureMsg');
- const captureElapsed = document.getElementById('wxsatCaptureElapsed');
- const progressBar = document.getElementById('wxsatProgressFill');
-
- if (data.status === 'capturing' || data.status === 'decoding') {
- updateStatusUI(data.status, `${data.status === 'decoding' ? 'Decoding' : 'Capturing'} ${data.satellite}...`);
-
- if (captureStatus) captureStatus.classList.add('active');
- if (captureMsg) captureMsg.textContent = data.message || '';
- if (captureElapsed) captureElapsed.textContent = formatElapsed(data.elapsed_seconds || 0);
- if (progressBar) progressBar.style.width = (data.progress || 0) + '%';
-
- // Console updates
- showConsole(true);
- if (data.message) addConsoleEntry(data.message, data.log_type || 'info');
- if (data.capture_phase) updatePhaseIndicator(data.capture_phase);
-
- } else if (data.status === 'complete') {
- if (data.image) {
- images.unshift(data.image);
- updateImageCount(images.length);
- renderGallery();
- showNotification('Weather Sat', `New image: ${data.image.product || data.image.satellite}`);
- }
-
- if (!data.image) {
- // Capture ended
- isRunning = false;
- if (!schedulerEnabled) stopStream();
- updateStatusUI('idle', 'Capture complete');
- if (captureStatus) captureStatus.classList.remove('active');
-
- addConsoleEntry('Capture complete', 'signal');
- updatePhaseIndicator('complete');
- if (consoleAutoHideTimer) clearTimeout(consoleAutoHideTimer);
- consoleAutoHideTimer = setTimeout(() => showConsole(false), 30000);
- }
-
- } else if (data.status === 'error') {
- isRunning = false;
- if (!schedulerEnabled) stopStream();
- updateStatusUI('idle', 'Error');
- showNotification('Weather Sat', data.message || 'Capture error');
- if (captureStatus) captureStatus.classList.remove('active');
-
- if (data.message) addConsoleEntry(data.message, 'error');
- updatePhaseIndicator('error');
- if (consoleAutoHideTimer) clearTimeout(consoleAutoHideTimer);
- consoleAutoHideTimer = setTimeout(() => showConsole(false), 15000);
- }
- }
-
- /**
- * Handle scheduler SSE events
- */
- function handleSchedulerSSE(data) {
- if (data.type === 'schedule_capture_start') {
- isRunning = true;
- const p = data.pass || {};
- currentSatellite = p.satellite;
- updateStatusUI('capturing', `Auto: ${p.name || p.satellite} ${p.frequency} MHz`);
- showNotification('Weather Sat', `Auto-capture started: ${p.name || p.satellite}`);
- } else if (data.type === 'schedule_capture_complete') {
- const p = data.pass || {};
- showNotification('Weather Sat', `Auto-capture complete: ${p.name || ''}`);
- // Reset UI — the decoder's stop() doesn't emit a progress complete event
- // when called internally by the scheduler, so we handle it here.
- isRunning = false;
- updateStatusUI('idle', 'Auto-capture complete');
- const captureStatus = document.getElementById('wxsatCaptureStatus');
- if (captureStatus) captureStatus.classList.remove('active');
- updatePhaseIndicator('complete');
- loadImages();
- loadPasses();
- } else if (data.type === 'schedule_capture_skipped') {
- const reason = data.reason || 'unknown';
- const p = data.pass || {};
- showNotification('Weather Sat', `Pass skipped (${reason}): ${p.name || p.satellite}`);
- }
- }
-
- /**
- * Format elapsed seconds
- */
- function formatElapsed(seconds) {
- const m = Math.floor(seconds / 60);
- const s = seconds % 60;
- return `${m}:${s.toString().padStart(2, '0')}`;
- }
-
- /**
- * Parse pass timestamps, accepting legacy malformed UTC strings (+00:00Z).
- */
- function parsePassDate(value) {
- if (!value || typeof value !== 'string') return null;
-
- let parsed = new Date(value);
- if (!Number.isNaN(parsed.getTime())) {
- return parsed;
- }
-
- // Backward-compatible cleanup for accidentally double-suffixed UTC timestamps.
- parsed = new Date(value.replace(/\+00:00Z$/, 'Z'));
- if (!Number.isNaN(parsed.getTime())) {
- return parsed;
- }
-
- return null;
- }
-
- /**
- * Load pass predictions (with trajectory + ground track)
- */
+ (err) => {
+ btn.innerHTML = originalText;
+ btn.disabled = false;
+ showNotification('Weather Sat', 'Failed to get location');
+ },
+ { enableHighAccuracy: true, timeout: 10000 }
+ );
+ }
+
+ /**
+ * Check decoder status
+ */
+ async function checkStatus() {
+ try {
+ const response = await fetch('/weather-sat/status');
+ const data = await response.json();
+
+ if (!data.available) {
+ updateStatusUI('unavailable', 'SatDump not installed');
+ return;
+ }
+
+ if (data.running) {
+ isRunning = true;
+ currentSatellite = data.satellite;
+ updateStatusUI('capturing', `Capturing ${data.satellite}...`);
+ startStream();
+ } else {
+ updateStatusUI('idle', 'Idle');
+ }
+ } catch (err) {
+ console.error('Failed to check weather sat status:', err);
+ }
+ }
+
+ /**
+ * Start capture
+ */
+ async function start() {
+ const satSelect = document.getElementById('weatherSatSelect');
+ const gainInput = document.getElementById('weatherSatGain');
+ const biasTInput = document.getElementById('weatherSatBiasT');
+ const deviceSelect = document.getElementById('deviceSelect');
+
+ const satellite = satSelect?.value || 'METEOR-M2-3';
+ const gain = parseFloat(gainInput?.value || '40');
+ const biasT = biasTInput?.checked || false;
+ const device = parseInt(deviceSelect?.value || '0', 10);
+
+ clearConsole();
+ showConsole(true);
+ updatePhaseIndicator('tuning');
+ addConsoleEntry('Starting capture...', 'info');
+ updateStatusUI('connecting', 'Starting...');
+
+ try {
+ const response = await fetch('/weather-sat/start', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ satellite,
+ device,
+ gain,
+ bias_t: biasT,
+ })
+ });
+
+ const data = await response.json();
+
+ if (data.status === 'started' || data.status === 'already_running') {
+ isRunning = true;
+ currentSatellite = data.satellite || satellite;
+ updateStatusUI('capturing', `${data.satellite} ${data.frequency} MHz`);
+ updateFreqDisplay(data.frequency, data.mode);
+ startStream();
+ showNotification('Weather Sat', `Capturing ${data.satellite} on ${data.frequency} MHz`);
+ } else {
+ updateStatusUI('idle', 'Start failed');
+ showNotification('Weather Sat', data.message || 'Failed to start');
+ }
+ } catch (err) {
+ console.error('Failed to start weather sat:', err);
+ updateStatusUI('idle', 'Error');
+ showNotification('Weather Sat', 'Connection error');
+ }
+ }
+
+ /**
+ * Start capture for a specific pass
+ */
+ function startPass(satellite) {
+ const satSelect = document.getElementById('weatherSatSelect');
+ if (satSelect) {
+ satSelect.value = satellite;
+ }
+ start();
+ }
+
+ /**
+ * Stop capture
+ */
+ async function stop() {
+ // Optimistically update UI immediately so stop feels responsive,
+ // even if the server takes time to terminate the process.
+ isRunning = false;
+ stopStream();
+ updateStatusUI('idle', 'Stopping...');
+ try {
+ await fetch('/weather-sat/stop', { method: 'POST' });
+ updateStatusUI('idle', 'Stopped');
+ showNotification('Weather Sat', 'Capture stopped');
+ } catch (err) {
+ console.error('Failed to stop weather sat:', err);
+ }
+ }
+
+ /**
+ * Start test decode from a pre-recorded file
+ */
+ async function testDecode() {
+ const satSelect = document.getElementById('wxsatTestSatSelect');
+ const fileInput = document.getElementById('wxsatTestFilePath');
+ const rateSelect = document.getElementById('wxsatTestSampleRate');
+
+ const satellite = satSelect?.value || 'METEOR-M2-3';
+ const inputFile = (fileInput?.value || '').trim();
+ const sampleRate = parseInt(rateSelect?.value || '1000000', 10);
+
+ if (!inputFile) {
+ showNotification('Weather Sat', 'Enter a file path');
+ return;
+ }
+
+ clearConsole();
+ showConsole(true);
+ updatePhaseIndicator('decoding');
+ addConsoleEntry(`Test decode: ${inputFile}`, 'info');
+ updateStatusUI('connecting', 'Starting file decode...');
+
+ try {
+ const response = await fetch('/weather-sat/test-decode', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ satellite,
+ input_file: inputFile,
+ sample_rate: sampleRate,
+ })
+ });
+
+ const data = await response.json();
+
+ if (data.status === 'started' || data.status === 'already_running') {
+ isRunning = true;
+ currentSatellite = data.satellite || satellite;
+ updateStatusUI('decoding', `Decoding ${data.satellite} from file`);
+ updateFreqDisplay(data.frequency, data.mode);
+ startStream();
+ showNotification('Weather Sat', `Decoding ${data.satellite} from file`);
+ } else {
+ updateStatusUI('idle', 'Decode failed');
+ showNotification('Weather Sat', data.message || 'Failed to start decode');
+ addConsoleEntry(data.message || 'Failed to start decode', 'error');
+ }
+ } catch (err) {
+ console.error('Failed to start test decode:', err);
+ updateStatusUI('idle', 'Error');
+ showNotification('Weather Sat', 'Connection error');
+ }
+ }
+
+ /**
+ * Update status UI
+ */
+ function updateStatusUI(status, text) {
+ const dot = document.getElementById('wxsatStripDot');
+ const statusText = document.getElementById('wxsatStripStatus');
+ const startBtn = document.getElementById('wxsatStartBtn');
+ const stopBtn = document.getElementById('wxsatStopBtn');
+
+ if (dot) {
+ dot.className = 'wxsat-strip-dot';
+ if (status === 'capturing') dot.classList.add('capturing');
+ else if (status === 'decoding') dot.classList.add('decoding');
+ }
+
+ if (statusText) statusText.textContent = text || status;
+
+ if (startBtn && stopBtn) {
+ if (status === 'capturing' || status === 'decoding') {
+ startBtn.style.display = 'none';
+ stopBtn.style.display = 'inline-block';
+ } else {
+ startBtn.style.display = 'inline-block';
+ stopBtn.style.display = 'none';
+ }
+ }
+ }
+
+ /**
+ * Update frequency display in strip
+ */
+ function updateFreqDisplay(freq, mode) {
+ const freqEl = document.getElementById('wxsatStripFreq');
+ const modeEl = document.getElementById('wxsatStripMode');
+ if (freqEl) freqEl.textContent = freq || '--';
+ if (modeEl) modeEl.textContent = mode || '--';
+ }
+
+ /**
+ * Start SSE stream
+ */
+ function startStream() {
+ if (eventSource) eventSource.close();
+
+ eventSource = new EventSource('/weather-sat/stream');
+
+ eventSource.onmessage = (e) => {
+ try {
+ const data = JSON.parse(e.data);
+ if (data.type === 'weather_sat_progress') {
+ handleProgress(data);
+ } else if (data.type && data.type.startsWith('schedule_')) {
+ handleSchedulerSSE(data);
+ }
+ } catch (err) {
+ console.error('Failed to parse SSE:', err);
+ }
+ };
+
+ eventSource.onerror = () => {
+ setTimeout(() => {
+ if (isRunning || schedulerEnabled) startStream();
+ }, 3000);
+ };
+ }
+
+ /**
+ * Stop SSE stream
+ */
+ function stopStream() {
+ if (eventSource) {
+ eventSource.close();
+ eventSource = null;
+ }
+ }
+
+ /**
+ * Handle progress update
+ */
+ function handleProgress(data) {
+ const captureStatus = document.getElementById('wxsatCaptureStatus');
+ const captureMsg = document.getElementById('wxsatCaptureMsg');
+ const captureElapsed = document.getElementById('wxsatCaptureElapsed');
+ const progressBar = document.getElementById('wxsatProgressFill');
+
+ if (data.status === 'capturing' || data.status === 'decoding') {
+ updateStatusUI(data.status, `${data.status === 'decoding' ? 'Decoding' : 'Capturing'} ${data.satellite}...`);
+
+ if (captureStatus) captureStatus.classList.add('active');
+ if (captureMsg) captureMsg.textContent = data.message || '';
+ if (captureElapsed) captureElapsed.textContent = formatElapsed(data.elapsed_seconds || 0);
+ if (progressBar) progressBar.style.width = (data.progress || 0) + '%';
+
+ // Console updates
+ showConsole(true);
+ if (data.message) addConsoleEntry(data.message, data.log_type || 'info');
+ if (data.capture_phase) updatePhaseIndicator(data.capture_phase);
+
+ } else if (data.status === 'complete') {
+ if (data.image) {
+ images.unshift(data.image);
+ updateImageCount(images.length);
+ renderGallery();
+ showNotification('Weather Sat', `New image: ${data.image.product || data.image.satellite}`);
+ }
+
+ if (!data.image) {
+ // Capture ended
+ isRunning = false;
+ if (!schedulerEnabled) stopStream();
+ updateStatusUI('idle', 'Capture complete');
+ if (captureStatus) captureStatus.classList.remove('active');
+
+ addConsoleEntry('Capture complete', 'signal');
+ updatePhaseIndicator('complete');
+ if (consoleAutoHideTimer) clearTimeout(consoleAutoHideTimer);
+ consoleAutoHideTimer = setTimeout(() => showConsole(false), 30000);
+ }
+
+ } else if (data.status === 'error') {
+ isRunning = false;
+ if (!schedulerEnabled) stopStream();
+ updateStatusUI('idle', 'Error');
+ showNotification('Weather Sat', data.message || 'Capture error');
+ if (captureStatus) captureStatus.classList.remove('active');
+
+ if (data.message) addConsoleEntry(data.message, 'error');
+ updatePhaseIndicator('error');
+ if (consoleAutoHideTimer) clearTimeout(consoleAutoHideTimer);
+ consoleAutoHideTimer = setTimeout(() => showConsole(false), 15000);
+ }
+ }
+
+ /**
+ * Handle scheduler SSE events
+ */
+ function handleSchedulerSSE(data) {
+ if (data.type === 'schedule_capture_start') {
+ isRunning = true;
+ const p = data.pass || {};
+ currentSatellite = p.satellite;
+ updateStatusUI('capturing', `Auto: ${p.name || p.satellite} ${p.frequency} MHz`);
+ showNotification('Weather Sat', `Auto-capture started: ${p.name || p.satellite}`);
+ } else if (data.type === 'schedule_capture_complete') {
+ const p = data.pass || {};
+ showNotification('Weather Sat', `Auto-capture complete: ${p.name || ''}`);
+ // Reset UI — the decoder's stop() doesn't emit a progress complete event
+ // when called internally by the scheduler, so we handle it here.
+ isRunning = false;
+ updateStatusUI('idle', 'Auto-capture complete');
+ const captureStatus = document.getElementById('wxsatCaptureStatus');
+ if (captureStatus) captureStatus.classList.remove('active');
+ updatePhaseIndicator('complete');
+ loadImages();
+ loadPasses();
+ } else if (data.type === 'schedule_capture_skipped') {
+ const reason = data.reason || 'unknown';
+ const p = data.pass || {};
+ showNotification('Weather Sat', `Pass skipped (${reason}): ${p.name || p.satellite}`);
+ }
+ }
+
+ /**
+ * Format elapsed seconds
+ */
+ function formatElapsed(seconds) {
+ const m = Math.floor(seconds / 60);
+ const s = seconds % 60;
+ return `${m}:${s.toString().padStart(2, '0')}`;
+ }
+
+ /**
+ * Parse pass timestamps, accepting legacy malformed UTC strings (+00:00Z).
+ */
+ function parsePassDate(value) {
+ if (!value || typeof value !== 'string') return null;
+
+ let parsed = new Date(value);
+ if (!Number.isNaN(parsed.getTime())) {
+ return parsed;
+ }
+
+ // Backward-compatible cleanup for accidentally double-suffixed UTC timestamps.
+ parsed = new Date(value.replace(/\+00:00Z$/, 'Z'));
+ if (!Number.isNaN(parsed.getTime())) {
+ return parsed;
+ }
+
+ return null;
+ }
+
+ /**
+ * Load pass predictions (with trajectory + ground track)
+ */
async function loadPasses() {
- let storedLat, storedLon;
-
- // Use ObserverLocation if available, otherwise fall back to localStorage
- if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
- const shared = ObserverLocation.getShared();
- storedLat = shared?.lat?.toString();
- storedLon = shared?.lon?.toString();
- } else {
- storedLat = localStorage.getItem('observerLat');
- storedLon = localStorage.getItem('observerLon');
- }
-
+ let storedLat, storedLon;
+
+ // Use ObserverLocation if available, otherwise fall back to localStorage
+ if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
+ const shared = ObserverLocation.getShared();
+ storedLat = shared?.lat?.toString();
+ storedLon = shared?.lon?.toString();
+ } else {
+ storedLat = localStorage.getItem('observerLat');
+ storedLon = localStorage.getItem('observerLon');
+ }
+
if (!storedLat || !storedLon) {
passes = [];
selectedPassIndex = -1;
@@ -544,12 +547,12 @@ const WeatherSat = (function() {
updateGroundTrack(null);
return;
}
-
- try {
- const url = `/weather-sat/passes?latitude=${storedLat}&longitude=${storedLon}&hours=24&min_elevation=15&trajectory=true&ground_track=true`;
- const response = await fetch(url);
- const data = await response.json();
-
+
+ try {
+ const url = `/weather-sat/passes?latitude=${storedLat}&longitude=${storedLon}&hours=24&min_elevation=15&trajectory=true&ground_track=true`;
+ const response = await fetch(url);
+ const data = await response.json();
+
if (data.status === 'ok') {
passes = data.passes || [];
selectedPassIndex = -1;
@@ -567,224 +570,224 @@ const WeatherSat = (function() {
} catch (err) {
console.error('Failed to load passes:', err);
}
- }
-
- /**
- * Select a pass to display in polar plot and map
- */
- function selectPass(index) {
- if (index < 0 || index >= passes.length) return;
- selectedPassIndex = index;
- const pass = passes[index];
-
- // Highlight active card
- document.querySelectorAll('.wxsat-pass-card').forEach((card, i) => {
- card.classList.toggle('selected', i === index);
- });
-
- // Update polar plot
- drawPolarPlot(pass);
-
- // Update ground track
- updateGroundTrack(pass);
-
- // Update polar panel subtitle
- const polarSat = document.getElementById('wxsatPolarSat');
- if (polarSat) polarSat.textContent = `${pass.name} ${pass.maxEl}\u00b0`;
- }
-
- /**
- * Render pass predictions list
- */
- function renderPasses(passList) {
- const container = document.getElementById('wxsatPassesList');
- const countEl = document.getElementById('wxsatPassesCount');
-
- if (countEl) countEl.textContent = passList.length;
-
- if (!container) return;
-
- if (passList.length === 0) {
- const hasLocation = localStorage.getItem('observerLat') !== null;
- container.innerHTML = `
-
-
${hasLocation ? 'No passes in next 24h' : 'Set location to see pass predictions'}
-
- `;
- return;
- }
-
- container.innerHTML = passList.map((pass, idx) => {
- const modeClass = pass.mode === 'APT' ? 'apt' : 'lrpt';
- const timeStr = pass.startTime || '--';
- const now = new Date();
- const passStart = parsePassDate(pass.startTimeISO);
- const diffMs = passStart ? passStart - now : NaN;
- const diffMins = Number.isFinite(diffMs) ? Math.floor(diffMs / 60000) : NaN;
- const isSelected = idx === selectedPassIndex;
-
- let countdown = '--';
- if (!Number.isFinite(diffMs)) {
- countdown = '--';
- } else if (diffMs < 0) {
- countdown = 'NOW';
- } else if (diffMins < 60) {
- countdown = `in ${diffMins}m`;
- } else {
- const hrs = Math.floor(diffMins / 60);
- const mins = diffMins % 60;
- countdown = `in ${hrs}h${mins}m`;
- }
-
- return `
-
-
- ${escapeHtml(pass.name)}
- ${escapeHtml(pass.mode)}
-
-
- Time
- ${escapeHtml(timeStr)}
- Max El
- ${pass.maxEl}°
- Duration
- ${pass.duration} min
- Freq
- ${pass.frequency} MHz
-
-
- ${pass.quality}
- ${countdown}
-
-
-
-
-
- `;
- }).join('');
- }
-
- // ========================
- // Polar Plot
- // ========================
-
- /**
- * Draw polar plot for a pass trajectory
- */
- function drawPolarPlot(pass) {
- const canvas = document.getElementById('wxsatPolarCanvas');
- if (!canvas) return;
-
- const ctx = canvas.getContext('2d');
- const w = canvas.width;
- const h = canvas.height;
- const cx = w / 2;
- const cy = h / 2;
- const r = Math.min(cx, cy) - 20;
-
- ctx.clearRect(0, 0, w, h);
-
- // Background
- ctx.fillStyle = '#0d1117';
- ctx.fillRect(0, 0, w, h);
-
- // Grid circles (30, 60, 90 deg elevation)
- ctx.strokeStyle = '#2a3040';
- ctx.lineWidth = 0.5;
- [90, 60, 30].forEach((el, i) => {
- const gr = r * (1 - el / 90);
- ctx.beginPath();
- ctx.arc(cx, cy, gr, 0, Math.PI * 2);
- ctx.stroke();
- // Label
- ctx.fillStyle = '#555';
- ctx.font = '9px Roboto Condensed, monospace';
- ctx.textAlign = 'left';
- ctx.fillText(el + '\u00b0', cx + gr + 3, cy - 2);
- });
-
- // Horizon circle
- ctx.strokeStyle = '#3a4050';
- ctx.lineWidth = 1;
- ctx.beginPath();
- ctx.arc(cx, cy, r, 0, Math.PI * 2);
- ctx.stroke();
-
- // Cardinal directions
- ctx.fillStyle = '#666';
- ctx.font = '10px Roboto Condensed, monospace';
- ctx.textAlign = 'center';
- ctx.textBaseline = 'middle';
- ctx.fillText('N', cx, cy - r - 10);
- ctx.fillText('S', cx, cy + r + 10);
- ctx.fillText('E', cx + r + 10, cy);
- ctx.fillText('W', cx - r - 10, cy);
-
- // Cross hairs
- ctx.strokeStyle = '#2a3040';
- ctx.lineWidth = 0.5;
- ctx.beginPath();
- ctx.moveTo(cx, cy - r);
- ctx.lineTo(cx, cy + r);
- ctx.moveTo(cx - r, cy);
- ctx.lineTo(cx + r, cy);
- ctx.stroke();
-
- // Trajectory
- const trajectory = pass.trajectory;
- if (!trajectory || trajectory.length === 0) return;
-
- const color = pass.mode === 'LRPT' ? '#00ff88' : '#00d4ff';
-
- ctx.beginPath();
- ctx.strokeStyle = color;
- ctx.lineWidth = 2;
-
- trajectory.forEach((pt, i) => {
- const elRad = (90 - pt.el) / 90;
- const azRad = (pt.az - 90) * Math.PI / 180; // offset: N is up
- const px = cx + r * elRad * Math.cos(azRad);
- const py = cy + r * elRad * Math.sin(azRad);
-
- if (i === 0) ctx.moveTo(px, py);
- else ctx.lineTo(px, py);
- });
- ctx.stroke();
-
- // Start point (green dot)
- const start = trajectory[0];
- const startR = (90 - start.el) / 90;
- const startAz = (start.az - 90) * Math.PI / 180;
- ctx.fillStyle = '#00ff88';
- ctx.beginPath();
- ctx.arc(cx + r * startR * Math.cos(startAz), cy + r * startR * Math.sin(startAz), 4, 0, Math.PI * 2);
- ctx.fill();
-
- // End point (red dot)
- const end = trajectory[trajectory.length - 1];
- const endR = (90 - end.el) / 90;
- const endAz = (end.az - 90) * Math.PI / 180;
- ctx.fillStyle = '#ff4444';
- ctx.beginPath();
- ctx.arc(cx + r * endR * Math.cos(endAz), cy + r * endR * Math.sin(endAz), 4, 0, Math.PI * 2);
- ctx.fill();
-
- // Max elevation marker
- let maxEl = 0;
- let maxPt = trajectory[0];
- trajectory.forEach(pt => { if (pt.el > maxEl) { maxEl = pt.el; maxPt = pt; } });
- const maxR = (90 - maxPt.el) / 90;
- const maxAz = (maxPt.az - 90) * Math.PI / 180;
- ctx.fillStyle = color;
- ctx.beginPath();
- ctx.arc(cx + r * maxR * Math.cos(maxAz), cy + r * maxR * Math.sin(maxAz), 3, 0, Math.PI * 2);
- ctx.fill();
- ctx.fillStyle = color;
- ctx.font = '9px Roboto Condensed, monospace';
- ctx.textAlign = 'center';
- ctx.fillText(Math.round(maxEl) + '\u00b0', cx + r * maxR * Math.cos(maxAz), cy + r * maxR * Math.sin(maxAz) - 8);
- }
-
+ }
+
+ /**
+ * Select a pass to display in polar plot and map
+ */
+ function selectPass(index) {
+ if (index < 0 || index >= passes.length) return;
+ selectedPassIndex = index;
+ const pass = passes[index];
+
+ // Highlight active card
+ document.querySelectorAll('.wxsat-pass-card').forEach((card, i) => {
+ card.classList.toggle('selected', i === index);
+ });
+
+ // Update polar plot
+ drawPolarPlot(pass);
+
+ // Update ground track
+ updateGroundTrack(pass);
+
+ // Update polar panel subtitle
+ const polarSat = document.getElementById('wxsatPolarSat');
+ if (polarSat) polarSat.textContent = `${pass.name} ${pass.maxEl}\u00b0`;
+ }
+
+ /**
+ * Render pass predictions list
+ */
+ function renderPasses(passList) {
+ const container = document.getElementById('wxsatPassesList');
+ const countEl = document.getElementById('wxsatPassesCount');
+
+ if (countEl) countEl.textContent = passList.length;
+
+ if (!container) return;
+
+ if (passList.length === 0) {
+ const hasLocation = localStorage.getItem('observerLat') !== null;
+ container.innerHTML = `
+
+
${hasLocation ? 'No passes in next 24h' : 'Set location to see pass predictions'}
+
+ `;
+ return;
+ }
+
+ container.innerHTML = passList.map((pass, idx) => {
+ const modeClass = pass.mode === 'APT' ? 'apt' : 'lrpt';
+ const timeStr = pass.startTime || '--';
+ const now = new Date();
+ const passStart = parsePassDate(pass.startTimeISO);
+ const diffMs = passStart ? passStart - now : NaN;
+ const diffMins = Number.isFinite(diffMs) ? Math.floor(diffMs / 60000) : NaN;
+ const isSelected = idx === selectedPassIndex;
+
+ let countdown = '--';
+ if (!Number.isFinite(diffMs)) {
+ countdown = '--';
+ } else if (diffMs < 0) {
+ countdown = 'NOW';
+ } else if (diffMins < 60) {
+ countdown = `in ${diffMins}m`;
+ } else {
+ const hrs = Math.floor(diffMins / 60);
+ const mins = diffMins % 60;
+ countdown = `in ${hrs}h${mins}m`;
+ }
+
+ return `
+
+
+ ${escapeHtml(pass.name)}
+ ${escapeHtml(pass.mode)}
+
+
+ Time
+ ${escapeHtml(timeStr)}
+ Max El
+ ${pass.maxEl}°
+ Duration
+ ${pass.duration} min
+ Freq
+ ${pass.frequency} MHz
+
+
+ ${pass.quality}
+ ${countdown}
+
+
+
+
+
+ `;
+ }).join('');
+ }
+
+ // ========================
+ // Polar Plot
+ // ========================
+
+ /**
+ * Draw polar plot for a pass trajectory
+ */
+ function drawPolarPlot(pass) {
+ const canvas = document.getElementById('wxsatPolarCanvas');
+ if (!canvas) return;
+
+ const ctx = canvas.getContext('2d');
+ const w = canvas.width;
+ const h = canvas.height;
+ const cx = w / 2;
+ const cy = h / 2;
+ const r = Math.min(cx, cy) - 20;
+
+ ctx.clearRect(0, 0, w, h);
+
+ // Background
+ ctx.fillStyle = '#0d1117';
+ ctx.fillRect(0, 0, w, h);
+
+ // Grid circles (30, 60, 90 deg elevation)
+ ctx.strokeStyle = '#2a3040';
+ ctx.lineWidth = 0.5;
+ [90, 60, 30].forEach((el, i) => {
+ const gr = r * (1 - el / 90);
+ ctx.beginPath();
+ ctx.arc(cx, cy, gr, 0, Math.PI * 2);
+ ctx.stroke();
+ // Label
+ ctx.fillStyle = '#555';
+ ctx.font = '9px Roboto Condensed, monospace';
+ ctx.textAlign = 'left';
+ ctx.fillText(el + '\u00b0', cx + gr + 3, cy - 2);
+ });
+
+ // Horizon circle
+ ctx.strokeStyle = '#3a4050';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.arc(cx, cy, r, 0, Math.PI * 2);
+ ctx.stroke();
+
+ // Cardinal directions
+ ctx.fillStyle = '#666';
+ ctx.font = '10px Roboto Condensed, monospace';
+ ctx.textAlign = 'center';
+ ctx.textBaseline = 'middle';
+ ctx.fillText('N', cx, cy - r - 10);
+ ctx.fillText('S', cx, cy + r + 10);
+ ctx.fillText('E', cx + r + 10, cy);
+ ctx.fillText('W', cx - r - 10, cy);
+
+ // Cross hairs
+ ctx.strokeStyle = '#2a3040';
+ ctx.lineWidth = 0.5;
+ ctx.beginPath();
+ ctx.moveTo(cx, cy - r);
+ ctx.lineTo(cx, cy + r);
+ ctx.moveTo(cx - r, cy);
+ ctx.lineTo(cx + r, cy);
+ ctx.stroke();
+
+ // Trajectory
+ const trajectory = pass.trajectory;
+ if (!trajectory || trajectory.length === 0) return;
+
+ const color = pass.mode === 'LRPT' ? '#00ff88' : '#00d4ff';
+
+ ctx.beginPath();
+ ctx.strokeStyle = color;
+ ctx.lineWidth = 2;
+
+ trajectory.forEach((pt, i) => {
+ const elRad = (90 - pt.el) / 90;
+ const azRad = (pt.az - 90) * Math.PI / 180; // offset: N is up
+ const px = cx + r * elRad * Math.cos(azRad);
+ const py = cy + r * elRad * Math.sin(azRad);
+
+ if (i === 0) ctx.moveTo(px, py);
+ else ctx.lineTo(px, py);
+ });
+ ctx.stroke();
+
+ // Start point (green dot)
+ const start = trajectory[0];
+ const startR = (90 - start.el) / 90;
+ const startAz = (start.az - 90) * Math.PI / 180;
+ ctx.fillStyle = '#00ff88';
+ ctx.beginPath();
+ ctx.arc(cx + r * startR * Math.cos(startAz), cy + r * startR * Math.sin(startAz), 4, 0, Math.PI * 2);
+ ctx.fill();
+
+ // End point (red dot)
+ const end = trajectory[trajectory.length - 1];
+ const endR = (90 - end.el) / 90;
+ const endAz = (end.az - 90) * Math.PI / 180;
+ ctx.fillStyle = '#ff4444';
+ ctx.beginPath();
+ ctx.arc(cx + r * endR * Math.cos(endAz), cy + r * endR * Math.sin(endAz), 4, 0, Math.PI * 2);
+ ctx.fill();
+
+ // Max elevation marker
+ let maxEl = 0;
+ let maxPt = trajectory[0];
+ trajectory.forEach(pt => { if (pt.el > maxEl) { maxEl = pt.el; maxPt = pt; } });
+ const maxR = (90 - maxPt.el) / 90;
+ const maxAz = (maxPt.az - 90) * Math.PI / 180;
+ ctx.fillStyle = color;
+ ctx.beginPath();
+ ctx.arc(cx + r * maxR * Math.cos(maxAz), cy + r * maxR * Math.sin(maxAz), 3, 0, Math.PI * 2);
+ ctx.fill();
+ ctx.fillStyle = color;
+ ctx.font = '9px Roboto Condensed, monospace';
+ ctx.textAlign = 'center';
+ ctx.fillText(Math.round(maxEl) + '\u00b0', cx + r * maxR * Math.cos(maxAz), cy + r * maxR * Math.sin(maxAz) - 8);
+ }
+
// ========================
// Ground Track Map
// ========================
@@ -1121,230 +1124,230 @@ const WeatherSat = (function() {
satCrosshairMarker.setTooltipContent(infoText);
}
}
-
- // ========================
- // Countdown
- // ========================
-
- /**
- * Start the countdown interval timer
- */
- function startCountdownTimer() {
- if (countdownInterval) clearInterval(countdownInterval);
- countdownInterval = setInterval(updateCountdownFromPasses, 1000);
- }
-
- /**
- * Update countdown display from passes array
- */
- function updateCountdownFromPasses() {
- const now = new Date();
- let nextPass = null;
- let isActive = false;
-
- for (const pass of passes) {
- const start = parsePassDate(pass.startTimeISO);
- const end = parsePassDate(pass.endTimeISO);
- if (!start || !end) {
- continue;
- }
- if (end > now) {
- nextPass = pass;
- isActive = start <= now;
- break;
- }
- }
-
- const daysEl = document.getElementById('wxsatCdDays');
- const hoursEl = document.getElementById('wxsatCdHours');
- const minsEl = document.getElementById('wxsatCdMins');
- const secsEl = document.getElementById('wxsatCdSecs');
- const satEl = document.getElementById('wxsatCountdownSat');
- const detailEl = document.getElementById('wxsatCountdownDetail');
- const boxes = document.getElementById('wxsatCountdownBoxes');
-
- if (!nextPass) {
- if (daysEl) daysEl.textContent = '--';
- if (hoursEl) hoursEl.textContent = '--';
- if (minsEl) minsEl.textContent = '--';
- if (secsEl) secsEl.textContent = '--';
- if (satEl) satEl.textContent = '--';
- if (detailEl) detailEl.textContent = 'No passes predicted';
- if (boxes) boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
- b.classList.remove('imminent', 'active');
- });
- return;
- }
-
- const target = parsePassDate(nextPass.startTimeISO);
- if (!target) {
- if (daysEl) daysEl.textContent = '--';
- if (hoursEl) hoursEl.textContent = '--';
- if (minsEl) minsEl.textContent = '--';
- if (secsEl) secsEl.textContent = '--';
- if (satEl) satEl.textContent = '--';
- if (detailEl) detailEl.textContent = 'Invalid pass time';
- if (boxes) boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
- b.classList.remove('imminent', 'active');
- });
- return;
- }
- let diffMs = target - now;
-
- if (isActive) {
- diffMs = 0;
- }
-
- const totalSec = Math.max(0, Math.floor(diffMs / 1000));
- const d = Math.floor(totalSec / 86400);
- const h = Math.floor((totalSec % 86400) / 3600);
- const m = Math.floor((totalSec % 3600) / 60);
- const s = totalSec % 60;
-
- if (daysEl) daysEl.textContent = d.toString().padStart(2, '0');
- if (hoursEl) hoursEl.textContent = h.toString().padStart(2, '0');
- if (minsEl) minsEl.textContent = m.toString().padStart(2, '0');
- if (secsEl) secsEl.textContent = s.toString().padStart(2, '0');
- if (satEl) satEl.textContent = `${nextPass.name} ${nextPass.frequency} MHz`;
- if (detailEl) {
- if (isActive) {
- detailEl.textContent = `ACTIVE - ${nextPass.maxEl}\u00b0 max el`;
- } else {
- detailEl.textContent = `${nextPass.maxEl}\u00b0 max el / ${nextPass.duration} min`;
- }
- }
-
- // Countdown box states
- if (boxes) {
- const isImminent = totalSec < 600 && totalSec > 0; // < 10 min
- boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
- b.classList.toggle('imminent', isImminent);
- b.classList.toggle('active', isActive);
- });
- }
-
+
+ // ========================
+ // Countdown
+ // ========================
+
+ /**
+ * Start the countdown interval timer
+ */
+ function startCountdownTimer() {
+ if (countdownInterval) clearInterval(countdownInterval);
+ countdownInterval = setInterval(updateCountdownFromPasses, 1000);
+ }
+
+ /**
+ * Update countdown display from passes array
+ */
+ function updateCountdownFromPasses() {
+ const now = new Date();
+ let nextPass = null;
+ let isActive = false;
+
+ for (const pass of passes) {
+ const start = parsePassDate(pass.startTimeISO);
+ const end = parsePassDate(pass.endTimeISO);
+ if (!start || !end) {
+ continue;
+ }
+ if (end > now) {
+ nextPass = pass;
+ isActive = start <= now;
+ break;
+ }
+ }
+
+ const daysEl = document.getElementById('wxsatCdDays');
+ const hoursEl = document.getElementById('wxsatCdHours');
+ const minsEl = document.getElementById('wxsatCdMins');
+ const secsEl = document.getElementById('wxsatCdSecs');
+ const satEl = document.getElementById('wxsatCountdownSat');
+ const detailEl = document.getElementById('wxsatCountdownDetail');
+ const boxes = document.getElementById('wxsatCountdownBoxes');
+
+ if (!nextPass) {
+ if (daysEl) daysEl.textContent = '--';
+ if (hoursEl) hoursEl.textContent = '--';
+ if (minsEl) minsEl.textContent = '--';
+ if (secsEl) secsEl.textContent = '--';
+ if (satEl) satEl.textContent = '--';
+ if (detailEl) detailEl.textContent = 'No passes predicted';
+ if (boxes) boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
+ b.classList.remove('imminent', 'active');
+ });
+ return;
+ }
+
+ const target = parsePassDate(nextPass.startTimeISO);
+ if (!target) {
+ if (daysEl) daysEl.textContent = '--';
+ if (hoursEl) hoursEl.textContent = '--';
+ if (minsEl) minsEl.textContent = '--';
+ if (secsEl) secsEl.textContent = '--';
+ if (satEl) satEl.textContent = '--';
+ if (detailEl) detailEl.textContent = 'Invalid pass time';
+ if (boxes) boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
+ b.classList.remove('imminent', 'active');
+ });
+ return;
+ }
+ let diffMs = target - now;
+
+ if (isActive) {
+ diffMs = 0;
+ }
+
+ const totalSec = Math.max(0, Math.floor(diffMs / 1000));
+ const d = Math.floor(totalSec / 86400);
+ const h = Math.floor((totalSec % 86400) / 3600);
+ const m = Math.floor((totalSec % 3600) / 60);
+ const s = totalSec % 60;
+
+ if (daysEl) daysEl.textContent = d.toString().padStart(2, '0');
+ if (hoursEl) hoursEl.textContent = h.toString().padStart(2, '0');
+ if (minsEl) minsEl.textContent = m.toString().padStart(2, '0');
+ if (secsEl) secsEl.textContent = s.toString().padStart(2, '0');
+ if (satEl) satEl.textContent = `${nextPass.name} ${nextPass.frequency} MHz`;
+ if (detailEl) {
+ if (isActive) {
+ detailEl.textContent = `ACTIVE - ${nextPass.maxEl}\u00b0 max el`;
+ } else {
+ detailEl.textContent = `${nextPass.maxEl}\u00b0 max el / ${nextPass.duration} min`;
+ }
+ }
+
+ // Countdown box states
+ if (boxes) {
+ const isImminent = totalSec < 600 && totalSec > 0; // < 10 min
+ boxes.querySelectorAll('.wxsat-countdown-box').forEach(b => {
+ b.classList.toggle('imminent', isImminent);
+ b.classList.toggle('active', isActive);
+ });
+ }
+
// Keep timeline cursor in sync
updateTimelineCursor();
// Keep selected satellite marker synchronized with time progression.
updateSatelliteCrosshair(getSelectedPass());
}
-
- // ========================
- // Timeline
- // ========================
-
- /**
- * Render 24h timeline with pass markers
- */
- function renderTimeline(passList) {
- const track = document.getElementById('wxsatTimelineTrack');
- const cursor = document.getElementById('wxsatTimelineCursor');
- if (!track) return;
-
- // Clear existing pass markers
- track.querySelectorAll('.wxsat-timeline-pass').forEach(el => el.remove());
-
- const now = new Date();
- const dayStart = new Date(now);
- dayStart.setHours(0, 0, 0, 0);
- const dayMs = 24 * 60 * 60 * 1000;
-
- passList.forEach((pass, idx) => {
- const start = parsePassDate(pass.startTimeISO);
- const end = parsePassDate(pass.endTimeISO);
- if (!start || !end) return;
-
- const startPct = Math.max(0, Math.min(100, ((start - dayStart) / dayMs) * 100));
- const endPct = Math.max(0, Math.min(100, ((end - dayStart) / dayMs) * 100));
- const widthPct = Math.max(0.5, endPct - startPct);
-
- const marker = document.createElement('div');
- marker.className = `wxsat-timeline-pass ${pass.mode === 'LRPT' ? 'lrpt' : 'apt'}`;
- marker.style.left = startPct + '%';
- marker.style.width = widthPct + '%';
- marker.title = `${pass.name} ${pass.startTime} (${pass.maxEl}\u00b0)`;
- marker.onclick = () => selectPass(idx);
- track.appendChild(marker);
- });
-
- // Update cursor position
- updateTimelineCursor();
- }
-
- /**
- * Update timeline cursor to current time
- */
- function updateTimelineCursor() {
- const cursor = document.getElementById('wxsatTimelineCursor');
- if (!cursor) return;
-
- const now = new Date();
- const dayStart = new Date(now);
- dayStart.setHours(0, 0, 0, 0);
- const pct = ((now - dayStart) / (24 * 60 * 60 * 1000)) * 100;
- cursor.style.left = pct + '%';
- }
-
- // ========================
- // Auto-Scheduler
- // ========================
-
- /**
- * Toggle auto-scheduler
- */
- async function toggleScheduler(source) {
- const checked = source?.checked ?? false;
-
- const stripCheckbox = document.getElementById('wxsatAutoSchedule');
- const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
-
- // Sync both checkboxes to the source of truth
- if (stripCheckbox) stripCheckbox.checked = checked;
- if (sidebarCheckbox) sidebarCheckbox.checked = checked;
-
- if (checked) {
- await enableScheduler();
- } else {
- await disableScheduler();
- }
- }
-
- /**
- * Enable auto-scheduler
- */
+
+ // ========================
+ // Timeline
+ // ========================
+
+ /**
+ * Render 24h timeline with pass markers
+ */
+ function renderTimeline(passList) {
+ const track = document.getElementById('wxsatTimelineTrack');
+ const cursor = document.getElementById('wxsatTimelineCursor');
+ if (!track) return;
+
+ // Clear existing pass markers
+ track.querySelectorAll('.wxsat-timeline-pass').forEach(el => el.remove());
+
+ const now = new Date();
+ const dayStart = new Date(now);
+ dayStart.setHours(0, 0, 0, 0);
+ const dayMs = 24 * 60 * 60 * 1000;
+
+ passList.forEach((pass, idx) => {
+ const start = parsePassDate(pass.startTimeISO);
+ const end = parsePassDate(pass.endTimeISO);
+ if (!start || !end) return;
+
+ const startPct = Math.max(0, Math.min(100, ((start - dayStart) / dayMs) * 100));
+ const endPct = Math.max(0, Math.min(100, ((end - dayStart) / dayMs) * 100));
+ const widthPct = Math.max(0.5, endPct - startPct);
+
+ const marker = document.createElement('div');
+ marker.className = `wxsat-timeline-pass ${pass.mode === 'LRPT' ? 'lrpt' : 'apt'}`;
+ marker.style.left = startPct + '%';
+ marker.style.width = widthPct + '%';
+ marker.title = `${pass.name} ${pass.startTime} (${pass.maxEl}\u00b0)`;
+ marker.onclick = () => selectPass(idx);
+ track.appendChild(marker);
+ });
+
+ // Update cursor position
+ updateTimelineCursor();
+ }
+
+ /**
+ * Update timeline cursor to current time
+ */
+ function updateTimelineCursor() {
+ const cursor = document.getElementById('wxsatTimelineCursor');
+ if (!cursor) return;
+
+ const now = new Date();
+ const dayStart = new Date(now);
+ dayStart.setHours(0, 0, 0, 0);
+ const pct = ((now - dayStart) / (24 * 60 * 60 * 1000)) * 100;
+ cursor.style.left = pct + '%';
+ }
+
+ // ========================
+ // Auto-Scheduler
+ // ========================
+
+ /**
+ * Toggle auto-scheduler
+ */
+ async function toggleScheduler(source) {
+ const checked = source?.checked ?? false;
+
+ const stripCheckbox = document.getElementById('wxsatAutoSchedule');
+ const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
+
+ // Sync both checkboxes to the source of truth
+ if (stripCheckbox) stripCheckbox.checked = checked;
+ if (sidebarCheckbox) sidebarCheckbox.checked = checked;
+
+ if (checked) {
+ await enableScheduler();
+ } else {
+ await disableScheduler();
+ }
+ }
+
+ /**
+ * Enable auto-scheduler
+ */
async function enableScheduler() {
- let lat, lon;
- if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
- const shared = ObserverLocation.getShared();
- lat = shared?.lat;
- lon = shared?.lon;
- } else {
- lat = parseFloat(localStorage.getItem('observerLat'));
- lon = parseFloat(localStorage.getItem('observerLon'));
- }
-
- if (isNaN(lat) || isNaN(lon)) {
- showNotification('Weather Sat', 'Set observer location first');
- const stripCheckbox = document.getElementById('wxsatAutoSchedule');
- const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
- if (stripCheckbox) stripCheckbox.checked = false;
- if (sidebarCheckbox) sidebarCheckbox.checked = false;
- return;
- }
-
- const deviceSelect = document.getElementById('deviceSelect');
- const gainInput = document.getElementById('weatherSatGain');
- const biasTInput = document.getElementById('weatherSatBiasT');
-
+ let lat, lon;
+ if (window.ObserverLocation && ObserverLocation.isSharedEnabled()) {
+ const shared = ObserverLocation.getShared();
+ lat = shared?.lat;
+ lon = shared?.lon;
+ } else {
+ lat = parseFloat(localStorage.getItem('observerLat'));
+ lon = parseFloat(localStorage.getItem('observerLon'));
+ }
+
+ if (isNaN(lat) || isNaN(lon)) {
+ showNotification('Weather Sat', 'Set observer location first');
+ const stripCheckbox = document.getElementById('wxsatAutoSchedule');
+ const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
+ if (stripCheckbox) stripCheckbox.checked = false;
+ if (sidebarCheckbox) sidebarCheckbox.checked = false;
+ return;
+ }
+
+ const deviceSelect = document.getElementById('deviceSelect');
+ const gainInput = document.getElementById('weatherSatGain');
+ const biasTInput = document.getElementById('weatherSatBiasT');
+
try {
const response = await fetch('/weather-sat/schedule/enable', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
- latitude: lat,
- longitude: lon,
- device: parseInt(deviceSelect?.value || '0', 10),
- gain: parseFloat(gainInput?.value || '40'),
+ latitude: lat,
+ longitude: lon,
+ device: parseInt(deviceSelect?.value || '0', 10),
+ gain: parseFloat(gainInput?.value || '40'),
bias_t: biasTInput?.checked || false,
}),
});
@@ -1374,10 +1377,10 @@ const WeatherSat = (function() {
showNotification('Weather Sat', 'Failed to enable auto-scheduler');
}
}
-
- /**
- * Disable auto-scheduler
- */
+
+ /**
+ * Disable auto-scheduler
+ */
async function disableScheduler() {
try {
const response = await fetch('/weather-sat/schedule/disable', { method: 'POST' });
@@ -1390,13 +1393,13 @@ const WeatherSat = (function() {
if (!isRunning) stopStream();
showNotification('Weather Sat', 'Auto-scheduler disabled');
} catch (err) {
- console.error('Failed to disable scheduler:', err);
- }
- }
-
- /**
- * Check current scheduler status
- */
+ console.error('Failed to disable scheduler:', err);
+ }
+ }
+
+ /**
+ * Check current scheduler status
+ */
async function checkSchedulerStatus() {
try {
const response = await fetch('/weather-sat/schedule/status');
@@ -1406,249 +1409,249 @@ const WeatherSat = (function() {
updateSchedulerUI(data);
if (schedulerEnabled) startStream();
} catch (err) {
- // Scheduler endpoint may not exist yet
- }
- }
-
- /**
- * Update scheduler UI elements
- */
- function updateSchedulerUI(data) {
- const stripCheckbox = document.getElementById('wxsatAutoSchedule');
- const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
- const statusEl = document.getElementById('wxsatSchedulerStatus');
-
- if (stripCheckbox) stripCheckbox.checked = data.enabled;
- if (sidebarCheckbox) sidebarCheckbox.checked = data.enabled;
- if (statusEl) {
- if (data.enabled) {
- statusEl.textContent = `Active: ${data.scheduled_count || 0} passes queued`;
- statusEl.style.color = '#00ff88';
- } else {
- statusEl.textContent = 'Disabled';
- statusEl.style.color = '';
- }
- }
- }
-
- // ========================
- // Images
- // ========================
-
- /**
- * Load decoded images
- */
- async function loadImages() {
- try {
- const response = await fetch('/weather-sat/images');
- const data = await response.json();
-
- if (data.status === 'ok') {
- images = data.images || [];
- updateImageCount(images.length);
- renderGallery();
- }
- } catch (err) {
- console.error('Failed to load weather sat images:', err);
- }
- }
-
- /**
- * Update image count
- */
- function updateImageCount(count) {
- const countEl = document.getElementById('wxsatImageCount');
- const stripCount = document.getElementById('wxsatStripImageCount');
- if (countEl) countEl.textContent = count;
- if (stripCount) stripCount.textContent = count;
- }
-
- /**
- * Render image gallery grouped by date
- */
- function renderGallery() {
- const gallery = document.getElementById('wxsatGallery');
- if (!gallery) return;
-
- if (images.length === 0) {
- gallery.innerHTML = `
-
-
-
No images decoded yet
-
Select a satellite pass and start capturing
-
- `;
- return;
- }
-
- // Sort by timestamp descending
- const sorted = [...images].sort((a, b) => {
- return new Date(b.timestamp || 0) - new Date(a.timestamp || 0);
- });
-
- // Group by date
- const groups = {};
- sorted.forEach(img => {
- const dateKey = img.timestamp
- ? new Date(img.timestamp).toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' })
- : 'Unknown Date';
- if (!groups[dateKey]) groups[dateKey] = [];
- groups[dateKey].push(img);
- });
-
- let html = '';
- for (const [date, imgs] of Object.entries(groups)) {
- html += ``;
- html += imgs.map(img => {
- const fn = escapeHtml(img.filename || img.url.split('/').pop());
- return `
-
-
-
})
-
-
${escapeHtml(img.satellite)}
-
${escapeHtml(img.product || img.mode)}
-
${formatTimestamp(img.timestamp)}
-
-
-
-
`;
- }).join('');
- }
-
- gallery.innerHTML = html;
- }
-
- /**
- * Show full-size image
- */
- function showImage(url, satellite, product, filename) {
- currentModalFilename = filename || null;
-
- let modal = document.getElementById('wxsatImageModal');
- if (!modal) {
- modal = document.createElement('div');
- modal.id = 'wxsatImageModal';
- modal.className = 'wxsat-image-modal';
- modal.innerHTML = `
-
-
-
-
- `;
- modal.addEventListener('click', (e) => {
- if (e.target === modal) closeImage();
- });
- document.body.appendChild(modal);
- }
-
- modal.querySelector('img').src = url;
- const info = modal.querySelector('.wxsat-modal-info');
- if (info) {
- info.textContent = `${satellite || ''} ${product ? '// ' + product : ''}`;
- }
- modal.classList.add('show');
- }
-
- /**
- * Close image modal
- */
- function closeImage() {
- const modal = document.getElementById('wxsatImageModal');
- if (modal) modal.classList.remove('show');
- }
-
- /**
- * Delete a single image
- */
- async function deleteImage(filename) {
- if (!filename) return;
- if (!confirm(`Delete this image?`)) return;
-
- try {
- const response = await fetch(`/weather-sat/images/${encodeURIComponent(filename)}`, { method: 'DELETE' });
- const data = await response.json();
-
- if (data.status === 'deleted') {
- images = images.filter(img => {
- const imgFn = img.filename || img.url.split('/').pop();
- return imgFn !== filename;
- });
- updateImageCount(images.length);
- renderGallery();
- closeImage();
- } else {
- showNotification('Weather Sat', data.message || 'Failed to delete image');
- }
- } catch (err) {
- console.error('Failed to delete image:', err);
- showNotification('Weather Sat', 'Failed to delete image');
- }
- }
-
- /**
- * Delete all images
- */
- async function deleteAllImages() {
- if (images.length === 0) return;
- if (!confirm(`Delete all ${images.length} decoded images?`)) return;
-
- try {
- const response = await fetch('/weather-sat/images', { method: 'DELETE' });
- const data = await response.json();
-
- if (data.status === 'ok') {
- images = [];
- updateImageCount(0);
- renderGallery();
- showNotification('Weather Sat', `Deleted ${data.deleted} images`);
- } else {
- showNotification('Weather Sat', 'Failed to delete images');
- }
- } catch (err) {
- console.error('Failed to delete all images:', err);
- showNotification('Weather Sat', 'Failed to delete images');
- }
- }
-
- /**
- * Format timestamp
- */
- function formatTimestamp(isoString) {
- if (!isoString) return '--';
- try {
- return new Date(isoString).toLocaleString();
- } catch {
- return isoString;
- }
- }
-
- /**
- * Escape HTML
- */
- function escapeHtml(text) {
- if (!text) return '';
- const div = document.createElement('div');
- div.textContent = text;
- return div.innerHTML;
- }
-
+ // Scheduler endpoint may not exist yet
+ }
+ }
+
+ /**
+ * Update scheduler UI elements
+ */
+ function updateSchedulerUI(data) {
+ const stripCheckbox = document.getElementById('wxsatAutoSchedule');
+ const sidebarCheckbox = document.getElementById('wxsatSidebarAutoSchedule');
+ const statusEl = document.getElementById('wxsatSchedulerStatus');
+
+ if (stripCheckbox) stripCheckbox.checked = data.enabled;
+ if (sidebarCheckbox) sidebarCheckbox.checked = data.enabled;
+ if (statusEl) {
+ if (data.enabled) {
+ statusEl.textContent = `Active: ${data.scheduled_count || 0} passes queued`;
+ statusEl.style.color = '#00ff88';
+ } else {
+ statusEl.textContent = 'Disabled';
+ statusEl.style.color = '';
+ }
+ }
+ }
+
+ // ========================
+ // Images
+ // ========================
+
+ /**
+ * Load decoded images
+ */
+ async function loadImages() {
+ try {
+ const response = await fetch('/weather-sat/images');
+ const data = await response.json();
+
+ if (data.status === 'ok') {
+ images = data.images || [];
+ updateImageCount(images.length);
+ renderGallery();
+ }
+ } catch (err) {
+ console.error('Failed to load weather sat images:', err);
+ }
+ }
+
+ /**
+ * Update image count
+ */
+ function updateImageCount(count) {
+ const countEl = document.getElementById('wxsatImageCount');
+ const stripCount = document.getElementById('wxsatStripImageCount');
+ if (countEl) countEl.textContent = count;
+ if (stripCount) stripCount.textContent = count;
+ }
+
+ /**
+ * Render image gallery grouped by date
+ */
+ function renderGallery() {
+ const gallery = document.getElementById('wxsatGallery');
+ if (!gallery) return;
+
+ if (images.length === 0) {
+ gallery.innerHTML = `
+
+
+
No images decoded yet
+
Select a satellite pass and start capturing
+
+ `;
+ return;
+ }
+
+ // Sort by timestamp descending
+ const sorted = [...images].sort((a, b) => {
+ return new Date(b.timestamp || 0) - new Date(a.timestamp || 0);
+ });
+
+ // Group by date
+ const groups = {};
+ sorted.forEach(img => {
+ const dateKey = img.timestamp
+ ? new Date(img.timestamp).toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' })
+ : 'Unknown Date';
+ if (!groups[dateKey]) groups[dateKey] = [];
+ groups[dateKey].push(img);
+ });
+
+ let html = '';
+ for (const [date, imgs] of Object.entries(groups)) {
+ html += ``;
+ html += imgs.map(img => {
+ const fn = escapeHtml(img.filename || img.url.split('/').pop());
+ return `
+
+
+
})
+
+
${escapeHtml(img.satellite)}
+
${escapeHtml(img.product || img.mode)}
+
${formatTimestamp(img.timestamp)}
+
+
+
+
`;
+ }).join('');
+ }
+
+ gallery.innerHTML = html;
+ }
+
+ /**
+ * Show full-size image
+ */
+ function showImage(url, satellite, product, filename) {
+ currentModalFilename = filename || null;
+
+ let modal = document.getElementById('wxsatImageModal');
+ if (!modal) {
+ modal = document.createElement('div');
+ modal.id = 'wxsatImageModal';
+ modal.className = 'wxsat-image-modal';
+ modal.innerHTML = `
+
+
+
+
+ `;
+ modal.addEventListener('click', (e) => {
+ if (e.target === modal) closeImage();
+ });
+ document.body.appendChild(modal);
+ }
+
+ modal.querySelector('img').src = url;
+ const info = modal.querySelector('.wxsat-modal-info');
+ if (info) {
+ info.textContent = `${satellite || ''} ${product ? '// ' + product : ''}`;
+ }
+ modal.classList.add('show');
+ }
+
+ /**
+ * Close image modal
+ */
+ function closeImage() {
+ const modal = document.getElementById('wxsatImageModal');
+ if (modal) modal.classList.remove('show');
+ }
+
+ /**
+ * Delete a single image
+ */
+ async function deleteImage(filename) {
+ if (!filename) return;
+ if (!confirm(`Delete this image?`)) return;
+
+ try {
+ const response = await fetch(`/weather-sat/images/${encodeURIComponent(filename)}`, { method: 'DELETE' });
+ const data = await response.json();
+
+ if (data.status === 'deleted') {
+ images = images.filter(img => {
+ const imgFn = img.filename || img.url.split('/').pop();
+ return imgFn !== filename;
+ });
+ updateImageCount(images.length);
+ renderGallery();
+ closeImage();
+ } else {
+ showNotification('Weather Sat', data.message || 'Failed to delete image');
+ }
+ } catch (err) {
+ console.error('Failed to delete image:', err);
+ showNotification('Weather Sat', 'Failed to delete image');
+ }
+ }
+
+ /**
+ * Delete all images
+ */
+ async function deleteAllImages() {
+ if (images.length === 0) return;
+ if (!confirm(`Delete all ${images.length} decoded images?`)) return;
+
+ try {
+ const response = await fetch('/weather-sat/images', { method: 'DELETE' });
+ const data = await response.json();
+
+ if (data.status === 'ok') {
+ images = [];
+ updateImageCount(0);
+ renderGallery();
+ showNotification('Weather Sat', `Deleted ${data.deleted} images`);
+ } else {
+ showNotification('Weather Sat', 'Failed to delete images');
+ }
+ } catch (err) {
+ console.error('Failed to delete all images:', err);
+ showNotification('Weather Sat', 'Failed to delete images');
+ }
+ }
+
+ /**
+ * Format timestamp
+ */
+ function formatTimestamp(isoString) {
+ if (!isoString) return '--';
+ try {
+ return new Date(isoString).toLocaleString();
+ } catch {
+ return isoString;
+ }
+ }
+
+ /**
+ * Escape HTML
+ */
+ function escapeHtml(text) {
+ if (!text) return '';
+ const div = document.createElement('div');
+ div.textContent = text;
+ return div.innerHTML;
+ }
+
/**
* Invalidate ground map size (call after container becomes visible)
*/
@@ -1662,151 +1665,151 @@ const WeatherSat = (function() {
updateGroundTrack(getSelectedPass());
}, 100);
}
-
- // ========================
- // Decoder Console
- // ========================
-
- /**
- * Add an entry to the decoder console log
- */
- function addConsoleEntry(message, logType) {
- const log = document.getElementById('wxsatConsoleLog');
- if (!log) return;
-
- const entry = document.createElement('div');
- entry.className = `wxsat-console-entry wxsat-log-${logType || 'info'}`;
- entry.textContent = message;
- log.appendChild(entry);
-
- consoleEntries.push(entry);
-
- // Cap at 200 entries
- while (consoleEntries.length > 200) {
- const old = consoleEntries.shift();
- if (old.parentNode) old.parentNode.removeChild(old);
- }
-
- // Auto-scroll to bottom
- log.scrollTop = log.scrollHeight;
- }
-
- /**
- * Update the phase indicator steps
- */
- function updatePhaseIndicator(phase) {
- if (!phase || phase === currentPhase) return;
- currentPhase = phase;
-
- const phases = ['tuning', 'listening', 'signal_detected', 'decoding', 'complete'];
- const phaseIndex = phases.indexOf(phase);
- const isError = phase === 'error';
-
- document.querySelectorAll('#wxsatPhaseIndicator .wxsat-phase-step').forEach(step => {
- const stepPhase = step.dataset.phase;
- const stepIndex = phases.indexOf(stepPhase);
-
- step.classList.remove('active', 'completed', 'error');
-
- if (isError) {
- if (stepPhase === currentPhase || stepIndex === phaseIndex) {
- step.classList.add('error');
- }
- } else if (stepIndex === phaseIndex) {
- step.classList.add('active');
- } else if (stepIndex < phaseIndex && phaseIndex >= 0) {
- step.classList.add('completed');
- }
- });
- }
-
- /**
- * Show or hide the decoder console
- */
- function showConsole(visible) {
- const el = document.getElementById('wxsatSignalConsole');
- if (el) el.classList.toggle('active', visible);
-
- if (consoleAutoHideTimer) {
- clearTimeout(consoleAutoHideTimer);
- consoleAutoHideTimer = null;
- }
- }
-
- /**
- * Toggle console body collapsed state
- */
- function toggleConsole() {
- const body = document.getElementById('wxsatConsoleBody');
- const btn = document.getElementById('wxsatConsoleToggle');
- if (!body) return;
-
- consoleCollapsed = !consoleCollapsed;
- body.classList.toggle('collapsed', consoleCollapsed);
- if (btn) btn.classList.toggle('collapsed', consoleCollapsed);
- }
-
- /**
- * Clear console entries and reset phase indicator
- */
- function clearConsole() {
- const log = document.getElementById('wxsatConsoleLog');
- if (log) log.innerHTML = '';
- consoleEntries = [];
- currentPhase = 'idle';
-
- document.querySelectorAll('#wxsatPhaseIndicator .wxsat-phase-step').forEach(step => {
- step.classList.remove('active', 'completed', 'error');
- });
-
- if (consoleAutoHideTimer) {
- clearTimeout(consoleAutoHideTimer);
- consoleAutoHideTimer = null;
- }
- }
-
- /**
- * Suspend background activity when leaving the mode.
- * Closes the SSE stream and stops the countdown interval so they don't
- * keep running while another mode is active. The stream is re-opened
- * by init() or startStream() when the mode is next entered.
- */
- function suspend() {
- if (countdownInterval) {
- clearInterval(countdownInterval);
- countdownInterval = null;
- }
- // Only close the stream if nothing is actively capturing/scheduling —
- // if a capture or scheduler is running we want it to continue on the
- // server and the stream will reconnect on next init().
- if (!isRunning && !schedulerEnabled) {
- stopStream();
- }
- }
-
- // Public API
- return {
- init,
- suspend,
- start,
- stop,
- startPass,
- selectPass,
- testDecode,
- loadImages,
- loadPasses,
- showImage,
- closeImage,
- deleteImage,
- deleteAllImages,
- useGPS,
- toggleScheduler,
- invalidateMap,
- toggleConsole,
- _getModalFilename: () => currentModalFilename,
- };
-})();
-
-document.addEventListener('DOMContentLoaded', function() {
- // Initialization happens via selectMode when weather-satellite mode is activated
-});
+
+ // ========================
+ // Decoder Console
+ // ========================
+
+ /**
+ * Add an entry to the decoder console log
+ */
+ function addConsoleEntry(message, logType) {
+ const log = document.getElementById('wxsatConsoleLog');
+ if (!log) return;
+
+ const entry = document.createElement('div');
+ entry.className = `wxsat-console-entry wxsat-log-${logType || 'info'}`;
+ entry.textContent = message;
+ log.appendChild(entry);
+
+ consoleEntries.push(entry);
+
+ // Cap at 200 entries
+ while (consoleEntries.length > 200) {
+ const old = consoleEntries.shift();
+ if (old.parentNode) old.parentNode.removeChild(old);
+ }
+
+ // Auto-scroll to bottom
+ log.scrollTop = log.scrollHeight;
+ }
+
+ /**
+ * Update the phase indicator steps
+ */
+ function updatePhaseIndicator(phase) {
+ if (!phase || phase === currentPhase) return;
+ currentPhase = phase;
+
+ const phases = ['tuning', 'listening', 'signal_detected', 'decoding', 'complete'];
+ const phaseIndex = phases.indexOf(phase);
+ const isError = phase === 'error';
+
+ document.querySelectorAll('#wxsatPhaseIndicator .wxsat-phase-step').forEach(step => {
+ const stepPhase = step.dataset.phase;
+ const stepIndex = phases.indexOf(stepPhase);
+
+ step.classList.remove('active', 'completed', 'error');
+
+ if (isError) {
+ if (stepPhase === currentPhase || stepIndex === phaseIndex) {
+ step.classList.add('error');
+ }
+ } else if (stepIndex === phaseIndex) {
+ step.classList.add('active');
+ } else if (stepIndex < phaseIndex && phaseIndex >= 0) {
+ step.classList.add('completed');
+ }
+ });
+ }
+
+ /**
+ * Show or hide the decoder console
+ */
+ function showConsole(visible) {
+ const el = document.getElementById('wxsatSignalConsole');
+ if (el) el.classList.toggle('active', visible);
+
+ if (consoleAutoHideTimer) {
+ clearTimeout(consoleAutoHideTimer);
+ consoleAutoHideTimer = null;
+ }
+ }
+
+ /**
+ * Toggle console body collapsed state
+ */
+ function toggleConsole() {
+ const body = document.getElementById('wxsatConsoleBody');
+ const btn = document.getElementById('wxsatConsoleToggle');
+ if (!body) return;
+
+ consoleCollapsed = !consoleCollapsed;
+ body.classList.toggle('collapsed', consoleCollapsed);
+ if (btn) btn.classList.toggle('collapsed', consoleCollapsed);
+ }
+
+ /**
+ * Clear console entries and reset phase indicator
+ */
+ function clearConsole() {
+ const log = document.getElementById('wxsatConsoleLog');
+ if (log) log.innerHTML = '';
+ consoleEntries = [];
+ currentPhase = 'idle';
+
+ document.querySelectorAll('#wxsatPhaseIndicator .wxsat-phase-step').forEach(step => {
+ step.classList.remove('active', 'completed', 'error');
+ });
+
+ if (consoleAutoHideTimer) {
+ clearTimeout(consoleAutoHideTimer);
+ consoleAutoHideTimer = null;
+ }
+ }
+
+ /**
+ * Suspend background activity when leaving the mode.
+ * Closes the SSE stream and stops the countdown interval so they don't
+ * keep running while another mode is active. The stream is re-opened
+ * by init() or startStream() when the mode is next entered.
+ */
+ function suspend() {
+ if (countdownInterval) {
+ clearInterval(countdownInterval);
+ countdownInterval = null;
+ }
+ // Only close the stream if nothing is actively capturing/scheduling —
+ // if a capture or scheduler is running we want it to continue on the
+ // server and the stream will reconnect on next init().
+ if (!isRunning && !schedulerEnabled) {
+ stopStream();
+ }
+ }
+
+ // Public API
+ return {
+ init,
+ suspend,
+ start,
+ stop,
+ startPass,
+ selectPass,
+ testDecode,
+ loadImages,
+ loadPasses,
+ showImage,
+ closeImage,
+ deleteImage,
+ deleteAllImages,
+ useGPS,
+ toggleScheduler,
+ invalidateMap,
+ toggleConsole,
+ _getModalFilename: () => currentModalFilename,
+ };
+})();
+
+document.addEventListener('DOMContentLoaded', function() {
+ // Initialization happens via selectMode when weather-satellite mode is activated
+});
diff --git a/tests/test_subghz.py b/tests/test_subghz.py
index 6e73ba8..5de373f 100644
--- a/tests/test_subghz.py
+++ b/tests/test_subghz.py
@@ -76,12 +76,12 @@ class TestReceive:
mock_proc.stderr = MagicMock()
mock_proc.stderr.readline = MagicMock(return_value=b'')
- with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
- patch('subprocess.Popen', return_value=mock_proc), \
- patch.object(manager, 'check_hackrf_device', return_value=True), \
- patch('utils.subghz.register_process'):
- manager._hackrf_available = None
- result = manager.start_receive(
+ with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
+ patch('subprocess.Popen', return_value=mock_proc), \
+ patch.object(manager, 'check_hackrf_device', return_value=True), \
+ patch('utils.subghz.register_process'):
+ manager._hackrf_available = None
+ result = manager.start_receive(
frequency_hz=433920000,
sample_rate=2000000,
lna_gain=32,
@@ -92,9 +92,14 @@ class TestReceive:
assert manager.active_mode == 'rx'
def test_start_receive_already_running(self, manager):
+ import time as _time
mock_proc = MagicMock()
mock_proc.poll.return_value = None
manager._rx_process = mock_proc
+ # Pre-lock device checks now run before active_mode guard
+ manager._hackrf_available = True
+ manager._hackrf_device_cache = True
+ manager._hackrf_device_cache_ts = _time.time()
result = manager.start_receive(frequency_hz=433920000)
assert result['status'] == 'error'
@@ -104,10 +109,10 @@ class TestReceive:
result = manager.stop_receive()
assert result['status'] == 'not_running'
- def test_stop_receive_creates_metadata(self, manager, tmp_data_dir):
- # Create a fake IQ file
- iq_file = tmp_data_dir / 'captures' / 'test.iq'
- iq_file.write_bytes(b'\x00' * 1024)
+ def test_stop_receive_creates_metadata(self, manager, tmp_data_dir):
+ # Create a fake IQ file
+ iq_file = tmp_data_dir / 'captures' / 'test.iq'
+ iq_file.write_bytes(b'\x00' * 1024)
mock_proc = MagicMock()
mock_proc.poll.return_value = None
@@ -115,10 +120,10 @@ class TestReceive:
manager._rx_file = iq_file
manager._rx_frequency_hz = 433920000
manager._rx_sample_rate = 2000000
- manager._rx_lna_gain = 32
- manager._rx_vga_gain = 20
- manager._rx_start_time = 1000.0
- manager._rx_bursts = [{'start_seconds': 1.23, 'duration_seconds': 0.15, 'peak_level': 42}]
+ manager._rx_lna_gain = 32
+ manager._rx_vga_gain = 20
+ manager._rx_start_time = 1000.0
+ manager._rx_bursts = [{'start_seconds': 1.23, 'duration_seconds': 0.15, 'peak_level': 42}]
with patch('utils.subghz.safe_terminate'), \
patch('time.time', return_value=1005.0):
@@ -131,10 +136,10 @@ class TestReceive:
# Verify JSON sidecar was written
meta_path = iq_file.with_suffix('.json')
assert meta_path.exists()
- meta = json.loads(meta_path.read_text())
- assert meta['frequency_hz'] == 433920000
- assert isinstance(meta.get('bursts'), list)
- assert meta['bursts'][0]['peak_level'] == 42
+ meta = json.loads(meta_path.read_text())
+ assert meta['frequency_hz'] == 433920000
+ assert isinstance(meta.get('bursts'), list)
+ assert meta['bursts'][0]['peak_level'] == 42
class TestTxSafety:
@@ -165,13 +170,13 @@ class TestTxSafety:
result = manager.transmit(capture_id='abc123')
assert result['status'] == 'error'
- def test_transmit_capture_not_found(self, manager):
- with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
- patch.object(manager, 'check_hackrf_device', return_value=True):
- manager._hackrf_available = None
- result = manager.transmit(capture_id='nonexistent')
- assert result['status'] == 'error'
- assert 'not found' in result['message']
+ def test_transmit_capture_not_found(self, manager):
+ with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
+ patch.object(manager, 'check_hackrf_device', return_value=True):
+ manager._hackrf_available = None
+ result = manager.transmit(capture_id='nonexistent')
+ assert result['status'] == 'error'
+ assert 'not found' in result['message']
def test_transmit_out_of_band_rejected(self, manager, tmp_data_dir):
# Create a capture with out-of-band frequency
@@ -188,64 +193,79 @@ class TestTxSafety:
meta_path.write_text(json.dumps(meta))
(tmp_data_dir / 'captures' / 'test.iq').write_bytes(b'\x00' * 100)
- with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
- patch.object(manager, 'check_hackrf_device', return_value=True):
- manager._hackrf_available = None
- result = manager.transmit(capture_id='test123')
- assert result['status'] == 'error'
+ with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
+ patch.object(manager, 'check_hackrf_device', return_value=True):
+ manager._hackrf_available = None
+ result = manager.transmit(capture_id='test123')
+ assert result['status'] == 'error'
assert 'outside allowed TX bands' in result['message']
- def test_transmit_already_running(self, manager):
- mock_proc = MagicMock()
- mock_proc.poll.return_value = None
- manager._rx_process = mock_proc
-
- result = manager.transmit(capture_id='test123')
- assert result['status'] == 'error'
- assert 'Already running' in result['message']
-
- def test_transmit_segment_extracts_range(self, manager, tmp_data_dir):
- meta = {
- 'id': 'seg001',
- 'filename': 'seg.iq',
- 'frequency_hz': 433920000,
- 'sample_rate': 1000,
- 'lna_gain': 24,
- 'vga_gain': 20,
- 'timestamp': '2026-01-01T00:00:00Z',
- 'duration_seconds': 1.0,
- 'size_bytes': 2000,
- }
- (tmp_data_dir / 'captures' / 'seg.json').write_text(json.dumps(meta))
- (tmp_data_dir / 'captures' / 'seg.iq').write_bytes(bytes(range(200)) * 10)
-
- mock_proc = MagicMock()
- mock_proc.poll.return_value = None
- mock_timer = MagicMock()
- mock_timer.start = MagicMock()
-
- with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
- patch.object(manager, 'check_hackrf_device', return_value=True), \
- patch('subprocess.Popen', return_value=mock_proc), \
- patch('utils.subghz.register_process'), \
- patch('threading.Timer', return_value=mock_timer), \
- patch('threading.Thread') as mock_thread_cls:
- mock_thread = MagicMock()
- mock_thread.start = MagicMock()
- mock_thread_cls.return_value = mock_thread
-
- manager._hackrf_available = None
- result = manager.transmit(
- capture_id='seg001',
- start_seconds=0.2,
- duration_seconds=0.3,
- )
-
- assert result['status'] == 'transmitting'
- assert result['segment'] is not None
- assert result['segment']['duration_seconds'] == pytest.approx(0.3, abs=0.01)
- assert manager._tx_temp_file is not None
- assert manager._tx_temp_file.exists()
+ def test_transmit_already_running(self, manager, tmp_data_dir):
+ import time as _time
+ mock_proc = MagicMock()
+ mock_proc.poll.return_value = None
+ manager._rx_process = mock_proc
+ # Pre-lock device checks now run before active_mode guard
+ manager._hackrf_available = True
+ manager._hackrf_device_cache = True
+ manager._hackrf_device_cache_ts = _time.time()
+ # Capture lookup also runs pre-lock now; provide a valid capture + IQ file
+ meta = {
+ 'id': 'test123',
+ 'filename': 'test.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 2000000,
+ 'timestamp': '2025-01-01T00:00:00',
+ }
+ (tmp_data_dir / 'captures' / 'test.json').write_text(json.dumps(meta))
+ (tmp_data_dir / 'captures' / 'test.iq').write_bytes(b'\x00' * 64)
+
+ result = manager.transmit(capture_id='test123')
+ assert result['status'] == 'error'
+ assert 'Already running' in result['message']
+
+ def test_transmit_segment_extracts_range(self, manager, tmp_data_dir):
+ meta = {
+ 'id': 'seg001',
+ 'filename': 'seg.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 1000,
+ 'lna_gain': 24,
+ 'vga_gain': 20,
+ 'timestamp': '2026-01-01T00:00:00Z',
+ 'duration_seconds': 1.0,
+ 'size_bytes': 2000,
+ }
+ (tmp_data_dir / 'captures' / 'seg.json').write_text(json.dumps(meta))
+ (tmp_data_dir / 'captures' / 'seg.iq').write_bytes(bytes(range(200)) * 10)
+
+ mock_proc = MagicMock()
+ mock_proc.poll.return_value = None
+ mock_timer = MagicMock()
+ mock_timer.start = MagicMock()
+
+ with patch('shutil.which', return_value='/usr/bin/hackrf_transfer'), \
+ patch.object(manager, 'check_hackrf_device', return_value=True), \
+ patch('subprocess.Popen', return_value=mock_proc), \
+ patch('utils.subghz.register_process'), \
+ patch('threading.Timer', return_value=mock_timer), \
+ patch('threading.Thread') as mock_thread_cls:
+ mock_thread = MagicMock()
+ mock_thread.start = MagicMock()
+ mock_thread_cls.return_value = mock_thread
+
+ manager._hackrf_available = None
+ result = manager.transmit(
+ capture_id='seg001',
+ start_seconds=0.2,
+ duration_seconds=0.3,
+ )
+
+ assert result['status'] == 'transmitting'
+ assert result['segment'] is not None
+ assert result['segment']['duration_seconds'] == pytest.approx(0.3, abs=0.01)
+ assert manager._tx_temp_file is not None
+ assert manager._tx_temp_file.exists()
class TestCaptureLibrary:
@@ -311,11 +331,11 @@ class TestCaptureLibrary:
def test_delete_capture_not_found(self, manager):
assert manager.delete_capture('nonexistent') is False
- def test_update_label(self, manager, tmp_data_dir):
- meta = {
- 'id': 'lbl001',
- 'filename': 'label_test.iq',
- 'frequency_hz': 433920000,
+ def test_update_label(self, manager, tmp_data_dir):
+ meta = {
+ 'id': 'lbl001',
+ 'filename': 'label_test.iq',
+ 'frequency_hz': 433920000,
'sample_rate': 2000000,
'timestamp': '2026-01-01T00:00:00Z',
'label': '',
@@ -324,10 +344,10 @@ class TestCaptureLibrary:
meta_path.write_text(json.dumps(meta))
assert manager.update_capture_label('lbl001', 'Garage Remote') is True
-
- updated = json.loads(meta_path.read_text())
- assert updated['label'] == 'Garage Remote'
- assert updated['label_source'] == 'manual'
+
+ updated = json.loads(meta_path.read_text())
+ assert updated['label'] == 'Garage Remote'
+ assert updated['label_source'] == 'manual'
def test_update_label_not_found(self, manager):
assert manager.update_capture_label('nonexistent', 'test') is False
@@ -348,100 +368,100 @@ class TestCaptureLibrary:
assert path is not None
assert path.name == 'path_test.iq'
- def test_get_capture_path_not_found(self, manager):
- assert manager.get_capture_path('nonexistent') is None
-
- def test_trim_capture_manual_segment(self, manager, tmp_data_dir):
- captures_dir = tmp_data_dir / 'captures'
- iq_path = captures_dir / 'trim_src.iq'
- iq_path.write_bytes(bytes(range(200)) * 20) # 4000 bytes at 1000 sps => 2.0s
- (captures_dir / 'trim_src.json').write_text(json.dumps({
- 'id': 'trim001',
- 'filename': 'trim_src.iq',
- 'frequency_hz': 433920000,
- 'sample_rate': 1000,
- 'lna_gain': 24,
- 'vga_gain': 20,
- 'timestamp': '2026-01-01T00:00:00Z',
- 'duration_seconds': 2.0,
- 'size_bytes': 4000,
- 'label': 'Weather Burst',
- 'bursts': [
- {
- 'start_seconds': 0.55,
- 'duration_seconds': 0.2,
- 'peak_level': 67,
- 'fingerprint': 'abc123',
- 'modulation_hint': 'OOK/ASK',
- 'modulation_confidence': 0.9,
- }
- ],
- }))
-
- result = manager.trim_capture(
- capture_id='trim001',
- start_seconds=0.5,
- duration_seconds=0.4,
- )
-
- assert result['status'] == 'ok'
- assert result['capture']['id'] != 'trim001'
- assert result['capture']['size_bytes'] == 800
- assert result['capture']['label'].endswith('(Trim)')
- trimmed_iq = captures_dir / result['capture']['filename']
- assert trimmed_iq.exists()
- trimmed_meta = trimmed_iq.with_suffix('.json')
- assert trimmed_meta.exists()
-
- def test_trim_capture_auto_burst(self, manager, tmp_data_dir):
- captures_dir = tmp_data_dir / 'captures'
- iq_path = captures_dir / 'auto_src.iq'
- iq_path.write_bytes(bytes(range(100)) * 40) # 4000 bytes
- (captures_dir / 'auto_src.json').write_text(json.dumps({
- 'id': 'trim002',
- 'filename': 'auto_src.iq',
- 'frequency_hz': 433920000,
- 'sample_rate': 1000,
- 'lna_gain': 24,
- 'vga_gain': 20,
- 'timestamp': '2026-01-01T00:00:00Z',
- 'duration_seconds': 2.0,
- 'size_bytes': 4000,
- 'bursts': [
- {'start_seconds': 0.2, 'duration_seconds': 0.1, 'peak_level': 12},
- {'start_seconds': 1.2, 'duration_seconds': 0.25, 'peak_level': 88},
- ],
- }))
-
- result = manager.trim_capture(capture_id='trim002')
- assert result['status'] == 'ok'
- assert result['segment']['auto_selected'] is True
- assert result['capture']['duration_seconds'] > 0.25
-
- def test_list_captures_groups_same_fingerprint(self, manager, tmp_data_dir):
- cap_a = {
- 'id': 'grp001',
- 'filename': 'a.iq',
- 'frequency_hz': 433920000,
- 'sample_rate': 2000000,
- 'timestamp': '2026-01-01T00:00:00Z',
- 'dominant_fingerprint': 'deadbeefcafebabe',
- }
- cap_b = {
- 'id': 'grp002',
- 'filename': 'b.iq',
- 'frequency_hz': 433920000,
- 'sample_rate': 2000000,
- 'timestamp': '2026-01-01T00:01:00Z',
- 'dominant_fingerprint': 'deadbeefcafebabe',
- }
- (tmp_data_dir / 'captures' / 'a.json').write_text(json.dumps(cap_a))
- (tmp_data_dir / 'captures' / 'b.json').write_text(json.dumps(cap_b))
-
- captures = manager.list_captures()
- assert len(captures) == 2
- assert all(c.fingerprint_group.startswith('SIG-') for c in captures)
- assert all(c.fingerprint_group_size == 2 for c in captures)
+ def test_get_capture_path_not_found(self, manager):
+ assert manager.get_capture_path('nonexistent') is None
+
+ def test_trim_capture_manual_segment(self, manager, tmp_data_dir):
+ captures_dir = tmp_data_dir / 'captures'
+ iq_path = captures_dir / 'trim_src.iq'
+ iq_path.write_bytes(bytes(range(200)) * 20) # 4000 bytes at 1000 sps => 2.0s
+ (captures_dir / 'trim_src.json').write_text(json.dumps({
+ 'id': 'trim001',
+ 'filename': 'trim_src.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 1000,
+ 'lna_gain': 24,
+ 'vga_gain': 20,
+ 'timestamp': '2026-01-01T00:00:00Z',
+ 'duration_seconds': 2.0,
+ 'size_bytes': 4000,
+ 'label': 'Weather Burst',
+ 'bursts': [
+ {
+ 'start_seconds': 0.55,
+ 'duration_seconds': 0.2,
+ 'peak_level': 67,
+ 'fingerprint': 'abc123',
+ 'modulation_hint': 'OOK/ASK',
+ 'modulation_confidence': 0.9,
+ }
+ ],
+ }))
+
+ result = manager.trim_capture(
+ capture_id='trim001',
+ start_seconds=0.5,
+ duration_seconds=0.4,
+ )
+
+ assert result['status'] == 'ok'
+ assert result['capture']['id'] != 'trim001'
+ assert result['capture']['size_bytes'] == 800
+ assert result['capture']['label'].endswith('(Trim)')
+ trimmed_iq = captures_dir / result['capture']['filename']
+ assert trimmed_iq.exists()
+ trimmed_meta = trimmed_iq.with_suffix('.json')
+ assert trimmed_meta.exists()
+
+ def test_trim_capture_auto_burst(self, manager, tmp_data_dir):
+ captures_dir = tmp_data_dir / 'captures'
+ iq_path = captures_dir / 'auto_src.iq'
+ iq_path.write_bytes(bytes(range(100)) * 40) # 4000 bytes
+ (captures_dir / 'auto_src.json').write_text(json.dumps({
+ 'id': 'trim002',
+ 'filename': 'auto_src.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 1000,
+ 'lna_gain': 24,
+ 'vga_gain': 20,
+ 'timestamp': '2026-01-01T00:00:00Z',
+ 'duration_seconds': 2.0,
+ 'size_bytes': 4000,
+ 'bursts': [
+ {'start_seconds': 0.2, 'duration_seconds': 0.1, 'peak_level': 12},
+ {'start_seconds': 1.2, 'duration_seconds': 0.25, 'peak_level': 88},
+ ],
+ }))
+
+ result = manager.trim_capture(capture_id='trim002')
+ assert result['status'] == 'ok'
+ assert result['segment']['auto_selected'] is True
+ assert result['capture']['duration_seconds'] > 0.25
+
+ def test_list_captures_groups_same_fingerprint(self, manager, tmp_data_dir):
+ cap_a = {
+ 'id': 'grp001',
+ 'filename': 'a.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 2000000,
+ 'timestamp': '2026-01-01T00:00:00Z',
+ 'dominant_fingerprint': 'deadbeefcafebabe',
+ }
+ cap_b = {
+ 'id': 'grp002',
+ 'filename': 'b.iq',
+ 'frequency_hz': 433920000,
+ 'sample_rate': 2000000,
+ 'timestamp': '2026-01-01T00:01:00Z',
+ 'dominant_fingerprint': 'deadbeefcafebabe',
+ }
+ (tmp_data_dir / 'captures' / 'a.json').write_text(json.dumps(cap_a))
+ (tmp_data_dir / 'captures' / 'b.json').write_text(json.dumps(cap_b))
+
+ captures = manager.list_captures()
+ assert len(captures) == 2
+ assert all(c.fingerprint_group.startswith('SIG-') for c in captures)
+ assert all(c.fingerprint_group_size == 2 for c in captures)
class TestSweep:
@@ -452,6 +472,7 @@ class TestSweep:
assert result['status'] == 'error'
def test_start_sweep_success(self, manager):
+ import time as _time
mock_proc = MagicMock()
mock_proc.poll.return_value = None
mock_proc.stdout = MagicMock()
@@ -460,6 +481,8 @@ class TestSweep:
patch('subprocess.Popen', return_value=mock_proc), \
patch('utils.subghz.register_process'):
manager._sweep_available = None
+ manager._hackrf_device_cache = True
+ manager._hackrf_device_cache_ts = _time.time()
result = manager.start_sweep(freq_start_mhz=300, freq_end_mhz=928)
assert result['status'] == 'started'
@@ -517,8 +540,11 @@ class TestDecode:
with patch('shutil.which', return_value='/usr/bin/tool'), \
patch('subprocess.Popen', side_effect=popen_side_effect) as mock_popen, \
patch('utils.subghz.register_process'):
+ import time as _time
manager._hackrf_available = None
manager._rtl433_available = None
+ manager._hackrf_device_cache = True
+ manager._hackrf_device_cache_ts = _time.time()
result = manager.start_decode(
frequency_hz=433920000,
sample_rate=2000000,
@@ -536,10 +562,10 @@ class TestDecode:
assert '-r' in hackrf_cmd
# Verify rtl_433 command
- rtl433_cmd = mock_popen.call_args_list[1][0][0]
- assert rtl433_cmd[0] == 'rtl_433'
- assert '-r' in rtl433_cmd
- assert 'cs8:-' in rtl433_cmd
+ rtl433_cmd = mock_popen.call_args_list[1][0][0]
+ assert rtl433_cmd[0] == 'rtl_433'
+ assert '-r' in rtl433_cmd
+ assert 'cs8:-' in rtl433_cmd
# Both processes tracked
assert manager._decode_hackrf_process is mock_hackrf_proc
diff --git a/tests/test_weather_sat_decoder.py b/tests/test_weather_sat_decoder.py
index 1f48642..45f975d 100644
--- a/tests/test_weather_sat_decoder.py
+++ b/tests/test_weather_sat_decoder.py
@@ -138,7 +138,8 @@ class TestWeatherSatDecoder:
@patch('pty.openpty')
def test_start_already_running(self, mock_pty, mock_popen):
"""start() should return True when already running."""
- with patch('shutil.which', return_value='/usr/bin/satdump'):
+ with patch('shutil.which', return_value='/usr/bin/satdump'), \
+ patch('utils.weather_sat.WeatherSatDecoder._resolve_device_id', return_value='0'):
decoder = WeatherSatDecoder()
decoder._running = True
diff --git a/utils/meshtastic.py b/utils/meshtastic.py
index 7cebcbc..4df4dac 100644
--- a/utils/meshtastic.py
+++ b/utils/meshtastic.py
@@ -376,63 +376,82 @@ class MeshtasticClient:
self._error = "Meshtastic SDK not installed. Install with: pip install meshtastic"
return False
+ # Quick check under lock — bail if already running
with self._lock:
if self._running:
return True
- try:
- # Subscribe to message events before connecting
- pub.subscribe(self._on_receive, "meshtastic.receive")
- pub.subscribe(self._on_connection, "meshtastic.connection.established")
- pub.subscribe(self._on_disconnect, "meshtastic.connection.lost")
+ # Create interface outside lock (blocking I/O: serial/TCP connect)
+ new_interface = None
+ new_device_path = None
+ new_connection_type = None
+ try:
+ # Subscribe to message events before connecting
+ pub.subscribe(self._on_receive, "meshtastic.receive")
+ pub.subscribe(self._on_connection, "meshtastic.connection.established")
+ pub.subscribe(self._on_disconnect, "meshtastic.connection.lost")
- # Connect based on connection type
- if connection_type == 'tcp':
- if not hostname:
- self._error = "Hostname is required for TCP connections"
- self._cleanup_subscriptions()
- return False
- self._interface = meshtastic.tcp_interface.TCPInterface(hostname=hostname)
- self._device_path = hostname
- self._connection_type = 'tcp'
- logger.info(f"Connected to Meshtastic device via TCP: {hostname}")
+ if connection_type == 'tcp':
+ if not hostname:
+ self._error = "Hostname is required for TCP connections"
+ self._cleanup_subscriptions()
+ return False
+ new_interface = meshtastic.tcp_interface.TCPInterface(hostname=hostname)
+ new_device_path = hostname
+ new_connection_type = 'tcp'
+ logger.info(f"Connected to Meshtastic device via TCP: {hostname}")
+ else:
+ if device:
+ new_interface = meshtastic.serial_interface.SerialInterface(device)
+ new_device_path = device
else:
- # Serial connection (default)
- if device:
- self._interface = meshtastic.serial_interface.SerialInterface(device)
- self._device_path = device
- else:
- # Auto-discover
- self._interface = meshtastic.serial_interface.SerialInterface()
- self._device_path = "auto"
- self._connection_type = 'serial'
- logger.info(f"Connected to Meshtastic device via serial: {self._device_path}")
+ new_interface = meshtastic.serial_interface.SerialInterface()
+ new_device_path = "auto"
+ new_connection_type = 'serial'
+ logger.info(f"Connected to Meshtastic device via serial: {new_device_path}")
+ except Exception as e:
+ self._error = str(e)
+ logger.error(f"Failed to connect to Meshtastic: {e}")
+ self._cleanup_subscriptions()
+ return False
- self._running = True
- self._error = None
+ # Install interface under lock
+ with self._lock:
+ if self._running:
+ # Another thread connected while we were connecting — discard ours
+ if new_interface:
+ try:
+ new_interface.close()
+ except Exception:
+ pass
return True
- except Exception as e:
- self._error = str(e)
- logger.error(f"Failed to connect to Meshtastic: {e}")
- self._cleanup_subscriptions()
- return False
+ self._interface = new_interface
+ self._device_path = new_device_path
+ self._connection_type = new_connection_type
+ self._running = True
+ self._error = None
+ return True
def disconnect(self) -> None:
"""Disconnect from the Meshtastic device."""
+ iface_to_close = None
with self._lock:
- if self._interface:
- try:
- self._interface.close()
- except Exception as e:
- logger.warning(f"Error closing Meshtastic interface: {e}")
- self._interface = None
-
+ iface_to_close = self._interface
+ self._interface = None
self._cleanup_subscriptions()
self._running = False
self._device_path = None
self._connection_type = None
- logger.info("Disconnected from Meshtastic device")
+
+ # Close interface outside lock (blocking I/O)
+ if iface_to_close:
+ try:
+ iface_to_close.close()
+ except Exception as e:
+ logger.warning(f"Error closing Meshtastic interface: {e}")
+
+ logger.info("Disconnected from Meshtastic device")
def _cleanup_subscriptions(self) -> None:
"""Unsubscribe from pubsub topics."""
diff --git a/utils/process_monitor.py b/utils/process_monitor.py
index 4cd5ff4..ecfe786 100644
--- a/utils/process_monitor.py
+++ b/utils/process_monitor.py
@@ -112,6 +112,8 @@ class ProcessMonitor:
def _check_all_processes(self) -> None:
"""Check health of all registered processes."""
+ # Collect crashed processes under lock, handle restarts outside
+ crashed: list[tuple[str, ProcessInfo]] = []
with self._lock:
for name, info in list(self.processes.items()):
if not info.enabled:
@@ -126,10 +128,14 @@ class ProcessMonitor:
logger.warning(
f"Process '{name}' terminated with code {return_code}"
)
- self._handle_crash(name, info)
+ crashed.append((name, info))
+
+ # Handle restarts outside lock (involves sleeps and callbacks)
+ for name, info in crashed:
+ self._handle_crash(name, info)
def _handle_crash(self, name: str, info: ProcessInfo) -> None:
- """Handle a crashed process."""
+ """Handle a crashed process. Must be called WITHOUT holding self._lock."""
if info.restart_callback is None:
logger.info(f"No restart callback for '{name}', skipping auto-restart")
return
@@ -139,7 +145,8 @@ class ProcessMonitor:
f"Process '{name}' exceeded max restarts ({info.max_restarts}), "
"disabling auto-restart"
)
- info.enabled = False
+ with self._lock:
+ info.enabled = False
return
# Calculate backoff with exponential increase
@@ -149,18 +156,20 @@ class ProcessMonitor:
f"(attempt {info.restart_count + 1}/{info.max_restarts})"
)
- # Wait for backoff period
+ # Wait for backoff period outside lock
time.sleep(backoff)
# Attempt restart
try:
info.restart_callback()
- info.restart_count += 1
- info.last_restart = datetime.now()
+ with self._lock:
+ info.restart_count += 1
+ info.last_restart = datetime.now()
logger.info(f"Successfully restarted '{name}'")
except Exception as e:
logger.error(f"Failed to restart '{name}': {e}")
- info.restart_count += 1
+ with self._lock:
+ info.restart_count += 1
def get_status(self) -> Dict[str, Any]:
"""
diff --git a/utils/sstv/sstv_decoder.py b/utils/sstv/sstv_decoder.py
index 322458b..078781c 100644
--- a/utils/sstv/sstv_decoder.py
+++ b/utils/sstv/sstv_decoder.py
@@ -552,15 +552,20 @@ class SSTVDecoder:
# Clean up if the thread exits while we thought we were running.
# This prevents a "ghost running" state where is_running is True
# but the thread has already died (e.g. rtl_fm exited).
+ orphan_proc = None
with self._lock:
was_running = self._running
self._running = False
if was_running and self._rtl_process:
- with contextlib.suppress(Exception):
- self._rtl_process.terminate()
- self._rtl_process.wait(timeout=2)
+ orphan_proc = self._rtl_process
self._rtl_process = None
+ # Terminate outside lock to avoid blocking other operations
+ if orphan_proc:
+ with contextlib.suppress(Exception):
+ orphan_proc.terminate()
+ orphan_proc.wait(timeout=2)
+
if was_running:
logger.warning("Audio decode thread stopped unexpectedly")
err_detail = rtl_fm_error.split('\n')[-1] if rtl_fm_error else ''
@@ -661,38 +666,52 @@ class SSTVDecoder:
def _retune_rtl_fm(self, new_freq_hz: int) -> None:
"""Retune rtl_fm to a new frequency by restarting the process."""
+ old_proc = None
with self._lock:
if not self._running:
return
+ old_proc = self._rtl_process
+ self._rtl_process = None
- if self._rtl_process:
- try:
- self._rtl_process.terminate()
- self._rtl_process.wait(timeout=2)
- except Exception:
- with contextlib.suppress(Exception):
- self._rtl_process.kill()
+ # Terminate old process outside lock
+ if old_proc:
+ try:
+ old_proc.terminate()
+ old_proc.wait(timeout=2)
+ except Exception:
+ with contextlib.suppress(Exception):
+ old_proc.kill()
- rtl_cmd = [
- 'rtl_fm',
- '-d', str(self._device_index),
- '-f', str(new_freq_hz),
- '-M', self._modulation,
- '-s', str(SAMPLE_RATE),
- '-r', str(SAMPLE_RATE),
- '-l', '0',
- '-'
- ]
+ # Build and start new process outside lock
+ rtl_cmd = [
+ 'rtl_fm',
+ '-d', str(self._device_index),
+ '-f', str(new_freq_hz),
+ '-M', self._modulation,
+ '-s', str(SAMPLE_RATE),
+ '-r', str(SAMPLE_RATE),
+ '-l', '0',
+ '-'
+ ]
- logger.debug(f"Restarting rtl_fm: {' '.join(rtl_cmd)}")
+ logger.debug(f"Restarting rtl_fm: {' '.join(rtl_cmd)}")
- self._rtl_process = subprocess.Popen(
- rtl_cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE
- )
+ new_proc = subprocess.Popen(
+ rtl_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
- self._current_tuned_freq_hz = new_freq_hz
+ # Re-acquire lock to install new process
+ with self._lock:
+ if self._running:
+ self._rtl_process = new_proc
+ self._current_tuned_freq_hz = new_freq_hz
+ else:
+ # stop() was called during retune — clean up new process
+ with contextlib.suppress(Exception):
+ new_proc.terminate()
+ new_proc.wait(timeout=2)
@property
def last_doppler_info(self) -> DopplerInfo | None:
@@ -706,19 +725,22 @@ class SSTVDecoder:
def stop(self) -> None:
"""Stop SSTV decoder."""
+ proc_to_terminate = None
with self._lock:
self._running = False
+ proc_to_terminate = self._rtl_process
+ self._rtl_process = None
- if self._rtl_process:
- try:
- self._rtl_process.terminate()
- self._rtl_process.wait(timeout=5)
- except Exception:
- with contextlib.suppress(Exception):
- self._rtl_process.kill()
- self._rtl_process = None
+ # Terminate outside lock to avoid blocking other operations
+ if proc_to_terminate:
+ try:
+ proc_to_terminate.terminate()
+ proc_to_terminate.wait(timeout=5)
+ except Exception:
+ with contextlib.suppress(Exception):
+ proc_to_terminate.kill()
- logger.info("SSTV decoder stopped")
+ logger.info("SSTV decoder stopped")
def get_images(self) -> list[SSTVImage]:
"""Get list of decoded images."""
diff --git a/utils/subghz.py b/utils/subghz.py
index adb82d3..99686d9 100644
--- a/utils/subghz.py
+++ b/utils/subghz.py
@@ -7,19 +7,19 @@ sweeps via hackrf_sweep.
from __future__ import annotations
-import json
-import hashlib
-import os
-import queue
-import shutil
-import subprocess
-import threading
-import time
-import uuid
-from dataclasses import dataclass, field
-from datetime import datetime, timezone
-from pathlib import Path
-from typing import BinaryIO, Callable
+import json
+import hashlib
+import os
+import queue
+import shutil
+import subprocess
+import threading
+import time
+import uuid
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import BinaryIO, Callable
import numpy as np
@@ -42,7 +42,7 @@ logger = get_logger('intercept.subghz')
@dataclass
-class SubGhzCapture:
+class SubGhzCapture:
"""Metadata for a saved IQ capture."""
capture_id: str
filename: str
@@ -51,47 +51,47 @@ class SubGhzCapture:
lna_gain: int
vga_gain: int
timestamp: str
- duration_seconds: float = 0.0
- size_bytes: int = 0
- label: str = ''
- label_source: str = ''
- decoded_protocols: list[str] = field(default_factory=list)
- bursts: list[dict] = field(default_factory=list)
- modulation_hint: str = ''
- modulation_confidence: float = 0.0
- protocol_hint: str = ''
- dominant_fingerprint: str = ''
- fingerprint_group: str = ''
- fingerprint_group_size: int = 0
- trigger_enabled: bool = False
- trigger_pre_seconds: float = 0.0
- trigger_post_seconds: float = 0.0
-
- def to_dict(self) -> dict:
- return {
- 'id': self.capture_id,
- 'filename': self.filename,
+ duration_seconds: float = 0.0
+ size_bytes: int = 0
+ label: str = ''
+ label_source: str = ''
+ decoded_protocols: list[str] = field(default_factory=list)
+ bursts: list[dict] = field(default_factory=list)
+ modulation_hint: str = ''
+ modulation_confidence: float = 0.0
+ protocol_hint: str = ''
+ dominant_fingerprint: str = ''
+ fingerprint_group: str = ''
+ fingerprint_group_size: int = 0
+ trigger_enabled: bool = False
+ trigger_pre_seconds: float = 0.0
+ trigger_post_seconds: float = 0.0
+
+ def to_dict(self) -> dict:
+ return {
+ 'id': self.capture_id,
+ 'filename': self.filename,
'frequency_hz': self.frequency_hz,
'sample_rate': self.sample_rate,
'lna_gain': self.lna_gain,
'vga_gain': self.vga_gain,
'timestamp': self.timestamp,
- 'duration_seconds': self.duration_seconds,
- 'size_bytes': self.size_bytes,
- 'label': self.label,
- 'label_source': self.label_source,
- 'decoded_protocols': self.decoded_protocols,
- 'bursts': self.bursts,
- 'modulation_hint': self.modulation_hint,
- 'modulation_confidence': self.modulation_confidence,
- 'protocol_hint': self.protocol_hint,
- 'dominant_fingerprint': self.dominant_fingerprint,
- 'fingerprint_group': self.fingerprint_group,
- 'fingerprint_group_size': self.fingerprint_group_size,
- 'trigger_enabled': self.trigger_enabled,
- 'trigger_pre_seconds': self.trigger_pre_seconds,
- 'trigger_post_seconds': self.trigger_post_seconds,
- }
+ 'duration_seconds': self.duration_seconds,
+ 'size_bytes': self.size_bytes,
+ 'label': self.label,
+ 'label_source': self.label_source,
+ 'decoded_protocols': self.decoded_protocols,
+ 'bursts': self.bursts,
+ 'modulation_hint': self.modulation_hint,
+ 'modulation_confidence': self.modulation_confidence,
+ 'protocol_hint': self.protocol_hint,
+ 'dominant_fingerprint': self.dominant_fingerprint,
+ 'fingerprint_group': self.fingerprint_group,
+ 'fingerprint_group_size': self.fingerprint_group_size,
+ 'trigger_enabled': self.trigger_enabled,
+ 'trigger_pre_seconds': self.trigger_pre_seconds,
+ 'trigger_post_seconds': self.trigger_post_seconds,
+ }
@dataclass
@@ -126,52 +126,52 @@ class SubGhzManager:
self._lock = threading.RLock()
self._callback: Callable[[dict], None] | None = None
- # RX state
- self._rx_start_time: float = 0
- self._rx_frequency_hz: int = 0
- self._rx_sample_rate: int = 0
- self._rx_lna_gain: int = 0
- self._rx_vga_gain: int = 0
- self._rx_file: Path | None = None
- self._rx_file_handle: BinaryIO | None = None
- self._rx_thread: threading.Thread | None = None
- self._rx_stop = False
- self._rx_bytes_written = 0
- self._rx_bursts: list[dict] = []
- self._rx_trigger_enabled = False
- self._rx_trigger_pre_s = 0.35
- self._rx_trigger_post_s = 0.7
- self._rx_trigger_first_burst_start: float | None = None
- self._rx_trigger_last_burst_end: float | None = None
- self._rx_autostop_pending = False
- self._rx_modulation_hint = ''
- self._rx_modulation_confidence = 0.0
- self._rx_protocol_hint = ''
- self._rx_fingerprint_counts: dict[str, int] = {}
+ # RX state
+ self._rx_start_time: float = 0
+ self._rx_frequency_hz: int = 0
+ self._rx_sample_rate: int = 0
+ self._rx_lna_gain: int = 0
+ self._rx_vga_gain: int = 0
+ self._rx_file: Path | None = None
+ self._rx_file_handle: BinaryIO | None = None
+ self._rx_thread: threading.Thread | None = None
+ self._rx_stop = False
+ self._rx_bytes_written = 0
+ self._rx_bursts: list[dict] = []
+ self._rx_trigger_enabled = False
+ self._rx_trigger_pre_s = 0.35
+ self._rx_trigger_post_s = 0.7
+ self._rx_trigger_first_burst_start: float | None = None
+ self._rx_trigger_last_burst_end: float | None = None
+ self._rx_autostop_pending = False
+ self._rx_modulation_hint = ''
+ self._rx_modulation_confidence = 0.0
+ self._rx_protocol_hint = ''
+ self._rx_fingerprint_counts: dict[str, int] = {}
- # Decode state
- self._decode_start_time: float = 0
- self._decode_frequency_hz: int = 0
- self._decode_sample_rate: int = 0
- self._decode_stop = False
+ # Decode state
+ self._decode_start_time: float = 0
+ self._decode_frequency_hz: int = 0
+ self._decode_sample_rate: int = 0
+ self._decode_stop = False
# TX state
- self._tx_start_time: float = 0
- self._tx_watchdog: threading.Timer | None = None
- self._tx_capture_id: str = ''
- self._tx_temp_file: Path | None = None
+ self._tx_start_time: float = 0
+ self._tx_watchdog: threading.Timer | None = None
+ self._tx_capture_id: str = ''
+ self._tx_temp_file: Path | None = None
# Sweep state
self._sweep_running = False
self._sweep_thread: threading.Thread | None = None
- # Tool availability
- self._hackrf_available: bool | None = None
- self._hackrf_info_available: bool | None = None
- self._hackrf_device_cache: bool | None = None
- self._hackrf_device_cache_ts: float = 0.0
- self._rtl433_available: bool | None = None
- self._sweep_available: bool | None = None
+ # Tool availability
+ self._hackrf_available: bool | None = None
+ self._hackrf_info_available: bool | None = None
+ self._hackrf_device_cache: bool | None = None
+ self._hackrf_device_cache_ts: float = 0.0
+ self._rtl433_available: bool | None = None
+ self._sweep_available: bool | None = None
@property
def data_dir(self) -> Path:
@@ -191,42 +191,42 @@ class SubGhzManager:
# Tool detection
# ------------------------------------------------------------------
- def check_hackrf(self) -> bool:
- if self._hackrf_available is None:
- self._hackrf_available = shutil.which('hackrf_transfer') is not None
- return self._hackrf_available
-
- def check_hackrf_info(self) -> bool:
- if self._hackrf_info_available is None:
- self._hackrf_info_available = shutil.which('hackrf_info') is not None
- return self._hackrf_info_available
-
- def check_hackrf_device(self) -> bool | None:
- """Return True if a HackRF device is detected, False if not, or None if detection unavailable."""
- if not self.check_hackrf_info():
- return None
-
- now = time.time()
- if self._hackrf_device_cache is not None and (now - self._hackrf_device_cache_ts) < 2.0:
- return self._hackrf_device_cache
-
- try:
- from utils.sdr.detection import detect_hackrf_devices
- connected = len(detect_hackrf_devices()) > 0
- except Exception as exc:
- logger.debug(f"HackRF device detection failed: {exc}")
- connected = False
-
- self._hackrf_device_cache = connected
- self._hackrf_device_cache_ts = now
- return connected
-
- def _require_hackrf_device(self) -> str | None:
- """Return an error string if HackRF is explicitly not detected."""
- detected = self.check_hackrf_device()
- if detected is False:
- return 'HackRF device not detected'
- return None
+ def check_hackrf(self) -> bool:
+ if self._hackrf_available is None:
+ self._hackrf_available = shutil.which('hackrf_transfer') is not None
+ return self._hackrf_available
+
+ def check_hackrf_info(self) -> bool:
+ if self._hackrf_info_available is None:
+ self._hackrf_info_available = shutil.which('hackrf_info') is not None
+ return self._hackrf_info_available
+
+ def check_hackrf_device(self) -> bool | None:
+ """Return True if a HackRF device is detected, False if not, or None if detection unavailable."""
+ if not self.check_hackrf_info():
+ return None
+
+ now = time.time()
+ if self._hackrf_device_cache is not None and (now - self._hackrf_device_cache_ts) < 2.0:
+ return self._hackrf_device_cache
+
+ try:
+ from utils.sdr.detection import detect_hackrf_devices
+ connected = len(detect_hackrf_devices()) > 0
+ except Exception as exc:
+ logger.debug(f"HackRF device detection failed: {exc}")
+ connected = False
+
+ self._hackrf_device_cache = connected
+ self._hackrf_device_cache_ts = now
+ return connected
+
+ def _require_hackrf_device(self) -> str | None:
+ """Return an error string if HackRF is explicitly not detected."""
+ detected = self.check_hackrf_device()
+ if detected is False:
+ return 'HackRF device not detected'
+ return None
def check_rtl433(self) -> bool:
if self._rtl433_available is None:
@@ -256,45 +256,45 @@ class SubGhzManager:
return 'sweep'
return 'idle'
- def get_status(self) -> dict:
- mode = self.active_mode
- hackrf_info_available = self.check_hackrf_info()
- detect_paused = mode in {'rx', 'decode', 'tx', 'sweep'}
- if detect_paused:
- # Avoid probing HackRF while a stream is active. A fresh "disconnected"
- # cache result should still surface to the UI, otherwise mark unknown.
- if self._hackrf_device_cache is False and (time.time() - self._hackrf_device_cache_ts) < 15.0:
- hackrf_connected: bool | None = False
- else:
- hackrf_connected = None
- else:
- hackrf_connected = self.check_hackrf_device()
- status: dict = {
- 'mode': mode,
- 'hackrf_available': self.check_hackrf(),
- 'hackrf_info_available': hackrf_info_available,
- 'hackrf_connected': hackrf_connected,
- 'hackrf_detection_paused': detect_paused,
- 'rtl433_available': self.check_rtl433(),
- 'sweep_available': self.check_sweep(),
- }
- if mode == 'rx':
- elapsed = time.time() - self._rx_start_time if self._rx_start_time else 0
- status.update({
- 'frequency_hz': self._rx_frequency_hz,
- 'sample_rate': self._rx_sample_rate,
- 'elapsed_seconds': round(elapsed, 1),
- 'trigger_enabled': self._rx_trigger_enabled,
- 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
- 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
- })
- elif mode == 'decode':
- elapsed = time.time() - self._decode_start_time if self._decode_start_time else 0
- status.update({
- 'frequency_hz': self._decode_frequency_hz,
- 'sample_rate': self._decode_sample_rate,
- 'elapsed_seconds': round(elapsed, 1),
- })
+ def get_status(self) -> dict:
+ mode = self.active_mode
+ hackrf_info_available = self.check_hackrf_info()
+ detect_paused = mode in {'rx', 'decode', 'tx', 'sweep'}
+ if detect_paused:
+ # Avoid probing HackRF while a stream is active. A fresh "disconnected"
+ # cache result should still surface to the UI, otherwise mark unknown.
+ if self._hackrf_device_cache is False and (time.time() - self._hackrf_device_cache_ts) < 15.0:
+ hackrf_connected: bool | None = False
+ else:
+ hackrf_connected = None
+ else:
+ hackrf_connected = self.check_hackrf_device()
+ status: dict = {
+ 'mode': mode,
+ 'hackrf_available': self.check_hackrf(),
+ 'hackrf_info_available': hackrf_info_available,
+ 'hackrf_connected': hackrf_connected,
+ 'hackrf_detection_paused': detect_paused,
+ 'rtl433_available': self.check_rtl433(),
+ 'sweep_available': self.check_sweep(),
+ }
+ if mode == 'rx':
+ elapsed = time.time() - self._rx_start_time if self._rx_start_time else 0
+ status.update({
+ 'frequency_hz': self._rx_frequency_hz,
+ 'sample_rate': self._rx_sample_rate,
+ 'elapsed_seconds': round(elapsed, 1),
+ 'trigger_enabled': self._rx_trigger_enabled,
+ 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
+ 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
+ })
+ elif mode == 'decode':
+ elapsed = time.time() - self._decode_start_time if self._decode_start_time else 0
+ status.update({
+ 'frequency_hz': self._decode_frequency_hz,
+ 'sample_rate': self._decode_sample_rate,
+ 'elapsed_seconds': round(elapsed, 1),
+ })
elif mode == 'tx':
elapsed = time.time() - self._tx_start_time if self._tx_start_time else 0
status.update({
@@ -304,30 +304,31 @@ class SubGhzManager:
return status
# ------------------------------------------------------------------
- # RECEIVE (IQ capture via hackrf_transfer -r)
- # ------------------------------------------------------------------
-
- def start_receive(
- self,
- frequency_hz: int,
- sample_rate: int = 2000000,
- lna_gain: int = 32,
- vga_gain: int = 20,
- trigger_enabled: bool = False,
- trigger_pre_ms: int = 350,
- trigger_post_ms: int = 700,
- device_serial: str | None = None,
- ) -> dict:
+ # RECEIVE (IQ capture via hackrf_transfer -r)
+ # ------------------------------------------------------------------
+
+ def start_receive(
+ self,
+ frequency_hz: int,
+ sample_rate: int = 2000000,
+ lna_gain: int = 32,
+ vga_gain: int = 20,
+ trigger_enabled: bool = False,
+ trigger_pre_ms: int = 350,
+ trigger_post_ms: int = 700,
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ if not self.check_hackrf():
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
+
with self._lock:
if self.active_mode != 'idle':
return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
-
# Validate gains
lna_gain = max(SUBGHZ_LNA_GAIN_MIN, min(SUBGHZ_LNA_GAIN_MAX, lna_gain))
vga_gain = max(SUBGHZ_VGA_GAIN_MIN, min(SUBGHZ_VGA_GAIN_MAX, vga_gain))
@@ -335,1063 +336,1068 @@ class SubGhzManager:
# Generate filename
ts = datetime.now().strftime('%Y%m%d_%H%M%S')
freq_mhz = frequency_hz / 1_000_000
- basename = f"{freq_mhz:.3f}MHz_{ts}"
- iq_file = self._captures_dir / f"{basename}.iq"
-
- cmd = [
- 'hackrf_transfer',
- '-r', str(iq_file),
- '-f', str(frequency_hz),
- '-s', str(sample_rate),
- '-l', str(lna_gain),
- '-g', str(vga_gain),
- ]
- if device_serial:
- cmd.extend(['-d', device_serial])
-
- logger.info(f"SubGHz RX: {' '.join(cmd)}")
-
- try:
- try:
- iq_file.touch(exist_ok=True)
- except OSError as e:
- logger.error(f"Failed to create RX file: {e}")
- return {'status': 'error', 'message': 'Failed to create capture file'}
-
- self._rx_process = subprocess.Popen(
- cmd,
- stdout=subprocess.DEVNULL,
- stderr=subprocess.PIPE,
- )
- register_process(self._rx_process)
-
- try:
- self._rx_file_handle = open(iq_file, 'rb', buffering=0)
- except OSError as e:
- safe_terminate(self._rx_process)
- unregister_process(self._rx_process)
- self._rx_process = None
- logger.error(f"Failed to open RX file: {e}")
- return {'status': 'error', 'message': 'Failed to open capture file'}
-
- self._rx_start_time = time.time()
- self._rx_frequency_hz = frequency_hz
- self._rx_sample_rate = sample_rate
- self._rx_lna_gain = lna_gain
- self._rx_vga_gain = vga_gain
- self._rx_file = iq_file
- self._rx_stop = False
- self._rx_bytes_written = 0
- self._rx_bursts = []
- self._rx_trigger_enabled = bool(trigger_enabled)
- self._rx_trigger_pre_s = max(0.05, min(5.0, float(trigger_pre_ms) / 1000.0))
- self._rx_trigger_post_s = max(0.10, min(10.0, float(trigger_post_ms) / 1000.0))
- self._rx_trigger_first_burst_start = None
- self._rx_trigger_last_burst_end = None
- self._rx_autostop_pending = False
- self._rx_modulation_hint = ''
- self._rx_modulation_confidence = 0.0
- self._rx_protocol_hint = ''
- self._rx_fingerprint_counts = {}
-
- # Start capture stream reader
- self._rx_thread = threading.Thread(
- target=self._rx_capture_loop,
- daemon=True,
- )
- self._rx_thread.start()
-
- # Monitor stderr in background
- threading.Thread(
- target=self._monitor_rx_stderr,
- daemon=True,
+ basename = f"{freq_mhz:.3f}MHz_{ts}"
+ iq_file = self._captures_dir / f"{basename}.iq"
+
+ cmd = [
+ 'hackrf_transfer',
+ '-r', str(iq_file),
+ '-f', str(frequency_hz),
+ '-s', str(sample_rate),
+ '-l', str(lna_gain),
+ '-g', str(vga_gain),
+ ]
+ if device_serial:
+ cmd.extend(['-d', device_serial])
+
+ logger.info(f"SubGHz RX: {' '.join(cmd)}")
+
+ try:
+ try:
+ iq_file.touch(exist_ok=True)
+ except OSError as e:
+ logger.error(f"Failed to create RX file: {e}")
+ return {'status': 'error', 'message': 'Failed to create capture file'}
+
+ self._rx_process = subprocess.Popen(
+ cmd,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.PIPE,
+ )
+ register_process(self._rx_process)
+
+ try:
+ self._rx_file_handle = open(iq_file, 'rb', buffering=0)
+ except OSError as e:
+ safe_terminate(self._rx_process)
+ unregister_process(self._rx_process)
+ self._rx_process = None
+ logger.error(f"Failed to open RX file: {e}")
+ return {'status': 'error', 'message': 'Failed to open capture file'}
+
+ self._rx_start_time = time.time()
+ self._rx_frequency_hz = frequency_hz
+ self._rx_sample_rate = sample_rate
+ self._rx_lna_gain = lna_gain
+ self._rx_vga_gain = vga_gain
+ self._rx_file = iq_file
+ self._rx_stop = False
+ self._rx_bytes_written = 0
+ self._rx_bursts = []
+ self._rx_trigger_enabled = bool(trigger_enabled)
+ self._rx_trigger_pre_s = max(0.05, min(5.0, float(trigger_pre_ms) / 1000.0))
+ self._rx_trigger_post_s = max(0.10, min(10.0, float(trigger_post_ms) / 1000.0))
+ self._rx_trigger_first_burst_start = None
+ self._rx_trigger_last_burst_end = None
+ self._rx_autostop_pending = False
+ self._rx_modulation_hint = ''
+ self._rx_modulation_confidence = 0.0
+ self._rx_protocol_hint = ''
+ self._rx_fingerprint_counts = {}
+
+ # Start capture stream reader
+ self._rx_thread = threading.Thread(
+ target=self._rx_capture_loop,
+ daemon=True,
+ )
+ self._rx_thread.start()
+
+ # Monitor stderr in background
+ threading.Thread(
+ target=self._monitor_rx_stderr,
+ daemon=True,
).start()
self._emit({
'type': 'status',
'mode': 'rx',
- 'status': 'started',
- 'frequency_hz': frequency_hz,
- 'sample_rate': sample_rate,
- 'trigger_enabled': self._rx_trigger_enabled,
- 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
- 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
- })
-
- if self._rx_trigger_enabled:
- self._emit({
- 'type': 'info',
- 'text': (
- f'[rx] Smart trigger armed '
- f'(pre {self._rx_trigger_pre_s:.2f}s, post {self._rx_trigger_post_s:.2f}s)'
- ),
- })
-
- return {
- 'status': 'started',
- 'frequency_hz': frequency_hz,
- 'sample_rate': sample_rate,
- 'file': iq_file.name,
- 'trigger_enabled': self._rx_trigger_enabled,
- 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
- 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
- }
+ 'status': 'started',
+ 'frequency_hz': frequency_hz,
+ 'sample_rate': sample_rate,
+ 'trigger_enabled': self._rx_trigger_enabled,
+ 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
+ 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
+ })
- except FileNotFoundError:
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- except Exception as e:
- logger.error(f"Failed to start RX: {e}")
- return {'status': 'error', 'message': str(e)}
-
- def _estimate_modulation_hint(
- self,
- data: bytes,
- ) -> tuple[str, float, str]:
- """Estimate coarse modulation family from raw IQ characteristics."""
- if not data:
- return 'Unknown', 0.0, 'No samples'
- try:
- raw = np.frombuffer(data, dtype=np.int8).astype(np.float32)
- if raw.size < 2048:
- return 'Unknown', 0.0, 'Insufficient samples'
-
- i_vals = raw[0::2]
- q_vals = raw[1::2]
- if i_vals.size == 0 or q_vals.size == 0:
- return 'Unknown', 0.0, 'Invalid IQ frame'
-
- # Light decimation for lower CPU while preserving burst shape.
- i_vals = i_vals[::4]
- q_vals = q_vals[::4]
- if i_vals.size < 256 or q_vals.size < 256:
- return 'Unknown', 0.0, 'Short frame'
-
- iq = i_vals + 1j * q_vals
- amp = np.abs(iq)
- mean_amp = float(np.mean(amp))
- std_amp = float(np.std(amp))
- amp_cv = std_amp / max(mean_amp, 1.0)
-
- phase_step = np.angle(iq[1:] * np.conj(iq[:-1]))
- phase_var = float(np.std(phase_step))
-
- # Simple pulse run-length profile on envelope.
- envelope = amp - float(np.median(amp))
- env_scale = float(np.percentile(np.abs(envelope), 92))
- if env_scale <= 1e-6:
- pulse_density = 0.0
- mean_run = 0.0
- else:
- norm = np.clip(envelope / env_scale, -1.0, 1.0)
- high = norm > 0.25
- pulse_density = float(np.mean(high))
- changes = np.where(np.diff(high.astype(np.int8)) != 0)[0]
- if changes.size >= 2:
- runs = np.diff(np.concatenate(([0], changes, [high.size - 1])))
- mean_run = float(np.mean(runs))
- else:
- mean_run = float(high.size)
-
- scores = {
- 'OOK/ASK': 0.0,
- 'FSK/GFSK': 0.0,
- 'PWM/PPM': 0.0,
- }
-
- # OOK: stronger amplitude contrast and moderate pulse occupancy.
- scores['OOK/ASK'] += max(0.0, min(1.0, (amp_cv - 0.22) / 0.35))
- scores['OOK/ASK'] += max(0.0, 1.0 - abs(pulse_density - 0.4) / 0.4) * 0.35
-
- # FSK: flatter amplitude, more phase movement.
- scores['FSK/GFSK'] += max(0.0, min(1.0, (phase_var - 0.45) / 0.9))
- scores['FSK/GFSK'] += max(0.0, min(1.0, (0.33 - amp_cv) / 0.28)) * 0.45
-
- # PWM/PPM: high edge density with short run lengths.
- edge_density = 0.0 if mean_run <= 0 else min(1.0, 28.0 / max(mean_run, 1.0))
- scores['PWM/PPM'] += max(0.0, min(1.0, (amp_cv - 0.28) / 0.45))
- scores['PWM/PPM'] += edge_density * 0.6
-
- best_family = max(scores, key=scores.get)
- best_score = float(scores[best_family])
- confidence = max(0.0, min(0.97, best_score))
- if confidence < 0.25:
- return 'Unknown', confidence, 'No clear modulation signature'
-
- reason = (
- f'amp_cv={amp_cv:.2f} phase_var={phase_var:.2f} '
- f'pulse_density={pulse_density:.2f}'
- )
- return best_family, confidence, reason
- except Exception:
- return 'Unknown', 0.0, 'Modulation analysis failed'
-
- def _fingerprint_burst_bytes(
- self,
- data: bytes,
- sample_rate: int,
- duration_seconds: float,
- ) -> str:
- """Create a stable burst fingerprint for grouping similar signals."""
- if not data:
- return ''
- try:
- raw = np.frombuffer(data, dtype=np.int8).astype(np.float32)
- if raw.size < 512:
- return ''
-
- i_vals = raw[0::2]
- q_vals = raw[1::2]
- if i_vals.size == 0 or q_vals.size == 0:
- return ''
-
- amp = np.sqrt(i_vals * i_vals + q_vals * q_vals)
- if amp.size < 64:
- return ''
-
- # Normalize and downsample envelope into a fixed-size shape vector.
- amp = amp - float(np.median(amp))
- scale = float(np.percentile(np.abs(amp), 95))
- if scale <= 1e-6:
- scale = 1.0
- amp = np.clip(amp / scale, -1.0, 1.0)
- target = 128
- if amp.size != target:
- idx = np.linspace(0, amp.size - 1, target).astype(int)
- amp = amp[idx]
- quant = np.round((amp + 1.0) * 7.5).astype(np.uint8)
-
- # Include coarse timing and center-energy traits.
- burst_ms = int(max(1, round(duration_seconds * 1000)))
- sr_khz = int(max(1, round(sample_rate / 1000)))
- payload = (
- quant.tobytes()
- + burst_ms.to_bytes(2, 'little', signed=False)
- + sr_khz.to_bytes(2, 'little', signed=False)
- )
- return hashlib.sha1(payload).hexdigest()[:16]
- except Exception:
- return ''
-
- def _protocol_hint_from_capture(
- self,
- frequency_hz: int,
- modulation_hint: str,
- burst_count: int,
- ) -> str:
- freq = frequency_hz / 1_000_000
- mod = (modulation_hint or '').upper()
- if burst_count <= 0:
- return 'No burst activity'
- if 433.70 <= freq <= 434.10 and 'OOK' in mod and burst_count >= 2:
- return 'Likely weather sensor / simple remote telemetry'
- if 868.0 <= freq <= 870.0 and 'OOK' in mod:
- return 'Likely EU ISM OOK sensor/remote'
- if 902.0 <= freq <= 928.0 and 'FSK' in mod:
- return 'Likely ISM telemetry (FSK/GFSK)'
- if 'PWM' in mod:
- return 'Likely pulse-width/distance keyed remote'
- if 'FSK' in mod:
- return 'Likely continuous-tone telemetry'
- if 'OOK' in mod:
- return 'Likely OOK keyed burst transmitter'
- return 'Unknown protocol family'
-
- def _auto_capture_label(
- self,
- frequency_hz: int,
- burst_count: int,
- modulation_hint: str,
- protocol_hint: str,
- ) -> str:
- freq = frequency_hz / 1_000_000
- mod = (modulation_hint or '').upper()
- if burst_count <= 0:
- return f'Raw Capture {freq:.3f} MHz'
- if 'weather' in protocol_hint.lower():
- return f'Weather-like Burst ({burst_count})'
- if 'OOK' in mod:
- return f'OOK Burst Cluster ({burst_count})'
- if 'FSK' in mod:
- return f'FSK Telemetry Burst ({burst_count})'
- if 'PWM' in mod:
- return f'PWM/PPM Burst ({burst_count})'
- return f'RF Burst Capture ({burst_count})'
-
- def _trim_capture_to_trigger_window(
- self,
- iq_file: Path,
- sample_rate: int,
- duration_seconds: float,
- bursts: list[dict],
- ) -> tuple[float, list[dict]]:
- """Trim a full capture to trigger window using configured pre/post roll."""
- if not self._rx_trigger_enabled or not bursts or sample_rate <= 0:
- return duration_seconds, bursts
-
- first_start = min(float(b.get('start_seconds', 0.0)) for b in bursts)
- last_end = max(
- float(b.get('start_seconds', 0.0)) + float(b.get('duration_seconds', 0.0))
- for b in bursts
- )
- start_s = max(0.0, first_start - self._rx_trigger_pre_s)
- end_s = min(duration_seconds, last_end + self._rx_trigger_post_s)
- if end_s <= start_s:
- return duration_seconds, bursts
- if start_s <= 0.001 and (duration_seconds - end_s) <= 0.001:
- return duration_seconds, bursts
-
- bytes_per_second = max(2, int(sample_rate) * 2)
- start_byte = int(start_s * bytes_per_second) & ~1
- end_byte = int(end_s * bytes_per_second) & ~1
- if end_byte <= start_byte:
- return duration_seconds, bursts
-
- tmp_path = iq_file.with_suffix('.trimtmp')
- try:
- with open(iq_file, 'rb') as src, open(tmp_path, 'wb') as dst:
- src.seek(start_byte)
- remaining = end_byte - start_byte
- while remaining > 0:
- chunk = src.read(min(262144, remaining))
- if not chunk:
- break
- dst.write(chunk)
- remaining -= len(chunk)
- os.replace(tmp_path, iq_file)
- except OSError as exc:
- logger.error(f"Failed trimming trigger capture: {exc}")
- try:
- if tmp_path.exists():
- tmp_path.unlink()
- except OSError:
- pass
- return duration_seconds, bursts
-
- trimmed_duration = max(0.0, float(end_byte - start_byte) / float(bytes_per_second))
- adjusted_bursts: list[dict] = []
- for burst in bursts:
- raw_start = float(burst.get('start_seconds', 0.0))
- raw_dur = max(0.0, float(burst.get('duration_seconds', 0.0)))
- raw_end = raw_start + raw_dur
- if raw_end < start_s or raw_start > end_s:
- continue
- adjusted = dict(burst)
- adjusted['start_seconds'] = round(max(0.0, raw_start - start_s), 3)
- adjusted['duration_seconds'] = round(raw_dur, 3)
- adjusted_bursts.append(adjusted)
- return trimmed_duration, adjusted_bursts if adjusted_bursts else bursts
-
- def _rx_capture_loop(self) -> None:
- """Read IQ data from the capture file and emit UI metrics."""
- process = self._rx_process
- file_handle = self._rx_file_handle
-
- if not process or not file_handle:
- logger.error("RX capture loop missing process/file handle")
- return
-
- CHUNK = 262144 # 256 KB (~64 ms @ 2 Msps complex int8 IQ)
- LEVEL_INTERVAL = 0.05
- WAVE_INTERVAL = 0.25
- SPECTRUM_INTERVAL = 0.25
- STATS_INTERVAL = 1.0
- HINT_EVAL_INTERVAL = 0.25
- HINT_EMIT_INTERVAL = 1.5
-
- last_level = 0.0
- last_wave = 0.0
- last_spectrum = 0.0
- last_stats = time.time()
- last_log = time.time()
- last_hint_eval = 0.0
- last_hint_emit = 0.0
- bytes_since_stats = 0
- first_chunk = True
- burst_active = False
- burst_start = 0.0
- burst_last_high = 0.0
- burst_peak = 0
- burst_bytes = bytearray()
- burst_hint_family = 'Unknown'
- burst_hint_conf = 0.0
- BURST_OFF_HOLD = 0.18
- BURST_MIN_DURATION = 0.04
- MAX_BURST_BYTES = max(262144, int(max(1, self._rx_sample_rate) * 2 * 2))
- smooth_level = 0.0
- prev_smooth_level = 0.0
- noise_floor = 0.0
- peak_tracker = 0.0
- on_threshold = 0.0
- warmup_until = time.time() + 1.0
- modulation_scores: dict[str, float] = {
- 'OOK/ASK': 0.0,
- 'FSK/GFSK': 0.0,
- 'PWM/PPM': 0.0,
- }
- last_hint_reason = ''
-
- try:
- fd = file_handle.fileno()
- if not isinstance(fd, int) or fd < 0:
- logger.error("Invalid file descriptor from RX file handle")
- return
- except (OSError, ValueError, TypeError):
- logger.error("Failed to obtain RX file descriptor")
- return
-
- try:
- while not self._rx_stop:
- try:
- data = os.read(fd, CHUNK)
- except OSError:
- break
- if not data:
- if process.poll() is not None:
- break
- time.sleep(0.05)
- continue
-
- self._rx_bytes_written += len(data)
- bytes_since_stats += len(data)
- if burst_active and len(burst_bytes) < MAX_BURST_BYTES:
- room = MAX_BURST_BYTES - len(burst_bytes)
- burst_bytes.extend(data[:room])
-
- if first_chunk:
- first_chunk = False
- self._emit({'type': 'info', 'text': '[rx] Receiving IQ data...'})
-
- now = time.time()
- if now - last_hint_eval >= HINT_EVAL_INTERVAL:
- for key in modulation_scores:
- modulation_scores[key] *= 0.97
- hint_family, hint_conf, hint_reason = self._estimate_modulation_hint(data)
- if hint_family in modulation_scores:
- modulation_scores[hint_family] += max(0.05, hint_conf)
- last_hint_reason = hint_reason
- last_hint_eval = now
-
- if now - last_level >= LEVEL_INTERVAL:
- level = float(self._compute_rx_level(data))
- prev_smooth_level = smooth_level
- if smooth_level <= 0:
- smooth_level = level
- else:
- smooth_level = (smooth_level * 0.72) + (level * 0.28)
-
- if noise_floor <= 0:
- noise_floor = smooth_level
- elif not burst_active:
- # Track receiver noise floor when we are not inside a burst.
- noise_floor = (noise_floor * 0.94) + (smooth_level * 0.06)
-
- peak_tracker = max(smooth_level, peak_tracker * 0.985)
- spread = max(2.0, peak_tracker - noise_floor)
- on_delta = max(2.8, spread * 0.52)
- off_delta = max(1.2, spread * 0.24)
- on_threshold = min(95.0, noise_floor + on_delta)
- off_threshold = max(0.8, min(on_threshold - 0.5, noise_floor + off_delta))
- rising = smooth_level - prev_smooth_level
-
- self._emit({'type': 'rx_level', 'level': int(round(smooth_level))})
-
- if not burst_active:
- if now >= warmup_until and smooth_level >= on_threshold and rising >= 0.35:
- burst_active = True
- burst_start = now
- burst_last_high = now
- burst_peak = int(round(smooth_level))
- burst_bytes = bytearray(data[: min(len(data), MAX_BURST_BYTES)])
- burst_hint_family = 'Unknown'
- burst_hint_conf = 0.0
- if self._rx_trigger_enabled and self._rx_trigger_first_burst_start is None:
- self._rx_trigger_first_burst_start = max(
- 0.0, now - self._rx_start_time
- )
- self._emit({
- 'type': 'info',
- 'text': '[rx] Trigger fired - capturing burst window',
- })
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'rx',
- 'event': 'start',
- 'start_offset_s': round(
- max(0.0, now - self._rx_start_time), 3
- ),
- 'level': int(round(smooth_level)),
- })
- else:
- if smooth_level >= off_threshold:
- burst_last_high = now
- burst_peak = max(burst_peak, int(round(smooth_level)))
- elif (now - burst_last_high) >= BURST_OFF_HOLD:
- duration = now - burst_start
- if duration >= BURST_MIN_DURATION:
- fp = self._fingerprint_burst_bytes(
- bytes(burst_bytes),
- self._rx_sample_rate,
- duration,
- )
- if fp:
- self._rx_fingerprint_counts[fp] = (
- self._rx_fingerprint_counts.get(fp, 0) + 1
- )
- burst_hint_family, burst_hint_conf, burst_reason = self._estimate_modulation_hint(
- bytes(burst_bytes)
- )
- if burst_hint_family in modulation_scores and burst_hint_conf > 0:
- modulation_scores[burst_hint_family] += burst_hint_conf * 1.8
- last_hint_reason = burst_reason
- burst_data = {
- 'start_seconds': round(
- max(0.0, burst_start - self._rx_start_time), 3
- ),
- 'duration_seconds': round(duration, 3),
- 'peak_level': int(burst_peak),
- 'fingerprint': fp,
- 'modulation_hint': burst_hint_family,
- 'modulation_confidence': round(float(burst_hint_conf), 3),
- }
- if len(self._rx_bursts) < 512:
- self._rx_bursts.append(burst_data)
- self._rx_trigger_last_burst_end = max(
- 0.0, now - self._rx_start_time
- )
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'rx',
- 'event': 'end',
- 'start_offset_s': burst_data['start_seconds'],
- 'duration_ms': int(duration * 1000),
- 'peak_level': int(burst_peak),
- 'fingerprint': fp,
- 'modulation_hint': burst_hint_family,
- 'modulation_confidence': round(float(burst_hint_conf), 3),
- })
- burst_active = False
- burst_peak = 0
- burst_bytes = bytearray()
- last_level = now
-
- # Emit live modulation/protocol hint periodically.
- if now - last_hint_emit >= HINT_EMIT_INTERVAL:
- best_family = max(modulation_scores, key=modulation_scores.get)
- total_score = sum(max(0.0, v) for v in modulation_scores.values())
- best_score = max(0.0, modulation_scores.get(best_family, 0.0))
- hint_conf = 0.0 if total_score <= 0 else min(0.98, best_score / total_score)
- protocol_hint = self._protocol_hint_from_capture(
- self._rx_frequency_hz,
- best_family if hint_conf >= 0.3 else 'Unknown',
- len(self._rx_bursts),
- )
- self._rx_protocol_hint = protocol_hint
- if hint_conf >= 0.30:
- self._rx_modulation_hint = best_family
- self._rx_modulation_confidence = hint_conf
- self._emit({
- 'type': 'rx_hint',
- 'modulation_hint': best_family,
- 'confidence': round(hint_conf, 3),
- 'protocol_hint': protocol_hint,
- 'reason': last_hint_reason,
- })
- last_hint_emit = now
-
- # Smart-trigger auto-stop after quiet post-roll window.
- if (
- self._rx_trigger_enabled
- and self._rx_trigger_first_burst_start is not None
- and not burst_active
- and not self._rx_autostop_pending
- ):
- last_end = self._rx_trigger_last_burst_end
- if last_end is not None and (max(0.0, now - self._rx_start_time) - last_end) >= self._rx_trigger_post_s:
- self._rx_autostop_pending = True
- self._emit({
- 'type': 'info',
- 'text': '[rx] Trigger window complete - finalizing capture',
- })
- threading.Thread(target=self.stop_receive, daemon=True).start()
- break
-
- if now - last_wave >= WAVE_INTERVAL:
- samples = self._extract_waveform(data)
- if samples:
- self._emit({'type': 'rx_waveform', 'samples': samples})
- last_wave = now
-
- if now - last_spectrum >= SPECTRUM_INTERVAL:
- bins = self._compute_rx_spectrum(data)
- if bins:
- self._emit({'type': 'rx_spectrum', 'bins': bins})
- last_spectrum = now
-
- if now - last_stats >= STATS_INTERVAL:
- rate_kb = bytes_since_stats / (now - last_stats) / 1024
- file_size = 0
- if self._rx_file and self._rx_file.exists():
- try:
- file_size = self._rx_file.stat().st_size
- except OSError:
- file_size = 0
- self._emit({
- 'type': 'rx_stats',
- 'rate_kb': round(rate_kb, 1),
- 'file_size': file_size,
- 'elapsed_seconds': round(time.time() - self._rx_start_time, 1) if self._rx_start_time else 0,
- })
- if now - last_log >= 5.0:
- self._emit({
- 'type': 'info',
- 'text': (
- f'[rx] IQ: {rate_kb:.0f} KB/s '
- f'(lvl {smooth_level:.1f}, floor {noise_floor:.1f}, thr {on_threshold:.1f})'
- ),
- })
- last_log = now
- bytes_since_stats = 0
- last_stats = now
-
- if burst_active:
- duration = max(0.0, time.time() - burst_start)
- if duration >= BURST_MIN_DURATION:
- fp = self._fingerprint_burst_bytes(
- bytes(burst_bytes),
- self._rx_sample_rate,
- duration,
- )
- if fp:
- self._rx_fingerprint_counts[fp] = (
- self._rx_fingerprint_counts.get(fp, 0) + 1
- )
- burst_hint_family, burst_hint_conf, burst_reason = self._estimate_modulation_hint(
- bytes(burst_bytes)
- )
- if burst_hint_family in modulation_scores and burst_hint_conf > 0:
- modulation_scores[burst_hint_family] += burst_hint_conf * 1.8
- last_hint_reason = burst_reason
- burst_data = {
- 'start_seconds': round(
- max(0.0, burst_start - self._rx_start_time), 3
- ),
- 'duration_seconds': round(duration, 3),
- 'peak_level': int(burst_peak),
- 'fingerprint': fp,
- 'modulation_hint': burst_hint_family,
- 'modulation_confidence': round(float(burst_hint_conf), 3),
- }
- if len(self._rx_bursts) < 512:
- self._rx_bursts.append(burst_data)
- self._rx_trigger_last_burst_end = max(
- 0.0, time.time() - self._rx_start_time
- )
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'rx',
- 'event': 'end',
- 'start_offset_s': burst_data['start_seconds'],
- 'duration_ms': int(duration * 1000),
- 'peak_level': int(burst_peak),
- 'fingerprint': fp,
- 'modulation_hint': burst_hint_family,
- 'modulation_confidence': round(float(burst_hint_conf), 3),
- })
-
- # Finalize modulation summary for capture metadata.
- if modulation_scores:
- best_family = max(modulation_scores, key=modulation_scores.get)
- total_score = sum(max(0.0, v) for v in modulation_scores.values())
- best_score = max(0.0, modulation_scores.get(best_family, 0.0))
- hint_conf = 0.0 if total_score <= 0 else min(0.98, best_score / total_score)
- if hint_conf >= 0.3:
- self._rx_modulation_hint = best_family
- self._rx_modulation_confidence = hint_conf
- self._rx_protocol_hint = self._protocol_hint_from_capture(
- self._rx_frequency_hz,
- self._rx_modulation_hint,
- len(self._rx_bursts),
- )
- finally:
- try:
- file_handle.close()
- except OSError:
- pass
- with self._lock:
- if self._rx_file_handle is file_handle:
- self._rx_file_handle = None
-
- def _compute_rx_level(self, data: bytes) -> int:
- """Compute a gain-tolerant 0-100 signal activity score from raw IQ bytes."""
- if not data:
- return 0
- try:
- samples = np.frombuffer(data, dtype=np.int8).astype(np.float32)
- if samples.size < 2:
- return 0
- i_vals = samples[0::2]
- q_vals = samples[1::2]
- if i_vals.size == 0 or q_vals.size == 0:
- return 0
- i_vals = i_vals[::4]
- q_vals = q_vals[::4]
- if i_vals.size == 0 or q_vals.size == 0:
- return 0
- mag = np.sqrt(i_vals * i_vals + q_vals * q_vals)
- if mag.size == 0:
- return 0
-
- noise = float(np.percentile(mag, 30))
- signal = float(np.percentile(mag, 90))
- peak = float(np.percentile(mag, 99))
- contrast = max(0.0, signal - noise)
- crest = max(0.0, peak - signal)
- mean_mag = float(np.mean(mag))
-
- # Normalize by local floor so changing gain is less likely to break
- # burst visibility (low gain still detectable, high gain not always "on").
- contrast_norm = contrast / max(8.0, noise + 8.0)
- crest_norm = crest / max(8.0, signal + 8.0)
- energy_norm = mean_mag / 60.0
- level_f = (contrast_norm * 55.0) + (crest_norm * 20.0) + (energy_norm * 10.0)
- level = int(max(0, min(100, level_f)))
- if level == 0 and contrast > 0.5:
- level = 1
- return level
- except Exception:
- return 0
-
- def _extract_waveform(self, data: bytes, points: int = 256) -> list[float]:
- """Extract a normalized envelope waveform for UI display."""
- try:
- samples = np.frombuffer(data, dtype=np.int8).astype(np.float32)
- if samples.size < 2:
- return []
- i_vals = samples[0::2]
- q_vals = samples[1::2]
- if i_vals.size == 0 or q_vals.size == 0:
- return []
- mag = np.sqrt(i_vals * i_vals + q_vals * q_vals)
- if mag.size == 0:
- return []
- step = max(1, mag.size // points)
- scoped = mag[::step][:points]
- if scoped.size == 0:
- return []
- baseline = float(np.median(scoped))
- centered = scoped - baseline
- scale = float(np.percentile(np.abs(centered), 95))
- if scale <= 1e-6:
- normalized = np.zeros_like(centered)
- else:
- normalized = np.clip(centered / (scale * 2.5), -1.0, 1.0)
- return [round(float(x), 3) for x in normalized.tolist()]
- except Exception:
- return []
-
- def _compute_rx_spectrum(self, data: bytes, bins: int = 256) -> list[int]:
- """Compute a simple FFT magnitude slice for waterfall rendering."""
- try:
- samples = np.frombuffer(data, dtype=np.int8)
- if samples.size < bins * 2:
- return []
- fft_size = max(256, bins)
- needed = fft_size * 2
- if samples.size < needed:
- return []
- samples = samples[:needed].astype(np.float32)
- i_vals = samples[0::2]
- q_vals = samples[1::2]
- iq = i_vals + 1j * q_vals
- window = np.hanning(fft_size)
- spectrum = np.fft.fftshift(np.fft.fft(iq * window))
- mag = 20 * np.log10(np.abs(spectrum) + 1e-6)
- mag -= np.max(mag)
- # Map -60..0 dB range to 0..255
- scaled = np.clip((mag + 60.0) / 60.0, 0.0, 1.0)
- bins_vals = (scaled * 255).astype(np.uint8)
- if bins_vals.size != bins:
- idx = np.linspace(0, bins_vals.size - 1, bins).astype(int)
- bins_vals = bins_vals[idx]
- return bins_vals.tolist()
- except Exception:
- return []
-
- def _monitor_rx_stderr(self) -> None:
- process = self._rx_process
- if not process or not process.stderr:
- return
- try:
- for line in iter(process.stderr.readline, b''):
- text = line.decode('utf-8', errors='replace').strip()
- if text:
- logger.debug(f"[hackrf_rx] {text}")
- if 'error' in text.lower():
- self._emit({'type': 'info', 'text': f'[hackrf_rx] {text}'})
- except Exception:
- pass
-
- def stop_receive(self) -> dict:
- thread_to_join: threading.Thread | None = None
- file_handle: BinaryIO | None = None
- with self._lock:
- if not self._rx_process or self._rx_process.poll() is not None:
- return {'status': 'not_running'}
-
- self._rx_stop = True
- thread_to_join = self._rx_thread
- self._rx_thread = None
- file_handle = self._rx_file_handle
-
- safe_terminate(self._rx_process)
- unregister_process(self._rx_process)
- self._rx_process = None
-
- if thread_to_join and thread_to_join.is_alive():
- thread_to_join.join(timeout=2.0)
-
- if file_handle:
- try:
- file_handle.close()
- except OSError:
- pass
- with self._lock:
- if self._rx_file_handle is file_handle:
- self._rx_file_handle = None
-
- duration = time.time() - self._rx_start_time if self._rx_start_time else 0
- iq_file = self._rx_file
-
- # Write JSON sidecar metadata
- capture = None
- if iq_file and iq_file.exists():
- bursts = list(self._rx_bursts)
- duration, bursts = self._trim_capture_to_trigger_window(
- iq_file=iq_file,
- sample_rate=self._rx_sample_rate,
- duration_seconds=duration,
- bursts=bursts,
- )
- size = iq_file.stat().st_size
- dominant_fingerprint = ''
- dominant_fingerprint_count = 0
- for fp, count in self._rx_fingerprint_counts.items():
- if count > dominant_fingerprint_count:
- dominant_fingerprint = fp
- dominant_fingerprint_count = count
-
- modulation_hint = self._rx_modulation_hint
- modulation_confidence = float(self._rx_modulation_confidence or 0.0)
- if not modulation_hint and bursts:
- burst_hint_totals: dict[str, float] = {}
- for burst in bursts:
- hint_name = str(burst.get('modulation_hint') or '').strip()
- hint_conf = float(burst.get('modulation_confidence') or 0.0)
- if not hint_name or hint_name.lower() == 'unknown':
- continue
- burst_hint_totals[hint_name] = burst_hint_totals.get(hint_name, 0.0) + max(0.05, hint_conf)
- if burst_hint_totals:
- modulation_hint = max(burst_hint_totals, key=burst_hint_totals.get)
- total_score = sum(burst_hint_totals.values())
- modulation_confidence = min(
- 0.98,
- burst_hint_totals[modulation_hint] / max(total_score, 0.001),
- )
-
- protocol_hint = self._protocol_hint_from_capture(
- self._rx_frequency_hz,
- modulation_hint,
- len(bursts),
- )
- label = self._auto_capture_label(
- self._rx_frequency_hz,
- len(bursts),
- modulation_hint,
- protocol_hint,
- )
- capture_id = uuid.uuid4().hex[:12]
- capture = SubGhzCapture(
- capture_id=capture_id,
- filename=iq_file.name,
- frequency_hz=self._rx_frequency_hz,
- sample_rate=self._rx_sample_rate,
- lna_gain=self._rx_lna_gain,
- vga_gain=self._rx_vga_gain,
- timestamp=datetime.now(timezone.utc).isoformat(),
- duration_seconds=round(duration, 1),
- size_bytes=size,
- label=label,
- label_source='auto',
- bursts=bursts,
- modulation_hint=modulation_hint,
- modulation_confidence=round(modulation_confidence, 3),
- protocol_hint=protocol_hint,
- dominant_fingerprint=dominant_fingerprint,
- trigger_enabled=self._rx_trigger_enabled,
- trigger_pre_seconds=round(self._rx_trigger_pre_s, 3),
- trigger_post_seconds=round(self._rx_trigger_post_s, 3),
- )
- meta_path = iq_file.with_suffix('.json')
- try:
- meta_path.write_text(json.dumps(capture.to_dict(), indent=2))
- except OSError as e:
- logger.error(f"Failed to write capture metadata: {e}")
-
- with self._lock:
- self._rx_file = None
- self._rx_start_time = 0
- self._rx_bytes_written = 0
- self._rx_bursts = []
- self._rx_trigger_enabled = False
- self._rx_trigger_first_burst_start = None
- self._rx_trigger_last_burst_end = None
- self._rx_autostop_pending = False
- self._rx_modulation_hint = ''
- self._rx_modulation_confidence = 0.0
- self._rx_protocol_hint = ''
- self._rx_fingerprint_counts = {}
-
- self._emit({
- 'type': 'status',
- 'mode': 'idle',
- 'status': 'stopped',
- 'duration_seconds': round(duration, 1),
- })
-
- result = {'status': 'stopped', 'duration_seconds': round(duration, 1)}
- if capture:
- result['capture'] = capture.to_dict()
- return result
+ if self._rx_trigger_enabled:
+ self._emit({
+ 'type': 'info',
+ 'text': (
+ f'[rx] Smart trigger armed '
+ f'(pre {self._rx_trigger_pre_s:.2f}s, post {self._rx_trigger_post_s:.2f}s)'
+ ),
+ })
+
+ return {
+ 'status': 'started',
+ 'frequency_hz': frequency_hz,
+ 'sample_rate': sample_rate,
+ 'file': iq_file.name,
+ 'trigger_enabled': self._rx_trigger_enabled,
+ 'trigger_pre_seconds': round(self._rx_trigger_pre_s, 3),
+ 'trigger_post_seconds': round(self._rx_trigger_post_s, 3),
+ }
+
+ except FileNotFoundError:
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ except Exception as e:
+ logger.error(f"Failed to start RX: {e}")
+ return {'status': 'error', 'message': str(e)}
+
+ def _estimate_modulation_hint(
+ self,
+ data: bytes,
+ ) -> tuple[str, float, str]:
+ """Estimate coarse modulation family from raw IQ characteristics."""
+ if not data:
+ return 'Unknown', 0.0, 'No samples'
+ try:
+ raw = np.frombuffer(data, dtype=np.int8).astype(np.float32)
+ if raw.size < 2048:
+ return 'Unknown', 0.0, 'Insufficient samples'
+
+ i_vals = raw[0::2]
+ q_vals = raw[1::2]
+ if i_vals.size == 0 or q_vals.size == 0:
+ return 'Unknown', 0.0, 'Invalid IQ frame'
+
+ # Light decimation for lower CPU while preserving burst shape.
+ i_vals = i_vals[::4]
+ q_vals = q_vals[::4]
+ if i_vals.size < 256 or q_vals.size < 256:
+ return 'Unknown', 0.0, 'Short frame'
+
+ iq = i_vals + 1j * q_vals
+ amp = np.abs(iq)
+ mean_amp = float(np.mean(amp))
+ std_amp = float(np.std(amp))
+ amp_cv = std_amp / max(mean_amp, 1.0)
+
+ phase_step = np.angle(iq[1:] * np.conj(iq[:-1]))
+ phase_var = float(np.std(phase_step))
+
+ # Simple pulse run-length profile on envelope.
+ envelope = amp - float(np.median(amp))
+ env_scale = float(np.percentile(np.abs(envelope), 92))
+ if env_scale <= 1e-6:
+ pulse_density = 0.0
+ mean_run = 0.0
+ else:
+ norm = np.clip(envelope / env_scale, -1.0, 1.0)
+ high = norm > 0.25
+ pulse_density = float(np.mean(high))
+ changes = np.where(np.diff(high.astype(np.int8)) != 0)[0]
+ if changes.size >= 2:
+ runs = np.diff(np.concatenate(([0], changes, [high.size - 1])))
+ mean_run = float(np.mean(runs))
+ else:
+ mean_run = float(high.size)
+
+ scores = {
+ 'OOK/ASK': 0.0,
+ 'FSK/GFSK': 0.0,
+ 'PWM/PPM': 0.0,
+ }
+
+ # OOK: stronger amplitude contrast and moderate pulse occupancy.
+ scores['OOK/ASK'] += max(0.0, min(1.0, (amp_cv - 0.22) / 0.35))
+ scores['OOK/ASK'] += max(0.0, 1.0 - abs(pulse_density - 0.4) / 0.4) * 0.35
+
+ # FSK: flatter amplitude, more phase movement.
+ scores['FSK/GFSK'] += max(0.0, min(1.0, (phase_var - 0.45) / 0.9))
+ scores['FSK/GFSK'] += max(0.0, min(1.0, (0.33 - amp_cv) / 0.28)) * 0.45
+
+ # PWM/PPM: high edge density with short run lengths.
+ edge_density = 0.0 if mean_run <= 0 else min(1.0, 28.0 / max(mean_run, 1.0))
+ scores['PWM/PPM'] += max(0.0, min(1.0, (amp_cv - 0.28) / 0.45))
+ scores['PWM/PPM'] += edge_density * 0.6
+
+ best_family = max(scores, key=scores.get)
+ best_score = float(scores[best_family])
+ confidence = max(0.0, min(0.97, best_score))
+ if confidence < 0.25:
+ return 'Unknown', confidence, 'No clear modulation signature'
+
+ reason = (
+ f'amp_cv={amp_cv:.2f} phase_var={phase_var:.2f} '
+ f'pulse_density={pulse_density:.2f}'
+ )
+ return best_family, confidence, reason
+ except Exception:
+ return 'Unknown', 0.0, 'Modulation analysis failed'
+
+ def _fingerprint_burst_bytes(
+ self,
+ data: bytes,
+ sample_rate: int,
+ duration_seconds: float,
+ ) -> str:
+ """Create a stable burst fingerprint for grouping similar signals."""
+ if not data:
+ return ''
+ try:
+ raw = np.frombuffer(data, dtype=np.int8).astype(np.float32)
+ if raw.size < 512:
+ return ''
+
+ i_vals = raw[0::2]
+ q_vals = raw[1::2]
+ if i_vals.size == 0 or q_vals.size == 0:
+ return ''
+
+ amp = np.sqrt(i_vals * i_vals + q_vals * q_vals)
+ if amp.size < 64:
+ return ''
+
+ # Normalize and downsample envelope into a fixed-size shape vector.
+ amp = amp - float(np.median(amp))
+ scale = float(np.percentile(np.abs(amp), 95))
+ if scale <= 1e-6:
+ scale = 1.0
+ amp = np.clip(amp / scale, -1.0, 1.0)
+ target = 128
+ if amp.size != target:
+ idx = np.linspace(0, amp.size - 1, target).astype(int)
+ amp = amp[idx]
+ quant = np.round((amp + 1.0) * 7.5).astype(np.uint8)
+
+ # Include coarse timing and center-energy traits.
+ burst_ms = int(max(1, round(duration_seconds * 1000)))
+ sr_khz = int(max(1, round(sample_rate / 1000)))
+ payload = (
+ quant.tobytes()
+ + burst_ms.to_bytes(2, 'little', signed=False)
+ + sr_khz.to_bytes(2, 'little', signed=False)
+ )
+ return hashlib.sha1(payload).hexdigest()[:16]
+ except Exception:
+ return ''
+
+ def _protocol_hint_from_capture(
+ self,
+ frequency_hz: int,
+ modulation_hint: str,
+ burst_count: int,
+ ) -> str:
+ freq = frequency_hz / 1_000_000
+ mod = (modulation_hint or '').upper()
+ if burst_count <= 0:
+ return 'No burst activity'
+ if 433.70 <= freq <= 434.10 and 'OOK' in mod and burst_count >= 2:
+ return 'Likely weather sensor / simple remote telemetry'
+ if 868.0 <= freq <= 870.0 and 'OOK' in mod:
+ return 'Likely EU ISM OOK sensor/remote'
+ if 902.0 <= freq <= 928.0 and 'FSK' in mod:
+ return 'Likely ISM telemetry (FSK/GFSK)'
+ if 'PWM' in mod:
+ return 'Likely pulse-width/distance keyed remote'
+ if 'FSK' in mod:
+ return 'Likely continuous-tone telemetry'
+ if 'OOK' in mod:
+ return 'Likely OOK keyed burst transmitter'
+ return 'Unknown protocol family'
+
+ def _auto_capture_label(
+ self,
+ frequency_hz: int,
+ burst_count: int,
+ modulation_hint: str,
+ protocol_hint: str,
+ ) -> str:
+ freq = frequency_hz / 1_000_000
+ mod = (modulation_hint or '').upper()
+ if burst_count <= 0:
+ return f'Raw Capture {freq:.3f} MHz'
+ if 'weather' in protocol_hint.lower():
+ return f'Weather-like Burst ({burst_count})'
+ if 'OOK' in mod:
+ return f'OOK Burst Cluster ({burst_count})'
+ if 'FSK' in mod:
+ return f'FSK Telemetry Burst ({burst_count})'
+ if 'PWM' in mod:
+ return f'PWM/PPM Burst ({burst_count})'
+ return f'RF Burst Capture ({burst_count})'
+
+ def _trim_capture_to_trigger_window(
+ self,
+ iq_file: Path,
+ sample_rate: int,
+ duration_seconds: float,
+ bursts: list[dict],
+ ) -> tuple[float, list[dict]]:
+ """Trim a full capture to trigger window using configured pre/post roll."""
+ if not self._rx_trigger_enabled or not bursts or sample_rate <= 0:
+ return duration_seconds, bursts
+
+ first_start = min(float(b.get('start_seconds', 0.0)) for b in bursts)
+ last_end = max(
+ float(b.get('start_seconds', 0.0)) + float(b.get('duration_seconds', 0.0))
+ for b in bursts
+ )
+ start_s = max(0.0, first_start - self._rx_trigger_pre_s)
+ end_s = min(duration_seconds, last_end + self._rx_trigger_post_s)
+ if end_s <= start_s:
+ return duration_seconds, bursts
+ if start_s <= 0.001 and (duration_seconds - end_s) <= 0.001:
+ return duration_seconds, bursts
+
+ bytes_per_second = max(2, int(sample_rate) * 2)
+ start_byte = int(start_s * bytes_per_second) & ~1
+ end_byte = int(end_s * bytes_per_second) & ~1
+ if end_byte <= start_byte:
+ return duration_seconds, bursts
+
+ tmp_path = iq_file.with_suffix('.trimtmp')
+ try:
+ with open(iq_file, 'rb') as src, open(tmp_path, 'wb') as dst:
+ src.seek(start_byte)
+ remaining = end_byte - start_byte
+ while remaining > 0:
+ chunk = src.read(min(262144, remaining))
+ if not chunk:
+ break
+ dst.write(chunk)
+ remaining -= len(chunk)
+ os.replace(tmp_path, iq_file)
+ except OSError as exc:
+ logger.error(f"Failed trimming trigger capture: {exc}")
+ try:
+ if tmp_path.exists():
+ tmp_path.unlink()
+ except OSError:
+ pass
+ return duration_seconds, bursts
+
+ trimmed_duration = max(0.0, float(end_byte - start_byte) / float(bytes_per_second))
+ adjusted_bursts: list[dict] = []
+ for burst in bursts:
+ raw_start = float(burst.get('start_seconds', 0.0))
+ raw_dur = max(0.0, float(burst.get('duration_seconds', 0.0)))
+ raw_end = raw_start + raw_dur
+ if raw_end < start_s or raw_start > end_s:
+ continue
+ adjusted = dict(burst)
+ adjusted['start_seconds'] = round(max(0.0, raw_start - start_s), 3)
+ adjusted['duration_seconds'] = round(raw_dur, 3)
+ adjusted_bursts.append(adjusted)
+ return trimmed_duration, adjusted_bursts if adjusted_bursts else bursts
+
+ def _rx_capture_loop(self) -> None:
+ """Read IQ data from the capture file and emit UI metrics."""
+ process = self._rx_process
+ file_handle = self._rx_file_handle
+
+ if not process or not file_handle:
+ logger.error("RX capture loop missing process/file handle")
+ return
+
+ CHUNK = 262144 # 256 KB (~64 ms @ 2 Msps complex int8 IQ)
+ LEVEL_INTERVAL = 0.05
+ WAVE_INTERVAL = 0.25
+ SPECTRUM_INTERVAL = 0.25
+ STATS_INTERVAL = 1.0
+ HINT_EVAL_INTERVAL = 0.25
+ HINT_EMIT_INTERVAL = 1.5
+
+ last_level = 0.0
+ last_wave = 0.0
+ last_spectrum = 0.0
+ last_stats = time.time()
+ last_log = time.time()
+ last_hint_eval = 0.0
+ last_hint_emit = 0.0
+ bytes_since_stats = 0
+ first_chunk = True
+ burst_active = False
+ burst_start = 0.0
+ burst_last_high = 0.0
+ burst_peak = 0
+ burst_bytes = bytearray()
+ burst_hint_family = 'Unknown'
+ burst_hint_conf = 0.0
+ BURST_OFF_HOLD = 0.18
+ BURST_MIN_DURATION = 0.04
+ MAX_BURST_BYTES = max(262144, int(max(1, self._rx_sample_rate) * 2 * 2))
+ smooth_level = 0.0
+ prev_smooth_level = 0.0
+ noise_floor = 0.0
+ peak_tracker = 0.0
+ on_threshold = 0.0
+ warmup_until = time.time() + 1.0
+ modulation_scores: dict[str, float] = {
+ 'OOK/ASK': 0.0,
+ 'FSK/GFSK': 0.0,
+ 'PWM/PPM': 0.0,
+ }
+ last_hint_reason = ''
+
+ try:
+ fd = file_handle.fileno()
+ if not isinstance(fd, int) or fd < 0:
+ logger.error("Invalid file descriptor from RX file handle")
+ return
+ except (OSError, ValueError, TypeError):
+ logger.error("Failed to obtain RX file descriptor")
+ return
+
+ try:
+ while not self._rx_stop:
+ try:
+ data = os.read(fd, CHUNK)
+ except OSError:
+ break
+ if not data:
+ if process.poll() is not None:
+ break
+ time.sleep(0.05)
+ continue
+
+ self._rx_bytes_written += len(data)
+ bytes_since_stats += len(data)
+ if burst_active and len(burst_bytes) < MAX_BURST_BYTES:
+ room = MAX_BURST_BYTES - len(burst_bytes)
+ burst_bytes.extend(data[:room])
+
+ if first_chunk:
+ first_chunk = False
+ self._emit({'type': 'info', 'text': '[rx] Receiving IQ data...'})
+
+ now = time.time()
+ if now - last_hint_eval >= HINT_EVAL_INTERVAL:
+ for key in modulation_scores:
+ modulation_scores[key] *= 0.97
+ hint_family, hint_conf, hint_reason = self._estimate_modulation_hint(data)
+ if hint_family in modulation_scores:
+ modulation_scores[hint_family] += max(0.05, hint_conf)
+ last_hint_reason = hint_reason
+ last_hint_eval = now
+
+ if now - last_level >= LEVEL_INTERVAL:
+ level = float(self._compute_rx_level(data))
+ prev_smooth_level = smooth_level
+ if smooth_level <= 0:
+ smooth_level = level
+ else:
+ smooth_level = (smooth_level * 0.72) + (level * 0.28)
+
+ if noise_floor <= 0:
+ noise_floor = smooth_level
+ elif not burst_active:
+ # Track receiver noise floor when we are not inside a burst.
+ noise_floor = (noise_floor * 0.94) + (smooth_level * 0.06)
+
+ peak_tracker = max(smooth_level, peak_tracker * 0.985)
+ spread = max(2.0, peak_tracker - noise_floor)
+ on_delta = max(2.8, spread * 0.52)
+ off_delta = max(1.2, spread * 0.24)
+ on_threshold = min(95.0, noise_floor + on_delta)
+ off_threshold = max(0.8, min(on_threshold - 0.5, noise_floor + off_delta))
+ rising = smooth_level - prev_smooth_level
+
+ self._emit({'type': 'rx_level', 'level': int(round(smooth_level))})
+
+ if not burst_active:
+ if now >= warmup_until and smooth_level >= on_threshold and rising >= 0.35:
+ burst_active = True
+ burst_start = now
+ burst_last_high = now
+ burst_peak = int(round(smooth_level))
+ burst_bytes = bytearray(data[: min(len(data), MAX_BURST_BYTES)])
+ burst_hint_family = 'Unknown'
+ burst_hint_conf = 0.0
+ if self._rx_trigger_enabled and self._rx_trigger_first_burst_start is None:
+ self._rx_trigger_first_burst_start = max(
+ 0.0, now - self._rx_start_time
+ )
+ self._emit({
+ 'type': 'info',
+ 'text': '[rx] Trigger fired - capturing burst window',
+ })
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'rx',
+ 'event': 'start',
+ 'start_offset_s': round(
+ max(0.0, now - self._rx_start_time), 3
+ ),
+ 'level': int(round(smooth_level)),
+ })
+ else:
+ if smooth_level >= off_threshold:
+ burst_last_high = now
+ burst_peak = max(burst_peak, int(round(smooth_level)))
+ elif (now - burst_last_high) >= BURST_OFF_HOLD:
+ duration = now - burst_start
+ if duration >= BURST_MIN_DURATION:
+ fp = self._fingerprint_burst_bytes(
+ bytes(burst_bytes),
+ self._rx_sample_rate,
+ duration,
+ )
+ if fp:
+ self._rx_fingerprint_counts[fp] = (
+ self._rx_fingerprint_counts.get(fp, 0) + 1
+ )
+ burst_hint_family, burst_hint_conf, burst_reason = self._estimate_modulation_hint(
+ bytes(burst_bytes)
+ )
+ if burst_hint_family in modulation_scores and burst_hint_conf > 0:
+ modulation_scores[burst_hint_family] += burst_hint_conf * 1.8
+ last_hint_reason = burst_reason
+ burst_data = {
+ 'start_seconds': round(
+ max(0.0, burst_start - self._rx_start_time), 3
+ ),
+ 'duration_seconds': round(duration, 3),
+ 'peak_level': int(burst_peak),
+ 'fingerprint': fp,
+ 'modulation_hint': burst_hint_family,
+ 'modulation_confidence': round(float(burst_hint_conf), 3),
+ }
+ if len(self._rx_bursts) < 512:
+ self._rx_bursts.append(burst_data)
+ self._rx_trigger_last_burst_end = max(
+ 0.0, now - self._rx_start_time
+ )
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'rx',
+ 'event': 'end',
+ 'start_offset_s': burst_data['start_seconds'],
+ 'duration_ms': int(duration * 1000),
+ 'peak_level': int(burst_peak),
+ 'fingerprint': fp,
+ 'modulation_hint': burst_hint_family,
+ 'modulation_confidence': round(float(burst_hint_conf), 3),
+ })
+ burst_active = False
+ burst_peak = 0
+ burst_bytes = bytearray()
+ last_level = now
+
+ # Emit live modulation/protocol hint periodically.
+ if now - last_hint_emit >= HINT_EMIT_INTERVAL:
+ best_family = max(modulation_scores, key=modulation_scores.get)
+ total_score = sum(max(0.0, v) for v in modulation_scores.values())
+ best_score = max(0.0, modulation_scores.get(best_family, 0.0))
+ hint_conf = 0.0 if total_score <= 0 else min(0.98, best_score / total_score)
+ protocol_hint = self._protocol_hint_from_capture(
+ self._rx_frequency_hz,
+ best_family if hint_conf >= 0.3 else 'Unknown',
+ len(self._rx_bursts),
+ )
+ self._rx_protocol_hint = protocol_hint
+ if hint_conf >= 0.30:
+ self._rx_modulation_hint = best_family
+ self._rx_modulation_confidence = hint_conf
+ self._emit({
+ 'type': 'rx_hint',
+ 'modulation_hint': best_family,
+ 'confidence': round(hint_conf, 3),
+ 'protocol_hint': protocol_hint,
+ 'reason': last_hint_reason,
+ })
+ last_hint_emit = now
+
+ # Smart-trigger auto-stop after quiet post-roll window.
+ if (
+ self._rx_trigger_enabled
+ and self._rx_trigger_first_burst_start is not None
+ and not burst_active
+ and not self._rx_autostop_pending
+ ):
+ last_end = self._rx_trigger_last_burst_end
+ if last_end is not None and (max(0.0, now - self._rx_start_time) - last_end) >= self._rx_trigger_post_s:
+ self._rx_autostop_pending = True
+ self._emit({
+ 'type': 'info',
+ 'text': '[rx] Trigger window complete - finalizing capture',
+ })
+ threading.Thread(target=self.stop_receive, daemon=True).start()
+ break
+
+ if now - last_wave >= WAVE_INTERVAL:
+ samples = self._extract_waveform(data)
+ if samples:
+ self._emit({'type': 'rx_waveform', 'samples': samples})
+ last_wave = now
+
+ if now - last_spectrum >= SPECTRUM_INTERVAL:
+ bins = self._compute_rx_spectrum(data)
+ if bins:
+ self._emit({'type': 'rx_spectrum', 'bins': bins})
+ last_spectrum = now
+
+ if now - last_stats >= STATS_INTERVAL:
+ rate_kb = bytes_since_stats / (now - last_stats) / 1024
+ file_size = 0
+ if self._rx_file and self._rx_file.exists():
+ try:
+ file_size = self._rx_file.stat().st_size
+ except OSError:
+ file_size = 0
+ self._emit({
+ 'type': 'rx_stats',
+ 'rate_kb': round(rate_kb, 1),
+ 'file_size': file_size,
+ 'elapsed_seconds': round(time.time() - self._rx_start_time, 1) if self._rx_start_time else 0,
+ })
+ if now - last_log >= 5.0:
+ self._emit({
+ 'type': 'info',
+ 'text': (
+ f'[rx] IQ: {rate_kb:.0f} KB/s '
+ f'(lvl {smooth_level:.1f}, floor {noise_floor:.1f}, thr {on_threshold:.1f})'
+ ),
+ })
+ last_log = now
+ bytes_since_stats = 0
+ last_stats = now
+
+ if burst_active:
+ duration = max(0.0, time.time() - burst_start)
+ if duration >= BURST_MIN_DURATION:
+ fp = self._fingerprint_burst_bytes(
+ bytes(burst_bytes),
+ self._rx_sample_rate,
+ duration,
+ )
+ if fp:
+ self._rx_fingerprint_counts[fp] = (
+ self._rx_fingerprint_counts.get(fp, 0) + 1
+ )
+ burst_hint_family, burst_hint_conf, burst_reason = self._estimate_modulation_hint(
+ bytes(burst_bytes)
+ )
+ if burst_hint_family in modulation_scores and burst_hint_conf > 0:
+ modulation_scores[burst_hint_family] += burst_hint_conf * 1.8
+ last_hint_reason = burst_reason
+ burst_data = {
+ 'start_seconds': round(
+ max(0.0, burst_start - self._rx_start_time), 3
+ ),
+ 'duration_seconds': round(duration, 3),
+ 'peak_level': int(burst_peak),
+ 'fingerprint': fp,
+ 'modulation_hint': burst_hint_family,
+ 'modulation_confidence': round(float(burst_hint_conf), 3),
+ }
+ if len(self._rx_bursts) < 512:
+ self._rx_bursts.append(burst_data)
+ self._rx_trigger_last_burst_end = max(
+ 0.0, time.time() - self._rx_start_time
+ )
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'rx',
+ 'event': 'end',
+ 'start_offset_s': burst_data['start_seconds'],
+ 'duration_ms': int(duration * 1000),
+ 'peak_level': int(burst_peak),
+ 'fingerprint': fp,
+ 'modulation_hint': burst_hint_family,
+ 'modulation_confidence': round(float(burst_hint_conf), 3),
+ })
+
+ # Finalize modulation summary for capture metadata.
+ if modulation_scores:
+ best_family = max(modulation_scores, key=modulation_scores.get)
+ total_score = sum(max(0.0, v) for v in modulation_scores.values())
+ best_score = max(0.0, modulation_scores.get(best_family, 0.0))
+ hint_conf = 0.0 if total_score <= 0 else min(0.98, best_score / total_score)
+ if hint_conf >= 0.3:
+ self._rx_modulation_hint = best_family
+ self._rx_modulation_confidence = hint_conf
+ self._rx_protocol_hint = self._protocol_hint_from_capture(
+ self._rx_frequency_hz,
+ self._rx_modulation_hint,
+ len(self._rx_bursts),
+ )
+ finally:
+ try:
+ file_handle.close()
+ except OSError:
+ pass
+ with self._lock:
+ if self._rx_file_handle is file_handle:
+ self._rx_file_handle = None
+
+ def _compute_rx_level(self, data: bytes) -> int:
+ """Compute a gain-tolerant 0-100 signal activity score from raw IQ bytes."""
+ if not data:
+ return 0
+ try:
+ samples = np.frombuffer(data, dtype=np.int8).astype(np.float32)
+ if samples.size < 2:
+ return 0
+ i_vals = samples[0::2]
+ q_vals = samples[1::2]
+ if i_vals.size == 0 or q_vals.size == 0:
+ return 0
+ i_vals = i_vals[::4]
+ q_vals = q_vals[::4]
+ if i_vals.size == 0 or q_vals.size == 0:
+ return 0
+ mag = np.sqrt(i_vals * i_vals + q_vals * q_vals)
+ if mag.size == 0:
+ return 0
+
+ noise = float(np.percentile(mag, 30))
+ signal = float(np.percentile(mag, 90))
+ peak = float(np.percentile(mag, 99))
+ contrast = max(0.0, signal - noise)
+ crest = max(0.0, peak - signal)
+ mean_mag = float(np.mean(mag))
+
+ # Normalize by local floor so changing gain is less likely to break
+ # burst visibility (low gain still detectable, high gain not always "on").
+ contrast_norm = contrast / max(8.0, noise + 8.0)
+ crest_norm = crest / max(8.0, signal + 8.0)
+ energy_norm = mean_mag / 60.0
+ level_f = (contrast_norm * 55.0) + (crest_norm * 20.0) + (energy_norm * 10.0)
+ level = int(max(0, min(100, level_f)))
+ if level == 0 and contrast > 0.5:
+ level = 1
+ return level
+ except Exception:
+ return 0
+
+ def _extract_waveform(self, data: bytes, points: int = 256) -> list[float]:
+ """Extract a normalized envelope waveform for UI display."""
+ try:
+ samples = np.frombuffer(data, dtype=np.int8).astype(np.float32)
+ if samples.size < 2:
+ return []
+ i_vals = samples[0::2]
+ q_vals = samples[1::2]
+ if i_vals.size == 0 or q_vals.size == 0:
+ return []
+ mag = np.sqrt(i_vals * i_vals + q_vals * q_vals)
+ if mag.size == 0:
+ return []
+ step = max(1, mag.size // points)
+ scoped = mag[::step][:points]
+ if scoped.size == 0:
+ return []
+ baseline = float(np.median(scoped))
+ centered = scoped - baseline
+ scale = float(np.percentile(np.abs(centered), 95))
+ if scale <= 1e-6:
+ normalized = np.zeros_like(centered)
+ else:
+ normalized = np.clip(centered / (scale * 2.5), -1.0, 1.0)
+ return [round(float(x), 3) for x in normalized.tolist()]
+ except Exception:
+ return []
+
+ def _compute_rx_spectrum(self, data: bytes, bins: int = 256) -> list[int]:
+ """Compute a simple FFT magnitude slice for waterfall rendering."""
+ try:
+ samples = np.frombuffer(data, dtype=np.int8)
+ if samples.size < bins * 2:
+ return []
+ fft_size = max(256, bins)
+ needed = fft_size * 2
+ if samples.size < needed:
+ return []
+ samples = samples[:needed].astype(np.float32)
+ i_vals = samples[0::2]
+ q_vals = samples[1::2]
+ iq = i_vals + 1j * q_vals
+ window = np.hanning(fft_size)
+ spectrum = np.fft.fftshift(np.fft.fft(iq * window))
+ mag = 20 * np.log10(np.abs(spectrum) + 1e-6)
+ mag -= np.max(mag)
+ # Map -60..0 dB range to 0..255
+ scaled = np.clip((mag + 60.0) / 60.0, 0.0, 1.0)
+ bins_vals = (scaled * 255).astype(np.uint8)
+ if bins_vals.size != bins:
+ idx = np.linspace(0, bins_vals.size - 1, bins).astype(int)
+ bins_vals = bins_vals[idx]
+ return bins_vals.tolist()
+ except Exception:
+ return []
+
+ def _monitor_rx_stderr(self) -> None:
+ process = self._rx_process
+ if not process or not process.stderr:
+ return
+ try:
+ for line in iter(process.stderr.readline, b''):
+ text = line.decode('utf-8', errors='replace').strip()
+ if text:
+ logger.debug(f"[hackrf_rx] {text}")
+ if 'error' in text.lower():
+ self._emit({'type': 'info', 'text': f'[hackrf_rx] {text}'})
+ except Exception:
+ pass
+
+ def stop_receive(self) -> dict:
+ thread_to_join: threading.Thread | None = None
+ file_handle: BinaryIO | None = None
+ proc_to_terminate: subprocess.Popen | None = None
+ with self._lock:
+ if not self._rx_process or self._rx_process.poll() is not None:
+ return {'status': 'not_running'}
+
+ self._rx_stop = True
+ thread_to_join = self._rx_thread
+ self._rx_thread = None
+ file_handle = self._rx_file_handle
+ proc_to_terminate = self._rx_process
+ self._rx_process = None
+
+ # Terminate outside lock to avoid blocking other operations
+ if proc_to_terminate:
+ safe_terminate(proc_to_terminate)
+ unregister_process(proc_to_terminate)
+
+ if thread_to_join and thread_to_join.is_alive():
+ thread_to_join.join(timeout=2.0)
+
+ if file_handle:
+ try:
+ file_handle.close()
+ except OSError:
+ pass
+ with self._lock:
+ if self._rx_file_handle is file_handle:
+ self._rx_file_handle = None
+
+ duration = time.time() - self._rx_start_time if self._rx_start_time else 0
+ iq_file = self._rx_file
+
+ # Write JSON sidecar metadata
+ capture = None
+ if iq_file and iq_file.exists():
+ bursts = list(self._rx_bursts)
+ duration, bursts = self._trim_capture_to_trigger_window(
+ iq_file=iq_file,
+ sample_rate=self._rx_sample_rate,
+ duration_seconds=duration,
+ bursts=bursts,
+ )
+ size = iq_file.stat().st_size
+ dominant_fingerprint = ''
+ dominant_fingerprint_count = 0
+ for fp, count in self._rx_fingerprint_counts.items():
+ if count > dominant_fingerprint_count:
+ dominant_fingerprint = fp
+ dominant_fingerprint_count = count
+
+ modulation_hint = self._rx_modulation_hint
+ modulation_confidence = float(self._rx_modulation_confidence or 0.0)
+ if not modulation_hint and bursts:
+ burst_hint_totals: dict[str, float] = {}
+ for burst in bursts:
+ hint_name = str(burst.get('modulation_hint') or '').strip()
+ hint_conf = float(burst.get('modulation_confidence') or 0.0)
+ if not hint_name or hint_name.lower() == 'unknown':
+ continue
+ burst_hint_totals[hint_name] = burst_hint_totals.get(hint_name, 0.0) + max(0.05, hint_conf)
+ if burst_hint_totals:
+ modulation_hint = max(burst_hint_totals, key=burst_hint_totals.get)
+ total_score = sum(burst_hint_totals.values())
+ modulation_confidence = min(
+ 0.98,
+ burst_hint_totals[modulation_hint] / max(total_score, 0.001),
+ )
+
+ protocol_hint = self._protocol_hint_from_capture(
+ self._rx_frequency_hz,
+ modulation_hint,
+ len(bursts),
+ )
+ label = self._auto_capture_label(
+ self._rx_frequency_hz,
+ len(bursts),
+ modulation_hint,
+ protocol_hint,
+ )
+ capture_id = uuid.uuid4().hex[:12]
+ capture = SubGhzCapture(
+ capture_id=capture_id,
+ filename=iq_file.name,
+ frequency_hz=self._rx_frequency_hz,
+ sample_rate=self._rx_sample_rate,
+ lna_gain=self._rx_lna_gain,
+ vga_gain=self._rx_vga_gain,
+ timestamp=datetime.now(timezone.utc).isoformat(),
+ duration_seconds=round(duration, 1),
+ size_bytes=size,
+ label=label,
+ label_source='auto',
+ bursts=bursts,
+ modulation_hint=modulation_hint,
+ modulation_confidence=round(modulation_confidence, 3),
+ protocol_hint=protocol_hint,
+ dominant_fingerprint=dominant_fingerprint,
+ trigger_enabled=self._rx_trigger_enabled,
+ trigger_pre_seconds=round(self._rx_trigger_pre_s, 3),
+ trigger_post_seconds=round(self._rx_trigger_post_s, 3),
+ )
+ meta_path = iq_file.with_suffix('.json')
+ try:
+ meta_path.write_text(json.dumps(capture.to_dict(), indent=2))
+ except OSError as e:
+ logger.error(f"Failed to write capture metadata: {e}")
+
+ with self._lock:
+ self._rx_file = None
+ self._rx_start_time = 0
+ self._rx_bytes_written = 0
+ self._rx_bursts = []
+ self._rx_trigger_enabled = False
+ self._rx_trigger_first_burst_start = None
+ self._rx_trigger_last_burst_end = None
+ self._rx_autostop_pending = False
+ self._rx_modulation_hint = ''
+ self._rx_modulation_confidence = 0.0
+ self._rx_protocol_hint = ''
+ self._rx_fingerprint_counts = {}
+
+ self._emit({
+ 'type': 'status',
+ 'mode': 'idle',
+ 'status': 'stopped',
+ 'duration_seconds': round(duration, 1),
+ })
+
+ result = {'status': 'stopped', 'duration_seconds': round(duration, 1)}
+ if capture:
+ result['capture'] = capture.to_dict()
+ return result
# ------------------------------------------------------------------
# DECODE (hackrf_transfer piped to rtl_433)
# ------------------------------------------------------------------
- def start_decode(
- self,
- frequency_hz: int,
- sample_rate: int = 2_000_000,
- lna_gain: int = 32,
- vga_gain: int = 20,
- decode_profile: str = 'weather',
- device_serial: str | None = None,
- ) -> dict:
+ def start_decode(
+ self,
+ frequency_hz: int,
+ sample_rate: int = 2_000_000,
+ lna_gain: int = 32,
+ vga_gain: int = 20,
+ decode_profile: str = 'weather',
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ if not self.check_hackrf():
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ if not self.check_rtl433():
+ return {'status': 'error', 'message': 'rtl_433 not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
+
with self._lock:
if self.active_mode != 'idle':
return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- if not self.check_rtl433():
- return {'status': 'error', 'message': 'rtl_433 not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
-
- # Keep decode bandwidth conservative for stability. 2 Msps is enough
- # for common SubGHz protocols while staying within HackRF support.
- requested_sample_rate = int(sample_rate)
- stable_sample_rate = max(2_000_000, min(2_000_000, requested_sample_rate))
-
- # Build hackrf_transfer command (producer: raw IQ to stdout)
- hackrf_cmd = [
- 'hackrf_transfer',
- '-r', '-',
- '-f', str(frequency_hz),
- '-s', str(stable_sample_rate),
- '-l', str(max(SUBGHZ_LNA_GAIN_MIN, min(SUBGHZ_LNA_GAIN_MAX, lna_gain))),
- '-g', str(max(SUBGHZ_VGA_GAIN_MIN, min(SUBGHZ_VGA_GAIN_MAX, vga_gain))),
- ]
- if device_serial:
- hackrf_cmd.extend(['-d', device_serial])
+ # Keep decode bandwidth conservative for stability. 2 Msps is enough
+ # for common SubGHz protocols while staying within HackRF support.
+ requested_sample_rate = int(sample_rate)
+ stable_sample_rate = max(2_000_000, min(2_000_000, requested_sample_rate))
- # Build rtl_433 command (consumer: reads IQ from stdin)
- # Feed signed 8-bit complex IQ directly from hackrf_transfer.
- rtl433_cmd = [
- 'rtl_433',
- '-r', 'cs8:-',
- '-s', str(stable_sample_rate),
- '-f', str(frequency_hz),
- '-F', 'json',
- '-F', 'log',
- '-M', 'level',
- '-M', 'noise:5',
- '-Y', 'autolevel',
- '-Y', 'ampest',
- '-Y', 'minsnr=2.5',
- ]
- profile = (decode_profile or 'weather').strip().lower()
- if profile == 'weather':
- # Limit decoder set to weather/temperature/humidity/rain/wind
- # protocols for better sensitivity and lower CPU load.
- weather_protocol_ids = [
- 2, 3, 8, 12, 16, 18, 19, 20, 31, 32, 34, 40, 47, 50, 52,
- 54, 55, 56, 57, 69, 73, 74, 75, 76, 78, 79, 85, 91, 92,
- 108, 109, 111, 112, 113, 119, 120, 124, 127, 132, 133,
- 134, 138, 141, 143, 144, 145, 146, 147, 152, 153, 157,
- 158, 163, 165, 166, 170, 171, 172, 173, 175, 182, 183,
- 184, 194, 195, 196, 205, 206, 213, 214, 215, 217, 219,
- 221, 222,
- ]
- rtl433_cmd.extend(['-R', '0'])
- for proto_id in weather_protocol_ids:
- rtl433_cmd.extend(['-R', str(proto_id)])
- else:
- profile = 'all'
+ # Build hackrf_transfer command (producer: raw IQ to stdout)
+ hackrf_cmd = [
+ 'hackrf_transfer',
+ '-r', '-',
+ '-f', str(frequency_hz),
+ '-s', str(stable_sample_rate),
+ '-l', str(max(SUBGHZ_LNA_GAIN_MIN, min(SUBGHZ_LNA_GAIN_MAX, lna_gain))),
+ '-g', str(max(SUBGHZ_VGA_GAIN_MIN, min(SUBGHZ_VGA_GAIN_MAX, vga_gain))),
+ ]
+ if device_serial:
+ hackrf_cmd.extend(['-d', device_serial])
+
+ # Build rtl_433 command (consumer: reads IQ from stdin)
+ # Feed signed 8-bit complex IQ directly from hackrf_transfer.
+ rtl433_cmd = [
+ 'rtl_433',
+ '-r', 'cs8:-',
+ '-s', str(stable_sample_rate),
+ '-f', str(frequency_hz),
+ '-F', 'json',
+ '-F', 'log',
+ '-M', 'level',
+ '-M', 'noise:5',
+ '-Y', 'autolevel',
+ '-Y', 'ampest',
+ '-Y', 'minsnr=2.5',
+ ]
+ profile = (decode_profile or 'weather').strip().lower()
+ if profile == 'weather':
+ # Limit decoder set to weather/temperature/humidity/rain/wind
+ # protocols for better sensitivity and lower CPU load.
+ weather_protocol_ids = [
+ 2, 3, 8, 12, 16, 18, 19, 20, 31, 32, 34, 40, 47, 50, 52,
+ 54, 55, 56, 57, 69, 73, 74, 75, 76, 78, 79, 85, 91, 92,
+ 108, 109, 111, 112, 113, 119, 120, 124, 127, 132, 133,
+ 134, 138, 141, 143, 144, 145, 146, 147, 152, 153, 157,
+ 158, 163, 165, 166, 170, 171, 172, 173, 175, 182, 183,
+ 184, 194, 195, 196, 205, 206, 213, 214, 215, 217, 219,
+ 221, 222,
+ ]
+ rtl433_cmd.extend(['-R', '0'])
+ for proto_id in weather_protocol_ids:
+ rtl433_cmd.extend(['-R', str(proto_id)])
+ else:
+ profile = 'all'
logger.info(f"SubGHz decode: {' '.join(hackrf_cmd)} | {' '.join(rtl433_cmd)}")
try:
- # Start hackrf_transfer (producer). stderr is consumed by a
- # dedicated monitor thread so we can surface stream failures.
- hackrf_proc = subprocess.Popen(
- hackrf_cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- bufsize=0,
- )
- register_process(hackrf_proc)
-
- # Start rtl_433 (consumer)
- rtl433_proc = subprocess.Popen(
- rtl433_cmd,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- bufsize=0,
- )
- register_process(rtl433_proc)
+ # Start hackrf_transfer (producer). stderr is consumed by a
+ # dedicated monitor thread so we can surface stream failures.
+ hackrf_proc = subprocess.Popen(
+ hackrf_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0,
+ )
+ register_process(hackrf_proc)
- self._decode_hackrf_process = hackrf_proc
- self._decode_process = rtl433_proc
- self._decode_start_time = time.time()
- self._decode_frequency_hz = frequency_hz
- self._decode_sample_rate = stable_sample_rate
- self._decode_stop = False
- self._emit({'type': 'info', 'text': f'[decode] Profile: {profile}'})
- if requested_sample_rate != stable_sample_rate:
- self._emit({
- 'type': 'info',
- 'text': (
- f'[decode] Using {stable_sample_rate} sps '
- f'(requested {requested_sample_rate}) for stable live decode'
- ),
- })
-
- # Buffered relay: hackrf stdout → queue → rtl_433 stdin
- # with auto-restart when HackRF USB disconnects.
- iq_queue: queue.Queue[bytes | None] = queue.Queue(maxsize=512)
+ # Start rtl_433 (consumer)
+ rtl433_proc = subprocess.Popen(
+ rtl433_cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0,
+ )
+ register_process(rtl433_proc)
- threading.Thread(
- target=self._hackrf_reader,
- args=(hackrf_cmd, rtl433_proc, iq_queue),
- daemon=True,
- ).start()
- threading.Thread(
- target=self._monitor_decode_hackrf_stderr,
- args=(hackrf_proc,),
- daemon=True,
- ).start()
+ self._decode_hackrf_process = hackrf_proc
+ self._decode_process = rtl433_proc
+ self._decode_start_time = time.time()
+ self._decode_frequency_hz = frequency_hz
+ self._decode_sample_rate = stable_sample_rate
+ self._decode_stop = False
+ self._emit({'type': 'info', 'text': f'[decode] Profile: {profile}'})
+ if requested_sample_rate != stable_sample_rate:
+ self._emit({
+ 'type': 'info',
+ 'text': (
+ f'[decode] Using {stable_sample_rate} sps '
+ f'(requested {requested_sample_rate}) for stable live decode'
+ ),
+ })
+
+ # Buffered relay: hackrf stdout → queue → rtl_433 stdin
+ # with auto-restart when HackRF USB disconnects.
+ iq_queue: queue.Queue[bytes | None] = queue.Queue(maxsize=512)
+
+ threading.Thread(
+ target=self._hackrf_reader,
+ args=(hackrf_cmd, rtl433_proc, iq_queue),
+ daemon=True,
+ ).start()
+ threading.Thread(
+ target=self._monitor_decode_hackrf_stderr,
+ args=(hackrf_proc,),
+ daemon=True,
+ ).start()
threading.Thread(
target=self._rtl433_writer,
@@ -1411,19 +1417,19 @@ class SubGhzManager:
daemon=True,
).start()
- self._emit({
- 'type': 'status',
- 'mode': 'decode',
- 'status': 'started',
- 'frequency_hz': frequency_hz,
- 'sample_rate': stable_sample_rate,
- })
-
- return {
- 'status': 'started',
- 'frequency_hz': frequency_hz,
- 'sample_rate': stable_sample_rate,
- }
+ self._emit({
+ 'type': 'status',
+ 'mode': 'decode',
+ 'status': 'started',
+ 'frequency_hz': frequency_hz,
+ 'sample_rate': stable_sample_rate,
+ })
+
+ return {
+ 'status': 'started',
+ 'frequency_hz': frequency_hz,
+ 'sample_rate': stable_sample_rate,
+ }
except FileNotFoundError as e:
if self._decode_hackrf_process:
@@ -1456,17 +1462,17 @@ class SubGhzManager:
Uses os.read() on the raw fd to drain the pipe immediately (no Python
buffering), minimising backpressure on the USB transfer path.
"""
- CHUNK = 65536 # 64 KB read size for lower latency
- RESTART_DELAY = 0.15 # seconds before restart attempt
- MAX_RESTARTS = 3600 # allow longer sessions
- MAX_QUICK_RESTARTS = 6
- QUICK_RESTART_WINDOW = 20.0
-
- restart_times: list[float] = []
- first_chunk = True
-
- restarts = 0
- while not self._decode_stop:
+ CHUNK = 65536 # 64 KB read size for lower latency
+ RESTART_DELAY = 0.15 # seconds before restart attempt
+ MAX_RESTARTS = 3600 # allow longer sessions
+ MAX_QUICK_RESTARTS = 6
+ QUICK_RESTART_WINDOW = 20.0
+
+ restart_times: list[float] = []
+ first_chunk = True
+
+ restarts = 0
+ while not self._decode_stop:
if rtl433_proc.poll() is not None:
break
if self._decode_process is not rtl433_proc:
@@ -1498,48 +1504,48 @@ class SubGhzManager:
with self._lock:
if self._decode_stop:
break
- try:
- hackrf_proc = subprocess.Popen(
- hackrf_cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- bufsize=0,
- )
- register_process(hackrf_proc)
- self._decode_hackrf_process = hackrf_proc
- src = hackrf_proc.stdout
- restarts += 1
- now = time.time()
- restart_times.append(now)
- restart_times = [t for t in restart_times if (now - t) <= QUICK_RESTART_WINDOW]
- if len(restart_times) >= MAX_QUICK_RESTARTS:
- self._emit({
- 'type': 'error',
- 'message': (
- 'HackRF stream is unstable (restarting repeatedly). '
- 'Try lower gain/sample-rate or reconnect the device.'
- ),
- })
- break
- logger.info(f"hackrf_transfer restarted ({restarts})")
- self._emit({'type': 'info', 'text': f'[decode] HackRF stream restarted ({restarts})'})
- threading.Thread(
- target=self._monitor_decode_hackrf_stderr,
- args=(hackrf_proc,),
- daemon=True,
- ).start()
- except Exception as e:
- logger.error(f"Failed to restart hackrf_transfer: {e}")
- self._emit({
- 'type': 'error',
- 'message': f'Failed to restart hackrf_transfer: {e}',
- })
+ try:
+ hackrf_proc = subprocess.Popen(
+ hackrf_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ bufsize=0,
+ )
+ register_process(hackrf_proc)
+ self._decode_hackrf_process = hackrf_proc
+ src = hackrf_proc.stdout
+ restarts += 1
+ now = time.time()
+ restart_times.append(now)
+ restart_times = [t for t in restart_times if (now - t) <= QUICK_RESTART_WINDOW]
+ if len(restart_times) >= MAX_QUICK_RESTARTS:
+ self._emit({
+ 'type': 'error',
+ 'message': (
+ 'HackRF stream is unstable (restarting repeatedly). '
+ 'Try lower gain/sample-rate or reconnect the device.'
+ ),
+ })
+ break
+ logger.info(f"hackrf_transfer restarted ({restarts})")
+ self._emit({'type': 'info', 'text': f'[decode] HackRF stream restarted ({restarts})'})
+ threading.Thread(
+ target=self._monitor_decode_hackrf_stderr,
+ args=(hackrf_proc,),
+ daemon=True,
+ ).start()
+ except Exception as e:
+ logger.error(f"Failed to restart hackrf_transfer: {e}")
+ self._emit({
+ 'type': 'error',
+ 'message': f'Failed to restart hackrf_transfer: {e}',
+ })
break
- if not src:
- break
-
- # Use raw fd reads to drain the pipe without Python buffering.
+ if not src:
+ break
+
+ # Use raw fd reads to drain the pipe without Python buffering.
# This returns immediately with whatever bytes are available
# (up to CHUNK), avoiding the backpressure that buffered reads
# can cause when they block waiting for a full chunk.
@@ -1551,19 +1557,19 @@ class SubGhzManager:
except (OSError, ValueError, TypeError):
break
- try:
- while not self._decode_stop:
- data = os.read(fd, CHUNK)
- if not data:
- if hackrf_proc and hackrf_proc.poll() is not None:
- self._emit({'type': 'info', 'text': '[decode] HackRF stream stopped'})
- break
- if first_chunk:
- first_chunk = False
- self._emit({'type': 'info', 'text': '[decode] IQ source active'})
- try:
- iq_queue.put_nowait(data)
- except queue.Full:
+ try:
+ while not self._decode_stop:
+ data = os.read(fd, CHUNK)
+ if not data:
+ if hackrf_proc and hackrf_proc.poll() is not None:
+ self._emit({'type': 'info', 'text': '[decode] HackRF stream stopped'})
+ break
+ if first_chunk:
+ first_chunk = False
+ self._emit({'type': 'info', 'text': '[decode] IQ source active'})
+ try:
+ iq_queue.put_nowait(data)
+ except queue.Full:
# Drop oldest chunk to prevent backpressure
logger.debug("IQ queue full, dropping oldest chunk")
try:
@@ -1583,156 +1589,156 @@ class SubGhzManager:
except queue.Full:
pass
- def _rtl433_writer(
- self,
- rtl433_proc: subprocess.Popen,
- iq_queue: queue.Queue,
- ) -> None:
- """Drain the IQ queue into rtl_433 stdin."""
- dst = rtl433_proc.stdin
- if not dst:
- logger.error("rtl_433 stdin is None — cannot write IQ data")
- return
-
- first_chunk = True
- last_level = 0.0
- last_wave = 0.0
- last_spectrum = 0.0
- last_stats = time.time()
- bytes_since_stats = 0
- LEVEL_INTERVAL = 0.35
- WAVE_INTERVAL = 0.5
- SPECTRUM_INTERVAL = 0.55
- STATS_INTERVAL = 6.0
- writes_since_flush = 0
- burst_active = False
- burst_start = 0.0
- burst_last_high = 0.0
- burst_peak = 0
- BURST_ON_LEVEL = 9
- BURST_OFF_HOLD = 0.45
- BURST_MIN_DURATION = 0.05
- try:
- while True:
- try:
- data = iq_queue.get(timeout=2.0)
- except queue.Empty:
+ def _rtl433_writer(
+ self,
+ rtl433_proc: subprocess.Popen,
+ iq_queue: queue.Queue,
+ ) -> None:
+ """Drain the IQ queue into rtl_433 stdin."""
+ dst = rtl433_proc.stdin
+ if not dst:
+ logger.error("rtl_433 stdin is None — cannot write IQ data")
+ return
+
+ first_chunk = True
+ last_level = 0.0
+ last_wave = 0.0
+ last_spectrum = 0.0
+ last_stats = time.time()
+ bytes_since_stats = 0
+ LEVEL_INTERVAL = 0.35
+ WAVE_INTERVAL = 0.5
+ SPECTRUM_INTERVAL = 0.55
+ STATS_INTERVAL = 6.0
+ writes_since_flush = 0
+ burst_active = False
+ burst_start = 0.0
+ burst_last_high = 0.0
+ burst_peak = 0
+ BURST_ON_LEVEL = 9
+ BURST_OFF_HOLD = 0.45
+ BURST_MIN_DURATION = 0.05
+ try:
+ while True:
+ try:
+ data = iq_queue.get(timeout=2.0)
+ except queue.Empty:
if rtl433_proc.poll() is not None:
break
- continue
- if data is None:
- break
-
- now = time.time()
- bytes_since_stats += len(data)
-
- if now - last_level >= LEVEL_INTERVAL:
- level = self._compute_rx_level(data)
- self._emit({'type': 'decode_level', 'level': level})
- if level >= BURST_ON_LEVEL:
- burst_last_high = now
- if not burst_active:
- burst_active = True
- burst_start = now
- burst_peak = level
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'decode',
- 'event': 'start',
- 'start_offset_s': round(
- max(0.0, now - self._decode_start_time), 3
- ),
- 'level': int(level),
- })
- else:
- burst_peak = max(burst_peak, level)
- elif burst_active and (now - burst_last_high) >= BURST_OFF_HOLD:
- duration = now - burst_start
- if duration >= BURST_MIN_DURATION:
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'decode',
- 'event': 'end',
- 'start_offset_s': round(
- max(0.0, burst_start - self._decode_start_time), 3
- ),
- 'duration_ms': int(duration * 1000),
- 'peak_level': int(burst_peak),
- })
- burst_active = False
- burst_peak = 0
- last_level = now
-
- if now - last_wave >= WAVE_INTERVAL:
- samples = self._extract_waveform(data, points=160)
- if samples:
- self._emit({'type': 'decode_waveform', 'samples': samples})
- last_wave = now
-
- if now - last_spectrum >= SPECTRUM_INTERVAL:
- bins = self._compute_rx_spectrum(data, bins=128)
- if bins:
- self._emit({'type': 'decode_spectrum', 'bins': bins})
- last_spectrum = now
-
- # Pass HackRF cs8 IQ bytes through directly.
- dst.write(data)
- writes_since_flush += 1
- if writes_since_flush >= 8:
- dst.flush()
- writes_since_flush = 0
-
- if first_chunk:
- first_chunk = False
- logger.info(f"IQ data flowing to rtl_433 ({len(data)} bytes)")
- self._emit({
- 'type': 'info',
- 'text': '[decode] Receiving IQ data from HackRF...',
- })
-
- elapsed = now - last_stats
- if elapsed >= STATS_INTERVAL:
- rate_kb = bytes_since_stats / elapsed / 1024
- self._emit({
- 'type': 'info',
- 'text': f'[decode] IQ: {rate_kb:.0f} KB/s — listening for signals...',
- })
- self._emit({
- 'type': 'decode_raw',
- 'text': f'IQ stream active: {rate_kb:.0f} KB/s',
- })
- bytes_since_stats = 0
- last_stats = now
-
- except (BrokenPipeError, OSError) as e:
- logger.debug(f"rtl_433 writer pipe closed: {e}")
- self._emit({'type': 'info', 'text': f'[decode] Writer pipe closed: {e}'})
- except Exception as e:
- logger.error(f"rtl_433 writer error: {e}")
- self._emit({'type': 'error', 'message': f'Decode writer error: {e}'})
- finally:
- if burst_active:
- duration = max(0.0, time.time() - burst_start)
- if duration >= BURST_MIN_DURATION:
- self._emit({
- 'type': 'rx_burst',
- 'mode': 'decode',
- 'event': 'end',
- 'start_offset_s': round(
- max(0.0, burst_start - self._decode_start_time), 3
- ),
- 'duration_ms': int(duration * 1000),
- 'peak_level': int(burst_peak),
- })
- try:
- dst.close()
- except OSError:
- pass
+ continue
+ if data is None:
+ break
- def _read_decode_output(self) -> None:
- process = self._decode_process
- if not process or not process.stdout:
- return
+ now = time.time()
+ bytes_since_stats += len(data)
+
+ if now - last_level >= LEVEL_INTERVAL:
+ level = self._compute_rx_level(data)
+ self._emit({'type': 'decode_level', 'level': level})
+ if level >= BURST_ON_LEVEL:
+ burst_last_high = now
+ if not burst_active:
+ burst_active = True
+ burst_start = now
+ burst_peak = level
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'decode',
+ 'event': 'start',
+ 'start_offset_s': round(
+ max(0.0, now - self._decode_start_time), 3
+ ),
+ 'level': int(level),
+ })
+ else:
+ burst_peak = max(burst_peak, level)
+ elif burst_active and (now - burst_last_high) >= BURST_OFF_HOLD:
+ duration = now - burst_start
+ if duration >= BURST_MIN_DURATION:
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'decode',
+ 'event': 'end',
+ 'start_offset_s': round(
+ max(0.0, burst_start - self._decode_start_time), 3
+ ),
+ 'duration_ms': int(duration * 1000),
+ 'peak_level': int(burst_peak),
+ })
+ burst_active = False
+ burst_peak = 0
+ last_level = now
+
+ if now - last_wave >= WAVE_INTERVAL:
+ samples = self._extract_waveform(data, points=160)
+ if samples:
+ self._emit({'type': 'decode_waveform', 'samples': samples})
+ last_wave = now
+
+ if now - last_spectrum >= SPECTRUM_INTERVAL:
+ bins = self._compute_rx_spectrum(data, bins=128)
+ if bins:
+ self._emit({'type': 'decode_spectrum', 'bins': bins})
+ last_spectrum = now
+
+ # Pass HackRF cs8 IQ bytes through directly.
+ dst.write(data)
+ writes_since_flush += 1
+ if writes_since_flush >= 8:
+ dst.flush()
+ writes_since_flush = 0
+
+ if first_chunk:
+ first_chunk = False
+ logger.info(f"IQ data flowing to rtl_433 ({len(data)} bytes)")
+ self._emit({
+ 'type': 'info',
+ 'text': '[decode] Receiving IQ data from HackRF...',
+ })
+
+ elapsed = now - last_stats
+ if elapsed >= STATS_INTERVAL:
+ rate_kb = bytes_since_stats / elapsed / 1024
+ self._emit({
+ 'type': 'info',
+ 'text': f'[decode] IQ: {rate_kb:.0f} KB/s — listening for signals...',
+ })
+ self._emit({
+ 'type': 'decode_raw',
+ 'text': f'IQ stream active: {rate_kb:.0f} KB/s',
+ })
+ bytes_since_stats = 0
+ last_stats = now
+
+ except (BrokenPipeError, OSError) as e:
+ logger.debug(f"rtl_433 writer pipe closed: {e}")
+ self._emit({'type': 'info', 'text': f'[decode] Writer pipe closed: {e}'})
+ except Exception as e:
+ logger.error(f"rtl_433 writer error: {e}")
+ self._emit({'type': 'error', 'message': f'Decode writer error: {e}'})
+ finally:
+ if burst_active:
+ duration = max(0.0, time.time() - burst_start)
+ if duration >= BURST_MIN_DURATION:
+ self._emit({
+ 'type': 'rx_burst',
+ 'mode': 'decode',
+ 'event': 'end',
+ 'start_offset_s': round(
+ max(0.0, burst_start - self._decode_start_time), 3
+ ),
+ 'duration_ms': int(duration * 1000),
+ 'peak_level': int(burst_peak),
+ })
+ try:
+ dst.close()
+ except OSError:
+ pass
+
+ def _read_decode_output(self) -> None:
+ process = self._decode_process
+ if not process or not process.stdout:
+ return
got_output = False
try:
for line in iter(process.stdout.readline, b''):
@@ -1750,91 +1756,94 @@ class SubGhzManager:
self._emit({'type': 'decode_raw', 'text': text})
except Exception as e:
logger.error(f"Error reading decode output: {e}")
- finally:
- rc = process.poll()
- unregister_process(process)
- if rc is not None and rc != 0 and rc != -15:
- logger.warning(f"rtl_433 exited with code {rc}")
- self._emit({
- 'type': 'info',
- 'text': f'[rtl_433] Exited with code {rc}',
- })
- with self._lock:
- if self._decode_process is process:
- self._decode_process = None
- self._decode_frequency_hz = 0
- self._decode_sample_rate = 0
- self._decode_start_time = 0
- self._emit({
- 'type': 'status',
- 'mode': 'idle',
- 'status': 'decode_stopped',
- })
-
- def _monitor_decode_hackrf_stderr(self, process: subprocess.Popen) -> None:
- if not process or not process.stderr:
- return
- fatal_disconnect_emitted = False
- try:
- for line in iter(process.stderr.readline, b''):
- text = line.decode('utf-8', errors='replace').strip()
- if not text:
- continue
- logger.debug(f"[hackrf_decode] {text}")
- lower = text.lower()
- if (
- not fatal_disconnect_emitted
- and (
- 'no such device' in lower
- or 'device not found' in lower
- or 'disconnected' in lower
- )
- ):
- fatal_disconnect_emitted = True
- self._hackrf_device_cache = False
- self._hackrf_device_cache_ts = time.time()
- self._decode_stop = True
- self._emit({
- 'type': 'error',
- 'message': (
- 'HackRF disconnected during decode. '
- 'Reconnect the device, then press Start again.'
- ),
- })
- if (
- 'error' in lower
- or 'usb' in lower
- or 'overflow' in lower
- or 'underflow' in lower
- or 'failed' in lower
- or 'couldn' in lower
- or 'transfer' in lower
- ):
- self._emit({'type': 'info', 'text': f'[hackrf] {text}'})
- except Exception:
- pass
-
- def _monitor_decode_stderr(self) -> None:
- process = self._decode_process
- if not process or not process.stderr:
- return
- decode_keywords = (
- 'pulse', 'sync', 'message', 'decoded', 'snr', 'rssi',
- 'level', 'modulation', 'bitbuffer', 'symbol', 'short',
- 'noise', 'detected',
- )
- try:
- for line in iter(process.stderr.readline, b''):
- text = line.decode('utf-8', errors='replace').strip()
- if text:
- logger.debug(f"[rtl_433] {text}")
- self._emit({'type': 'info', 'text': f'[rtl_433] {text}'})
- if any(k in text.lower() for k in decode_keywords):
- self._emit({'type': 'decode_raw', 'text': text})
- except Exception:
- pass
+ finally:
+ rc = process.poll()
+ unregister_process(process)
+ if rc is not None and rc != 0 and rc != -15:
+ logger.warning(f"rtl_433 exited with code {rc}")
+ self._emit({
+ 'type': 'info',
+ 'text': f'[rtl_433] Exited with code {rc}',
+ })
+ with self._lock:
+ if self._decode_process is process:
+ self._decode_process = None
+ self._decode_frequency_hz = 0
+ self._decode_sample_rate = 0
+ self._decode_start_time = 0
+ self._emit({
+ 'type': 'status',
+ 'mode': 'idle',
+ 'status': 'decode_stopped',
+ })
+
+ def _monitor_decode_hackrf_stderr(self, process: subprocess.Popen) -> None:
+ if not process or not process.stderr:
+ return
+ fatal_disconnect_emitted = False
+ try:
+ for line in iter(process.stderr.readline, b''):
+ text = line.decode('utf-8', errors='replace').strip()
+ if not text:
+ continue
+ logger.debug(f"[hackrf_decode] {text}")
+ lower = text.lower()
+ if (
+ not fatal_disconnect_emitted
+ and (
+ 'no such device' in lower
+ or 'device not found' in lower
+ or 'disconnected' in lower
+ )
+ ):
+ fatal_disconnect_emitted = True
+ self._hackrf_device_cache = False
+ self._hackrf_device_cache_ts = time.time()
+ self._decode_stop = True
+ self._emit({
+ 'type': 'error',
+ 'message': (
+ 'HackRF disconnected during decode. '
+ 'Reconnect the device, then press Start again.'
+ ),
+ })
+ if (
+ 'error' in lower
+ or 'usb' in lower
+ or 'overflow' in lower
+ or 'underflow' in lower
+ or 'failed' in lower
+ or 'couldn' in lower
+ or 'transfer' in lower
+ ):
+ self._emit({'type': 'info', 'text': f'[hackrf] {text}'})
+ except Exception:
+ pass
+
+ def _monitor_decode_stderr(self) -> None:
+ process = self._decode_process
+ if not process or not process.stderr:
+ return
+ decode_keywords = (
+ 'pulse', 'sync', 'message', 'decoded', 'snr', 'rssi',
+ 'level', 'modulation', 'bitbuffer', 'symbol', 'short',
+ 'noise', 'detected',
+ )
+ try:
+ for line in iter(process.stderr.readline, b''):
+ text = line.decode('utf-8', errors='replace').strip()
+ if text:
+ logger.debug(f"[rtl_433] {text}")
+ self._emit({'type': 'info', 'text': f'[rtl_433] {text}'})
+ if any(k in text.lower() for k in decode_keywords):
+ self._emit({'type': 'decode_raw', 'text': text})
+ except Exception:
+ pass
def stop_decode(self) -> dict:
+ hackrf_proc: subprocess.Popen | None = None
+ rtl433_proc: subprocess.Popen | None = None
+
with self._lock:
hackrf_running = (
self._decode_hackrf_process
@@ -1852,43 +1861,50 @@ class SubGhzManager:
# preventing it from spawning a new hackrf_transfer during cleanup.
self._decode_stop = True
- # Terminate upstream (hackrf_transfer) first, then consumer (rtl_433)
+ # Grab process refs and clear state inside lock
+ hackrf_proc = self._decode_hackrf_process
+ self._decode_hackrf_process = None
+ rtl433_proc = self._decode_process
+ self._decode_process = None
+
+ self._decode_frequency_hz = 0
+ self._decode_sample_rate = 0
+ self._decode_start_time = 0
+
+ # Terminate outside lock — upstream (hackrf_transfer) first, then consumer (rtl_433)
+ if hackrf_proc:
+ safe_terminate(hackrf_proc)
+ unregister_process(hackrf_proc)
+ if rtl433_proc:
+ safe_terminate(rtl433_proc)
+ unregister_process(rtl433_proc)
+
+ # Clean up any hackrf_transfer spawned during the race window
+ time.sleep(0.1)
+ race_proc: subprocess.Popen | None = None
+ with self._lock:
if self._decode_hackrf_process:
- safe_terminate(self._decode_hackrf_process)
- unregister_process(self._decode_hackrf_process)
+ race_proc = self._decode_hackrf_process
self._decode_hackrf_process = None
+ if race_proc:
+ safe_terminate(race_proc)
+ unregister_process(race_proc)
- if self._decode_process:
- safe_terminate(self._decode_process)
- unregister_process(self._decode_process)
- self._decode_process = None
-
- self._decode_frequency_hz = 0
- self._decode_sample_rate = 0
- self._decode_start_time = 0
+ self._emit({
+ 'type': 'status',
+ 'mode': 'idle',
+ 'status': 'stopped',
+ })
- # Clean up any hackrf_transfer spawned during the race window
- time.sleep(0.1)
- if self._decode_hackrf_process:
- safe_terminate(self._decode_hackrf_process)
- unregister_process(self._decode_hackrf_process)
- self._decode_hackrf_process = None
-
- self._emit({
- 'type': 'status',
- 'mode': 'idle',
- 'status': 'stopped',
- })
-
- return {'status': 'stopped'}
+ return {'status': 'stopped'}
# ------------------------------------------------------------------
# TRANSMIT (replay via hackrf_transfer -t)
# ------------------------------------------------------------------
@staticmethod
- def validate_tx_frequency(frequency_hz: int) -> str | None:
- """Validate that a frequency is within allowed ISM TX bands.
+ def validate_tx_frequency(frequency_hz: int) -> str | None:
+ """Validate that a frequency is within allowed ISM TX bands.
Returns None if valid, or an error message if invalid.
"""
@@ -1898,143 +1914,150 @@ class SubGhzManager:
return None
bands_str = ', '.join(
f'{lo}-{hi} MHz' for lo, hi in SUBGHZ_TX_ALLOWED_BANDS
- )
- return f'Frequency {freq_mhz:.3f} MHz is outside allowed TX bands: {bands_str}'
-
- @staticmethod
- def _estimate_capture_duration_seconds(capture: SubGhzCapture, file_size: int) -> float:
- if capture.duration_seconds and capture.duration_seconds > 0:
- return float(capture.duration_seconds)
- if capture.sample_rate > 0 and file_size > 0:
- return float(file_size) / float(capture.sample_rate * 2)
- return 0.0
-
- def _cleanup_tx_temp_file(self) -> None:
- path = self._tx_temp_file
- self._tx_temp_file = None
- if not path:
- return
- try:
- if path.exists():
- path.unlink()
- except OSError as exc:
- logger.debug(f"Failed to remove TX temp file {path}: {exc}")
+ )
+ return f'Frequency {freq_mhz:.3f} MHz is outside allowed TX bands: {bands_str}'
- def transmit(
- self,
- capture_id: str,
- tx_gain: int = 20,
- max_duration: int = 10,
- start_seconds: float | None = None,
- duration_seconds: float | None = None,
- device_serial: str | None = None,
- ) -> dict:
- with self._lock:
+ @staticmethod
+ def _estimate_capture_duration_seconds(capture: SubGhzCapture, file_size: int) -> float:
+ if capture.duration_seconds and capture.duration_seconds > 0:
+ return float(capture.duration_seconds)
+ if capture.sample_rate > 0 and file_size > 0:
+ return float(file_size) / float(capture.sample_rate * 2)
+ return 0.0
+
+ def _cleanup_tx_temp_file(self) -> None:
+ path = self._tx_temp_file
+ self._tx_temp_file = None
+ if not path:
+ return
+ try:
+ if path.exists():
+ path.unlink()
+ except OSError as exc:
+ logger.debug(f"Failed to remove TX temp file {path}: {exc}")
+
+ def transmit(
+ self,
+ capture_id: str,
+ tx_gain: int = 20,
+ max_duration: int = 10,
+ start_seconds: float | None = None,
+ duration_seconds: float | None = None,
+ device_serial: str | None = None,
+ ) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ if not self.check_hackrf():
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
+
+ # Pre-lock: capture lookup, validation, and segment I/O (can be large)
+ capture = self._load_capture(capture_id)
+ if not capture:
+ return {'status': 'error', 'message': f'Capture not found: {capture_id}'}
+
+ freq_error = self.validate_tx_frequency(capture.frequency_hz)
+ if freq_error:
+ return {'status': 'error', 'message': freq_error}
+
+ tx_gain = max(SUBGHZ_TX_VGA_GAIN_MIN, min(SUBGHZ_TX_VGA_GAIN_MAX, tx_gain))
+ max_duration = max(1, min(SUBGHZ_TX_MAX_DURATION, max_duration))
+
+ iq_path = self._captures_dir / capture.filename
+ if not iq_path.exists():
+ return {'status': 'error', 'message': 'IQ file missing'}
+
+ # Build segment file outside lock (potentially megabytes of read/write)
+ tx_path = iq_path
+ segment_info = None
+ segment_path_for_cleanup: Path | None = None
+ if start_seconds is not None or duration_seconds is not None:
+ try:
+ start_s = max(0.0, float(start_seconds or 0.0))
+ except (TypeError, ValueError):
+ return {'status': 'error', 'message': 'Invalid start_seconds'}
+ try:
+ seg_s = None if duration_seconds is None else float(duration_seconds)
+ except (TypeError, ValueError):
+ return {'status': 'error', 'message': 'Invalid duration_seconds'}
+ if seg_s is not None and seg_s <= 0:
+ return {'status': 'error', 'message': 'duration_seconds must be greater than 0'}
+
+ file_size = iq_path.stat().st_size
+ total_duration = self._estimate_capture_duration_seconds(capture, file_size)
+ if total_duration <= 0:
+ return {'status': 'error', 'message': 'Unable to determine capture duration for segment TX'}
+ if start_s >= total_duration:
+ return {'status': 'error', 'message': 'start_seconds is beyond end of capture'}
+
+ end_s = total_duration if seg_s is None else min(total_duration, start_s + seg_s)
+ if end_s <= start_s:
+ return {'status': 'error', 'message': 'Selected segment is empty'}
+
+ bytes_per_second = max(2, int(capture.sample_rate) * 2)
+ start_byte = int(start_s * bytes_per_second) & ~1
+ end_byte = int(end_s * bytes_per_second) & ~1
+ if end_byte <= start_byte:
+ return {'status': 'error', 'message': 'Selected segment is too short'}
+
+ segment_size = end_byte - start_byte
+ segment_name = f".txseg_{capture.capture_id}_{uuid.uuid4().hex[:8]}.iq"
+ segment_path = self._captures_dir / segment_name
+ segment_path_for_cleanup = segment_path
+ try:
+ with open(iq_path, 'rb') as src, open(segment_path, 'wb') as dst:
+ src.seek(start_byte)
+ remaining = segment_size
+ while remaining > 0:
+ chunk = src.read(min(262144, remaining))
+ if not chunk:
+ break
+ dst.write(chunk)
+ remaining -= len(chunk)
+ written = segment_path.stat().st_size if segment_path.exists() else 0
+ except OSError as exc:
+ logger.error(f"Failed to build TX segment: {exc}")
+ return {'status': 'error', 'message': 'Failed to create TX segment'}
+
+ if written < 2:
+ try:
+ segment_path.unlink(missing_ok=True) # type: ignore[arg-type]
+ except Exception:
+ pass
+ return {'status': 'error', 'message': 'Selected TX segment has no IQ data'}
+
+ tx_path = segment_path
+ segment_info = {
+ 'start_seconds': round(start_s, 3),
+ 'duration_seconds': round(written / bytes_per_second, 3),
+ 'bytes': int(written),
+ }
+
+ with self._lock:
if self.active_mode != 'idle':
+ # Clean up segment file if we prepared one
+ if segment_path_for_cleanup:
+ try:
+ segment_path_for_cleanup.unlink(missing_ok=True) # type: ignore[arg-type]
+ except Exception:
+ pass
return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
- if not self.check_hackrf():
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
+ # Clear any orphaned temp segment from a previous TX attempt.
+ self._cleanup_tx_temp_file()
+ if segment_path_for_cleanup:
+ self._tx_temp_file = segment_path_for_cleanup
- # Look up capture
- capture = self._load_capture(capture_id)
- if not capture:
- return {'status': 'error', 'message': f'Capture not found: {capture_id}'}
-
- # Validate TX frequency
- freq_error = self.validate_tx_frequency(capture.frequency_hz)
- if freq_error:
- return {'status': 'error', 'message': freq_error}
-
- # Enforce gain limit
- tx_gain = max(SUBGHZ_TX_VGA_GAIN_MIN, min(SUBGHZ_TX_VGA_GAIN_MAX, tx_gain))
-
- # Enforce max duration limit
- max_duration = max(1, min(SUBGHZ_TX_MAX_DURATION, max_duration))
-
- iq_path = self._captures_dir / capture.filename
- if not iq_path.exists():
- return {'status': 'error', 'message': 'IQ file missing'}
-
- # Clear any orphaned temp segment from a previous TX attempt.
- self._cleanup_tx_temp_file()
-
- tx_path = iq_path
- segment_info = None
- if start_seconds is not None or duration_seconds is not None:
- try:
- start_s = max(0.0, float(start_seconds or 0.0))
- except (TypeError, ValueError):
- return {'status': 'error', 'message': 'Invalid start_seconds'}
- try:
- seg_s = None if duration_seconds is None else float(duration_seconds)
- except (TypeError, ValueError):
- return {'status': 'error', 'message': 'Invalid duration_seconds'}
- if seg_s is not None and seg_s <= 0:
- return {'status': 'error', 'message': 'duration_seconds must be greater than 0'}
-
- file_size = iq_path.stat().st_size
- total_duration = self._estimate_capture_duration_seconds(capture, file_size)
- if total_duration <= 0:
- return {'status': 'error', 'message': 'Unable to determine capture duration for segment TX'}
- if start_s >= total_duration:
- return {'status': 'error', 'message': 'start_seconds is beyond end of capture'}
-
- end_s = total_duration if seg_s is None else min(total_duration, start_s + seg_s)
- if end_s <= start_s:
- return {'status': 'error', 'message': 'Selected segment is empty'}
-
- bytes_per_second = max(2, int(capture.sample_rate) * 2)
- start_byte = int(start_s * bytes_per_second) & ~1
- end_byte = int(end_s * bytes_per_second) & ~1
- if end_byte <= start_byte:
- return {'status': 'error', 'message': 'Selected segment is too short'}
-
- segment_size = end_byte - start_byte
- segment_name = f".txseg_{capture.capture_id}_{uuid.uuid4().hex[:8]}.iq"
- segment_path = self._captures_dir / segment_name
- try:
- with open(iq_path, 'rb') as src, open(segment_path, 'wb') as dst:
- src.seek(start_byte)
- remaining = segment_size
- while remaining > 0:
- chunk = src.read(min(262144, remaining))
- if not chunk:
- break
- dst.write(chunk)
- remaining -= len(chunk)
- written = segment_path.stat().st_size if segment_path.exists() else 0
- except OSError as exc:
- logger.error(f"Failed to build TX segment: {exc}")
- return {'status': 'error', 'message': 'Failed to create TX segment'}
-
- if written < 2:
- try:
- segment_path.unlink(missing_ok=True) # type: ignore[arg-type]
- except Exception:
- pass
- return {'status': 'error', 'message': 'Selected TX segment has no IQ data'}
-
- tx_path = segment_path
- self._tx_temp_file = segment_path
- segment_info = {
- 'start_seconds': round(start_s, 3),
- 'duration_seconds': round(written / bytes_per_second, 3),
- 'bytes': int(written),
- }
-
- cmd = [
- 'hackrf_transfer',
- '-t', str(tx_path),
- '-f', str(capture.frequency_hz),
- '-s', str(capture.sample_rate),
- '-x', str(tx_gain),
- ]
- if device_serial:
- cmd.extend(['-d', device_serial])
+ cmd = [
+ 'hackrf_transfer',
+ '-t', str(tx_path),
+ '-f', str(capture.frequency_hz),
+ '-s', str(capture.sample_rate),
+ '-x', str(tx_gain),
+ ]
+ if device_serial:
+ cmd.extend(['-d', device_serial])
logger.info(f"SubGHz TX: {' '.join(cmd)}")
@@ -2061,95 +2084,100 @@ class SubGhzManager:
daemon=True,
).start()
- self._emit({
- 'type': 'tx_status',
- 'status': 'transmitting',
- 'capture_id': capture_id,
- 'frequency_hz': capture.frequency_hz,
- 'max_duration': max_duration,
- 'segment': segment_info,
- })
-
- return {
- 'status': 'transmitting',
- 'capture_id': capture_id,
- 'frequency_hz': capture.frequency_hz,
- 'max_duration': max_duration,
- 'segment': segment_info,
- }
+ self._emit({
+ 'type': 'tx_status',
+ 'status': 'transmitting',
+ 'capture_id': capture_id,
+ 'frequency_hz': capture.frequency_hz,
+ 'max_duration': max_duration,
+ 'segment': segment_info,
+ })
- except FileNotFoundError:
- self._cleanup_tx_temp_file()
- return {'status': 'error', 'message': 'hackrf_transfer not found'}
- except Exception as e:
- self._cleanup_tx_temp_file()
- logger.error(f"Failed to start TX: {e}")
- return {'status': 'error', 'message': str(e)}
+ return {
+ 'status': 'transmitting',
+ 'capture_id': capture_id,
+ 'frequency_hz': capture.frequency_hz,
+ 'max_duration': max_duration,
+ 'segment': segment_info,
+ }
+
+ except FileNotFoundError:
+ self._cleanup_tx_temp_file()
+ return {'status': 'error', 'message': 'hackrf_transfer not found'}
+ except Exception as e:
+ self._cleanup_tx_temp_file()
+ logger.error(f"Failed to start TX: {e}")
+ return {'status': 'error', 'message': str(e)}
def _tx_watchdog_kill(self) -> None:
"""Kill TX process when max duration is exceeded."""
logger.warning("SubGHz TX watchdog triggered - killing transmission")
self.stop_transmit()
- def _monitor_tx(self) -> None:
- process = self._tx_process
- if not process:
- return
- try:
- returncode = process.wait()
- except Exception:
- returncode = -1
- with self._lock:
- # Only emit if this is still the active TX process
- if self._tx_process is not process:
- return
- unregister_process(process)
- duration = time.time() - self._tx_start_time if self._tx_start_time else 0
- if returncode and returncode != 0 and returncode != -15:
- # Non-zero exit (not SIGTERM) means unexpected death
- logger.warning(f"hackrf_transfer TX exited unexpectedly (rc={returncode})")
- self._emit({
- 'type': 'error',
- 'message': f'Transmission failed (hackrf_transfer exited with code {returncode})',
- })
- self._tx_process = None
- self._tx_start_time = 0
- self._tx_capture_id = ''
- self._emit({
- 'type': 'tx_status',
- 'status': 'tx_complete',
- 'duration_seconds': round(duration, 1),
- })
- if self._tx_watchdog:
- self._tx_watchdog.cancel()
- self._tx_watchdog = None
- self._cleanup_tx_temp_file()
-
- def stop_transmit(self) -> dict:
- with self._lock:
- if self._tx_watchdog:
- self._tx_watchdog.cancel()
- self._tx_watchdog = None
-
- if not self._tx_process or self._tx_process.poll() is not None:
- self._cleanup_tx_temp_file()
- return {'status': 'not_running'}
-
- safe_terminate(self._tx_process)
- unregister_process(self._tx_process)
- self._tx_process = None
+ def _monitor_tx(self) -> None:
+ process = self._tx_process
+ if not process:
+ return
+ try:
+ returncode = process.wait()
+ except Exception:
+ returncode = -1
+ with self._lock:
+ # Only emit if this is still the active TX process
+ if self._tx_process is not process:
+ return
+ unregister_process(process)
duration = time.time() - self._tx_start_time if self._tx_start_time else 0
- self._tx_start_time = 0
- self._tx_capture_id = ''
- self._cleanup_tx_temp_file()
-
+ if returncode and returncode != 0 and returncode != -15:
+ # Non-zero exit (not SIGTERM) means unexpected death
+ logger.warning(f"hackrf_transfer TX exited unexpectedly (rc={returncode})")
+ self._emit({
+ 'type': 'error',
+ 'message': f'Transmission failed (hackrf_transfer exited with code {returncode})',
+ })
+ self._tx_process = None
+ self._tx_start_time = 0
+ self._tx_capture_id = ''
self._emit({
'type': 'tx_status',
- 'status': 'tx_stopped',
+ 'status': 'tx_complete',
'duration_seconds': round(duration, 1),
})
+ if self._tx_watchdog:
+ self._tx_watchdog.cancel()
+ self._tx_watchdog = None
+ self._cleanup_tx_temp_file()
- return {'status': 'stopped', 'duration_seconds': round(duration, 1)}
+ def stop_transmit(self) -> dict:
+ proc_to_terminate: subprocess.Popen | None = None
+ with self._lock:
+ if self._tx_watchdog:
+ self._tx_watchdog.cancel()
+ self._tx_watchdog = None
+
+ if not self._tx_process or self._tx_process.poll() is not None:
+ self._cleanup_tx_temp_file()
+ return {'status': 'not_running'}
+
+ proc_to_terminate = self._tx_process
+ self._tx_process = None
+ duration = time.time() - self._tx_start_time if self._tx_start_time else 0
+ self._tx_start_time = 0
+ self._tx_capture_id = ''
+ self._cleanup_tx_temp_file()
+
+ # Terminate outside lock to avoid blocking other operations
+ if proc_to_terminate:
+ safe_terminate(proc_to_terminate)
+ unregister_process(proc_to_terminate)
+
+ self._emit({
+ 'type': 'tx_status',
+ 'status': 'tx_stopped',
+ 'duration_seconds': round(duration, 1),
+ })
+
+ return {'status': 'stopped', 'duration_seconds': round(duration, 1)}
# ------------------------------------------------------------------
# SWEEP (hackrf_sweep)
@@ -2162,16 +2190,23 @@ class SubGhzManager:
bin_width: int = 100000,
device_serial: str | None = None,
) -> dict:
+ # Pre-lock: tool availability & device detection (blocking I/O)
+ if not self.check_sweep():
+ return {'status': 'error', 'message': 'hackrf_sweep not found'}
+ device_err = self._require_hackrf_device()
+ if device_err:
+ return {'status': 'error', 'message': device_err}
+
+ # Wait for previous sweep thread to exit (blocking) before lock
+ if self._sweep_thread and self._sweep_thread.is_alive():
+ self._sweep_thread.join(timeout=2.0)
+ if self._sweep_thread.is_alive():
+ return {'status': 'error', 'message': 'Previous sweep still shutting down'}
+
with self._lock:
if self.active_mode != 'idle':
return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
- if not self.check_sweep():
- return {'status': 'error', 'message': 'hackrf_sweep not found'}
- device_err = self._require_hackrf_device()
- if device_err:
- return {'status': 'error', 'message': device_err}
-
cmd = [
'hackrf_sweep',
'-f', f'{int(freq_start_mhz)}:{int(freq_end_mhz)}',
@@ -2182,12 +2217,6 @@ class SubGhzManager:
logger.info(f"SubGHz sweep: {' '.join(cmd)}")
- # Wait for previous sweep thread to exit
- if self._sweep_thread and self._sweep_thread.is_alive():
- self._sweep_thread.join(timeout=2.0)
- if self._sweep_thread.is_alive():
- return {'status': 'error', 'message': 'Previous sweep still shutting down'}
-
try:
self._sweep_process = subprocess.Popen(
cmd,
@@ -2313,15 +2342,20 @@ class SubGhzManager:
logger.error(f"Error reading sweep output: {e}")
def stop_sweep(self) -> dict:
+ proc_to_terminate: subprocess.Popen | None = None
with self._lock:
self._sweep_running = False
if not self._sweep_process or self._sweep_process.poll() is not None:
return {'status': 'not_running'}
- safe_terminate(self._sweep_process)
- unregister_process(self._sweep_process)
+ proc_to_terminate = self._sweep_process
self._sweep_process = None
+ # Terminate outside lock to avoid blocking other operations
+ if proc_to_terminate:
+ safe_terminate(proc_to_terminate)
+ unregister_process(proc_to_terminate)
+
# Join sweep thread outside the lock to avoid blocking other operations
if self._sweep_thread and self._sweep_thread.is_alive():
self._sweep_thread.join(timeout=2.0)
@@ -2338,108 +2372,108 @@ class SubGhzManager:
# CAPTURE LIBRARY
# ------------------------------------------------------------------
- def list_captures(self) -> list[SubGhzCapture]:
- captures = []
- for meta_path in sorted(self._captures_dir.glob('*.json'), reverse=True):
- try:
- data = json.loads(meta_path.read_text())
- bursts = data.get('bursts', [])
- dominant_fingerprint = data.get('dominant_fingerprint', '')
- if not dominant_fingerprint and isinstance(bursts, list):
- fp_counts: dict[str, int] = {}
- for burst in bursts:
- fp = ''
- if isinstance(burst, dict):
- fp = str(burst.get('fingerprint') or '').strip()
- if not fp:
- continue
- fp_counts[fp] = fp_counts.get(fp, 0) + 1
- if fp_counts:
- dominant_fingerprint = max(fp_counts, key=fp_counts.get)
- captures.append(SubGhzCapture(
- capture_id=data['id'],
- filename=data['filename'],
- frequency_hz=data['frequency_hz'],
+ def list_captures(self) -> list[SubGhzCapture]:
+ captures = []
+ for meta_path in sorted(self._captures_dir.glob('*.json'), reverse=True):
+ try:
+ data = json.loads(meta_path.read_text())
+ bursts = data.get('bursts', [])
+ dominant_fingerprint = data.get('dominant_fingerprint', '')
+ if not dominant_fingerprint and isinstance(bursts, list):
+ fp_counts: dict[str, int] = {}
+ for burst in bursts:
+ fp = ''
+ if isinstance(burst, dict):
+ fp = str(burst.get('fingerprint') or '').strip()
+ if not fp:
+ continue
+ fp_counts[fp] = fp_counts.get(fp, 0) + 1
+ if fp_counts:
+ dominant_fingerprint = max(fp_counts, key=fp_counts.get)
+ captures.append(SubGhzCapture(
+ capture_id=data['id'],
+ filename=data['filename'],
+ frequency_hz=data['frequency_hz'],
sample_rate=data['sample_rate'],
lna_gain=data.get('lna_gain', 0),
vga_gain=data.get('vga_gain', 0),
timestamp=data['timestamp'],
- duration_seconds=data.get('duration_seconds', 0),
- size_bytes=data.get('size_bytes', 0),
- label=data.get('label', ''),
- label_source=data.get('label_source', ''),
- decoded_protocols=data.get('decoded_protocols', []),
- bursts=bursts,
- modulation_hint=data.get('modulation_hint', ''),
- modulation_confidence=data.get('modulation_confidence', 0.0),
- protocol_hint=data.get('protocol_hint', ''),
- dominant_fingerprint=dominant_fingerprint,
- fingerprint_group=data.get('fingerprint_group', ''),
- fingerprint_group_size=data.get('fingerprint_group_size', 0),
- trigger_enabled=bool(data.get('trigger_enabled', False)),
- trigger_pre_seconds=data.get('trigger_pre_seconds', 0.0),
- trigger_post_seconds=data.get('trigger_post_seconds', 0.0),
- ))
- except (json.JSONDecodeError, KeyError, OSError) as e:
- logger.debug(f"Skipping invalid capture metadata {meta_path}: {e}")
-
- # Auto-group repeated fingerprints as likely same button/device clusters.
- fingerprint_groups: dict[str, list[SubGhzCapture]] = {}
- for capture in captures:
- fp = (capture.dominant_fingerprint or '').strip().lower()
- if not fp:
- continue
- fingerprint_groups.setdefault(fp, []).append(capture)
- for fp, grouped in fingerprint_groups.items():
- group_id = f"SIG-{fp[:6].upper()}"
- for capture in grouped:
- capture.fingerprint_group = group_id
- capture.fingerprint_group_size = len(grouped)
-
- return captures
+ duration_seconds=data.get('duration_seconds', 0),
+ size_bytes=data.get('size_bytes', 0),
+ label=data.get('label', ''),
+ label_source=data.get('label_source', ''),
+ decoded_protocols=data.get('decoded_protocols', []),
+ bursts=bursts,
+ modulation_hint=data.get('modulation_hint', ''),
+ modulation_confidence=data.get('modulation_confidence', 0.0),
+ protocol_hint=data.get('protocol_hint', ''),
+ dominant_fingerprint=dominant_fingerprint,
+ fingerprint_group=data.get('fingerprint_group', ''),
+ fingerprint_group_size=data.get('fingerprint_group_size', 0),
+ trigger_enabled=bool(data.get('trigger_enabled', False)),
+ trigger_pre_seconds=data.get('trigger_pre_seconds', 0.0),
+ trigger_post_seconds=data.get('trigger_post_seconds', 0.0),
+ ))
+ except (json.JSONDecodeError, KeyError, OSError) as e:
+ logger.debug(f"Skipping invalid capture metadata {meta_path}: {e}")
+
+ # Auto-group repeated fingerprints as likely same button/device clusters.
+ fingerprint_groups: dict[str, list[SubGhzCapture]] = {}
+ for capture in captures:
+ fp = (capture.dominant_fingerprint or '').strip().lower()
+ if not fp:
+ continue
+ fingerprint_groups.setdefault(fp, []).append(capture)
+ for fp, grouped in fingerprint_groups.items():
+ group_id = f"SIG-{fp[:6].upper()}"
+ for capture in grouped:
+ capture.fingerprint_group = group_id
+ capture.fingerprint_group_size = len(grouped)
+
+ return captures
def _load_capture(self, capture_id: str) -> SubGhzCapture | None:
for meta_path in self._captures_dir.glob('*.json'):
try:
- data = json.loads(meta_path.read_text())
- if data.get('id') == capture_id:
- bursts = data.get('bursts', [])
- dominant_fingerprint = data.get('dominant_fingerprint', '')
- if not dominant_fingerprint and isinstance(bursts, list):
- fp_counts: dict[str, int] = {}
- for burst in bursts:
- fp = ''
- if isinstance(burst, dict):
- fp = str(burst.get('fingerprint') or '').strip()
- if not fp:
- continue
- fp_counts[fp] = fp_counts.get(fp, 0) + 1
- if fp_counts:
- dominant_fingerprint = max(fp_counts, key=fp_counts.get)
- return SubGhzCapture(
- capture_id=data['id'],
- filename=data['filename'],
- frequency_hz=data['frequency_hz'],
- sample_rate=data['sample_rate'],
+ data = json.loads(meta_path.read_text())
+ if data.get('id') == capture_id:
+ bursts = data.get('bursts', [])
+ dominant_fingerprint = data.get('dominant_fingerprint', '')
+ if not dominant_fingerprint and isinstance(bursts, list):
+ fp_counts: dict[str, int] = {}
+ for burst in bursts:
+ fp = ''
+ if isinstance(burst, dict):
+ fp = str(burst.get('fingerprint') or '').strip()
+ if not fp:
+ continue
+ fp_counts[fp] = fp_counts.get(fp, 0) + 1
+ if fp_counts:
+ dominant_fingerprint = max(fp_counts, key=fp_counts.get)
+ return SubGhzCapture(
+ capture_id=data['id'],
+ filename=data['filename'],
+ frequency_hz=data['frequency_hz'],
+ sample_rate=data['sample_rate'],
lna_gain=data.get('lna_gain', 0),
vga_gain=data.get('vga_gain', 0),
timestamp=data['timestamp'],
- duration_seconds=data.get('duration_seconds', 0),
- size_bytes=data.get('size_bytes', 0),
- label=data.get('label', ''),
- label_source=data.get('label_source', ''),
- decoded_protocols=data.get('decoded_protocols', []),
- bursts=bursts,
- modulation_hint=data.get('modulation_hint', ''),
- modulation_confidence=data.get('modulation_confidence', 0.0),
- protocol_hint=data.get('protocol_hint', ''),
- dominant_fingerprint=dominant_fingerprint,
- fingerprint_group=data.get('fingerprint_group', ''),
- fingerprint_group_size=data.get('fingerprint_group_size', 0),
- trigger_enabled=bool(data.get('trigger_enabled', False)),
- trigger_pre_seconds=data.get('trigger_pre_seconds', 0.0),
- trigger_post_seconds=data.get('trigger_post_seconds', 0.0),
- )
+ duration_seconds=data.get('duration_seconds', 0),
+ size_bytes=data.get('size_bytes', 0),
+ label=data.get('label', ''),
+ label_source=data.get('label_source', ''),
+ decoded_protocols=data.get('decoded_protocols', []),
+ bursts=bursts,
+ modulation_hint=data.get('modulation_hint', ''),
+ modulation_confidence=data.get('modulation_confidence', 0.0),
+ protocol_hint=data.get('protocol_hint', ''),
+ dominant_fingerprint=dominant_fingerprint,
+ fingerprint_group=data.get('fingerprint_group', ''),
+ fingerprint_group_size=data.get('fingerprint_group_size', 0),
+ trigger_enabled=bool(data.get('trigger_enabled', False)),
+ trigger_pre_seconds=data.get('trigger_pre_seconds', 0.0),
+ trigger_post_seconds=data.get('trigger_post_seconds', 0.0),
+ )
except (json.JSONDecodeError, KeyError, OSError):
continue
return None
@@ -2447,255 +2481,255 @@ class SubGhzManager:
def get_capture(self, capture_id: str) -> SubGhzCapture | None:
return self._load_capture(capture_id)
- def get_capture_path(self, capture_id: str) -> Path | None:
- capture = self._load_capture(capture_id)
- if not capture:
- return None
- path = self._captures_dir / capture.filename
- if path.exists():
- return path
- return None
-
- def trim_capture(
- self,
- capture_id: str,
- start_seconds: float | None = None,
- duration_seconds: float | None = None,
- label: str = '',
- ) -> dict:
- """Create a trimmed capture from a selected IQ time window.
-
- If start/duration are omitted and burst markers exist, the strongest burst
- window is selected automatically with short padding.
- """
- with self._lock:
- if self.active_mode != 'idle':
- return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
-
- capture = self._load_capture(capture_id)
- if not capture:
- return {'status': 'error', 'message': f'Capture not found: {capture_id}'}
-
- src_path = self._captures_dir / capture.filename
- if not src_path.exists():
- return {'status': 'error', 'message': 'IQ file missing'}
-
- try:
- src_size = src_path.stat().st_size
- except OSError:
- return {'status': 'error', 'message': 'Unable to read capture file'}
- if src_size < 2:
- return {'status': 'error', 'message': 'Capture file has no IQ data'}
-
- total_duration = self._estimate_capture_duration_seconds(capture, src_size)
- if total_duration <= 0:
- return {'status': 'error', 'message': 'Unable to determine capture duration'}
-
- use_auto_burst = start_seconds is None and duration_seconds is None
- auto_pad = 0.06
- if use_auto_burst:
- bursts = capture.bursts if isinstance(capture.bursts, list) else []
- best_burst: dict | None = None
- for burst in bursts:
- if not isinstance(burst, dict):
- continue
- dur = float(burst.get('duration_seconds', 0.0) or 0.0)
- if dur <= 0:
- continue
- if best_burst is None:
- best_burst = burst
- continue
- best_peak = float(best_burst.get('peak_level', 0.0) or 0.0)
- cur_peak = float(burst.get('peak_level', 0.0) or 0.0)
- if cur_peak > best_peak:
- best_burst = burst
- elif cur_peak == best_peak and dur > float(best_burst.get('duration_seconds', 0.0) or 0.0):
- best_burst = burst
-
- if best_burst:
- burst_start = max(0.0, float(best_burst.get('start_seconds', 0.0) or 0.0))
- burst_dur = max(0.0, float(best_burst.get('duration_seconds', 0.0) or 0.0))
- start_seconds = max(0.0, burst_start - auto_pad)
- end_seconds = min(total_duration, burst_start + burst_dur + auto_pad)
- duration_seconds = max(0.0, end_seconds - start_seconds)
- else:
- return {
- 'status': 'error',
- 'message': 'No burst markers available. Select a segment manually before trimming.',
- }
-
- try:
- start_s = max(0.0, float(start_seconds or 0.0))
- except (TypeError, ValueError):
- return {'status': 'error', 'message': 'Invalid start_seconds'}
- try:
- seg_s = None if duration_seconds is None else float(duration_seconds)
- except (TypeError, ValueError):
- return {'status': 'error', 'message': 'Invalid duration_seconds'}
-
- if seg_s is not None and seg_s <= 0:
- return {'status': 'error', 'message': 'duration_seconds must be greater than 0'}
- if start_s >= total_duration:
- return {'status': 'error', 'message': 'start_seconds is beyond end of capture'}
-
- end_s = total_duration if seg_s is None else min(total_duration, start_s + seg_s)
- if end_s <= start_s:
- return {'status': 'error', 'message': 'Selected segment is empty'}
-
- bytes_per_second = max(2, int(capture.sample_rate) * 2)
- start_byte = int(start_s * bytes_per_second) & ~1
- end_byte = int(end_s * bytes_per_second) & ~1
- if end_byte <= start_byte:
- return {'status': 'error', 'message': 'Selected segment is too short'}
-
- trim_size = end_byte - start_byte
- source_stem = Path(capture.filename).stem
- trim_name = f"{source_stem}_trim_{datetime.now().strftime('%H%M%S')}_{uuid.uuid4().hex[:4]}.iq"
- trim_path = self._captures_dir / trim_name
- try:
- with open(src_path, 'rb') as src, open(trim_path, 'wb') as dst:
- src.seek(start_byte)
- remaining = trim_size
- while remaining > 0:
- chunk = src.read(min(262144, remaining))
- if not chunk:
- break
- dst.write(chunk)
- remaining -= len(chunk)
- written = trim_path.stat().st_size if trim_path.exists() else 0
- except OSError as exc:
- logger.error(f"Failed to create trimmed capture: {exc}")
- try:
- trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
- except Exception:
- pass
- return {'status': 'error', 'message': 'Failed to write trimmed capture'}
-
- if written < 2:
- try:
- trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
- except Exception:
- pass
- return {'status': 'error', 'message': 'Trimmed capture has no IQ data'}
-
- trimmed_duration = round(written / bytes_per_second, 3)
-
- adjusted_bursts: list[dict] = []
- if isinstance(capture.bursts, list):
- for burst in capture.bursts:
- if not isinstance(burst, dict):
- continue
- burst_start = max(0.0, float(burst.get('start_seconds', 0.0) or 0.0))
- burst_dur = max(0.0, float(burst.get('duration_seconds', 0.0) or 0.0))
- burst_end = burst_start + burst_dur
- overlap_start = max(start_s, burst_start)
- overlap_end = min(end_s, burst_end)
- overlap_dur = overlap_end - overlap_start
- if overlap_dur <= 0:
- continue
- adjusted = dict(burst)
- adjusted['start_seconds'] = round(overlap_start - start_s, 3)
- adjusted['duration_seconds'] = round(overlap_dur, 3)
- adjusted_bursts.append(adjusted)
-
- dominant_fingerprint = ''
- fp_counts: dict[str, int] = {}
- for burst in adjusted_bursts:
- fp = str(burst.get('fingerprint') or '').strip()
- if not fp:
- continue
- fp_counts[fp] = fp_counts.get(fp, 0) + 1
- if fp_counts:
- dominant_fingerprint = max(fp_counts, key=fp_counts.get)
- elif capture.dominant_fingerprint:
- dominant_fingerprint = capture.dominant_fingerprint
-
- modulation_hint = capture.modulation_hint
- modulation_confidence = float(capture.modulation_confidence or 0.0)
- if adjusted_bursts:
- hint_totals: dict[str, float] = {}
- for burst in adjusted_bursts:
- hint = str(burst.get('modulation_hint') or '').strip()
- conf = float(burst.get('modulation_confidence') or 0.0)
- if not hint or hint.lower() == 'unknown':
- continue
- hint_totals[hint] = hint_totals.get(hint, 0.0) + max(0.05, conf)
- if hint_totals:
- modulation_hint = max(hint_totals, key=hint_totals.get)
- total_score = max(sum(hint_totals.values()), 0.001)
- modulation_confidence = min(0.98, hint_totals[modulation_hint] / total_score)
-
- protocol_hint = self._protocol_hint_from_capture(
- capture.frequency_hz,
- modulation_hint,
- len(adjusted_bursts),
- )
-
- manual_label = str(label or '').strip()
- if manual_label:
- capture_label = manual_label
- label_source = 'manual'
- elif capture.label:
- capture_label = f'{capture.label} (Trim)'
- label_source = 'auto'
- else:
- capture_label = self._auto_capture_label(
- capture.frequency_hz,
- len(adjusted_bursts),
- modulation_hint,
- protocol_hint,
- ) + ' (Trim)'
- label_source = 'auto'
-
- trimmed_capture = SubGhzCapture(
- capture_id=uuid.uuid4().hex[:12],
- filename=trim_path.name,
- frequency_hz=capture.frequency_hz,
- sample_rate=capture.sample_rate,
- lna_gain=capture.lna_gain,
- vga_gain=capture.vga_gain,
- timestamp=datetime.now(timezone.utc).isoformat(),
- duration_seconds=round(trimmed_duration, 3),
- size_bytes=int(written),
- label=capture_label,
- label_source=label_source,
- decoded_protocols=list(capture.decoded_protocols),
- bursts=adjusted_bursts,
- modulation_hint=modulation_hint,
- modulation_confidence=round(modulation_confidence, 3),
- protocol_hint=protocol_hint,
- dominant_fingerprint=dominant_fingerprint,
- trigger_enabled=False,
- trigger_pre_seconds=0.0,
- trigger_post_seconds=0.0,
- )
-
- meta_path = trim_path.with_suffix('.json')
- try:
- meta_path.write_text(json.dumps(trimmed_capture.to_dict(), indent=2))
- except OSError as exc:
- logger.error(f"Failed to write trimmed capture metadata: {exc}")
- try:
- trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
- except Exception:
- pass
- return {'status': 'error', 'message': 'Failed to write trimmed capture metadata'}
-
- return {
- 'status': 'ok',
- 'capture': trimmed_capture.to_dict(),
- 'source_capture_id': capture_id,
- 'segment': {
- 'start_seconds': round(start_s, 3),
- 'duration_seconds': round(trimmed_duration, 3),
- 'auto_selected': bool(use_auto_burst),
- },
- }
-
- def delete_capture(self, capture_id: str) -> bool:
- capture = self._load_capture(capture_id)
- if not capture:
- return False
+ def get_capture_path(self, capture_id: str) -> Path | None:
+ capture = self._load_capture(capture_id)
+ if not capture:
+ return None
+ path = self._captures_dir / capture.filename
+ if path.exists():
+ return path
+ return None
+
+ def trim_capture(
+ self,
+ capture_id: str,
+ start_seconds: float | None = None,
+ duration_seconds: float | None = None,
+ label: str = '',
+ ) -> dict:
+ """Create a trimmed capture from a selected IQ time window.
+
+ If start/duration are omitted and burst markers exist, the strongest burst
+ window is selected automatically with short padding.
+ """
+ with self._lock:
+ if self.active_mode != 'idle':
+ return {'status': 'error', 'message': f'Already running: {self.active_mode}'}
+
+ capture = self._load_capture(capture_id)
+ if not capture:
+ return {'status': 'error', 'message': f'Capture not found: {capture_id}'}
+
+ src_path = self._captures_dir / capture.filename
+ if not src_path.exists():
+ return {'status': 'error', 'message': 'IQ file missing'}
+
+ try:
+ src_size = src_path.stat().st_size
+ except OSError:
+ return {'status': 'error', 'message': 'Unable to read capture file'}
+ if src_size < 2:
+ return {'status': 'error', 'message': 'Capture file has no IQ data'}
+
+ total_duration = self._estimate_capture_duration_seconds(capture, src_size)
+ if total_duration <= 0:
+ return {'status': 'error', 'message': 'Unable to determine capture duration'}
+
+ use_auto_burst = start_seconds is None and duration_seconds is None
+ auto_pad = 0.06
+ if use_auto_burst:
+ bursts = capture.bursts if isinstance(capture.bursts, list) else []
+ best_burst: dict | None = None
+ for burst in bursts:
+ if not isinstance(burst, dict):
+ continue
+ dur = float(burst.get('duration_seconds', 0.0) or 0.0)
+ if dur <= 0:
+ continue
+ if best_burst is None:
+ best_burst = burst
+ continue
+ best_peak = float(best_burst.get('peak_level', 0.0) or 0.0)
+ cur_peak = float(burst.get('peak_level', 0.0) or 0.0)
+ if cur_peak > best_peak:
+ best_burst = burst
+ elif cur_peak == best_peak and dur > float(best_burst.get('duration_seconds', 0.0) or 0.0):
+ best_burst = burst
+
+ if best_burst:
+ burst_start = max(0.0, float(best_burst.get('start_seconds', 0.0) or 0.0))
+ burst_dur = max(0.0, float(best_burst.get('duration_seconds', 0.0) or 0.0))
+ start_seconds = max(0.0, burst_start - auto_pad)
+ end_seconds = min(total_duration, burst_start + burst_dur + auto_pad)
+ duration_seconds = max(0.0, end_seconds - start_seconds)
+ else:
+ return {
+ 'status': 'error',
+ 'message': 'No burst markers available. Select a segment manually before trimming.',
+ }
+
+ try:
+ start_s = max(0.0, float(start_seconds or 0.0))
+ except (TypeError, ValueError):
+ return {'status': 'error', 'message': 'Invalid start_seconds'}
+ try:
+ seg_s = None if duration_seconds is None else float(duration_seconds)
+ except (TypeError, ValueError):
+ return {'status': 'error', 'message': 'Invalid duration_seconds'}
+
+ if seg_s is not None and seg_s <= 0:
+ return {'status': 'error', 'message': 'duration_seconds must be greater than 0'}
+ if start_s >= total_duration:
+ return {'status': 'error', 'message': 'start_seconds is beyond end of capture'}
+
+ end_s = total_duration if seg_s is None else min(total_duration, start_s + seg_s)
+ if end_s <= start_s:
+ return {'status': 'error', 'message': 'Selected segment is empty'}
+
+ bytes_per_second = max(2, int(capture.sample_rate) * 2)
+ start_byte = int(start_s * bytes_per_second) & ~1
+ end_byte = int(end_s * bytes_per_second) & ~1
+ if end_byte <= start_byte:
+ return {'status': 'error', 'message': 'Selected segment is too short'}
+
+ trim_size = end_byte - start_byte
+ source_stem = Path(capture.filename).stem
+ trim_name = f"{source_stem}_trim_{datetime.now().strftime('%H%M%S')}_{uuid.uuid4().hex[:4]}.iq"
+ trim_path = self._captures_dir / trim_name
+ try:
+ with open(src_path, 'rb') as src, open(trim_path, 'wb') as dst:
+ src.seek(start_byte)
+ remaining = trim_size
+ while remaining > 0:
+ chunk = src.read(min(262144, remaining))
+ if not chunk:
+ break
+ dst.write(chunk)
+ remaining -= len(chunk)
+ written = trim_path.stat().st_size if trim_path.exists() else 0
+ except OSError as exc:
+ logger.error(f"Failed to create trimmed capture: {exc}")
+ try:
+ trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
+ except Exception:
+ pass
+ return {'status': 'error', 'message': 'Failed to write trimmed capture'}
+
+ if written < 2:
+ try:
+ trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
+ except Exception:
+ pass
+ return {'status': 'error', 'message': 'Trimmed capture has no IQ data'}
+
+ trimmed_duration = round(written / bytes_per_second, 3)
+
+ adjusted_bursts: list[dict] = []
+ if isinstance(capture.bursts, list):
+ for burst in capture.bursts:
+ if not isinstance(burst, dict):
+ continue
+ burst_start = max(0.0, float(burst.get('start_seconds', 0.0) or 0.0))
+ burst_dur = max(0.0, float(burst.get('duration_seconds', 0.0) or 0.0))
+ burst_end = burst_start + burst_dur
+ overlap_start = max(start_s, burst_start)
+ overlap_end = min(end_s, burst_end)
+ overlap_dur = overlap_end - overlap_start
+ if overlap_dur <= 0:
+ continue
+ adjusted = dict(burst)
+ adjusted['start_seconds'] = round(overlap_start - start_s, 3)
+ adjusted['duration_seconds'] = round(overlap_dur, 3)
+ adjusted_bursts.append(adjusted)
+
+ dominant_fingerprint = ''
+ fp_counts: dict[str, int] = {}
+ for burst in adjusted_bursts:
+ fp = str(burst.get('fingerprint') or '').strip()
+ if not fp:
+ continue
+ fp_counts[fp] = fp_counts.get(fp, 0) + 1
+ if fp_counts:
+ dominant_fingerprint = max(fp_counts, key=fp_counts.get)
+ elif capture.dominant_fingerprint:
+ dominant_fingerprint = capture.dominant_fingerprint
+
+ modulation_hint = capture.modulation_hint
+ modulation_confidence = float(capture.modulation_confidence or 0.0)
+ if adjusted_bursts:
+ hint_totals: dict[str, float] = {}
+ for burst in adjusted_bursts:
+ hint = str(burst.get('modulation_hint') or '').strip()
+ conf = float(burst.get('modulation_confidence') or 0.0)
+ if not hint or hint.lower() == 'unknown':
+ continue
+ hint_totals[hint] = hint_totals.get(hint, 0.0) + max(0.05, conf)
+ if hint_totals:
+ modulation_hint = max(hint_totals, key=hint_totals.get)
+ total_score = max(sum(hint_totals.values()), 0.001)
+ modulation_confidence = min(0.98, hint_totals[modulation_hint] / total_score)
+
+ protocol_hint = self._protocol_hint_from_capture(
+ capture.frequency_hz,
+ modulation_hint,
+ len(adjusted_bursts),
+ )
+
+ manual_label = str(label or '').strip()
+ if manual_label:
+ capture_label = manual_label
+ label_source = 'manual'
+ elif capture.label:
+ capture_label = f'{capture.label} (Trim)'
+ label_source = 'auto'
+ else:
+ capture_label = self._auto_capture_label(
+ capture.frequency_hz,
+ len(adjusted_bursts),
+ modulation_hint,
+ protocol_hint,
+ ) + ' (Trim)'
+ label_source = 'auto'
+
+ trimmed_capture = SubGhzCapture(
+ capture_id=uuid.uuid4().hex[:12],
+ filename=trim_path.name,
+ frequency_hz=capture.frequency_hz,
+ sample_rate=capture.sample_rate,
+ lna_gain=capture.lna_gain,
+ vga_gain=capture.vga_gain,
+ timestamp=datetime.now(timezone.utc).isoformat(),
+ duration_seconds=round(trimmed_duration, 3),
+ size_bytes=int(written),
+ label=capture_label,
+ label_source=label_source,
+ decoded_protocols=list(capture.decoded_protocols),
+ bursts=adjusted_bursts,
+ modulation_hint=modulation_hint,
+ modulation_confidence=round(modulation_confidence, 3),
+ protocol_hint=protocol_hint,
+ dominant_fingerprint=dominant_fingerprint,
+ trigger_enabled=False,
+ trigger_pre_seconds=0.0,
+ trigger_post_seconds=0.0,
+ )
+
+ meta_path = trim_path.with_suffix('.json')
+ try:
+ meta_path.write_text(json.dumps(trimmed_capture.to_dict(), indent=2))
+ except OSError as exc:
+ logger.error(f"Failed to write trimmed capture metadata: {exc}")
+ try:
+ trim_path.unlink(missing_ok=True) # type: ignore[arg-type]
+ except Exception:
+ pass
+ return {'status': 'error', 'message': 'Failed to write trimmed capture metadata'}
+
+ return {
+ 'status': 'ok',
+ 'capture': trimmed_capture.to_dict(),
+ 'source_capture_id': capture_id,
+ 'segment': {
+ 'start_seconds': round(start_s, 3),
+ 'duration_seconds': round(trimmed_duration, 3),
+ 'auto_selected': bool(use_auto_burst),
+ },
+ }
+
+ def delete_capture(self, capture_id: str) -> bool:
+ capture = self._load_capture(capture_id)
+ if not capture:
+ return False
iq_path = self._captures_dir / capture.filename
meta_path = iq_path.with_suffix('.json')
@@ -2710,88 +2744,88 @@ class SubGhzManager:
logger.error(f"Failed to delete {path}: {e}")
return deleted
- def update_capture_label(self, capture_id: str, label: str) -> bool:
- for meta_path in self._captures_dir.glob('*.json'):
- try:
- data = json.loads(meta_path.read_text())
- if data.get('id') == capture_id:
- data['label'] = label
- data['label_source'] = 'manual' if label else data.get('label_source', '')
- meta_path.write_text(json.dumps(data, indent=2))
- return True
- except (json.JSONDecodeError, KeyError, OSError):
- continue
- return False
+ def update_capture_label(self, capture_id: str, label: str) -> bool:
+ for meta_path in self._captures_dir.glob('*.json'):
+ try:
+ data = json.loads(meta_path.read_text())
+ if data.get('id') == capture_id:
+ data['label'] = label
+ data['label_source'] = 'manual' if label else data.get('label_source', '')
+ meta_path.write_text(json.dumps(data, indent=2))
+ return True
+ except (json.JSONDecodeError, KeyError, OSError):
+ continue
+ return False
# ------------------------------------------------------------------
# STOP ALL
# ------------------------------------------------------------------
- def stop_all(self) -> None:
- """Stop any running SubGHz process."""
- rx_thread: threading.Thread | None = None
- sweep_thread: threading.Thread | None = None
- rx_file_handle: BinaryIO | None = None
-
- with self._lock:
- self._decode_stop = True
- self._sweep_running = False
- self._rx_stop = True
-
- if self._tx_watchdog:
- self._tx_watchdog.cancel()
- self._tx_watchdog = None
-
- for proc_attr in (
- '_rx_process',
- '_decode_hackrf_process',
- '_decode_process',
- '_tx_process',
- '_sweep_process',
- ):
- process = getattr(self, proc_attr, None)
- if process and process.poll() is None:
- safe_terminate(process)
- unregister_process(process)
- setattr(self, proc_attr, None)
-
- rx_thread = self._rx_thread
- self._rx_thread = None
- sweep_thread = self._sweep_thread
- self._sweep_thread = None
- rx_file_handle = self._rx_file_handle
- self._rx_file_handle = None
-
- self._cleanup_tx_temp_file()
- self._rx_file = None
- self._tx_capture_id = ''
-
- self._rx_start_time = 0
- self._rx_bytes_written = 0
- self._rx_bursts = []
- self._rx_trigger_enabled = False
- self._rx_trigger_first_burst_start = None
- self._rx_trigger_last_burst_end = None
- self._rx_autostop_pending = False
- self._rx_modulation_hint = ''
- self._rx_modulation_confidence = 0.0
- self._rx_protocol_hint = ''
- self._rx_fingerprint_counts = {}
- self._tx_start_time = 0
- self._decode_start_time = 0
- self._decode_frequency_hz = 0
- self._decode_sample_rate = 0
-
- if rx_thread and rx_thread.is_alive():
- rx_thread.join(timeout=1.5)
- if sweep_thread and sweep_thread.is_alive():
- sweep_thread.join(timeout=1.5)
-
- if rx_file_handle:
- try:
- rx_file_handle.close()
- except OSError:
- pass
+ def stop_all(self) -> None:
+ """Stop any running SubGHz process."""
+ rx_thread: threading.Thread | None = None
+ sweep_thread: threading.Thread | None = None
+ rx_file_handle: BinaryIO | None = None
+
+ with self._lock:
+ self._decode_stop = True
+ self._sweep_running = False
+ self._rx_stop = True
+
+ if self._tx_watchdog:
+ self._tx_watchdog.cancel()
+ self._tx_watchdog = None
+
+ for proc_attr in (
+ '_rx_process',
+ '_decode_hackrf_process',
+ '_decode_process',
+ '_tx_process',
+ '_sweep_process',
+ ):
+ process = getattr(self, proc_attr, None)
+ if process and process.poll() is None:
+ safe_terminate(process)
+ unregister_process(process)
+ setattr(self, proc_attr, None)
+
+ rx_thread = self._rx_thread
+ self._rx_thread = None
+ sweep_thread = self._sweep_thread
+ self._sweep_thread = None
+ rx_file_handle = self._rx_file_handle
+ self._rx_file_handle = None
+
+ self._cleanup_tx_temp_file()
+ self._rx_file = None
+ self._tx_capture_id = ''
+
+ self._rx_start_time = 0
+ self._rx_bytes_written = 0
+ self._rx_bursts = []
+ self._rx_trigger_enabled = False
+ self._rx_trigger_first_burst_start = None
+ self._rx_trigger_last_burst_end = None
+ self._rx_autostop_pending = False
+ self._rx_modulation_hint = ''
+ self._rx_modulation_confidence = 0.0
+ self._rx_protocol_hint = ''
+ self._rx_fingerprint_counts = {}
+ self._tx_start_time = 0
+ self._decode_start_time = 0
+ self._decode_frequency_hz = 0
+ self._decode_sample_rate = 0
+
+ if rx_thread and rx_thread.is_alive():
+ rx_thread.join(timeout=1.5)
+ if sweep_thread and sweep_thread.is_alive():
+ sweep_thread.join(timeout=1.5)
+
+ if rx_file_handle:
+ try:
+ rx_file_handle.close()
+ except OSError:
+ pass
# Global singleton
diff --git a/utils/weather_sat.py b/utils/weather_sat.py
index f30b36d..30ce6c6 100644
--- a/utils/weather_sat.py
+++ b/utils/weather_sat.py
@@ -173,7 +173,7 @@ class WeatherSatDecoder:
self._current_frequency: float = 0.0
self._current_mode: str = ''
self._capture_start_time: float = 0
- self._device_index: int = -1
+ self._device_index: int = -1
self._capture_output_dir: Path | None = None
self._on_complete_callback: Callable[[], None] | None = None
self._capture_phase: str = 'idle'
@@ -303,13 +303,13 @@ class WeatherSatDecoder:
))
return False
- self._current_satellite = satellite
- self._current_frequency = sat_info['frequency']
- self._current_mode = sat_info['mode']
- self._device_index = -1 # Offline decode does not claim an SDR device
- self._capture_start_time = time.time()
- self._capture_phase = 'decoding'
- self._stop_event.clear()
+ self._current_satellite = satellite
+ self._current_frequency = sat_info['frequency']
+ self._current_mode = sat_info['mode']
+ self._device_index = -1 # Offline decode does not claim an SDR device
+ self._capture_start_time = time.time()
+ self._capture_phase = 'decoding'
+ self._stop_event.clear()
try:
self._running = True
@@ -363,6 +363,20 @@ class WeatherSatDecoder:
Returns:
True if started successfully
"""
+ # Validate satellite BEFORE acquiring the lock
+ sat_info = WEATHER_SATELLITES.get(satellite)
+ if not sat_info:
+ logger.error(f"Unknown satellite: {satellite}")
+ self._emit_progress(CaptureProgress(
+ status='error',
+ message=f'Unknown satellite: {satellite}'
+ ))
+ return False
+
+ # Resolve device ID BEFORE lock — this runs rtl_test which can
+ # take up to 5s and has no side effects on instance state.
+ source_id = self._resolve_device_id(device_index)
+
with self._lock:
if self._running:
return True
@@ -375,15 +389,6 @@ class WeatherSatDecoder:
))
return False
- sat_info = WEATHER_SATELLITES.get(satellite)
- if not sat_info:
- logger.error(f"Unknown satellite: {satellite}")
- self._emit_progress(CaptureProgress(
- status='error',
- message=f'Unknown satellite: {satellite}'
- ))
- return False
-
self._current_satellite = satellite
self._current_frequency = sat_info['frequency']
self._current_mode = sat_info['mode']
@@ -394,7 +399,7 @@ class WeatherSatDecoder:
try:
self._running = True
- self._start_satdump(sat_info, device_index, gain, sample_rate, bias_t)
+ self._start_satdump(sat_info, device_index, gain, sample_rate, bias_t, source_id)
logger.info(
f"Weather satellite capture started: {satellite} "
@@ -429,6 +434,7 @@ class WeatherSatDecoder:
gain: float,
sample_rate: int,
bias_t: bool,
+ source_id: str | None = None,
) -> None:
"""Start SatDump live capture and decode."""
# Create timestamped output directory for this capture
@@ -439,9 +445,9 @@ class WeatherSatDecoder:
freq_hz = int(sat_info['frequency'] * 1_000_000)
- # SatDump v1.2+ uses string source_id (device serial) not numeric index.
- # Auto-detect serial by querying rtl_eeprom, fall back to string index.
- source_id = self._resolve_device_id(device_index)
+ # Use pre-resolved source_id, or fall back to resolving now
+ if source_id is None:
+ source_id = self._resolve_device_id(device_index)
cmd = [
'satdump', 'live',
@@ -465,18 +471,18 @@ class WeatherSatDecoder:
master_fd, slave_fd = pty.openpty()
self._pty_master_fd = master_fd
- self._process = subprocess.Popen(
- cmd,
- stdout=slave_fd,
- stderr=slave_fd,
- stdin=subprocess.DEVNULL,
- close_fds=True,
- )
- register_process(self._process)
- try:
- os.close(slave_fd) # parent doesn't need the slave side
- except OSError:
- pass
+ self._process = subprocess.Popen(
+ cmd,
+ stdout=slave_fd,
+ stderr=slave_fd,
+ stdin=subprocess.DEVNULL,
+ close_fds=True,
+ )
+ register_process(self._process)
+ try:
+ os.close(slave_fd) # parent doesn't need the slave side
+ except OSError:
+ pass
# Check for early exit asynchronously (avoid blocking /start for 3s)
def _check_early_exit():
@@ -568,18 +574,18 @@ class WeatherSatDecoder:
master_fd, slave_fd = pty.openpty()
self._pty_master_fd = master_fd
- self._process = subprocess.Popen(
- cmd,
- stdout=slave_fd,
- stderr=slave_fd,
- stdin=subprocess.DEVNULL,
- close_fds=True,
- )
- register_process(self._process)
- try:
- os.close(slave_fd) # parent doesn't need the slave side
- except OSError:
- pass
+ self._process = subprocess.Popen(
+ cmd,
+ stdout=slave_fd,
+ stderr=slave_fd,
+ stdin=subprocess.DEVNULL,
+ close_fds=True,
+ )
+ register_process(self._process)
+ try:
+ os.close(slave_fd) # parent doesn't need the slave side
+ except OSError:
+ pass
# For offline mode, don't check for early exit — file decoding
# may complete very quickly and exit code 0 is normal success.
@@ -812,20 +818,23 @@ class WeatherSatDecoder:
# Signal watcher thread to do final scan and exit
self._stop_event.set()
- # Process ended — release resources
- was_running = self._running
- self._running = False
+ # Acquire lock when modifying shared state to avoid racing
+ # with stop() which may have already cleaned up.
+ with self._lock:
+ was_running = self._running
+ self._running = False
+ process = self._process
elapsed = int(time.time() - self._capture_start_time) if self._capture_start_time else 0
if was_running:
# Collect exit status (returncode is only set after poll/wait)
- if self._process and self._process.returncode is None:
+ if process and process.returncode is None:
try:
- self._process.wait(timeout=5)
+ process.wait(timeout=5)
except subprocess.TimeoutExpired:
- self._process.kill()
- self._process.wait()
- retcode = self._process.returncode if self._process else None
+ process.kill()
+ process.wait()
+ retcode = process.returncode if process else None
if retcode and retcode != 0:
self._capture_phase = 'error'
self._emit_progress(CaptureProgress(
@@ -899,24 +908,24 @@ class WeatherSatDecoder:
except OSError:
continue
- # Determine product type from filename/path
- product = self._parse_product_name(filepath)
-
- # Copy image to main output dir for serving
- safe_sat = re.sub(r'[^A-Za-z0-9_-]+', '_', self._current_satellite).strip('_') or 'satellite'
- safe_stem = re.sub(r'[^A-Za-z0-9_-]+', '_', filepath.stem).strip('_') or 'image'
- suffix = filepath.suffix.lower()
- if suffix not in ('.png', '.jpg', '.jpeg'):
- suffix = '.png'
- serve_name = (
- f"{safe_sat}_{safe_stem}_{datetime.now().strftime('%Y%m%d_%H%M%S_%f')}"
- f"{suffix}"
- )
- serve_path = self._output_dir / serve_name
- try:
- shutil.copy2(filepath, serve_path)
- except OSError:
- # Copy failed — don't mark as known so it can be retried
+ # Determine product type from filename/path
+ product = self._parse_product_name(filepath)
+
+ # Copy image to main output dir for serving
+ safe_sat = re.sub(r'[^A-Za-z0-9_-]+', '_', self._current_satellite).strip('_') or 'satellite'
+ safe_stem = re.sub(r'[^A-Za-z0-9_-]+', '_', filepath.stem).strip('_') or 'image'
+ suffix = filepath.suffix.lower()
+ if suffix not in ('.png', '.jpg', '.jpeg'):
+ suffix = '.png'
+ serve_name = (
+ f"{safe_sat}_{safe_stem}_{datetime.now().strftime('%Y%m%d_%H%M%S_%f')}"
+ f"{suffix}"
+ )
+ serve_path = self._output_dir / serve_name
+ try:
+ shutil.copy2(filepath, serve_path)
+ except OSError:
+ # Copy failed — don't mark as known so it can be retried
continue
# Only mark as known after successful copy
@@ -960,12 +969,12 @@ class WeatherSatDecoder:
return 'Multispectral Analysis'
if 'thermal' in name or 'temp' in name:
return 'Thermal'
- if 'ndvi' in name:
- return 'NDVI Vegetation'
- if 'channel' in name or 'ch' in name:
- match = re.search(r'(?:channel|ch)[\s_-]*(\d+)', name)
- if match:
- return f'Channel {match.group(1)}'
+ if 'ndvi' in name:
+ return 'NDVI Vegetation'
+ if 'channel' in name or 'ch' in name:
+ match = re.search(r'(?:channel|ch)[\s_-]*(\d+)', name)
+ if match:
+ return f'Channel {match.group(1)}'
if 'avhrr' in name:
return 'AVHRR'
if 'msu' in name or 'mtvza' in name:
@@ -986,14 +995,16 @@ class WeatherSatDecoder:
self._running = False
self._stop_event.set()
self._close_pty()
+ process = self._process
+ self._process = None
+ elapsed = int(time.time() - self._capture_start_time) if self._capture_start_time else 0
+ logger.info(f"Weather satellite capture stopped after {elapsed}s")
+ self._device_index = -1
- if self._process:
- safe_terminate(self._process)
- self._process = None
-
- elapsed = int(time.time() - self._capture_start_time) if self._capture_start_time else 0
- logger.info(f"Weather satellite capture stopped after {elapsed}s")
- self._device_index = -1
+ # Terminate outside the lock so stop() returns quickly
+ # and doesn't block start() or other lock acquisitions
+ if process:
+ safe_terminate(process)
def get_images(self) -> list[WeatherSatImage]:
"""Get list of decoded images."""
@@ -1029,18 +1040,18 @@ class WeatherSatDecoder:
sat_info = WEATHER_SATELLITES.get(satellite, {})
- image = WeatherSatImage(
- filename=filepath.name,
- path=filepath,
- satellite=satellite,
- mode=sat_info.get('mode', 'Unknown'),
+ image = WeatherSatImage(
+ filename=filepath.name,
+ path=filepath,
+ satellite=satellite,
+ mode=sat_info.get('mode', 'Unknown'),
timestamp=datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc),
frequency=sat_info.get('frequency', 0.0),
- size_bytes=stat.st_size,
- product=self._parse_product_name(filepath),
- )
- self._images.append(image)
- known_filenames.add(filepath.name)
+ size_bytes=stat.st_size,
+ product=self._parse_product_name(filepath),
+ )
+ self._images.append(image)
+ known_filenames.add(filepath.name)
def delete_image(self, filename: str) -> bool:
"""Delete a decoded image."""
diff --git a/utils/wefax.py b/utils/wefax.py
index 8dd9917..cfc1465 100644
--- a/utils/wefax.py
+++ b/utils/wefax.py
@@ -299,14 +299,7 @@ class WeFaxDecoder:
try:
self._running = True
self._last_error = ''
- self._start_pipeline()
-
- logger.info(
- f"WeFax decoder started: {frequency_khz} kHz, "
- f"station={station}, IOC={ioc}, LPM={lpm}"
- )
- return True
-
+ self._start_pipeline_spawn()
except Exception as e:
self._running = False
self._last_error = str(e)
@@ -317,8 +310,32 @@ class WeFaxDecoder:
))
return False
+ # Health check sleep outside lock
+ try:
+ self._start_pipeline_health_check()
+ logger.info(
+ f"WeFax decoder started: {frequency_khz} kHz, "
+ f"station={station}, IOC={ioc}, LPM={lpm}"
+ )
+ return True
+ except Exception as e:
+ with self._lock:
+ self._running = False
+ self._last_error = str(e)
+ logger.error(f"Failed to start WeFax decoder: {e}")
+ self._emit_progress(WeFaxProgress(
+ status='error',
+ message=str(e),
+ ))
+ return False
+
def _start_pipeline(self) -> None:
"""Start SDR FM demod subprocess in USB mode for WeFax."""
+ self._start_pipeline_spawn()
+ self._start_pipeline_health_check()
+
+ def _start_pipeline_spawn(self) -> None:
+ """Spawn the SDR FM demod subprocess. Must hold self._lock."""
try:
sdr_type_enum = SDRType(self._sdr_type)
except ValueError:
@@ -361,21 +378,24 @@ class WeFaxDecoder:
stderr=subprocess.PIPE,
)
- # Post-spawn health check — catch immediate failures
+ def _start_pipeline_health_check(self) -> None:
+ """Post-spawn health check and decode thread start. Called outside lock."""
time.sleep(0.3)
- if self._sdr_process.poll() is not None:
- stderr_detail = ''
- if self._sdr_process.stderr:
- stderr_detail = self._sdr_process.stderr.read().decode(
- errors='replace').strip()
- rc = self._sdr_process.returncode
- self._sdr_process = None
- detail = stderr_detail.split('\n')[-1] if stderr_detail else f'exit code {rc}'
- raise RuntimeError(f'{self._sdr_tool_name} failed: {detail}')
- self._decode_thread = threading.Thread(
- target=self._decode_audio_stream, daemon=True)
- self._decode_thread.start()
+ with self._lock:
+ if self._sdr_process and self._sdr_process.poll() is not None:
+ stderr_detail = ''
+ if self._sdr_process.stderr:
+ stderr_detail = self._sdr_process.stderr.read().decode(
+ errors='replace').strip()
+ rc = self._sdr_process.returncode
+ self._sdr_process = None
+ detail = stderr_detail.split('\n')[-1] if stderr_detail else f'exit code {rc}'
+ raise RuntimeError(f'{self._sdr_tool_name} failed: {detail}')
+
+ self._decode_thread = threading.Thread(
+ target=self._decode_audio_stream, daemon=True)
+ self._decode_thread.start()
def _decode_audio_stream(self) -> None:
"""Read audio from SDR FM demod and decode WeFax images.