From 3c932f0ce99f3026a381e7c8dd306a2833ecb4c3 Mon Sep 17 00:00:00 2001 From: Will Greenberg Date: Tue, 30 Apr 2024 14:43:38 -0700 Subject: [PATCH 1/3] daemon: run analysis in realtime Currently we just show the results of analysis as a
 tagged
JSON blob, but eventually we can make some actual UI
---
 bin/src/analysis.rs           |  56 +++++++++
 bin/src/check.rs              | 108 +---------------
 bin/src/daemon.rs             |  23 +++-
 bin/src/diag.rs               |  22 +++-
 bin/src/pcap.rs               |   5 +-
 bin/src/server.rs             |   2 +
 bin/static/index.html         |   4 +
 bin/static/js/main.js         |   8 ++
 lib/src/analysis/analyzer.rs  | 130 ++++++++++++++++++-
 lib/src/gsmtap_parser.rs      | 229 ++++++++++++++++------------------
 lib/tests/test_lte_parsing.rs |  35 +++---
 11 files changed, 361 insertions(+), 261 deletions(-)
 create mode 100644 bin/src/analysis.rs

diff --git a/bin/src/analysis.rs b/bin/src/analysis.rs
new file mode 100644
index 0000000..a9d23c5
--- /dev/null
+++ b/bin/src/analysis.rs
@@ -0,0 +1,56 @@
+use std::sync::Arc;
+
+use axum::{extract::State, http::StatusCode, Json};
+use log::error;
+use rayhunter::{analysis::analyzer::{AnalysisReport, Harness}, diag::MessagesContainer};
+use tokio::sync;
+use tokio_util::task::TaskTracker;
+
+use crate::server::ServerState;
+
+#[derive(Debug)]
+pub enum AnalysisMessage {
+    Reset,
+    GetReport(sync::oneshot::Sender<AnalysisReport>),
+    AnalyzeContainer(MessagesContainer),
+    StopThread,
+}
+
+pub fn run_analysis_thread(task_tracker: &TaskTracker) -> sync::mpsc::Sender<AnalysisMessage> {
+    let (tx, mut rx) = sync::mpsc::channel(5);
+
+    task_tracker.spawn(async move {
+        let mut harness = Harness::new_with_all_analyzers();
+        loop {
+            match rx.recv().await {
+                Some(AnalysisMessage::GetReport(sender)) => {
+                    // this might fail if the client closes their connection
+                    // before we're done building the report
+                    if let Err(e) = sender.send(harness.build_analysis_report()) {
+                        error!("failed to send analysis report: {:?}", e);
+                    }
+                },
+                Some(AnalysisMessage::Reset) => harness = Harness::new_with_all_analyzers(),
+                Some(AnalysisMessage::AnalyzeContainer(container)) => harness.analyze_qmdl_messages(container),
+                Some(AnalysisMessage::StopThread) | None => break,
+            }
+        }
+    });
+
+    tx
+}
+
+pub async fn get_analysis_report(State(state): State<Arc<ServerState>>) -> Result<Json<AnalysisReport>, (StatusCode, String)> {
+    if state.readonly_mode {
+        return Err((StatusCode::FORBIDDEN, "server is in readonly mode".to_string()));
+    }
+    let analysis_tx = state.maybe_analysis_tx.as_ref().unwrap();
+    let (report_tx, report_rx) = tokio::sync::oneshot::channel();
+    if let Err(e) = analysis_tx.send(AnalysisMessage::GetReport(report_tx)).await {
+        return Err((StatusCode::INTERNAL_SERVER_ERROR, format!("error reaching analysis thread: {:?}", e)));
+    }
+    match report_rx.await {
+        Ok(report) => Ok(Json(report)),
+        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, format!("error fetching analysis report: {:?}", e)))
+    }
+}
diff --git a/bin/src/check.rs b/bin/src/check.rs
index 7f627e7..0383c50 100644
--- a/bin/src/check.rs
+++ b/bin/src/check.rs
@@ -1,8 +1,6 @@
 use std::{future, path::PathBuf, pin::pin};
-use chrono::{DateTime, FixedOffset};
-use rayhunter::{analysis::{analyzer::{Event, EventType, Harness}, information_element::InformationElement, lte_downgrade::LteSib6And7DowngradeAnalyzer}, diag::DataType, gsmtap_parser::GsmtapParser, qmdl::QmdlReader};
+use rayhunter::{analysis::analyzer::Harness, diag::DataType, qmdl::QmdlReader};
 use tokio::fs::File;
-use serde::Serialize;
 use clap::Parser;
 use futures::TryStreamExt;
 
@@ -13,120 +11,22 @@ struct Args {
     qmdl_path: PathBuf,
 }
 
-#[derive(Serialize, Debug)]
-struct AnalyzerMetadata {
-    name: String,
-    description: String,
-}
-
-#[derive(Serialize, Debug)]
-struct ReportMetadata {
-    num_packets_analyzed: usize,
-    num_packets_skipped: usize,
-    num_warnings: usize,
-    first_packet_time: DateTime<FixedOffset>,
-    last_packet_time: DateTime<FixedOffset>,
-    analyzers: Vec<AnalyzerMetadata>,
-}
-
-#[derive(Serialize, Debug)]
-struct PacketAnalysis {
-    timestamp: DateTime<FixedOffset>,
-    events: Vec<Option<Event>>,
-}
-
-#[derive(Serialize, Debug)]
-struct AnalysisReport {
-    metadata: ReportMetadata,
-    analysis: Vec,
-}
-
 #[tokio::main]
 async fn main() {
     env_logger::init();
     let args = Args::parse();
 
-    let mut harness = Harness::new();
-    harness.add_analyzer(Box::new(LteSib6And7DowngradeAnalyzer{}));
-
-    let mut num_packets_analyzed = 0;
-    let mut num_warnings = 0;
-    let mut first_packet_time: Option<DateTime<FixedOffset>> = None;
-    let mut last_packet_time: Option<DateTime<FixedOffset>> = None;
-    let mut skipped_message_reasons: Vec<String> = Vec::new();
-    let mut analysis: Vec<PacketAnalysis> = Vec::new();
-    let mut analyzers: Vec<AnalyzerMetadata> = Vec::new();
-
-    let names = harness.get_names();
-    let descriptions = harness.get_names();
-    for (name, description) in names.iter().zip(descriptions.iter()) {
-        analyzers.push(AnalyzerMetadata {
-            name: name.to_string(),
-            description: description.to_string(),
-        });
-    }
+    let mut harness = Harness::new_with_all_analyzers();
 
     let qmdl_file = File::open(args.qmdl_path).await.expect("failed to open QMDL file");
     let file_size = qmdl_file.metadata().await.expect("failed to get QMDL file metadata").len();
-    let mut gsmtap_parser = GsmtapParser::new();
     let mut qmdl_reader = QmdlReader::new(qmdl_file, Some(file_size as usize));
     let mut qmdl_stream = pin!(qmdl_reader.as_stream()
         .try_filter(|container| future::ready(container.data_type == DataType::UserSpace)));
     while let Some(container) = qmdl_stream.try_next().await.expect("failed getting QMDL container") {
-        for maybe_qmdl_message in container.into_messages() {
-            let qmdl_message = match maybe_qmdl_message {
-                Ok(msg) => msg,
-                Err(err) => {
-                    skipped_message_reasons.push(format!("{:?}", err));
-                    continue;
-                }
-            };
-            let gsmtap_message = match gsmtap_parser.parse(qmdl_message) {
-                Ok(msg) => msg,
-                Err(err) => {
-                    skipped_message_reasons.push(format!("{:?}", err));
-                    continue;
-                }
-            };
-            let Some((timestamp, gsmtap_msg)) = gsmtap_message else {
-                continue;
-            };
-            let element = match InformationElement::try_from(&gsmtap_msg) {
-                Ok(element) => element,
-                Err(err) => {
-                    skipped_message_reasons.push(format!("{:?}", err));
-                    continue;
-                }
-            };
-            if first_packet_time.is_none() {
-                first_packet_time = Some(timestamp.to_datetime());
-            }
-            last_packet_time = Some(timestamp.to_datetime());
-            num_packets_analyzed += 1;
-            let analysis_result = harness.analyze_information_element(&element);
-            if analysis_result.iter().any(Option::is_some) {
-                num_warnings += analysis_result.iter()
-                    .filter(|maybe_event| matches!(maybe_event, Some(Event { event_type: EventType::QualitativeWarning { .. }, .. })))
-                    .count();
-                analysis.push(PacketAnalysis {
-                    timestamp: timestamp.to_datetime(),
-                    events: analysis_result,
-                });
-            }
-        }
+        harness.analyze_qmdl_messages(container)
     }
 
-    let report = AnalysisReport {
-        metadata: ReportMetadata {
-            num_packets_analyzed,
-            num_packets_skipped: skipped_message_reasons.len(),
-            num_warnings,
-            first_packet_time: first_packet_time.expect("no packet times set"),
-            last_packet_time: last_packet_time.expect("no packet times set"),
-            analyzers,
-        },
-        analysis,
-    };
-
+    let report = harness.build_analysis_report();
     println!("{}", serde_json::to_string(&report).expect("failed to serialize report"));
 }
diff --git a/bin/src/daemon.rs b/bin/src/daemon.rs
index 4d65521..9aad6c5 100644
--- a/bin/src/daemon.rs
+++ b/bin/src/daemon.rs
@@ -1,3 +1,4 @@
+mod analysis;
 mod config;
 mod error;
 mod pcap;
@@ -14,6 +15,7 @@ use crate::pcap::get_pcap;
 use crate::stats::get_system_stats;
 use crate::error::RayhunterError;
 
+use analysis::{get_analysis_report, run_analysis_thread, AnalysisMessage};
 use axum::response::Redirect;
 use diag::{DiagDeviceCtrlMessage, start_recording, stop_recording};
 use log::{info, error};
@@ -37,12 +39,14 @@ async fn run_server(
     config: &config::Config,
     qmdl_store_lock: Arc<RwLock<QmdlStore>>,
     server_shutdown_rx: oneshot::Receiver<()>,
-    diag_device_sender: Sender<DiagDeviceCtrlMessage>
+    diag_device_sender: Sender<DiagDeviceCtrlMessage>,
+    maybe_analysis_tx: Option<Sender<AnalysisMessage>>
 ) -> JoinHandle<()> {
     let state = Arc::new(ServerState {
         qmdl_store_lock,
         diag_device_ctrl_sender: diag_device_sender,
         readonly_mode: config.readonly_mode,
+        maybe_analysis_tx,
     });
 
     let app = Router::new()
@@ -52,6 +56,7 @@ async fn run_server(
         .route("/api/qmdl-manifest", get(get_qmdl_manifest))
         .route("/api/start-recording", post(start_recording))
         .route("/api/stop-recording", post(stop_recording))
+        .route("/api/analysis-report", get(get_analysis_report))
         .route("/", get(|| async { Redirect::permanent("/index.html") }))
         .route("/*path", get(serve_static))
         .with_state(state);
@@ -87,7 +92,8 @@ fn run_ctrl_c_thread(
     task_tracker: &TaskTracker,
     diag_device_sender: Sender<DiagDeviceCtrlMessage>,
     server_shutdown_tx: oneshot::Sender<()>,
-    qmdl_store_lock: Arc<RwLock<QmdlStore>>
+    qmdl_store_lock: Arc<RwLock<QmdlStore>>,
+    maybe_analysis_tx: Option<Sender<AnalysisMessage>>
 ) -> JoinHandle<Result<(), RayhunterError>> {
     task_tracker.spawn(async move {
         match tokio::signal::ctrl_c().await {
@@ -103,6 +109,10 @@ fn run_ctrl_c_thread(
                     .expect("couldn't send server shutdown signal");
                 diag_device_sender.send(DiagDeviceCtrlMessage::Exit).await
                     .expect("couldn't send Exit message to diag thread");
+                if let Some(analysis_tx) = maybe_analysis_tx {
+                    analysis_tx.send(AnalysisMessage::StopThread).await
+                        .expect("couldn't send Exit message to analysis thread")
+                }
             },
             Err(err) => {
                 error!("Unable to listen for shutdown signal: {}", err);
@@ -125,18 +135,21 @@ async fn main() -> Result<(), RayhunterError> {
 
     let qmdl_store_lock = Arc::new(RwLock::new(init_qmdl_store(&config).await?));
     let (tx, rx) = mpsc::channel::<DiagDeviceCtrlMessage>(1);
+    let mut maybe_analysis_tx = None;
     if !config.readonly_mode {
         let mut dev = DiagDevice::new().await
             .map_err(RayhunterError::DiagInitError)?;
         dev.config_logs().await
             .map_err(RayhunterError::DiagInitError)?;
 
-        run_diag_read_thread(&task_tracker, dev, rx, qmdl_store_lock.clone());
+        let analysis_tx = run_analysis_thread(&task_tracker);
+        run_diag_read_thread(&task_tracker, dev, rx, qmdl_store_lock.clone(), analysis_tx.clone());
+        maybe_analysis_tx = Some(analysis_tx);
     }
 
     let (server_shutdown_tx, server_shutdown_rx) = oneshot::channel::<()>();
-    run_ctrl_c_thread(&task_tracker, tx.clone(), server_shutdown_tx, qmdl_store_lock.clone());
-    run_server(&task_tracker, &config, qmdl_store_lock.clone(), server_shutdown_rx, tx).await;
+    run_ctrl_c_thread(&task_tracker, tx.clone(), server_shutdown_tx, qmdl_store_lock.clone(), maybe_analysis_tx.clone());
+    run_server(&task_tracker, &config, qmdl_store_lock.clone(), server_shutdown_rx, tx, maybe_analysis_tx).await;
 
     task_tracker.close();
     task_tracker.wait().await;
diff --git a/bin/src/diag.rs b/bin/src/diag.rs
index 440f8be..e9746eb 100644
--- a/bin/src/diag.rs
+++ b/bin/src/diag.rs
@@ -6,13 +6,14 @@ use axum::http::StatusCode;
 use rayhunter::diag::DataType;
 use rayhunter::diag_device::DiagDevice;
 use tokio::sync::RwLock;
-use tokio::sync::mpsc::Receiver;
+use tokio::sync::mpsc::{Receiver, Sender};
 use rayhunter::qmdl::QmdlWriter;
 use log::{debug, error, info};
 use tokio::fs::File;
 use tokio_util::task::TaskTracker;
 use futures::{StreamExt, TryStreamExt};
 
+use crate::analysis::AnalysisMessage;
 use crate::qmdl_store::QmdlStore;
 use crate::server::ServerState;
 
@@ -22,7 +23,13 @@ pub enum DiagDeviceCtrlMessage {
     Exit,
 }
 
-pub fn run_diag_read_thread(task_tracker: &TaskTracker, mut dev: DiagDevice, mut qmdl_file_rx: Receiver<DiagDeviceCtrlMessage>, qmdl_store_lock: Arc<RwLock<QmdlStore>>) {
+pub fn run_diag_read_thread(
+    task_tracker: &TaskTracker,
+    mut dev: DiagDevice,
+    mut qmdl_file_rx: Receiver<DiagDeviceCtrlMessage>,
+    qmdl_store_lock: Arc<RwLock<QmdlStore>>,
+    analysis_tx: Sender<AnalysisMessage>
+) {
     task_tracker.spawn(async move {
         let initial_file = qmdl_store_lock.write().await.new_entry().await.expect("failed creating QMDL file entry");
         let mut qmdl_writer: Option<QmdlWriter<File>> = Some(QmdlWriter::new(initial_file));
@@ -33,8 +40,14 @@ pub fn run_diag_read_thread(task_tracker: &TaskTracker, mut dev: DiagDevice, mut
                     match msg {
                         Some(DiagDeviceCtrlMessage::StartRecording(new_writer)) => {
                             qmdl_writer = Some(new_writer);
+                            analysis_tx.send(AnalysisMessage::Reset).await
+                                .expect("failed to send message to analysis thread");
+                        },
+                        Some(DiagDeviceCtrlMessage::StopRecording) => {
+                            qmdl_writer = None;
+                            analysis_tx.send(AnalysisMessage::Reset).await
+                                .expect("failed to send message to analysis thread");
                         },
-                        Some(DiagDeviceCtrlMessage::StopRecording) => qmdl_writer = None,
                         // None means all the Senders have been dropped, so it's
                         // time to go
                         Some(DiagDeviceCtrlMessage::Exit) | None => {
@@ -59,6 +72,9 @@ pub fn run_diag_read_thread(task_tracker: &TaskTracker, mut dev: DiagDevice, mut
                                 let index = qmdl_store.current_entry.expect("DiagDevice had qmdl_writer, but QmdlStore didn't have current entry???");
                                 qmdl_store.update_entry(index, writer.total_written).await
                                     .expect("failed to update qmdl file size");
+                                debug!("sending container to analysis thread...");
+                                analysis_tx.send(AnalysisMessage::AnalyzeContainer(container)).await
+                                    .expect("failed sending messages container to analysis thread");
                                 debug!("done!");
                             } else {
                                 debug!("no qmdl_writer set, continuing...");
diff --git a/bin/src/pcap.rs b/bin/src/pcap.rs
index 95f655a..78a5802 100644
--- a/bin/src/pcap.rs
+++ b/bin/src/pcap.rs
@@ -1,7 +1,7 @@
 use crate::ServerState;
 
 use rayhunter::diag::DataType;
-use rayhunter::gsmtap_parser::GsmtapParser;
+use rayhunter::gsmtap_parser;
 use rayhunter::pcap::GsmtapPcapWriter;
 use rayhunter::qmdl::QmdlReader;
 use axum::body::Body;
@@ -34,7 +34,6 @@ pub async fn get_pcap(State(state): State>, Path(qmdl_name): Pa
         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", e)))?;
     // the QMDL reader should stop at the last successfully written data chunk
     // (entry.size_bytes)
-    let mut gsmtap_parser = GsmtapParser::new();
     let (reader, writer) = duplex(1024);
     let mut pcap_writer = GsmtapPcapWriter::new(writer).await.unwrap();
     pcap_writer.write_iface_header().await.unwrap();
@@ -48,7 +47,7 @@ pub async fn get_pcap(State(state): State>, Path(qmdl_name): Pa
             for maybe_msg in container.into_messages() {
                 match maybe_msg {
                     Ok(msg) => {
-                        let maybe_gsmtap_msg = gsmtap_parser.parse(msg)
+                        let maybe_gsmtap_msg = gsmtap_parser::parse(msg)
                             .expect("error parsing gsmtap message");
                         if let Some((timestamp, gsmtap_msg)) = maybe_gsmtap_msg {
                             pcap_writer.write_gsmtap_message(gsmtap_msg, timestamp).await
diff --git a/bin/src/server.rs b/bin/src/server.rs
index 783130d..290bd4b 100644
--- a/bin/src/server.rs
+++ b/bin/src/server.rs
@@ -11,6 +11,7 @@ use tokio::sync::RwLock;
 use tokio_util::io::ReaderStream;
 use include_dir::{include_dir, Dir};
 
+use crate::analysis::AnalysisMessage;
 use crate::DiagDeviceCtrlMessage;
 use crate::qmdl_store::QmdlStore;
 
@@ -18,6 +19,7 @@ pub struct ServerState {
     pub qmdl_store_lock: Arc<RwLock<QmdlStore>>,
     pub diag_device_ctrl_sender: Sender<DiagDeviceCtrlMessage>,
     pub readonly_mode: bool,
+    pub maybe_analysis_tx: Option<Sender<AnalysisMessage>>,
 }
 
 pub async fn get_qmdl(State(state): State>, Path(qmdl_name): Path) -> Result {
diff --git a/bin/static/index.html b/bin/static/index.html
index 4b1e308..878f79d 100644
--- a/bin/static/index.html
+++ b/bin/static/index.html
@@ -34,5 +34,9 @@
         

System stats

Loading...
+
+

Analysis Report

+
Loading...
+
diff --git a/bin/static/js/main.js b/bin/static/js/main.js index a6e1323..6cfa738 100644 --- a/bin/static/js/main.js +++ b/bin/static/js/main.js @@ -3,6 +3,10 @@ async function populateDivs() { const systemStatsDiv = document.getElementById('system-stats'); systemStatsDiv.innerHTML = JSON.stringify(systemStats, null, 2); + const analysisReport = await getAnalysisReport(); + const analysisReportDiv = document.getElementById('analysis-report'); + analysisReportDiv.innerHTML = JSON.stringify(analysisReport, null, 2); + const qmdlManifest = await getQmdlManifest(); updateQmdlManifestTable(qmdlManifest); } @@ -49,6 +53,10 @@ function createEntryRow(entry) { return row; } +async function getAnalysisReport() { + return JSON.parse(await req('GET', '/api/analysis-report')); +} + async function getSystemStats() { return JSON.parse(await req('GET', '/api/system-stats')); } diff --git a/lib/src/analysis/analyzer.rs b/lib/src/analysis/analyzer.rs index f80fdd0..5501de9 100644 --- a/lib/src/analysis/analyzer.rs +++ b/lib/src/analysis/analyzer.rs @@ -1,7 +1,10 @@ use std::borrow::Cow; +use chrono::{DateTime, FixedOffset}; use serde::Serialize; -use super::information_element::InformationElement; +use crate::{diag::MessagesContainer, gsmtap_parser}; + +use super::{information_element::InformationElement, lte_downgrade::LteSib6And7DowngradeAnalyzer}; /// Qualitative measure of how severe a Warning event type is. 
/// The levels should break down like this: @@ -55,22 +58,117 @@ pub trait Analyzer { fn analyze_information_element(&mut self, ie: &InformationElement) -> Option; } +#[derive(Serialize, Debug)] +pub struct AnalyzerMetadata { + name: String, + description: String, +} + +#[derive(Serialize, Debug)] +pub struct ReportMetadata { + num_packets_analyzed: usize, + num_packets_skipped: usize, + num_warnings: usize, + first_packet_time: Option>, + last_packet_time: Option>, + analyzers: Vec, +} + +#[derive(Serialize, Debug, Clone)] +pub struct PacketAnalysis { + timestamp: DateTime, + events: Vec>, +} + +#[derive(Serialize, Debug)] +pub struct AnalysisReport { + metadata: ReportMetadata, + analysis: Vec, +} + pub struct Harness { - analyzers: Vec>, + analyzers: Vec>, + pub num_packets_analyzed: usize, + pub num_warnings: usize, + pub skipped_message_reasons: Vec, + pub first_packet_time: Option>, + pub last_packet_time: Option>, + pub analysis: Vec, } impl Harness { pub fn new() -> Self { Self { analyzers: Vec::new(), + num_packets_analyzed: 0, + skipped_message_reasons: Vec::new(), + num_warnings: 0, + first_packet_time: None, + last_packet_time: None, + analysis: Vec::new(), } } - pub fn add_analyzer(&mut self, analyzer: Box) { + pub fn new_with_all_analyzers() -> Self { + let mut harness = Harness::new(); + harness.add_analyzer(Box::new(LteSib6And7DowngradeAnalyzer{})); + harness + } + + pub fn add_analyzer(&mut self, analyzer: Box) { self.analyzers.push(analyzer); } - pub fn analyze_information_element(&mut self, ie: &InformationElement) -> Vec> { + pub fn analyze_qmdl_messages(&mut self, container: MessagesContainer) { + for maybe_qmdl_message in container.into_messages() { + let qmdl_message = match maybe_qmdl_message { + Ok(msg) => msg, + Err(err) => { + self.skipped_message_reasons.push(format!("{:?}", err)); + continue; + } + }; + + let gsmtap_message = match gsmtap_parser::parse(qmdl_message) { + Ok(msg) => msg, + Err(err) => { + 
self.skipped_message_reasons.push(format!("{:?}", err)); + continue; + } + }; + + let Some((timestamp, gsmtap_msg)) = gsmtap_message else { + continue; + }; + + let element = match InformationElement::try_from(&gsmtap_msg) { + Ok(element) => element, + Err(err) => { + self.skipped_message_reasons.push(format!("{:?}", err)); + continue; + } + }; + + if self.first_packet_time.is_none() { + self.first_packet_time = Some(timestamp.to_datetime()); + } + + self.last_packet_time = Some(timestamp.to_datetime()); + self.num_packets_analyzed += 1; + let analysis_result = self.analyze_information_element(&element); + if analysis_result.iter().any(Option::is_some) { + self.num_warnings += analysis_result.iter() + .filter(|maybe_event| matches!(maybe_event, Some(Event { event_type: EventType::QualitativeWarning { .. }, .. }))) + .count(); + self.analysis.push(PacketAnalysis { + timestamp: timestamp.to_datetime(), + events: analysis_result, + }); + } + } + } + + fn analyze_information_element(&mut self, ie: &InformationElement) -> Vec> { self.analyzers.iter_mut() .map(|analyzer| analyzer.analyze_information_element(ie)) .collect() @@ -87,4 +185,28 @@ impl Harness { .map(|analyzer| analyzer.get_description()) .collect() } + + pub fn build_analysis_report(&self) -> AnalysisReport { + let names = self.get_names(); + let descriptions = self.get_names(); + let mut analyzers = Vec::new(); + for (name, description) in names.iter().zip(descriptions.iter()) { + analyzers.push(AnalyzerMetadata { + name: name.to_string(), + description: description.to_string(), + }); + } + + AnalysisReport { + metadata: ReportMetadata { + num_packets_analyzed: self.num_packets_analyzed, + num_packets_skipped: self.skipped_message_reasons.len(), + num_warnings: self.num_warnings, + first_packet_time: self.first_packet_time, + last_packet_time: self.last_packet_time, + analyzers, + }, + analysis: self.analysis.clone(), + } + } } diff --git a/lib/src/gsmtap_parser.rs b/lib/src/gsmtap_parser.rs index 
eacfc61..3460e9c 100644 --- a/lib/src/gsmtap_parser.rs +++ b/lib/src/gsmtap_parser.rs @@ -4,15 +4,6 @@ use crate::gsmtap::*; use log::error; use thiserror::Error; -pub struct GsmtapParser { -} - -impl Default for GsmtapParser { - fn default() -> Self { - GsmtapParser::new() - } -} - #[derive(Debug, Error)] pub enum GsmtapParserError { #[error("Invalid LteRrcOtaMessage ext header version {0}")] @@ -21,119 +12,113 @@ pub enum GsmtapParserError { InvalidLteRrcOtaHeaderPduNum(u8, u8), } -impl GsmtapParser { - pub fn new() -> Self { - GsmtapParser {} - } - - pub fn parse(&mut self, msg: Message) -> Result, GsmtapParserError> { - if let Message::Log { timestamp, body, .. } = msg { - match self.log_to_gsmtap(body)? { - Some(msg) => Ok(Some((timestamp, msg))), - None => Ok(None), - } - } else { - Ok(None) - } - } - - fn log_to_gsmtap(&self, value: LogBody) -> Result, GsmtapParserError> { - match value { - LogBody::LteRrcOtaMessage { ext_header_version, packet } => { - let gsmtap_type = match ext_header_version { - 0x02 | 0x03 | 0x04 | 0x06 | 0x07 | 0x08 | 0x0d | 0x16 => match packet.get_pdu_num() { - 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), - 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), - 3 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), - 4 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), - 5 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), - 6 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), - 7 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), - 8 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), - pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), - }, - 0x09 | 0x0c => match packet.get_pdu_num() { - 8 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), - 9 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), - 10 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), - 11 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), - 12 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), - 13 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), - 14 => 
GsmtapType::LteRrc(LteRrcSubtype::UlCcch), - 15 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), - pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), - }, - 0x0e..=0x10 => match packet.get_pdu_num() { - 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), - 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), - 4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), - 5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), - 6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), - 7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), - 8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), - 9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), - pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), - }, - 0x13 | 0x1a | 0x1b => match packet.get_pdu_num() { - 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), - 3 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), - 6 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), - 7 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), - 8 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), - 9 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), - 10 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), - 11 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), - 45 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb), - 46 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb), - 47 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb), - 48 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb), - 49 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb), - 50 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb), - 52 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb), - pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), - } - 0x14 | 0x18 | 0x19 => match packet.get_pdu_num() { - 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), - 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), - 4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), - 5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), - 6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), - 7 => 
GsmtapType::LteRrc(LteRrcSubtype::DlDcch), - 8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), - 9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), - 54 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb), - 55 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb), - 56 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb), - 57 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb), - 58 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb), - 59 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb), - 61 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb), - pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), - }, - _ => return Err(GsmtapParserError::InvalidLteRrcOtaExtHeaderVersion(ext_header_version)), - }; - let mut header = GsmtapHeader::new(gsmtap_type); - // Wireshark GSMTAP only accepts 14 bits of ARFCN - header.arfcn = packet.get_earfcn().try_into().unwrap_or(0); - header.frame_number = packet.get_sfn(); - header.subslot = packet.get_subfn(); - Ok(Some(GsmtapMessage { - header, - payload: packet.take_payload(), - })) - }, - LogBody::Nas4GMessage { msg, .. } => { - // currently we only handle "plain" (i.e. non-secure) NAS messages - let header = GsmtapHeader::new(GsmtapType::LteNas(LteNasSubtype::Plain)); - Ok(Some(GsmtapMessage { - header, - payload: msg, - })) - }, - _ => { - error!("gsmtap_sink: ignoring unhandled log type: {:?}", value); - Ok(None) - }, +pub fn parse(msg: Message) -> Result, GsmtapParserError> { + if let Message::Log { timestamp, body, .. } = msg { + match log_to_gsmtap(body)? 
{ + Some(msg) => Ok(Some((timestamp, msg))), + None => Ok(None), } + } else { + Ok(None) + } +} + +fn log_to_gsmtap(value: LogBody) -> Result, GsmtapParserError> { + match value { + LogBody::LteRrcOtaMessage { ext_header_version, packet } => { + let gsmtap_type = match ext_header_version { + 0x02 | 0x03 | 0x04 | 0x06 | 0x07 | 0x08 | 0x0d | 0x16 => match packet.get_pdu_num() { + 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), + 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), + 3 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), + 4 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), + 5 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), + 6 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), + 7 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), + 8 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), + pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), + }, + 0x09 | 0x0c => match packet.get_pdu_num() { + 8 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), + 9 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), + 10 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), + 11 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), + 12 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), + 13 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), + 14 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), + 15 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), + pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), + }, + 0x0e..=0x10 => match packet.get_pdu_num() { + 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), + 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), + 4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), + 5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), + 6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), + 7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), + 8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), + 9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), + pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), + }, + 0x13 | 0x1a | 0x1b 
=> match packet.get_pdu_num() { + 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), + 3 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), + 6 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), + 7 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), + 8 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), + 9 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), + 10 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), + 11 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), + 45 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb), + 46 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb), + 47 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb), + 48 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb), + 49 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb), + 50 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb), + 52 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb), + pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), + } + 0x14 | 0x18 | 0x19 => match packet.get_pdu_num() { + 1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch), + 2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch), + 4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH), + 5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH), + 6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch), + 7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch), + 8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch), + 9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch), + 54 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb), + 55 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb), + 56 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb), + 57 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb), + 58 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb), + 59 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb), + 61 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb), + pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)), + }, + _ => return Err(GsmtapParserError::InvalidLteRrcOtaExtHeaderVersion(ext_header_version)), + }; + let mut header = GsmtapHeader::new(gsmtap_type); + // Wireshark 
GSMTAP only accepts 14 bits of ARFCN + header.arfcn = packet.get_earfcn().try_into().unwrap_or(0); + header.frame_number = packet.get_sfn(); + header.subslot = packet.get_subfn(); + Ok(Some(GsmtapMessage { + header, + payload: packet.take_payload(), + })) + }, + LogBody::Nas4GMessage { msg, .. } => { + // currently we only handle "plain" (i.e. non-secure) NAS messages + let header = GsmtapHeader::new(GsmtapType::LteNas(LteNasSubtype::Plain)); + Ok(Some(GsmtapMessage { + header, + payload: msg, + })) + }, + _ => { + error!("gsmtap_sink: ignoring unhandled log type: {:?}", value); + Ok(None) + }, } } diff --git a/lib/tests/test_lte_parsing.rs b/lib/tests/test_lte_parsing.rs index 187807d..afc3829 100644 --- a/lib/tests/test_lte_parsing.rs +++ b/lib/tests/test_lte_parsing.rs @@ -1,17 +1,12 @@ -use rayhunter::diag::{ - Message, - LogBody, - LteRrcOtaPacket, - Timestamp, -}; -use rayhunter::gsmtap_parser::GsmtapParser; +use rayhunter::{diag::{ + LogBody, LteRrcOtaPacket, Message, Timestamp +}, gsmtap_parser}; use deku::prelude::*; -// Tests here are based on https://github.com/fgsect/scat/blob/97442580e628de414c9f7c2a185f4e28d0ee7523/tests/test_diagltelogparser.py +// Tests here are based on https://github.com/fgsect/scat/blob/97442580e628de414c9f7c2a185f4e28d0ee7523/tests/test_diaglteloggsmtap_parser::py #[test] fn test_lte_rrc_ota() { - let mut parser = GsmtapParser::new(); let v26_binary = &[ 0x10, 0x0, 0x23, 0x0, 0x23, 0x0, 0xc0, 0xb0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1a, 0xf, 0x40, 0xf, 0x40, 0x1, 0xe, 0x1, 0x13, 0x7, @@ -42,7 +37,7 @@ fn test_lte_rrc_ota() { } } }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[0x10, 0x15]); assert_eq!(gsmtap_msg.header.packet_type, 13); assert_eq!(gsmtap_msg.header.timeslot, 0); @@ -85,7 +80,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let 
(_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x10, 0x15, ]); @@ -132,7 +127,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x40, 0x85, 0x8e, 0xc4, 0xe5, 0xbf, 0xe0, 0x50, 0xdc, 0x29, 0x15, 0x16, 0x00, @@ -183,7 +178,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x08, 0x10, 0xa7, 0x14, 0x53, 0x59, 0xa6, 0x05, 0x43, 0x68, 0xc0, 0x3b, 0xda, 0x30, 0x04, 0xa6, @@ -229,7 +224,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x28, 0x18, 0x40, 0x16, 0x08, 0x08, 0x80, 0x00, 0x00, @@ -274,7 +269,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x40, 0x0c, 0x8e, 0xc9, 0x42, 0x89, 0xe0, ]); @@ -324,7 +319,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x08, 0x10, 0xa5, 0x34, 0x61, 0x41, 0xa3, 0x1c, 0x31, 0x68, 0x04, 0x40, 0x1a, 0x00, 0x49, 0x16, @@ -370,7 +365,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[0x2c, 0x00]); assert_eq!(gsmtap_msg.header.packet_type, 13); assert_eq!(gsmtap_msg.header.timeslot, 0); @@ -412,7 +407,7 @@ fn test_lte_rrc_ota() { }, }, }); - let 
(_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x40, 0x0b, 0x8e, 0xc1, 0xdd, 0x13, 0xb0, ]); @@ -455,7 +450,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[0x2e, 0x02]); assert_eq!(gsmtap_msg.header.packet_type, 13); assert_eq!(gsmtap_msg.header.timeslot, 0); @@ -501,7 +496,7 @@ fn test_lte_rrc_ota() { }, }, }); - let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap(); + let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap(); assert_eq!(&gsmtap_msg.payload, &[ 0x40, 0x49, 0x88, 0x05, 0xc0, 0x97, 0x02, 0xd3, 0xb0, 0x98, 0x1c, 0x20, 0xa0, 0x81, 0x8c, 0x43, From 4a5bede4ee8de1655fd0698f8f11b759dc03c6e5 Mon Sep 17 00:00:00 2001 From: Will Greenberg Date: Wed, 8 May 2024 15:00:24 -0700 Subject: [PATCH 2/3] lib: fix overzealous regex --- lib/tests/test_lte_parsing.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/tests/test_lte_parsing.rs b/lib/tests/test_lte_parsing.rs index afc3829..475545a 100644 --- a/lib/tests/test_lte_parsing.rs +++ b/lib/tests/test_lte_parsing.rs @@ -3,7 +3,7 @@ use rayhunter::{diag::{ }, gsmtap_parser}; use deku::prelude::*; -// Tests here are based on https://github.com/fgsect/scat/blob/97442580e628de414c9f7c2a185f4e28d0ee7523/tests/test_diaglteloggsmtap_parser::py +// Tests here are based on https://github.com/fgsect/scat/blob/97442580e628de414c9f7c2a185f4e28d0ee7523/tests/test_diagltelogparser.py #[test] fn test_lte_rrc_ota() { From bfc688ad21f106a16ba2d5deadfcbe35e49ff012 Mon Sep 17 00:00:00 2001 From: Will Greenberg Date: Thu, 9 May 2024 14:46:41 -0700 Subject: [PATCH 3/3] daemon: switch to writing heuristics output to ND-JSON ND-JSON (newline-delimited JSON) is just a file with a list of JSON objects separated by newlines. 
This way, as the analyzer harness processes new packets, it can simply append JSON-serialized results to a file without parsing the entire thing first. Also simplifies the analysis stuff to all operate in the diag thread. --- bin/src/analysis.rs | 56 -------------- bin/src/check.rs | 7 +- bin/src/daemon.rs | 38 ++++------ bin/src/diag.rs | 138 +++++++++++++++++++++++++++------- bin/src/error.rs | 4 +- bin/src/pcap.rs | 6 +- bin/src/qmdl_store.rs | 139 ++++++++++++++++++++++------------- bin/src/server.rs | 12 ++- bin/static/js/main.js | 7 +- lib/src/analysis/analyzer.rs | 73 +++++++----------- 10 files changed, 256 insertions(+), 224 deletions(-) delete mode 100644 bin/src/analysis.rs diff --git a/bin/src/analysis.rs b/bin/src/analysis.rs deleted file mode 100644 index a9d23c5..0000000 --- a/bin/src/analysis.rs +++ /dev/null @@ -1,56 +0,0 @@ -use std::sync::Arc; - -use axum::{extract::State, http::StatusCode, Json}; -use log::error; -use rayhunter::{analysis::analyzer::{AnalysisReport, Harness}, diag::MessagesContainer}; -use tokio::sync; -use tokio_util::task::TaskTracker; - -use crate::server::ServerState; - -#[derive(Debug)] -pub enum AnalysisMessage { - Reset, - GetReport(sync::oneshot::Sender), - AnalyzeContainer(MessagesContainer), - StopThread, -} - -pub fn run_analysis_thread(task_tracker: &TaskTracker) -> sync::mpsc::Sender { - let (tx, mut rx) = sync::mpsc::channel(5); - - task_tracker.spawn(async move { - let mut harness = Harness::new_with_all_analyzers(); - loop { - match rx.recv().await { - Some(AnalysisMessage::GetReport(sender)) => { - // this might fail if the client closes their connection - // before we're done building the report - if let Err(e) = sender.send(harness.build_analysis_report()) { - error!("failed to send analysis report: {:?}", e); - } - }, - Some(AnalysisMessage::Reset) => harness = Harness::new_with_all_analyzers(), - Some(AnalysisMessage::AnalyzeContainer(container)) => harness.analyze_qmdl_messages(container), - 
Some(AnalysisMessage::StopThread) | None => break, - } - } - }); - - tx -} - -pub async fn get_analysis_report(State(state): State>) -> Result, (StatusCode, String)> { - if state.readonly_mode { - return Err((StatusCode::FORBIDDEN, "server is in readonly mode".to_string())); - } - let analysis_tx = state.maybe_analysis_tx.as_ref().unwrap(); - let (report_tx, report_rx) = tokio::sync::oneshot::channel(); - if let Err(e) = analysis_tx.send(AnalysisMessage::GetReport(report_tx)).await { - return Err((StatusCode::INTERNAL_SERVER_ERROR, format!("error reaching analysis thread: {:?}", e))); - } - match report_rx.await { - Ok(report) => Ok(Json(report)), - Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, format!("error fetching analysis report: {:?}", e))) - } -} diff --git a/bin/src/check.rs b/bin/src/check.rs index 0383c50..d844a53 100644 --- a/bin/src/check.rs +++ b/bin/src/check.rs @@ -23,10 +23,9 @@ async fn main() { let mut qmdl_reader = QmdlReader::new(qmdl_file, Some(file_size as usize)); let mut qmdl_stream = pin!(qmdl_reader.as_stream() .try_filter(|container| future::ready(container.data_type == DataType::UserSpace))); + println!("{}\n", serde_json::to_string(&harness.get_metadata()).expect("failed to serialize report metadata")); while let Some(container) = qmdl_stream.try_next().await.expect("failed getting QMDL container") { - harness.analyze_qmdl_messages(container) + let row = harness.analyze_qmdl_messages(container); + println!("{}\n", serde_json::to_string(&row).expect("failed to serialize row")); } - - let report = harness.build_analysis_report(); - println!("{}", serde_json::to_string(&report).expect("failed to serialize report")); } diff --git a/bin/src/daemon.rs b/bin/src/daemon.rs index 9aad6c5..e1771da 100644 --- a/bin/src/daemon.rs +++ b/bin/src/daemon.rs @@ -1,4 +1,3 @@ -mod analysis; mod config; mod error; mod pcap; @@ -9,15 +8,14 @@ mod diag; use crate::config::{parse_config, parse_args}; use crate::diag::run_diag_read_thread; -use 
crate::qmdl_store::QmdlStore; +use crate::qmdl_store::RecordingStore; use crate::server::{ServerState, get_qmdl, serve_static}; use crate::pcap::get_pcap; use crate::stats::get_system_stats; use crate::error::RayhunterError; -use analysis::{get_analysis_report, run_analysis_thread, AnalysisMessage}; use axum::response::Redirect; -use diag::{DiagDeviceCtrlMessage, start_recording, stop_recording}; +use diag::{get_analysis_report, start_recording, stop_recording, DiagDeviceCtrlMessage}; use log::{info, error}; use rayhunter::diag_device::DiagDevice; use axum::routing::{get, post}; @@ -37,16 +35,14 @@ use std::sync::Arc; async fn run_server( task_tracker: &TaskTracker, config: &config::Config, - qmdl_store_lock: Arc>, + qmdl_store_lock: Arc>, server_shutdown_rx: oneshot::Receiver<()>, - diag_device_sender: Sender, - maybe_analysis_tx: Option> + diag_device_sender: Sender ) -> JoinHandle<()> { let state = Arc::new(ServerState { qmdl_store_lock, diag_device_ctrl_sender: diag_device_sender, - readonly_mode: config.readonly_mode, - maybe_analysis_tx, + readonly_mode: config.readonly_mode }); let app = Router::new() @@ -77,10 +73,10 @@ async fn server_shutdown_signal(server_shutdown_rx: oneshot::Receiver<()>) { // Loads a QmdlStore if one exists, and if not, only create one if we're not in // readonly mode. 
-async fn init_qmdl_store(config: &config::Config) -> Result { - match (QmdlStore::exists(&config.qmdl_store_path).await?, config.readonly_mode) { - (true, _) => Ok(QmdlStore::load(&config.qmdl_store_path).await?), - (false, false) => Ok(QmdlStore::create(&config.qmdl_store_path).await?), +async fn init_qmdl_store(config: &config::Config) -> Result { + match (RecordingStore::exists(&config.qmdl_store_path).await?, config.readonly_mode) { + (true, _) => Ok(RecordingStore::load(&config.qmdl_store_path).await?), + (false, false) => Ok(RecordingStore::create(&config.qmdl_store_path).await?), (false, true) => Err(RayhunterError::NoStoreReadonlyMode(config.qmdl_store_path.clone())), } } @@ -92,8 +88,7 @@ fn run_ctrl_c_thread( task_tracker: &TaskTracker, diag_device_sender: Sender, server_shutdown_tx: oneshot::Sender<()>, - qmdl_store_lock: Arc>, - maybe_analysis_tx: Option> + qmdl_store_lock: Arc> ) -> JoinHandle> { task_tracker.spawn(async move { match tokio::signal::ctrl_c().await { @@ -109,10 +104,6 @@ fn run_ctrl_c_thread( .expect("couldn't send server shutdown signal"); diag_device_sender.send(DiagDeviceCtrlMessage::Exit).await .expect("couldn't send Exit message to diag thread"); - if let Some(analysis_tx) = maybe_analysis_tx { - analysis_tx.send(AnalysisMessage::StopThread).await - .expect("couldn't send Exit message to analysis thread") - } }, Err(err) => { error!("Unable to listen for shutdown signal: {}", err); @@ -135,21 +126,18 @@ async fn main() -> Result<(), RayhunterError> { let qmdl_store_lock = Arc::new(RwLock::new(init_qmdl_store(&config).await?)); let (tx, rx) = mpsc::channel::(1); - let mut maybe_analysis_tx = None; if !config.readonly_mode { let mut dev = DiagDevice::new().await .map_err(RayhunterError::DiagInitError)?; dev.config_logs().await .map_err(RayhunterError::DiagInitError)?; - let analysis_tx = run_analysis_thread(&task_tracker); - run_diag_read_thread(&task_tracker, dev, rx, qmdl_store_lock.clone(), analysis_tx.clone()); - 
maybe_analysis_tx = Some(analysis_tx); + run_diag_read_thread(&task_tracker, dev, rx, qmdl_store_lock.clone()); } let (server_shutdown_tx, server_shutdown_rx) = oneshot::channel::<()>(); - run_ctrl_c_thread(&task_tracker, tx.clone(), server_shutdown_tx, qmdl_store_lock.clone(), maybe_analysis_tx.clone()); - run_server(&task_tracker, &config, qmdl_store_lock.clone(), server_shutdown_rx, tx, maybe_analysis_tx).await; + run_ctrl_c_thread(&task_tracker, tx.clone(), server_shutdown_tx, qmdl_store_lock.clone()); + run_server(&task_tracker, &config, qmdl_store_lock.clone(), server_shutdown_rx, tx).await; task_tracker.close(); task_tracker.wait().await; diff --git a/bin/src/diag.rs b/bin/src/diag.rs index e9746eb..b7bf850 100644 --- a/bin/src/diag.rs +++ b/bin/src/diag.rs @@ -1,57 +1,122 @@ use std::pin::pin; use std::sync::Arc; +use axum::body::Body; use axum::extract::State; +use axum::http::header::CONTENT_TYPE; use axum::http::StatusCode; -use rayhunter::diag::DataType; +use axum::response::{IntoResponse, Response}; +use rayhunter::analysis::analyzer::Harness; +use rayhunter::diag::{DataType, MessagesContainer}; use rayhunter::diag_device::DiagDevice; +use serde::Serialize; use tokio::sync::RwLock; -use tokio::sync::mpsc::{Receiver, Sender}; +use tokio::sync::mpsc::Receiver; use rayhunter::qmdl::QmdlWriter; use log::{debug, error, info}; use tokio::fs::File; +use tokio::io::{BufWriter, AsyncWriteExt}; +use tokio_util::io::ReaderStream; use tokio_util::task::TaskTracker; use futures::{StreamExt, TryStreamExt}; -use crate::analysis::AnalysisMessage; -use crate::qmdl_store::QmdlStore; +use crate::qmdl_store::RecordingStore; use crate::server::ServerState; pub enum DiagDeviceCtrlMessage { StopRecording, - StartRecording(QmdlWriter), + StartRecording((QmdlWriter, File)), Exit, } +struct AnalysisWriter { + writer: BufWriter, + harness: Harness, + bytes_written: usize, +} + +// We write our analysis results to a file immediately to minimize the amount of +// state Rayhunter 
has to keep track of in memory. The analysis file's format is +// Newline Delimited JSON +// (https://docs.mulesoft.com/dataweave/latest/dataweave-formats-ndjson), which +// lets us simply append new rows to the end without parsing the entire JSON +// object beforehand. +impl AnalysisWriter { + pub async fn new(file: File) -> Result { + let mut result = Self { + writer: BufWriter::new(file), + harness: Harness::new_with_all_analyzers(), + bytes_written: 0, + }; + let metadata = result.harness.get_metadata(); + result.write(&metadata).await?; + Ok(result) + } + + // Runs the analysis harness on the given container, serializing the results + // to the analysis file and returning the file's new length. + pub async fn analyze(&mut self, container: MessagesContainer) -> Result { + let row = self.harness.analyze_qmdl_messages(container); + if !row.is_empty() { + self.write(&row).await?; + } + Ok(self.bytes_written) + } + + async fn write(&mut self, value: &T) -> Result<(), std::io::Error> { + let mut value_str = serde_json::to_string(value).unwrap(); + value_str.push('\n'); + self.bytes_written += value_str.len(); + self.writer.write_all(value_str.as_bytes()).await?; + self.writer.flush().await?; + Ok(()) + } + + // Flushes any pending I/O to disk before dropping the writer + pub async fn close(mut self) -> Result<(), std::io::Error> { + self.writer.flush().await?; + Ok(()) + } +} + pub fn run_diag_read_thread( task_tracker: &TaskTracker, mut dev: DiagDevice, mut qmdl_file_rx: Receiver, - qmdl_store_lock: Arc>, - analysis_tx: Sender + qmdl_store_lock: Arc> ) { task_tracker.spawn(async move { - let initial_file = qmdl_store_lock.write().await.new_entry().await.expect("failed creating QMDL file entry"); - let mut qmdl_writer: Option> = Some(QmdlWriter::new(initial_file)); + let (initial_qmdl_file, initial_analysis_file) = qmdl_store_lock.write().await.new_entry().await.expect("failed creating QMDL file entry"); + let mut maybe_qmdl_writer: Option> = 
Some(QmdlWriter::new(initial_qmdl_file)); let mut diag_stream = pin!(dev.as_stream().into_stream()); + let mut maybe_analysis_writer = Some(AnalysisWriter::new(initial_analysis_file).await + .expect("failed to create analysis writer")); loop { tokio::select! { msg = qmdl_file_rx.recv() => { match msg { - Some(DiagDeviceCtrlMessage::StartRecording(new_writer)) => { - qmdl_writer = Some(new_writer); - analysis_tx.send(AnalysisMessage::Reset).await - .expect("failed to send message to analysis thread"); + Some(DiagDeviceCtrlMessage::StartRecording((new_writer, new_analysis_file))) => { + maybe_qmdl_writer = Some(new_writer); + if let Some(analysis_writer) = maybe_analysis_writer { + analysis_writer.close().await.expect("failed to close analysis writer"); + } + maybe_analysis_writer = Some(AnalysisWriter::new(new_analysis_file).await + .expect("failed to write to analysis file")); }, Some(DiagDeviceCtrlMessage::StopRecording) => { - qmdl_writer = None; - analysis_tx.send(AnalysisMessage::Reset).await - .expect("failed to send message to analysis thread"); + maybe_qmdl_writer = None; + if let Some(analysis_writer) = maybe_analysis_writer { + analysis_writer.close().await.expect("failed to close analysis writer"); + } + maybe_analysis_writer = None; }, // None means all the Senders have been dropped, so it's // time to go Some(DiagDeviceCtrlMessage::Exit) | None => { info!("Diag reader thread exiting..."); + if let Some(analysis_writer) = maybe_analysis_writer { + analysis_writer.close().await.expect("failed to close analysis writer"); + } return Ok(()) }, } @@ -65,20 +130,26 @@ pub fn run_diag_read_thread( } // keep track of how many bytes were written to the QMDL file so we can read // a valid block of data from it in the HTTP server - if let Some(writer) = qmdl_writer.as_mut() { - writer.write_container(&container).await.expect("failed to write to QMDL writer"); - debug!("total QMDL bytes written: {}, updating manifest...", writer.total_written); + if let 
Some(qmdl_writer) = maybe_qmdl_writer.as_mut() { + qmdl_writer.write_container(&container).await.expect("failed to write to QMDL writer"); + debug!("total QMDL bytes written: {}, updating manifest...", qmdl_writer.total_written); let mut qmdl_store = qmdl_store_lock.write().await; let index = qmdl_store.current_entry.expect("DiagDevice had qmdl_writer, but QmdlStore didn't have current entry???"); - qmdl_store.update_entry(index, writer.total_written).await + qmdl_store.update_entry_qmdl_size(index, qmdl_writer.total_written).await .expect("failed to update qmdl file size"); - debug!("sending container to analysis thread..."); - analysis_tx.send(AnalysisMessage::AnalyzeContainer(container)).await - .expect("failed sending messages container to analysis thread"); debug!("done!"); } else { debug!("no qmdl_writer set, continuing..."); } + + if let Some(analysis_writer) = maybe_analysis_writer.as_mut() { + let analysis_file_len = analysis_writer.analyze(container).await + .expect("failed to analyze container"); + let mut qmdl_store = qmdl_store_lock.write().await; + let index = qmdl_store.current_entry.expect("DiagDevice had qmdl_writer, but QmdlStore didn't have current entry???"); + qmdl_store.update_entry_analysis_size(index, analysis_file_len as usize).await + .expect("failed to update analysis file size"); + } }, Err(err) => { error!("error reading diag device: {}", err); @@ -96,10 +167,10 @@ pub async fn start_recording(State(state): State>) -> Result<(S return Err((StatusCode::FORBIDDEN, "server is in readonly mode".to_string())); } let mut qmdl_store = state.qmdl_store_lock.write().await; - let qmdl_file = qmdl_store.new_entry().await + let (qmdl_file, analysis_file) = qmdl_store.new_entry().await .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't create new qmdl entry: {}", e)))?; let qmdl_writer = QmdlWriter::new(qmdl_file); - state.diag_device_ctrl_sender.send(DiagDeviceCtrlMessage::StartRecording(qmdl_writer)).await + 
state.diag_device_ctrl_sender.send(DiagDeviceCtrlMessage::StartRecording((qmdl_writer, analysis_file))).await .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't send stop recording message: {}", e)))?; Ok((StatusCode::ACCEPTED, "ok".to_string())) } @@ -115,3 +186,20 @@ pub async fn stop_recording(State(state): State>) -> Result<(St .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't send stop recording message: {}", e)))?; Ok((StatusCode::ACCEPTED, "ok".to_string())) } + +pub async fn get_analysis_report(State(state): State>) -> Result { + let qmdl_store = state.qmdl_store_lock.read().await; + let Some(entry) = qmdl_store.get_current_entry() else { + return Err(( + StatusCode::SERVICE_UNAVAILABLE, + "No QMDL data's being recorded to analyze, try starting a new recording!".to_string() + )); + }; + let analysis_file = qmdl_store.open_entry_analysis(entry).await + .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", e)))?; + let analysis_stream = ReaderStream::new(analysis_file); + + let headers = [(CONTENT_TYPE, "application/x-ndjson")]; + let body = Body::from_stream(analysis_stream); + Ok((headers, body).into_response()) +} diff --git a/bin/src/error.rs b/bin/src/error.rs index f7a3470..6942ed1 100644 --- a/bin/src/error.rs +++ b/bin/src/error.rs @@ -1,7 +1,7 @@ use thiserror::Error; use rayhunter::diag_device::DiagDeviceError; -use crate::qmdl_store::QmdlStoreError; +use crate::qmdl_store::RecordingStoreError; #[derive(Error, Debug)] pub enum RayhunterError{ @@ -12,7 +12,7 @@ pub enum RayhunterError{ #[error("Tokio error: {0}")] TokioError(#[from] tokio::io::Error), #[error("QmdlStore error: {0}")] - QmdlStoreError(#[from] QmdlStoreError), + QmdlStoreError(#[from] RecordingStoreError), #[error("No QMDL store found at path {0}, but can't create a new one due to readonly mode")] NoStoreReadonlyMode(String), } diff --git a/bin/src/pcap.rs b/bin/src/pcap.rs index 78a5802..0a0b46c 100644 --- a/bin/src/pcap.rs +++ 
b/bin/src/pcap.rs @@ -23,14 +23,14 @@ pub async fn get_pcap(State(state): State>, Path(qmdl_name): Pa let qmdl_store = state.qmdl_store_lock.read().await; let entry = qmdl_store.entry_for_name(&qmdl_name) .ok_or((StatusCode::NOT_FOUND, format!("couldn't find qmdl file with name {}", qmdl_name)))?; - if entry.size_bytes == 0 { + if entry.qmdl_size_bytes == 0 { return Err(( StatusCode::SERVICE_UNAVAILABLE, "QMDL file is empty, try again in a bit!".to_string() )); } - let qmdl_file = qmdl_store.open_entry(&entry).await + let qmdl_file = qmdl_store.open_entry_qmdl(&entry).await .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", e)))?; // the QMDL reader should stop at the last successfully written data chunk // (entry.size_bytes) @@ -39,7 +39,7 @@ pub async fn get_pcap(State(state): State>, Path(qmdl_name): Pa pcap_writer.write_iface_header().await.unwrap(); tokio::spawn(async move { - let mut reader = QmdlReader::new(qmdl_file, Some(entry.size_bytes)); + let mut reader = QmdlReader::new(qmdl_file, Some(entry.qmdl_size_bytes)); let mut messages_stream = pin!(reader.as_stream() .try_filter(|container| future::ready(container.data_type == DataType::UserSpace))); diff --git a/bin/src/qmdl_store.rs b/bin/src/qmdl_store.rs index 0567909..510747f 100644 --- a/bin/src/qmdl_store.rs +++ b/bin/src/qmdl_store.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use chrono::{DateTime, Local}; #[derive(Debug, Error)] -pub enum QmdlStoreError { +pub enum RecordingStoreError { #[error("Can't close an entry when there's no current entry")] NoCurrentEntry, #[error("Couldn't create file: {0}")] @@ -22,7 +22,7 @@ pub enum QmdlStoreError { ParseManifestError(toml::de::Error) } -pub struct QmdlStore { +pub struct RecordingStore { pub path: PathBuf, pub manifest: Manifest, pub current_entry: Option, // index into manifest @@ -38,7 +38,8 @@ pub struct ManifestEntry { pub name: String, pub start_time: DateTime, pub last_message_time: Option>, - pub size_bytes: usize, + pub 
qmdl_size_bytes: usize, + pub analysis_size_bytes: usize, } impl ManifestEntry { @@ -48,113 +49,142 @@ impl ManifestEntry { name: format!("{}", now.timestamp()), start_time: now, last_message_time: None, - size_bytes: 0, + qmdl_size_bytes: 0, + analysis_size_bytes: 0, } } + + pub fn get_qmdl_filepath>(&self, path: P) -> PathBuf { + let mut filepath = path.as_ref().join(&self.name); + filepath.set_extension("qmdl"); + filepath + } + + pub fn get_analysis_filepath>(&self, path: P) -> PathBuf { + let mut filepath = path.as_ref().join(&self.name); + filepath.set_extension("ndjson"); + filepath + } } -impl QmdlStore { +impl RecordingStore { // Returns whether a directory with a "manifest.toml" exists at the given // path (though doesn't check if that manifest is valid) - pub async fn exists
<P>(path: P) -> Result<bool, QmdlStoreError> where P: AsRef<Path> { + pub async fn exists
<P>(path: P) -> Result<bool, RecordingStoreError> where P: AsRef<Path> { let manifest_path = path.as_ref().join("manifest.toml"); - let dir_exists = try_exists(path).await.map_err(QmdlStoreError::OpenDirError)?; - let manifest_exists = try_exists(manifest_path).await.map_err(QmdlStoreError::ReadManifestError)?; + let dir_exists = try_exists(path).await.map_err(RecordingStoreError::OpenDirError)?; + let manifest_exists = try_exists(manifest_path).await.map_err(RecordingStoreError::ReadManifestError)?; Ok(dir_exists && manifest_exists) } - // Loads an existing QmdlStore at the given path. Errors if no store exists, + // Loads an existing RecordingStore at the given path. Errors if no store exists, // or if it's malformed. - pub async fn load
<P>(path: P) -> Result<Self, QmdlStoreError> where P: AsRef<Path> { + pub async fn load
<P>(path: P) -> Result<Self, RecordingStoreError> where P: AsRef<Path> { let path: PathBuf = path.as_ref().to_path_buf(); - let manifest = QmdlStore::read_manifest(&path).await?; - Ok(QmdlStore { + let manifest = RecordingStore::read_manifest(&path).await?; + Ok(RecordingStore { path, manifest, current_entry: None, }) } - // Creates a new QmdlStore at the given path. This involves creating a dir + // Creates a new RecordingStore at the given path. This involves creating a dir // and writing an empty manifest. - pub async fn create
<P>(path: P) -> Result<Self, QmdlStoreError> where P: AsRef<Path> { + pub async fn create
<P>(path: P) -> Result<Self, RecordingStoreError> where P: AsRef<Path> { let manifest_path = path.as_ref().join("manifest.toml"); fs::create_dir_all(&path).await - .map_err(QmdlStoreError::OpenDirError)?; + .map_err(RecordingStoreError::OpenDirError)?; let mut manifest_file = File::create(&manifest_path).await - .map_err(QmdlStoreError::WriteManifestError)?; + .map_err(RecordingStoreError::WriteManifestError)?; let empty_manifest = Manifest { entries: Vec::new() }; let empty_manifest_contents = toml::to_string_pretty(&empty_manifest) .expect("failed to serialize manifest"); manifest_file.write_all(empty_manifest_contents.as_bytes()).await - .map_err(QmdlStoreError::WriteManifestError)?; - QmdlStore::load(path).await + .map_err(RecordingStoreError::WriteManifestError)?; + RecordingStore::load(path).await } - async fn read_manifest
<P>(path: P) -> Result<Manifest, QmdlStoreError> where P: AsRef<Path> { + async fn read_manifest
<P>(path: P) -> Result<Manifest, RecordingStoreError> where P: AsRef<Path> { let manifest_path = path.as_ref().join("manifest.toml"); let file_contents = fs::read_to_string(&manifest_path).await - .map_err(QmdlStoreError::ReadManifestError)?; + .map_err(RecordingStoreError::ReadManifestError)?; toml::from_str(&file_contents) - .map_err(QmdlStoreError::ParseManifestError) + .map_err(RecordingStoreError::ParseManifestError) } // Closes the current entry (if needed), creates a new entry based on the - // current time, and updates the manifest - pub async fn new_entry(&mut self) -> Result<File, QmdlStoreError> { + // current time, and updates the manifest. Returns a tuple of the entry's + // newly created QMDL file and analysis file. + pub async fn new_entry(&mut self) -> Result<(File, File), RecordingStoreError> { // if we've already got an entry open, close it if self.current_entry.is_some() { self.close_current_entry().await?; } let new_entry = ManifestEntry::new(); - let mut file_path = self.path.join(&new_entry.name); - file_path.set_extension("qmdl"); - let file = File::options() + let qmdl_filepath = new_entry.get_qmdl_filepath(&self.path); + let qmdl_file = File::options() .create(true) .write(true) - .open(&file_path).await - .map_err(QmdlStoreError::CreateFileError)?; + .open(&qmdl_filepath).await + .map_err(RecordingStoreError::CreateFileError)?; + let analysis_filepath = new_entry.get_analysis_filepath(&self.path); + let analysis_file = File::options() + .create(true) + .write(true) + .open(&analysis_filepath).await + .map_err(RecordingStoreError::CreateFileError)?; self.manifest.entries.push(new_entry); self.current_entry = Some(self.manifest.entries.len() - 1); self.write_manifest().await?; - Ok(file) + Ok((qmdl_file, analysis_file)) } // Returns the corresponding QMDL file for a given entry - pub async fn open_entry(&self, entry: &ManifestEntry) -> Result<File, QmdlStoreError> { - let mut file_path = self.path.join(&entry.name); - file_path.set_extension("qmdl"); - File::open(file_path).await - .map_err(QmdlStoreError::ReadFileError) + 
pub async fn open_entry_qmdl(&self, entry: &ManifestEntry) -> Result { + File::open(entry.get_qmdl_filepath(&self.path)).await + .map_err(RecordingStoreError::ReadFileError) + } + + // Returns the corresponding QMDL file for a given entry + pub async fn open_entry_analysis(&self, entry: &ManifestEntry) -> Result { + File::open(entry.get_analysis_filepath(&self.path)).await + .map_err(RecordingStoreError::ReadFileError) } // Unsets the current entry - pub async fn close_current_entry(&mut self) -> Result<(), QmdlStoreError> { + pub async fn close_current_entry(&mut self) -> Result<(), RecordingStoreError> { match self.current_entry { Some(_) => { self.current_entry = None; Ok(()) }, - None => Err(QmdlStoreError::NoCurrentEntry) + None => Err(RecordingStoreError::NoCurrentEntry) } } // Sets the given entry's size and updates the last_message_time to now, updating the manifest - pub async fn update_entry(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), QmdlStoreError> { - self.manifest.entries[entry_index].size_bytes = size_bytes; + pub async fn update_entry_qmdl_size(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), RecordingStoreError> { + self.manifest.entries[entry_index].qmdl_size_bytes = size_bytes; self.manifest.entries[entry_index].last_message_time = Some(Local::now()); self.write_manifest().await } - async fn write_manifest(&mut self) -> Result<(), QmdlStoreError> { + // Sets the given entry's analysis file size + pub async fn update_entry_analysis_size(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), RecordingStoreError> { + self.manifest.entries[entry_index].analysis_size_bytes = size_bytes; + self.write_manifest().await + } + + async fn write_manifest(&mut self) -> Result<(), RecordingStoreError> { let mut manifest_file = File::options() .write(true) .open(self.path.join("manifest.toml")).await - .map_err(QmdlStoreError::WriteManifestError)?; + .map_err(RecordingStoreError::WriteManifestError)?; let 
manifest_contents = toml::to_string_pretty(&self.manifest) .expect("failed to serialize manifest"); manifest_file.write_all(manifest_contents.as_bytes()).await - .map_err(QmdlStoreError::WriteManifestError)?; + .map_err(RecordingStoreError::WriteManifestError)?; Ok(()) } @@ -164,6 +194,11 @@ impl QmdlStore { .find(|entry| entry.name == name) .cloned() } + + pub fn get_current_entry(&self) -> Option<&ManifestEntry> { + let entry_index = self.current_entry?; + self.manifest.entries.get(entry_index) + } } #[cfg(test)] @@ -174,36 +209,36 @@ mod tests { #[tokio::test] async fn test_load_from_empty_dir() { let dir = TempDir::new("qmdl_store_test").unwrap(); - assert!(!QmdlStore::exists(dir.path()).await.unwrap()); - let _created_store = QmdlStore::create(dir.path()).await.unwrap(); - assert!(QmdlStore::exists(dir.path()).await.unwrap()); - let loaded_store = QmdlStore::load(dir.path()).await.unwrap(); + assert!(!RecordingStore::exists(dir.path()).await.unwrap()); + let _created_store = RecordingStore::create(dir.path()).await.unwrap(); + assert!(RecordingStore::exists(dir.path()).await.unwrap()); + let loaded_store = RecordingStore::load(dir.path()).await.unwrap(); assert_eq!(loaded_store.manifest.entries.len(), 0); } #[tokio::test] async fn test_creating_updating_and_closing_entries() { let dir = TempDir::new("qmdl_store_test").unwrap(); - let mut store = QmdlStore::create(dir.path()).await.unwrap(); + let mut store = RecordingStore::create(dir.path()).await.unwrap(); let _ = store.new_entry().await.unwrap(); let entry_index = store.current_entry.unwrap(); - assert_eq!(QmdlStore::read_manifest(dir.path()).await.unwrap(), store.manifest); + assert_eq!(RecordingStore::read_manifest(dir.path()).await.unwrap(), store.manifest); assert!(store.manifest.entries[entry_index].last_message_time.is_none()); - store.update_entry(entry_index, 1000).await.unwrap(); + store.update_entry_qmdl_size(entry_index, 1000).await.unwrap(); let entry = 
store.entry_for_name(&store.manifest.entries[entry_index].name).unwrap(); assert!(entry.last_message_time.is_some()); - assert_eq!(store.manifest.entries[entry_index].size_bytes, 1000); - assert_eq!(QmdlStore::read_manifest(dir.path()).await.unwrap(), store.manifest); + assert_eq!(store.manifest.entries[entry_index].qmdl_size_bytes, 1000); + assert_eq!(RecordingStore::read_manifest(dir.path()).await.unwrap(), store.manifest); store.close_current_entry().await.unwrap(); - assert!(matches!(store.close_current_entry().await, Err(QmdlStoreError::NoCurrentEntry))); + assert!(matches!(store.close_current_entry().await, Err(RecordingStoreError::NoCurrentEntry))); } #[tokio::test] async fn test_repeated_new_entries() { let dir = TempDir::new("qmdl_store_test").unwrap(); - let mut store = QmdlStore::create(dir.path()).await.unwrap(); + let mut store = RecordingStore::create(dir.path()).await.unwrap(); let _ = store.new_entry().await.unwrap(); let entry_index = store.current_entry.unwrap(); let _ = store.new_entry().await.unwrap(); diff --git a/bin/src/server.rs b/bin/src/server.rs index 290bd4b..bef6311 100644 --- a/bin/src/server.rs +++ b/bin/src/server.rs @@ -11,24 +11,22 @@ use tokio::sync::RwLock; use tokio_util::io::ReaderStream; use include_dir::{include_dir, Dir}; -use crate::analysis::AnalysisMessage; use crate::DiagDeviceCtrlMessage; -use crate::qmdl_store::QmdlStore; +use crate::qmdl_store::RecordingStore; pub struct ServerState { - pub qmdl_store_lock: Arc>, + pub qmdl_store_lock: Arc>, pub diag_device_ctrl_sender: Sender, - pub readonly_mode: bool, - pub maybe_analysis_tx: Option>, + pub readonly_mode: bool } pub async fn get_qmdl(State(state): State>, Path(qmdl_name): Path) -> Result { let qmdl_store = state.qmdl_store_lock.read().await; let entry = qmdl_store.entry_for_name(&qmdl_name) .ok_or((StatusCode::NOT_FOUND, format!("couldn't find qmdl file with name {}", qmdl_name)))?; - let qmdl_file = qmdl_store.open_entry(&entry).await + let qmdl_file = 
qmdl_store.open_entry_qmdl(&entry).await .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("error opening QMDL file: {}", e)))?; - let limited_qmdl_file = qmdl_file.take(entry.size_bytes as u64); + let limited_qmdl_file = qmdl_file.take(entry.qmdl_size_bytes as u64); let qmdl_stream = ReaderStream::new(limited_qmdl_file); let headers = [(CONTENT_TYPE, "application/octet-stream")]; diff --git a/bin/static/js/main.js b/bin/static/js/main.js index 6cfa738..fd78bee 100644 --- a/bin/static/js/main.js +++ b/bin/static/js/main.js @@ -33,7 +33,7 @@ function createEntryRow(entry) { name.scope = 'row'; name.innerText = entry.name; row.appendChild(name); - for (const key of ['start_time', 'last_message_time', 'size_bytes']) { + for (const key of ['start_time', 'last_message_time', 'qmdl_size_bytes']) { const td = document.createElement('td'); td.innerText = entry[key]; row.appendChild(td); @@ -54,7 +54,10 @@ function createEntryRow(entry) { } async function getAnalysisReport() { - return JSON.parse(await req('GET', '/api/analysis-report')); + const rows = await req('GET', '/api/analysis-report'); + return rows.split('\n') + .filter(row => row.length > 0) + .map(row => JSON.parse(row)); } async function getSystemStats() { diff --git a/lib/src/analysis/analyzer.rs b/lib/src/analysis/analyzer.rs index 5501de9..ef56bb6 100644 --- a/lib/src/analysis/analyzer.rs +++ b/lib/src/analysis/analyzer.rs @@ -66,11 +66,6 @@ pub struct AnalyzerMetadata { #[derive(Serialize, Debug)] pub struct ReportMetadata { - num_packets_analyzed: usize, - num_packets_skipped: usize, - num_warnings: usize, - first_packet_time: Option>, - last_packet_time: Option>, analyzers: Vec, } @@ -81,32 +76,25 @@ pub struct PacketAnalysis { } #[derive(Serialize, Debug)] -pub struct AnalysisReport { - metadata: ReportMetadata, - analysis: Vec, +pub struct AnalysisRow { + pub timestamp: DateTime, + pub skipped_message_reasons: Vec, + pub analysis: Vec, +} + +impl AnalysisRow { + pub fn is_empty(&self) -> bool { + 
self.skipped_message_reasons.is_empty() && self.analysis.is_empty() + } } pub struct Harness { analyzers: Vec>, - pub num_packets_analyzed: usize, - pub num_warnings: usize, - pub skipped_message_reasons: Vec, - pub first_packet_time: Option>, - pub last_packet_time: Option>, - pub analysis: Vec, } impl Harness { pub fn new() -> Self { - Self { - analyzers: Vec::new(), - num_packets_analyzed: 0, - skipped_message_reasons: Vec::new(), - num_warnings: 0, - first_packet_time: None, - last_packet_time: None, - analysis: Vec::new(), - } + Self { analyzers: Vec::new() } } pub fn new_with_all_analyzers() -> Self { @@ -119,12 +107,17 @@ impl Harness { self.analyzers.push(analyzer); } - pub fn analyze_qmdl_messages(&mut self, container: MessagesContainer) { + pub fn analyze_qmdl_messages(&mut self, container: MessagesContainer) -> AnalysisRow { + let mut row = AnalysisRow { + timestamp: chrono::Local::now().fixed_offset(), + skipped_message_reasons: Vec::new(), + analysis: Vec::new(), + }; for maybe_qmdl_message in container.into_messages() { let qmdl_message = match maybe_qmdl_message { Ok(msg) => msg, Err(err) => { - self.skipped_message_reasons.push(format!("{:?}", err)); + row.skipped_message_reasons.push(format!("{:?}", err)); continue; } }; @@ -132,7 +125,7 @@ impl Harness { let gsmtap_message = match gsmtap_parser::parse(qmdl_message) { Ok(msg) => msg, Err(err) => { - self.skipped_message_reasons.push(format!("{:?}", err)); + row.skipped_message_reasons.push(format!("{:?}", err)); continue; } }; @@ -144,28 +137,20 @@ impl Harness { let element = match InformationElement::try_from(&gsmtap_msg) { Ok(element) => element, Err(err) => { - self.skipped_message_reasons.push(format!("{:?}", err)); + row.skipped_message_reasons.push(format!("{:?}", err)); continue; } }; - if self.first_packet_time.is_none() { - self.first_packet_time = Some(timestamp.to_datetime()); - } - - self.last_packet_time = Some(timestamp.to_datetime()); - self.num_packets_analyzed += 1; let 
analysis_result = self.analyze_information_element(&element); if analysis_result.iter().any(Option::is_some) { - self.num_warnings += analysis_result.iter() - .filter(|maybe_event| matches!(maybe_event, Some(Event { event_type: EventType::QualitativeWarning { .. }, .. }))) - .count(); - self.analysis.push(PacketAnalysis { + row.analysis.push(PacketAnalysis { timestamp: timestamp.to_datetime(), events: analysis_result, }); } } + row } fn analyze_information_element(&mut self, ie: &InformationElement) -> Vec> { @@ -186,7 +171,7 @@ impl Harness { .collect() } - pub fn build_analysis_report(&self) -> AnalysisReport { + pub fn get_metadata(&self) -> ReportMetadata { let names = self.get_names(); let descriptions = self.get_names(); let mut analyzers = Vec::new(); @@ -197,16 +182,8 @@ impl Harness { }); } - AnalysisReport { - metadata: ReportMetadata { - num_packets_analyzed: self.num_packets_analyzed, - num_packets_skipped: self.skipped_message_reasons.len(), - num_warnings: self.num_warnings, - first_packet_time: self.first_packet_time, - last_packet_time: self.last_packet_time, - analyzers, - }, - analysis: self.analysis.clone(), + ReportMetadata { + analyzers, } } }