Merge pull request #43 from EFForg/webui-heuristics

daemon: run analysis in realtime
This commit is contained in:
Cooper Quintin
2024-06-06 12:58:54 -07:00
committed by GitHub
12 changed files with 476 additions and 344 deletions

View File

@@ -1,8 +1,6 @@
use std::{future, path::PathBuf, pin::pin};
use chrono::{DateTime, FixedOffset};
use rayhunter::{analysis::{analyzer::{Event, EventType, Harness}, information_element::InformationElement, lte_downgrade::LteSib6And7DowngradeAnalyzer}, diag::DataType, gsmtap_parser::GsmtapParser, qmdl::QmdlReader};
use rayhunter::{analysis::analyzer::Harness, diag::DataType, qmdl::QmdlReader};
use tokio::fs::File;
use serde::Serialize;
use clap::Parser;
use futures::TryStreamExt;
@@ -13,120 +11,21 @@ struct Args {
qmdl_path: PathBuf,
}
/// Name/description pair for one analyzer, serialized into the report metadata.
#[derive(Serialize, Debug)]
struct AnalyzerMetadata {
    // Human-readable analyzer name.
    name: String,
    // Human-readable description of what the analyzer detects.
    description: String,
}
/// Summary statistics for one analysis run, serialized at the top of the report.
#[derive(Serialize, Debug)]
struct ReportMetadata {
    // Count of QMDL messages successfully parsed and analyzed.
    num_packets_analyzed: usize,
    // Count of messages skipped due to parse errors.
    num_packets_skipped: usize,
    // Count of qualitative-warning events raised by analyzers.
    num_warnings: usize,
    // Timestamp of the first analyzed packet.
    first_packet_time: DateTime<FixedOffset>,
    // Timestamp of the last analyzed packet.
    last_packet_time: DateTime<FixedOffset>,
    // The analyzers that produced this report.
    analyzers: Vec<AnalyzerMetadata>,
}
/// Analysis results for a single packet: one (possibly absent) event per analyzer.
#[derive(Serialize, Debug)]
struct PacketAnalysis {
    // When the analyzed packet was captured.
    timestamp: DateTime<FixedOffset>,
    // One slot per analyzer; None when that analyzer raised no event.
    events: Vec<Option<Event>>,
}
/// Top-level JSON report: run metadata plus per-packet analysis rows.
#[derive(Serialize, Debug)]
struct AnalysisReport {
    metadata: ReportMetadata,
    analysis: Vec<PacketAnalysis>,
}
/// CLI entrypoint: streams a QMDL capture file through the analysis harness
/// and prints NDJSON — first a metadata row describing the analyzers, then
/// one analysis row per message container.
///
/// NOTE(review): this span contained diff residue (two `harness` bindings and
/// the pre-refactor inline analysis loop interleaved with the new
/// `analyze_qmdl_messages` path); resolved to the post-change version. The old
/// code also filled `descriptions` from `get_names()` — a copy-paste bug now
/// moot since the harness serializes its own metadata.
#[tokio::main]
async fn main() {
    env_logger::init();
    let args = Args::parse();
    // Run every available analyzer; the harness owns the list.
    let mut harness = Harness::new_with_all_analyzers();
    let qmdl_file = File::open(args.qmdl_path).await.expect("failed to open QMDL file");
    let file_size = qmdl_file.metadata().await.expect("failed to get QMDL file metadata").len();
    let mut qmdl_reader = QmdlReader::new(qmdl_file, Some(file_size as usize));
    // Only user-space containers carry the diagnostic messages we analyze.
    let mut qmdl_stream = pin!(qmdl_reader.as_stream()
        .try_filter(|container| future::ready(container.data_type == DataType::UserSpace)));
    // First output line: report metadata (analyzer names and descriptions).
    println!("{}\n", serde_json::to_string(&harness.get_metadata()).expect("failed to serialize report metadata"));
    while let Some(container) = qmdl_stream.try_next().await.expect("failed getting QMDL container") {
        let row = harness.analyze_qmdl_messages(container);
        println!("{}\n", serde_json::to_string(&row).expect("failed to serialize row"));
    }
}

View File

@@ -8,14 +8,14 @@ mod diag;
use crate::config::{parse_config, parse_args};
use crate::diag::run_diag_read_thread;
use crate::qmdl_store::QmdlStore;
use crate::qmdl_store::RecordingStore;
use crate::server::{ServerState, get_qmdl, serve_static};
use crate::pcap::get_pcap;
use crate::stats::get_system_stats;
use crate::error::RayhunterError;
use axum::response::Redirect;
use diag::{DiagDeviceCtrlMessage, start_recording, stop_recording};
use diag::{get_analysis_report, start_recording, stop_recording, DiagDeviceCtrlMessage};
use log::{info, error};
use rayhunter::diag_device::DiagDevice;
use axum::routing::{get, post};
@@ -35,14 +35,14 @@ use std::sync::Arc;
async fn run_server(
task_tracker: &TaskTracker,
config: &config::Config,
qmdl_store_lock: Arc<RwLock<QmdlStore>>,
qmdl_store_lock: Arc<RwLock<RecordingStore>>,
server_shutdown_rx: oneshot::Receiver<()>,
diag_device_sender: Sender<DiagDeviceCtrlMessage>
) -> JoinHandle<()> {
let state = Arc::new(ServerState {
qmdl_store_lock,
diag_device_ctrl_sender: diag_device_sender,
readonly_mode: config.readonly_mode,
readonly_mode: config.readonly_mode
});
let app = Router::new()
@@ -52,6 +52,7 @@ async fn run_server(
.route("/api/qmdl-manifest", get(get_qmdl_manifest))
.route("/api/start-recording", post(start_recording))
.route("/api/stop-recording", post(stop_recording))
.route("/api/analysis-report", get(get_analysis_report))
.route("/", get(|| async { Redirect::permanent("/index.html") }))
.route("/*path", get(serve_static))
.with_state(state);
@@ -72,10 +73,10 @@ async fn server_shutdown_signal(server_shutdown_rx: oneshot::Receiver<()>) {
// Loads a QmdlStore if one exists, and if not, only create one if we're not in
// readonly mode.
async fn init_qmdl_store(config: &config::Config) -> Result<QmdlStore, RayhunterError> {
match (QmdlStore::exists(&config.qmdl_store_path).await?, config.readonly_mode) {
(true, _) => Ok(QmdlStore::load(&config.qmdl_store_path).await?),
(false, false) => Ok(QmdlStore::create(&config.qmdl_store_path).await?),
async fn init_qmdl_store(config: &config::Config) -> Result<RecordingStore, RayhunterError> {
match (RecordingStore::exists(&config.qmdl_store_path).await?, config.readonly_mode) {
(true, _) => Ok(RecordingStore::load(&config.qmdl_store_path).await?),
(false, false) => Ok(RecordingStore::create(&config.qmdl_store_path).await?),
(false, true) => Err(RayhunterError::NoStoreReadonlyMode(config.qmdl_store_path.clone())),
}
}
@@ -87,7 +88,7 @@ fn run_ctrl_c_thread(
task_tracker: &TaskTracker,
diag_device_sender: Sender<DiagDeviceCtrlMessage>,
server_shutdown_tx: oneshot::Sender<()>,
qmdl_store_lock: Arc<RwLock<QmdlStore>>
qmdl_store_lock: Arc<RwLock<RecordingStore>>
) -> JoinHandle<Result<(), RayhunterError>> {
task_tracker.spawn(async move {
match tokio::signal::ctrl_c().await {

View File

@@ -1,44 +1,122 @@
use std::pin::pin;
use std::sync::Arc;
use axum::body::Body;
use axum::extract::State;
use axum::http::header::CONTENT_TYPE;
use axum::http::StatusCode;
use rayhunter::diag::DataType;
use axum::response::{IntoResponse, Response};
use rayhunter::analysis::analyzer::Harness;
use rayhunter::diag::{DataType, MessagesContainer};
use rayhunter::diag_device::DiagDevice;
use serde::Serialize;
use tokio::sync::RwLock;
use tokio::sync::mpsc::Receiver;
use rayhunter::qmdl::QmdlWriter;
use log::{debug, error, info};
use tokio::fs::File;
use tokio::io::{BufWriter, AsyncWriteExt};
use tokio_util::io::ReaderStream;
use tokio_util::task::TaskTracker;
use futures::{StreamExt, TryStreamExt};
use crate::qmdl_store::QmdlStore;
use crate::qmdl_store::RecordingStore;
use crate::server::ServerState;
pub enum DiagDeviceCtrlMessage {
StopRecording,
StartRecording(QmdlWriter<File>),
StartRecording((QmdlWriter<File>, File)),
Exit,
}
pub fn run_diag_read_thread(task_tracker: &TaskTracker, mut dev: DiagDevice, mut qmdl_file_rx: Receiver<DiagDeviceCtrlMessage>, qmdl_store_lock: Arc<RwLock<QmdlStore>>) {
// Streams analysis results for the current recording to an NDJSON file.
struct AnalysisWriter {
    // Buffered handle to the entry's analysis file.
    writer: BufWriter<File>,
    // The analyzer harness run over every message container.
    harness: Harness,
    // Running total of bytes written, returned from analyze() so callers can
    // keep the manifest's analysis size up to date.
    bytes_written: usize,
}
// We write our analysis results to a file immediately to minimize the amount of
// state Rayhunter has to keep track of in memory. The analysis file's format is
// Newline Delimited JSON
// (https://docs.mulesoft.com/dataweave/latest/dataweave-formats-ndjson), which
// lets us simply append new rows to the end without parsing the entire JSON
// object beforehand.
impl AnalysisWriter {
    // Creates a writer over `file` and immediately writes the harness's
    // metadata (analyzer names/descriptions) as the first NDJSON row.
    pub async fn new(file: File) -> Result<Self, std::io::Error> {
        let mut result = Self {
            writer: BufWriter::new(file),
            harness: Harness::new_with_all_analyzers(),
            bytes_written: 0,
        };
        let metadata = result.harness.get_metadata();
        result.write(&metadata).await?;
        Ok(result)
    }

    // Runs the analysis harness on the given container, serializing the results
    // to the analysis file and returning the file's new length. Rows with no
    // events and no skipped messages are not written.
    pub async fn analyze(&mut self, container: MessagesContainer) -> Result<usize, std::io::Error> {
        let row = self.harness.analyze_qmdl_messages(container);
        if !row.is_empty() {
            self.write(&row).await?;
        }
        Ok(self.bytes_written)
    }

    // Serializes `value` as one newline-terminated JSON row and flushes it so
    // readers streaming the file see complete rows. The unwrap treats a
    // serialization failure of our own types as a bug.
    async fn write<T: Serialize>(&mut self, value: &T) -> Result<(), std::io::Error> {
        let mut value_str = serde_json::to_string(value).unwrap();
        value_str.push('\n');
        self.bytes_written += value_str.len();
        self.writer.write_all(value_str.as_bytes()).await?;
        self.writer.flush().await?;
        Ok(())
    }

    // Flushes any pending I/O to disk before dropping the writer
    pub async fn close(mut self) -> Result<(), std::io::Error> {
        self.writer.flush().await?;
        Ok(())
    }
}
pub fn run_diag_read_thread(
task_tracker: &TaskTracker,
mut dev: DiagDevice,
mut qmdl_file_rx: Receiver<DiagDeviceCtrlMessage>,
qmdl_store_lock: Arc<RwLock<RecordingStore>>
) {
task_tracker.spawn(async move {
let initial_file = qmdl_store_lock.write().await.new_entry().await.expect("failed creating QMDL file entry");
let mut qmdl_writer: Option<QmdlWriter<File>> = Some(QmdlWriter::new(initial_file));
let (initial_qmdl_file, initial_analysis_file) = qmdl_store_lock.write().await.new_entry().await.expect("failed creating QMDL file entry");
let mut maybe_qmdl_writer: Option<QmdlWriter<File>> = Some(QmdlWriter::new(initial_qmdl_file));
let mut diag_stream = pin!(dev.as_stream().into_stream());
let mut maybe_analysis_writer = Some(AnalysisWriter::new(initial_analysis_file).await
.expect("failed to create analysis writer"));
loop {
tokio::select! {
msg = qmdl_file_rx.recv() => {
match msg {
Some(DiagDeviceCtrlMessage::StartRecording(new_writer)) => {
qmdl_writer = Some(new_writer);
Some(DiagDeviceCtrlMessage::StartRecording((new_writer, new_analysis_file))) => {
maybe_qmdl_writer = Some(new_writer);
if let Some(analysis_writer) = maybe_analysis_writer {
analysis_writer.close().await.expect("failed to close analysis writer");
}
maybe_analysis_writer = Some(AnalysisWriter::new(new_analysis_file).await
.expect("failed to write to analysis file"));
},
Some(DiagDeviceCtrlMessage::StopRecording) => {
maybe_qmdl_writer = None;
if let Some(analysis_writer) = maybe_analysis_writer {
analysis_writer.close().await.expect("failed to close analysis writer");
}
maybe_analysis_writer = None;
},
Some(DiagDeviceCtrlMessage::StopRecording) => qmdl_writer = None,
// None means all the Senders have been dropped, so it's
// time to go
Some(DiagDeviceCtrlMessage::Exit) | None => {
info!("Diag reader thread exiting...");
if let Some(analysis_writer) = maybe_analysis_writer {
analysis_writer.close().await.expect("failed to close analysis writer");
}
return Ok(())
},
}
@@ -52,17 +130,26 @@ pub fn run_diag_read_thread(task_tracker: &TaskTracker, mut dev: DiagDevice, mut
}
// keep track of how many bytes were written to the QMDL file so we can read
// a valid block of data from it in the HTTP server
if let Some(writer) = qmdl_writer.as_mut() {
writer.write_container(&container).await.expect("failed to write to QMDL writer");
debug!("total QMDL bytes written: {}, updating manifest...", writer.total_written);
if let Some(qmdl_writer) = maybe_qmdl_writer.as_mut() {
qmdl_writer.write_container(&container).await.expect("failed to write to QMDL writer");
debug!("total QMDL bytes written: {}, updating manifest...", qmdl_writer.total_written);
let mut qmdl_store = qmdl_store_lock.write().await;
let index = qmdl_store.current_entry.expect("DiagDevice had qmdl_writer, but QmdlStore didn't have current entry???");
qmdl_store.update_entry(index, writer.total_written).await
qmdl_store.update_entry_qmdl_size(index, qmdl_writer.total_written).await
.expect("failed to update qmdl file size");
debug!("done!");
} else {
debug!("no qmdl_writer set, continuing...");
}
if let Some(analysis_writer) = maybe_analysis_writer.as_mut() {
let analysis_file_len = analysis_writer.analyze(container).await
.expect("failed to analyze container");
let mut qmdl_store = qmdl_store_lock.write().await;
let index = qmdl_store.current_entry.expect("DiagDevice had qmdl_writer, but QmdlStore didn't have current entry???");
qmdl_store.update_entry_analysis_size(index, analysis_file_len as usize).await
.expect("failed to update analysis file size");
}
},
Err(err) => {
error!("error reading diag device: {}", err);
@@ -80,10 +167,10 @@ pub async fn start_recording(State(state): State<Arc<ServerState>>) -> Result<(S
return Err((StatusCode::FORBIDDEN, "server is in readonly mode".to_string()));
}
let mut qmdl_store = state.qmdl_store_lock.write().await;
let qmdl_file = qmdl_store.new_entry().await
let (qmdl_file, analysis_file) = qmdl_store.new_entry().await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't create new qmdl entry: {}", e)))?;
let qmdl_writer = QmdlWriter::new(qmdl_file);
state.diag_device_ctrl_sender.send(DiagDeviceCtrlMessage::StartRecording(qmdl_writer)).await
state.diag_device_ctrl_sender.send(DiagDeviceCtrlMessage::StartRecording((qmdl_writer, analysis_file))).await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't send stop recording message: {}", e)))?;
Ok((StatusCode::ACCEPTED, "ok".to_string()))
}
@@ -99,3 +186,20 @@ pub async fn stop_recording(State(state): State<Arc<ServerState>>) -> Result<(St
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("couldn't send stop recording message: {}", e)))?;
Ok((StatusCode::ACCEPTED, "ok".to_string()))
}
/// HTTP handler: streams the current recording's NDJSON analysis file.
/// Returns 503 when no recording is in progress.
pub async fn get_analysis_report(State(state): State<Arc<ServerState>>) -> Result<Response, (StatusCode, String)> {
    let store = state.qmdl_store_lock.read().await;
    let entry = match store.get_current_entry() {
        Some(entry) => entry,
        None => {
            return Err((
                StatusCode::SERVICE_UNAVAILABLE,
                "No QMDL data's being recorded to analyze, try starting a new recording!".to_string()
            ));
        }
    };
    let file = store.open_entry_analysis(entry).await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", e)))?;
    // Stream the file body rather than buffering it in memory.
    let body = Body::from_stream(ReaderStream::new(file));
    Ok(([(CONTENT_TYPE, "application/x-ndjson")], body).into_response())
}

View File

@@ -1,7 +1,7 @@
use thiserror::Error;
use rayhunter::diag_device::DiagDeviceError;
use crate::qmdl_store::QmdlStoreError;
use crate::qmdl_store::RecordingStoreError;
#[derive(Error, Debug)]
pub enum RayhunterError{
@@ -12,7 +12,7 @@ pub enum RayhunterError{
#[error("Tokio error: {0}")]
TokioError(#[from] tokio::io::Error),
#[error("QmdlStore error: {0}")]
QmdlStoreError(#[from] QmdlStoreError),
QmdlStoreError(#[from] RecordingStoreError),
#[error("No QMDL store found at path {0}, but can't create a new one due to readonly mode")]
NoStoreReadonlyMode(String),
}

View File

@@ -1,7 +1,7 @@
use crate::ServerState;
use rayhunter::diag::DataType;
use rayhunter::gsmtap_parser::GsmtapParser;
use rayhunter::gsmtap_parser;
use rayhunter::pcap::GsmtapPcapWriter;
use rayhunter::qmdl::QmdlReader;
use axum::body::Body;
@@ -23,24 +23,23 @@ pub async fn get_pcap(State(state): State<Arc<ServerState>>, Path(qmdl_name): Pa
let qmdl_store = state.qmdl_store_lock.read().await;
let entry = qmdl_store.entry_for_name(&qmdl_name)
.ok_or((StatusCode::NOT_FOUND, format!("couldn't find qmdl file with name {}", qmdl_name)))?;
if entry.size_bytes == 0 {
if entry.qmdl_size_bytes == 0 {
return Err((
StatusCode::SERVICE_UNAVAILABLE,
"QMDL file is empty, try again in a bit!".to_string()
));
}
let qmdl_file = qmdl_store.open_entry(&entry).await
let qmdl_file = qmdl_store.open_entry_qmdl(&entry).await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", e)))?;
// the QMDL reader should stop at the last successfully written data chunk
// (entry.size_bytes)
let mut gsmtap_parser = GsmtapParser::new();
let (reader, writer) = duplex(1024);
let mut pcap_writer = GsmtapPcapWriter::new(writer).await.unwrap();
pcap_writer.write_iface_header().await.unwrap();
tokio::spawn(async move {
let mut reader = QmdlReader::new(qmdl_file, Some(entry.size_bytes));
let mut reader = QmdlReader::new(qmdl_file, Some(entry.qmdl_size_bytes));
let mut messages_stream = pin!(reader.as_stream()
.try_filter(|container| future::ready(container.data_type == DataType::UserSpace)));
@@ -48,7 +47,7 @@ pub async fn get_pcap(State(state): State<Arc<ServerState>>, Path(qmdl_name): Pa
for maybe_msg in container.into_messages() {
match maybe_msg {
Ok(msg) => {
let maybe_gsmtap_msg = gsmtap_parser.parse(msg)
let maybe_gsmtap_msg = gsmtap_parser::parse(msg)
.expect("error parsing gsmtap message");
if let Some((timestamp, gsmtap_msg)) = maybe_gsmtap_msg {
pcap_writer.write_gsmtap_message(gsmtap_msg, timestamp).await

View File

@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use chrono::{DateTime, Local};
#[derive(Debug, Error)]
pub enum QmdlStoreError {
pub enum RecordingStoreError {
#[error("Can't close an entry when there's no current entry")]
NoCurrentEntry,
#[error("Couldn't create file: {0}")]
@@ -22,7 +22,7 @@ pub enum QmdlStoreError {
ParseManifestError(toml::de::Error)
}
pub struct QmdlStore {
pub struct RecordingStore {
pub path: PathBuf,
pub manifest: Manifest,
pub current_entry: Option<usize>, // index into manifest
@@ -38,7 +38,8 @@ pub struct ManifestEntry {
pub name: String,
pub start_time: DateTime<Local>,
pub last_message_time: Option<DateTime<Local>>,
pub size_bytes: usize,
pub qmdl_size_bytes: usize,
pub analysis_size_bytes: usize,
}
impl ManifestEntry {
@@ -48,113 +49,142 @@ impl ManifestEntry {
name: format!("{}", now.timestamp()),
start_time: now,
last_message_time: None,
size_bytes: 0,
qmdl_size_bytes: 0,
analysis_size_bytes: 0,
}
}
// Path of this entry's QMDL capture file under the store directory.
pub fn get_qmdl_filepath<P: AsRef<Path>>(&self, path: P) -> PathBuf {
    path.as_ref().join(&self.name).with_extension("qmdl")
}
// Path of this entry's NDJSON analysis file under the store directory.
pub fn get_analysis_filepath<P: AsRef<Path>>(&self, path: P) -> PathBuf {
    path.as_ref().join(&self.name).with_extension("ndjson")
}
}
impl QmdlStore {
impl RecordingStore {
// Returns whether a directory with a "manifest.toml" exists at the given
// path (though doesn't check if that manifest is valid)
pub async fn exists<P>(path: P) -> Result<bool, QmdlStoreError> where P: AsRef<Path> {
pub async fn exists<P>(path: P) -> Result<bool, RecordingStoreError> where P: AsRef<Path> {
let manifest_path = path.as_ref().join("manifest.toml");
let dir_exists = try_exists(path).await.map_err(QmdlStoreError::OpenDirError)?;
let manifest_exists = try_exists(manifest_path).await.map_err(QmdlStoreError::ReadManifestError)?;
let dir_exists = try_exists(path).await.map_err(RecordingStoreError::OpenDirError)?;
let manifest_exists = try_exists(manifest_path).await.map_err(RecordingStoreError::ReadManifestError)?;
Ok(dir_exists && manifest_exists)
}
// Loads an existing QmdlStore at the given path. Errors if no store exists,
// Loads an existing RecordingStore at the given path. Errors if no store exists,
// or if it's malformed.
pub async fn load<P>(path: P) -> Result<Self, QmdlStoreError> where P: AsRef<Path> {
pub async fn load<P>(path: P) -> Result<Self, RecordingStoreError> where P: AsRef<Path> {
let path: PathBuf = path.as_ref().to_path_buf();
let manifest = QmdlStore::read_manifest(&path).await?;
Ok(QmdlStore {
let manifest = RecordingStore::read_manifest(&path).await?;
Ok(RecordingStore {
path,
manifest,
current_entry: None,
})
}
// Creates a new QmdlStore at the given path. This involves creating a dir
// Creates a new RecordingStore at the given path. This involves creating a dir
// and writing an empty manifest.
pub async fn create<P>(path: P) -> Result<Self, QmdlStoreError> where P: AsRef<Path> {
pub async fn create<P>(path: P) -> Result<Self, RecordingStoreError> where P: AsRef<Path> {
let manifest_path = path.as_ref().join("manifest.toml");
fs::create_dir_all(&path).await
.map_err(QmdlStoreError::OpenDirError)?;
.map_err(RecordingStoreError::OpenDirError)?;
let mut manifest_file = File::create(&manifest_path).await
.map_err(QmdlStoreError::WriteManifestError)?;
.map_err(RecordingStoreError::WriteManifestError)?;
let empty_manifest = Manifest { entries: Vec::new() };
let empty_manifest_contents = toml::to_string_pretty(&empty_manifest)
.expect("failed to serialize manifest");
manifest_file.write_all(empty_manifest_contents.as_bytes()).await
.map_err(QmdlStoreError::WriteManifestError)?;
QmdlStore::load(path).await
.map_err(RecordingStoreError::WriteManifestError)?;
RecordingStore::load(path).await
}
async fn read_manifest<P>(path: P) -> Result<Manifest, QmdlStoreError> where P: AsRef<Path> {
async fn read_manifest<P>(path: P) -> Result<Manifest, RecordingStoreError> where P: AsRef<Path> {
let manifest_path = path.as_ref().join("manifest.toml");
let file_contents = fs::read_to_string(&manifest_path).await
.map_err(QmdlStoreError::ReadManifestError)?;
.map_err(RecordingStoreError::ReadManifestError)?;
toml::from_str(&file_contents)
.map_err(QmdlStoreError::ParseManifestError)
.map_err(RecordingStoreError::ParseManifestError)
}
// Closes the current entry (if needed), creates a new entry based on the
// current time, and updates the manifest
pub async fn new_entry(&mut self) -> Result<File, QmdlStoreError> {
// current time, and updates the manifest. Returns a tuple of the entry's
// newly created QMDL file and analysis file.
pub async fn new_entry(&mut self) -> Result<(File, File), RecordingStoreError> {
// if we've already got an entry open, close it
if self.current_entry.is_some() {
self.close_current_entry().await?;
}
let new_entry = ManifestEntry::new();
let mut file_path = self.path.join(&new_entry.name);
file_path.set_extension("qmdl");
let file = File::options()
let qmdl_filepath = new_entry.get_qmdl_filepath(&self.path);
let qmdl_file = File::options()
.create(true)
.write(true)
.open(&file_path).await
.map_err(QmdlStoreError::CreateFileError)?;
.open(&qmdl_filepath).await
.map_err(RecordingStoreError::CreateFileError)?;
let analysis_filepath = new_entry.get_analysis_filepath(&self.path);
let analysis_file = File::options()
.create(true)
.write(true)
.open(&analysis_filepath).await
.map_err(RecordingStoreError::CreateFileError)?;
self.manifest.entries.push(new_entry);
self.current_entry = Some(self.manifest.entries.len() - 1);
self.write_manifest().await?;
Ok(file)
Ok((qmdl_file, analysis_file))
}
// Returns the corresponding QMDL file for a given entry
pub async fn open_entry(&self, entry: &ManifestEntry) -> Result<File, QmdlStoreError> {
let mut file_path = self.path.join(&entry.name);
file_path.set_extension("qmdl");
File::open(file_path).await
.map_err(QmdlStoreError::ReadFileError)
pub async fn open_entry_qmdl(&self, entry: &ManifestEntry) -> Result<File, RecordingStoreError> {
File::open(entry.get_qmdl_filepath(&self.path)).await
.map_err(RecordingStoreError::ReadFileError)
}
// Returns the corresponding QMDL file for a given entry
pub async fn open_entry_analysis(&self, entry: &ManifestEntry) -> Result<File, RecordingStoreError> {
File::open(entry.get_analysis_filepath(&self.path)).await
.map_err(RecordingStoreError::ReadFileError)
}
// Unsets the current entry
pub async fn close_current_entry(&mut self) -> Result<(), QmdlStoreError> {
pub async fn close_current_entry(&mut self) -> Result<(), RecordingStoreError> {
match self.current_entry {
Some(_) => {
self.current_entry = None;
Ok(())
},
None => Err(QmdlStoreError::NoCurrentEntry)
None => Err(RecordingStoreError::NoCurrentEntry)
}
}
// Sets the given entry's size and updates the last_message_time to now, updating the manifest
pub async fn update_entry(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), QmdlStoreError> {
self.manifest.entries[entry_index].size_bytes = size_bytes;
pub async fn update_entry_qmdl_size(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), RecordingStoreError> {
self.manifest.entries[entry_index].qmdl_size_bytes = size_bytes;
self.manifest.entries[entry_index].last_message_time = Some(Local::now());
self.write_manifest().await
}
async fn write_manifest(&mut self) -> Result<(), QmdlStoreError> {
// Sets the given entry's analysis file size
pub async fn update_entry_analysis_size(&mut self, entry_index: usize, size_bytes: usize) -> Result<(), RecordingStoreError> {
self.manifest.entries[entry_index].analysis_size_bytes = size_bytes;
self.write_manifest().await
}
async fn write_manifest(&mut self) -> Result<(), RecordingStoreError> {
let mut manifest_file = File::options()
.write(true)
.open(self.path.join("manifest.toml")).await
.map_err(QmdlStoreError::WriteManifestError)?;
.map_err(RecordingStoreError::WriteManifestError)?;
let manifest_contents = toml::to_string_pretty(&self.manifest)
.expect("failed to serialize manifest");
manifest_file.write_all(manifest_contents.as_bytes()).await
.map_err(QmdlStoreError::WriteManifestError)?;
.map_err(RecordingStoreError::WriteManifestError)?;
Ok(())
}
@@ -164,6 +194,11 @@ impl QmdlStore {
.find(|entry| entry.name == name)
.cloned()
}
// Borrows the manifest entry currently being recorded, if any.
pub fn get_current_entry(&self) -> Option<&ManifestEntry> {
    self.manifest.entries.get(self.current_entry?)
}
}
#[cfg(test)]
@@ -174,36 +209,36 @@ mod tests {
// Verifies create() bootstraps a store directory that exists() detects and
// load() reads back as an empty manifest.
// (Resolved diff residue: kept the RecordingStore-named version.)
#[tokio::test]
async fn test_load_from_empty_dir() {
    let dir = TempDir::new("qmdl_store_test").unwrap();
    assert!(!RecordingStore::exists(dir.path()).await.unwrap());
    let _created_store = RecordingStore::create(dir.path()).await.unwrap();
    assert!(RecordingStore::exists(dir.path()).await.unwrap());
    let loaded_store = RecordingStore::load(dir.path()).await.unwrap();
    assert_eq!(loaded_store.manifest.entries.len(), 0);
}
// Exercises the entry lifecycle: new_entry -> update_entry_qmdl_size ->
// close_current_entry, checking the on-disk manifest stays in sync and that
// closing twice errors with NoCurrentEntry.
#[tokio::test]
async fn test_creating_updating_and_closing_entries() {
    let dir = TempDir::new("qmdl_store_test").unwrap();
    let mut store = RecordingStore::create(dir.path()).await.unwrap();
    let _ = store.new_entry().await.unwrap();
    let entry_index = store.current_entry.unwrap();
    assert_eq!(RecordingStore::read_manifest(dir.path()).await.unwrap(), store.manifest);
    assert!(store.manifest.entries[entry_index].last_message_time.is_none());
    store.update_entry_qmdl_size(entry_index, 1000).await.unwrap();
    let entry = store.entry_for_name(&store.manifest.entries[entry_index].name).unwrap();
    assert!(entry.last_message_time.is_some());
    assert_eq!(store.manifest.entries[entry_index].qmdl_size_bytes, 1000);
    assert_eq!(RecordingStore::read_manifest(dir.path()).await.unwrap(), store.manifest);
    store.close_current_entry().await.unwrap();
    assert!(matches!(store.close_current_entry().await, Err(RecordingStoreError::NoCurrentEntry)));
}
#[tokio::test]
async fn test_repeated_new_entries() {
let dir = TempDir::new("qmdl_store_test").unwrap();
let mut store = QmdlStore::create(dir.path()).await.unwrap();
let mut store = RecordingStore::create(dir.path()).await.unwrap();
let _ = store.new_entry().await.unwrap();
let entry_index = store.current_entry.unwrap();
let _ = store.new_entry().await.unwrap();

View File

@@ -12,21 +12,21 @@ use tokio_util::io::ReaderStream;
use include_dir::{include_dir, Dir};
use crate::DiagDeviceCtrlMessage;
use crate::qmdl_store::QmdlStore;
use crate::qmdl_store::RecordingStore;
pub struct ServerState {
pub qmdl_store_lock: Arc<RwLock<QmdlStore>>,
pub qmdl_store_lock: Arc<RwLock<RecordingStore>>,
pub diag_device_ctrl_sender: Sender<DiagDeviceCtrlMessage>,
pub readonly_mode: bool,
pub readonly_mode: bool
}
pub async fn get_qmdl(State(state): State<Arc<ServerState>>, Path(qmdl_name): Path<String>) -> Result<Response, (StatusCode, String)> {
let qmdl_store = state.qmdl_store_lock.read().await;
let entry = qmdl_store.entry_for_name(&qmdl_name)
.ok_or((StatusCode::NOT_FOUND, format!("couldn't find qmdl file with name {}", qmdl_name)))?;
let qmdl_file = qmdl_store.open_entry(&entry).await
let qmdl_file = qmdl_store.open_entry_qmdl(&entry).await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("error opening QMDL file: {}", e)))?;
let limited_qmdl_file = qmdl_file.take(entry.size_bytes as u64);
let limited_qmdl_file = qmdl_file.take(entry.qmdl_size_bytes as u64);
let qmdl_stream = ReaderStream::new(limited_qmdl_file);
let headers = [(CONTENT_TYPE, "application/octet-stream")];

View File

@@ -34,5 +34,9 @@
<h3>System stats</h3>
<pre id="system-stats">Loading...</pre>
</div>
<div>
<h3>Analysis Report</h3>
<pre id="analysis-report">Loading...</pre>
</div>
</body>
</html>

View File

@@ -3,6 +3,10 @@ async function populateDivs() {
const systemStatsDiv = document.getElementById('system-stats');
systemStatsDiv.innerHTML = JSON.stringify(systemStats, null, 2);
const analysisReport = await getAnalysisReport();
const analysisReportDiv = document.getElementById('analysis-report');
analysisReportDiv.innerHTML = JSON.stringify(analysisReport, null, 2);
const qmdlManifest = await getQmdlManifest();
updateQmdlManifestTable(qmdlManifest);
}
@@ -29,7 +33,7 @@ function createEntryRow(entry) {
name.scope = 'row';
name.innerText = entry.name;
row.appendChild(name);
for (const key of ['start_time', 'last_message_time', 'size_bytes']) {
for (const key of ['start_time', 'last_message_time', 'qmdl_size_bytes']) {
const td = document.createElement('td');
td.innerText = entry[key];
row.appendChild(td);
@@ -49,6 +53,13 @@ function createEntryRow(entry) {
return row;
}
// Fetches the NDJSON analysis report and parses each non-empty line into an
// object, returning the array of rows.
async function getAnalysisReport() {
    const body = await req('GET', '/api/analysis-report');
    const lines = body.split('\n').filter((line) => line.length > 0);
    return lines.map((line) => JSON.parse(line));
}
// Fetches and parses the system stats JSON blob.
async function getSystemStats() {
    const raw = await req('GET', '/api/system-stats');
    return JSON.parse(raw);
}

View File

@@ -1,7 +1,10 @@
use std::borrow::Cow;
use chrono::{DateTime, FixedOffset};
use serde::Serialize;
use super::information_element::InformationElement;
use crate::{diag::MessagesContainer, gsmtap_parser};
use super::{information_element::InformationElement, lte_downgrade::LteSib6And7DowngradeAnalyzer};
/// Qualitative measure of how severe a Warning event type is.
/// The levels should break down like this:
@@ -55,22 +58,102 @@ pub trait Analyzer {
fn analyze_information_element(&mut self, ie: &InformationElement) -> Option<Event>;
}
/// Serialized name/description pair identifying one analyzer in a report's
/// metadata (see `Harness::get_metadata`).
#[derive(Serialize, Debug)]
pub struct AnalyzerMetadata {
    // Human-readable analyzer name (from `Harness::get_names`).
    name: String,
    // Longer explanation of what the analyzer detects.
    description: String,
}
/// Report-level metadata: the set of analyzers that produced the report.
#[derive(Serialize, Debug)]
pub struct ReportMetadata {
    // One entry per analyzer registered on the harness.
    analyzers: Vec<AnalyzerMetadata>,
}
/// Analysis outcome for a single packet that triggered at least one analyzer.
#[derive(Serialize, Debug, Clone)]
pub struct PacketAnalysis {
    // Timestamp of the packet itself (converted from the diag log timestamp).
    timestamp: DateTime<FixedOffset>,
    // One slot per registered analyzer, in registration order; `None` means
    // that analyzer reported nothing for this packet.
    events: Vec<Option<Event>>,
}
/// One row of the analysis report, produced per batch of QMDL messages.
/// NOTE(review): the web UI parses the report as newline-delimited JSON, so
/// each row is presumably serialized onto its own line — confirm at the writer.
#[derive(Serialize, Debug)]
pub struct AnalysisRow {
    // Wall-clock time at which this batch was analyzed (not packet time).
    pub timestamp: DateTime<FixedOffset>,
    // Debug-formatted reasons for messages that could not be analyzed.
    pub skipped_message_reasons: Vec<String>,
    // Per-packet results for packets where some analyzer fired.
    pub analysis: Vec<PacketAnalysis>,
}
impl AnalysisRow {
    /// Returns true when this row recorded nothing at all — no skipped
    /// messages and no analyzer findings — and so need not be reported.
    pub fn is_empty(&self) -> bool {
        let no_skips = self.skipped_message_reasons.is_empty();
        let no_findings = self.analysis.is_empty();
        no_skips && no_findings
    }
}
pub struct Harness {
analyzers: Vec<Box<dyn Analyzer>>,
analyzers: Vec<Box<dyn Analyzer + Send>>,
}
impl Harness {
pub fn new() -> Self {
Self {
analyzers: Vec::new(),
}
Self { analyzers: Vec::new() }
}
pub fn add_analyzer(&mut self, analyzer: Box<dyn Analyzer>) {
pub fn new_with_all_analyzers() -> Self {
let mut harness = Harness::new();
harness.add_analyzer(Box::new(LteSib6And7DowngradeAnalyzer{}));
harness
}
/// Registers an analyzer to be run on every subsequent packet.
/// NOTE(review): the `Send` bound presumably exists so the harness can be
/// moved into the daemon's async task — confirm against the daemon code.
pub fn add_analyzer(&mut self, analyzer: Box<dyn Analyzer + Send>) {
    self.analyzers.push(analyzer);
}
pub fn analyze_information_element(&mut self, ie: &InformationElement) -> Vec<Option<Event>> {
/// Runs every registered analyzer over each message in the container,
/// producing a single `AnalysisRow` stamped with the local analysis time.
///
/// Messages that fail to parse (diag, gsmtap, or information-element stage)
/// are recorded in `skipped_message_reasons`; messages with no gsmtap
/// equivalent are silently dropped. Only packets for which at least one
/// analyzer fired contribute to `analysis`.
pub fn analyze_qmdl_messages(&mut self, container: MessagesContainer) -> AnalysisRow {
    let mut row = AnalysisRow {
        timestamp: chrono::Local::now().fixed_offset(),
        skipped_message_reasons: Vec::new(),
        analysis: Vec::new(),
    };
    for maybe_qmdl_message in container.into_messages() {
        // Stage 1+2: diag message -> gsmtap, folding both failure modes
        // into a single debug-formatted skip reason.
        let parse_result = maybe_qmdl_message
            .map_err(|err| format!("{:?}", err))
            .and_then(|qmdl_message| {
                gsmtap_parser::parse(qmdl_message).map_err(|err| format!("{:?}", err))
            });
        let maybe_gsmtap = match parse_result {
            Ok(maybe_gsmtap) => maybe_gsmtap,
            Err(reason) => {
                row.skipped_message_reasons.push(reason);
                continue;
            }
        };
        // Log types with no gsmtap translation are not an error.
        let Some((timestamp, gsmtap_msg)) = maybe_gsmtap else {
            continue;
        };
        // Stage 3: gsmtap -> information element, then run the analyzers.
        match InformationElement::try_from(&gsmtap_msg) {
            Ok(element) => {
                let events = self.analyze_information_element(&element);
                if events.iter().any(Option::is_some) {
                    row.analysis.push(PacketAnalysis {
                        timestamp: timestamp.to_datetime(),
                        events,
                    });
                }
            }
            Err(err) => row.skipped_message_reasons.push(format!("{:?}", err)),
        }
    }
    row
}
fn analyze_information_element(&mut self, ie: &InformationElement) -> Vec<Option<Event>> {
self.analyzers.iter_mut()
.map(|analyzer| analyzer.analyze_information_element(ie))
.collect()
@@ -87,4 +170,20 @@ impl Harness {
.map(|analyzer| analyzer.get_description())
.collect()
}
/// Builds the report-level metadata listing every registered analyzer's
/// name and description, in registration order.
///
/// Bug fix: `descriptions` was previously populated with a second call to
/// `get_names()`, so every analyzer's description was just a copy of its
/// name; it now calls `get_descriptions()`.
pub fn get_metadata(&self) -> ReportMetadata {
    let names = self.get_names();
    let descriptions = self.get_descriptions();
    let mut analyzers = Vec::new();
    for (name, description) in names.iter().zip(descriptions.iter()) {
        analyzers.push(AnalyzerMetadata {
            name: name.to_string(),
            description: description.to_string(),
        });
    }
    ReportMetadata {
        analyzers,
    }
}
}

View File

@@ -4,15 +4,6 @@ use crate::gsmtap::*;
use log::error;
use thiserror::Error;
pub struct GsmtapParser {
}
impl Default for GsmtapParser {
fn default() -> Self {
GsmtapParser::new()
}
}
#[derive(Debug, Error)]
pub enum GsmtapParserError {
#[error("Invalid LteRrcOtaMessage ext header version {0}")]
@@ -21,119 +12,113 @@ pub enum GsmtapParserError {
InvalidLteRrcOtaHeaderPduNum(u8, u8),
}
impl GsmtapParser {
pub fn new() -> Self {
GsmtapParser {}
}
pub fn parse(&mut self, msg: Message) -> Result<Option<(Timestamp, GsmtapMessage)>, GsmtapParserError> {
if let Message::Log { timestamp, body, .. } = msg {
match self.log_to_gsmtap(body)? {
Some(msg) => Ok(Some((timestamp, msg))),
None => Ok(None),
}
} else {
Ok(None)
}
}
fn log_to_gsmtap(&self, value: LogBody) -> Result<Option<GsmtapMessage>, GsmtapParserError> {
match value {
LogBody::LteRrcOtaMessage { ext_header_version, packet } => {
let gsmtap_type = match ext_header_version {
0x02 | 0x03 | 0x04 | 0x06 | 0x07 | 0x08 | 0x0d | 0x16 => match packet.get_pdu_num() {
1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
3 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
4 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
5 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
6 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
7 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
8 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
},
0x09 | 0x0c => match packet.get_pdu_num() {
8 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
9 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
10 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
11 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
12 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
13 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
14 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
15 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
},
0x0e..=0x10 => match packet.get_pdu_num() {
1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
},
0x13 | 0x1a | 0x1b => match packet.get_pdu_num() {
1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
3 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
6 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
7 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
8 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
9 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
10 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
11 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
45 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb),
46 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb),
47 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb),
48 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb),
49 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb),
50 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb),
52 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb),
pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
}
0x14 | 0x18 | 0x19 => match packet.get_pdu_num() {
1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
54 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb),
55 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb),
56 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb),
57 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb),
58 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb),
59 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb),
61 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb),
pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
},
_ => return Err(GsmtapParserError::InvalidLteRrcOtaExtHeaderVersion(ext_header_version)),
};
let mut header = GsmtapHeader::new(gsmtap_type);
// Wireshark GSMTAP only accepts 14 bits of ARFCN
header.arfcn = packet.get_earfcn().try_into().unwrap_or(0);
header.frame_number = packet.get_sfn();
header.subslot = packet.get_subfn();
Ok(Some(GsmtapMessage {
header,
payload: packet.take_payload(),
}))
},
LogBody::Nas4GMessage { msg, .. } => {
// currently we only handle "plain" (i.e. non-secure) NAS messages
let header = GsmtapHeader::new(GsmtapType::LteNas(LteNasSubtype::Plain));
Ok(Some(GsmtapMessage {
header,
payload: msg,
}))
},
_ => {
error!("gsmtap_sink: ignoring unhandled log type: {:?}", value);
Ok(None)
},
/// Translates a diag `Message` into its GSMTAP equivalent, keeping the
/// log's timestamp. Non-log messages, and log types with no GSMTAP
/// translation, yield `Ok(None)`.
pub fn parse(msg: Message) -> Result<Option<(Timestamp, GsmtapMessage)>, GsmtapParserError> {
    match msg {
        Message::Log { timestamp, body, .. } => {
            let maybe_gsmtap = log_to_gsmtap(body)?;
            Ok(maybe_gsmtap.map(|gsmtap_msg| (timestamp, gsmtap_msg)))
        }
        _ => Ok(None),
    }
}
/// Maps a diag `LogBody` to a GSMTAP message, or `Ok(None)` for log types
/// we don't handle.
///
/// For LTE RRC OTA logs, the PDU-number -> RRC channel mapping depends on
/// the `ext_header_version` emitted by the baseband, so each version group
/// below carries its own lookup table. An unknown version or an unknown PDU
/// number within a known version is an error.
/// NOTE(review): these tables appear to follow scat's diag LTE log parser
/// (the related tests cite it) — confirm any additions against upstream.
fn log_to_gsmtap(value: LogBody) -> Result<Option<GsmtapMessage>, GsmtapParserError> {
    match value {
        LogBody::LteRrcOtaMessage { ext_header_version, packet } => {
            let gsmtap_type = match ext_header_version {
                0x02 | 0x03 | 0x04 | 0x06 | 0x07 | 0x08 | 0x0d | 0x16 => match packet.get_pdu_num() {
                    1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
                    2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
                    3 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
                    4 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
                    5 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
                    6 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
                    7 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
                    8 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
                    pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
                },
                // Same channel order as above, but PDU numbers offset by 7.
                0x09 | 0x0c => match packet.get_pdu_num() {
                    8 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
                    9 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
                    10 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
                    11 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
                    12 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
                    13 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
                    14 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
                    15 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
                    pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
                },
                // Versions 0x0e-0x10 skip PDU 3 (no MCCH at 3; it moves to 4).
                0x0e..=0x10 => match packet.get_pdu_num() {
                    1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
                    2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
                    4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
                    5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
                    6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
                    7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
                    8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
                    9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
                    pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
                },
                // These versions add the NB-IoT (Nb) channel variants at 45+.
                0x13 | 0x1a | 0x1b => match packet.get_pdu_num() {
                    1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
                    3 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
                    6 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
                    7 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
                    8 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
                    9 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
                    10 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
                    11 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
                    45 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb),
                    46 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb),
                    47 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb),
                    48 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb),
                    49 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb),
                    50 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb),
                    52 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb),
                    pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
                }
                // Like 0x0e-0x10 for the plain channels, with Nb variants at 54+.
                0x14 | 0x18 | 0x19 => match packet.get_pdu_num() {
                    1 => GsmtapType::LteRrc(LteRrcSubtype::BcchBch),
                    2 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSch),
                    4 => GsmtapType::LteRrc(LteRrcSubtype::MCCH),
                    5 => GsmtapType::LteRrc(LteRrcSubtype::PCCH),
                    6 => GsmtapType::LteRrc(LteRrcSubtype::DlCcch),
                    7 => GsmtapType::LteRrc(LteRrcSubtype::DlDcch),
                    8 => GsmtapType::LteRrc(LteRrcSubtype::UlCcch),
                    9 => GsmtapType::LteRrc(LteRrcSubtype::UlDcch),
                    54 => GsmtapType::LteRrc(LteRrcSubtype::BcchBchNb),
                    55 => GsmtapType::LteRrc(LteRrcSubtype::BcchDlSchNb),
                    56 => GsmtapType::LteRrc(LteRrcSubtype::PcchNb),
                    57 => GsmtapType::LteRrc(LteRrcSubtype::DlCcchNb),
                    58 => GsmtapType::LteRrc(LteRrcSubtype::DlDcchNb),
                    59 => GsmtapType::LteRrc(LteRrcSubtype::UlCcchNb),
                    61 => GsmtapType::LteRrc(LteRrcSubtype::UlDcchNb),
                    pdu => return Err(GsmtapParserError::InvalidLteRrcOtaHeaderPduNum(ext_header_version, pdu)),
                },
                _ => return Err(GsmtapParserError::InvalidLteRrcOtaExtHeaderVersion(ext_header_version)),
            };
            let mut header = GsmtapHeader::new(gsmtap_type);
            // Wireshark GSMTAP only accepts 14 bits of ARFCN
            header.arfcn = packet.get_earfcn().try_into().unwrap_or(0);
            header.frame_number = packet.get_sfn();
            header.subslot = packet.get_subfn();
            Ok(Some(GsmtapMessage {
                header,
                payload: packet.take_payload(),
            }))
        },
        LogBody::Nas4GMessage { msg, .. } => {
            // currently we only handle "plain" (i.e. non-secure) NAS messages
            let header = GsmtapHeader::new(GsmtapType::LteNas(LteNasSubtype::Plain));
            Ok(Some(GsmtapMessage {
                header,
                payload: msg,
            }))
        },
        _ => {
            error!("gsmtap_sink: ignoring unhandled log type: {:?}", value);
            Ok(None)
        },
    }
}

View File

@@ -1,17 +1,12 @@
use rayhunter::diag::{
Message,
LogBody,
LteRrcOtaPacket,
Timestamp,
};
use rayhunter::gsmtap_parser::GsmtapParser;
use rayhunter::{diag::{
LogBody, LteRrcOtaPacket, Message, Timestamp
}, gsmtap_parser};
use deku::prelude::*;
// Tests here are based on https://github.com/fgsect/scat/blob/97442580e628de414c9f7c2a185f4e28d0ee7523/tests/test_diagltelogparser.py
#[test]
fn test_lte_rrc_ota() {
let mut parser = GsmtapParser::new();
let v26_binary = &[
0x10, 0x0, 0x23, 0x0, 0x23, 0x0, 0xc0, 0xb0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x1a, 0xf, 0x40, 0xf, 0x40, 0x1, 0xe, 0x1, 0x13, 0x7,
@@ -42,7 +37,7 @@ fn test_lte_rrc_ota() {
}
}
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[0x10, 0x15]);
assert_eq!(gsmtap_msg.header.packet_type, 13);
assert_eq!(gsmtap_msg.header.timeslot, 0);
@@ -85,7 +80,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x10, 0x15,
]);
@@ -132,7 +127,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x40, 0x85, 0x8e, 0xc4, 0xe5, 0xbf, 0xe0, 0x50,
0xdc, 0x29, 0x15, 0x16, 0x00,
@@ -183,7 +178,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x08, 0x10, 0xa7, 0x14, 0x53, 0x59, 0xa6, 0x05,
0x43, 0x68, 0xc0, 0x3b, 0xda, 0x30, 0x04, 0xa6,
@@ -229,7 +224,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x28, 0x18, 0x40, 0x16, 0x08, 0x08, 0x80, 0x00,
0x00,
@@ -274,7 +269,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x40, 0x0c, 0x8e, 0xc9, 0x42, 0x89, 0xe0,
]);
@@ -324,7 +319,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x08, 0x10, 0xa5, 0x34, 0x61, 0x41, 0xa3, 0x1c,
0x31, 0x68, 0x04, 0x40, 0x1a, 0x00, 0x49, 0x16,
@@ -370,7 +365,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[0x2c, 0x00]);
assert_eq!(gsmtap_msg.header.packet_type, 13);
assert_eq!(gsmtap_msg.header.timeslot, 0);
@@ -412,7 +407,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x40, 0x0b, 0x8e, 0xc1, 0xdd, 0x13, 0xb0,
]);
@@ -455,7 +450,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[0x2e, 0x02]);
assert_eq!(gsmtap_msg.header.packet_type, 13);
assert_eq!(gsmtap_msg.header.timeslot, 0);
@@ -501,7 +496,7 @@ fn test_lte_rrc_ota() {
},
},
});
let (_, gsmtap_msg) = parser.parse(parsed).unwrap().unwrap();
let (_, gsmtap_msg) = gsmtap_parser::parse(parsed).unwrap().unwrap();
assert_eq!(&gsmtap_msg.payload, &[
0x40, 0x49, 0x88, 0x05, 0xc0, 0x97, 0x02, 0xd3,
0xb0, 0x98, 0x1c, 0x20, 0xa0, 0x81, 0x8c, 0x43,