mirror of
https://github.com/bitcoinresearchkit/brk.git
synced 2026-04-29 09:09:58 -07:00
global: snapshot
This commit is contained in:
191
server/src/api/handlers/dataset.rs
Normal file
191
server/src/api/handlers/dataset.rs
Normal file
@@ -0,0 +1,191 @@
|
||||
use std::{collections::BTreeMap, path::PathBuf};
|
||||
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::HeaderMap,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use color_eyre::{eyre::eyre, owo_colors::OwoColorize};
|
||||
use reqwest::{header::HOST, StatusCode};
|
||||
use serde::Deserialize;
|
||||
|
||||
use parser::{log, Date, DateMap, Height, HeightMap, MapChunkId, HEIGHT_MAP_CHUNK_SIZE, OHLC};
|
||||
|
||||
use crate::{
|
||||
api::structs::{Chunk, Kind, Route},
|
||||
header_map::HeaderMapUtils,
|
||||
response::typed_value_to_response,
|
||||
AppState,
|
||||
};
|
||||
|
||||
/// Query-string parameters accepted by the dataset endpoint.
#[derive(Deserialize)]
pub struct Params {
    // Optional chunk index; when absent, the latest chunk is served
    // (see `process_datasets`, which falls back to the last chunk id).
    chunk: Option<usize>,
}
|
||||
|
||||
pub async fn dataset_handler(
|
||||
headers: HeaderMap,
|
||||
path: Path<String>,
|
||||
query: Query<Params>,
|
||||
State(app_state): State<AppState>,
|
||||
) -> Response {
|
||||
match _dataset_handler(headers, path, query, app_state) {
|
||||
Ok(response) => response,
|
||||
Err(error) => {
|
||||
let mut response =
|
||||
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
|
||||
|
||||
response.headers_mut().insert_cors();
|
||||
|
||||
response
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn _dataset_handler(
|
||||
headers: HeaderMap,
|
||||
Path(path): Path<String>,
|
||||
query: Query<Params>,
|
||||
AppState { routes }: AppState,
|
||||
) -> color_eyre::Result<Response> {
|
||||
if path.contains("favicon") {
|
||||
return Err(eyre!("Don't support favicon"));
|
||||
}
|
||||
|
||||
log(&format!(
|
||||
"{}{}",
|
||||
path,
|
||||
query.chunk.map_or("".to_string(), |chunk| format!(
|
||||
"{}{chunk}",
|
||||
"?chunk=".bright_black()
|
||||
))
|
||||
));
|
||||
|
||||
let date_prefix = "date-to-";
|
||||
let height_prefix = "height-to-";
|
||||
|
||||
let (kind, route) = if path.starts_with(date_prefix) {
|
||||
(
|
||||
Kind::Date,
|
||||
routes.date.get(&replace_dash_by_underscore(
|
||||
path.strip_prefix(date_prefix).unwrap(),
|
||||
)),
|
||||
)
|
||||
} else if path.starts_with(height_prefix) {
|
||||
(
|
||||
Kind::Height,
|
||||
routes.height.get(&replace_dash_by_underscore(
|
||||
path.strip_prefix(height_prefix).unwrap(),
|
||||
)),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
Kind::Last,
|
||||
routes.last.get(&replace_dash_by_underscore(&path)),
|
||||
)
|
||||
};
|
||||
|
||||
if route.is_none() {
|
||||
return Err(eyre!("Path error"));
|
||||
}
|
||||
|
||||
let mut route = route.unwrap().to_owned();
|
||||
|
||||
let mut chunk = None;
|
||||
|
||||
if kind != Kind::Last {
|
||||
match kind {
|
||||
Kind::Date => {
|
||||
let datasets = DateMap::<usize>::_read_dir(&route.file_path, &route.serialization);
|
||||
process_datasets(headers, kind, &mut chunk, &mut route, query, datasets)?;
|
||||
}
|
||||
Kind::Height => {
|
||||
let datasets =
|
||||
HeightMap::<usize>::_read_dir(&route.file_path, &route.serialization);
|
||||
process_datasets(headers, kind, &mut chunk, &mut route, query, datasets)?;
|
||||
}
|
||||
_ => panic!(),
|
||||
};
|
||||
}
|
||||
|
||||
let type_name = route.values_type.split("::").last().unwrap();
|
||||
|
||||
let value = match type_name {
|
||||
"u8" => typed_value_to_response::<u8>(kind, &route, chunk)?,
|
||||
"u16" => typed_value_to_response::<u16>(kind, &route, chunk)?,
|
||||
"u32" => typed_value_to_response::<u32>(kind, &route, chunk)?,
|
||||
"u64" => typed_value_to_response::<u64>(kind, &route, chunk)?,
|
||||
"usize" => typed_value_to_response::<usize>(kind, &route, chunk)?,
|
||||
"f32" => typed_value_to_response::<f32>(kind, &route, chunk)?,
|
||||
"f64" => typed_value_to_response::<f64>(kind, &route, chunk)?,
|
||||
"OHLC" => typed_value_to_response::<OHLC>(kind, &route, chunk)?,
|
||||
"Date" => typed_value_to_response::<Date>(kind, &route, chunk)?,
|
||||
"Height" => typed_value_to_response::<Height>(kind, &route, chunk)?,
|
||||
_ => panic!("Incompatible type: {type_name}"),
|
||||
};
|
||||
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
/// Returns a copy of `s` with every `-` turned into `_` (URL segments use
/// dashes; the route tables are keyed with underscores).
fn replace_dash_by_underscore(s: &str) -> String {
    s.chars()
        .map(|c| if c == '-' { '_' } else { c })
        .collect()
}
|
||||
|
||||
fn process_datasets<ChunkId>(
|
||||
headers: HeaderMap,
|
||||
kind: Kind,
|
||||
chunk: &mut Option<Chunk>,
|
||||
route: &mut Route,
|
||||
query: Query<Params>,
|
||||
datasets: BTreeMap<ChunkId, PathBuf>,
|
||||
) -> color_eyre::Result<()>
|
||||
where
|
||||
ChunkId: MapChunkId,
|
||||
{
|
||||
let (last_chunk_id, _) = datasets.last_key_value().unwrap_or_else(|| {
|
||||
dbg!(&datasets, &route);
|
||||
panic!()
|
||||
});
|
||||
|
||||
let chunk_id = query
|
||||
.chunk
|
||||
.map(|id| ChunkId::from_usize(id))
|
||||
.unwrap_or(*last_chunk_id);
|
||||
|
||||
let path = datasets.get(&chunk_id);
|
||||
|
||||
if path.is_none() {
|
||||
return Err(eyre!("Couldn't find chunk"));
|
||||
}
|
||||
|
||||
route.file_path = path.unwrap().to_str().unwrap().to_string();
|
||||
|
||||
let offset = match kind {
|
||||
Kind::Date => 1,
|
||||
Kind::Height => HEIGHT_MAP_CHUNK_SIZE as usize,
|
||||
_ => panic!(),
|
||||
};
|
||||
|
||||
let offsetted_to_url = |offseted| {
|
||||
datasets.get(&ChunkId::from_usize(offseted)).map(|_| {
|
||||
let host = headers[HOST].to_str().unwrap();
|
||||
let scheme = if host.contains("0.0.0.0") || host.contains("localhost") {
|
||||
"http"
|
||||
} else {
|
||||
"https"
|
||||
};
|
||||
|
||||
format!("{scheme}://{host}{}?chunk={offseted}", route.url_path)
|
||||
})
|
||||
};
|
||||
|
||||
let chunk_id = chunk_id.to_usize();
|
||||
|
||||
chunk.replace(Chunk {
|
||||
id: chunk_id,
|
||||
next: chunk_id.checked_add(offset).and_then(offsetted_to_url),
|
||||
previous: chunk_id.checked_sub(offset).and_then(offsetted_to_url),
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
14
server/src/api/handlers/fallback.rs
Normal file
14
server/src/api/handlers/fallback.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use axum::{extract::State, http::HeaderMap, response::Response};
|
||||
use reqwest::header::HOST;
|
||||
|
||||
use crate::{response::generic_to_reponse, AppState};
|
||||
|
||||
pub async fn fallback(headers: HeaderMap, State(app_state): State<AppState>) -> Response {
|
||||
generic_to_reponse(
|
||||
app_state
|
||||
.routes
|
||||
.to_full_paths(headers[HOST].to_str().unwrap().to_string()),
|
||||
None,
|
||||
60,
|
||||
)
|
||||
}
|
||||
5
server/src/api/handlers/mod.rs
Normal file
5
server/src/api/handlers/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
mod dataset;
|
||||
mod fallback;
|
||||
|
||||
pub use dataset::*;
|
||||
pub use fallback::*;
|
||||
19
server/src/api/mod.rs
Normal file
19
server/src/api/mod.rs
Normal file
@@ -0,0 +1,19 @@
|
||||
use axum::{routing::get, Router};
|
||||
use handlers::{dataset_handler, fallback};
|
||||
|
||||
use crate::AppState;
|
||||
|
||||
mod handlers;
|
||||
pub mod structs;
|
||||
|
||||
/// Extension trait that mounts the JSON API endpoints on a router.
pub trait ApiRoutes {
    // Consumes the router and returns it with the API routes registered.
    fn add_api_routes(self) -> Self;
}
|
||||
|
||||
impl ApiRoutes for Router<AppState> {
|
||||
fn add_api_routes(self) -> Self {
|
||||
self.route("/api/*path", get(dataset_handler))
|
||||
.route("/api/", get(fallback))
|
||||
.route("/api", get(fallback))
|
||||
}
|
||||
}
|
||||
8
server/src/api/structs/chunk.rs
Normal file
8
server/src/api/structs/chunk.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Pagination metadata attached to responses for chunked datasets.
#[derive(Debug, Serialize, Deserialize)]
pub struct Chunk {
    // Index of the chunk being served.
    pub id: usize,
    // Absolute URL of the previous chunk, when it exists.
    pub previous: Option<String>,
    // Absolute URL of the next chunk, when it exists.
    pub next: Option<String>,
}
|
||||
6
server/src/api/structs/kind.rs
Normal file
6
server/src/api/structs/kind.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
/// The index dimension of a dataset route.
// `Debug` added so the kind can appear in diagnostics/error messages;
// public types should generally derive it.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Kind {
    /// Values keyed by calendar date.
    Date,
    /// Values keyed by block height.
    Height,
    /// A single, non-chunked latest value.
    Last,
}
|
||||
9
server/src/api/structs/mod.rs
Normal file
9
server/src/api/structs/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
mod chunk;
|
||||
mod kind;
|
||||
mod paths;
|
||||
mod routes;
|
||||
|
||||
pub use chunk::*;
|
||||
pub use kind::*;
|
||||
pub use paths::*;
|
||||
pub use routes::*;
|
||||
9
server/src/api/structs/paths.rs
Normal file
9
server/src/api/structs/paths.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::Grouped;
|
||||
|
||||
/// Route keys mapped to their absolute URLs, grouped by index kind
/// (see `Routes::to_full_paths`, which builds this).
#[derive(Clone, Default, Deref, DerefMut, Debug, Serialize)]
pub struct Paths(pub Grouped<BTreeMap<String, String>>);
||||
152
server/src/api/structs/routes.rs
Normal file
152
server/src/api/structs/routes.rs
Normal file
@@ -0,0 +1,152 @@
|
||||
use std::{
|
||||
collections::{BTreeMap, HashMap},
|
||||
fs,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
use itertools::Itertools;
|
||||
use parser::{Json, Serialization};
|
||||
|
||||
use crate::Grouped;
|
||||
|
||||
use super::Paths;
|
||||
|
||||
/// A single dataset endpoint: where it lives on disk and how it is served.
#[derive(Clone, Debug)]
pub struct Route {
    // URL path of the endpoint (chunked kinds carry a
    // `date-to-`/`height-to-` prefix).
    pub url_path: String,
    // Path of the backing file on disk.
    pub file_path: String,
    // Type name of the stored values; only the last `::` segment is used
    // for dispatch by the dataset handler.
    pub values_type: String,
    // On-disk encoding (binary or JSON) of the backing file.
    pub serialization: Serialization,
}
|
||||
|
||||
/// Lookup tables from dataset key to [`Route`], grouped by index kind
/// (date / height / last).
#[derive(Clone, Default, Deref, DerefMut)]
pub struct Routes(pub Grouped<HashMap<String, Route>>);
|
||||
|
||||
// Directory holding the parser's output consumed by the server.
const INPUTS_PATH: &str = "./in";
// Destination directory for the generated TypeScript type definitions.
const APP_TYPES_PATH: &str = "../app/src/types";
||||
|
||||
impl Routes {
|
||||
pub fn build() -> Self {
|
||||
let path_to_type: BTreeMap<String, String> =
|
||||
Json::import(Path::new(&format!("{INPUTS_PATH}/disk_path_to_type.json"))).unwrap();
|
||||
|
||||
let mut routes = Routes::default();
|
||||
|
||||
path_to_type.into_iter().for_each(|(key, value)| {
|
||||
let mut split_key = key.split('/').collect_vec();
|
||||
let last = split_key.pop().unwrap().to_owned();
|
||||
|
||||
let mut skip = 2;
|
||||
|
||||
let mut serialization = Serialization::Binary;
|
||||
|
||||
if *split_key.get(1).unwrap() == "price" {
|
||||
skip = 1;
|
||||
serialization = Serialization::Json;
|
||||
}
|
||||
|
||||
let split_key = split_key.iter().skip(skip).collect_vec();
|
||||
|
||||
let map_key = split_key.iter().join("_");
|
||||
|
||||
let url_path = split_key.iter().join("-");
|
||||
|
||||
let file_path = key.to_owned();
|
||||
let values_type = value.to_owned();
|
||||
|
||||
if last == "date" {
|
||||
routes.date.insert(
|
||||
map_key,
|
||||
Route {
|
||||
url_path: format!("date-to-{url_path}"),
|
||||
file_path,
|
||||
values_type,
|
||||
serialization,
|
||||
},
|
||||
);
|
||||
} else if last == "height" {
|
||||
routes.height.insert(
|
||||
map_key,
|
||||
Route {
|
||||
url_path: format!("height-to-{url_path}"),
|
||||
file_path,
|
||||
values_type,
|
||||
serialization,
|
||||
},
|
||||
);
|
||||
} else if last == "last" {
|
||||
routes.last.insert(
|
||||
map_key,
|
||||
Route {
|
||||
url_path,
|
||||
file_path,
|
||||
values_type,
|
||||
serialization,
|
||||
},
|
||||
);
|
||||
} else {
|
||||
dbg!(&key, value, &last);
|
||||
panic!("")
|
||||
}
|
||||
});
|
||||
|
||||
routes
|
||||
}
|
||||
|
||||
pub fn generate_dts_file(&self) {
|
||||
let map_to_type = |name: &str, map: &HashMap<String, Route>| -> String {
|
||||
let paths = map
|
||||
.values()
|
||||
.map(|route| format!("\"{}\"", route.url_path))
|
||||
.join(" | ");
|
||||
|
||||
format!("// This file is auto generated by the server\n// Manual changes are forbidden\n\ntype {}Path = {};\n", name, paths)
|
||||
};
|
||||
|
||||
let date_type = map_to_type("Date", &self.date);
|
||||
|
||||
let height_type = map_to_type("Height", &self.height);
|
||||
|
||||
let last_type = map_to_type("Last", &self.last);
|
||||
|
||||
fs::write(
|
||||
format!("{APP_TYPES_PATH}/paths.d.ts"),
|
||||
format!("{date_type}\n{height_type}\n{last_type}"),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
pub fn to_full_paths(&self, host: String) -> Paths {
|
||||
let url = {
|
||||
let scheme = if host.contains("0.0.0.0") || host.contains("localhost") {
|
||||
"http"
|
||||
} else {
|
||||
"https"
|
||||
};
|
||||
|
||||
format!("{scheme}://{host}")
|
||||
};
|
||||
|
||||
let transform = |map: &HashMap<String, Route>| -> BTreeMap<String, String> {
|
||||
map.iter()
|
||||
.map(|(key, route)| {
|
||||
(
|
||||
key.to_owned(),
|
||||
format!("{url}{}", route.url_path.to_owned()),
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
let date_paths = transform(&self.date);
|
||||
let height_paths = transform(&self.height);
|
||||
let last_paths = transform(&self.last);
|
||||
|
||||
Paths(Grouped {
|
||||
date: date_paths,
|
||||
height: height_paths,
|
||||
last: last_paths,
|
||||
})
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user