git: reset

This commit is contained in:
k
2024-06-23 17:38:53 +02:00
commit a1a576d088
375 changed files with 40952 additions and 0 deletions

8
server/src/chunk.rs Normal file
View File

@@ -0,0 +1,8 @@
use serde::{Deserialize, Serialize};
/// Pagination metadata attached to chunked dataset responses.
///
/// `previous` / `next` hold absolute URLs of the neighboring chunks
/// (built in the handler from the request's Host header), or `None`
/// when that neighbor does not exist.
#[derive(Debug, Serialize, Deserialize)]
pub struct Chunk {
    /// Identifier of this chunk (the chunk's first key, e.g. a height offset).
    pub id: usize,
    /// Absolute URL of the previous chunk, if any.
    pub previous: Option<String>,
    /// Absolute URL of the next chunk, if any.
    pub next: Option<String>,
}

150
server/src/handler.rs Normal file
View File

@@ -0,0 +1,150 @@
use axum::{
extract::{Path, Query, State},
http::HeaderMap,
response::{IntoResponse, Response},
};
use color_eyre::{eyre::eyre, owo_colors::OwoColorize};
use reqwest::{header::HOST, StatusCode};
use serde::Deserialize;
use parser::{log, DateMap, HeightMap, WNaiveDate, HEIGHT_MAP_CHUNK_SIZE, OHLC};
use crate::{
chunk::Chunk, headers::add_cors_to_headers, kind::Kind, response::typed_value_to_response,
AppState,
};
/// Query-string parameters accepted by `file_handler`.
#[derive(Deserialize)]
pub struct Params {
    /// Optional chunk id (`?chunk=N`); when absent the last chunk is served.
    chunk: Option<usize>,
}
pub async fn file_handler(
headers: HeaderMap,
path: Path<String>,
query: Query<Params>,
State(app_state): State<AppState>,
) -> Response {
match _file_handler(headers, path, query, app_state) {
Ok(response) => response,
Err(error) => {
let mut response =
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
add_cors_to_headers(response.headers_mut());
response
}
}
}
/// Resolves a dataset route from the URL path and serves its contents.
///
/// Path shapes:
/// - `date-to-<key>`   -> date-indexed dataset (`Kind::Date`)
/// - `height-to-<key>` -> height-indexed dataset (`Kind::Height`)
/// - `<key>`           -> single latest value (`Kind::Last`)
///
/// For chunked kinds, `?chunk=N` selects a chunk (defaulting to the newest)
/// and the response carries `previous` / `next` chunk URLs.
fn _file_handler(
    headers: HeaderMap,
    Path(path): Path<String>,
    query: Query<Params>,
    AppState { routes }: AppState,
) -> color_eyre::Result<Response> {
    // Browsers probe for favicons; there is no such dataset.
    if path.contains("favicon") {
        return Err(eyre!("Don't support favicon"));
    }
    // Access log: path plus the chunk query (if any), chunk part dimmed.
    log(&format!(
        "{}{}",
        path,
        query.chunk.map_or("".to_string(), |chunk| format!(
            "{}{chunk}",
            "?chunk=".bright_black()
        ))
    ));
    let date_prefix = "date-to-";
    let height_prefix = "height-to-";
    // URL segments use '-', route-map keys use '_': translate when looking up.
    let (kind, route) = if path.starts_with(date_prefix) {
        (
            Kind::Date,
            routes
                .date
                .get(&path.strip_prefix(date_prefix).unwrap().replace('-', "_")),
        )
    } else if path.starts_with(height_prefix) {
        (
            Kind::Height,
            routes
                .height
                .get(&path.strip_prefix(height_prefix).unwrap().replace('-', "_")),
        )
    } else {
        (Kind::Last, routes.last.get(&path.replace('-', "_")))
    };
    if route.is_none() {
        return Err(eyre!("Path error"));
    }
    // Owned copy: `file_path` is rewritten below to point at the chosen chunk.
    let mut route = route.unwrap().to_owned();
    let mut chunk = None;
    if kind != Kind::Last {
        // Chunked datasets live in a directory of per-chunk files,
        // keyed by chunk id.
        let datasets = match kind {
            Kind::Date => DateMap::<usize>::_read_dir(&route.file_path, &route.serialization),
            Kind::Height => HeightMap::<usize>::_read_dir(&route.file_path, &route.serialization),
            _ => panic!(),
        };
        // NOTE(review): `unwrap` assumes the dataset directory is never
        // empty — TODO confirm against the exporter.
        let (last_chunk_id, _) = datasets.last_key_value().unwrap();
        let chunk_id = query.chunk.unwrap_or(*last_chunk_id);
        let path = datasets.get(&chunk_id);
        if path.is_none() {
            return Err(eyre!("Couldn't find chunk"));
        }
        // Point the route at the concrete chunk file.
        route.file_path = path.unwrap().to_str().unwrap().to_string();
        // Distance between neighboring chunk ids: dates step by 1,
        // heights by the fixed chunk size.
        let offset = match kind {
            Kind::Date => 1,
            Kind::Height => HEIGHT_MAP_CHUNK_SIZE,
            _ => panic!(),
        };
        // Maps a neighboring chunk id to its absolute URL, or None if
        // no such chunk exists.
        // NOTE(review): `headers[HOST]` panics when the Host header is
        // missing — TODO confirm upstream always sets it.
        let offsetted_to_url = |offseted| {
            datasets.get(&offseted).map(|_| {
                let host = headers[HOST].to_str().unwrap();
                let scheme = if host.contains("0.0.0.0") || host.contains("localhost") {
                    "http"
                } else {
                    "https"
                };
                format!("{scheme}://{host}{}?chunk={offseted}", route.url_path)
            })
        };
        chunk = Some(Chunk {
            id: chunk_id,
            next: chunk_id.checked_add(offset).and_then(offsetted_to_url),
            previous: chunk_id.checked_sub(offset).and_then(offsetted_to_url),
        })
    }
    // The stored type name may be fully qualified; dispatch on its last
    // segment to deserialize with the right concrete type.
    let type_name = route.values_type.split("::").last().unwrap();
    let value = match type_name {
        "u8" => typed_value_to_response::<u8>(kind, &route.file_path, chunk)?,
        "u16" => typed_value_to_response::<u16>(kind, &route.file_path, chunk)?,
        "u32" => typed_value_to_response::<u32>(kind, &route.file_path, chunk)?,
        "u64" => typed_value_to_response::<u64>(kind, &route.file_path, chunk)?,
        "usize" => typed_value_to_response::<usize>(kind, &route.file_path, chunk)?,
        "f32" => typed_value_to_response::<f32>(kind, &route.file_path, chunk)?,
        "f64" => typed_value_to_response::<f64>(kind, &route.file_path, chunk)?,
        "OHLC" => typed_value_to_response::<OHLC>(kind, &route.file_path, chunk)?,
        "WNaiveDate" => typed_value_to_response::<WNaiveDate>(kind, &route.file_path, chunk)?,
        _ => panic!("Incompatible type: {type_name}"),
    };
    Ok(value)
}

26
server/src/headers.rs Normal file
View File

@@ -0,0 +1,26 @@
use axum::http::{header, HeaderMap};
// Let caches keep serving stale content on upstream error for up to this
// many seconds.
const STALE_IF_ERROR: u64 = 604800; // 1 Week
/// Applies a fully permissive CORS policy: any origin, any request headers.
pub fn add_cors_to_headers(headers: &mut HeaderMap) {
    for name in [
        header::ACCESS_CONTROL_ALLOW_ORIGIN,
        header::ACCESS_CONTROL_ALLOW_HEADERS,
    ] {
        headers.insert(name, "*".parse().unwrap());
    }
}
/// Marks the response body as JSON.
pub fn add_json_type_to_headers(headers: &mut HeaderMap) {
    let mime = "application/json".parse().unwrap();
    headers.insert(header::CONTENT_TYPE, mime);
}
pub fn add_cache_control_to_headers(
headers: &mut HeaderMap,
max_age: u64,
stale_while_revalidate: u64,
) {
headers.insert(
header::CACHE_CONTROL,
format!(
"public, max-age={max_age}, stale-while-revalidate={stale_while_revalidate}, stale-if-error={STALE_IF_ERROR}")
.parse()
.unwrap(),
);
}

27
server/src/imports.rs Normal file
View File

@@ -0,0 +1,27 @@
use std::fmt::Debug;
use bincode::Decode;
use parser::{Serialization, SerializedDateMap, SerializedHeightMap};
use serde::{de::DeserializeOwned, Serialize};
/// Loads a serialized date-indexed map from disk, choosing the codec from
/// the file extension.
pub fn import_map<T>(relative_path: &str) -> color_eyre::Result<SerializedDateMap<T>>
where
    T: Serialize + Debug + DeserializeOwned + Decode,
{
    let extension = relative_path.rsplit('.').next().unwrap();
    Serialization::from_extension(extension).import(relative_path)
}
/// Loads a serialized height-indexed map from disk, choosing the codec
/// from the file extension.
pub fn import_vec<T>(relative_path: &str) -> color_eyre::Result<SerializedHeightMap<T>>
where
    T: Serialize + Debug + DeserializeOwned + Decode,
{
    let extension = relative_path.rsplit('.').next().unwrap();
    Serialization::from_extension(extension).import(relative_path)
}
/// Loads a single serialized value from disk, choosing the codec from the
/// file extension.
pub fn import_value<T>(relative_path: &str) -> color_eyre::Result<T>
where
    T: Serialize + Debug + DeserializeOwned + Decode,
{
    let extension = relative_path.rsplit('.').next().unwrap();
    Serialization::from_extension(extension).import::<T>(relative_path)
}

6
server/src/kind.rs Normal file
View File

@@ -0,0 +1,6 @@
/// How a dataset route is indexed, derived from the URL path prefix.
///
/// A fieldless enum: `Copy` makes it freely passable by value, and `Debug`
/// lets callers log/inspect it (public types should be `Debug`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Kind {
    /// `date-to-*` routes: one value per date.
    Date,
    /// `height-to-*` routes: one value per block height.
    Height,
    /// Bare routes: a single, latest value.
    Last,
}

78
server/src/main.rs Normal file
View File

@@ -0,0 +1,78 @@
use std::sync::Arc;
use axum::{extract::State, http::HeaderMap, response::Response, routing::get, serve, Router};
use parser::log;
use reqwest::header::HOST;
use response::generic_to_reponse;
use routes::Routes;
use serde::Serialize;
use tokio::net::TcpListener;
use tower_http::compression::CompressionLayer;
mod chunk;
mod handler;
mod headers;
mod imports;
mod kind;
mod paths;
mod response;
mod routes;
use handler::file_handler;
/// A value replicated across the three route families: date-indexed,
/// height-indexed, and "last value" routes.
#[derive(Clone, Debug, Default, Serialize)]
pub struct Grouped<T> {
    pub date: T,
    pub height: T,
    pub last: T,
}
/// Shared axum application state. It is cloned for every request, so the
/// (immutable) route table is shared behind an `Arc`.
#[derive(Clone)]
pub struct AppState {
    routes: Arc<Routes>,
}
/// Boots the API: builds the route table, writes the grouped-keys index
/// file, then serves a compressed axum router on port 3110.
#[tokio::main]
async fn main() -> color_eyre::Result<()> {
    color_eyre::install()?;

    let routes = Routes::build();
    routes.generate_grouped_keys_to_url_path_file();

    // Negotiate any of br/deflate/gzip/zstd for response bodies.
    let compression = CompressionLayer::new()
        .br(true)
        .deflate(true)
        .gzip(true)
        .zstd(true);

    let app = Router::new()
        .route("/*path", get(file_handler))
        .route("/", get(fallback))
        .with_state(AppState {
            routes: Arc::new(routes),
        })
        .layer(compression);

    let port = 3110;
    log(&format!("Starting server on port {port}..."));

    serve(TcpListener::bind(format!("0.0.0.0:{port}")).await?, app).await?;

    Ok(())
}
/// Root (`/`) handler: lists every dataset key with its absolute URL,
/// built from the request's Host header.
pub async fn fallback(headers: HeaderMap, State(app_state): State<AppState>) -> Response {
    // NOTE(review): indexing panics when no Host header is present — TODO confirm.
    let host = headers[HOST].to_str().unwrap().to_string();
    generic_to_reponse(app_state.routes.to_full_paths(host), None, 60)
}

9
server/src/paths.rs Normal file
View File

@@ -0,0 +1,9 @@
use std::collections::BTreeMap;
use derive_deref::{Deref, DerefMut};
use serde::Serialize;
use crate::Grouped;
/// Map from dataset key to URL path, one map per route family.
/// `Deref`/`DerefMut` let callers use it like the inner `Grouped` directly.
#[derive(Clone, Default, Deref, DerefMut, Debug, Serialize)]
pub struct Paths(pub Grouped<BTreeMap<String, String>>);

81
server/src/response.rs Normal file
View File

@@ -0,0 +1,81 @@
use std::fmt::Debug;
use axum::response::{IntoResponse, Json, Response};
use bincode::Decode;
use serde::de::DeserializeOwned;
use serde::Serialize;
use crate::{
chunk::Chunk,
headers::{add_cache_control_to_headers, add_cors_to_headers, add_json_type_to_headers},
imports::{import_map, import_value, import_vec},
kind::Kind,
};
/// JSON envelope for chunked dataset responses: the payload plus its
/// pagination metadata and a source attribution string.
#[derive(Serialize)]
struct WrappedDataset<'a, T>
where
    T: Serialize,
{
    source: &'a str,
    chunk: Chunk,
    dataset: T,
}
/// Imports the file at `relative_path` as values of type `T` and renders
/// it as a JSON response appropriate for `kind`.
///
/// Chunked kinds (`Date`, `Height`) require `chunk` to be `Some`; it is
/// unwrapped here, so passing `None` for those kinds panics.
pub fn typed_value_to_response<T>(
    kind: Kind,
    relative_path: &str,
    chunk: Option<Chunk>,
) -> color_eyre::Result<Response>
where
    T: Serialize + Debug + DeserializeOwned + Decode,
{
    match kind {
        Kind::Date => Ok(dataset_to_response(import_map::<T>(relative_path)?, chunk.unwrap())),
        Kind::Height => Ok(dataset_to_response(import_vec::<T>(relative_path)?, chunk.unwrap())),
        Kind::Last => Ok(value_to_response(import_value::<T>(relative_path)?)),
    }
}
/// Single latest values change often: cache for only 5 seconds, no chunk.
fn value_to_response<T: Serialize>(value: T) -> Response {
    generic_to_reponse(value, None, 5)
}
/// Whole chunks change rarely: cache for 60 seconds, with pagination info.
fn dataset_to_response<T: Serialize>(dataset: T, chunk: Chunk) -> Response {
    generic_to_reponse(dataset, Some(chunk), 60)
}
/// Serializes `generic` into a JSON response with CORS, content-type and
/// cache-control headers attached.
///
/// When `chunk` is `Some`, the payload is wrapped in a `WrappedDataset`
/// envelope carrying pagination links; otherwise the value is sent bare.
/// `cache_time` is the `max-age` in seconds; `stale-while-revalidate` is
/// twice that.
///
/// NOTE(review): the "reponse" typo in the name is kept on purpose —
/// renaming would touch every call site.
pub fn generic_to_reponse<T>(generic: T, chunk: Option<Chunk>, cache_time: u64) -> Response
where
    T: Serialize,
{
    let mut response = match chunk {
        Some(chunk) => Json(WrappedDataset {
            source: "https://satonomics.xyz",
            chunk,
            dataset: generic,
        })
        .into_response(),
        None => Json(generic).into_response(),
    };

    let headers = response.headers_mut();
    add_cors_to_headers(headers);
    add_json_type_to_headers(headers);
    add_cache_control_to_headers(headers, cache_time, 2 * cache_time);

    response
}

145
server/src/routes.rs Normal file
View File

@@ -0,0 +1,145 @@
use std::collections::{BTreeMap, HashMap};
use derive_deref::{Deref, DerefMut};
use itertools::Itertools;
use parser::{Json, Serialization};
use crate::{paths::Paths, Grouped};
/// One servable dataset: where it lives on disk, how it is encoded, and
/// the URL path it is exposed under.
#[derive(Clone, Debug)]
pub struct Route {
    /// Public URL path, e.g. `/date-to-<key>` or `/height-to-<key>`.
    pub url_path: String,
    /// On-disk path of the backing file (a directory for chunked maps).
    pub file_path: String,
    /// Rust type name of the stored values (may be fully qualified).
    pub values_type: String,
    /// On-disk codec (JSON or binary).
    pub serialization: Serialization,
}
/// The full route table, keyed by dataset name, one map per route family.
/// `Deref`/`DerefMut` expose the inner `Grouped` directly.
#[derive(Clone, Default, Deref, DerefMut)]
pub struct Routes(pub Grouped<HashMap<String, Route>>);

// Root of the on-disk dataset dump this server reads from.
const DATASETS_PATH: &str = "../datasets_bkp";
impl Routes {
    /// Builds the route table from `disk_path_to_type.json`, which maps
    /// each dataset's on-disk path to the type name of its values.
    ///
    /// Assumed key shape (TODO confirm against the exporter):
    /// `<root>/<category>/.../<name>.<date|height|last>[.<ext>]` — the
    /// filename's first dot-segment selects the route family, the optional
    /// second one the serialization codec.
    ///
    /// # Panics
    /// Panics when the JSON index cannot be imported or a key does not
    /// match the expected shape (server startup should fail loudly).
    pub fn build() -> Self {
        let path_to_type: BTreeMap<String, String> =
            Json::import(&format!("{DATASETS_PATH}/disk_path_to_type.json")).unwrap();
        let mut routes = Routes::default();
        path_to_type.into_iter().for_each(|(key, value)| {
            // Split into directory components; the filename is popped off.
            let mut split_key = key.split('/').collect_vec();
            // Filename dot-segments, reversed so `pop` yields them front-first.
            let mut split_last = split_key.pop().unwrap().split('.').rev().collect_vec();
            // First dot-segment: "date", "height" or "last".
            let last = split_last.pop().unwrap().to_owned();
            // Second dot-segment (extension) picks the codec; without one,
            // fall back on the second directory ("price" datasets are JSON,
            // everything else binary).
            let serialization = split_last.pop().map_or_else(
                || {
                    if *split_key.get(1).unwrap() == "price" {
                        Serialization::Json
                    } else {
                        Serialization::Binary
                    }
                },
                Serialization::from_extension,
            );
            // Drop the first two directories (dataset root); the remainder
            // forms the dataset key ('_'-joined) and URL path ('-'-joined).
            let split_key = split_key.iter().skip(2).collect_vec();
            let map_key = split_key.iter().join("_");
            let url_path = split_key.iter().join("-");
            let file_path = key.to_owned();
            let values_type = value.to_owned();
            if last == "date" {
                routes.date.insert(
                    map_key,
                    Route {
                        url_path: format!("/date-to-{url_path}"),
                        file_path,
                        values_type,
                        serialization,
                    },
                );
            } else if last == "height" {
                routes.height.insert(
                    map_key,
                    Route {
                        url_path: format!("/height-to-{url_path}"),
                        file_path,
                        values_type,
                        serialization,
                    },
                );
            } else if last == "last" {
                routes.last.insert(
                    map_key,
                    Route {
                        url_path: format!("/{url_path}"),
                        file_path,
                        values_type,
                        serialization,
                    },
                );
            } else {
                // Unknown route family: dump the offending entry and abort.
                dbg!(&key, value, &last);
                panic!("")
            }
        });
        routes
    }
    /// Exports `grouped_keys_to_url_path.json`: every dataset key mapped
    /// to its relative URL path, grouped per family. Export errors are
    /// deliberately ignored (`let _`).
    pub fn generate_grouped_keys_to_url_path_file(&self) {
        let transform = |map: &HashMap<String, Route>| -> BTreeMap<String, String> {
            map.iter()
                .map(|(key, route)| (key.to_owned(), route.url_path.to_owned()))
                .collect()
        };
        let date_paths = transform(&self.date);
        let height_paths = transform(&self.height);
        let last_paths = transform(&self.last);
        let paths = Paths(Grouped {
            date: date_paths,
            height: height_paths,
            last: last_paths,
        });
        let _ = Json::export(
            &format!("{DATASETS_PATH}/grouped_keys_to_url_path.json"),
            &paths,
        );
    }
    /// Same index as above but with absolute URLs: `http` for local hosts
    /// (`0.0.0.0` / `localhost`), `https` otherwise.
    pub fn to_full_paths(&self, host: String) -> Paths {
        let url = {
            let scheme = if host.contains("0.0.0.0") || host.contains("localhost") {
                "http"
            } else {
                "https"
            };
            format!("{scheme}://{host}")
        };
        let transform = |map: &HashMap<String, Route>| -> BTreeMap<String, String> {
            map.iter()
                .map(|(key, route)| {
                    (
                        key.to_owned(),
                        format!("{url}{}", route.url_path.to_owned()),
                    )
                })
                .collect()
        };
        let date_paths = transform(&self.date);
        let height_paths = transform(&self.height);
        let last_paths = transform(&self.last);
        Paths(Grouped {
            date: date_paths,
            height: height_paths,
            last: last_paths,
        })
    }
}