mirror of
https://github.com/bitcoinresearchkit/brk.git
synced 2026-04-30 01:20:00 -07:00
bindex: contained fjall code
This commit is contained in:
376
server/src/api/handlers/dataset.rs
Normal file
376
server/src/api/handlers/dataset.rs
Normal file
@@ -0,0 +1,376 @@
|
||||
use std::{fmt::Debug, path::PathBuf, time::Instant};
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::{Path, Query, State},
|
||||
http::HeaderMap,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use bincode::Decode;
|
||||
use chrono::{DateTime, Utc};
|
||||
use color_eyre::eyre::{eyre, ContextCompat};
|
||||
use reqwest::StatusCode;
|
||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
server::{
|
||||
api::{
|
||||
structs::{
|
||||
ChunkMetadata, DatasetRange, DatasetRangeChunk, Extension, Kind, Route, Routes,
|
||||
},
|
||||
API_URL_PREFIX,
|
||||
},
|
||||
header_map::{HeaderMapExtended, Modified},
|
||||
log_result,
|
||||
response::ResponseExtended,
|
||||
AppState,
|
||||
},
|
||||
structs::{
|
||||
Date, GenericMap, Height, HeightMapChunkId, MapChunkId, MapKey, MapSerialized, MapValue,
|
||||
SerializedBTreeMap, SerializedDateMap, SerializedTimeMap, SerializedVec, Timestamp, OHLC,
|
||||
},
|
||||
};
|
||||
|
||||
/// Query-string parameters accepted by `dataset_handler`.
#[derive(Deserialize)]
pub struct DatasetParams {
    // Zero-based chunk index to fetch; mutually exclusive with `all`.
    pub chunk: Option<usize>,
    // When present, requests the whole dataset instead of a single chunk.
    pub all: Option<bool>,
    // Dataset kind selector; only the first letter is significant
    // (see `Kind::try_from`): "date", "height", "timestamp" or "last".
    pub kind: String,
}
|
||||
|
||||
pub async fn dataset_handler(
|
||||
headers: HeaderMap,
|
||||
path: Path<String>,
|
||||
query: Query<DatasetParams>,
|
||||
State(app_state): State<AppState>,
|
||||
) -> Response {
|
||||
let instant = Instant::now();
|
||||
|
||||
let ser_path = format!(
|
||||
"{API_URL_PREFIX}/{}?kind={}{}{}",
|
||||
path.0,
|
||||
query.kind,
|
||||
query
|
||||
.chunk
|
||||
.map_or("".to_string(), |chunk| format!("&chunk={chunk}")),
|
||||
query
|
||||
.all
|
||||
.map_or("".to_string(), |all| format!("&all={all}"))
|
||||
);
|
||||
|
||||
match result_handler(headers, &path, &query, app_state) {
|
||||
Ok(response) => {
|
||||
log_result(response.status(), &ser_path, instant);
|
||||
response
|
||||
}
|
||||
Err(error) => {
|
||||
let mut response =
|
||||
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
|
||||
log_result(response.status(), &ser_path, instant);
|
||||
response.headers_mut().insert_cors();
|
||||
response
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn result_handler(
|
||||
headers: HeaderMap,
|
||||
Path(path): &Path<String>,
|
||||
query: &Query<DatasetParams>,
|
||||
AppState { routes, .. }: AppState,
|
||||
) -> color_eyre::Result<Response> {
|
||||
let path_buf = PathBuf::from(&path);
|
||||
let id = path_buf.file_stem().unwrap().to_str().unwrap();
|
||||
let ext = Extension::from(&path_buf);
|
||||
|
||||
let route = routes.get(id);
|
||||
if route.is_none() {
|
||||
return Err(eyre!("Wrong path"));
|
||||
}
|
||||
let route = route.unwrap();
|
||||
|
||||
let type_name = route.type_name.as_str();
|
||||
Ok(match type_name {
|
||||
"u8" => typed_handler::<u8>(headers, id, ext, query, route, &routes)?,
|
||||
"u16" => typed_handler::<u16>(headers, id, ext, query, route, &routes)?,
|
||||
"u32" => typed_handler::<u32>(headers, id, ext, query, route, &routes)?,
|
||||
"u64" => typed_handler::<u64>(headers, id, ext, query, route, &routes)?,
|
||||
"usize" => typed_handler::<usize>(headers, id, ext, query, route, &routes)?,
|
||||
"f32" => typed_handler::<f32>(headers, id, ext, query, route, &routes)?,
|
||||
"f64" => typed_handler::<f64>(headers, id, ext, query, route, &routes)?,
|
||||
"OHLC" => typed_handler::<OHLC>(headers, id, ext, query, route, &routes)?,
|
||||
"Date" => typed_handler::<Date>(headers, id, ext, query, route, &routes)?,
|
||||
"Height" => typed_handler::<Height>(headers, id, ext, query, route, &routes)?,
|
||||
"Timestamp" => typed_handler::<Timestamp>(headers, id, ext, query, route, &routes)?,
|
||||
_ => panic!("Incompatible type: {type_name}"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Serves one dataset of value type `T` according to the requested `kind`:
/// the scalar last value, a date-keyed map, a height-keyed map, or a
/// height-keyed map re-keyed by block timestamp.
///
/// On success the response carries CORS and Last-Modified headers plus a
/// content type derived from the optional extension (CSV download or JSON).
fn typed_handler<T>(
    headers: HeaderMap,
    id: &str,
    ext: Option<Extension>,
    query: &Query<DatasetParams>,
    route: &Route,
    routes: &Routes,
) -> color_eyre::Result<Response>
where
    T: Serialize + Debug + DeserializeOwned + Decode + MapValue,
{
    let kind = Kind::try_from(&query.kind)?;
    // The route records which kinds this dataset can serve.
    if !route.list.contains(&kind) {
        return Err(eyre!("{kind:?} not supported for this dataset"));
    }

    let range = DatasetRange::try_from(query)?;

    let (mut response, date_modified) = match kind {
        Kind::Last => {
            // Single scalar stored in the route's "last" file.
            let last_value: T = route.serialization.import(&route.path.join("last"))?;
            // NOTE(review): this early return skips the CORS / Last-Modified /
            // content-type headers added below for the other kinds — confirm
            // whether that asymmetry is intentional.
            return Ok(axum::response::Json(last_value).into_response());
        }
        Kind::Date => match read_serialized::<Date, T, _, SerializedDateMap<T>>(
            id, &headers, route, &range, query,
        )? {
            ReadSerialized::DatasetAndDate((dataset, date, chunk_meta)) => {
                (serialized_to_response(dataset, id, chunk_meta, ext), date)
            }
            ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
            ReadSerialized::_Phantom(_) => unreachable!(),
        },
        Kind::Height => match read_serialized::<Height, T, _, SerializedVec<T>>(
            id, &headers, route, &range, query,
        )? {
            ReadSerialized::DatasetAndDate((dataset, date, chunk_meta)) => (
                serialized_to_response::<Height, T, _, SerializedVec<T>>(
                    dataset, id, chunk_meta, ext,
                ),
                date,
            ),
            ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
            ReadSerialized::_Phantom(_) => unreachable!(),
        },
        Kind::Timestamp => {
            // Load the requested height-indexed dataset…
            let (dataset, date, chunk_meta) = match read_serialized::<Height, T, _, SerializedVec<T>>(
                id, &headers, route, &range, query,
            )? {
                ReadSerialized::DatasetAndDate(tuple) => tuple,
                ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
                ReadSerialized::_Phantom(_) => unreachable!(),
            };

            // …and the matching height-indexed block timestamps, used to
            // re-key the values.
            let (timestamp_dataset, _, _) =
                match read_serialized::<Height, Timestamp, _, SerializedVec<Timestamp>>(
                    "timestamp",
                    &headers,
                    routes.get("timestamp").unwrap(),
                    &range,
                    query,
                )? {
                    ReadSerialized::DatasetAndDate(tuple) => tuple,
                    ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
                    ReadSerialized::_Phantom(_) => unreachable!(),
                };

            let mut serialized_timemap: SerializedTimeMap<T> = SerializedBTreeMap::default();

            // Zip values with timestamps by position; heights missing a
            // timestamp fall back to "now".
            dataset
                .map
                .into_iter()
                .enumerate()
                .for_each(|(index, value)| {
                    serialized_timemap.map.insert(
                        timestamp_dataset
                            .get_index(index)
                            .cloned()
                            .unwrap_or(Timestamp::now()),
                        value,
                    );
                });

            (
                serialized_to_response::<Timestamp, T, HeightMapChunkId, SerializedTimeMap<T>>(
                    serialized_timemap,
                    id,
                    chunk_meta,
                    ext,
                ),
                date,
            )

            // let m = read_serialized::<Height, T, _, SerializedVec<T>>(
            //     id, &headers, route, &range, query,
            // )?;
            // let t = read_serialized::<Height, Timestamp, _, SerializedVec<Timestamp>>(
            //     "timestamp",
            //     &headers,
            //     routes.get("timestamp").unwrap(),
            //     &range,
            //     query,
            // );
            // t
        }
    };

    let headers = response.headers_mut();

    headers.insert_cors();
    headers.insert_last_modified(date_modified);

    // Explicit extension means a file download; otherwise serve plain JSON.
    match ext {
        Some(extension) => {
            headers.insert_content_disposition_attachment();
            match extension {
                Extension::CSV => headers.insert_content_type_text_csv(),
                Extension::JSON => headers.insert_content_type_application_json(),
            }
        }
        _ => headers.insert_content_type_application_json(),
    }

    Ok(response)
}
|
||||
|
||||
fn serialized_to_response<Key, Value, ChunkId, Serialized>(
|
||||
dataset: Serialized,
|
||||
id: &str,
|
||||
chunk_meta: Option<ChunkMetadata>,
|
||||
ext: Option<Extension>,
|
||||
) -> Response<Body>
|
||||
where
|
||||
Key: MapKey<ChunkId>,
|
||||
Value: MapValue,
|
||||
ChunkId: MapChunkId,
|
||||
Serialized: MapSerialized<Key, Value, ChunkId>,
|
||||
{
|
||||
if ext == Some(Extension::CSV) {
|
||||
dataset.to_csv(id).into_response()
|
||||
} else if let Some(chunk) = chunk_meta {
|
||||
axum::response::Json(SerializedMapChunk {
|
||||
chunk,
|
||||
map: dataset.map(),
|
||||
version: dataset.version(),
|
||||
})
|
||||
.into_response()
|
||||
} else {
|
||||
axum::response::Json(dataset).into_response()
|
||||
}
|
||||
}
|
||||
|
||||
/// Outcome of `read_serialized`: either the loaded dataset or a signal that
/// the client's cached copy is still current.
enum ReadSerialized<Key, Value, ChunkId, Serialized>
where
    Key: MapKey<ChunkId>,
    Value: MapValue,
    ChunkId: MapChunkId,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    // Dataset, its last-modified time, and chunk pagination metadata
    // (None when the whole dataset was requested).
    DatasetAndDate((Serialized, DateTime<Utc>, Option<ChunkMetadata>)),
    // The If-Modified-Since precondition matched; respond 304.
    NotModified,
    // Never constructed: only anchors the otherwise-unused type parameters.
    // NOTE(review): `PhantomData<(Key, Value, ChunkId)>` would express this
    // more idiomatically.
    _Phantom((Key, Value, ChunkId)),
}
|
||||
|
||||
/// Loads a dataset (one chunk or all of it) from disk, honoring the client's
/// If-Modified-Since header so unchanged data can be answered with a 304.
///
/// For chunked requests, also builds `ChunkMetadata` with absolute URLs to
/// the previous/next chunks when those chunk files exist.
///
/// # Errors
/// Fails when the requested chunk is missing, when the dataset directory is
/// empty, or when file metadata / deserialization fails.
fn read_serialized<Key, Value, ChunkId, Serialized>(
    id: &str,
    headers: &HeaderMap,
    route: &Route,
    range: &DatasetRange,
    query: &Query<DatasetParams>,
) -> color_eyre::Result<ReadSerialized<Key, Value, ChunkId, Serialized>>
where
    Key: MapKey<ChunkId>,
    Value: MapValue,
    ChunkId: MapChunkId,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    // Datasets live in a per-key-type subfolder (e.g. ".../date", ".../height").
    let folder_path = route.path.join(Key::map_name());
    let serialization = &route.serialization;

    let date_modified;

    // Map of chunk id -> chunk file path for this dataset directory.
    let datasets =
        GenericMap::<Key, Value, ChunkId, Serialized>::_read_dir(&folder_path, serialization);

    let mut chunk_meta = None;

    let dataset = if let DatasetRange::Chunk(range_chunk) = range {
        let chunk_id = match range_chunk {
            // "Last" means the chunk with the highest id currently on disk.
            DatasetRangeChunk::Last => {
                *datasets
                    .last_key_value()
                    .context("Last tuple of dataset directory")?
                    .0
            }
            DatasetRangeChunk::Chunk(chunk) => ChunkId::from_usize(*chunk),
        };

        let chunk_path = datasets.get(&chunk_id);
        if chunk_path.is_none() {
            return Err(eyre!("Couldn't find chunk"));
        }
        let chunk_path = chunk_path.unwrap();

        // Short-circuit with 304 when the chunk file hasn't changed since
        // the client's cached copy.
        let (modified, date) = headers.check_if_modified_since(chunk_path)?;
        if modified == Modified::NotModifiedSince {
            return Ok(ReadSerialized::NotModified);
        }
        date_modified = date;

        // Builds an absolute URL for a neighboring chunk, but only if that
        // chunk actually exists on disk.
        let to_url = |chunk: Option<ChunkId>| {
            chunk.and_then(|chunk| {
                datasets.contains_key(&chunk).then(|| {
                    let scheme = headers.get_scheme();
                    let host = headers.get_host();
                    format!(
                        "{scheme}://{host}/api/{id}?kind={}&chunk={}",
                        query.kind,
                        chunk.to_usize()
                    )
                })
            })
        };

        chunk_meta.replace(ChunkMetadata {
            id: chunk_id.to_usize(),
            next: to_url(chunk_id.next()),
            previous: to_url(chunk_id.previous()),
        });

        serialization.import::<Serialized>(chunk_path)?
    } else {
        // Whole-dataset request: freshness is judged against the most
        // recently modified chunk file.
        // NOTE(review): `metadata().unwrap()` here panics if a chunk file is
        // deleted between the directory scan and this read — confirm that
        // race is acceptable.
        let newest_file = datasets
            .values()
            .max_by(|a, b| {
                a.metadata()
                    .unwrap()
                    .modified()
                    .unwrap()
                    .cmp(&b.metadata().unwrap().modified().unwrap())
            })
            .context("Expect to find newest file")?;

        let (modified, date) = headers.check_if_modified_since(newest_file)?;
        if modified == Modified::NotModifiedSince {
            return Ok(ReadSerialized::NotModified);
        }

        date_modified = date;

        Serialized::import_all(&folder_path, serialization)
    };

    Ok(ReadSerialized::DatasetAndDate((
        dataset,
        date_modified,
        chunk_meta,
    )))
}
|
||||
|
||||
/// JSON envelope for a single dataset chunk: the data plus its version and
/// pagination links.
#[derive(Serialize)]
struct SerializedMapChunk<T>
where
    T: Serialize,
{
    // Dataset format version, taken from the serialized map.
    version: u32,
    // Chunk id plus previous/next chunk URLs.
    chunk: ChunkMetadata,
    // The chunk's key -> value data.
    map: T,
}
|
||||
13
server/src/api/handlers/last_values.rs
Normal file
13
server/src/api/handlers/last_values.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use axum::{
|
||||
extract::State,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{io::Json, server::AppState};
|
||||
|
||||
pub async fn last_values_handler(State(app_state): State<AppState>) -> Response {
|
||||
let values = Json::import::<Value>(&app_state.config.path_datasets_last_values()).unwrap();
|
||||
let values = axum::Json(values);
|
||||
values.into_response()
|
||||
}
|
||||
5
server/src/api/handlers/mod.rs
Normal file
5
server/src/api/handlers/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
mod dataset;
|
||||
mod last_values;
|
||||
|
||||
pub use dataset::*;
|
||||
pub use last_values::*;
|
||||
20
server/src/api/mod.rs
Normal file
20
server/src/api/mod.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use axum::{routing::get, Router};
|
||||
use handlers::{dataset_handler, last_values_handler};
|
||||
|
||||
use super::AppState;
|
||||
|
||||
mod handlers;
|
||||
pub mod structs;
|
||||
|
||||
pub const API_URL_PREFIX: &str = "/api";
|
||||
|
||||
/// Extension trait that mounts the JSON API endpoints on a router.
pub trait ApiRoutes {
    // Returns the router with `/api/last` and `/api/*path` registered.
    fn add_api_routes(self) -> Self;
}
|
||||
|
||||
impl ApiRoutes for Router<AppState> {
|
||||
fn add_api_routes(self) -> Self {
|
||||
self.route(&format!("{API_URL_PREFIX}/last"), get(last_values_handler))
|
||||
.route(&format!("{API_URL_PREFIX}/*path"), get(dataset_handler))
|
||||
}
|
||||
}
|
||||
8
server/src/api/structs/chunk_metadata.rs
Normal file
8
server/src/api/structs/chunk_metadata.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ChunkMetadata {
|
||||
pub id: usize,
|
||||
pub previous: Option<String>,
|
||||
pub next: Option<String>,
|
||||
}
|
||||
34
server/src/api/structs/extension.rs
Normal file
34
server/src/api/structs/extension.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use std::path::Path;
|
||||
|
||||
/// Download format requested via the URL's file extension.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum Extension {
    #[allow(clippy::upper_case_acronyms)]
    CSV,
    #[allow(clippy::upper_case_acronyms)]
    JSON,
}

impl Extension {
    /// Detects a supported extension from `path`.
    ///
    /// Returns `None` when the path has no extension, the extension is not
    /// valid UTF-8 (the previous `to_str().unwrap()` panicked here), or it
    /// is not one of the supported formats.
    pub fn from(path: &Path) -> Option<Self> {
        match path.extension()?.to_str()? {
            "csv" => Some(Self::CSV),
            "json" => Some(Self::JSON),
            _ => None,
        }
    }

    /// The canonical lowercase extension string.
    pub fn to_str(&self) -> &str {
        match self {
            Extension::CSV => "csv",
            Extension::JSON => "json",
        }
    }
}
|
||||
52
server/src/api/structs/kind.rs
Normal file
52
server/src/api/structs/kind.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use color_eyre::eyre::{eyre, ContextCompat};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::structs::{AnyMap, Date, Height, MapKey};
|
||||
|
||||
/// How a dataset is keyed / which view of it is requested.
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub enum Kind {
    // Keyed by calendar date.
    Date,
    // Keyed by block height.
    Height,
    // Height data re-keyed by block timestamp.
    Timestamp,
    // Only the most recent scalar value.
    Last,
}
|
||||
|
||||
impl TryFrom<&String> for Kind {
|
||||
type Error = color_eyre::Report;
|
||||
|
||||
fn try_from(str: &String) -> Result<Self, Self::Error> {
|
||||
Ok(
|
||||
match str
|
||||
.to_lowercase()
|
||||
.chars()
|
||||
.next()
|
||||
.context("Expect kind to have first letter")?
|
||||
{
|
||||
'd' => Self::Date,
|
||||
'h' => Self::Height,
|
||||
't' => Self::Timestamp,
|
||||
'l' => Self::Last,
|
||||
_ => return Err(eyre!("Bad kind")),
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Derives the set of kinds a map can serve from its key type and whether it
/// exposes a last value.
impl From<&(dyn AnyMap + Send + Sync)> for BTreeSet<Kind> {
    fn from(map: &(dyn AnyMap + Send + Sync)) -> Self {
        let mut s = Self::new();
        if map.key_name() == Date::map_name() {
            s.insert(Kind::Date);
        }
        // Height-keyed maps can also be served re-keyed by block timestamp.
        if map.key_name() == Height::map_name() {
            s.insert(Kind::Height);
            s.insert(Kind::Timestamp);
        }
        if map.last_value().is_some() {
            s.insert(Kind::Last);
        }
        s
    }
}
|
||||
13
server/src/api/structs/mod.rs
Normal file
13
server/src/api/structs/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
mod chunk_metadata;
|
||||
mod extension;
|
||||
mod kind;
|
||||
mod range;
|
||||
mod route;
|
||||
mod routes;
|
||||
|
||||
pub use chunk_metadata::*;
|
||||
pub use extension::*;
|
||||
pub use kind::*;
|
||||
pub use range::*;
|
||||
pub use route::*;
|
||||
pub use routes::*;
|
||||
32
server/src/api/structs/range.rs
Normal file
32
server/src/api/structs/range.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use axum::extract::Query;
|
||||
use color_eyre::eyre::eyre;
|
||||
|
||||
use crate::server::api::handlers::DatasetParams;
|
||||
|
||||
/// How much of a dataset the client asked for.
pub enum DatasetRange {
    // The entire dataset (`?all=...`).
    All,
    // A single chunk — either a specific index or the latest one.
    Chunk(DatasetRangeChunk),
}
|
||||
|
||||
impl TryFrom<&Query<DatasetParams>> for DatasetRange {
|
||||
type Error = color_eyre::Report;
|
||||
|
||||
fn try_from(query: &Query<DatasetParams>) -> Result<Self, Self::Error> {
|
||||
if let Some(chunk) = query.chunk {
|
||||
if query.all.is_some() {
|
||||
Err(eyre!("chunk and all are exclusive"))
|
||||
} else {
|
||||
Ok(Self::Chunk(DatasetRangeChunk::Chunk(chunk)))
|
||||
}
|
||||
} else if query.all.is_some() {
|
||||
Ok(Self::All)
|
||||
} else {
|
||||
Ok(Self::Chunk(DatasetRangeChunk::Last))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Which chunk of a dataset to serve.
pub enum DatasetRangeChunk {
    // A specific zero-based chunk index.
    Chunk(usize),
    // The most recent chunk on disk.
    Last,
}
|
||||
33
server/src/api/structs/route.rs
Normal file
33
server/src/api/structs/route.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use std::{collections::BTreeSet, path::PathBuf};
|
||||
|
||||
use crate::{io::Serialization, structs::AnyMap};
|
||||
|
||||
use super::Kind;
|
||||
|
||||
/// Everything needed to serve one dataset id over the API.
#[derive(Debug, Clone)]
pub struct Route {
    // The dataset value type's short name (e.g. "u64", "OHLC"), used for
    // generic dispatch in `result_handler`.
    pub type_name: String,
    // The kinds (Date/Height/Timestamp/Last) this dataset supports.
    pub list: BTreeSet<Kind>,
    // Parent directory holding the dataset's serialized files.
    pub path: PathBuf,
    // On-disk serialization format shared by all of this dataset's files.
    pub serialization: Serialization,
}
|
||||
|
||||
impl Route {
|
||||
pub fn update(&mut self, map: &(dyn AnyMap + Send + Sync)) {
|
||||
self.list.append(&mut BTreeSet::from(map));
|
||||
if self.serialization != map.serialization() {
|
||||
panic!("route.upate() different serialization")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds the initial route for a map; later maps of the same dataset are
/// merged in via `Route::update`.
impl From<&(dyn AnyMap + Send + Sync)> for Route {
    fn from(map: &(dyn AnyMap + Send + Sync)) -> Self {
        Self {
            list: BTreeSet::from(map),
            path: map.path_parent().to_owned(),
            // Keep only the last path segment of the fully-qualified type
            // name ("crate::x::OHLC" -> "OHLC"). `split` always yields at
            // least one item, so this `unwrap` cannot fail.
            type_name: map.type_name().split("::").last().unwrap().to_owned(),
            serialization: map.serialization(),
        }
    }
}
|
||||
54
server/src/api/structs/routes.rs
Normal file
54
server/src/api/structs/routes.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
|
||||
use crate::{
|
||||
parser::{AnyDatasets, Datasets},
|
||||
structs::Config,
|
||||
};
|
||||
|
||||
use super::Route;
|
||||
|
||||
/// Map from dataset id to its serving route.
#[derive(Debug, Clone, Default, Deref, DerefMut)]
pub struct Routes(BTreeMap<String, Route>);

// Output directory for generated TypeScript type definitions.
// NOTE(review): only referenced by the commented-out body of
// `generate_dts_file` below.
const WEBSITE_TYPES_PATH: &str = "../website/scripts/types";
|
||||
|
||||
impl Routes {
    /// Builds the route table by walking every map of every dataset and
    /// merging maps that share an id into a single route.
    pub fn build(datasets: &Datasets, config: &Config) -> Self {
        datasets
            .to_any_dataset_vec()
            .into_iter()
            .flat_map(|dataset| dataset.to_all_map_vec())
            .fold(Self::default(), |mut routes, map| {
                routes
                    .entry(map.id(config))
                    .or_insert_with(|| Route::from(map))
                    .update(map);
                routes
            })
    }

    /// Writes TypeScript path-type definitions for the website.
    /// Currently a no-op: the implementation is disabled below pending a
    /// rewrite against the new route structure.
    pub fn generate_dts_file(&self) {
        // let map_to_type = |name: &str, map: &HashMap<String, Route>| -> String {
        //     let paths = map
        //         .values()
        //         .map(|route| format!("\"{}\"", route.url_path))
        //         .join(" | ");

        //     format!("export type {}Path = {};\n", name, paths)
        // };

        // let date_type = map_to_type("Date", &self.date);

        // let height_type = map_to_type("Height", &self.height);

        // let last_type = map_to_type("Last", &self.last);

        // fs::write(
        //     format!("{WEBSITE_TYPES_PATH}/paths.d.ts"),
        //     format!("// This file is auto generated by the server\n// Manual changes are forbidden\n\n{date_type}\n{height_type}\n{last_type}"),
        // )
        // .unwrap();
    }
}
|
||||
219
server/src/header_map.rs
Normal file
219
server/src/header_map.rs
Normal file
@@ -0,0 +1,219 @@
|
||||
use std::path::Path;
|
||||
|
||||
use axum::http::{header, HeaderMap};
|
||||
use chrono::{DateTime, Timelike, Utc};
|
||||
use log::info;
|
||||
use reqwest::header::{HOST, IF_MODIFIED_SINCE};
|
||||
|
||||
// Seconds a cached response may keep being served when the origin errors
// (~347 days).
const STALE_IF_ERROR: u64 = 30_000_000; // 1 Year ish
// RFC 7231 HTTP-date layout used by Last-Modified / If-Modified-Since.
const MODIFIED_SINCE_FORMAT: &str = "%a, %d %b %Y %H:%M:%S GMT";

/// Result of comparing a file's mtime against the client's
/// If-Modified-Since header.
#[derive(PartialEq, Eq)]
pub enum Modified {
    // File changed since the client's cached copy — send a full response.
    ModifiedSince,
    // Cached copy is current — a 304 is appropriate.
    NotModifiedSince,
}
|
||||
|
||||
/// Convenience helpers over `axum::http::HeaderMap` for reading request
/// headers and writing common response headers.
pub trait HeaderMapExtended {
    // "http" for local hosts, "https" otherwise.
    fn get_scheme(&self) -> &str;
    // Raw Host header value.
    fn get_host(&self) -> &str;
    fn check_if_host_is_any_local(&self) -> bool;
    fn check_if_host_is_0000(&self) -> bool;
    fn check_if_host_is_localhost(&self) -> bool;

    // Adds permissive (wildcard) CORS headers.
    fn insert_cors(&mut self);

    // Parses the If-Modified-Since request header, if present and valid.
    fn get_if_modified_since(&self) -> Option<DateTime<Utc>>;
    // Compares `path`'s mtime to If-Modified-Since; returns the verdict and
    // the (second-truncated) mtime.
    fn check_if_modified_since(&self, path: &Path)
        -> color_eyre::Result<(Modified, DateTime<Utc>)>;

    fn insert_cache_control_immutable(&mut self);
    #[allow(unused)]
    fn insert_cache_control_revalidate(&mut self, max_age: u64, stale_while_revalidate: u64);
    fn insert_last_modified(&mut self, date: DateTime<Utc>);

    fn insert_content_disposition_attachment(&mut self);

    // Sets Content-Type from a file extension; panics on unsupported ones.
    fn insert_content_type(&mut self, path: &Path);
    fn insert_content_type_image_icon(&mut self);
    fn insert_content_type_image_jpeg(&mut self);
    fn insert_content_type_image_png(&mut self);
    fn insert_content_type_application_javascript(&mut self);
    fn insert_content_type_application_json(&mut self);
    fn insert_content_type_application_manifest_json(&mut self);
    fn insert_content_type_application_pdf(&mut self);
    fn insert_content_type_text_css(&mut self);
    fn insert_content_type_text_csv(&mut self);
    fn insert_content_type_text_html(&mut self);
    fn insert_content_type_text_plain(&mut self);
    fn insert_content_type_font_woff2(&mut self);
}
|
||||
|
||||
impl HeaderMapExtended for HeaderMap {
    fn get_scheme(&self) -> &str {
        // Local hosts are served over plain HTTP; everything else is
        // assumed to sit behind TLS.
        if self.check_if_host_is_any_local() {
            "http"
        } else {
            "https"
        }
    }

    fn get_host(&self) -> &str {
        // NOTE(review): indexing panics if the Host header is missing or not
        // valid UTF-8 — confirm requests always arrive via HTTP/1.1 where
        // Host is mandatory.
        self[HOST].to_str().unwrap()
    }

    fn check_if_host_is_any_local(&self) -> bool {
        self.check_if_host_is_localhost() || self.check_if_host_is_0000()
    }

    fn check_if_host_is_0000(&self) -> bool {
        self.get_host().contains("0.0.0.0")
    }

    fn check_if_host_is_localhost(&self) -> bool {
        self.get_host().contains("localhost")
    }

    fn insert_cors(&mut self) {
        // Fully permissive CORS: any origin, any request headers.
        self.insert(header::ACCESS_CONTROL_ALLOW_ORIGIN, "*".parse().unwrap());
        self.insert(header::ACCESS_CONTROL_ALLOW_HEADERS, "*".parse().unwrap());
    }

    fn insert_cache_control_immutable(&mut self) {
        // One-week client cache marked immutable; stale copies may be served
        // on upstream errors for STALE_IF_ERROR seconds.
        self.insert(
            header::CACHE_CONTROL,
            format!("public, max-age=604800, immutable, stale-if-error={STALE_IF_ERROR}")
                .parse()
                .unwrap(),
        );
    }

    fn insert_content_disposition_attachment(&mut self) {
        // Forces the browser to download instead of rendering inline.
        self.insert(header::CONTENT_DISPOSITION, "attachment".parse().unwrap());
    }

    fn insert_cache_control_revalidate(&mut self, max_age: u64, stale_while_revalidate: u64) {
        self.insert(
            header::CACHE_CONTROL,
            format!(
                "public, max-age={max_age}, stale-while-revalidate={stale_while_revalidate}, stale-if-error={STALE_IF_ERROR}")
            .parse()
            .unwrap(),
        );
    }

    fn insert_last_modified(&mut self, date: DateTime<Utc>) {
        let formatted = date.format(MODIFIED_SINCE_FORMAT).to_string();

        self.insert(header::LAST_MODIFIED, formatted.parse().unwrap());
    }

    fn check_if_modified_since(
        &self,
        path: &Path,
    ) -> color_eyre::Result<(Modified, DateTime<Utc>)> {
        let time = path.metadata()?.modified()?;
        let date: DateTime<Utc> = time.into();
        // HTTP dates carry second precision only, so truncate sub-second
        // digits before comparing with the client-sent value.
        let date = date.with_nanosecond(0).unwrap();

        if let Some(if_modified_since) = self.get_if_modified_since() {
            if if_modified_since == date {
                return Ok((Modified::NotModifiedSince, date));
            }
        }

        Ok((Modified::ModifiedSince, date))
    }

    fn get_if_modified_since(&self) -> Option<DateTime<Utc>> {
        if let Some(modified_since) = self.get(IF_MODIFIED_SINCE) {
            if let Ok(modified_since) = modified_since.to_str() {
                // The HTTP-date format has no numeric offset, so append an
                // explicit UTC offset to satisfy `parse_from_str`.
                let date = DateTime::parse_from_str(
                    &format!("{modified_since} +00:00"),
                    &format!("{MODIFIED_SINCE_FORMAT} %z"),
                );

                if let Ok(x) = date {
                    return Some(x.to_utc());
                }
            }
        }

        None
    }

    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
    fn insert_content_type(&mut self, path: &Path) {
        // NOTE(review): panics on a missing/unknown extension — only call
        // with paths to bundled website assets.
        match path.extension().unwrap().to_str().unwrap() {
            "js" => self.insert_content_type_application_javascript(),
            "json" => self.insert_content_type_application_json(),
            "html" => self.insert_content_type_text_html(),
            "css" => self.insert_content_type_text_css(),
            "toml" | "txt" => self.insert_content_type_text_plain(),
            "pdf" => self.insert_content_type_application_pdf(),
            "woff2" => self.insert_content_type_font_woff2(),
            "ico" => self.insert_content_type_image_icon(),
            "jpg" | "jpeg" => self.insert_content_type_image_jpeg(),
            "png" => self.insert_content_type_image_png(),
            "webmanifest" => self.insert_content_type_application_manifest_json(),
            extension => {
                info!("Extension unsupported: {extension}");
                panic!()
            }
        }
    }

    fn insert_content_type_image_icon(&mut self) {
        self.insert(header::CONTENT_TYPE, "image/x-icon".parse().unwrap());
    }

    fn insert_content_type_image_jpeg(&mut self) {
        self.insert(header::CONTENT_TYPE, "image/jpeg".parse().unwrap());
    }

    fn insert_content_type_image_png(&mut self) {
        self.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
    }

    fn insert_content_type_application_javascript(&mut self) {
        self.insert(
            header::CONTENT_TYPE,
            "application/javascript".parse().unwrap(),
        );
    }

    fn insert_content_type_application_json(&mut self) {
        self.insert(header::CONTENT_TYPE, "application/json".parse().unwrap());
    }

    fn insert_content_type_application_manifest_json(&mut self) {
        self.insert(
            header::CONTENT_TYPE,
            "application/manifest+json".parse().unwrap(),
        );
    }

    fn insert_content_type_application_pdf(&mut self) {
        self.insert(header::CONTENT_TYPE, "application/pdf".parse().unwrap());
    }

    fn insert_content_type_text_css(&mut self) {
        self.insert(header::CONTENT_TYPE, "text/css".parse().unwrap());
    }

    fn insert_content_type_text_csv(&mut self) {
        self.insert(header::CONTENT_TYPE, "text/csv".parse().unwrap());
    }

    fn insert_content_type_text_html(&mut self) {
        self.insert(header::CONTENT_TYPE, "text/html".parse().unwrap());
    }

    fn insert_content_type_text_plain(&mut self) {
        self.insert(header::CONTENT_TYPE, "text/plain".parse().unwrap());
    }

    fn insert_content_type_font_woff2(&mut self) {
        self.insert(header::CONTENT_TYPE, "font/woff2".parse().unwrap());
    }
}
|
||||
74
server/src/lib.rs
Normal file
74
server/src/lib.rs
Normal file
@@ -0,0 +1,74 @@
|
||||
use std::{sync::Arc, time::Instant};
|
||||
|
||||
use api::{structs::Routes, ApiRoutes};
|
||||
use axum::{routing::get, serve, Router};
|
||||
use color_eyre::owo_colors::OwoColorize;
|
||||
use log::{error, info};
|
||||
use reqwest::StatusCode;
|
||||
use tokio::net::TcpListener;
|
||||
use tower_http::compression::CompressionLayer;
|
||||
use website::WebsiteRoutes;
|
||||
|
||||
use crate::structs::Config;
|
||||
|
||||
pub mod api;
|
||||
mod header_map;
|
||||
mod response;
|
||||
mod website;
|
||||
|
||||
/// Shared state handed to every axum handler.
#[derive(Clone)]
pub struct AppState {
    // Dataset id -> route table, built once at startup and shared read-only.
    routes: Arc<Routes>,
    // Server configuration (paths, etc.). Cloned per handler via `Clone`.
    config: Config,
}
|
||||
|
||||
pub async fn main(routes: Routes, config: Config) -> color_eyre::Result<()> {
|
||||
routes.generate_dts_file();
|
||||
|
||||
let state = AppState {
|
||||
routes: Arc::new(routes),
|
||||
config,
|
||||
};
|
||||
|
||||
let compression_layer = CompressionLayer::new()
|
||||
.br(true)
|
||||
.deflate(true)
|
||||
.gzip(true)
|
||||
.zstd(true);
|
||||
|
||||
let router = Router::new()
|
||||
.add_api_routes()
|
||||
.add_website_routes()
|
||||
.route("/version", get(env!("CARGO_PKG_VERSION")))
|
||||
.with_state(state)
|
||||
.layer(compression_layer);
|
||||
|
||||
let mut port = 3110;
|
||||
|
||||
let mut listener;
|
||||
loop {
|
||||
listener = TcpListener::bind(format!("0.0.0.0:{port}")).await;
|
||||
if listener.is_ok() {
|
||||
break;
|
||||
}
|
||||
port += 1;
|
||||
}
|
||||
|
||||
info!("Starting server on port {port}...");
|
||||
let listener = listener.unwrap();
|
||||
|
||||
serve(listener, router).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn log_result(code: StatusCode, path: &str, instant: Instant) {
|
||||
let time = format!("{}µs", instant.elapsed().as_micros());
|
||||
let time = time.bright_black();
|
||||
match code {
|
||||
StatusCode::INTERNAL_SERVER_ERROR => error!("{} {} {}", code.as_u16().red(), path, time),
|
||||
StatusCode::NOT_MODIFIED => info!("{} {} {}", code.as_u16().bright_black(), path, time),
|
||||
StatusCode::OK => info!("{} {} {}", code.as_u16().green(), path, time),
|
||||
_ => error!("{} {} {}", code.as_u16().red(), path, time),
|
||||
}
|
||||
}
|
||||
20
server/src/response.rs
Normal file
20
server/src/response.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use axum::{body::Body, http::Response, response::IntoResponse};
|
||||
use reqwest::StatusCode;
|
||||
|
||||
use super::header_map::HeaderMapExtended;
|
||||
|
||||
/// Constructors for common canned responses.
pub trait ResponseExtended
where
    Self: Sized,
{
    // An empty 304 Not Modified response with CORS headers set.
    fn new_not_modified() -> Self;
}
||||
|
||||
impl ResponseExtended for Response<Body> {
|
||||
fn new_not_modified() -> Response<Body> {
|
||||
let mut response = (StatusCode::NOT_MODIFIED, "").into_response();
|
||||
let headers = response.headers_mut();
|
||||
headers.insert_cors();
|
||||
response
|
||||
}
|
||||
}
|
||||
41
server/src/website/handlers/_minify.rs
Normal file
41
server/src/website/handlers/_minify.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
// Files are bigger than with SWC, to retest later
|
||||
|
||||
// Source: https://github.com/oxc-project/oxc/blob/main/crates/oxc_minifier/examples/minifier.rs
|
||||
|
||||
use std::{fs, path::Path};
|
||||
|
||||
use oxc::{
|
||||
allocator::Allocator,
|
||||
codegen::{CodeGenerator, CodegenOptions},
|
||||
minifier::{MinifierOptions, MinifierReturn},
|
||||
parser::{Parser, ParserReturn},
|
||||
span::SourceType,
|
||||
};
|
||||
|
||||
//
|
||||
pub fn minify_js(path: &Path) -> String {
|
||||
let allocator = Allocator::default();
|
||||
|
||||
let source_type = SourceType::from_path(path).unwrap();
|
||||
|
||||
let source_text = fs::read_to_string(path).unwrap();
|
||||
|
||||
let ParserReturn { mut program, .. } =
|
||||
Parser::new(&allocator, &source_text, source_type).parse();
|
||||
|
||||
let minifier = oxc::minifier::Minifier::new(MinifierOptions::default());
|
||||
|
||||
let MinifierReturn { mangler } = minifier.build(&allocator, &mut program);
|
||||
|
||||
CodeGenerator::new()
|
||||
.with_options(CodegenOptions {
|
||||
single_quote: false,
|
||||
minify: true,
|
||||
comments: false,
|
||||
annotation_comments: false,
|
||||
source_map_path: None,
|
||||
})
|
||||
.with_mangler(mangler)
|
||||
.build(&program)
|
||||
.code
|
||||
}
|
||||
144
server/src/website/handlers/file.rs
Normal file
144
server/src/website/handlers/file.rs
Normal file
@@ -0,0 +1,144 @@
|
||||
use std::{
|
||||
fs::{self},
|
||||
path::{Path, PathBuf},
|
||||
time::Instant,
|
||||
};
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract,
|
||||
http::HeaderMap,
|
||||
response::{IntoResponse, Response},
|
||||
};
|
||||
use log::{error, info};
|
||||
use reqwest::StatusCode;
|
||||
|
||||
use crate::server::{
|
||||
header_map::{HeaderMapExtended, Modified},
|
||||
log_result,
|
||||
response::ResponseExtended,
|
||||
};
|
||||
|
||||
use super::minify_js;
|
||||
|
||||
const WEBSITE_PATH: &str = "./src/website/";
|
||||
|
||||
/// Axum handler serving the static file named by the wildcard request path.
pub async fn file_handler(headers: HeaderMap, path: extract::Path<String>) -> Response {
    any_handler(headers, Some(path))
}
|
||||
|
||||
/// Axum handler serving the website entry point (`index.html`).
pub async fn index_handler(headers: HeaderMap) -> Response {
    any_handler(headers, None)
}
|
||||
|
||||
fn any_handler(headers: HeaderMap, path: Option<extract::Path<String>>) -> Response {
|
||||
let instant = Instant::now();
|
||||
|
||||
let response = if let Some(path) = path.as_ref() {
|
||||
let path = path.0.replace("..", "").replace("\\", "");
|
||||
|
||||
let mut path = str_to_path(&path);
|
||||
|
||||
if !path.exists() {
|
||||
if path.extension().is_some() {
|
||||
let mut response: Response<Body> = (
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"File doesn't exist".to_string(),
|
||||
)
|
||||
.into_response();
|
||||
|
||||
response.headers_mut().insert_cors();
|
||||
|
||||
return response;
|
||||
} else {
|
||||
path = str_to_path("index.html");
|
||||
}
|
||||
}
|
||||
|
||||
path_to_response(&headers, &path)
|
||||
} else {
|
||||
path_to_response(&headers, &str_to_path("index.html"))
|
||||
};
|
||||
|
||||
log_result(
|
||||
response.status(),
|
||||
&format!("/{}", path.map_or("".to_owned(), |p| p.0)),
|
||||
instant,
|
||||
);
|
||||
|
||||
response
|
||||
}
|
||||
|
||||
fn path_to_response(headers: &HeaderMap, path: &Path) -> Response {
|
||||
match _path_to_response(headers, path) {
|
||||
Ok(response) => response,
|
||||
Err(error) => {
|
||||
let mut response =
|
||||
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
|
||||
|
||||
response.headers_mut().insert_cors();
|
||||
|
||||
response
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn _path_to_response(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Response> {
|
||||
let (modified, date) = headers.check_if_modified_since(path)?;
|
||||
if modified == Modified::NotModifiedSince {
|
||||
return Ok(Response::new_not_modified());
|
||||
}
|
||||
|
||||
let mut response;
|
||||
|
||||
let is_localhost = headers.check_if_host_is_localhost();
|
||||
|
||||
if !is_localhost
|
||||
&& path.extension().unwrap_or_else(|| {
|
||||
dbg!(path);
|
||||
panic!();
|
||||
}) == "js"
|
||||
{
|
||||
let content = minify_js(path);
|
||||
|
||||
response = Response::new(content.into());
|
||||
} else {
|
||||
let content = fs::read(path).unwrap_or_else(|error| {
|
||||
error!("{error}");
|
||||
let path = path.to_str().unwrap();
|
||||
info!("Can't read file {path}");
|
||||
panic!("")
|
||||
});
|
||||
|
||||
response = Response::new(content.into());
|
||||
}
|
||||
|
||||
let headers = response.headers_mut();
|
||||
headers.insert_cors();
|
||||
headers.insert_content_type(path);
|
||||
|
||||
if !is_localhost {
|
||||
let serialized_path = path.to_str().unwrap();
|
||||
|
||||
if serialized_path.contains("fonts/")
|
||||
|| serialized_path.contains("assets/")
|
||||
|| serialized_path.contains("packages/")
|
||||
|| path.extension().is_some_and(|extension| {
|
||||
extension == "pdf"
|
||||
|| extension == "jpg"
|
||||
|| extension == "png"
|
||||
|| extension == "woff2"
|
||||
})
|
||||
{
|
||||
headers.insert_cache_control_immutable();
|
||||
}
|
||||
}
|
||||
|
||||
headers.insert_last_modified(date);
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
fn str_to_path(path: &str) -> PathBuf {
|
||||
PathBuf::from(&format!("{WEBSITE_PATH}{path}"))
|
||||
}
|
||||
29
server/src/website/handlers/minify.rs
Normal file
29
server/src/website/handlers/minify.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
// Simplified version of: https://github.com/swc-project/swc/blob/main/crates/swc/examples/minify.rs
|
||||
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use swc::{config::JsMinifyOptions, try_with_handler, JsMinifyExtras};
|
||||
use swc_common::{SourceMap, GLOBALS};
|
||||
|
||||
/// Minifies the JavaScript file at `path` with SWC and returns the minified code.
///
/// Panics (via `expect`/`unwrap`) if the file cannot be loaded or
/// minification reports an error.
pub fn minify_js(path: &Path) -> String {
    // SWC needs a shared SourceMap, and its thread-local GLOBALS must be set
    // for the duration of the compilation — hence the nested closures below.
    let cm = Arc::<SourceMap>::default();

    let c = swc::Compiler::new(cm.clone());

    let output = GLOBALS
        .set(&Default::default(), || {
            try_with_handler(cm.clone(), Default::default(), |handler| {
                let fm = cm.load_file(path).expect("failed to load file");

                // Default minify options; no extras.
                c.minify(
                    fm,
                    handler,
                    &JsMinifyOptions::default(),
                    JsMinifyExtras::default(),
                )
            })
        })
        .unwrap();

    output.code
}
|
||||
5
server/src/website/handlers/mod.rs
Normal file
5
server/src/website/handlers/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
mod file;
|
||||
mod minify;
|
||||
|
||||
pub use file::*;
|
||||
use minify::*;
|
||||
18
server/src/website/mod.rs
Normal file
18
server/src/website/mod.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use axum::{routing::get, Router};
|
||||
|
||||
mod handlers;
|
||||
|
||||
use handlers::{file_handler, index_handler};
|
||||
|
||||
use super::AppState;
|
||||
|
||||
/// Router extension that mounts the static-website routes.
pub trait WebsiteRoutes {
    /// Adds the catch-all file route and the index route; returns the router.
    fn add_website_routes(self) -> Self;
}
|
||||
|
||||
impl WebsiteRoutes for Router<AppState> {
|
||||
fn add_website_routes(self) -> Self {
|
||||
self.route("/*path", get(file_handler))
|
||||
.route("/", get(index_handler))
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user