mirror of https://github.com/bitcoinresearchkit/brk.git
synced 2026-04-30 01:20:00 -07:00
server: started
@@ -15,9 +15,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{
server::{
api::{
structs::{
ChunkMetadata, DatasetRange, DatasetRangeChunk, Extension, Kind, Route, Routes,
},
structs::{ChunkMetadata, DatasetRange, DatasetRangeChunk, Extension, Kind, Route, Routes},
API_URL_PREFIX,
},
header_map::{HeaderMapExtended, Modified},
@@ -26,8 +24,8 @@ use crate::{
AppState,
},
structs::{
Date, GenericMap, Height, HeightMapChunkId, MapChunkId, MapKey, MapSerialized, MapValue,
SerializedBTreeMap, SerializedDateMap, SerializedTimeMap, SerializedVec, Timestamp, OHLC,
Date, GenericMap, Height, HeightMapChunkId, MapChunkId, MapKey, MapSerialized, MapValue, SerializedBTreeMap,
SerializedDateMap, SerializedTimeMap, SerializedVec, Timestamp, OHLC,
},
};

@@ -50,12 +48,8 @@ pub async fn dataset_handler(
"{API_URL_PREFIX}/{}?kind={}{}{}",
path.0,
query.kind,
query
.chunk
.map_or("".to_string(), |chunk| format!("&chunk={chunk}")),
query
.all
.map_or("".to_string(), |all| format!("&all={all}"))
query.chunk.map_or("".to_string(), |chunk| format!("&chunk={chunk}")),
query.all.map_or("".to_string(), |all| format!("&all={all}"))
);

match result_handler(headers, &path, &query, app_state) {
@@ -64,8 +58,7 @@ pub async fn dataset_handler(
response
}
Err(error) => {
let mut response =
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
let mut response = (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
log_result(response.status(), &ser_path, instant);
response.headers_mut().insert_cors();
response
@@ -129,64 +122,49 @@ where
let last_value: T = route.serialization.import(&route.path.join("last"))?;
return Ok(axum::response::Json(last_value).into_response());
}
Kind::Date => match read_serialized::<Date, T, _, SerializedDateMap<T>>(
id, &headers, route, &range, query,
)? {
Kind::Date => match read_serialized::<Date, T, _, SerializedDateMap<T>>(id, &headers, route, &range, query)? {
ReadSerialized::DatasetAndDate((dataset, date, chunk_meta)) => {
(serialized_to_response(dataset, id, chunk_meta, ext), date)
}
ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
ReadSerialized::_Phantom(_) => unreachable!(),
},
Kind::Height => match read_serialized::<Height, T, _, SerializedVec<T>>(
id, &headers, route, &range, query,
)? {
Kind::Height => match read_serialized::<Height, T, _, SerializedVec<T>>(id, &headers, route, &range, query)? {
ReadSerialized::DatasetAndDate((dataset, date, chunk_meta)) => (
serialized_to_response::<Height, T, _, SerializedVec<T>>(
dataset, id, chunk_meta, ext,
),
serialized_to_response::<Height, T, _, SerializedVec<T>>(dataset, id, chunk_meta, ext),
date,
),
ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
ReadSerialized::_Phantom(_) => unreachable!(),
},
Kind::Timestamp => {
let (dataset, date, chunk_meta) = match read_serialized::<Height, T, _, SerializedVec<T>>(
id, &headers, route, &range, query,
let (dataset, date, chunk_meta) =
match read_serialized::<Height, T, _, SerializedVec<T>>(id, &headers, route, &range, query)? {
ReadSerialized::DatasetAndDate(tuple) => tuple,
ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
ReadSerialized::_Phantom(_) => unreachable!(),
};

let (timestamp_dataset, _, _) = match read_serialized::<Height, Timestamp, _, SerializedVec<Timestamp>>(
"timestamp",
&headers,
routes.get("timestamp").unwrap(),
&range,
query,
)? {
ReadSerialized::DatasetAndDate(tuple) => tuple,
ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
ReadSerialized::_Phantom(_) => unreachable!(),
};

let (timestamp_dataset, _, _) =
match read_serialized::<Height, Timestamp, _, SerializedVec<Timestamp>>(
"timestamp",
&headers,
routes.get("timestamp").unwrap(),
&range,
query,
)? {
ReadSerialized::DatasetAndDate(tuple) => tuple,
ReadSerialized::NotModified => return Ok(Response::new_not_modified()),
ReadSerialized::_Phantom(_) => unreachable!(),
};

let mut serialized_timemap: SerializedTimeMap<T> = SerializedBTreeMap::default();

dataset
.map
.into_iter()
.enumerate()
.for_each(|(index, value)| {
serialized_timemap.map.insert(
timestamp_dataset
.get_index(index)
.cloned()
.unwrap_or(Timestamp::now()),
value,
);
});
dataset.map.into_iter().enumerate().for_each(|(index, value)| {
serialized_timemap.map.insert(
timestamp_dataset.get_index(index).cloned().unwrap_or(Timestamp::now()),
value,
);
});

(
serialized_to_response::<Timestamp, T, HeightMapChunkId, SerializedTimeMap<T>>(
@@ -287,19 +265,13 @@ where

let date_modified;

let datasets =
GenericMap::<Key, Value, ChunkId, Serialized>::_read_dir(&folder_path, serialization);
let datasets = GenericMap::<Key, Value, ChunkId, Serialized>::_read_dir(&folder_path, serialization);

let mut chunk_meta = None;

let dataset = if let DatasetRange::Chunk(range_chunk) = range {
let chunk_id = match range_chunk {
DatasetRangeChunk::Last => {
*datasets
.last_key_value()
.context("Last tuple of dataset directory")?
.0
}
DatasetRangeChunk::Last => *datasets.last_key_value().context("Last tuple of dataset directory")?.0,
DatasetRangeChunk::Chunk(chunk) => ChunkId::from_usize(*chunk),
};

@@ -358,11 +330,7 @@ where
Serialized::import_all(&folder_path, serialization)
};

Ok(ReadSerialized::DatasetAndDate((
dataset,
date_modified,
chunk_meta,
)))
Ok(ReadSerialized::DatasetAndDate((dataset, date_modified, chunk_meta)))
}

#[derive(Serialize)]

@@ -1,12 +1,25 @@
use axum::{routing::get, Router};
use handlers::{dataset_handler, last_values_handler};
use std::time::Instant;

use axum::{
extract::{Query, State},
http::HeaderMap,
response::{IntoResponse, Response},
routing::get,
Json, Router,
};
use color_eyre::eyre::eyre;
use reqwest::StatusCode;
use serde::Deserialize;
use structs::{Format, Index};

use crate::{log_result, traits::HeaderMapExtended};

use super::AppState;

mod handlers;
// mod handlers;
pub mod structs;

pub const API_URL_PREFIX: &str = "/api";
pub const VECS_URL_PREFIX: &str = "/api/vecs";

pub trait ApiRoutes {
fn add_api_routes(self) -> Self;
@@ -14,7 +27,81 @@ pub trait ApiRoutes {

impl ApiRoutes for Router<AppState> {
fn add_api_routes(self) -> Self {
self.route(&format!("{API_URL_PREFIX}/last"), get(last_values_handler))
.route(&format!("{API_URL_PREFIX}/*path"), get(dataset_handler))
self.route(VECS_URL_PREFIX, get(handler))
}
}

#[derive(Debug, Deserialize)]
pub struct DatasetParams {
pub i: String,
pub v: String,
pub from: Option<i64>,
pub to: Option<i64>,
pub format: Option<String>,
}

pub async fn handler(headers: HeaderMap, query: Query<DatasetParams>, State(app_state): State<AppState>) -> Response {
let instant = Instant::now();

let path = format!(
"{VECS_URL_PREFIX}?i={}&v={}{}{}",
query.i,
query.v,
query.from.map_or("".to_string(), |from| format!("&from={from}")),
query.to.map_or("".to_string(), |to| format!("&to={to}")),
);

match req_to_response_res(headers, query, app_state) {
Ok(response) => {
log_result(response.status(), &path, instant);
response
}
Err(error) => {
let mut response = (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
log_result(response.status(), &path, instant);
response.headers_mut().insert_cors();
response
}
}
}

fn req_to_response_res(
headers: HeaderMap,
Query(DatasetParams { format, from, i, to, v }): Query<DatasetParams>,
AppState { vecs, .. }: AppState,
) -> color_eyre::Result<Response> {
let format = Format::try_from(format).ok();
let indexes = i
.to_lowercase()
.split(",")
.flat_map(|s| Index::try_from(s).ok())
.collect::<Vec<_>>();

if indexes.len() > 1 {
return Err(eyre!("Multiple indexes aren't supported yet !"));
} else if indexes.is_empty() {
return Err(eyre!("Bad index(es)"));
}

dbg!(format, &indexes, &v, from, to);

let values = v
.to_lowercase()
.split(",")
.flat_map(|s| vecs.get(&s.replace("_", "-")))
.flat_map(|i_to_v| i_to_v.get(indexes.first().unwrap()))
.map(|vec| vec.collect_range(from, to).unwrap())
.collect::<Vec<_>>();

if values.len() == 1 {
let values = values.first().unwrap();
if values.len() == 1 {
let value = values.first().unwrap();
Ok(Json(value).into_response())
} else {
Ok(Json(values).into_response())
}
} else {
Ok(Json(values).into_response())
}
}

@@ -1,8 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct ChunkMetadata {
pub id: usize,
pub previous: Option<String>,
pub next: Option<String>,
}
@@ -1,34 +0,0 @@
use std::path::Path;

#[derive(PartialEq, Eq, Clone, Copy)]
pub enum Extension {
#[allow(clippy::upper_case_acronyms)]
CSV,
#[allow(clippy::upper_case_acronyms)]
JSON,
}

impl Extension {
pub fn from(path: &Path) -> Option<Self> {
if let Some(extension) = path.extension() {
let extension = extension.to_str().unwrap();

if extension == Self::CSV.to_str() {
Some(Self::CSV)
} else if extension == Self::JSON.to_str() {
Some(Self::JSON)
} else {
None
}
} else {
None
}
}

pub fn to_str(&self) -> &str {
match self {
Extension::CSV => "csv",
Extension::JSON => "json",
}
}
}
server/src/api/structs/format.rs (new file, 27 lines)
@@ -0,0 +1,27 @@
use color_eyre::eyre::eyre;

#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Format {
CSV,
JSON,
}

impl TryFrom<Option<String>> for Format {
type Error = color_eyre::Report;
fn try_from(value: Option<String>) -> Result<Self, Self::Error> {
if let Some(value) = value {
let value = value.to_lowercase();
let value = value.as_str();
if value == "csv" {
Ok(Self::CSV)
} else if value == "json" {
Ok(Self::JSON)
} else {
Err(eyre!("Fail"))
}
} else {
Err(eyre!("Fail"))
}
}
}
server/src/api/structs/index.rs (new file, 122 lines)
@@ -0,0 +1,122 @@
use std::fmt::{self, Debug};

use computer::Date;
use indexer::{
Addressindex, Height, P2PK33index, P2PK65index, P2PKHindex, P2SHindex, P2TRindex, P2WPKHindex, P2WSHindex, Txindex,
Txinindex, Txoutindex,
};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Index {
Addressindex,
Date,
Height,
P2PK33index,
P2PK65index,
P2PKHindex,
P2SHindex,
P2TRindex,
P2WPKHindex,
P2WSHindex,
Txindex,
Txinindex,
Txoutindex,
}

impl TryFrom<&str> for Index {
type Error = ();
fn try_from(value: &str) -> Result<Self, Self::Error> {
Ok(match value {
"addri" | "addressindex" => Self::Addressindex,
"d" | "date" => Self::Date,
"h" | "height" => Self::Height,
"p2pk33index" => Self::P2PK33index,
"p2pk65index" => Self::P2PK65index,
"p2pkhindex" => Self::P2PKHindex,
"p2shindex" => Self::P2SHindex,
"p2trindex" => Self::P2TRindex,
"p2wpkhindex" => Self::P2WPKHindex,
"p2wshindex" => Self::P2WSHindex,
"txi" | "txindex" => Self::Txindex,
"txini" | "txinindex" => Self::Txinindex,
"txouti" | "txoutindex" => Self::Txoutindex,
_ => return Err(()),
})
}
}

impl fmt::Display for Index {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(self, f)
}
}

pub trait IndexTypeToIndexEnum {
fn to_enum() -> Index;
}

impl IndexTypeToIndexEnum for Addressindex {
fn to_enum() -> Index {
Index::Addressindex
}
}
impl IndexTypeToIndexEnum for Date {
fn to_enum() -> Index {
Index::Date
}
}
impl IndexTypeToIndexEnum for Height {
fn to_enum() -> Index {
Index::Height
}
}
impl IndexTypeToIndexEnum for Txindex {
fn to_enum() -> Index {
Index::Txindex
}
}
impl IndexTypeToIndexEnum for Txinindex {
fn to_enum() -> Index {
Index::Txinindex
}
}
impl IndexTypeToIndexEnum for Txoutindex {
fn to_enum() -> Index {
Index::Txoutindex
}
}
impl IndexTypeToIndexEnum for P2PK33index {
fn to_enum() -> Index {
Index::P2PK33index
}
}
impl IndexTypeToIndexEnum for P2PK65index {
fn to_enum() -> Index {
Index::P2PK65index
}
}
impl IndexTypeToIndexEnum for P2PKHindex {
fn to_enum() -> Index {
Index::P2PKHindex
}
}
impl IndexTypeToIndexEnum for P2SHindex {
fn to_enum() -> Index {
Index::P2SHindex
}
}
impl IndexTypeToIndexEnum for P2TRindex {
fn to_enum() -> Index {
Index::P2TRindex
}
}
impl IndexTypeToIndexEnum for P2WPKHindex {
fn to_enum() -> Index {
Index::P2WPKHindex
}
}
impl IndexTypeToIndexEnum for P2WSHindex {
fn to_enum() -> Index {
Index::P2WSHindex
}
}
@@ -9,8 +9,8 @@ use crate::structs::{AnyMap, Date, Height, MapKey};
pub enum Kind {
Date,
Height,
Timestamp,
Last,
// Timestamp,
// Last,
}

impl TryFrom<&String> for Kind {
@@ -26,8 +26,8 @@ impl TryFrom<&String> for Kind {
{
'd' => Self::Date,
'h' => Self::Height,
't' => Self::Timestamp,
'l' => Self::Last,
// 't' => Self::Timestamp,
// 'l' => Self::Last,
_ => return Err(eyre!("Bad kind")),
},
)

@@ -1,13 +1,15 @@
mod chunk_metadata;
mod extension;
mod kind;
mod range;
mod route;
mod routes;
// mod chunk_metadata;
mod format;
mod index;
// mod kind;
// mod range;
// mod route;
// mod routes;

pub use chunk_metadata::*;
pub use extension::*;
pub use kind::*;
pub use range::*;
pub use route::*;
pub use routes::*;
// pub use chunk_metadata::*;
pub use format::*;
pub use index::*;
// pub use kind::*;
// pub use range::*;
// pub use route::*;
// pub use routes::*;

@@ -1,40 +1,155 @@
use std::{sync::Arc, time::Instant};
use std::{collections::BTreeMap, time::Instant};

use api::{structs::Routes, ApiRoutes};
use axum::{routing::get, serve, Router};
use api::{
structs::{Index, IndexTypeToIndexEnum},
ApiRoutes,
};
use axum::{body::Body, response::IntoResponse, routing::get, serve, Router};
use color_eyre::owo_colors::OwoColorize;
use log::{error, info};
use computer::Computer;
use derive_deref::{Deref, DerefMut};
use indexer::Indexer;
use logger::{error, info};
use reqwest::StatusCode;
use serde::Serialize;
use serde_json::Value;
use storable_vec::{StorableVecIndex, StorableVecType, STATELESS};
use tokio::net::TcpListener;
use tower_http::compression::CompressionLayer;
use website::WebsiteRoutes;

use crate::structs::Config;

pub mod api;
mod header_map;
mod response;
mod api;
mod traits;
mod website;

#[derive(Clone)]
pub struct AppState {
routes: Arc<Routes>,
config: Config,
vecs: &'static VecIdToIndexToVec,
indexer: &'static Indexer<STATELESS>,
computer: &'static Computer<STATELESS>,
}

pub async fn main(routes: Routes, config: Config) -> color_eyre::Result<()> {
routes.generate_dts_file();
#[derive(Default, Deref, DerefMut)]
pub struct VecIdToIndexToVec(BTreeMap<String, IndexToVec>);

impl VecIdToIndexToVec {
pub fn insert<I, T>(&mut self, vec: &'static storable_vec::StorableVec<I, T, STATELESS>)
where
I: StorableVecIndex + IndexTypeToIndexEnum + Send + Sync,
T: StorableVecType + Send + Sync + Serialize,
{
let file_name = vec.file_name();
let split = file_name.split("_to_").collect::<Vec<_>>();
if split.len() != 2 {
panic!();
}
let index = vec.key_to_enum();
if split[0] != index.to_string().to_lowercase() {
dbg!(split[0], index.to_string());
panic!();
}
let key = split[1].to_string().replace("_", "-");
let prev = self.entry(key).or_default().insert(index, vec);
if prev.is_some() {
panic!()
}
}
}

#[derive(Default, Deref, DerefMut)]
pub struct IndexToVec {
pub index_to_vec: BTreeMap<Index, &'static (dyn AnyStatelessStorableVec + Send + Sync)>,
}

pub trait AnyStatelessStorableVec {
fn key_to_enum(&self) -> Index;
fn collect_range(&self, from: Option<i64>, to: Option<i64>) -> storable_vec::Result<Vec<Value>>;
// fn collect_range(&self, from: Option<i64>, to: Option<i64>) -> storable_vec::Result<Vec<T>>;
}

impl<I, T> AnyStatelessStorableVec for storable_vec::StorableVec<I, T, STATELESS>
where
I: StorableVecIndex + IndexTypeToIndexEnum + Send + Sync,
T: StorableVecType + Send + Sync + Serialize,
{
fn key_to_enum(&self) -> Index {
I::to_enum()
}

fn collect_range(&self, from: Option<i64>, to: Option<i64>) -> storable_vec::Result<Vec<Value>> {
Ok(self
.collect_range(from, to)?
.into_iter()
.map(|v| serde_json::to_value(v).unwrap())
.collect::<Vec<_>>())
}
}

trait StatelessVecs {
fn parse(&'static self, vecs: &mut VecIdToIndexToVec);
}

impl StatelessVecs for Indexer<STATELESS> {
fn parse(&'static self, vecs: &mut VecIdToIndexToVec) {
vecs.insert(&self.vecs.addressindex_to_addresstype);
vecs.insert(&self.vecs.addressindex_to_addresstypeindex);
vecs.insert(&self.vecs.addressindex_to_height);
vecs.insert(&self.vecs.height_to_blockhash);
vecs.insert(&self.vecs.height_to_difficulty);
vecs.insert(&self.vecs.height_to_first_addressindex);
vecs.insert(&self.vecs.height_to_first_emptyindex);
vecs.insert(&self.vecs.height_to_first_multisigindex);
vecs.insert(&self.vecs.height_to_first_opreturnindex);
vecs.insert(&self.vecs.height_to_first_pushonlyindex);
vecs.insert(&self.vecs.height_to_first_txindex);
vecs.insert(&self.vecs.height_to_first_txinindex);
vecs.insert(&self.vecs.height_to_first_txoutindex);
vecs.insert(&self.vecs.height_to_first_unknownindex);
vecs.insert(&self.vecs.height_to_first_p2pk33index);
vecs.insert(&self.vecs.height_to_first_p2pk65index);
vecs.insert(&self.vecs.height_to_first_p2pkhindex);
vecs.insert(&self.vecs.height_to_first_p2shindex);
vecs.insert(&self.vecs.height_to_first_p2trindex);
vecs.insert(&self.vecs.height_to_first_p2wpkhindex);
vecs.insert(&self.vecs.height_to_first_p2wshindex);
vecs.insert(&self.vecs.height_to_size);
vecs.insert(&self.vecs.height_to_timestamp);
vecs.insert(&self.vecs.height_to_weight);
vecs.insert(&self.vecs.p2pk33index_to_p2pk33addressbytes);
vecs.insert(&self.vecs.p2pk65index_to_p2pk65addressbytes);
vecs.insert(&self.vecs.p2pkhindex_to_p2pkhaddressbytes);
vecs.insert(&self.vecs.p2shindex_to_p2shaddressbytes);
vecs.insert(&self.vecs.p2trindex_to_p2traddressbytes);
vecs.insert(&self.vecs.p2wpkhindex_to_p2wpkhaddressbytes);
vecs.insert(&self.vecs.p2wshindex_to_p2wshaddressbytes);
vecs.insert(&self.vecs.txindex_to_first_txinindex);
vecs.insert(&self.vecs.txindex_to_first_txoutindex);
vecs.insert(&self.vecs.txindex_to_height);
vecs.insert(&self.vecs.txindex_to_locktime);
vecs.insert(&self.vecs.txindex_to_txid);
vecs.insert(&self.vecs.txindex_to_txversion);
vecs.insert(&self.vecs.txinindex_to_txoutindex);
vecs.insert(&self.vecs.txoutindex_to_addressindex);
vecs.insert(&self.vecs.txoutindex_to_amount);
}
}

pub async fn main(indexer: Indexer<STATELESS>, computer: Computer<STATELESS>) -> color_eyre::Result<()> {
// pub async fn main(routes: Routes, config: Config) -> color_eyre::Result<()> {
// routes.generate_dts_file();

let indexer = Box::leak(Box::new(indexer));
let computer = Box::leak(Box::new(computer));
let vecs = Box::leak(Box::new(VecIdToIndexToVec::default()));
indexer.parse(vecs);

let state = AppState {
routes: Arc::new(routes),
config,
vecs,
indexer,
computer,
};

let compression_layer = CompressionLayer::new()
.br(true)
.deflate(true)
.gzip(true)
.zstd(true);
let compression_layer = CompressionLayer::new().br(true).deflate(true).gzip(true).zstd(true);

let router = Router::new()
.add_api_routes()
@@ -55,6 +170,7 @@ pub async fn main(routes: Routes, config: Config) -> color_eyre::Result<()> {
}

info!("Starting server on port {port}...");

let listener = listener.unwrap();

serve(listener, router).await?;

server/src/main.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
use std::path::Path;

use computer::Computer;
use indexer::Indexer;
use storable_vec::STATELESS;

#[tokio::main]
pub async fn main() -> color_eyre::Result<()> {
color_eyre::install()?;

logger::init_log(None);

let path = Path::new("../_outputs");
let indexer: Indexer<STATELESS> = Indexer::import(&path.join("indexes"))?;
let computer: Computer<STATELESS> = Computer::import(&path.join("computed"))?;

berver::main(indexer, computer).await.unwrap();

Ok(())
}
@@ -1,15 +1,15 @@
use std::path::Path;
use std::{path::Path, time};

use axum::http::{header, HeaderMap};
use chrono::{DateTime, Timelike, Utc};
use log::info;
use jiff::{civil::DateTime, fmt::strtime, tz::TimeZone, Timestamp};
use logger::info;
use reqwest::header::{HOST, IF_MODIFIED_SINCE};

const STALE_IF_ERROR: u64 = 30_000_000; // 1 Year ish
const MODIFIED_SINCE_FORMAT: &str = "%a, %d %b %Y %H:%M:%S GMT";

#[derive(PartialEq, Eq)]
pub enum Modified {
pub enum ModifiedState {
ModifiedSince,
NotModifiedSince,
}
@@ -23,14 +23,13 @@ pub trait HeaderMapExtended {

fn insert_cors(&mut self);

fn get_if_modified_since(&self) -> Option<DateTime<Utc>>;
fn check_if_modified_since(&self, path: &Path)
-> color_eyre::Result<(Modified, DateTime<Utc>)>;
fn get_if_modified_since(&self) -> Option<DateTime>;
fn check_if_modified_since(&self, path: &Path) -> color_eyre::Result<(ModifiedState, DateTime)>;

fn insert_cache_control_immutable(&mut self);
#[allow(unused)]
fn insert_cache_control_revalidate(&mut self, max_age: u64, stale_while_revalidate: u64);
fn insert_last_modified(&mut self, date: DateTime<Utc>);
fn insert_last_modified(&mut self, date: DateTime);

fn insert_content_disposition_attachment(&mut self);

@@ -102,40 +101,39 @@ impl HeaderMapExtended for HeaderMap {
);
}

fn insert_last_modified(&mut self, date: DateTime<Utc>) {
let formatted = date.format(MODIFIED_SINCE_FORMAT).to_string();
fn insert_last_modified(&mut self, date: DateTime) {
let formatted = date
.to_zoned(TimeZone::system())
.unwrap()
.strftime(MODIFIED_SINCE_FORMAT)
.to_string();

self.insert(header::LAST_MODIFIED, formatted.parse().unwrap());
}

fn check_if_modified_since(
&self,
path: &Path,
) -> color_eyre::Result<(Modified, DateTime<Utc>)> {
let time = path.metadata()?.modified()?;
let date: DateTime<Utc> = time.into();
let date = date.with_nanosecond(0).unwrap();
fn check_if_modified_since(&self, path: &Path) -> color_eyre::Result<(ModifiedState, DateTime)> {
let duration = path.metadata()?.modified()?.duration_since(time::UNIX_EPOCH).unwrap();
let date = Timestamp::new(duration.as_secs() as i64, 0)
.unwrap()
.to_zoned(TimeZone::UTC)
.datetime();

if let Some(if_modified_since) = self.get_if_modified_since() {
if if_modified_since == date {
return Ok((Modified::NotModifiedSince, date));
return Ok((ModifiedState::NotModifiedSince, date));
}
}

Ok((Modified::ModifiedSince, date))
Ok((ModifiedState::ModifiedSince, date))
}

fn get_if_modified_since(&self) -> Option<DateTime<Utc>> {
fn get_if_modified_since(&self) -> Option<DateTime> {
if let Some(modified_since) = self.get(IF_MODIFIED_SINCE) {
if let Ok(modified_since) = modified_since.to_str() {
let date = DateTime::parse_from_str(
&format!("{modified_since} +00:00"),
&format!("{MODIFIED_SINCE_FORMAT} %z"),
);

if let Ok(x) = date {
return Some(x.to_utc());
}
return strtime::parse(MODIFIED_SINCE_FORMAT, modified_since)
.unwrap()
.to_datetime()
.ok();
}
}

@@ -176,10 +174,7 @@ impl HeaderMapExtended for HeaderMap {
}

fn insert_content_type_application_javascript(&mut self) {
self.insert(
header::CONTENT_TYPE,
"application/javascript".parse().unwrap(),
);
self.insert(header::CONTENT_TYPE, "application/javascript".parse().unwrap());
}

fn insert_content_type_application_json(&mut self) {
@@ -187,10 +182,7 @@ impl HeaderMapExtended for HeaderMap {
}

fn insert_content_type_application_manifest_json(&mut self) {
self.insert(
header::CONTENT_TYPE,
"application/manifest+json".parse().unwrap(),
);
self.insert(header::CONTENT_TYPE, "application/manifest+json".parse().unwrap());
}

fn insert_content_type_application_pdf(&mut self) {
server/src/traits/mod.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
mod header_map;
mod response;

pub use header_map::*;
pub use response::*;
@@ -1,41 +0,0 @@
// Files are bigger than with SWC, to retest later

// Source: https://github.com/oxc-project/oxc/blob/main/crates/oxc_minifier/examples/minifier.rs

use std::{fs, path::Path};

use oxc::{
allocator::Allocator,
codegen::{CodeGenerator, CodegenOptions},
minifier::{MinifierOptions, MinifierReturn},
parser::{Parser, ParserReturn},
span::SourceType,
};

//
pub fn minify_js(path: &Path) -> String {
let allocator = Allocator::default();

let source_type = SourceType::from_path(path).unwrap();

let source_text = fs::read_to_string(path).unwrap();

let ParserReturn { mut program, .. } =
Parser::new(&allocator, &source_text, source_type).parse();

let minifier = oxc::minifier::Minifier::new(MinifierOptions::default());

let MinifierReturn { mangler } = minifier.build(&allocator, &mut program);

CodeGenerator::new()
.with_options(CodegenOptions {
single_quote: false,
minify: true,
comments: false,
annotation_comments: false,
source_map_path: None,
})
.with_mangler(mangler)
.build(&program)
.code
}
@@ -10,18 +10,17 @@ use axum::{
http::HeaderMap,
response::{IntoResponse, Response},
};
use log::{error, info};
use logger::{error, info};
use reqwest::StatusCode;

use crate::server::{
header_map::{HeaderMapExtended, Modified},
use crate::{
log_result,
response::ResponseExtended,
traits::{HeaderMapExtended, ModifiedState, ResponseExtended},
};

use super::minify_js;

const WEBSITE_PATH: &str = "./src/website/";
const WEBSITE_DEV_PATH: &str = "../website/";

pub async fn file_handler(headers: HeaderMap, path: extract::Path<String>) -> Response {
any_handler(headers, Some(path))
@@ -41,11 +40,8 @@ fn any_handler(headers: HeaderMap, path: Option<extract::Path<String>>) -> Respo

if !path.exists() {
if path.extension().is_some() {
let mut response: Response<Body> = (
StatusCode::INTERNAL_SERVER_ERROR,
"File doesn't exist".to_string(),
)
.into_response();
let mut response: Response<Body> =
(StatusCode::INTERNAL_SERVER_ERROR, "File doesn't exist".to_string()).into_response();

response.headers_mut().insert_cors();

@@ -70,11 +66,10 @@ fn any_handler(headers: HeaderMap, path: Option<extract::Path<String>>) -> Respo
}

fn path_to_response(headers: &HeaderMap, path: &Path) -> Response {
match _path_to_response(headers, path) {
match path_to_response_(headers, path) {
Ok(response) => response,
Err(error) => {
let mut response =
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
let mut response = (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();

response.headers_mut().insert_cors();

@@ -83,9 +78,9 @@ fn path_to_response(headers: &HeaderMap, path: &Path) -> Response {
}
}

fn _path_to_response(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Response> {
fn path_to_response_(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Response> {
let (modified, date) = headers.check_if_modified_since(path)?;
if modified == Modified::NotModifiedSince {
if modified == ModifiedState::NotModifiedSince {
return Ok(Response::new_not_modified());
}

@@ -124,10 +119,7 @@ fn _path_to_response(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Res
|| serialized_path.contains("assets/")
|| serialized_path.contains("packages/")
|| path.extension().is_some_and(|extension| {
extension == "pdf"
|| extension == "jpg"
|| extension == "png"
|| extension == "woff2"
extension == "pdf" || extension == "jpg" || extension == "png" || extension == "woff2"
})
{
headers.insert_cache_control_immutable();
@@ -140,5 +132,5 @@ fn _path_to_response(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Res
}

fn str_to_path(path: &str) -> PathBuf {
PathBuf::from(&format!("{WEBSITE_PATH}{path}"))
PathBuf::from(&format!("{WEBSITE_DEV_PATH}{path}"))
}

@@ -1,22 +1,43 @@
// Simplified version of: https://github.com/swc-project/swc/blob/main/crates/swc/examples/minify.rs
// Source: https://github.com/oxc-project/oxc/blob/main/crates/oxc_minifier/examples/minifier.rs

use std::{path::Path, sync::Arc};
use std::{fs, path::Path};

use swc::{config::JsMinifyOptions, try_with_handler, JsMinifyExtras};
use swc_common::{SourceMap, GLOBALS};
use oxc::{
allocator::Allocator,
codegen::{CodeGenerator, CodegenOptions, LegalComment},
minifier::{CompressOptions, MangleOptions, Minifier, MinifierOptions},
parser::Parser,
span::SourceType,
};

//
pub fn minify_js(path: &Path) -> String {
let source_map = Arc::<SourceMap>::default();
let compiler = swc::Compiler::new(source_map.clone());
let allocator = Allocator::default();

GLOBALS
.set(&Default::default(), || {
try_with_handler(source_map.clone(), Default::default(), |handler| {
let fm = source_map.load_file(path).expect("failed to load file");
let source_type = SourceType::from_path(path).unwrap();

compiler.minify(fm, handler, &JsMinifyOptions::default(), JsMinifyExtras::default())
})
let source_text = fs::read_to_string(path).unwrap();

let parser_return = Parser::new(&allocator, &source_text, source_type).parse();

let mut program = parser_return.program;

let minifier_return = Minifier::new(MinifierOptions {
mangle: Some(MangleOptions::default()),
compress: Some(CompressOptions::default()),
})
.build(&allocator, &mut program);

CodeGenerator::new()
.with_options(CodegenOptions {
minify: true,
single_quote: false,
comments: false,
annotation_comments: false,
source_map_path: None,
legal_comments: LegalComment::None,
})
.unwrap()
.with_symbol_table(minifier_return.symbol_table)
.build(&program)
.code
}

@@ -12,7 +12,6 @@ pub trait WebsiteRoutes {

impl WebsiteRoutes for Router<AppState> {
fn add_website_routes(self) -> Self {
self.route("/*path", get(file_handler))
.route("/", get(index_handler))
self.route("/{*path}", get(file_handler)).route("/", get(index_handler))
}
}