server: multiple frontends + auto-download from GitHub when needed

nym21
2025-03-05 12:22:11 +01:00
parent 0d0edd7917
commit b27297cdc6
29 changed files with 892 additions and 99 deletions

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!(concat!("../", env!("CARGO_PKG_README")))]
#[cfg(feature = "core")]

View File

@@ -8,6 +8,7 @@ repository.workspace = true
[dependencies]
brk_computer = { workspace = true }
brk_core = { workspace = true }
brk_exit = { workspace = true }
brk_indexer = { workspace = true }
brk_logger = { workspace = true }

View File

@@ -1,8 +1,6 @@
use std::{
fs,
path::{Path, PathBuf},
};
use std::fs;
use brk_core::{path_dot_brk, path_dot_brk_log};
use brk_query::Params as QueryArgs;
use clap::{Parser, Subcommand};
use query::query;
@@ -32,7 +30,7 @@ fn main() -> color_eyre::Result<()> {
fs::create_dir_all(path_dot_brk())?;
brk_logger::init(Some(&path_log()));
brk_logger::init(Some(&path_dot_brk_log()));
let cli = Cli::parse();
@@ -41,12 +39,3 @@ fn main() -> color_eyre::Result<()> {
Commands::Query(args) => query(args),
}
}
pub fn path_dot_brk() -> PathBuf {
let home = std::env::var("HOME").unwrap();
Path::new(&home).join(".brk")
}
pub fn path_log() -> PathBuf {
path_dot_brk().join("log")
}

View File

@@ -33,7 +33,7 @@ pub fn query(params: QueryParams) -> color_eyre::Result<()> {
"{}",
match res {
Output::Json(v) => match v {
Value::Single(v) => v.to_string(),
Value::Single(v) => v.to_string().replace("\"", ""),
v => {
let v = match v {
Value::Single(_) => unreachable!("Already processed"),

View File

@@ -6,17 +6,16 @@ use std::{
};
use brk_computer::Computer;
use brk_core::path_dot_brk;
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_parser::rpc::{self, Auth, Client, RpcApi};
use brk_server::tokio;
use brk_server::{Frontend, tokio};
use clap::{Parser, ValueEnum};
use color_eyre::eyre::eyre;
use log::info;
use serde::{Deserialize, Serialize};
use crate::path_dot_brk;
pub fn run(config: RunConfig) -> color_eyre::Result<()> {
let config = RunConfig::import(Some(config))?;
@@ -40,10 +39,11 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
.block_on(async {
let served_indexer = indexer.clone();
let served_computer = computer.clone();
let frontend = config.frontend();
let handle = if config.serve() {
let server = if config.serve() {
Some(tokio::spawn(async move {
brk_server::main(served_indexer, served_computer)
brk_server::main(served_indexer, served_computer, frontend)
.await
.unwrap();
}))
@@ -73,9 +73,10 @@ pub fn run(config: RunConfig) -> color_eyre::Result<()> {
}
}
if let Some(handle) = handle {
if let Some(handle) = server {
handle.await.unwrap();
}
Ok(())
})
}
@@ -94,6 +95,10 @@ pub struct RunConfig {
#[arg(short, long)]
mode: Option<Mode>,
/// Frontend served by the server (if active), default: kibo.money, saved
#[arg(short, long)]
frontend: Option<Frontend>,
/// Bitcoin RPC ip, default: localhost, saved
#[arg(long, value_name = "IP")]
rpcconnect: Option<String>,
@@ -142,6 +147,10 @@ impl RunConfig {
config_saved.mode = Some(mode);
}
if let Some(frontend) = config_args.frontend.take() {
config_saved.frontend = Some(frontend);
}
if let Some(rpcconnect) = config_args.rpcconnect.take() {
config_saved.rpcconnect = Some(rpcconnect);
}
@@ -182,6 +191,7 @@ impl RunConfig {
// info!(" bitcoindir: {:?}", config.bitcoindir);
// info!(" brkdir: {:?}", config.brkdir);
// info!(" mode: {:?}", config.mode);
// info!(" frontend: {:?}", config.frontend);
// info!(" rpcconnect: {:?}", config.rpcconnect);
// info!(" rpcport: {:?}", config.rpcport);
// info!(" rpccookiefile: {:?}", config.rpccookiefile);
@@ -334,6 +344,10 @@ impl RunConfig {
fix("~").unwrap_or_else(|| fix("$HOME").unwrap_or_else(|| PathBuf::from(&path)))
}
pub fn frontend(&self) -> Frontend {
self.frontend.unwrap_or_default()
}
}
#[derive(
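For reference, a minimal sketch of how the new flag resolves (hypothetical usage, not part of the diff; clap's ValueEnum derive exposes the variants as kebab-case CLI values such as `--frontend kibo-money`):

```rust
// Resolution order mirroring the merge logic above:
// 1. a --frontend CLI argument wins and is persisted into the saved config,
// 2. otherwise the previously saved value is used,
// 3. otherwise Frontend::default() (KiboMoney) applies.
let frontend = config.frontend(); // Frontend::KiboMoney unless overridden
```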

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
mod error;

View File

@@ -1,5 +1,7 @@
mod paths;
mod pause;
mod rlimit;
pub use paths::*;
pub use pause::*;
pub use rlimit::*;

View File

@@ -0,0 +1,10 @@
use std::path::{Path, PathBuf};
pub fn path_dot_brk() -> PathBuf {
let home = std::env::var("HOME").unwrap();
Path::new(&home).join(".brk")
}
pub fn path_dot_brk_log() -> PathBuf {
path_dot_brk().join("log")
}

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
use std::{

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]
@@ -55,6 +54,7 @@ impl Indexer {
}
/// Do NOT import multiple times or things will break !!!
/// Clone the struct instead
pub fn import_stores(&mut self) -> color_eyre::Result<()> {
self.stores = Some(Stores::import(&self.path.join("stores"))?);
Ok(())

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]

View File

@@ -15,4 +15,5 @@ color-eyre = { workspace = true }
derive_deref = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_with = "3.12.0"
tabled = { workspace = true }

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]

View File

@@ -1,20 +1,27 @@
use clap::{Parser, builder::PossibleValuesParser};
use serde::Deserialize;
use serde_with::{OneOrMany, formats::PreferOne, serde_as};
use crate::{Format, Index};
#[serde_as]
#[derive(Debug, Deserialize, Parser)]
pub struct Params {
#[clap(short, long, value_parser = PossibleValuesParser::new(Index::all_possible_values()))]
#[serde(alias = "i")]
/// Index of the values requested
pub index: String,
#[clap(short, long, value_delimiter = ' ', num_args = 1..)]
#[serde(alias = "v")]
#[serde_as(as = "OneOrMany<_, PreferOne>")]
/// Names of the values requested
pub values: Vec<String>,
#[clap(short, long, allow_hyphen_values = true)]
#[serde(alias = "f")]
/// Inclusive starting index, if negative will be from the end
pub from: Option<i64>,
#[clap(short, long, allow_hyphen_values = true)]
#[serde(default, alias = "t")]
/// Inclusive ending index, if negative will be from the end
pub to: Option<i64>,
#[clap(long)]

View File

@@ -14,7 +14,10 @@ impl Tabled for Vec<Vec<serde_json::Value>> {
let len = first.len();
(0..len).for_each(|index| {
builder.push_record(self.iter().map(|vec| vec.get(index).unwrap().to_string()));
builder.push_record(
self.iter()
.map(|vec| vec.get(index).unwrap().to_string().replace("\"", "")),
);
});
}

View File

@@ -10,15 +10,20 @@ repository.workspace = true
axum = "0.8.1"
brk_computer = { workspace = true }
brk_exit = { workspace = true }
brk_core = { workspace = true }
brk_indexer = { workspace = true }
brk_logger = { workspace = true }
brk_parser = { workspace = true }
brk_query = { workspace = true }
brk_vec = { workspace = true }
clap = { workspace = true }
color-eyre = { workspace = true }
jiff = { workspace = true }
log = { workspace = true }
minreq = { workspace = true }
oxc = { version = "0.54.0", features = ["codegen", "minifier"] }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { version = "1.43.0", features = ["full"] }
tower-http = { version = "0.6.2", features = ["compression-full"] }
zip = "2.2.3"

View File

@@ -7,6 +7,7 @@ use brk_parser::{
Parser,
rpc::{self, RpcApi},
};
use brk_server::Frontend;
use log::info;
pub fn main() -> color_eyre::Result<()> {
@@ -14,6 +15,8 @@ pub fn main() -> color_eyre::Result<()> {
brk_logger::init(Some(Path::new(".log")));
let process = false;
let bitcoin_dir = Path::new("../../../bitcoin");
let rpc = Box::leak(Box::new(rpc::Client::new(
"http://localhost:8332",
@@ -40,29 +43,33 @@ pub fn main() -> color_eyre::Result<()> {
let served_indexer = indexer.clone();
let served_computer = computer.clone();
tokio::spawn(async move {
brk_server::main(served_indexer, served_computer)
let server = tokio::spawn(async move {
brk_server::main(served_indexer, served_computer, Frontend::KiboMoney)
.await
.unwrap();
});
loop {
let block_count = rpc.get_block_count()?;
if process {
loop {
let block_count = rpc.get_block_count()?;
info!("{block_count} blocks found.");
info!("{block_count} blocks found.");
let starting_indexes = indexer.index(&parser, rpc, &exit)?;
let starting_indexes = indexer.index(&parser, rpc, &exit)?;
computer.compute(&mut indexer, starting_indexes, &exit)?;
computer.compute(&mut indexer, starting_indexes, &exit)?;
info!("Waiting for new blocks...");
info!("Waiting for new blocks...");
while block_count == rpc.get_block_count()? {
sleep(Duration::from_secs(1))
while block_count == rpc.get_block_count()? {
sleep(Duration::from_secs(1))
}
}
}
#[allow(unreachable_code)]
server.await.unwrap();
Ok(())
}) as color_eyre::Result<()>
}

View File

@@ -1,21 +1,34 @@
use std::{fs, io};
use std::{fs, io, path::Path};
use brk_query::{Index, Query};
use crate::WEBSITE_DEV_PATH;
use crate::Frontend;
const SCRIPTS: &str = "scripts";
const TYPES: &str = "types";
#[allow(clippy::upper_case_acronyms)]
pub trait DTS {
fn generate_dts_file(&self) -> io::Result<()>;
fn generate_dts_file(&self, frontend: Frontend, websites_path: &Path) -> io::Result<()>;
}
impl DTS for Query<'static> {
fn generate_dts_file(&self) -> io::Result<()> {
if !fs::exists(WEBSITE_DEV_PATH)? {
fn generate_dts_file(&self, frontend: Frontend, websites_path: &Path) -> io::Result<()> {
if frontend.is_none() {
return Ok(());
}
let path = format!("{WEBSITE_DEV_PATH}/scripts/types/vecid-to-indexes.d.ts");
let path = websites_path.join(frontend.to_folder_name());
if !fs::exists(&path)? {
return Ok(());
}
let path = path.join(SCRIPTS).join(TYPES);
fs::create_dir_all(&path)?;
let path = path.join(Path::new("vecid-to-indexes.d.ts"));
let mut contents = Index::all()
.into_iter()
@@ -26,22 +39,24 @@ impl DTS for Query<'static> {
contents += "\n\ninterface VecIdToIndexes {\n";
self.vecid_to_index_to_vec.iter().for_each(|(id, index_to_vec)| {
let indexes = index_to_vec
.keys()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ");
self.vecid_to_index_to_vec
.iter()
.for_each(|(id, index_to_vec)| {
let indexes = index_to_vec
.keys()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ");
contents += &format!(
" {}: [{indexes}]\n",
if id.contains("-") {
format!("\"{id}\"")
} else {
id.to_owned()
}
);
});
contents += &format!(
" {}: [{indexes}]\n",
if id.contains("-") {
format!("\"{id}\"")
} else {
id.to_owned()
}
);
});
contents.push('}');

View File

@@ -1,56 +1,73 @@
use std::{
fs::{self},
path::{Path, PathBuf},
path::Path,
time::Instant,
};
use axum::{
body::Body,
extract,
extract::{self, State},
http::{HeaderMap, StatusCode},
response::{IntoResponse, Response},
};
use log::{error, info};
use crate::{
WEBSITE_DEV_PATH, log_result,
AppState, log_result,
traits::{HeaderMapExtended, ModifiedState, ResponseExtended},
};
use super::minify::minify_js;
pub async fn file_handler(headers: HeaderMap, path: extract::Path<String>) -> Response {
any_handler(headers, Some(path))
pub async fn file_handler(
headers: HeaderMap,
State(app_state): State<AppState>,
path: extract::Path<String>,
) -> Response {
any_handler(headers, app_state, Some(path))
}
pub async fn index_handler(headers: HeaderMap) -> Response {
any_handler(headers, None)
pub async fn index_handler(headers: HeaderMap, State(app_state): State<AppState>) -> Response {
any_handler(headers, app_state, None)
}
fn any_handler(headers: HeaderMap, path: Option<extract::Path<String>>) -> Response {
fn any_handler(
headers: HeaderMap,
app_state: AppState,
path: Option<extract::Path<String>>,
) -> Response {
let website_path = app_state
.websites_path
.as_ref()
.expect("Should never reach here is websites_path is None")
.join(app_state.frontend.to_folder_name());
let instant = Instant::now();
let response = if let Some(path) = path.as_ref() {
let path = path.0.replace("..", "").replace("\\", "");
let mut path = str_to_path(&path);
let mut path = website_path.join(&path);
if !path.exists() {
if path.extension().is_some() {
let mut response: Response<Body> =
(StatusCode::INTERNAL_SERVER_ERROR, "File doesn't exist".to_string()).into_response();
let mut response: Response<Body> = (
StatusCode::INTERNAL_SERVER_ERROR,
"File doesn't exist".to_string(),
)
.into_response();
response.headers_mut().insert_cors();
return response;
} else {
path = str_to_path("index.html");
path = website_path.join("index.html");
}
}
path_to_response(&headers, &path)
} else {
path_to_response(&headers, &str_to_path("index.html"))
path_to_response(&headers, &website_path.join("index.html"))
};
log_result(
@@ -66,7 +83,8 @@ fn path_to_response(headers: &HeaderMap, path: &Path) -> Response {
match path_to_response_(headers, path) {
Ok(response) => response,
Err(error) => {
let mut response = (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
let mut response =
(StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response();
response.headers_mut().insert_cors();
@@ -116,7 +134,10 @@ fn path_to_response_(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Res
|| serialized_path.contains("assets/")
|| serialized_path.contains("packages/")
|| path.extension().is_some_and(|extension| {
extension == "pdf" || extension == "jpg" || extension == "png" || extension == "woff2"
extension == "pdf"
|| extension == "jpg"
|| extension == "png"
|| extension == "woff2"
})
{
headers.insert_cache_control_immutable();
@@ -127,7 +148,3 @@ fn path_to_response_(headers: &HeaderMap, path: &Path) -> color_eyre::Result<Res
Ok(response)
}
fn str_to_path(path: &str) -> PathBuf {
PathBuf::from(&format!("{WEBSITE_DEV_PATH}{path}"))
}

View File

@@ -0,0 +1,30 @@
use clap::ValueEnum;
use serde::{Deserialize, Serialize};
#[derive(
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, ValueEnum,
)]
pub enum Frontend {
None,
#[default]
KiboMoney,
Custom,
}
impl Frontend {
pub fn is_none(&self) -> bool {
self == &Self::None
}
pub fn is_some(&self) -> bool {
!self.is_none()
}
pub fn to_folder_name(&self) -> &str {
match self {
Self::Custom => "custom",
Self::KiboMoney => "kibo.money",
Self::None => unreachable!(),
}
}
}
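A small usage sketch of the new enum (hypothetical, mirroring the methods above; note that to_folder_name() must not be called on Frontend::None):

```rust
let frontend = Frontend::default(); // KiboMoney
assert!(frontend.is_some());
assert_eq!(frontend.to_folder_name(), "kibo.money"); // folder under the websites dir

assert!(Frontend::None.is_none()); // disables the website routes entirely
```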

View File

@@ -1,18 +1,25 @@
use axum::{routing::get, Router};
use axum::{Router, routing::get};
use super::AppState;
mod file;
mod frontend;
mod minify;
use file::{file_handler, index_handler};
pub use frontend::Frontend;
pub trait FilesRoutes {
fn add_website_routes(self) -> Self;
fn add_website_routes(self, frontend: Frontend) -> Self;
}
impl FilesRoutes for Router<AppState> {
fn add_website_routes(self) -> Self {
self.route("/{*path}", get(file_handler)).route("/", get(index_handler))
fn add_website_routes(self, frontend: Frontend) -> Self {
if frontend.is_some() {
self.route("/{*path}", get(file_handler))
.route("/", get(index_handler))
} else {
self
}
}
}

View File

@@ -1,14 +1,19 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]
#![doc = "```"]
use std::time::Instant;
use std::{
fs,
io::Cursor,
path::{Path, PathBuf},
time::Instant,
};
use api::{ApiRoutes, DTS};
use axum::{Json, Router, http::StatusCode, routing::get, serve};
use brk_computer::Computer;
use brk_core::path_dot_brk;
use brk_indexer::Indexer;
use brk_query::Query;
use color_eyre::owo_colors::OwoColorize;
@@ -22,21 +27,69 @@ mod api;
mod files;
mod traits;
pub use files::Frontend;
#[derive(Clone)]
pub struct AppState {
query: &'static Query<'static>,
frontend: Frontend,
websites_path: Option<PathBuf>,
}
pub const WEBSITE_DEV_PATH: &str = "../../websites/kibo.money/";
const DEV_PATH: &str = "../..";
const DOWNLOADS: &str = "downloads";
const WEBSITES: &str = "websites";
pub async fn main(indexer: Indexer, computer: Computer) -> color_eyre::Result<()> {
pub async fn main(
indexer: Indexer,
computer: Computer,
frontend: Frontend,
) -> color_eyre::Result<()> {
let indexer = Box::leak(Box::new(indexer));
let computer = Box::leak(Box::new(computer));
let query = Box::leak(Box::new(Query::build(indexer, computer)));
query.generate_dts_file()?;
let websites_path = if frontend.is_some() {
let websites_dev_path = Path::new(DEV_PATH).join(WEBSITES);
let state = AppState { query };
let websites_path = if fs::exists(&websites_dev_path)? {
websites_dev_path
} else {
let downloads_path = path_dot_brk().join(DOWNLOADS);
let downloaded_websites_path = downloads_path.join("brk-main").join(WEBSITES);
if !fs::exists(&downloaded_websites_path)? {
info!("Downloading websites from Github...");
// TODO
// Need to download a versioned archive; this is only for testing!
let url = "https://github.com/bitcoinresearchkit/brk/archive/refs/heads/main.zip";
let response = minreq::get(url).send()?;
let bytes = response.as_bytes();
let cursor = Cursor::new(bytes);
let mut zip = zip::ZipArchive::new(cursor)?;
zip.extract(&downloads_path)?;
}
downloaded_websites_path
};
query.generate_dts_file(frontend, websites_path.as_path())?;
Some(websites_path)
} else {
None
};
let state = AppState {
query,
frontend,
websites_path,
};
let compression_layer = CompressionLayer::new()
.br(true)
@@ -46,7 +99,7 @@ pub async fn main(indexer: Indexer, computer: Computer) -> color_eyre::Result<()
let router = Router::new()
.add_api_routes()
.add_website_routes()
.add_website_routes(frontend)
.route("/version", get(Json(env!("CARGO_PKG_VERSION"))))
.with_state(state)
.layer(compression_layer);
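The download branch above, pulled out into a standalone sketch (same minreq + zip calls as the commit; the hard-coded main branch URL is the commit's own TODO, not a recommendation):

```rust
use std::{io::Cursor, path::Path};

// Fetch the zipped repo and unpack it under <downloads_path>/brk-main/.
fn download_websites(downloads_path: &Path) -> color_eyre::Result<()> {
    let url = "https://github.com/bitcoinresearchkit/brk/archive/refs/heads/main.zip";
    let response = minreq::get(url).send()?; // minreq follows GitHub's redirect by default
    let cursor = Cursor::new(response.as_bytes()); // ZipArchive needs Read + Seek
    zip::ZipArchive::new(cursor)?.extract(downloads_path)?;
    Ok(())
}
```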

View File

@@ -1,4 +1,3 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![doc = "\n## Example\n\n```rust"]
#![doc = include_str!("../examples/main.rs")]