server: snapshot

This commit is contained in:
nym21
2026-01-12 12:34:30 +01:00
parent 1b9e18f98b
commit b12a72ea1a
24 changed files with 3619 additions and 5378 deletions

View File

@@ -2,7 +2,7 @@
use std::fmt::Write;
use crate::{Endpoint, Parameter, generators::{MANUAL_GENERIC_TYPES, write_description}, to_camel_case};
use crate::{Endpoint, Parameter, generators::{normalize_return_type, write_description}, to_camel_case};
/// Generate API methods for the BrkClient class.
pub fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
@@ -130,15 +130,6 @@ fn build_path_template(path: &str, path_params: &[Parameter]) -> String {
result
}
/// Replace generic types with their Any variants in return types.
fn normalize_return_type(return_type: &str) -> String {
let mut result = return_type.to_string();
for type_name in MANUAL_GENERIC_TYPES {
result = result.replace(type_name, &format!("Any{}", type_name));
}
result
}
/// Format param description with dash prefix, or empty string if no description.
fn format_param_desc(desc: Option<&str>) -> String {
match desc {

View File

@@ -4,7 +4,7 @@ use std::fmt::Write;
use serde_json::Value;
use crate::{TypeSchemas, generators::MANUAL_GENERIC_TYPES, ref_to_type_name, to_camel_case};
use crate::{TypeSchemas, generators::{MANUAL_GENERIC_TYPES, write_description}, ref_to_type_name, to_camel_case};
/// Generate JSDoc type definitions from OpenAPI schemas.
pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
@@ -26,7 +26,7 @@ pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
if is_primitive_alias(schema) {
if let Some(desc) = type_desc {
writeln!(output, "/**").unwrap();
write_jsdoc_description(output, desc);
write_description(output, desc, " * ", " *");
writeln!(output, " *").unwrap();
writeln!(output, " * @typedef {{{}}} {}", js_type, name).unwrap();
writeln!(output, " */").unwrap();
@@ -36,7 +36,7 @@ pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
} else if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) {
writeln!(output, "/**").unwrap();
if let Some(desc) = type_desc {
write_jsdoc_description(output, desc);
write_description(output, desc, " * ", " *");
writeln!(output, " *").unwrap();
}
writeln!(output, " * @typedef {{Object}} {}", name).unwrap();
@@ -64,7 +64,7 @@ pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
writeln!(output, " */").unwrap();
} else if let Some(desc) = type_desc {
writeln!(output, "/**").unwrap();
write_jsdoc_description(output, desc);
write_description(output, desc, " * ", " *");
writeln!(output, " *").unwrap();
writeln!(output, " * @typedef {{{}}} {}", js_type, name).unwrap();
writeln!(output, " */").unwrap();
@@ -75,17 +75,6 @@ pub fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
writeln!(output).unwrap();
}
/// Write a multi-line description with proper JSDoc formatting.
fn write_jsdoc_description(output: &mut String, desc: &str) {
for line in desc.lines() {
if line.is_empty() {
writeln!(output, " *").unwrap();
} else {
writeln!(output, " * {}", line).unwrap();
}
}
}
fn is_primitive_alias(schema: &Value) -> bool {
schema.get("properties").is_none()
&& schema.get("items").is_none()

View File

@@ -31,3 +31,13 @@ pub fn write_description(output: &mut String, desc: &str, prefix: &str, empty_pr
}
}
}
/// Replace generic types with their Any variants in return types.
/// Used by JS and Python generators.
pub fn normalize_return_type(return_type: &str) -> String {
    // Fold each manual generic type name into its "Any"-prefixed variant,
    // threading the accumulated string through every substitution.
    MANUAL_GENERIC_TYPES
        .into_iter()
        .fold(return_type.to_string(), |normalized, type_name| {
            normalized.replace(type_name, &format!("Any{}", type_name))
        })
}

View File

@@ -2,7 +2,7 @@
use std::fmt::Write;
use crate::{Endpoint, Parameter, escape_python_keyword, generators::{MANUAL_GENERIC_TYPES, write_description}, to_snake_case};
use crate::{Endpoint, Parameter, escape_python_keyword, generators::{normalize_return_type, write_description}, to_snake_case};
use super::client::generate_class_constants;
use super::types::js_type_to_python;
@@ -187,12 +187,3 @@ fn build_path_template(path: &str, path_params: &[Parameter]) -> String {
}
result
}
/// Replace generic types with their Any variants in return types.
fn normalize_return_type(return_type: &str) -> String {
let mut result = return_type.to_string();
for type_name in MANUAL_GENERIC_TYPES {
result = result.replace(type_name, &format!("Any{}", type_name));
}
result
}

View File

@@ -82,6 +82,7 @@ pub fn run() -> color_eyre::Result<()> {
let website = config.website();
let downloads_path = config.downloads_dir();
let data_path = config.brkdir();
let future = async move {
let bundle_path = if website.is_some() {
@@ -148,7 +149,7 @@ pub fn run() -> color_eyre::Result<()> {
}
}
let server = Server::new(&query, bundle_path);
let server = Server::new(&query, data_path, bundle_path);
tokio::spawn(async move {
server.serve(true).await.unwrap();

File diff suppressed because it is too large Load Diff

View File

@@ -3,6 +3,7 @@ use brk_error::Result;
use brk_indexer::Indexer;
use brk_mempool::Mempool;
use brk_reader::Reader;
use brk_rpc::Client;
use tokio::task::spawn_blocking;
use crate::Query;
@@ -53,4 +54,8 @@ impl AsyncQuery {
pub fn inner(&self) -> &Query {
&self.0
}
pub fn client(&self) -> &Client {
self.0.client()
}
}

View File

@@ -7,6 +7,7 @@ use brk_computer::Computer;
use brk_indexer::Indexer;
use brk_mempool::Mempool;
use brk_reader::Reader;
use brk_rpc::Client;
use brk_types::Height;
use vecdb::AnyStoredVec;
@@ -32,6 +33,7 @@ pub use vecs::Vecs;
pub struct Query(Arc<QueryInner<'static>>);
struct QueryInner<'a> {
vecs: &'a Vecs<'a>,
client: Client,
reader: Reader,
indexer: &'a Indexer,
computer: &'a Computer,
@@ -45,6 +47,7 @@ impl Query {
computer: &Computer,
mempool: Option<Mempool>,
) -> Self {
let client = reader.client().clone();
let reader = reader.clone();
let indexer = Box::leak(Box::new(indexer.clone()));
let computer = Box::leak(Box::new(computer.clone()));
@@ -52,6 +55,7 @@ impl Query {
Self(Arc::new(QueryInner {
vecs,
client,
reader,
indexer,
computer,
@@ -69,6 +73,16 @@ impl Query {
&self.0.reader
}
#[inline]
pub fn client(&self) -> &Client {
&self.0.client
}
#[inline]
pub fn blocks_dir(&self) -> &std::path::Path {
self.0.reader.blocks_dir()
}
#[inline]
pub fn indexer(&self) -> &Indexer {
self.0.indexer

View File

@@ -6,7 +6,7 @@ use std::{
io::{Read, Seek, SeekFrom},
mem,
ops::ControlFlow,
path::PathBuf,
path::{Path, PathBuf},
sync::Arc,
thread,
time::Duration,
@@ -75,6 +75,10 @@ impl ReaderInner {
&self.client
}
pub fn blocks_dir(&self) -> &Path {
&self.blocks_dir
}
pub fn blk_index_to_blk_path(&self) -> RwLockReadGuard<'_, BlkIndexToBlkPath> {
self.blk_index_to_blk_path.read()
}

View File

@@ -54,7 +54,7 @@ fn run() -> Result<()> {
// Option 1: block_on to run and properly propagate errors
runtime.block_on(async move {
let server = Server::new(&query, None);
let server = Server::new(&query, outputs_dir, None);
let handle = tokio::spawn(async move { server.serve(true).await });

View File

@@ -1,25 +1,24 @@
use std::{borrow::Cow, sync::Arc};
use std::sync::Arc;
use aide::{
axum::{ApiRouter, routing::get_with},
axum::ApiRouter,
openapi::OpenApi,
};
use axum::{
Extension, Json,
extract::State,
Extension,
http::HeaderMap,
response::{Html, Redirect, Response},
routing::get,
};
use brk_types::Health;
use crate::{
CacheStrategy, VERSION,
VERSION,
api::{
addresses::AddressRoutes, blocks::BlockRoutes, mempool::MempoolRoutes,
metrics::ApiMetricsRoutes, mining::MiningRoutes, transactions::TxRoutes,
metrics::ApiMetricsRoutes, mining::MiningRoutes, server::ServerRoutes,
transactions::TxRoutes,
},
extended::{HeaderMapExtended, ResponseExtended, TransformResponseExtended},
extended::{HeaderMapExtended, ResponseExtended},
};
use super::AppState;
@@ -30,6 +29,7 @@ mod mempool;
mod metrics;
mod mining;
mod openapi;
mod server;
mod transactions;
pub use openapi::*;
@@ -46,46 +46,8 @@ impl ApiRoutes for ApiRouter<AppState> {
.add_mining_routes()
.add_tx_routes()
.add_metrics_routes()
.add_server_routes()
.route("/api/server", get(Redirect::temporary("/api#tag/server")))
.api_route(
"/version",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Static, |_| {
Ok(env!("CARGO_PKG_VERSION"))
})
.await
},
|op| {
op.id("get_version")
.server_tag()
.summary("API version")
.description("Returns the current version of the API server")
.ok_response::<String>()
.not_modified()
},
),
)
.api_route(
"/health",
get_with(
async || -> Json<Health> {
Json(Health {
status: Cow::Borrowed("healthy"),
service: Cow::Borrowed("brk"),
timestamp: jiff::Timestamp::now().to_string(),
})
},
|op| {
op.id("get_health")
.server_tag()
.summary("Health check")
.description("Returns the health status of the API server")
.ok_response::<Health>()
},
),
)
.route(
"/api.json",
get(

View File

@@ -0,0 +1,174 @@
use std::{borrow::Cow, fs, path};
use aide::axum::{ApiRouter, routing::get_with};
use axum::{extract::State, http::HeaderMap};
use brk_types::{DiskUsage, Health, Height, SyncStatus};
use vecdb::GenericStoredVec;
use crate::{CacheStrategy, extended::TransformResponseExtended};
use super::AppState;
/// Extension trait that mounts the server-introspection endpoints
/// (`/api/server/sync`, `/api/server/disk`, `/health`, `/version`)
/// onto the API router.
pub trait ServerRoutes {
fn add_server_routes(self) -> Self;
}
impl ServerRoutes for ApiRouter<AppState> {
// Registers the sync-status, disk-usage, health and version routes.
fn add_server_routes(self) -> Self {
// GET /api/server/sync — how far the indexer is behind the node tip.
self.api_route(
"/api/server/sync",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
// Fetched before entering the cache closure; presumably the node's
// current tip height (TODO confirm) — fallible, unwrapped with `?` below.
let tip_height = state.client.get_last_height();
state
// Cached per indexed height: the response only changes when a
// new block is indexed.
.cached_json(&headers, CacheStrategy::Height, move |q| {
let indexed_height = q.height();
let tip_height = tip_height?;
// saturating_sub: never negative even if the indexed height
// is ahead of the reported tip.
let blocks_behind = Height::from(tip_height.saturating_sub(*indexed_height));
// Timestamp of the highest indexed block, read from the
// indexer's stored block-timestamp vector.
let last_indexed_at_unix = q
.indexer()
.vecs
.blocks
.timestamp
.read_once(indexed_height)?;
Ok(SyncStatus {
indexed_height,
tip_height,
blocks_behind,
last_indexed_at: last_indexed_at_unix.to_iso8601(),
last_indexed_at_unix,
})
})
.await
},
|op| {
op.id("get_sync_status")
.server_tag()
.summary("Sync status")
.description(
"Returns the sync status of the indexer, including indexed height, \
tip height, blocks behind, and last indexed timestamp.",
)
.ok_response::<SyncStatus>()
.not_modified()
},
),
)
// GET /api/server/disk — on-disk footprint of brk data vs. Bitcoin blocks.
.api_route(
"/api/server/disk",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
// Clone so the path can move into the cache closure.
let brk_path = state.data_path.clone();
state
// NOTE(review): cached per indexed height, so the reported
// usage refreshes whenever a new block is indexed.
.cached_json(&headers, CacheStrategy::Height, move |q| {
let brk_bytes = dir_size(&brk_path)?;
let bitcoin_bytes = dir_size(q.blocks_dir())?;
Ok(DiskUsage::new(brk_bytes, bitcoin_bytes))
})
.await
},
|op| {
op.id("get_disk_usage")
.server_tag()
.summary("Disk usage")
.description(
"Returns the disk space used by the indexed data.",
)
.ok_response::<DiskUsage>()
.not_modified()
},
),
)
// GET /health — liveness probe; built fresh on every request (no cache).
.api_route(
"/health",
get_with(
async |State(state): State<AppState>| -> axum::Json<Health> {
// Monotonic clock for uptime; the wall-clock start time is kept
// separately in `state.started_at`.
let uptime = state.started_instant.elapsed();
axum::Json(Health {
status: Cow::Borrowed("healthy"),
service: Cow::Borrowed("brk"),
timestamp: jiff::Timestamp::now().to_string(),
started_at: state.started_at.to_string(),
uptime_seconds: uptime.as_secs(),
})
},
|op| {
op.id("get_health")
.server_tag()
.summary("Health check")
.description("Returns the health status of the API server, including uptime information.")
.ok_response::<Health>()
},
),
)
// GET /version — crate version baked in at compile time; statically cached.
.api_route(
"/version",
get_with(
async |headers: HeaderMap, State(state): State<AppState>| {
state
.cached_json(&headers, CacheStrategy::Static, |_| {
Ok(env!("CARGO_PKG_VERSION"))
})
.await
},
|op| {
op.id("get_version")
.server_tag()
.summary("API version")
.description("Returns the current version of the API server")
.ok_response::<String>()
.not_modified()
},
),
)
}
}
/// Recursively computes the disk usage of `path` in bytes.
///
/// Counts allocated blocks (512-byte units) rather than apparent size, so
/// sparse files report their actual on-disk footprint.
///
/// # Errors
/// Propagates any I/O error from reading directories or file metadata.
#[cfg(unix)]
fn dir_size(path: &path::Path) -> brk_error::Result<u64> {
    use std::os::unix::fs::MetadataExt;

    if path.is_file() {
        // blocks * 512 = actual disk usage (accounts for sparse files)
        return Ok(fs::metadata(path)?.blocks() * 512);
    }
    let mut total = 0u64;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        // file_type() comes from the directory entry itself and does NOT
        // follow symlinks — the previous `path.is_dir()` check did, which
        // could recurse forever on a symlink cycle or count data outside
        // the tree.
        let file_type = entry.file_type()?;
        if file_type.is_dir() {
            total += dir_size(&entry.path())?;
        } else if file_type.is_file() {
            // entry.metadata() does not traverse symlinks either.
            total += entry.metadata()?.blocks() * 512;
        }
        // Symlinks and special files occupy negligible space and are skipped.
    }
    Ok(total)
}
/// Recursively computes the disk usage of `path` in bytes.
///
/// Non-Unix fallback: uses apparent file sizes (`len()`) since block-level
/// allocation counts are not portably available.
///
/// # Errors
/// Propagates any I/O error from reading directories or file metadata.
#[cfg(not(unix))]
fn dir_size(path: &path::Path) -> brk_error::Result<u64> {
    if path.is_file() {
        return Ok(fs::metadata(path)?.len());
    }
    let mut total = 0u64;
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        // file_type() does NOT follow symlinks — the previous `path.is_dir()`
        // check did, which could recurse forever on a symlink cycle or count
        // data outside the tree.
        let file_type = entry.file_type()?;
        if file_type.is_dir() {
            total += dir_size(&entry.path())?;
        } else if file_type.is_file() {
            total += entry.metadata()?.len();
        }
        // Symlinks and special files are skipped.
    }
    Ok(total)
}

View File

@@ -33,7 +33,7 @@ fn any_handler(
state: AppState,
path: Option<extract::Path<String>>,
) -> Response {
let files_path = state.path.as_ref().unwrap();
let files_path = state.files_path.as_ref().unwrap();
if let Some(path) = path.as_ref() {
// Sanitize path components to prevent traversal attacks

View File

@@ -1,6 +1,6 @@
#![doc = include_str!("../README.md")]
use std::{panic, path::PathBuf, sync::Arc, time::Duration};
use std::{panic, path::PathBuf, sync::Arc, time::{Duration, Instant}};
use aide::axum::ApiRouter;
use axum::{
@@ -37,11 +37,15 @@ pub const VERSION: &str = env!("CARGO_PKG_VERSION");
pub struct Server(AppState);
impl Server {
pub fn new(query: &AsyncQuery, files_path: Option<PathBuf>) -> Self {
pub fn new(query: &AsyncQuery, data_path: PathBuf, files_path: Option<PathBuf>) -> Self {
Self(AppState {
client: query.client().clone(),
query: query.clone(),
path: files_path,
data_path,
files_path,
cache: Arc::new(Cache::new(5_000)),
started_at: jiff::Timestamp::now(),
started_instant: Instant::now(),
})
}
@@ -83,7 +87,7 @@ impl Server {
let vecs = state.query.inner().vecs();
let router = ApiRouter::new()
.add_api_routes()
.add_files_routes(state.path.as_ref())
.add_files_routes(state.files_path.as_ref())
.route(
"/discord",
get(Redirect::temporary("https://discord.gg/WACpShCB7M")),

View File

@@ -1,4 +1,4 @@
use std::{path::PathBuf, sync::Arc};
use std::{path::PathBuf, sync::Arc, time::Instant};
use derive_more::Deref;
@@ -7,6 +7,8 @@ use axum::{
http::{HeaderMap, Response},
};
use brk_query::AsyncQuery;
use brk_rpc::Client;
use jiff::Timestamp;
use quick_cache::sync::Cache;
use serde::Serialize;
@@ -19,8 +21,12 @@ use crate::{
pub struct AppState {
#[deref]
pub query: AsyncQuery,
pub path: Option<PathBuf>,
pub data_path: PathBuf,
pub files_path: Option<PathBuf>,
pub cache: Arc<Cache<String, Bytes>>,
pub client: Client,
pub started_at: Timestamp,
pub started_instant: Instant,
}
impl AppState {

View File

@@ -0,0 +1,53 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
/// Disk usage of the indexed data
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DiskUsage {
/// Human-readable brk data size (e.g., "48.8 GiB")
pub brk: String,
/// brk data size in bytes
pub brk_bytes: u64,
/// Human-readable Bitcoin blocks directory size
pub bitcoin: String,
/// Bitcoin blocks directory size in bytes
pub bitcoin_bytes: u64,
/// brk size as a fraction of the Bitcoin data size
/// (brk_bytes / bitcoin_bytes, e.g. 0.05 — not a percent; 0.0 when bitcoin_bytes is 0)
pub ratio: f64,
}
impl DiskUsage {
    /// Build a `DiskUsage` from raw byte counts, deriving the human-readable
    /// sizes and the brk/bitcoin size ratio (0.0 when the divisor is zero).
    pub fn new(brk_bytes: u64, bitcoin_bytes: u64) -> Self {
        // Guard the division so an empty blocks directory yields 0.0, not NaN/inf.
        let ratio = match bitcoin_bytes {
            0 => 0.0,
            divisor => brk_bytes as f64 / divisor as f64,
        };
        Self {
            brk: format_bytes(brk_bytes),
            brk_bytes,
            bitcoin: format_bytes(bitcoin_bytes),
            bitcoin_bytes,
            ratio,
        }
    }
}
/// Render a byte count as a human-readable string using binary (1024-based)
/// units, e.g. 1536 -> "1.50 KiB"; values below 1 KiB print as "<n> B".
fn format_bytes(bytes: u64) -> String {
    // Largest unit first; the first threshold that fits wins.
    const UNITS: [(u64, &str); 4] = [
        (1 << 40, "TiB"),
        (1 << 30, "GiB"),
        (1 << 20, "MiB"),
        (1 << 10, "KiB"),
    ];
    for (scale, suffix) in UNITS {
        if bytes >= scale {
            return format!("{:.2} {}", bytes as f64 / scale as f64, suffix);
        }
    }
    format!("{} B", bytes)
}

View File

@@ -9,4 +9,8 @@ pub struct Health {
pub status: Cow<'static, str>,
pub service: Cow<'static, str>,
pub timestamp: String,
/// Server start time (ISO 8601)
pub started_at: String,
/// Uptime in seconds
pub uptime_seconds: u64,
}

View File

@@ -44,6 +44,7 @@ mod dateindex;
mod decadeindex;
mod deser;
mod difficultyadjustment;
mod diskusage;
mod difficultyadjustmententry;
mod difficultyentry;
mod difficultyepoch;
@@ -131,6 +132,7 @@ mod stored_u32;
mod stored_u64;
mod stored_u8;
mod supply_state;
mod syncstatus;
mod timeperiod;
mod timeperiodparam;
mod term;
@@ -206,6 +208,7 @@ pub use dateindex::*;
pub use decadeindex::*;
pub use deser::*;
pub use difficultyadjustment::*;
pub use diskusage::*;
pub use difficultyadjustmententry::*;
pub use difficultyentry::*;
pub use difficultyepoch::*;
@@ -293,6 +296,7 @@ pub use stored_u16::*;
pub use stored_u32::*;
pub use stored_u64::*;
pub use supply_state::*;
pub use syncstatus::*;
pub use term::*;
pub use timeperiod::*;
pub use timeperiodparam::*;

View File

@@ -0,0 +1,19 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{Height, Timestamp};
/// Sync status of the indexer
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct SyncStatus {
/// Height of the last indexed block
pub indexed_height: Height,
/// Height of the chain tip (from Bitcoin node)
pub tip_height: Height,
/// Number of blocks behind the tip (tip - indexed, saturating at 0)
pub blocks_behind: Height,
/// Human-readable timestamp of the last indexed block (ISO 8601)
pub last_indexed_at: String,
/// Unix timestamp of the last indexed block
pub last_indexed_at_unix: Timestamp,
}

View File

@@ -79,6 +79,11 @@ impl Timestamp {
pub fn now() -> Self {
Self::from(jiff::Timestamp::now())
}
/// Returns an ISO 8601 formatted string
// Delegates to jiff's Display impl, which renders the instant in UTC
// (RFC 3339 form, e.g. "2026-01-12T11:34:30Z").
pub fn to_iso8601(self) -> String {
jiff::Timestamp::from(self).to_string()
}
}
impl From<u32> for Timestamp {

File diff suppressed because it is too large Load Diff

View File

@@ -397,6 +397,23 @@ class DifficultyEntry(TypedDict):
difficulty: float
height: Height
class DiskUsage(TypedDict):
    """
    Disk usage of the indexed data

    Attributes:
        brk: Human-readable brk data size (e.g., "48.8 GiB")
        brk_bytes: brk data size in bytes
        bitcoin: Human-readable Bitcoin blocks directory size
        bitcoin_bytes: Bitcoin blocks directory size in bytes
        ratio: brk size as a fraction of the Bitcoin data size (e.g. 0.05, not a percent)
    """
    brk: str
    brk_bytes: int
    bitcoin: str
    bitcoin_bytes: int
    ratio: float
class EmptyAddressData(TypedDict):
"""
Data of an empty address
@@ -439,10 +456,16 @@ class HashrateSummary(TypedDict):
class Health(TypedDict):
"""
Server health status
Attributes:
started_at: Server start time (ISO 8601)
uptime_seconds: Uptime in seconds
"""
status: str
service: str
timestamp: str
started_at: str
uptime_seconds: int
class HeightParam(TypedDict):
height: Height
@@ -780,6 +803,23 @@ class SupplyState(TypedDict):
utxo_count: int
value: Sats
class SyncStatus(TypedDict):
"""
Sync status of the indexer
Attributes:
indexed_height: Height of the last indexed block
tip_height: Height of the chain tip (from Bitcoin node)
blocks_behind: Number of blocks behind the tip
last_indexed_at: Human-readable timestamp of the last indexed block (ISO 8601)
last_indexed_at_unix: Unix timestamp of the last indexed block
"""
indexed_height: Height
tip_height: Height
blocks_behind: Height
last_indexed_at: str
last_indexed_at_unix: Timestamp
class TimePeriodParam(TypedDict):
time_period: TimePeriod
@@ -2537,14 +2577,14 @@ class PeriodAveragePricePattern(Generic[T]):
self._6y: MetricPattern4[T] = MetricPattern4(client, (f'6y_{acc}' if acc else '6y'))
self._8y: MetricPattern4[T] = MetricPattern4(client, (f'8y_{acc}' if acc else '8y'))
class FullnessPattern(Generic[T]):
class DollarsPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average'))
self.base: MetricPattern11[T] = MetricPattern11(client, acc)
self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative'))
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
@@ -2570,14 +2610,14 @@ class ClassAveragePricePattern(Generic[T]):
self._2024: MetricPattern4[T] = MetricPattern4(client, f'{base_path}_2024')
self._2025: MetricPattern4[T] = MetricPattern4(client, f'{base_path}_2025')
class DollarsPattern(Generic[T]):
class FullnessPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average'))
self.base: MetricPattern11[T] = MetricPattern11(client, acc)
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative'))
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
@@ -2587,22 +2627,6 @@ class DollarsPattern(Generic[T]):
self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
class RelativePattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap'))
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap'))
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'))
self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply'))
self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap'))
self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap'))
self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl'))
class RelativePattern:
"""Pattern struct for repeated tree structure."""
@@ -2619,6 +2643,22 @@ class RelativePattern:
self.unrealized_loss_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_market_cap'))
self.unrealized_profit_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_market_cap'))
class RelativePattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap'))
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap'))
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'))
self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply'))
self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap'))
self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap'))
self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl'))
class CountPattern2(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2678,20 +2718,7 @@ class _0satsPattern:
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _10yTo12yPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, acc)
self.realized: RealizedPattern2 = RealizedPattern2(client, acc)
self.relative: RelativePattern2 = RelativePattern2(client, acc)
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _10yPattern:
class _0satsPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
@@ -2699,8 +2726,8 @@ class _10yPattern:
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, acc)
self.realized: RealizedPattern4 = RealizedPattern4(client, acc)
self.relative: RelativePattern = RelativePattern(client, acc)
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in'))
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
@@ -2717,16 +2744,29 @@ class UnrealizedPattern:
self.unrealized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_loss'))
self.unrealized_profit: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_profit'))
class _0satsPattern2:
class PeriodCagrPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'10y_{acc}' if acc else '10y'))
self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'2y_{acc}' if acc else '2y'))
self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'3y_{acc}' if acc else '3y'))
self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'4y_{acc}' if acc else '4y'))
self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'5y_{acc}' if acc else '5y'))
self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'6y_{acc}' if acc else '6y'))
self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'8y_{acc}' if acc else '8y'))
class _10yTo12yPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.cost_basis: CostBasisPattern2 = CostBasisPattern2(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, acc)
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in'))
self.realized: RealizedPattern2 = RealizedPattern2(client, acc)
self.relative: RelativePattern2 = RelativePattern2(client, acc)
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
@@ -2743,18 +2783,18 @@ class _100btcPattern:
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class PeriodCagrPattern:
class _10yPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'10y_{acc}' if acc else '10y'))
self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'2y_{acc}' if acc else '2y'))
self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'3y_{acc}' if acc else '3y'))
self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'4y_{acc}' if acc else '4y'))
self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'5y_{acc}' if acc else '5y'))
self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'6y_{acc}' if acc else '6y'))
self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, (f'8y_{acc}' if acc else '8y'))
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, acc)
self.realized: RealizedPattern4 = RealizedPattern4(client, acc)
self.relative: RelativePattern = RelativePattern(client, acc)
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class ActivityPattern2:
"""Pattern struct for repeated tree structure."""
@@ -2777,15 +2817,6 @@ class SplitPattern2(Generic[T]):
self.low: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'low'))
self.open: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'open'))
class UnclaimedRewardsPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, _m(acc, 'btc'))
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc)
class ActiveSupplyPattern:
"""Pattern struct for repeated tree structure."""
@@ -2795,32 +2826,13 @@ class ActiveSupplyPattern:
self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd'))
self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc)
class CoinbasePattern:
class CostBasisPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: FullnessPattern[Bitcoin] = FullnessPattern(client, _m(acc, 'btc'))
self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd'))
self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc)
class SegwitAdoptionPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.base: MetricPattern11[StoredF32] = MetricPattern11(client, acc)
self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative'))
self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum'))
class _2015Pattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc'))
self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd'))
self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc)
def __init__(self, client: BrkClientBase, base_path: str):
self.max: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_max')
self.min: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_min')
self.percentiles: PercentilesPattern = PercentilesPattern(client, f'{base_path}_percentiles')
class CoinbasePattern2:
"""Pattern struct for repeated tree structure."""
@@ -2831,13 +2843,41 @@ class CoinbasePattern2:
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc)
class CostBasisPattern2:
class UnclaimedRewardsPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, base_path: str):
self.max: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_max')
self.min: MetricPattern1[Dollars] = MetricPattern1(client, f'{base_path}_min')
self.percentiles: PercentilesPattern = PercentilesPattern(client, f'{base_path}_percentiles')
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: BitcoinPattern[Bitcoin] = BitcoinPattern(client, _m(acc, 'btc'))
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc)
class CoinbasePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: FullnessPattern[Bitcoin] = FullnessPattern(client, _m(acc, 'btc'))
self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd'))
self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc)
class _2015Pattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc'))
self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd'))
self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc)
class SegwitAdoptionPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.base: MetricPattern11[StoredF32] = MetricPattern11(client, acc)
self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative'))
self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum'))
class CostBasisPattern:
"""Pattern struct for repeated tree structure."""
@@ -2879,6 +2919,13 @@ class BitcoinPattern(Generic[T]):
self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative'))
self.sum: MetricPattern1[T] = MetricPattern1(client, acc)
class SatsPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, base_path: str):
self.ohlc: MetricPattern1[T] = MetricPattern1(client, f'{base_path}_ohlc')
self.split: SplitPattern2[Any] = SplitPattern2(client, f'{base_path}_split')
class BlockCountPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2887,12 +2934,12 @@ class BlockCountPattern(Generic[T]):
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.sum: MetricPattern1[T] = MetricPattern1(client, acc)
class SatsPattern(Generic[T]):
class OutputsPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, base_path: str):
self.ohlc: MetricPattern1[T] = MetricPattern1(client, f'{base_path}_ohlc')
self.split: SplitPattern2[Any] = SplitPattern2(client, f'{base_path}_split')
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count'))
class RealizedPriceExtraPattern:
"""Pattern struct for repeated tree structure."""
@@ -2901,13 +2948,6 @@ class RealizedPriceExtraPattern:
"""Create pattern node with accumulated metric name."""
self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'ratio'))
class OutputsPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'utxo_count'))
# Metrics tree classes
class MetricsTree_Addresses:
@@ -5600,6 +5640,22 @@ class BrkClient(BrkClientBase):
path = f'/api/metrics/search/{metric}{"?" + query if query else ""}'
return self.get_json(path)
def get_disk_usage(self) -> DiskUsage:
"""Disk usage.
Returns the disk space used by the indexed data.
Endpoint: `GET /api/server/disk`"""
return self.get_json('/api/server/disk')
def get_sync_status(self) -> SyncStatus:
"""Sync status.
Returns the sync status of the indexer, including indexed height, tip height, blocks behind, and last indexed timestamp.
Endpoint: `GET /api/server/sync`"""
return self.get_json('/api/server/sync')
def get_tx(self, txid: Txid) -> Transaction:
"""Transaction information.
@@ -5823,7 +5879,7 @@ class BrkClient(BrkClientBase):
def get_health(self) -> Health:
"""Health check.
Returns the health status of the API server
Returns the health status of the API server, including uptime information.
Endpoint: `GET /health`"""
return self.get_json('/health')

View File

@@ -1563,7 +1563,7 @@
<link rel="modulepreload" href="/scripts/entry.7b7383d1.js">
<link rel="modulepreload" href="/scripts/lazy.1ae52534.js">
<link rel="modulepreload" href="/scripts/main.22a5bd79.js">
<link rel="modulepreload" href="/scripts/modules/brk-client/index.c18ba682.js">
<link rel="modulepreload" href="/scripts/modules/brk-client/index.74c13abc.js">
<link rel="modulepreload" href="/scripts/modules/brk-client/tests/basic.b92ff866.js">
<link rel="modulepreload" href="/scripts/modules/brk-client/tests/tree.ba9474f7.js">
<link rel="modulepreload" href="/scripts/modules/lean-qr/2.6.1/index.09195c13.mjs">
@@ -1634,7 +1634,7 @@
"/scripts/entry.js": "/scripts/entry.7b7383d1.js",
"/scripts/lazy.js": "/scripts/lazy.1ae52534.js",
"/scripts/main.js": "/scripts/main.22a5bd79.js",
"/scripts/modules/brk-client/index.js": "/scripts/modules/brk-client/index.c18ba682.js",
"/scripts/modules/brk-client/index.js": "/scripts/modules/brk-client/index.74c13abc.js",
"/scripts/modules/brk-client/tests/basic.js": "/scripts/modules/brk-client/tests/basic.b92ff866.js",
"/scripts/modules/brk-client/tests/tree.js": "/scripts/modules/brk-client/tests/tree.ba9474f7.js",
"/scripts/modules/lean-qr/2.6.1/index.mjs": "/scripts/modules/lean-qr/2.6.1/index.09195c13.mjs",

View File

@@ -3,8 +3,14 @@ const ROOT = "/";
const API = "/api";
const BYPASS = new Set([
"/changelog", "/crate", "/discord", "/github", "/health",
"/install", "/mcp", "/nostr", "/service", "/status", "/version"
"/changelog",
"/crate",
"/discord",
"/github",
"/install",
"/nostr",
"/service",
"/status",
]);
// Match hashed filenames: name.abc12345.js/mjs/css
@@ -13,16 +19,18 @@ const HASHED_RE = /\.[0-9a-f]{8}\.(js|mjs|css)$/;
/** @type {ServiceWorkerGlobalScope} */
const sw = /** @type {any} */ (self);
const offline = () => new Response("Offline", {
status: 503,
headers: { "Content-Type": "text/plain" }
});
const offline = () =>
new Response("Offline", {
status: 503,
headers: { "Content-Type": "text/plain" },
});
sw.addEventListener("install", (e) => {
e.waitUntil(
caches.open(CACHE)
caches
.open(CACHE)
.then((c) => c.addAll([ROOT]))
.then(() => sw.skipWaiting())
.then(() => sw.skipWaiting()),
);
});
@@ -30,10 +38,14 @@ sw.addEventListener("activate", (e) => {
e.waitUntil(
Promise.all([
sw.clients.claim(),
caches.keys().then((keys) =>
Promise.all(keys.filter((k) => k !== CACHE).map((k) => caches.delete(k)))
),
])
caches
.keys()
.then((keys) =>
Promise.all(
keys.filter((k) => k !== CACHE).map((k) => caches.delete(k)),
),
),
]),
);
});
@@ -57,7 +69,7 @@ sw.addEventListener("fetch", (event) => {
if (res.ok) caches.open(CACHE).then((c) => c.put(ROOT, res.clone()));
return res;
})
.catch(() => caches.match(ROOT).then((c) => c || offline()))
.catch(() => caches.match(ROOT).then((c) => c || offline())),
);
return;
}
@@ -65,15 +77,18 @@ sw.addEventListener("fetch", (event) => {
// Hashed assets: cache-first (immutable)
if (HASHED_RE.test(path)) {
event.respondWith(
caches.match(req)
.then((cached) =>
cached ||
fetch(req).then((res) => {
if (res.ok) caches.open(CACHE).then((c) => c.put(req, res.clone()));
return res;
})
caches
.match(req)
.then(
(cached) =>
cached ||
fetch(req).then((res) => {
if (res.ok)
caches.open(CACHE).then((c) => c.put(req, res.clone()));
return res;
}),
)
.catch(() => offline())
.catch(() => offline()),
);
return;
}
@@ -88,9 +103,15 @@ sw.addEventListener("fetch", (event) => {
return res;
})
.catch(() =>
caches.match(req).then((cached) =>
cached || (isStatic ? offline() : caches.match(ROOT).then((c) => c || offline()))
)
)
caches
.match(req)
.then(
(cached) =>
cached ||
(isStatic
? offline()
: caches.match(ROOT).then((c) => c || offline())),
),
),
);
});