diff --git a/.cargo/config.toml b/.cargo/config.toml index d627f84c7..ca68a34df 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -13,3 +13,6 @@ rustflags = ["-C", "target-cpu=native", "-C", "target-feature=+bmi1,+bmi2,+avx2" [target.x86_64-pc-windows-gnu] rustflags = ["-C", "target-cpu=native", "-C", "target-feature=+bmi1,+bmi2,+avx2"] + +[alias] +dev = "run -p brk_cli --features brk_server/bindgen" diff --git a/.gitignore b/.gitignore index 76cffb2ea..d11c88af2 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ _* /filter_* /heatmaps* /oracle* +/playground # Logs *.log* diff --git a/Cargo.lock b/Cargo.lock index f1b54d4de..853c4f974 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -54,7 +54,7 @@ dependencies = [ "serde", "serde_json", "serde_qs", - "thiserror 2.0.18", + "thiserror", "tower-layer", "tower-service", "tracing", @@ -463,7 +463,6 @@ dependencies = [ "color-eyre", "derive_more", "pco", - "plotters", "rayon", "rustc-hash", "schemars", @@ -483,7 +482,7 @@ dependencies = [ "jiff", "minreq", "serde_json", - "thiserror 2.0.18", + "thiserror", "tokio", "vecdb", ] @@ -540,10 +539,8 @@ name = "brk_logger" version = "0.1.0-alpha.6" dependencies = [ "jiff", - "logroller", "owo-colors", "tracing", - "tracing-appender", "tracing-log", "tracing-subscriber", ] @@ -1071,15 +1068,6 @@ dependencies = [ "parking_lot_core", ] -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", -] - [[package]] name = "derive_more" version = "2.1.1" @@ -1981,18 +1969,6 @@ version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" -[[package]] -name = "logroller" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"83db12bbf439ebe64c0b0e4402f435b6f866db498fc1ae17e1b5d1a01625e2be" -dependencies = [ - "chrono", - "flate2", - "regex", - "thiserror 1.0.69", -] - [[package]] name = "lsm-tree" version = "3.0.1" @@ -2137,12 +2113,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - [[package]] name = "num-traits" version = "0.2.19" @@ -2372,12 +2342,6 @@ dependencies = [ "zerovec", ] -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - [[package]] name = "ppv-lite86" version = "0.2.21" @@ -2492,7 +2456,7 @@ dependencies = [ "parking_lot", "rayon", "smallvec", - "thiserror 2.0.18", + "thiserror", ] [[package]] @@ -2532,7 +2496,7 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.17", "libredox", - "thiserror 2.0.18", + "thiserror", ] [[package]] @@ -2845,7 +2809,7 @@ dependencies = [ "futures", "percent-encoding", "serde", - "thiserror 2.0.18", + "thiserror", ] [[package]] @@ -3000,33 +2964,13 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - [[package]] name = "thiserror" version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl 2.0.18", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "thiserror-impl", ] [[package]] @@ -3049,37 +2993,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "time" -version = "0.3.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde_core", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" - -[[package]] -name = "time-macros" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" -dependencies = [ - "num-conv", - "time-core", -] - [[package]] name = "tinystr" version = "0.8.2" @@ -3227,18 +3140,6 @@ dependencies = [ "tracing-core", ] -[[package]] -name = "tracing-appender" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" -dependencies = [ - "crossbeam-channel", - "thiserror 2.0.18", - "time", - "tracing-subscriber", -] - [[package]] name = "tracing-attributes" version = "0.1.31" @@ -3378,7 +3279,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 2.0.18", + "thiserror", "vecdb_derive", "zerocopy", "zstd", diff --git a/Cargo.toml b/Cargo.toml index b58260bfe..1d338c33e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -82,6 +82,7 @@ tracing = { version = "0.1", default-features = false, features = ["std"] } tower-http = { version = "0.6.8", features = ["catch-panic", "compression-br", "compression-gzip", "compression-zstd", "cors", "normalize-path", "timeout", "trace"] } tower-layer = "0.3" vecdb = { version = 
"0.6.1", features = ["derive", "serde_json", "pco", "schemars"] } +# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } [workspace.metadata.release] shared-version = true diff --git a/crates/brk_bindgen/src/generators/javascript/client.rs b/crates/brk_bindgen/src/generators/javascript/client.rs index 5333ba362..e0b00918c 100644 --- a/crates/brk_bindgen/src/generators/javascript/client.rs +++ b/crates/brk_bindgen/src/generators/javascript/client.rs @@ -51,9 +51,11 @@ class BrkError extends Error {{ /** * @template T * @typedef {{Object}} MetricData + * @property {{number}} version - Version of the metric data * @property {{number}} total - Total number of data points * @property {{number}} start - Start index (inclusive) * @property {{number}} end - End index (exclusive) + * @property {{string}} stamp - ISO 8601 timestamp of when the response was generated * @property {{T[]}} data - The metric data */ /** @typedef {{MetricData}} AnyMetricData */ diff --git a/crates/brk_bindgen/src/generators/python/client.rs b/crates/brk_bindgen/src/generators/python/client.rs index 81b658370..30352c162 100644 --- a/crates/brk_bindgen/src/generators/python/client.rs +++ b/crates/brk_bindgen/src/generators/python/client.rs @@ -133,9 +133,11 @@ pub fn generate_endpoint_class(output: &mut String) { output, r#"class MetricData(TypedDict, Generic[T]): """Metric data with range information.""" + version: int total: int start: int end: int + stamp: str data: List[T] diff --git a/crates/brk_cli/README.md b/crates/brk_cli/README.md index a9fd9f453..6d5d289f6 100644 --- a/crates/brk_cli/README.md +++ b/crates/brk_cli/README.md @@ -37,6 +37,8 @@ brk Indexes the blockchain, computes datasets, starts the server on `localhost:3110`, and waits for new blocks. +**Note:** When more than 10,000 blocks behind, indexing completes before the server starts to free up memory from fragmentation that occurs during large syncs. 
The web interface at `localhost:3110` won't be available until sync finishes. + ## Options ```bash diff --git a/crates/brk_cli/src/main.rs b/crates/brk_cli/src/main.rs index 5d87f754b..f21d13684 100644 --- a/crates/brk_cli/src/main.rs +++ b/crates/brk_cli/src/main.rs @@ -51,6 +51,22 @@ pub fn run() -> anyhow::Result<()> { let mut indexer = Indexer::forced_import(&config.brkdir())?; + #[cfg(not(debug_assertions))] + { + // Pre-run indexer if too far behind, then drop and reimport to reduce memory + let chain_height = client.get_last_height()?; + let indexed_height = indexer.vecs.starting_height(); + let blocks_behind = chain_height.saturating_sub(*indexed_height); + if blocks_behind > 10_000 { + info!("Indexing {blocks_behind} blocks before starting server..."); + sleep(Duration::from_secs(3)); + indexer.index(&blocks, &client, &exit)?; + drop(indexer); + Mimalloc::collect(); + indexer = Indexer::forced_import(&config.brkdir())?; + } + } + let mut computer = Computer::forced_import(&config.brkdir(), &indexer, config.fetcher())?; let mempool = Mempool::new(&client); diff --git a/crates/brk_client/src/lib.rs b/crates/brk_client/src/lib.rs index b8600309e..85bcfb169 100644 --- a/crates/brk_client/src/lib.rs +++ b/crates/brk_client/src/lib.rs @@ -7,12 +7,11 @@ #![allow(clippy::useless_format)] #![allow(clippy::unnecessary_to_owned)] -use std::sync::Arc; -use std::ops::{Bound, RangeBounds}; -use serde::de::DeserializeOwned; pub use brk_cohort::*; pub use brk_types::*; - +use serde::de::DeserializeOwned; +use std::ops::{Bound, RangeBounds}; +use std::sync::Arc; /// Error type for BRK client operations. 
#[derive(Debug)] @@ -77,7 +76,9 @@ impl BrkClientBase { let response = minreq::get(&url) .with_timeout(self.timeout_secs) .send() - .map_err(|e| BrkError { message: e.to_string() })?; + .map_err(|e| BrkError { + message: e.to_string(), + })?; if response.status_code >= 400 { return Err(BrkError { @@ -90,9 +91,9 @@ impl BrkClientBase { /// Make a GET request and deserialize JSON response. pub fn get_json(&self, path: &str) -> Result { - self.get(path)? - .json() - .map_err(|e| BrkError { message: e.to_string() }) + self.get(path)?.json().map_err(|e| BrkError { + message: e.to_string(), + }) } /// Make a GET request and return raw text response. @@ -100,25 +101,34 @@ impl BrkClientBase { self.get(path)? .as_str() .map(|s| s.to_string()) - .map_err(|e| BrkError { message: e.to_string() }) + .map_err(|e| BrkError { + message: e.to_string(), + }) } } /// Build metric name with suffix. #[inline] fn _m(acc: &str, s: &str) -> String { - if s.is_empty() { acc.to_string() } - else if acc.is_empty() { s.to_string() } - else { format!("{acc}_{s}") } + if s.is_empty() { + acc.to_string() + } else if acc.is_empty() { + s.to_string() + } else { + format!("{acc}_{s}") + } } /// Build metric name with prefix. #[inline] fn _p(prefix: &str, acc: &str) -> String { - if acc.is_empty() { prefix.to_string() } else { format!("{prefix}_{acc}") } + if acc.is_empty() { + prefix.to_string() + } else { + format!("{prefix}_{acc}") + } } - /// Non-generic trait for metric patterns (usable in collections). pub trait AnyMetricPattern { /// Get the metric name. @@ -134,7 +144,6 @@ pub trait MetricPattern: AnyMetricPattern { fn get(&self, index: Index) -> Option>; } - /// Shared endpoint configuration. 
#[derive(Clone)] struct EndpointConfig { @@ -147,7 +156,13 @@ struct EndpointConfig { impl EndpointConfig { fn new(client: Arc, name: Arc, index: Index) -> Self { - Self { client, name, index, start: None, end: None } + Self { + client, + name, + index, + start: None, + end: None, + } } fn path(&self) -> String { @@ -156,11 +171,21 @@ impl EndpointConfig { fn build_path(&self, format: Option<&str>) -> String { let mut params = Vec::new(); - if let Some(s) = self.start { params.push(format!("start={}", s)); } - if let Some(e) = self.end { params.push(format!("end={}", e)); } - if let Some(fmt) = format { params.push(format!("format={}", fmt)); } + if let Some(s) = self.start { + params.push(format!("start={}", s)); + } + if let Some(e) = self.end { + params.push(format!("end={}", e)); + } + if let Some(fmt) = format { + params.push(format!("format={}", fmt)); + } let p = self.path(); - if params.is_empty() { p } else { format!("{}?{}", p, params.join("&")) } + if params.is_empty() { + p + } else { + format!("{}?{}", p, params.join("&")) + } } fn get_json(&self, format: Option<&str>) -> Result { @@ -206,14 +231,20 @@ pub struct MetricEndpointBuilder { impl MetricEndpointBuilder { pub fn new(client: Arc, name: Arc, index: Index) -> Self { - Self { config: EndpointConfig::new(client, name, index), _marker: std::marker::PhantomData } + Self { + config: EndpointConfig::new(client, name, index), + _marker: std::marker::PhantomData, + } } /// Select a specific index position. pub fn get(mut self, index: usize) -> SingleItemBuilder { self.config.start = Some(index as i64); self.config.end = Some(index as i64 + 1); - SingleItemBuilder { config: self.config, _marker: std::marker::PhantomData } + SingleItemBuilder { + config: self.config, + _marker: std::marker::PhantomData, + } } /// Select a range using Rust range syntax. 
@@ -235,7 +266,10 @@ impl MetricEndpointBuilder { Bound::Excluded(&n) => Some(n as i64), Bound::Unbounded => None, }; - RangeBuilder { config: self.config, _marker: std::marker::PhantomData } + RangeBuilder { + config: self.config, + _marker: std::marker::PhantomData, + } } /// Take the first n items. @@ -250,13 +284,19 @@ impl MetricEndpointBuilder { } else { self.config.start = Some(-(n as i64)); } - RangeBuilder { config: self.config, _marker: std::marker::PhantomData } + RangeBuilder { + config: self.config, + _marker: std::marker::PhantomData, + } } /// Skip the first n items. Chain with `take(n)` to get a range. pub fn skip(mut self, n: usize) -> SkippedBuilder { self.config.start = Some(n as i64); - SkippedBuilder { config: self.config, _marker: std::marker::PhantomData } + SkippedBuilder { + config: self.config, + _marker: std::marker::PhantomData, + } } /// Fetch all data as parsed JSON. @@ -304,7 +344,10 @@ impl SkippedBuilder { pub fn take(mut self, n: usize) -> RangeBuilder { let start = self.config.start.unwrap_or(0); self.config.end = Some(start + n as i64); - RangeBuilder { config: self.config, _marker: std::marker::PhantomData } + RangeBuilder { + config: self.config, + _marker: std::marker::PhantomData, + } } /// Fetch from the skipped position to the end. 
@@ -336,12 +379,47 @@ impl RangeBuilder { } } - // Static index arrays -const _I1: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::DifficultyEpoch, Index::Height, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I2: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::DifficultyEpoch, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I3: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::Height, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; -const _I4: &[Index] = &[Index::DateIndex, Index::DecadeIndex, Index::MonthIndex, Index::QuarterIndex, Index::SemesterIndex, Index::WeekIndex, Index::YearIndex]; +const _I1: &[Index] = &[ + Index::DateIndex, + Index::DecadeIndex, + Index::DifficultyEpoch, + Index::Height, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, +]; +const _I2: &[Index] = &[ + Index::DateIndex, + Index::DecadeIndex, + Index::DifficultyEpoch, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, +]; +const _I3: &[Index] = &[ + Index::DateIndex, + Index::DecadeIndex, + Index::Height, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, +]; +const _I4: &[Index] = &[ + Index::DateIndex, + Index::DecadeIndex, + Index::MonthIndex, + Index::QuarterIndex, + Index::SemesterIndex, + Index::WeekIndex, + Index::YearIndex, +]; const _I5: &[Index] = &[Index::DateIndex, Index::Height]; const _I6: &[Index] = &[Index::DateIndex]; const _I7: &[Index] = &[Index::DecadeIndex]; @@ -373,502 +451,1653 @@ const _I32: &[Index] = &[Index::EmptyAddressIndex]; const _I33: &[Index] = &[Index::PairOutputIndex]; #[inline] -fn _ep(c: &Arc, n: &Arc, i: Index) -> MetricEndpointBuilder { +fn _ep( + c: &Arc, + n: &Arc, + i: Index, +) -> 
MetricEndpointBuilder { MetricEndpointBuilder::new(c.clone(), n.clone(), i) } // Index accessor structs -pub struct MetricPattern1By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern1By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern1By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } + pub fn decadeindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DecadeIndex) + } + pub fn difficultyepoch(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DifficultyEpoch) + } + pub fn height(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::Height) + } + pub fn monthindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::MonthIndex) + } + pub fn quarterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::QuarterIndex) + } + pub fn semesterindex(&self) -> MetricEndpointBuilder { + 
_ep(&self.client, &self.name, Index::SemesterIndex) + } + pub fn weekindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::WeekIndex) + } + pub fn yearindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::YearIndex) + } } -pub struct MetricPattern1 { name: Arc, pub by: MetricPattern1By } +pub struct MetricPattern1 { + name: Arc, + pub by: MetricPattern1By, +} impl MetricPattern1 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern1By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern1By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern1 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I1 } } -impl MetricPattern for MetricPattern1 { fn get(&self, index: Index) -> Option> { _I1.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern1 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I1 + } +} +impl MetricPattern for MetricPattern1 { + fn get(&self, index: Index) -> Option> { + _I1.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern2By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern2By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern2By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn difficultyepoch(&self) -> MetricEndpointBuilder { 
_ep(&self.client, &self.name, Index::DifficultyEpoch) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } + pub fn decadeindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DecadeIndex) + } + pub fn difficultyepoch(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DifficultyEpoch) + } + pub fn monthindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::MonthIndex) + } + pub fn quarterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::QuarterIndex) + } + pub fn semesterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::SemesterIndex) + } + pub fn weekindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::WeekIndex) + } + pub fn yearindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::YearIndex) + } } -pub struct MetricPattern2 { name: Arc, pub by: MetricPattern2By } +pub struct MetricPattern2 { + name: Arc, + pub by: MetricPattern2By, +} impl MetricPattern2 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern2By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: 
MetricPattern2By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern2 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I2 } } -impl MetricPattern for MetricPattern2 { fn get(&self, index: Index) -> Option> { _I2.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern2 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I2 + } +} +impl MetricPattern for MetricPattern2 { + fn get(&self, index: Index) -> Option> { + _I2.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern3By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern3By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern3By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } + pub fn decadeindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, 
Index::DecadeIndex) + } + pub fn height(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::Height) + } + pub fn monthindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::MonthIndex) + } + pub fn quarterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::QuarterIndex) + } + pub fn semesterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::SemesterIndex) + } + pub fn weekindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::WeekIndex) + } + pub fn yearindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::YearIndex) + } } -pub struct MetricPattern3 { name: Arc, pub by: MetricPattern3By } +pub struct MetricPattern3 { + name: Arc, + pub by: MetricPattern3By, +} impl MetricPattern3 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern3By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern3By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern3 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I3 } } -impl MetricPattern for MetricPattern3 { fn get(&self, index: Index) -> Option> { _I3.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern3 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I3 + } +} +impl MetricPattern for MetricPattern3 { + fn get(&self, index: Index) -> Option> { + _I3.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern4By { client: Arc, name: Arc, _marker: 
std::marker::PhantomData } +pub struct MetricPattern4By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern4By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } + pub fn decadeindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DecadeIndex) + } + pub fn monthindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::MonthIndex) + } + pub fn quarterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::QuarterIndex) + } + pub fn semesterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::SemesterIndex) + } + pub fn weekindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::WeekIndex) + } + pub fn yearindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::YearIndex) + } } -pub struct MetricPattern4 { name: Arc, pub by: MetricPattern4By } +pub struct MetricPattern4 { + name: Arc, + pub by: MetricPattern4By, +} impl MetricPattern4 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern4By { client, name, _marker: 
std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern4By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern4 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I4 } } -impl MetricPattern for MetricPattern4 { fn get(&self, index: Index) -> Option> { _I4.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern4 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I4 + } +} +impl MetricPattern for MetricPattern4 { + fn get(&self, index: Index) -> Option> { + _I4.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern5By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern5By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern5By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } + pub fn height(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::Height) + } } -pub struct MetricPattern5 { name: Arc, pub by: MetricPattern5By } +pub struct MetricPattern5 { + name: Arc, + pub by: MetricPattern5By, +} impl MetricPattern5 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern5By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let 
name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern5By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern5 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I5 } } -impl MetricPattern for MetricPattern5 { fn get(&self, index: Index) -> Option> { _I5.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern5 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I5 + } +} +impl MetricPattern for MetricPattern5 { + fn get(&self, index: Index) -> Option> { + _I5.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern6By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern6By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern6By { - pub fn dateindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DateIndex) } + pub fn dateindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DateIndex) + } } -pub struct MetricPattern6 { name: Arc, pub by: MetricPattern6By } +pub struct MetricPattern6 { + name: Arc, + pub by: MetricPattern6By, +} impl MetricPattern6 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern6By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern6By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern6 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I6 } } 
-impl MetricPattern for MetricPattern6 { fn get(&self, index: Index) -> Option> { _I6.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern6 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I6 + } +} +impl MetricPattern for MetricPattern6 { + fn get(&self, index: Index) -> Option> { + _I6.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern7By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern7By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern7By { - pub fn decadeindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DecadeIndex) } + pub fn decadeindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DecadeIndex) + } } -pub struct MetricPattern7 { name: Arc, pub by: MetricPattern7By } +pub struct MetricPattern7 { + name: Arc, + pub by: MetricPattern7By, +} impl MetricPattern7 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern7By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern7By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern7 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I7 } } -impl MetricPattern for MetricPattern7 { fn get(&self, index: Index) -> Option> { _I7.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern7 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I7 + } +} +impl 
MetricPattern for MetricPattern7 { + fn get(&self, index: Index) -> Option> { + _I7.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern8By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern8By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern8By { - pub fn difficultyepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::DifficultyEpoch) } + pub fn difficultyepoch(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::DifficultyEpoch) + } } -pub struct MetricPattern8 { name: Arc, pub by: MetricPattern8By } +pub struct MetricPattern8 { + name: Arc, + pub by: MetricPattern8By, +} impl MetricPattern8 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern8By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern8By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern8 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I8 } } -impl MetricPattern for MetricPattern8 { fn get(&self, index: Index) -> Option> { _I8.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern8 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I8 + } +} +impl MetricPattern for MetricPattern8 { + fn get(&self, index: Index) -> Option> { + _I8.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern9By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern9By { + client: Arc, + 
name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern9By { - pub fn emptyoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyOutputIndex) } + pub fn emptyoutputindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::EmptyOutputIndex) + } } -pub struct MetricPattern9 { name: Arc, pub by: MetricPattern9By } +pub struct MetricPattern9 { + name: Arc, + pub by: MetricPattern9By, +} impl MetricPattern9 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern9By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern9By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern9 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I9 } } -impl MetricPattern for MetricPattern9 { fn get(&self, index: Index) -> Option> { _I9.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern9 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I9 + } +} +impl MetricPattern for MetricPattern9 { + fn get(&self, index: Index) -> Option> { + _I9.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern10By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern10By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern10By { - pub fn halvingepoch(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::HalvingEpoch) } + pub fn halvingepoch(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::HalvingEpoch) + } 
} -pub struct MetricPattern10 { name: Arc, pub by: MetricPattern10By } +pub struct MetricPattern10 { + name: Arc, + pub by: MetricPattern10By, +} impl MetricPattern10 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern10By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern10By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern10 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I10 } } -impl MetricPattern for MetricPattern10 { fn get(&self, index: Index) -> Option> { _I10.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern10 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I10 + } +} +impl MetricPattern for MetricPattern10 { + fn get(&self, index: Index) -> Option> { + _I10.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern11By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern11By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern11By { - pub fn height(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::Height) } + pub fn height(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::Height) + } } -pub struct MetricPattern11 { name: Arc, pub by: MetricPattern11By } +pub struct MetricPattern11 { + name: Arc, + pub by: MetricPattern11By, +} impl MetricPattern11 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern11By { client, name, _marker: 
std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern11By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern11 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I11 } } -impl MetricPattern for MetricPattern11 { fn get(&self, index: Index) -> Option> { _I11.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern11 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I11 + } +} +impl MetricPattern for MetricPattern11 { + fn get(&self, index: Index) -> Option> { + _I11.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern12By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern12By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern12By { - pub fn txinindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxInIndex) } + pub fn txinindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::TxInIndex) + } } -pub struct MetricPattern12 { name: Arc, pub by: MetricPattern12By } +pub struct MetricPattern12 { + name: Arc, + pub by: MetricPattern12By, +} impl MetricPattern12 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern12By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern12By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str 
{ + &self.name + } } -impl AnyMetricPattern for MetricPattern12 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I12 } } -impl MetricPattern for MetricPattern12 { fn get(&self, index: Index) -> Option> { _I12.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern12 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I12 + } +} +impl MetricPattern for MetricPattern12 { + fn get(&self, index: Index) -> Option> { + _I12.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern13By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern13By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern13By { - pub fn monthindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::MonthIndex) } + pub fn monthindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::MonthIndex) + } } -pub struct MetricPattern13 { name: Arc, pub by: MetricPattern13By } +pub struct MetricPattern13 { + name: Arc, + pub by: MetricPattern13By, +} impl MetricPattern13 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern13By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern13By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern13 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I13 } } -impl MetricPattern for MetricPattern13 { fn get(&self, index: Index) -> Option> { _I13.contains(&index).then(|| _ep(&self.by.client, &self.by.name, 
index)) } } +impl AnyMetricPattern for MetricPattern13 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I13 + } +} +impl MetricPattern for MetricPattern13 { + fn get(&self, index: Index) -> Option> { + _I13.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern14By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern14By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern14By { - pub fn opreturnindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::OpReturnIndex) } + pub fn opreturnindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::OpReturnIndex) + } } -pub struct MetricPattern14 { name: Arc, pub by: MetricPattern14By } +pub struct MetricPattern14 { + name: Arc, + pub by: MetricPattern14By, +} impl MetricPattern14 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern14By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern14By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern14 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I14 } } -impl MetricPattern for MetricPattern14 { fn get(&self, index: Index) -> Option> { _I14.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern14 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I14 + } +} +impl MetricPattern for MetricPattern14 { + fn get(&self, index: Index) -> Option> { + _I14.contains(&index) + .then(|| 
_ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern15By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern15By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern15By { - pub fn txoutindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxOutIndex) } + pub fn txoutindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::TxOutIndex) + } } -pub struct MetricPattern15 { name: Arc, pub by: MetricPattern15By } +pub struct MetricPattern15 { + name: Arc, + pub by: MetricPattern15By, +} impl MetricPattern15 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern15By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern15By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern15 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I15 } } -impl MetricPattern for MetricPattern15 { fn get(&self, index: Index) -> Option> { _I15.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern15 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I15 + } +} +impl MetricPattern for MetricPattern15 { + fn get(&self, index: Index) -> Option> { + _I15.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern16By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern16By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern16By { - pub fn p2aaddressindex(&self) -> 
MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2AAddressIndex) } + pub fn p2aaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2AAddressIndex) + } } -pub struct MetricPattern16 { name: Arc, pub by: MetricPattern16By } +pub struct MetricPattern16 { + name: Arc, + pub by: MetricPattern16By, +} impl MetricPattern16 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern16By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern16By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern16 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I16 } } -impl MetricPattern for MetricPattern16 { fn get(&self, index: Index) -> Option> { _I16.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern16 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I16 + } +} +impl MetricPattern for MetricPattern16 { + fn get(&self, index: Index) -> Option> { + _I16.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern17By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern17By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern17By { - pub fn p2msoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2MSOutputIndex) } + pub fn p2msoutputindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2MSOutputIndex) + } } -pub struct MetricPattern17 { name: Arc, pub by: MetricPattern17By } +pub struct 
MetricPattern17 { + name: Arc, + pub by: MetricPattern17By, +} impl MetricPattern17 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern17By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern17By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern17 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I17 } } -impl MetricPattern for MetricPattern17 { fn get(&self, index: Index) -> Option> { _I17.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern17 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I17 + } +} +impl MetricPattern for MetricPattern17 { + fn get(&self, index: Index) -> Option> { + _I17.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern18By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern18By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern18By { - pub fn p2pk33addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK33AddressIndex) } + pub fn p2pk33addressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2PK33AddressIndex) + } } -pub struct MetricPattern18 { name: Arc, pub by: MetricPattern18By } +pub struct MetricPattern18 { + name: Arc, + pub by: MetricPattern18By, +} impl MetricPattern18 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern18By { client, name, _marker: std::marker::PhantomData } } } - 
pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern18By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern18 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I18 } } -impl MetricPattern for MetricPattern18 { fn get(&self, index: Index) -> Option> { _I18.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern18 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I18 + } +} +impl MetricPattern for MetricPattern18 { + fn get(&self, index: Index) -> Option> { + _I18.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern19By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern19By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern19By { - pub fn p2pk65addressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PK65AddressIndex) } + pub fn p2pk65addressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2PK65AddressIndex) + } } -pub struct MetricPattern19 { name: Arc, pub by: MetricPattern19By } +pub struct MetricPattern19 { + name: Arc, + pub by: MetricPattern19By, +} impl MetricPattern19 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern19By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern19By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> 
&str { + &self.name + } } -impl AnyMetricPattern for MetricPattern19 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I19 } } -impl MetricPattern for MetricPattern19 { fn get(&self, index: Index) -> Option> { _I19.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern19 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I19 + } +} +impl MetricPattern for MetricPattern19 { + fn get(&self, index: Index) -> Option> { + _I19.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern20By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern20By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern20By { - pub fn p2pkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2PKHAddressIndex) } + pub fn p2pkhaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2PKHAddressIndex) + } } -pub struct MetricPattern20 { name: Arc, pub by: MetricPattern20By } +pub struct MetricPattern20 { + name: Arc, + pub by: MetricPattern20By, +} impl MetricPattern20 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern20By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern20By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern20 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I20 } } -impl MetricPattern for MetricPattern20 { fn get(&self, index: Index) -> Option> { _I20.contains(&index).then(|| 
_ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern20 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I20 + } +} +impl MetricPattern for MetricPattern20 { + fn get(&self, index: Index) -> Option> { + _I20.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern21By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern21By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern21By { - pub fn p2shaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2SHAddressIndex) } + pub fn p2shaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2SHAddressIndex) + } } -pub struct MetricPattern21 { name: Arc, pub by: MetricPattern21By } +pub struct MetricPattern21 { + name: Arc, + pub by: MetricPattern21By, +} impl MetricPattern21 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern21By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern21By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern21 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I21 } } -impl MetricPattern for MetricPattern21 { fn get(&self, index: Index) -> Option> { _I21.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern21 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I21 + } +} +impl MetricPattern for MetricPattern21 { + fn get(&self, index: Index) -> Option> { + 
_I21.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern22By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern22By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern22By { - pub fn p2traddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2TRAddressIndex) } + pub fn p2traddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2TRAddressIndex) + } } -pub struct MetricPattern22 { name: Arc, pub by: MetricPattern22By } +pub struct MetricPattern22 { + name: Arc, + pub by: MetricPattern22By, +} impl MetricPattern22 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern22By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern22By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern22 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I22 } } -impl MetricPattern for MetricPattern22 { fn get(&self, index: Index) -> Option> { _I22.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern22 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I22 + } +} +impl MetricPattern for MetricPattern22 { + fn get(&self, index: Index) -> Option> { + _I22.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern23By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern23By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl 
MetricPattern23By { - pub fn p2wpkhaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WPKHAddressIndex) } + pub fn p2wpkhaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2WPKHAddressIndex) + } } -pub struct MetricPattern23 { name: Arc, pub by: MetricPattern23By } +pub struct MetricPattern23 { + name: Arc, + pub by: MetricPattern23By, +} impl MetricPattern23 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern23By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern23By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern23 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I23 } } -impl MetricPattern for MetricPattern23 { fn get(&self, index: Index) -> Option> { _I23.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern23 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I23 + } +} +impl MetricPattern for MetricPattern23 { + fn get(&self, index: Index) -> Option> { + _I23.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern24By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern24By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern24By { - pub fn p2wshaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::P2WSHAddressIndex) } + pub fn p2wshaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::P2WSHAddressIndex) + } } -pub 
struct MetricPattern24 { name: Arc, pub by: MetricPattern24By } +pub struct MetricPattern24 { + name: Arc, + pub by: MetricPattern24By, +} impl MetricPattern24 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern24By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern24By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern24 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I24 } } -impl MetricPattern for MetricPattern24 { fn get(&self, index: Index) -> Option> { _I24.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern24 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I24 + } +} +impl MetricPattern for MetricPattern24 { + fn get(&self, index: Index) -> Option> { + _I24.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern25By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern25By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern25By { - pub fn quarterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::QuarterIndex) } + pub fn quarterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::QuarterIndex) + } } -pub struct MetricPattern25 { name: Arc, pub by: MetricPattern25By } +pub struct MetricPattern25 { + name: Arc, + pub by: MetricPattern25By, +} impl MetricPattern25 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern25By { 
client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern25By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern25 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I25 } } -impl MetricPattern for MetricPattern25 { fn get(&self, index: Index) -> Option> { _I25.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern25 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I25 + } +} +impl MetricPattern for MetricPattern25 { + fn get(&self, index: Index) -> Option> { + _I25.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern26By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern26By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern26By { - pub fn semesterindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::SemesterIndex) } + pub fn semesterindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::SemesterIndex) + } } -pub struct MetricPattern26 { name: Arc, pub by: MetricPattern26By } +pub struct MetricPattern26 { + name: Arc, + pub by: MetricPattern26By, +} impl MetricPattern26 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern26By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern26By { + client, + name, + _marker: std::marker::PhantomData, + 
}, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern26 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I26 } } -impl MetricPattern for MetricPattern26 { fn get(&self, index: Index) -> Option> { _I26.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern26 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I26 + } +} +impl MetricPattern for MetricPattern26 { + fn get(&self, index: Index) -> Option> { + _I26.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern27By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern27By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern27By { - pub fn txindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::TxIndex) } + pub fn txindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::TxIndex) + } } -pub struct MetricPattern27 { name: Arc, pub by: MetricPattern27By } +pub struct MetricPattern27 { + name: Arc, + pub by: MetricPattern27By, +} impl MetricPattern27 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern27By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern27By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern27 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I27 } } -impl MetricPattern for MetricPattern27 { fn get(&self, index: Index) -> Option> { _I27.contains(&index).then(|| 
_ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern27 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I27 + } +} +impl MetricPattern for MetricPattern27 { + fn get(&self, index: Index) -> Option> { + _I27.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern28By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern28By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern28By { - pub fn unknownoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::UnknownOutputIndex) } + pub fn unknownoutputindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::UnknownOutputIndex) + } } -pub struct MetricPattern28 { name: Arc, pub by: MetricPattern28By } +pub struct MetricPattern28 { + name: Arc, + pub by: MetricPattern28By, +} impl MetricPattern28 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern28By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern28By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern28 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I28 } } -impl MetricPattern for MetricPattern28 { fn get(&self, index: Index) -> Option> { _I28.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern28 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I28 + } +} +impl MetricPattern for MetricPattern28 { + fn get(&self, index: Index) -> 
Option> { + _I28.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern29By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern29By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern29By { - pub fn weekindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::WeekIndex) } + pub fn weekindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::WeekIndex) + } } -pub struct MetricPattern29 { name: Arc, pub by: MetricPattern29By } +pub struct MetricPattern29 { + name: Arc, + pub by: MetricPattern29By, +} impl MetricPattern29 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern29By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern29By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern29 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I29 } } -impl MetricPattern for MetricPattern29 { fn get(&self, index: Index) -> Option> { _I29.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern29 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I29 + } +} +impl MetricPattern for MetricPattern29 { + fn get(&self, index: Index) -> Option> { + _I29.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern30By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern30By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern30By 
{ - pub fn yearindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::YearIndex) } + pub fn yearindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::YearIndex) + } } -pub struct MetricPattern30 { name: Arc, pub by: MetricPattern30By } +pub struct MetricPattern30 { + name: Arc, + pub by: MetricPattern30By, +} impl MetricPattern30 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern30By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern30By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern30 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I30 } } -impl MetricPattern for MetricPattern30 { fn get(&self, index: Index) -> Option> { _I30.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern30 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I30 + } +} +impl MetricPattern for MetricPattern30 { + fn get(&self, index: Index) -> Option> { + _I30.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern31By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern31By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern31By { - pub fn loadedaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::LoadedAddressIndex) } + pub fn loadedaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::LoadedAddressIndex) + } } -pub struct MetricPattern31 { name: Arc, pub by: 
MetricPattern31By } +pub struct MetricPattern31 { + name: Arc, + pub by: MetricPattern31By, +} impl MetricPattern31 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern31By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern31By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern31 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I31 } } -impl MetricPattern for MetricPattern31 { fn get(&self, index: Index) -> Option> { _I31.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern31 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I31 + } +} +impl MetricPattern for MetricPattern31 { + fn get(&self, index: Index) -> Option> { + _I31.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern32By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern32By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern32By { - pub fn emptyaddressindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::EmptyAddressIndex) } + pub fn emptyaddressindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::EmptyAddressIndex) + } } -pub struct MetricPattern32 { name: Arc, pub by: MetricPattern32By } +pub struct MetricPattern32 { + name: Arc, + pub by: MetricPattern32By, +} impl MetricPattern32 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern32By { client, name, _marker: 
std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern32By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern32 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I32 } } -impl MetricPattern for MetricPattern32 { fn get(&self, index: Index) -> Option> { _I32.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern32 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I32 + } +} +impl MetricPattern for MetricPattern32 { + fn get(&self, index: Index) -> Option> { + _I32.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} -pub struct MetricPattern33By { client: Arc, name: Arc, _marker: std::marker::PhantomData } +pub struct MetricPattern33By { + client: Arc, + name: Arc, + _marker: std::marker::PhantomData, +} impl MetricPattern33By { - pub fn pairoutputindex(&self) -> MetricEndpointBuilder { _ep(&self.client, &self.name, Index::PairOutputIndex) } + pub fn pairoutputindex(&self) -> MetricEndpointBuilder { + _ep(&self.client, &self.name, Index::PairOutputIndex) + } } -pub struct MetricPattern33 { name: Arc, pub by: MetricPattern33By } +pub struct MetricPattern33 { + name: Arc, + pub by: MetricPattern33By, +} impl MetricPattern33 { - pub fn new(client: Arc, name: String) -> Self { let name: Arc = name.into(); Self { name: name.clone(), by: MetricPattern33By { client, name, _marker: std::marker::PhantomData } } } - pub fn name(&self) -> &str { &self.name } + pub fn new(client: Arc, name: String) -> Self { + let name: Arc = name.into(); + Self { + name: name.clone(), + by: MetricPattern33By { + client, + name, + _marker: std::marker::PhantomData, + }, + } + } + pub 
fn name(&self) -> &str { + &self.name + } } -impl AnyMetricPattern for MetricPattern33 { fn name(&self) -> &str { &self.name } fn indexes(&self) -> &'static [Index] { _I33 } } -impl MetricPattern for MetricPattern33 { fn get(&self, index: Index) -> Option> { _I33.contains(&index).then(|| _ep(&self.by.client, &self.by.name, index)) } } +impl AnyMetricPattern for MetricPattern33 { + fn name(&self) -> &str { + &self.name + } + fn indexes(&self) -> &'static [Index] { + _I33 + } +} +impl MetricPattern for MetricPattern33 { + fn get(&self, index: Index) -> Option> { + _I33.contains(&index) + .then(|| _ep(&self.by.client, &self.by.name, index)) + } +} // Reusable pattern structs @@ -913,31 +2142,88 @@ impl RealizedPattern3 { pub fn new(client: Arc, acc: String) -> Self { Self { adjusted_sopr: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), - adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), - adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), + adjusted_sopr_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "adjusted_sopr_30d_ema"), + ), + adjusted_sopr_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "adjusted_sopr_7d_ema"), + ), + adjusted_value_created: MetricPattern1::new( + client.clone(), + _m(&acc, "adjusted_value_created"), + ), + adjusted_value_destroyed: MetricPattern1::new( + client.clone(), + _m(&acc, "adjusted_value_destroyed"), + ), mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BitcoinPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), 
_m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "net_realized_pnl_cumulative_30d_delta"), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap", + ), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap", + ), + ), + net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "net_realized_pnl_rel_to_realized_cap"), + ), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), - realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), + realized_cap_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "realized_cap_30d_delta"), + ), + realized_cap_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "realized_cap_rel_to_own_market_cap"), + ), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_loss_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, 
"realized_loss_rel_to_realized_cap"), + ), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), - realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_price_extra: ActivePriceRatioPattern::new( + client.clone(), + _m(&acc, "realized_price_ratio"), + ), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern6::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_profit_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_profit_rel_to_realized_cap"), + ), + realized_profit_to_loss_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "realized_profit_to_loss_ratio"), + ), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sell_side_risk_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio"), + ), + sell_side_risk_ratio_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_30d_ema"), + ), + sell_side_risk_ratio_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_7d_ema"), + ), sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), @@ -987,29 +2273,80 @@ impl RealizedPattern4 { pub fn new(client: Arc, acc: String) -> Self { Self { adjusted_sopr: 
MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr")), - adjusted_sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_30d_ema")), - adjusted_sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "adjusted_sopr_7d_ema")), - adjusted_value_created: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_created")), - adjusted_value_destroyed: MetricPattern1::new(client.clone(), _m(&acc, "adjusted_value_destroyed")), + adjusted_sopr_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "adjusted_sopr_30d_ema"), + ), + adjusted_sopr_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "adjusted_sopr_7d_ema"), + ), + adjusted_value_created: MetricPattern1::new( + client.clone(), + _m(&acc, "adjusted_value_created"), + ), + adjusted_value_destroyed: MetricPattern1::new( + client.clone(), + _m(&acc, "adjusted_value_destroyed"), + ), mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BitcoinPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "net_realized_pnl_cumulative_30d_delta"), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + 
"net_realized_pnl_cumulative_30d_delta_rel_to_market_cap", + ), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap", + ), + ), + net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "net_realized_pnl_rel_to_realized_cap"), + ), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "realized_cap_30d_delta"), + ), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_loss_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_loss_rel_to_realized_cap"), + ), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), - realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_price_extra: RealizedPriceExtraPattern::new( + client.clone(), + _m(&acc, "realized_price_ratio"), + ), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), + realized_profit_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_profit_rel_to_realized_cap"), + ), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: 
MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sell_side_risk_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio"), + ), + sell_side_risk_ratio_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_30d_ema"), + ), + sell_side_risk_ratio_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_7d_ema"), + ), sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), @@ -1126,24 +2463,69 @@ impl RealizedPattern2 { mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BitcoinPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "net_realized_pnl_cumulative_30d_delta"), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap", + ), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + 
"net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap", + ), + ), + net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "net_realized_pnl_rel_to_realized_cap"), + ), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), - realized_cap_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap_rel_to_own_market_cap")), + realized_cap_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "realized_cap_30d_delta"), + ), + realized_cap_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "realized_cap_rel_to_own_market_cap"), + ), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_loss_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_loss_rel_to_realized_cap"), + ), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), - realized_price_extra: ActivePriceRatioPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_price_extra: ActivePriceRatioPattern::new( + client.clone(), + _m(&acc, "realized_price_ratio"), + ), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), - realized_profit_to_loss_ratio: MetricPattern6::new(client.clone(), _m(&acc, "realized_profit_to_loss_ratio")), + realized_profit_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_profit_rel_to_realized_cap"), + ), + realized_profit_to_loss_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "realized_profit_to_loss_ratio"), + ), realized_value: 
MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sell_side_risk_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio"), + ), + sell_side_risk_ratio_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_30d_ema"), + ), + sell_side_risk_ratio_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_7d_ema"), + ), sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), @@ -1190,22 +2572,61 @@ impl RealizedPattern { mvrv: MetricPattern4::new(client.clone(), _m(&acc, "mvrv")), neg_realized_loss: BitcoinPattern2::new(client.clone(), _m(&acc, "neg_realized_loss")), net_realized_pnl: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl")), - net_realized_pnl_cumulative_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta")), - net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap")), - net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new(client.clone(), _m(&acc, "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap")), - net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "net_realized_pnl_rel_to_realized_cap")), + net_realized_pnl_cumulative_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "net_realized_pnl_cumulative_30d_delta"), + ), + 
net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_market_cap", + ), + ), + net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern4::new( + client.clone(), + _m( + &acc, + "net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap", + ), + ), + net_realized_pnl_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "net_realized_pnl_rel_to_realized_cap"), + ), realized_cap: MetricPattern1::new(client.clone(), _m(&acc, "realized_cap")), - realized_cap_30d_delta: MetricPattern4::new(client.clone(), _m(&acc, "realized_cap_30d_delta")), + realized_cap_30d_delta: MetricPattern4::new( + client.clone(), + _m(&acc, "realized_cap_30d_delta"), + ), realized_loss: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss")), - realized_loss_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_loss_rel_to_realized_cap")), + realized_loss_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_loss_rel_to_realized_cap"), + ), realized_price: MetricPattern1::new(client.clone(), _m(&acc, "realized_price")), - realized_price_extra: RealizedPriceExtraPattern::new(client.clone(), _m(&acc, "realized_price_ratio")), + realized_price_extra: RealizedPriceExtraPattern::new( + client.clone(), + _m(&acc, "realized_price_ratio"), + ), realized_profit: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit")), - realized_profit_rel_to_realized_cap: BlockCountPattern::new(client.clone(), _m(&acc, "realized_profit_rel_to_realized_cap")), + realized_profit_rel_to_realized_cap: BlockCountPattern::new( + client.clone(), + _m(&acc, "realized_profit_rel_to_realized_cap"), + ), realized_value: MetricPattern1::new(client.clone(), _m(&acc, "realized_value")), - sell_side_risk_ratio: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio")), - sell_side_risk_ratio_30d_ema: 
MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_30d_ema")), - sell_side_risk_ratio_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sell_side_risk_ratio_7d_ema")), + sell_side_risk_ratio: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio"), + ), + sell_side_risk_ratio_30d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_30d_ema"), + ), + sell_side_risk_ratio_7d_ema: MetricPattern6::new( + client.clone(), + _m(&acc, "sell_side_risk_ratio_7d_ema"), + ), sopr: MetricPattern6::new(client.clone(), _m(&acc, "sopr")), sopr_30d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_30d_ema")), sopr_7d_ema: MetricPattern6::new(client.clone(), _m(&acc, "sopr_7d_ema")), @@ -1394,24 +2815,75 @@ impl RelativePattern5 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - neg_unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_market_cap")), - neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap")), - neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), - net_unrealized_pnl_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), - net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")), - net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), + neg_unrealized_loss_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss_rel_to_market_cap"), + ), + neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, 
"neg_unrealized_loss_rel_to_own_market_cap"), + ), + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl"), + ), + net_unrealized_pnl_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_market_cap"), + ), + net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap"), + ), + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl"), + ), nupl: MetricPattern1::new(client.clone(), _m(&acc, "nupl")), - supply_in_loss_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), - supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), - supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), - unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), - unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")), - unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")), - unrealized_profit_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_market_cap")), - unrealized_profit_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, 
"unrealized_profit_rel_to_own_market_cap")), - unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")), + supply_in_loss_rel_to_circulating_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_loss_rel_to_circulating_supply"), + ), + supply_in_loss_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_loss_rel_to_own_supply"), + ), + supply_in_profit_rel_to_circulating_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_profit_rel_to_circulating_supply"), + ), + supply_in_profit_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_profit_rel_to_own_supply"), + ), + supply_rel_to_circulating_supply: MetricPattern4::new( + client.clone(), + _m(&acc, "supply_rel_to_circulating_supply"), + ), + unrealized_loss_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_market_cap"), + ), + unrealized_loss_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_own_market_cap"), + ), + unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl"), + ), + unrealized_profit_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_market_cap"), + ), + unrealized_profit_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_own_market_cap"), + ), + unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl"), + ), } } } @@ -1619,17 +3091,17 @@ impl ClassAveragePricePattern { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), _m(&acc, "2015_returns")), - _2016: MetricPattern4::new(client.clone(), _m(&acc, "2016_returns")), - _2017: MetricPattern4::new(client.clone(), _m(&acc, "2017_returns")), - _2018: MetricPattern4::new(client.clone(), _m(&acc, "2018_returns")), - _2019: MetricPattern4::new(client.clone(), _m(&acc, "2019_returns")), - _2020: MetricPattern4::new(client.clone(), _m(&acc, "2020_returns")), - _2021: MetricPattern4::new(client.clone(), _m(&acc, "2021_returns")), - _2022: MetricPattern4::new(client.clone(), _m(&acc, "2022_returns")), - _2023: MetricPattern4::new(client.clone(), _m(&acc, "2023_returns")), - _2024: MetricPattern4::new(client.clone(), _m(&acc, "2024_returns")), - _2025: MetricPattern4::new(client.clone(), _m(&acc, "2025_returns")), + _2015: MetricPattern4::new(client.clone(), _m(&acc, "2015_average_price")), + _2016: MetricPattern4::new(client.clone(), _m(&acc, "2016_average_price")), + _2017: MetricPattern4::new(client.clone(), _m(&acc, "2017_average_price")), + _2018: MetricPattern4::new(client.clone(), _m(&acc, "2018_average_price")), + _2019: MetricPattern4::new(client.clone(), _m(&acc, "2019_average_price")), + _2020: MetricPattern4::new(client.clone(), _m(&acc, "2020_average_price")), + _2021: MetricPattern4::new(client.clone(), _m(&acc, "2021_average_price")), + _2022: MetricPattern4::new(client.clone(), _m(&acc, "2022_average_price")), + _2023: MetricPattern4::new(client.clone(), _m(&acc, "2023_average_price")), + _2024: MetricPattern4::new(client.clone(), _m(&acc, "2024_average_price")), + _2025: MetricPattern4::new(client.clone(), _m(&acc, "2025_average_price")), } } } @@ -1668,38 +3140,6 @@ impl DollarsPattern { } } -/// Pattern struct for repeated tree structure. 
-pub struct RelativePattern2 { - pub neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1, - pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1, - pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1, - pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1, - pub supply_in_loss_rel_to_own_supply: MetricPattern1, - pub supply_in_profit_rel_to_own_supply: MetricPattern1, - pub unrealized_loss_rel_to_own_market_cap: MetricPattern1, - pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1, - pub unrealized_profit_rel_to_own_market_cap: MetricPattern1, - pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1, -} - -impl RelativePattern2 { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap")), - neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl")), - net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")), - net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")), - supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")), - unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")), - unrealized_profit_rel_to_own_market_cap: 
MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_market_cap")), - unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")), - } - } -} - /// Pattern struct for repeated tree structure. pub struct RelativePattern { pub neg_unrealized_loss_rel_to_market_cap: MetricPattern1, @@ -1718,16 +3158,105 @@ impl RelativePattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - neg_unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_market_cap")), - net_unrealized_pnl_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_market_cap")), + neg_unrealized_loss_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss_rel_to_market_cap"), + ), + net_unrealized_pnl_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_market_cap"), + ), nupl: MetricPattern1::new(client.clone(), _m(&acc, "nupl")), - supply_in_loss_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_circulating_supply")), - supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")), - supply_in_profit_rel_to_circulating_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_circulating_supply")), - supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")), - supply_rel_to_circulating_supply: MetricPattern4::new(client.clone(), _m(&acc, "supply_rel_to_circulating_supply")), - unrealized_loss_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_market_cap")), - unrealized_profit_rel_to_market_cap: MetricPattern1::new(client.clone(), _m(&acc, 
"unrealized_profit_rel_to_market_cap")), + supply_in_loss_rel_to_circulating_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_loss_rel_to_circulating_supply"), + ), + supply_in_loss_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_loss_rel_to_own_supply"), + ), + supply_in_profit_rel_to_circulating_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_profit_rel_to_circulating_supply"), + ), + supply_in_profit_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_profit_rel_to_own_supply"), + ), + supply_rel_to_circulating_supply: MetricPattern4::new( + client.clone(), + _m(&acc, "supply_rel_to_circulating_supply"), + ), + unrealized_loss_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_market_cap"), + ), + unrealized_profit_rel_to_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_market_cap"), + ), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct RelativePattern2 { + pub neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1, + pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1, + pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1, + pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1, + pub supply_in_loss_rel_to_own_supply: MetricPattern1, + pub supply_in_profit_rel_to_own_supply: MetricPattern1, + pub unrealized_loss_rel_to_own_market_cap: MetricPattern1, + pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1, + pub unrealized_profit_rel_to_own_market_cap: MetricPattern1, + pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1, +} + +impl RelativePattern2 { + /// Create a new pattern node with accumulated metric name. 
+ pub fn new(client: Arc, acc: String) -> Self { + Self { + neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap"), + ), + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl"), + ), + net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap"), + ), + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl"), + ), + supply_in_loss_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_loss_rel_to_own_supply"), + ), + supply_in_profit_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "supply_in_profit_rel_to_own_supply"), + ), + unrealized_loss_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_own_market_cap"), + ), + unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl"), + ), + unrealized_profit_rel_to_own_market_cap: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_own_market_cap"), + ), + unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl"), + ), } } } @@ -1936,6 +3465,41 @@ impl _10yTo12yPattern { } } +/// Pattern struct for repeated tree structure. 
+pub struct UnrealizedPattern { + pub neg_unrealized_loss: MetricPattern1, + pub net_unrealized_pnl: MetricPattern1, + pub supply_in_loss: ActiveSupplyPattern, + pub supply_in_profit: ActiveSupplyPattern, + pub total_unrealized_pnl: MetricPattern1, + pub unrealized_loss: MetricPattern1, + pub unrealized_profit: MetricPattern1, +} + +impl UnrealizedPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + neg_unrealized_loss: MetricPattern1::new( + client.clone(), + _m(&acc, "neg_unrealized_loss"), + ), + net_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl")), + supply_in_loss: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_in_loss")), + supply_in_profit: ActiveSupplyPattern::new( + client.clone(), + _m(&acc, "supply_in_profit"), + ), + total_unrealized_pnl: MetricPattern1::new( + client.clone(), + _m(&acc, "total_unrealized_pnl"), + ), + unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss")), + unrealized_profit: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit")), + } + } +} + /// Pattern struct for repeated tree structure. pub struct _10yPattern { pub activity: ActivityPattern2, @@ -1962,32 +3526,6 @@ impl _10yPattern { } } -/// Pattern struct for repeated tree structure. -pub struct _0satsPattern2 { - pub activity: ActivityPattern2, - pub cost_basis: CostBasisPattern, - pub outputs: OutputsPattern, - pub realized: RealizedPattern, - pub relative: RelativePattern4, - pub supply: SupplyPattern2, - pub unrealized: UnrealizedPattern, -} - -impl _0satsPattern2 { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - activity: ActivityPattern2::new(client.clone(), acc.clone()), - cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), - outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")), - realized: RealizedPattern::new(client.clone(), acc.clone()), - relative: RelativePattern4::new(client.clone(), _m(&acc, "supply_in")), - supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")), - unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), - } - } -} - /// Pattern struct for repeated tree structure. pub struct _100btcPattern { pub activity: ActivityPattern2, @@ -2014,32 +3552,6 @@ impl _100btcPattern { } } -/// Pattern struct for repeated tree structure. -pub struct UnrealizedPattern { - pub neg_unrealized_loss: MetricPattern1, - pub net_unrealized_pnl: MetricPattern1, - pub supply_in_loss: ActiveSupplyPattern, - pub supply_in_profit: ActiveSupplyPattern, - pub total_unrealized_pnl: MetricPattern1, - pub unrealized_loss: MetricPattern1, - pub unrealized_profit: MetricPattern1, -} - -impl UnrealizedPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - neg_unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss")), - net_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl")), - supply_in_loss: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_in_loss")), - supply_in_profit: ActiveSupplyPattern::new(client.clone(), _m(&acc, "supply_in_profit")), - total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "total_unrealized_pnl")), - unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss")), - unrealized_profit: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit")), - } - } -} - /// Pattern struct for repeated tree structure. 
pub struct PeriodCagrPattern { pub _10y: MetricPattern4, @@ -2066,6 +3578,32 @@ impl PeriodCagrPattern { } } +/// Pattern struct for repeated tree structure. +pub struct _0satsPattern2 { + pub activity: ActivityPattern2, + pub cost_basis: CostBasisPattern, + pub outputs: OutputsPattern, + pub realized: RealizedPattern, + pub relative: RelativePattern4, + pub supply: SupplyPattern2, + pub unrealized: UnrealizedPattern, +} + +impl _0satsPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + activity: ActivityPattern2::new(client.clone(), acc.clone()), + cost_basis: CostBasisPattern::new(client.clone(), acc.clone()), + outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")), + realized: RealizedPattern::new(client.clone(), acc.clone()), + relative: RelativePattern4::new(client.clone(), _m(&acc, "supply_in")), + supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")), + unrealized: UnrealizedPattern::new(client.clone(), acc.clone()), + } + } +} + /// Pattern struct for repeated tree structure. pub struct ActivityPattern2 { pub coinblocks_destroyed: BlockCountPattern, @@ -2079,9 +3617,18 @@ impl ActivityPattern2 { /// Create a new pattern node with accumulated metric name. 
pub fn new(client: Arc, acc: String) -> Self { Self { - coinblocks_destroyed: BlockCountPattern::new(client.clone(), _m(&acc, "coinblocks_destroyed")), - coindays_destroyed: BlockCountPattern::new(client.clone(), _m(&acc, "coindays_destroyed")), - satblocks_destroyed: MetricPattern11::new(client.clone(), _m(&acc, "satblocks_destroyed")), + coinblocks_destroyed: BlockCountPattern::new( + client.clone(), + _m(&acc, "coinblocks_destroyed"), + ), + coindays_destroyed: BlockCountPattern::new( + client.clone(), + _m(&acc, "coindays_destroyed"), + ), + satblocks_destroyed: MetricPattern11::new( + client.clone(), + _m(&acc, "satblocks_destroyed"), + ), satdays_destroyed: MetricPattern11::new(client.clone(), _m(&acc, "satdays_destroyed")), sent: UnclaimedRewardsPattern::new(client.clone(), _m(&acc, "sent")), } @@ -2145,37 +3692,19 @@ impl CoinbasePattern2 { } /// Pattern struct for repeated tree structure. -pub struct ActiveSupplyPattern { - pub bitcoin: MetricPattern1, - pub dollars: MetricPattern1, - pub sats: MetricPattern1, +pub struct CoinbasePattern { + pub bitcoin: BitcoinPattern, + pub dollars: DollarsPattern, + pub sats: DollarsPattern, } -impl ActiveSupplyPattern { +impl CoinbasePattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), - dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), - sats: MetricPattern1::new(client.clone(), acc.clone()), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SegwitAdoptionPattern { - pub base: MetricPattern11, - pub cumulative: MetricPattern2, - pub sum: MetricPattern2, -} - -impl SegwitAdoptionPattern { - /// Create a new pattern node with accumulated metric name. 
- pub fn new(client: Arc, acc: String) -> Self { - Self { - base: MetricPattern11::new(client.clone(), acc.clone()), - cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), - sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), + bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")), + dollars: DollarsPattern::new(client.clone(), _m(&acc, "usd")), + sats: DollarsPattern::new(client.clone(), acc.clone()), } } } @@ -2217,19 +3746,53 @@ impl _2015Pattern { } /// Pattern struct for repeated tree structure. -pub struct CoinbasePattern { - pub bitcoin: BitcoinPattern, - pub dollars: DollarsPattern, - pub sats: DollarsPattern, +pub struct SegwitAdoptionPattern { + pub base: MetricPattern11, + pub cumulative: MetricPattern2, + pub sum: MetricPattern2, } -impl CoinbasePattern { +impl SegwitAdoptionPattern { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")), - dollars: DollarsPattern::new(client.clone(), _m(&acc, "usd")), - sats: DollarsPattern::new(client.clone(), acc.clone()), + base: MetricPattern11::new(client.clone(), acc.clone()), + cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), + sum: MetricPattern2::new(client.clone(), _m(&acc, "sum")), + } + } +} + +/// Pattern struct for repeated tree structure. +pub struct ActiveSupplyPattern { + pub bitcoin: MetricPattern1, + pub dollars: MetricPattern1, + pub sats: MetricPattern1, +} + +impl ActiveSupplyPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + bitcoin: MetricPattern1::new(client.clone(), _m(&acc, "btc")), + dollars: MetricPattern1::new(client.clone(), _m(&acc, "usd")), + sats: MetricPattern1::new(client.clone(), acc.clone()), + } + } +} + +/// Pattern struct for repeated tree structure. 
+pub struct SupplyPattern2 { + pub halved: ActiveSupplyPattern, + pub total: ActiveSupplyPattern, +} + +impl SupplyPattern2 { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + halved: ActiveSupplyPattern::new(client.clone(), _m(&acc, "halved")), + total: ActiveSupplyPattern::new(client.clone(), acc.clone()), } } } @@ -2276,24 +3839,14 @@ impl RelativePattern4 { /// Create a new pattern node with accumulated metric name. pub fn new(client: Arc, acc: String) -> Self { Self { - supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "loss_rel_to_own_supply")), - supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "profit_rel_to_own_supply")), - } - } -} - -/// Pattern struct for repeated tree structure. -pub struct SupplyPattern2 { - pub halved: ActiveSupplyPattern, - pub total: ActiveSupplyPattern, -} - -impl SupplyPattern2 { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - halved: ActiveSupplyPattern::new(client.clone(), _m(&acc, "halved")), - total: ActiveSupplyPattern::new(client.clone(), acc.clone()), + supply_in_loss_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "loss_rel_to_own_supply"), + ), + supply_in_profit_rel_to_own_supply: MetricPattern1::new( + client.clone(), + _m(&acc, "profit_rel_to_own_supply"), + ), } } } @@ -2314,22 +3867,6 @@ impl BitcoinPattern2 { } } -/// Pattern struct for repeated tree structure. -pub struct BlockCountPattern { - pub cumulative: MetricPattern1, - pub sum: MetricPattern1, -} - -impl BlockCountPattern { - /// Create a new pattern node with accumulated metric name. - pub fn new(client: Arc, acc: String) -> Self { - Self { - cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), - sum: MetricPattern1::new(client.clone(), acc.clone()), - } - } -} - /// Pattern struct for repeated tree structure. 
pub struct SatsPattern { pub ohlc: MetricPattern1, @@ -2346,6 +3883,22 @@ impl SatsPattern { } } +/// Pattern struct for repeated tree structure. +pub struct BlockCountPattern { + pub cumulative: MetricPattern1, + pub sum: MetricPattern1, +} + +impl BlockCountPattern { + /// Create a new pattern node with accumulated metric name. + pub fn new(client: Arc, acc: String) -> Self { + Self { + cumulative: MetricPattern1::new(client.clone(), _m(&acc, "cumulative")), + sum: MetricPattern1::new(client.clone(), acc.clone()), + } + } +} + /// Pattern struct for repeated tree structure. pub struct RealizedPriceExtraPattern { pub ratio: MetricPattern4, @@ -2402,7 +3955,10 @@ impl MetricsTree { blocks: MetricsTree_Blocks::new(client.clone(), format!("{base_path}_blocks")), cointime: MetricsTree_Cointime::new(client.clone(), format!("{base_path}_cointime")), constants: MetricsTree_Constants::new(client.clone(), format!("{base_path}_constants")), - distribution: MetricsTree_Distribution::new(client.clone(), format!("{base_path}_distribution")), + distribution: MetricsTree_Distribution::new( + client.clone(), + format!("{base_path}_distribution"), + ), indexes: MetricsTree_Indexes::new(client.clone(), format!("{base_path}_indexes")), inputs: MetricsTree_Inputs::new(client.clone(), format!("{base_path}_inputs")), market: MetricsTree_Market::new(client.clone(), format!("{base_path}_market")), @@ -2412,7 +3968,10 @@ impl MetricsTree { price: MetricsTree_Price::new(client.clone(), format!("{base_path}_price")), scripts: MetricsTree_Scripts::new(client.clone(), format!("{base_path}_scripts")), supply: MetricsTree_Supply::new(client.clone(), format!("{base_path}_supply")), - transactions: MetricsTree_Transactions::new(client.clone(), format!("{base_path}_transactions")), + transactions: MetricsTree_Transactions::new( + client.clone(), + format!("{base_path}_transactions"), + ), } } } @@ -2440,14 +3999,38 @@ pub struct MetricsTree_Addresses { impl MetricsTree_Addresses { pub fn 
new(client: Arc, base_path: String) -> Self { Self { - first_p2aaddressindex: MetricPattern11::new(client.clone(), "first_p2aaddressindex".to_string()), - first_p2pk33addressindex: MetricPattern11::new(client.clone(), "first_p2pk33addressindex".to_string()), - first_p2pk65addressindex: MetricPattern11::new(client.clone(), "first_p2pk65addressindex".to_string()), - first_p2pkhaddressindex: MetricPattern11::new(client.clone(), "first_p2pkhaddressindex".to_string()), - first_p2shaddressindex: MetricPattern11::new(client.clone(), "first_p2shaddressindex".to_string()), - first_p2traddressindex: MetricPattern11::new(client.clone(), "first_p2traddressindex".to_string()), - first_p2wpkhaddressindex: MetricPattern11::new(client.clone(), "first_p2wpkhaddressindex".to_string()), - first_p2wshaddressindex: MetricPattern11::new(client.clone(), "first_p2wshaddressindex".to_string()), + first_p2aaddressindex: MetricPattern11::new( + client.clone(), + "first_p2aaddressindex".to_string(), + ), + first_p2pk33addressindex: MetricPattern11::new( + client.clone(), + "first_p2pk33addressindex".to_string(), + ), + first_p2pk65addressindex: MetricPattern11::new( + client.clone(), + "first_p2pk65addressindex".to_string(), + ), + first_p2pkhaddressindex: MetricPattern11::new( + client.clone(), + "first_p2pkhaddressindex".to_string(), + ), + first_p2shaddressindex: MetricPattern11::new( + client.clone(), + "first_p2shaddressindex".to_string(), + ), + first_p2traddressindex: MetricPattern11::new( + client.clone(), + "first_p2traddressindex".to_string(), + ), + first_p2wpkhaddressindex: MetricPattern11::new( + client.clone(), + "first_p2wpkhaddressindex".to_string(), + ), + first_p2wshaddressindex: MetricPattern11::new( + client.clone(), + "first_p2wshaddressindex".to_string(), + ), p2abytes: MetricPattern16::new(client.clone(), "p2abytes".to_string()), p2pk33bytes: MetricPattern18::new(client.clone(), "p2pk33bytes".to_string()), p2pk65bytes: MetricPattern19::new(client.clone(), 
"p2pk65bytes".to_string()), @@ -2482,12 +4065,21 @@ impl MetricsTree_Blocks { Self { blockhash: MetricPattern11::new(client.clone(), "blockhash".to_string()), count: MetricsTree_Blocks_Count::new(client.clone(), format!("{base_path}_count")), - difficulty: MetricsTree_Blocks_Difficulty::new(client.clone(), format!("{base_path}_difficulty")), + difficulty: MetricsTree_Blocks_Difficulty::new( + client.clone(), + format!("{base_path}_difficulty"), + ), fullness: FullnessPattern::new(client.clone(), "block_fullness".to_string()), - halving: MetricsTree_Blocks_Halving::new(client.clone(), format!("{base_path}_halving")), + halving: MetricsTree_Blocks_Halving::new( + client.clone(), + format!("{base_path}_halving"), + ), interval: FullnessPattern::new(client.clone(), "block_interval".to_string()), mining: MetricsTree_Blocks_Mining::new(client.clone(), format!("{base_path}_mining")), - rewards: MetricsTree_Blocks_Rewards::new(client.clone(), format!("{base_path}_rewards")), + rewards: MetricsTree_Blocks_Rewards::new( + client.clone(), + format!("{base_path}_rewards"), + ), size: MetricsTree_Blocks_Size::new(client.clone(), format!("{base_path}_size")), time: MetricsTree_Blocks_Time::new(client.clone(), format!("{base_path}_time")), total_size: MetricPattern11::new(client.clone(), "total_size".to_string()), @@ -2523,7 +4115,10 @@ impl MetricsTree_Blocks_Count { _24h_block_count: MetricPattern1::new(client.clone(), "24h_block_count".to_string()), _24h_start: MetricPattern11::new(client.clone(), "24h_start".to_string()), block_count: BlockCountPattern::new(client.clone(), "block_count".to_string()), - block_count_target: MetricPattern4::new(client.clone(), "block_count_target".to_string()), + block_count_target: MetricPattern4::new( + client.clone(), + "block_count_target".to_string(), + ), } } } @@ -2543,8 +4138,14 @@ impl MetricsTree_Blocks_Difficulty { Self { adjustment: MetricPattern1::new(client.clone(), "difficulty_adjustment".to_string()), as_hash: 
MetricPattern1::new(client.clone(), "difficulty_as_hash".to_string()), - blocks_before_next_adjustment: MetricPattern1::new(client.clone(), "blocks_before_next_difficulty_adjustment".to_string()), - days_before_next_adjustment: MetricPattern1::new(client.clone(), "days_before_next_difficulty_adjustment".to_string()), + blocks_before_next_adjustment: MetricPattern1::new( + client.clone(), + "blocks_before_next_difficulty_adjustment".to_string(), + ), + days_before_next_adjustment: MetricPattern1::new( + client.clone(), + "days_before_next_difficulty_adjustment".to_string(), + ), epoch: MetricPattern4::new(client.clone(), "difficultyepoch".to_string()), raw: MetricPattern1::new(client.clone(), "difficulty".to_string()), } @@ -2561,8 +4162,14 @@ pub struct MetricsTree_Blocks_Halving { impl MetricsTree_Blocks_Halving { pub fn new(client: Arc, base_path: String) -> Self { Self { - blocks_before_next_halving: MetricPattern1::new(client.clone(), "blocks_before_next_halving".to_string()), - days_before_next_halving: MetricPattern1::new(client.clone(), "days_before_next_halving".to_string()), + blocks_before_next_halving: MetricPattern1::new( + client.clone(), + "blocks_before_next_halving".to_string(), + ), + days_before_next_halving: MetricPattern1::new( + client.clone(), + "days_before_next_halving".to_string(), + ), epoch: MetricPattern4::new(client.clone(), "halvingepoch".to_string()), } } @@ -2591,20 +4198,38 @@ impl MetricsTree_Blocks_Mining { pub fn new(client: Arc, base_path: String) -> Self { Self { hash_price_phs: MetricPattern1::new(client.clone(), "hash_price_phs".to_string()), - hash_price_phs_min: MetricPattern1::new(client.clone(), "hash_price_phs_min".to_string()), - hash_price_rebound: MetricPattern1::new(client.clone(), "hash_price_rebound".to_string()), + hash_price_phs_min: MetricPattern1::new( + client.clone(), + "hash_price_phs_min".to_string(), + ), + hash_price_rebound: MetricPattern1::new( + client.clone(), + "hash_price_rebound".to_string(), + ), 
hash_price_ths: MetricPattern1::new(client.clone(), "hash_price_ths".to_string()), - hash_price_ths_min: MetricPattern1::new(client.clone(), "hash_price_ths_min".to_string()), + hash_price_ths_min: MetricPattern1::new( + client.clone(), + "hash_price_ths_min".to_string(), + ), hash_rate: MetricPattern1::new(client.clone(), "hash_rate".to_string()), hash_rate_1m_sma: MetricPattern4::new(client.clone(), "hash_rate_1m_sma".to_string()), hash_rate_1w_sma: MetricPattern4::new(client.clone(), "hash_rate_1w_sma".to_string()), hash_rate_1y_sma: MetricPattern4::new(client.clone(), "hash_rate_1y_sma".to_string()), hash_rate_2m_sma: MetricPattern4::new(client.clone(), "hash_rate_2m_sma".to_string()), hash_value_phs: MetricPattern1::new(client.clone(), "hash_value_phs".to_string()), - hash_value_phs_min: MetricPattern1::new(client.clone(), "hash_value_phs_min".to_string()), - hash_value_rebound: MetricPattern1::new(client.clone(), "hash_value_rebound".to_string()), + hash_value_phs_min: MetricPattern1::new( + client.clone(), + "hash_value_phs_min".to_string(), + ), + hash_value_rebound: MetricPattern1::new( + client.clone(), + "hash_value_rebound".to_string(), + ), hash_value_ths: MetricPattern1::new(client.clone(), "hash_value_ths".to_string()), - hash_value_ths_min: MetricPattern1::new(client.clone(), "hash_value_ths_min".to_string()), + hash_value_ths_min: MetricPattern1::new( + client.clone(), + "hash_value_ths_min".to_string(), + ), } } } @@ -2623,13 +4248,22 @@ pub struct MetricsTree_Blocks_Rewards { impl MetricsTree_Blocks_Rewards { pub fn new(client: Arc, base_path: String) -> Self { Self { - _24h_coinbase_sum: MetricsTree_Blocks_Rewards_24hCoinbaseSum::new(client.clone(), format!("{base_path}_24h_coinbase_sum")), + _24h_coinbase_sum: MetricsTree_Blocks_Rewards_24hCoinbaseSum::new( + client.clone(), + format!("{base_path}_24h_coinbase_sum"), + ), coinbase: CoinbasePattern::new(client.clone(), "coinbase".to_string()), fee_dominance: MetricPattern6::new(client.clone(), 
"fee_dominance".to_string()), subsidy: CoinbasePattern::new(client.clone(), "subsidy".to_string()), subsidy_dominance: MetricPattern6::new(client.clone(), "subsidy_dominance".to_string()), - subsidy_usd_1y_sma: MetricPattern4::new(client.clone(), "subsidy_usd_1y_sma".to_string()), - unclaimed_rewards: UnclaimedRewardsPattern::new(client.clone(), "unclaimed_rewards".to_string()), + subsidy_usd_1y_sma: MetricPattern4::new( + client.clone(), + "subsidy_usd_1y_sma".to_string(), + ), + unclaimed_rewards: UnclaimedRewardsPattern::new( + client.clone(), + "unclaimed_rewards".to_string(), + ), } } } @@ -2694,7 +4328,10 @@ impl MetricsTree_Blocks_Time { Self { date: MetricPattern11::new(client.clone(), "date".to_string()), timestamp: MetricPattern1::new(client.clone(), "timestamp".to_string()), - timestamp_monotonic: MetricPattern11::new(client.clone(), "timestamp_monotonic".to_string()), + timestamp_monotonic: MetricPattern11::new( + client.clone(), + "timestamp_monotonic".to_string(), + ), } } } @@ -2712,10 +4349,19 @@ pub struct MetricsTree_Cointime { impl MetricsTree_Cointime { pub fn new(client: Arc, base_path: String) -> Self { Self { - activity: MetricsTree_Cointime_Activity::new(client.clone(), format!("{base_path}_activity")), - adjusted: MetricsTree_Cointime_Adjusted::new(client.clone(), format!("{base_path}_adjusted")), + activity: MetricsTree_Cointime_Activity::new( + client.clone(), + format!("{base_path}_activity"), + ), + adjusted: MetricsTree_Cointime_Adjusted::new( + client.clone(), + format!("{base_path}_adjusted"), + ), cap: MetricsTree_Cointime_Cap::new(client.clone(), format!("{base_path}_cap")), - pricing: MetricsTree_Cointime_Pricing::new(client.clone(), format!("{base_path}_pricing")), + pricing: MetricsTree_Cointime_Pricing::new( + client.clone(), + format!("{base_path}_pricing"), + ), supply: MetricsTree_Cointime_Supply::new(client.clone(), format!("{base_path}_supply")), value: MetricsTree_Cointime_Value::new(client.clone(), 
format!("{base_path}_value")), } @@ -2734,9 +4380,18 @@ pub struct MetricsTree_Cointime_Activity { impl MetricsTree_Cointime_Activity { pub fn new(client: Arc, base_path: String) -> Self { Self { - activity_to_vaultedness_ratio: MetricPattern1::new(client.clone(), "activity_to_vaultedness_ratio".to_string()), - coinblocks_created: BlockCountPattern::new(client.clone(), "coinblocks_created".to_string()), - coinblocks_stored: BlockCountPattern::new(client.clone(), "coinblocks_stored".to_string()), + activity_to_vaultedness_ratio: MetricPattern1::new( + client.clone(), + "activity_to_vaultedness_ratio".to_string(), + ), + coinblocks_created: BlockCountPattern::new( + client.clone(), + "coinblocks_created".to_string(), + ), + coinblocks_stored: BlockCountPattern::new( + client.clone(), + "coinblocks_stored".to_string(), + ), liveliness: MetricPattern1::new(client.clone(), "liveliness".to_string()), vaultedness: MetricPattern1::new(client.clone(), "vaultedness".to_string()), } @@ -2753,9 +4408,18 @@ pub struct MetricsTree_Cointime_Adjusted { impl MetricsTree_Cointime_Adjusted { pub fn new(client: Arc, base_path: String) -> Self { Self { - cointime_adj_inflation_rate: MetricPattern4::new(client.clone(), "cointime_adj_inflation_rate".to_string()), - cointime_adj_tx_btc_velocity: MetricPattern4::new(client.clone(), "cointime_adj_tx_btc_velocity".to_string()), - cointime_adj_tx_usd_velocity: MetricPattern4::new(client.clone(), "cointime_adj_tx_usd_velocity".to_string()), + cointime_adj_inflation_rate: MetricPattern4::new( + client.clone(), + "cointime_adj_inflation_rate".to_string(), + ), + cointime_adj_tx_btc_velocity: MetricPattern4::new( + client.clone(), + "cointime_adj_tx_btc_velocity".to_string(), + ), + cointime_adj_tx_usd_velocity: MetricPattern4::new( + client.clone(), + "cointime_adj_tx_usd_velocity".to_string(), + ), } } } @@ -2797,13 +4461,25 @@ impl MetricsTree_Cointime_Pricing { pub fn new(client: Arc, base_path: String) -> Self { Self { active_price: 
MetricPattern1::new(client.clone(), "active_price".to_string()), - active_price_ratio: ActivePriceRatioPattern::new(client.clone(), "active_price_ratio".to_string()), + active_price_ratio: ActivePriceRatioPattern::new( + client.clone(), + "active_price_ratio".to_string(), + ), cointime_price: MetricPattern1::new(client.clone(), "cointime_price".to_string()), - cointime_price_ratio: ActivePriceRatioPattern::new(client.clone(), "cointime_price_ratio".to_string()), + cointime_price_ratio: ActivePriceRatioPattern::new( + client.clone(), + "cointime_price_ratio".to_string(), + ), true_market_mean: MetricPattern1::new(client.clone(), "true_market_mean".to_string()), - true_market_mean_ratio: ActivePriceRatioPattern::new(client.clone(), "true_market_mean_ratio".to_string()), + true_market_mean_ratio: ActivePriceRatioPattern::new( + client.clone(), + "true_market_mean_ratio".to_string(), + ), vaulted_price: MetricPattern1::new(client.clone(), "vaulted_price".to_string()), - vaulted_price_ratio: ActivePriceRatioPattern::new(client.clone(), "vaulted_price_ratio".to_string()), + vaulted_price_ratio: ActivePriceRatioPattern::new( + client.clone(), + "vaulted_price_ratio".to_string(), + ), } } } @@ -2833,9 +4509,18 @@ pub struct MetricsTree_Cointime_Value { impl MetricsTree_Cointime_Value { pub fn new(client: Arc, base_path: String) -> Self { Self { - cointime_value_created: BlockCountPattern::new(client.clone(), "cointime_value_created".to_string()), - cointime_value_destroyed: BlockCountPattern::new(client.clone(), "cointime_value_destroyed".to_string()), - cointime_value_stored: BlockCountPattern::new(client.clone(), "cointime_value_stored".to_string()), + cointime_value_created: BlockCountPattern::new( + client.clone(), + "cointime_value_created".to_string(), + ), + cointime_value_destroyed: BlockCountPattern::new( + client.clone(), + "cointime_value_destroyed".to_string(), + ), + cointime_value_stored: BlockCountPattern::new( + client.clone(), + 
"cointime_value_stored".to_string(), + ), } } } @@ -2904,14 +4589,32 @@ impl MetricsTree_Distribution { pub fn new(client: Arc, base_path: String) -> Self { Self { addr_count: AddrCountPattern::new(client.clone(), "addr_count".to_string()), - address_cohorts: MetricsTree_Distribution_AddressCohorts::new(client.clone(), format!("{base_path}_address_cohorts")), - addresses_data: MetricsTree_Distribution_AddressesData::new(client.clone(), format!("{base_path}_addresses_data")), - any_address_indexes: MetricsTree_Distribution_AnyAddressIndexes::new(client.clone(), format!("{base_path}_any_address_indexes")), + address_cohorts: MetricsTree_Distribution_AddressCohorts::new( + client.clone(), + format!("{base_path}_address_cohorts"), + ), + addresses_data: MetricsTree_Distribution_AddressesData::new( + client.clone(), + format!("{base_path}_addresses_data"), + ), + any_address_indexes: MetricsTree_Distribution_AnyAddressIndexes::new( + client.clone(), + format!("{base_path}_any_address_indexes"), + ), chain_state: MetricPattern11::new(client.clone(), "chain".to_string()), empty_addr_count: AddrCountPattern::new(client.clone(), "empty_addr_count".to_string()), - emptyaddressindex: MetricPattern32::new(client.clone(), "emptyaddressindex".to_string()), - loadedaddressindex: MetricPattern31::new(client.clone(), "loadedaddressindex".to_string()), - utxo_cohorts: MetricsTree_Distribution_UtxoCohorts::new(client.clone(), format!("{base_path}_utxo_cohorts")), + emptyaddressindex: MetricPattern32::new( + client.clone(), + "emptyaddressindex".to_string(), + ), + loadedaddressindex: MetricPattern31::new( + client.clone(), + "loadedaddressindex".to_string(), + ), + utxo_cohorts: MetricsTree_Distribution_UtxoCohorts::new( + client.clone(), + format!("{base_path}_utxo_cohorts"), + ), } } } @@ -2926,9 +4629,18 @@ pub struct MetricsTree_Distribution_AddressCohorts { impl MetricsTree_Distribution_AddressCohorts { pub fn new(client: Arc, base_path: String) -> Self { Self { - amount_range: 
MetricsTree_Distribution_AddressCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), - ge_amount: MetricsTree_Distribution_AddressCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), - lt_amount: MetricsTree_Distribution_AddressCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), + amount_range: MetricsTree_Distribution_AddressCohorts_AmountRange::new( + client.clone(), + format!("{base_path}_amount_range"), + ), + ge_amount: MetricsTree_Distribution_AddressCohorts_GeAmount::new( + client.clone(), + format!("{base_path}_ge_amount"), + ), + lt_amount: MetricsTree_Distribution_AddressCohorts_LtAmount::new( + client.clone(), + format!("{base_path}_lt_amount"), + ), } } } @@ -2956,20 +4668,62 @@ impl MetricsTree_Distribution_AddressCohorts_AmountRange { pub fn new(client: Arc, base_path: String) -> Self { Self { _0sats: _0satsPattern::new(client.clone(), "addrs_with_0sats".to_string()), - _100btc_to_1k_btc: _0satsPattern::new(client.clone(), "addrs_above_100btc_under_1k_btc".to_string()), - _100k_btc_or_more: _0satsPattern::new(client.clone(), "addrs_above_100k_btc".to_string()), - _100k_sats_to_1m_sats: _0satsPattern::new(client.clone(), "addrs_above_100k_sats_under_1m_sats".to_string()), - _100sats_to_1k_sats: _0satsPattern::new(client.clone(), "addrs_above_100sats_under_1k_sats".to_string()), - _10btc_to_100btc: _0satsPattern::new(client.clone(), "addrs_above_10btc_under_100btc".to_string()), - _10k_btc_to_100k_btc: _0satsPattern::new(client.clone(), "addrs_above_10k_btc_under_100k_btc".to_string()), - _10k_sats_to_100k_sats: _0satsPattern::new(client.clone(), "addrs_above_10k_sats_under_100k_sats".to_string()), - _10m_sats_to_1btc: _0satsPattern::new(client.clone(), "addrs_above_10m_sats_under_1btc".to_string()), - _10sats_to_100sats: _0satsPattern::new(client.clone(), "addrs_above_10sats_under_100sats".to_string()), - _1btc_to_10btc: _0satsPattern::new(client.clone(), 
"addrs_above_1btc_under_10btc".to_string()), - _1k_btc_to_10k_btc: _0satsPattern::new(client.clone(), "addrs_above_1k_btc_under_10k_btc".to_string()), - _1k_sats_to_10k_sats: _0satsPattern::new(client.clone(), "addrs_above_1k_sats_under_10k_sats".to_string()), - _1m_sats_to_10m_sats: _0satsPattern::new(client.clone(), "addrs_above_1m_sats_under_10m_sats".to_string()), - _1sat_to_10sats: _0satsPattern::new(client.clone(), "addrs_above_1sat_under_10sats".to_string()), + _100btc_to_1k_btc: _0satsPattern::new( + client.clone(), + "addrs_above_100btc_under_1k_btc".to_string(), + ), + _100k_btc_or_more: _0satsPattern::new( + client.clone(), + "addrs_above_100k_btc".to_string(), + ), + _100k_sats_to_1m_sats: _0satsPattern::new( + client.clone(), + "addrs_above_100k_sats_under_1m_sats".to_string(), + ), + _100sats_to_1k_sats: _0satsPattern::new( + client.clone(), + "addrs_above_100sats_under_1k_sats".to_string(), + ), + _10btc_to_100btc: _0satsPattern::new( + client.clone(), + "addrs_above_10btc_under_100btc".to_string(), + ), + _10k_btc_to_100k_btc: _0satsPattern::new( + client.clone(), + "addrs_above_10k_btc_under_100k_btc".to_string(), + ), + _10k_sats_to_100k_sats: _0satsPattern::new( + client.clone(), + "addrs_above_10k_sats_under_100k_sats".to_string(), + ), + _10m_sats_to_1btc: _0satsPattern::new( + client.clone(), + "addrs_above_10m_sats_under_1btc".to_string(), + ), + _10sats_to_100sats: _0satsPattern::new( + client.clone(), + "addrs_above_10sats_under_100sats".to_string(), + ), + _1btc_to_10btc: _0satsPattern::new( + client.clone(), + "addrs_above_1btc_under_10btc".to_string(), + ), + _1k_btc_to_10k_btc: _0satsPattern::new( + client.clone(), + "addrs_above_1k_btc_under_10k_btc".to_string(), + ), + _1k_sats_to_10k_sats: _0satsPattern::new( + client.clone(), + "addrs_above_1k_sats_under_10k_sats".to_string(), + ), + _1m_sats_to_10m_sats: _0satsPattern::new( + client.clone(), + "addrs_above_1m_sats_under_10m_sats".to_string(), + ), + _1sat_to_10sats: 
_0satsPattern::new( + client.clone(), + "addrs_above_1sat_under_10sats".to_string(), + ), } } } @@ -3108,17 +4862,50 @@ pub struct MetricsTree_Distribution_UtxoCohorts { impl MetricsTree_Distribution_UtxoCohorts { pub fn new(client: Arc, base_path: String) -> Self { Self { - age_range: MetricsTree_Distribution_UtxoCohorts_AgeRange::new(client.clone(), format!("{base_path}_age_range")), - all: MetricsTree_Distribution_UtxoCohorts_All::new(client.clone(), format!("{base_path}_all")), - amount_range: MetricsTree_Distribution_UtxoCohorts_AmountRange::new(client.clone(), format!("{base_path}_amount_range")), - epoch: MetricsTree_Distribution_UtxoCohorts_Epoch::new(client.clone(), format!("{base_path}_epoch")), - ge_amount: MetricsTree_Distribution_UtxoCohorts_GeAmount::new(client.clone(), format!("{base_path}_ge_amount")), - lt_amount: MetricsTree_Distribution_UtxoCohorts_LtAmount::new(client.clone(), format!("{base_path}_lt_amount")), - max_age: MetricsTree_Distribution_UtxoCohorts_MaxAge::new(client.clone(), format!("{base_path}_max_age")), - min_age: MetricsTree_Distribution_UtxoCohorts_MinAge::new(client.clone(), format!("{base_path}_min_age")), - term: MetricsTree_Distribution_UtxoCohorts_Term::new(client.clone(), format!("{base_path}_term")), - type_: MetricsTree_Distribution_UtxoCohorts_Type::new(client.clone(), format!("{base_path}_type_")), - year: MetricsTree_Distribution_UtxoCohorts_Year::new(client.clone(), format!("{base_path}_year")), + age_range: MetricsTree_Distribution_UtxoCohorts_AgeRange::new( + client.clone(), + format!("{base_path}_age_range"), + ), + all: MetricsTree_Distribution_UtxoCohorts_All::new( + client.clone(), + format!("{base_path}_all"), + ), + amount_range: MetricsTree_Distribution_UtxoCohorts_AmountRange::new( + client.clone(), + format!("{base_path}_amount_range"), + ), + epoch: MetricsTree_Distribution_UtxoCohorts_Epoch::new( + client.clone(), + format!("{base_path}_epoch"), + ), + ge_amount: 
MetricsTree_Distribution_UtxoCohorts_GeAmount::new( + client.clone(), + format!("{base_path}_ge_amount"), + ), + lt_amount: MetricsTree_Distribution_UtxoCohorts_LtAmount::new( + client.clone(), + format!("{base_path}_lt_amount"), + ), + max_age: MetricsTree_Distribution_UtxoCohorts_MaxAge::new( + client.clone(), + format!("{base_path}_max_age"), + ), + min_age: MetricsTree_Distribution_UtxoCohorts_MinAge::new( + client.clone(), + format!("{base_path}_min_age"), + ), + term: MetricsTree_Distribution_UtxoCohorts_Term::new( + client.clone(), + format!("{base_path}_term"), + ), + type_: MetricsTree_Distribution_UtxoCohorts_Type::new( + client.clone(), + format!("{base_path}_type_"), + ), + year: MetricsTree_Distribution_UtxoCohorts_Year::new( + client.clone(), + format!("{base_path}_year"), + ), } } } @@ -3151,25 +4938,82 @@ pub struct MetricsTree_Distribution_UtxoCohorts_AgeRange { impl MetricsTree_Distribution_UtxoCohorts_AgeRange { pub fn new(client: Arc, base_path: String) -> Self { Self { - _10y_to_12y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_10y_up_to_12y_old".to_string()), - _12y_to_15y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_12y_up_to_15y_old".to_string()), - _1d_to_1w: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1d_up_to_1w_old".to_string()), - _1h_to_1d: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1h_up_to_1d_old".to_string()), - _1m_to_2m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1m_up_to_2m_old".to_string()), - _1w_to_1m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1w_up_to_1m_old".to_string()), - _1y_to_2y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_1y_up_to_2y_old".to_string()), - _2m_to_3m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2m_up_to_3m_old".to_string()), - _2y_to_3y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_2y_up_to_3y_old".to_string()), - _3m_to_4m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3m_up_to_4m_old".to_string()), - 
_3y_to_4y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_3y_up_to_4y_old".to_string()), - _4m_to_5m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4m_up_to_5m_old".to_string()), - _4y_to_5y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_4y_up_to_5y_old".to_string()), - _5m_to_6m: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5m_up_to_6m_old".to_string()), - _5y_to_6y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_5y_up_to_6y_old".to_string()), - _6m_to_1y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6m_up_to_1y_old".to_string()), - _6y_to_7y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_6y_up_to_7y_old".to_string()), - _7y_to_8y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_7y_up_to_8y_old".to_string()), - _8y_to_10y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_8y_up_to_10y_old".to_string()), + _10y_to_12y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_10y_up_to_12y_old".to_string(), + ), + _12y_to_15y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_12y_up_to_15y_old".to_string(), + ), + _1d_to_1w: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_1d_up_to_1w_old".to_string(), + ), + _1h_to_1d: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_1h_up_to_1d_old".to_string(), + ), + _1m_to_2m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_1m_up_to_2m_old".to_string(), + ), + _1w_to_1m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_1w_up_to_1m_old".to_string(), + ), + _1y_to_2y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_1y_up_to_2y_old".to_string(), + ), + _2m_to_3m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_2m_up_to_3m_old".to_string(), + ), + _2y_to_3y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_2y_up_to_3y_old".to_string(), + ), + _3m_to_4m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_3m_up_to_4m_old".to_string(), + ), + _3y_to_4y: _10yTo12yPattern::new( + 
client.clone(), + "utxos_at_least_3y_up_to_4y_old".to_string(), + ), + _4m_to_5m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_4m_up_to_5m_old".to_string(), + ), + _4y_to_5y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_4y_up_to_5y_old".to_string(), + ), + _5m_to_6m: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_5m_up_to_6m_old".to_string(), + ), + _5y_to_6y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_5y_up_to_6y_old".to_string(), + ), + _6m_to_1y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_6m_up_to_1y_old".to_string(), + ), + _6y_to_7y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_6y_up_to_7y_old".to_string(), + ), + _7y_to_8y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_7y_up_to_8y_old".to_string(), + ), + _8y_to_10y: _10yTo12yPattern::new( + client.clone(), + "utxos_at_least_8y_up_to_10y_old".to_string(), + ), from_15y: _10yTo12yPattern::new(client.clone(), "utxos_at_least_15y_old".to_string()), up_to_1h: _10yTo12yPattern::new(client.clone(), "utxos_up_to_1h_old".to_string()), } @@ -3191,10 +5035,16 @@ impl MetricsTree_Distribution_UtxoCohorts_All { pub fn new(client: Arc, base_path: String) -> Self { Self { activity: ActivityPattern2::new(client.clone(), "".to_string()), - cost_basis: MetricsTree_Distribution_UtxoCohorts_All_CostBasis::new(client.clone(), format!("{base_path}_cost_basis")), + cost_basis: MetricsTree_Distribution_UtxoCohorts_All_CostBasis::new( + client.clone(), + format!("{base_path}_cost_basis"), + ), outputs: OutputsPattern::new(client.clone(), "utxo_count".to_string()), realized: RealizedPattern3::new(client.clone(), "".to_string()), - relative: MetricsTree_Distribution_UtxoCohorts_All_Relative::new(client.clone(), format!("{base_path}_relative")), + relative: MetricsTree_Distribution_UtxoCohorts_All_Relative::new( + client.clone(), + format!("{base_path}_relative"), + ), supply: SupplyPattern2::new(client.clone(), "supply".to_string()), 
unrealized: UnrealizedPattern::new(client.clone(), "".to_string()), } @@ -3231,12 +5081,30 @@ pub struct MetricsTree_Distribution_UtxoCohorts_All_Relative { impl MetricsTree_Distribution_UtxoCohorts_All_Relative { pub fn new(client: Arc, base_path: String) -> Self { Self { - neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), "neg_unrealized_loss_rel_to_own_total_unrealized_pnl".to_string()), - net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), "net_unrealized_pnl_rel_to_own_total_unrealized_pnl".to_string()), - supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), "supply_in_loss_rel_to_own_supply".to_string()), - supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), "supply_in_profit_rel_to_own_supply".to_string()), - unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), "unrealized_loss_rel_to_own_total_unrealized_pnl".to_string()), - unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), "unrealized_profit_rel_to_own_total_unrealized_pnl".to_string()), + neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + "neg_unrealized_loss_rel_to_own_total_unrealized_pnl".to_string(), + ), + net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + "net_unrealized_pnl_rel_to_own_total_unrealized_pnl".to_string(), + ), + supply_in_loss_rel_to_own_supply: MetricPattern1::new( + client.clone(), + "supply_in_loss_rel_to_own_supply".to_string(), + ), + supply_in_profit_rel_to_own_supply: MetricPattern1::new( + client.clone(), + "supply_in_profit_rel_to_own_supply".to_string(), + ), + unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + "unrealized_loss_rel_to_own_total_unrealized_pnl".to_string(), + ), + unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new( + client.clone(), + 
"unrealized_profit_rel_to_own_total_unrealized_pnl".to_string(), + ), } } } @@ -3264,20 +5132,62 @@ impl MetricsTree_Distribution_UtxoCohorts_AmountRange { pub fn new(client: Arc, base_path: String) -> Self { Self { _0sats: _0satsPattern2::new(client.clone(), "utxos_with_0sats".to_string()), - _100btc_to_1k_btc: _0satsPattern2::new(client.clone(), "utxos_above_100btc_under_1k_btc".to_string()), - _100k_btc_or_more: _0satsPattern2::new(client.clone(), "utxos_above_100k_btc".to_string()), - _100k_sats_to_1m_sats: _0satsPattern2::new(client.clone(), "utxos_above_100k_sats_under_1m_sats".to_string()), - _100sats_to_1k_sats: _0satsPattern2::new(client.clone(), "utxos_above_100sats_under_1k_sats".to_string()), - _10btc_to_100btc: _0satsPattern2::new(client.clone(), "utxos_above_10btc_under_100btc".to_string()), - _10k_btc_to_100k_btc: _0satsPattern2::new(client.clone(), "utxos_above_10k_btc_under_100k_btc".to_string()), - _10k_sats_to_100k_sats: _0satsPattern2::new(client.clone(), "utxos_above_10k_sats_under_100k_sats".to_string()), - _10m_sats_to_1btc: _0satsPattern2::new(client.clone(), "utxos_above_10m_sats_under_1btc".to_string()), - _10sats_to_100sats: _0satsPattern2::new(client.clone(), "utxos_above_10sats_under_100sats".to_string()), - _1btc_to_10btc: _0satsPattern2::new(client.clone(), "utxos_above_1btc_under_10btc".to_string()), - _1k_btc_to_10k_btc: _0satsPattern2::new(client.clone(), "utxos_above_1k_btc_under_10k_btc".to_string()), - _1k_sats_to_10k_sats: _0satsPattern2::new(client.clone(), "utxos_above_1k_sats_under_10k_sats".to_string()), - _1m_sats_to_10m_sats: _0satsPattern2::new(client.clone(), "utxos_above_1m_sats_under_10m_sats".to_string()), - _1sat_to_10sats: _0satsPattern2::new(client.clone(), "utxos_above_1sat_under_10sats".to_string()), + _100btc_to_1k_btc: _0satsPattern2::new( + client.clone(), + "utxos_above_100btc_under_1k_btc".to_string(), + ), + _100k_btc_or_more: _0satsPattern2::new( + client.clone(), + "utxos_above_100k_btc".to_string(), + 
), + _100k_sats_to_1m_sats: _0satsPattern2::new( + client.clone(), + "utxos_above_100k_sats_under_1m_sats".to_string(), + ), + _100sats_to_1k_sats: _0satsPattern2::new( + client.clone(), + "utxos_above_100sats_under_1k_sats".to_string(), + ), + _10btc_to_100btc: _0satsPattern2::new( + client.clone(), + "utxos_above_10btc_under_100btc".to_string(), + ), + _10k_btc_to_100k_btc: _0satsPattern2::new( + client.clone(), + "utxos_above_10k_btc_under_100k_btc".to_string(), + ), + _10k_sats_to_100k_sats: _0satsPattern2::new( + client.clone(), + "utxos_above_10k_sats_under_100k_sats".to_string(), + ), + _10m_sats_to_1btc: _0satsPattern2::new( + client.clone(), + "utxos_above_10m_sats_under_1btc".to_string(), + ), + _10sats_to_100sats: _0satsPattern2::new( + client.clone(), + "utxos_above_10sats_under_100sats".to_string(), + ), + _1btc_to_10btc: _0satsPattern2::new( + client.clone(), + "utxos_above_1btc_under_10btc".to_string(), + ), + _1k_btc_to_10k_btc: _0satsPattern2::new( + client.clone(), + "utxos_above_1k_btc_under_10k_btc".to_string(), + ), + _1k_sats_to_10k_sats: _0satsPattern2::new( + client.clone(), + "utxos_above_1k_sats_under_10k_sats".to_string(), + ), + _1m_sats_to_10m_sats: _0satsPattern2::new( + client.clone(), + "utxos_above_1m_sats_under_10m_sats".to_string(), + ), + _1sat_to_10sats: _0satsPattern2::new( + client.clone(), + "utxos_above_1sat_under_10sats".to_string(), + ), } } } @@ -3480,8 +5390,14 @@ pub struct MetricsTree_Distribution_UtxoCohorts_Term { impl MetricsTree_Distribution_UtxoCohorts_Term { pub fn new(client: Arc, base_path: String) -> Self { Self { - long: MetricsTree_Distribution_UtxoCohorts_Term_Long::new(client.clone(), format!("{base_path}_long")), - short: MetricsTree_Distribution_UtxoCohorts_Term_Short::new(client.clone(), format!("{base_path}_short")), + long: MetricsTree_Distribution_UtxoCohorts_Term_Long::new( + client.clone(), + format!("{base_path}_long"), + ), + short: MetricsTree_Distribution_UtxoCohorts_Term_Short::new( + 
client.clone(), + format!("{base_path}_short"), + ), } } } @@ -3637,20 +5553,59 @@ pub struct MetricsTree_Indexes { impl MetricsTree_Indexes { pub fn new(client: Arc, base_path: String) -> Self { Self { - address: MetricsTree_Indexes_Address::new(client.clone(), format!("{base_path}_address")), - dateindex: MetricsTree_Indexes_Dateindex::new(client.clone(), format!("{base_path}_dateindex")), - decadeindex: MetricsTree_Indexes_Decadeindex::new(client.clone(), format!("{base_path}_decadeindex")), - difficultyepoch: MetricsTree_Indexes_Difficultyepoch::new(client.clone(), format!("{base_path}_difficultyepoch")), - halvingepoch: MetricsTree_Indexes_Halvingepoch::new(client.clone(), format!("{base_path}_halvingepoch")), + address: MetricsTree_Indexes_Address::new( + client.clone(), + format!("{base_path}_address"), + ), + dateindex: MetricsTree_Indexes_Dateindex::new( + client.clone(), + format!("{base_path}_dateindex"), + ), + decadeindex: MetricsTree_Indexes_Decadeindex::new( + client.clone(), + format!("{base_path}_decadeindex"), + ), + difficultyepoch: MetricsTree_Indexes_Difficultyepoch::new( + client.clone(), + format!("{base_path}_difficultyepoch"), + ), + halvingepoch: MetricsTree_Indexes_Halvingepoch::new( + client.clone(), + format!("{base_path}_halvingepoch"), + ), height: MetricsTree_Indexes_Height::new(client.clone(), format!("{base_path}_height")), - monthindex: MetricsTree_Indexes_Monthindex::new(client.clone(), format!("{base_path}_monthindex")), - quarterindex: MetricsTree_Indexes_Quarterindex::new(client.clone(), format!("{base_path}_quarterindex")), - semesterindex: MetricsTree_Indexes_Semesterindex::new(client.clone(), format!("{base_path}_semesterindex")), - txindex: MetricsTree_Indexes_Txindex::new(client.clone(), format!("{base_path}_txindex")), - txinindex: MetricsTree_Indexes_Txinindex::new(client.clone(), format!("{base_path}_txinindex")), - txoutindex: MetricsTree_Indexes_Txoutindex::new(client.clone(), format!("{base_path}_txoutindex")), - 
weekindex: MetricsTree_Indexes_Weekindex::new(client.clone(), format!("{base_path}_weekindex")), - yearindex: MetricsTree_Indexes_Yearindex::new(client.clone(), format!("{base_path}_yearindex")), + monthindex: MetricsTree_Indexes_Monthindex::new( + client.clone(), + format!("{base_path}_monthindex"), + ), + quarterindex: MetricsTree_Indexes_Quarterindex::new( + client.clone(), + format!("{base_path}_quarterindex"), + ), + semesterindex: MetricsTree_Indexes_Semesterindex::new( + client.clone(), + format!("{base_path}_semesterindex"), + ), + txindex: MetricsTree_Indexes_Txindex::new( + client.clone(), + format!("{base_path}_txindex"), + ), + txinindex: MetricsTree_Indexes_Txinindex::new( + client.clone(), + format!("{base_path}_txinindex"), + ), + txoutindex: MetricsTree_Indexes_Txoutindex::new( + client.clone(), + format!("{base_path}_txoutindex"), + ), + weekindex: MetricsTree_Indexes_Weekindex::new( + client.clone(), + format!("{base_path}_weekindex"), + ), + yearindex: MetricsTree_Indexes_Yearindex::new( + client.clone(), + format!("{base_path}_yearindex"), + ), } } } @@ -3674,18 +5629,51 @@ pub struct MetricsTree_Indexes_Address { impl MetricsTree_Indexes_Address { pub fn new(client: Arc, base_path: String) -> Self { Self { - empty: MetricsTree_Indexes_Address_Empty::new(client.clone(), format!("{base_path}_empty")), - opreturn: MetricsTree_Indexes_Address_Opreturn::new(client.clone(), format!("{base_path}_opreturn")), + empty: MetricsTree_Indexes_Address_Empty::new( + client.clone(), + format!("{base_path}_empty"), + ), + opreturn: MetricsTree_Indexes_Address_Opreturn::new( + client.clone(), + format!("{base_path}_opreturn"), + ), p2a: MetricsTree_Indexes_Address_P2a::new(client.clone(), format!("{base_path}_p2a")), - p2ms: MetricsTree_Indexes_Address_P2ms::new(client.clone(), format!("{base_path}_p2ms")), - p2pk33: MetricsTree_Indexes_Address_P2pk33::new(client.clone(), format!("{base_path}_p2pk33")), - p2pk65: 
MetricsTree_Indexes_Address_P2pk65::new(client.clone(), format!("{base_path}_p2pk65")), - p2pkh: MetricsTree_Indexes_Address_P2pkh::new(client.clone(), format!("{base_path}_p2pkh")), - p2sh: MetricsTree_Indexes_Address_P2sh::new(client.clone(), format!("{base_path}_p2sh")), - p2tr: MetricsTree_Indexes_Address_P2tr::new(client.clone(), format!("{base_path}_p2tr")), - p2wpkh: MetricsTree_Indexes_Address_P2wpkh::new(client.clone(), format!("{base_path}_p2wpkh")), - p2wsh: MetricsTree_Indexes_Address_P2wsh::new(client.clone(), format!("{base_path}_p2wsh")), - unknown: MetricsTree_Indexes_Address_Unknown::new(client.clone(), format!("{base_path}_unknown")), + p2ms: MetricsTree_Indexes_Address_P2ms::new( + client.clone(), + format!("{base_path}_p2ms"), + ), + p2pk33: MetricsTree_Indexes_Address_P2pk33::new( + client.clone(), + format!("{base_path}_p2pk33"), + ), + p2pk65: MetricsTree_Indexes_Address_P2pk65::new( + client.clone(), + format!("{base_path}_p2pk65"), + ), + p2pkh: MetricsTree_Indexes_Address_P2pkh::new( + client.clone(), + format!("{base_path}_p2pkh"), + ), + p2sh: MetricsTree_Indexes_Address_P2sh::new( + client.clone(), + format!("{base_path}_p2sh"), + ), + p2tr: MetricsTree_Indexes_Address_P2tr::new( + client.clone(), + format!("{base_path}_p2tr"), + ), + p2wpkh: MetricsTree_Indexes_Address_P2wpkh::new( + client.clone(), + format!("{base_path}_p2wpkh"), + ), + p2wsh: MetricsTree_Indexes_Address_P2wsh::new( + client.clone(), + format!("{base_path}_p2wsh"), + ), + unknown: MetricsTree_Indexes_Address_Unknown::new( + client.clone(), + format!("{base_path}_unknown"), + ), } } } @@ -4144,12 +6132,24 @@ impl MetricsTree_Market { Self { ath: MetricsTree_Market_Ath::new(client.clone(), format!("{base_path}_ath")), dca: MetricsTree_Market_Dca::new(client.clone(), format!("{base_path}_dca")), - indicators: MetricsTree_Market_Indicators::new(client.clone(), format!("{base_path}_indicators")), + indicators: MetricsTree_Market_Indicators::new( + client.clone(), + 
format!("{base_path}_indicators"), + ), lookback: LookbackPattern::new(client.clone(), "price".to_string()), - moving_average: MetricsTree_Market_MovingAverage::new(client.clone(), format!("{base_path}_moving_average")), + moving_average: MetricsTree_Market_MovingAverage::new( + client.clone(), + format!("{base_path}_moving_average"), + ), range: MetricsTree_Market_Range::new(client.clone(), format!("{base_path}_range")), - returns: MetricsTree_Market_Returns::new(client.clone(), format!("{base_path}_returns")), - volatility: MetricsTree_Market_Volatility::new(client.clone(), format!("{base_path}_volatility")), + returns: MetricsTree_Market_Returns::new( + client.clone(), + format!("{base_path}_returns"), + ), + volatility: MetricsTree_Market_Volatility::new( + client.clone(), + format!("{base_path}_volatility"), + ), } } } @@ -4167,20 +6167,32 @@ pub struct MetricsTree_Market_Ath { impl MetricsTree_Market_Ath { pub fn new(client: Arc, base_path: String) -> Self { Self { - days_since_price_ath: MetricPattern4::new(client.clone(), "days_since_price_ath".to_string()), - max_days_between_price_aths: MetricPattern4::new(client.clone(), "max_days_between_price_aths".to_string()), - max_years_between_price_aths: MetricPattern4::new(client.clone(), "max_years_between_price_aths".to_string()), + days_since_price_ath: MetricPattern4::new( + client.clone(), + "days_since_price_ath".to_string(), + ), + max_days_between_price_aths: MetricPattern4::new( + client.clone(), + "max_days_between_price_aths".to_string(), + ), + max_years_between_price_aths: MetricPattern4::new( + client.clone(), + "max_years_between_price_aths".to_string(), + ), price_ath: MetricPattern1::new(client.clone(), "price_ath".to_string()), price_drawdown: MetricPattern3::new(client.clone(), "price_drawdown".to_string()), - years_since_price_ath: MetricPattern4::new(client.clone(), "years_since_price_ath".to_string()), + years_since_price_ath: MetricPattern4::new( + client.clone(), + 
"years_since_price_ath".to_string(), + ), } } } /// Metrics tree node. pub struct MetricsTree_Market_Dca { - pub class_average_price: MetricsTree_Market_Dca_ClassAveragePrice, - pub class_returns: ClassAveragePricePattern, + pub class_average_price: ClassAveragePricePattern, + pub class_returns: MetricsTree_Market_Dca_ClassReturns, pub class_stack: MetricsTree_Market_Dca_ClassStack, pub period_average_price: PeriodAveragePricePattern, pub period_cagr: PeriodCagrPattern, @@ -4192,47 +6204,65 @@ pub struct MetricsTree_Market_Dca { impl MetricsTree_Market_Dca { pub fn new(client: Arc, base_path: String) -> Self { Self { - class_average_price: MetricsTree_Market_Dca_ClassAveragePrice::new(client.clone(), format!("{base_path}_class_average_price")), - class_returns: ClassAveragePricePattern::new(client.clone(), "dca_class".to_string()), - class_stack: MetricsTree_Market_Dca_ClassStack::new(client.clone(), format!("{base_path}_class_stack")), - period_average_price: PeriodAveragePricePattern::new(client.clone(), "dca_average_price".to_string()), + class_average_price: ClassAveragePricePattern::new( + client.clone(), + "dca_class".to_string(), + ), + class_returns: MetricsTree_Market_Dca_ClassReturns::new( + client.clone(), + format!("{base_path}_class_returns"), + ), + class_stack: MetricsTree_Market_Dca_ClassStack::new( + client.clone(), + format!("{base_path}_class_stack"), + ), + period_average_price: PeriodAveragePricePattern::new( + client.clone(), + "dca_average_price".to_string(), + ), period_cagr: PeriodCagrPattern::new(client.clone(), "dca_cagr".to_string()), - period_lump_sum_stack: PeriodLumpSumStackPattern::new(client.clone(), "lump_sum_stack".to_string()), - period_returns: PeriodAveragePricePattern::new(client.clone(), "dca_returns".to_string()), + period_lump_sum_stack: PeriodLumpSumStackPattern::new( + client.clone(), + "lump_sum_stack".to_string(), + ), + period_returns: PeriodAveragePricePattern::new( + client.clone(), + "dca_returns".to_string(), + ), 
period_stack: PeriodLumpSumStackPattern::new(client.clone(), "dca_stack".to_string()), } } } /// Metrics tree node. -pub struct MetricsTree_Market_Dca_ClassAveragePrice { - pub _2015: MetricPattern4, - pub _2016: MetricPattern4, - pub _2017: MetricPattern4, - pub _2018: MetricPattern4, - pub _2019: MetricPattern4, - pub _2020: MetricPattern4, - pub _2021: MetricPattern4, - pub _2022: MetricPattern4, - pub _2023: MetricPattern4, - pub _2024: MetricPattern4, - pub _2025: MetricPattern4, +pub struct MetricsTree_Market_Dca_ClassReturns { + pub _2015: MetricPattern4, + pub _2016: MetricPattern4, + pub _2017: MetricPattern4, + pub _2018: MetricPattern4, + pub _2019: MetricPattern4, + pub _2020: MetricPattern4, + pub _2021: MetricPattern4, + pub _2022: MetricPattern4, + pub _2023: MetricPattern4, + pub _2024: MetricPattern4, + pub _2025: MetricPattern4, } -impl MetricsTree_Market_Dca_ClassAveragePrice { +impl MetricsTree_Market_Dca_ClassReturns { pub fn new(client: Arc, base_path: String) -> Self { Self { - _2015: MetricPattern4::new(client.clone(), "dca_class_2015_average_price".to_string()), - _2016: MetricPattern4::new(client.clone(), "dca_class_2016_average_price".to_string()), - _2017: MetricPattern4::new(client.clone(), "dca_class_2017_average_price".to_string()), - _2018: MetricPattern4::new(client.clone(), "dca_class_2018_average_price".to_string()), - _2019: MetricPattern4::new(client.clone(), "dca_class_2019_average_price".to_string()), - _2020: MetricPattern4::new(client.clone(), "dca_class_2020_average_price".to_string()), - _2021: MetricPattern4::new(client.clone(), "dca_class_2021_average_price".to_string()), - _2022: MetricPattern4::new(client.clone(), "dca_class_2022_average_price".to_string()), - _2023: MetricPattern4::new(client.clone(), "dca_class_2023_average_price".to_string()), - _2024: MetricPattern4::new(client.clone(), "dca_class_2024_average_price".to_string()), - _2025: MetricPattern4::new(client.clone(), 
"dca_class_2025_average_price".to_string()), + _2015: MetricPattern4::new(client.clone(), "dca_class_2015_returns".to_string()), + _2016: MetricPattern4::new(client.clone(), "dca_class_2016_returns".to_string()), + _2017: MetricPattern4::new(client.clone(), "dca_class_2017_returns".to_string()), + _2018: MetricPattern4::new(client.clone(), "dca_class_2018_returns".to_string()), + _2019: MetricPattern4::new(client.clone(), "dca_class_2019_returns".to_string()), + _2020: MetricPattern4::new(client.clone(), "dca_class_2020_returns".to_string()), + _2021: MetricPattern4::new(client.clone(), "dca_class_2021_returns".to_string()), + _2022: MetricPattern4::new(client.clone(), "dca_class_2022_returns".to_string()), + _2023: MetricPattern4::new(client.clone(), "dca_class_2023_returns".to_string()), + _2024: MetricPattern4::new(client.clone(), "dca_class_2024_returns".to_string()), + _2025: MetricPattern4::new(client.clone(), "dca_class_2025_returns".to_string()), } } } @@ -4306,8 +6336,14 @@ impl MetricsTree_Market_Indicators { rsi_14d: MetricPattern6::new(client.clone(), "rsi_14d".to_string()), rsi_14d_max: MetricPattern6::new(client.clone(), "rsi_14d_max".to_string()), rsi_14d_min: MetricPattern6::new(client.clone(), "rsi_14d_min".to_string()), - rsi_average_gain_14d: MetricPattern6::new(client.clone(), "rsi_average_gain_14d".to_string()), - rsi_average_loss_14d: MetricPattern6::new(client.clone(), "rsi_average_loss_14d".to_string()), + rsi_average_gain_14d: MetricPattern6::new( + client.clone(), + "rsi_average_gain_14d".to_string(), + ), + rsi_average_loss_14d: MetricPattern6::new( + client.clone(), + "rsi_average_loss_14d".to_string(), + ), rsi_gains: MetricPattern6::new(client.clone(), "rsi_gains".to_string()), rsi_losses: MetricPattern6::new(client.clone(), "rsi_losses".to_string()), stoch_d: MetricPattern6::new(client.clone(), "stoch_d".to_string()), @@ -4375,8 +6411,14 @@ impl MetricsTree_Market_MovingAverage { price_1y_sma: Price111dSmaPattern::new(client.clone(), 
"price_1y_sma".to_string()), price_200d_ema: Price111dSmaPattern::new(client.clone(), "price_200d_ema".to_string()), price_200d_sma: Price111dSmaPattern::new(client.clone(), "price_200d_sma".to_string()), - price_200d_sma_x0_8: MetricPattern4::new(client.clone(), "price_200d_sma_x0_8".to_string()), - price_200d_sma_x2_4: MetricPattern4::new(client.clone(), "price_200d_sma_x2_4".to_string()), + price_200d_sma_x0_8: MetricPattern4::new( + client.clone(), + "price_200d_sma_x0_8".to_string(), + ), + price_200d_sma_x2_4: MetricPattern4::new( + client.clone(), + "price_200d_sma_x2_4".to_string(), + ), price_200w_ema: Price111dSmaPattern::new(client.clone(), "price_200w_ema".to_string()), price_200w_sma: Price111dSmaPattern::new(client.clone(), "price_200w_sma".to_string()), price_21d_ema: Price111dSmaPattern::new(client.clone(), "price_21d_ema".to_string()), @@ -4424,11 +6466,17 @@ impl MetricsTree_Market_Range { price_1w_min: MetricPattern4::new(client.clone(), "price_1w_min".to_string()), price_1y_max: MetricPattern4::new(client.clone(), "price_1y_max".to_string()), price_1y_min: MetricPattern4::new(client.clone(), "price_1y_min".to_string()), - price_2w_choppiness_index: MetricPattern4::new(client.clone(), "price_2w_choppiness_index".to_string()), + price_2w_choppiness_index: MetricPattern4::new( + client.clone(), + "price_2w_choppiness_index".to_string(), + ), price_2w_max: MetricPattern4::new(client.clone(), "price_2w_max".to_string()), price_2w_min: MetricPattern4::new(client.clone(), "price_2w_min".to_string()), price_true_range: MetricPattern6::new(client.clone(), "price_true_range".to_string()), - price_true_range_2w_sum: MetricPattern6::new(client.clone(), "price_true_range_2w_sum".to_string()), + price_true_range_2w_sum: MetricPattern6::new( + client.clone(), + "price_true_range_2w_sum".to_string(), + ), } } } @@ -4449,15 +6497,36 @@ pub struct MetricsTree_Market_Returns { impl MetricsTree_Market_Returns { pub fn new(client: Arc, base_path: String) -> Self { 
Self { - _1d_returns_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1m_sd".to_string()), - _1d_returns_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1w_sd".to_string()), - _1d_returns_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "1d_returns_1y_sd".to_string()), + _1d_returns_1m_sd: _1dReturns1mSdPattern::new( + client.clone(), + "1d_returns_1m_sd".to_string(), + ), + _1d_returns_1w_sd: _1dReturns1mSdPattern::new( + client.clone(), + "1d_returns_1w_sd".to_string(), + ), + _1d_returns_1y_sd: _1dReturns1mSdPattern::new( + client.clone(), + "1d_returns_1y_sd".to_string(), + ), cagr: PeriodCagrPattern::new(client.clone(), "cagr".to_string()), - downside_1m_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1m_sd".to_string()), - downside_1w_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1w_sd".to_string()), - downside_1y_sd: _1dReturns1mSdPattern::new(client.clone(), "downside_1y_sd".to_string()), + downside_1m_sd: _1dReturns1mSdPattern::new( + client.clone(), + "downside_1m_sd".to_string(), + ), + downside_1w_sd: _1dReturns1mSdPattern::new( + client.clone(), + "downside_1w_sd".to_string(), + ), + downside_1y_sd: _1dReturns1mSdPattern::new( + client.clone(), + "downside_1y_sd".to_string(), + ), downside_returns: MetricPattern6::new(client.clone(), "downside_returns".to_string()), - price_returns: MetricsTree_Market_Returns_PriceReturns::new(client.clone(), format!("{base_path}_price_returns")), + price_returns: MetricsTree_Market_Returns_PriceReturns::new( + client.clone(), + format!("{base_path}_price_returns"), + ), } } } @@ -4515,9 +6584,18 @@ pub struct MetricsTree_Market_Volatility { impl MetricsTree_Market_Volatility { pub fn new(client: Arc, base_path: String) -> Self { Self { - price_1m_volatility: MetricPattern4::new(client.clone(), "price_1m_volatility".to_string()), - price_1w_volatility: MetricPattern4::new(client.clone(), "price_1w_volatility".to_string()), - price_1y_volatility: 
MetricPattern4::new(client.clone(), "price_1y_volatility".to_string()), + price_1m_volatility: MetricPattern4::new( + client.clone(), + "price_1m_volatility".to_string(), + ), + price_1w_volatility: MetricPattern4::new( + client.clone(), + "price_1w_volatility".to_string(), + ), + price_1y_volatility: MetricPattern4::new( + client.clone(), + "price_1y_volatility".to_string(), + ), sharpe_1m: MetricPattern6::new(client.clone(), "sharpe_1m".to_string()), sharpe_1w: MetricPattern6::new(client.clone(), "sharpe_1w".to_string()), sharpe_1y: MetricPattern6::new(client.clone(), "sharpe_1y".to_string()), @@ -4772,7 +6850,10 @@ impl MetricsTree_Pools_Vecs { binancepool: AaopoolPattern::new(client.clone(), "binancepool".to_string()), bitalo: AaopoolPattern::new(client.clone(), "bitalo".to_string()), bitclub: AaopoolPattern::new(client.clone(), "bitclub".to_string()), - bitcoinaffiliatenetwork: AaopoolPattern::new(client.clone(), "bitcoinaffiliatenetwork".to_string()), + bitcoinaffiliatenetwork: AaopoolPattern::new( + client.clone(), + "bitcoinaffiliatenetwork".to_string(), + ), bitcoincom: AaopoolPattern::new(client.clone(), "bitcoincom".to_string()), bitcoinindia: AaopoolPattern::new(client.clone(), "bitcoinindia".to_string()), bitcoinrussia: AaopoolPattern::new(client.clone(), "bitcoinrussia".to_string()), @@ -4818,13 +6899,19 @@ impl MetricsTree_Pools_Vecs { ekanembtc: AaopoolPattern::new(client.clone(), "ekanembtc".to_string()), eligius: AaopoolPattern::new(client.clone(), "eligius".to_string()), emcdpool: AaopoolPattern::new(client.clone(), "emcdpool".to_string()), - entrustcharitypool: AaopoolPattern::new(client.clone(), "entrustcharitypool".to_string()), + entrustcharitypool: AaopoolPattern::new( + client.clone(), + "entrustcharitypool".to_string(), + ), eobot: AaopoolPattern::new(client.clone(), "eobot".to_string()), exxbw: AaopoolPattern::new(client.clone(), "exxbw".to_string()), f2pool: AaopoolPattern::new(client.clone(), "f2pool".to_string()), fiftyeightcoin: 
AaopoolPattern::new(client.clone(), "fiftyeightcoin".to_string()), foundryusa: AaopoolPattern::new(client.clone(), "foundryusa".to_string()), - futurebitapollosolo: AaopoolPattern::new(client.clone(), "futurebitapollosolo".to_string()), + futurebitapollosolo: AaopoolPattern::new( + client.clone(), + "futurebitapollosolo".to_string(), + ), gbminers: AaopoolPattern::new(client.clone(), "gbminers".to_string()), ghashio: AaopoolPattern::new(client.clone(), "ghashio".to_string()), givemecoins: AaopoolPattern::new(client.clone(), "givemecoins".to_string()), @@ -4905,7 +6992,10 @@ impl MetricsTree_Pools_Vecs { tiger: AaopoolPattern::new(client.clone(), "tiger".to_string()), tigerpoolnet: AaopoolPattern::new(client.clone(), "tigerpoolnet".to_string()), titan: AaopoolPattern::new(client.clone(), "titan".to_string()), - transactioncoinmining: AaopoolPattern::new(client.clone(), "transactioncoinmining".to_string()), + transactioncoinmining: AaopoolPattern::new( + client.clone(), + "transactioncoinmining".to_string(), + ), trickysbtcpool: AaopoolPattern::new(client.clone(), "trickysbtcpool".to_string()), triplemining: AaopoolPattern::new(client.clone(), "triplemining".to_string()), twentyoneinc: AaopoolPattern::new(client.clone(), "twentyoneinc".to_string()), @@ -5024,30 +7114,87 @@ pub struct MetricsTree_Price_Oracle { impl MetricsTree_Price_Oracle { pub fn new(client: Arc, base_path: String) -> Self { Self { - height_to_first_pairoutputindex: MetricPattern11::new(client.clone(), "height_to_first_pairoutputindex".to_string()), + height_to_first_pairoutputindex: MetricPattern11::new( + client.clone(), + "height_to_first_pairoutputindex".to_string(), + ), ohlc_cents: MetricPattern6::new(client.clone(), "oracle_ohlc_cents".to_string()), ohlc_dollars: MetricPattern6::new(client.clone(), "oracle_ohlc".to_string()), output0_value: MetricPattern33::new(client.clone(), "pair_output0_value".to_string()), output1_value: MetricPattern33::new(client.clone(), 
"pair_output1_value".to_string()), - pairoutputindex_to_txindex: MetricPattern33::new(client.clone(), "pairoutputindex_to_txindex".to_string()), - phase_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_daily".to_string()), - phase_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_daily_dollars".to_string()), + pairoutputindex_to_txindex: MetricPattern33::new( + client.clone(), + "pairoutputindex_to_txindex".to_string(), + ), + phase_daily_cents: PhaseDailyCentsPattern::new( + client.clone(), + "phase_daily".to_string(), + ), + phase_daily_dollars: PhaseDailyCentsPattern::new( + client.clone(), + "phase_daily_dollars".to_string(), + ), phase_histogram: MetricPattern11::new(client.clone(), "phase_histogram".to_string()), - phase_price_cents: MetricPattern11::new(client.clone(), "phase_price_cents".to_string()), - phase_v2_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_daily".to_string()), - phase_v2_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_daily_dollars".to_string()), - phase_v2_histogram: MetricPattern11::new(client.clone(), "phase_v2_histogram".to_string()), - phase_v2_peak_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_peak_daily".to_string()), - phase_v2_peak_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_peak_daily_dollars".to_string()), - phase_v2_peak_price_cents: MetricPattern11::new(client.clone(), "phase_v2_peak_price_cents".to_string()), - phase_v2_price_cents: MetricPattern11::new(client.clone(), "phase_v2_price_cents".to_string()), - phase_v3_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_daily".to_string()), - phase_v3_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_daily_dollars".to_string()), - phase_v3_histogram: MetricPattern11::new(client.clone(), "phase_v3_histogram".to_string()), - phase_v3_peak_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_peak_daily".to_string()), 
- phase_v3_peak_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_peak_daily_dollars".to_string()), - phase_v3_peak_price_cents: MetricPattern11::new(client.clone(), "phase_v3_peak_price_cents".to_string()), - phase_v3_price_cents: MetricPattern11::new(client.clone(), "phase_v3_price_cents".to_string()), + phase_price_cents: MetricPattern11::new( + client.clone(), + "phase_price_cents".to_string(), + ), + phase_v2_daily_cents: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v2_daily".to_string(), + ), + phase_v2_daily_dollars: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v2_daily_dollars".to_string(), + ), + phase_v2_histogram: MetricPattern11::new( + client.clone(), + "phase_v2_histogram".to_string(), + ), + phase_v2_peak_daily_cents: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v2_peak_daily".to_string(), + ), + phase_v2_peak_daily_dollars: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v2_peak_daily_dollars".to_string(), + ), + phase_v2_peak_price_cents: MetricPattern11::new( + client.clone(), + "phase_v2_peak_price_cents".to_string(), + ), + phase_v2_price_cents: MetricPattern11::new( + client.clone(), + "phase_v2_price_cents".to_string(), + ), + phase_v3_daily_cents: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v3_daily".to_string(), + ), + phase_v3_daily_dollars: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v3_daily_dollars".to_string(), + ), + phase_v3_histogram: MetricPattern11::new( + client.clone(), + "phase_v3_histogram".to_string(), + ), + phase_v3_peak_daily_cents: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v3_peak_daily".to_string(), + ), + phase_v3_peak_daily_dollars: PhaseDailyCentsPattern::new( + client.clone(), + "phase_v3_peak_daily_dollars".to_string(), + ), + phase_v3_peak_price_cents: MetricPattern11::new( + client.clone(), + "phase_v3_peak_price_cents".to_string(), + ), + phase_v3_price_cents: MetricPattern11::new( + client.clone(), + 
"phase_v3_price_cents".to_string(), + ), price_cents: MetricPattern11::new(client.clone(), "oracle_price_cents".to_string()), tx_count: MetricPattern6::new(client.clone(), "oracle_tx_count".to_string()), } @@ -5088,10 +7235,22 @@ impl MetricsTree_Scripts { Self { count: MetricsTree_Scripts_Count::new(client.clone(), format!("{base_path}_count")), empty_to_txindex: MetricPattern9::new(client.clone(), "txindex".to_string()), - first_emptyoutputindex: MetricPattern11::new(client.clone(), "first_emptyoutputindex".to_string()), - first_opreturnindex: MetricPattern11::new(client.clone(), "first_opreturnindex".to_string()), - first_p2msoutputindex: MetricPattern11::new(client.clone(), "first_p2msoutputindex".to_string()), - first_unknownoutputindex: MetricPattern11::new(client.clone(), "first_unknownoutputindex".to_string()), + first_emptyoutputindex: MetricPattern11::new( + client.clone(), + "first_emptyoutputindex".to_string(), + ), + first_opreturnindex: MetricPattern11::new( + client.clone(), + "first_opreturnindex".to_string(), + ), + first_p2msoutputindex: MetricPattern11::new( + client.clone(), + "first_p2msoutputindex".to_string(), + ), + first_unknownoutputindex: MetricPattern11::new( + client.clone(), + "first_unknownoutputindex".to_string(), + ), opreturn_to_txindex: MetricPattern14::new(client.clone(), "txindex".to_string()), p2ms_to_txindex: MetricPattern17::new(client.clone(), "txindex".to_string()), unknown_to_txindex: MetricPattern28::new(client.clone(), "txindex".to_string()), @@ -5134,8 +7293,14 @@ impl MetricsTree_Scripts_Count { p2wpkh: DollarsPattern::new(client.clone(), "p2wpkh_count".to_string()), p2wsh: DollarsPattern::new(client.clone(), "p2wsh_count".to_string()), segwit: DollarsPattern::new(client.clone(), "segwit_count".to_string()), - segwit_adoption: SegwitAdoptionPattern::new(client.clone(), "segwit_adoption".to_string()), - taproot_adoption: SegwitAdoptionPattern::new(client.clone(), "taproot_adoption".to_string()), + segwit_adoption: 
SegwitAdoptionPattern::new( + client.clone(), + "segwit_adoption".to_string(), + ), + taproot_adoption: SegwitAdoptionPattern::new( + client.clone(), + "taproot_adoption".to_string(), + ), unknownoutput: DollarsPattern::new(client.clone(), "unknownoutput_count".to_string()), } } @@ -5167,10 +7332,16 @@ impl MetricsTree_Supply { pub fn new(client: Arc, base_path: String) -> Self { Self { burned: MetricsTree_Supply_Burned::new(client.clone(), format!("{base_path}_burned")), - circulating: MetricsTree_Supply_Circulating::new(client.clone(), format!("{base_path}_circulating")), + circulating: MetricsTree_Supply_Circulating::new( + client.clone(), + format!("{base_path}_circulating"), + ), inflation: MetricPattern4::new(client.clone(), "inflation_rate".to_string()), market_cap: MetricPattern1::new(client.clone(), "market_cap".to_string()), - velocity: MetricsTree_Supply_Velocity::new(client.clone(), format!("{base_path}_velocity")), + velocity: MetricsTree_Supply_Velocity::new( + client.clone(), + format!("{base_path}_velocity"), + ), } } } @@ -5185,7 +7356,10 @@ impl MetricsTree_Supply_Burned { pub fn new(client: Arc, base_path: String) -> Self { Self { opreturn: UnclaimedRewardsPattern::new(client.clone(), "opreturn_supply".to_string()), - unspendable: UnclaimedRewardsPattern::new(client.clone(), "unspendable_supply".to_string()), + unspendable: UnclaimedRewardsPattern::new( + client.clone(), + "unspendable_supply".to_string(), + ), } } } @@ -5245,20 +7419,32 @@ impl MetricsTree_Transactions { pub fn new(client: Arc, base_path: String) -> Self { Self { base_size: MetricPattern27::new(client.clone(), "base_size".to_string()), - count: MetricsTree_Transactions_Count::new(client.clone(), format!("{base_path}_count")), + count: MetricsTree_Transactions_Count::new( + client.clone(), + format!("{base_path}_count"), + ), fees: MetricsTree_Transactions_Fees::new(client.clone(), format!("{base_path}_fees")), first_txindex: MetricPattern11::new(client.clone(), 
"first_txindex".to_string()), first_txinindex: MetricPattern27::new(client.clone(), "first_txinindex".to_string()), first_txoutindex: MetricPattern27::new(client.clone(), "first_txoutindex".to_string()), height: MetricPattern27::new(client.clone(), "height".to_string()), - is_explicitly_rbf: MetricPattern27::new(client.clone(), "is_explicitly_rbf".to_string()), + is_explicitly_rbf: MetricPattern27::new( + client.clone(), + "is_explicitly_rbf".to_string(), + ), rawlocktime: MetricPattern27::new(client.clone(), "rawlocktime".to_string()), size: MetricsTree_Transactions_Size::new(client.clone(), format!("{base_path}_size")), total_size: MetricPattern27::new(client.clone(), "total_size".to_string()), txid: MetricPattern27::new(client.clone(), "txid".to_string()), txversion: MetricPattern27::new(client.clone(), "txversion".to_string()), - versions: MetricsTree_Transactions_Versions::new(client.clone(), format!("{base_path}_versions")), - volume: MetricsTree_Transactions_Volume::new(client.clone(), format!("{base_path}_volume")), + versions: MetricsTree_Transactions_Versions::new( + client.clone(), + format!("{base_path}_versions"), + ), + volume: MetricsTree_Transactions_Volume::new( + client.clone(), + format!("{base_path}_volume"), + ), } } } @@ -5309,7 +7495,10 @@ impl MetricsTree_Transactions_Fees_Fee { pub fn new(client: Arc, base_path: String) -> Self { Self { bitcoin: CountPattern2::new(client.clone(), "fee_btc".to_string()), - dollars: MetricsTree_Transactions_Fees_Fee_Dollars::new(client.clone(), format!("{base_path}_dollars")), + dollars: MetricsTree_Transactions_Fees_Fee_Dollars::new( + client.clone(), + format!("{base_path}_dollars"), + ), sats: CountPattern2::new(client.clone(), "fee".to_string()), txindex: MetricPattern27::new(client.clone(), "fee".to_string()), } @@ -5336,7 +7525,10 @@ impl MetricsTree_Transactions_Fees_Fee_Dollars { Self { average: MetricPattern1::new(client.clone(), "fee_usd_average".to_string()), cumulative: 
MetricPattern2::new(client.clone(), "fee_usd_cumulative".to_string()), - height_cumulative: MetricPattern11::new(client.clone(), "fee_usd_cumulative".to_string()), + height_cumulative: MetricPattern11::new( + client.clone(), + "fee_usd_cumulative".to_string(), + ), max: MetricPattern1::new(client.clone(), "fee_usd_max".to_string()), median: MetricPattern11::new(client.clone(), "fee_usd_median".to_string()), min: MetricPattern1::new(client.clone(), "fee_usd_min".to_string()), @@ -5442,12 +7634,12 @@ impl BrkClient { /// .last(10) /// .json::()?; /// ``` - pub fn metric(&self, metric: impl Into, index: Index) -> MetricEndpointBuilder { - MetricEndpointBuilder::new( - self.base.clone(), - Arc::from(metric.into().as_str()), - index, - ) + pub fn metric( + &self, + metric: impl Into, + index: Index, + ) -> MetricEndpointBuilder { + MetricEndpointBuilder::new(self.base.clone(), Arc::from(metric.into().as_str()), index) } /// Compact OpenAPI specification @@ -5477,11 +7669,24 @@ impl BrkClient { /// *[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-transactions)* /// /// Endpoint: `GET /api/address/{address}/txs` - pub fn get_address_txs(&self, address: Address, after_txid: Option<&str>, limit: Option) -> Result> { + pub fn get_address_txs( + &self, + address: Address, + after_txid: Option<&str>, + limit: Option, + ) -> Result> { let mut query = Vec::new(); - if let Some(v) = after_txid { query.push(format!("after_txid={}", v)); } - if let Some(v) = limit { query.push(format!("limit={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; + if let Some(v) = after_txid { + query.push(format!("after_txid={}", v)); + } + if let Some(v) = limit { + query.push(format!("limit={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; let path = format!("/api/address/{address}/txs{}", query_str); self.base.get_json(&path) } @@ -5493,11 +7698,24 @@ 
impl BrkClient { /// *[Mempool.space docs](https://mempool.space/docs/api/rest#get-address-transactions-chain)* /// /// Endpoint: `GET /api/address/{address}/txs/chain` - pub fn get_address_confirmed_txs(&self, address: Address, after_txid: Option<&str>, limit: Option) -> Result> { + pub fn get_address_confirmed_txs( + &self, + address: Address, + after_txid: Option<&str>, + limit: Option, + ) -> Result> { let mut query = Vec::new(); - if let Some(v) = after_txid { query.push(format!("after_txid={}", v)); } - if let Some(v) = limit { query.push(format!("limit={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; + if let Some(v) = after_txid { + query.push(format!("after_txid={}", v)); + } + if let Some(v) = limit { + query.push(format!("limit={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; let path = format!("/api/address/{address}/txs/chain{}", query_str); self.base.get_json(&path) } @@ -5510,7 +7728,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/address/{address}/txs/mempool` pub fn get_address_mempool_txs(&self, address: Address) -> Result> { - self.base.get_json(&format!("/api/address/{address}/txs/mempool")) + self.base + .get_json(&format!("/api/address/{address}/txs/mempool")) } /// Address UTXOs @@ -5576,7 +7795,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/block/{hash}/txid/{index}` pub fn get_block_txid(&self, hash: BlockHash, index: TxIndex) -> Result { - self.base.get_json(&format!("/api/block/{hash}/txid/{index}")) + self.base + .get_json(&format!("/api/block/{hash}/txid/{index}")) } /// Block transaction IDs @@ -5598,7 +7818,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/block/{hash}/txs/{start_index}` pub fn get_block_txs(&self, hash: BlockHash, start_index: TxIndex) -> Result> { - self.base.get_json(&format!("/api/block/{hash}/txs/{start_index}")) + self.base + 
.get_json(&format!("/api/block/{hash}/txs/{start_index}")) } /// Recent blocks @@ -5659,14 +7880,38 @@ impl BrkClient { /// Fetch data for a specific metric at the given index. Use query parameters to filter by date range and format (json/csv). /// /// Endpoint: `GET /api/metric/{metric}/{index}` - pub fn get_metric(&self, metric: Metric, index: Index, start: Option, end: Option, limit: Option<&str>, format: Option) -> Result> { + pub fn get_metric( + &self, + metric: Metric, + index: Index, + start: Option, + end: Option, + limit: Option<&str>, + format: Option, + ) -> Result> { let mut query = Vec::new(); - if let Some(v) = start { query.push(format!("start={}", v)); } - if let Some(v) = end { query.push(format!("end={}", v)); } - if let Some(v) = limit { query.push(format!("limit={}", v)); } - if let Some(v) = format { query.push(format!("format={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; - let path = format!("/api/metric/{metric}/{}{}", index.serialize_long(), query_str); + if let Some(v) = start { + query.push(format!("start={}", v)); + } + if let Some(v) = end { + query.push(format!("end={}", v)); + } + if let Some(v) = limit { + query.push(format!("limit={}", v)); + } + if let Some(v) = format { + query.push(format!("format={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; + let path = format!( + "/api/metric/{metric}/{}{}", + index.serialize_long(), + query_str + ); if format == Some(Format::CSV) { self.base.get_text(&path).map(FormatResponse::Csv) } else { @@ -5688,15 +7933,35 @@ impl BrkClient { /// Fetch multiple metrics in a single request. Supports filtering by index and date range. Returns an array of MetricData objects. For a single metric, use `get_metric` instead. 
/// /// Endpoint: `GET /api/metrics/bulk` - pub fn get_metrics(&self, metrics: Metrics, index: Index, start: Option, end: Option, limit: Option<&str>, format: Option) -> Result>> { + pub fn get_metrics( + &self, + metrics: Metrics, + index: Index, + start: Option, + end: Option, + limit: Option<&str>, + format: Option, + ) -> Result>> { let mut query = Vec::new(); query.push(format!("metrics={}", metrics)); query.push(format!("index={}", index)); - if let Some(v) = start { query.push(format!("start={}", v)); } - if let Some(v) = end { query.push(format!("end={}", v)); } - if let Some(v) = limit { query.push(format!("limit={}", v)); } - if let Some(v) = format { query.push(format!("format={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; + if let Some(v) = start { + query.push(format!("start={}", v)); + } + if let Some(v) = end { + query.push(format!("end={}", v)); + } + if let Some(v) = limit { + query.push(format!("limit={}", v)); + } + if let Some(v) = format { + query.push(format!("format={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; let path = format!("/api/metrics/bulk{}", query_str); if format == Some(Format::CSV) { self.base.get_text(&path).map(FormatResponse::Csv) @@ -5730,8 +7995,14 @@ impl BrkClient { /// Endpoint: `GET /api/metrics/list` pub fn list_metrics(&self, page: Option) -> Result { let mut query = Vec::new(); - if let Some(v) = page { query.push(format!("page={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; + if let Some(v) = page { + query.push(format!("page={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; let path = format!("/api/metrics/list{}", query_str); self.base.get_json(&path) } @@ -5743,8 +8014,14 @@ impl BrkClient { /// Endpoint: `GET /api/metrics/search/{metric}` pub fn 
search_metrics(&self, metric: Metric, limit: Option) -> Result> { let mut query = Vec::new(); - if let Some(v) = limit { query.push(format!("limit={}", v)); } - let query_str = if query.is_empty() { String::new() } else { format!("?{}", query.join("&")) }; + if let Some(v) = limit { + query.push(format!("limit={}", v)); + } + let query_str = if query.is_empty() { + String::new() + } else { + format!("?{}", query.join("&")) + }; let path = format!("/api/metrics/search/{metric}{}", query_str); self.base.get_json(&path) } @@ -5797,7 +8074,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/tx/{txid}/outspend/{vout}` pub fn get_tx_outspend(&self, txid: Txid, vout: Vout) -> Result { - self.base.get_json(&format!("/api/tx/{txid}/outspend/{vout}")) + self.base + .get_json(&format!("/api/tx/{txid}/outspend/{vout}")) } /// All output spend statuses @@ -5830,7 +8108,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/difficulty-adjustment` pub fn get_difficulty_adjustment(&self) -> Result { - self.base.get_json(&format!("/api/v1/difficulty-adjustment")) + self.base + .get_json(&format!("/api/v1/difficulty-adjustment")) } /// Projected mempool blocks @@ -5863,7 +8142,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/blocks/fee-rates/{time_period}` pub fn get_block_fee_rates(&self, time_period: TimePeriod) -> Result { - self.base.get_json(&format!("/api/v1/mining/blocks/fee-rates/{time_period}")) + self.base + .get_json(&format!("/api/v1/mining/blocks/fee-rates/{time_period}")) } /// Block fees @@ -5874,7 +8154,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/blocks/fees/{time_period}` pub fn get_block_fees(&self, time_period: TimePeriod) -> Result> { - self.base.get_json(&format!("/api/v1/mining/blocks/fees/{time_period}")) + self.base + .get_json(&format!("/api/v1/mining/blocks/fees/{time_period}")) } /// Block rewards @@ -5885,7 +8166,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/blocks/rewards/{time_period}` pub fn get_block_rewards(&self, 
time_period: TimePeriod) -> Result> { - self.base.get_json(&format!("/api/v1/mining/blocks/rewards/{time_period}")) + self.base + .get_json(&format!("/api/v1/mining/blocks/rewards/{time_period}")) } /// Block sizes and weights @@ -5896,7 +8178,9 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/blocks/sizes-weights/{time_period}` pub fn get_block_sizes_weights(&self, time_period: TimePeriod) -> Result { - self.base.get_json(&format!("/api/v1/mining/blocks/sizes-weights/{time_period}")) + self.base.get_json(&format!( + "/api/v1/mining/blocks/sizes-weights/{time_period}" + )) } /// Block by timestamp @@ -5907,7 +8191,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/blocks/timestamp/{timestamp}` pub fn get_block_by_timestamp(&self, timestamp: Timestamp) -> Result { - self.base.get_json(&format!("/api/v1/mining/blocks/timestamp/{timestamp}")) + self.base + .get_json(&format!("/api/v1/mining/blocks/timestamp/{timestamp}")) } /// Difficulty adjustments (all time) @@ -5918,7 +8203,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/difficulty-adjustments` pub fn get_difficulty_adjustments(&self) -> Result> { - self.base.get_json(&format!("/api/v1/mining/difficulty-adjustments")) + self.base + .get_json(&format!("/api/v1/mining/difficulty-adjustments")) } /// Difficulty adjustments @@ -5928,8 +8214,13 @@ impl BrkClient { /// *[Mempool.space docs](https://mempool.space/docs/api/rest#get-difficulty-adjustments)* /// /// Endpoint: `GET /api/v1/mining/difficulty-adjustments/{time_period}` - pub fn get_difficulty_adjustments_by_period(&self, time_period: TimePeriod) -> Result> { - self.base.get_json(&format!("/api/v1/mining/difficulty-adjustments/{time_period}")) + pub fn get_difficulty_adjustments_by_period( + &self, + time_period: TimePeriod, + ) -> Result> { + self.base.get_json(&format!( + "/api/v1/mining/difficulty-adjustments/{time_period}" + )) } /// Network hashrate (all time) @@ -5951,7 +8242,8 @@ impl BrkClient { /// /// Endpoint: `GET 
/api/v1/mining/hashrate/{time_period}` pub fn get_hashrate_by_period(&self, time_period: TimePeriod) -> Result { - self.base.get_json(&format!("/api/v1/mining/hashrate/{time_period}")) + self.base + .get_json(&format!("/api/v1/mining/hashrate/{time_period}")) } /// Mining pool details @@ -5984,7 +8276,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/pools/{time_period}` pub fn get_pool_stats(&self, time_period: TimePeriod) -> Result { - self.base.get_json(&format!("/api/v1/mining/pools/{time_period}")) + self.base + .get_json(&format!("/api/v1/mining/pools/{time_period}")) } /// Mining reward statistics @@ -5995,7 +8288,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/mining/reward-stats/{block_count}` pub fn get_reward_stats(&self, block_count: i64) -> Result { - self.base.get_json(&format!("/api/v1/mining/reward-stats/{block_count}")) + self.base + .get_json(&format!("/api/v1/mining/reward-stats/{block_count}")) } /// Validate address @@ -6006,7 +8300,8 @@ impl BrkClient { /// /// Endpoint: `GET /api/v1/validate-address/{address}` pub fn validate_address(&self, address: &str) -> Result { - self.base.get_json(&format!("/api/v1/validate-address/{address}")) + self.base + .get_json(&format!("/api/v1/validate-address/{address}")) } /// Health check @@ -6035,5 +8330,4 @@ impl BrkClient { pub fn get_version(&self) -> Result { self.base.get_json(&format!("/version")) } - } diff --git a/crates/brk_computer/Cargo.toml b/crates/brk_computer/Cargo.toml index 7ad8d856f..6e2a1a952 100644 --- a/crates/brk_computer/Cargo.toml +++ b/crates/brk_computer/Cargo.toml @@ -32,6 +32,5 @@ vecdb = { workspace = true } [dev-dependencies] brk_alloc = { workspace = true } -plotters = "0.3" brk_bencher = { workspace = true } color-eyre = { workspace = true } diff --git a/crates/brk_computer/src/internal/compute.rs b/crates/brk_computer/src/internal/compute.rs index 2237d76b1..3705b678d 100644 --- a/crates/brk_computer/src/internal/compute.rs +++ 
b/crates/brk_computer/src/internal/compute.rs @@ -70,7 +70,9 @@ where }}; } - let index = validate_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90); + let index = validate_vec!( + first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90 + ); let needs_first = first.is_some(); let needs_last = last.is_some(); @@ -298,44 +300,9 @@ where }; } - write_vec!(first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90); - - Ok(()) -} - -/// Compute cumulative extension from a source vec. -/// -/// Used when only cumulative needs to be extended from an existing source. -pub fn compute_cumulative_extend( - max_from: I, - source: &impl IterableVec, - cumulative: &mut EagerVec>, - exit: &Exit, -) -> Result<()> -where - I: VecIndex, - T: ComputedVecValue + JsonSchema, -{ - cumulative.validate_computed_version_or_reset(source.version())?; - - let index = max_from.min(I::from(cumulative.len())); - - let mut cumulative_val = index - .decremented() - .map_or(T::from(0_usize), |idx| cumulative.iter().get_unwrap(idx)); - - source - .iter() - .enumerate() - .skip(index.to_usize()) - .try_for_each(|(i, v)| -> Result<()> { - cumulative_val += v; - cumulative.truncate_push_at(i, cumulative_val)?; - Ok(()) - })?; - - let _lock = exit.lock(); - cumulative.write()?; + write_vec!( + first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90 + ); Ok(()) } diff --git a/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs b/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs index 7a057d89f..2fbb11bd7 100644 --- a/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs +++ b/crates/brk_computer/src/internal/multi/from_tx/value_dollars.rs @@ -12,16 +12,16 @@ use brk_types::{ }; use derive_more::{Deref, DerefMut}; use vecdb::{ - AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, - IterableBoxedVec, IterableCloneableVec, 
IterableVec, LazyVecFrom3, + Database, EagerVec, Exit, ImportableVec, IterableBoxedVec, IterableCloneableVec, LazyVecFrom3, }; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ CumulativeVec, Full, LazyBinaryTransformFull, LazyDateDerivedFull, LazyFull, SatsTimesClosePrice, Stats, }, + ComputeIndexes, }; /// Lazy dollars at TxIndex: `sats * price[height]` @@ -137,7 +137,11 @@ impl ValueDollarsFromTxFull { exit: &Exit, ) -> Result<()> { // Compute height cumulative by summing lazy height.sum values - self.compute_height_cumulative(starting_indexes.height, exit)?; + self.height_cumulative.0.compute_cumulative( + starting_indexes.height, + &self.height.sum, + exit, + )?; // Compute dateindex stats by aggregating lazy height stats self.dateindex.compute( @@ -150,30 +154,6 @@ impl ValueDollarsFromTxFull { Ok(()) } - - /// Compute cumulative USD by summing `sum_sats[h] * price[h]` for all heights. - fn compute_height_cumulative(&mut self, max_from: Height, exit: &Exit) -> Result<()> { - let starting_height = max_from.min(Height::from(self.height_cumulative.0.len())); - - let mut cumulative = starting_height.decremented().map_or(Dollars::ZERO, |h| { - self.height_cumulative.0.iter().get_unwrap(h) - }); - - let mut sum_iter = self.height.sum.iter(); - let start_idx = *starting_height as usize; - let end_idx = sum_iter.len(); - - for h in start_idx..end_idx { - let sum_usd = sum_iter.get_unwrap(Height::from(h)); - cumulative += sum_usd; - self.height_cumulative.0.truncate_push_at(h, cumulative)?; - } - - let _lock = exit.lock(); - self.height_cumulative.0.write()?; - - Ok(()) - } } fn create_lazy_txindex( diff --git a/crates/brk_computer/src/internal/multi/height_derived/full.rs b/crates/brk_computer/src/internal/multi/height_derived/full.rs index 42f62f0e2..f95f5160a 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/full.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/full.rs @@ -9,11 +9,11 @@ use schemars::JsonSchema; use 
vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec}; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ - ComputedVecValue, CumulativeVec, LazyDateDerivedFull, Full, LazyFull, NumericValue, - compute_cumulative_extend, + ComputedVecValue, CumulativeVec, Full, LazyDateDerivedFull, LazyFull, NumericValue, }, + ComputeIndexes, }; #[derive(Clone, Deref, DerefMut, Traversable)] @@ -102,6 +102,9 @@ where height_source: &impl IterableVec, exit: &Exit, ) -> Result<()> { - compute_cumulative_extend(max_from, height_source, &mut self.height_cumulative.0, exit) + self.height_cumulative + .0 + .compute_cumulative(max_from, height_source, exit)?; + Ok(()) } } diff --git a/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs b/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs index f73fc40ce..673f7072f 100644 --- a/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs +++ b/crates/brk_computer/src/internal/multi/height_derived/sum_cum.rs @@ -12,11 +12,11 @@ use vecdb::{ }; use crate::{ - ComputeIndexes, indexes, + indexes, internal::{ ComputedVecValue, CumulativeVec, LazyDateDerivedSumCum, LazySumCum, NumericValue, SumCum, - compute_cumulative_extend, }, + ComputeIndexes, }; #[derive(Clone, Deref, DerefMut, Traversable)] @@ -99,7 +99,10 @@ where source: &impl IterableVec, exit: &Exit, ) -> Result<()> { - compute_cumulative_extend(max_from, source, &mut self.height_cumulative.0, exit) + self.height_cumulative + .0 + .compute_cumulative(max_from, source, exit)?; + Ok(()) } fn compute_dateindex_sum_cum( diff --git a/crates/brk_computer/src/internal/single/group/sum_cum.rs b/crates/brk_computer/src/internal/single/group/sum_cum.rs index 418add0da..ae00e9aed 100644 --- a/crates/brk_computer/src/internal/single/group/sum_cum.rs +++ b/crates/brk_computer/src/internal/single/group/sum_cum.rs @@ -1,7 +1,10 @@ use brk_error::Result; use brk_traversable::Traversable; use schemars::JsonSchema; -use vecdb::{AnyVec, 
Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, VecValue, Version}; +use vecdb::{ + AnyVec, Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec, VecIndex, + VecValue, Version, +}; use crate::internal::{ComputedVecValue, CumulativeVec, SumVec}; @@ -48,7 +51,7 @@ impl SumCum { first_indexes, count_indexes, exit, - 0, // min_skip_count + 0, // min_skip_count None, // first None, // last None, // min @@ -64,16 +67,6 @@ impl SumCum { ) } - /// Extend cumulative from an existing source vec. - pub fn extend_cumulative( - &mut self, - max_from: I, - source: &impl IterableVec, - exit: &Exit, - ) -> Result<()> { - crate::internal::compute_cumulative_extend(max_from, source, &mut self.cumulative.0, exit) - } - pub fn len(&self) -> usize { self.sum.0.len().min(self.cumulative.0.len()) } diff --git a/crates/brk_logger/Cargo.toml b/crates/brk_logger/Cargo.toml index c7ec1a3c7..04753b827 100644 --- a/crates/brk_logger/Cargo.toml +++ b/crates/brk_logger/Cargo.toml @@ -9,9 +9,7 @@ repository.workspace = true [dependencies] jiff = { workspace = true } -logroller = "0.1" owo-colors = "4.2.3" tracing = { workspace = true } -tracing-appender = "0.2" tracing-log = "0.2" tracing-subscriber = { version = "0.3", default-features = false, features = ["fmt", "env-filter", "std"] } diff --git a/crates/brk_logger/src/format.rs b/crates/brk_logger/src/format.rs new file mode 100644 index 000000000..9bf39c9cc --- /dev/null +++ b/crates/brk_logger/src/format.rs @@ -0,0 +1,168 @@ +use std::fmt::Write; + +use jiff::{Timestamp, tz}; +use owo_colors::OwoColorize; +use tracing::{Event, Level, Subscriber, field::Field}; +use tracing_subscriber::{ + fmt::{FmtContext, FormatEvent, FormatFields, format::Writer}, + registry::LookupSpan, +}; + +// Don't remove, used to know the target of unwanted logs +const WITH_TARGET: bool = false; +// const WITH_TARGET: bool = true; + +const fn level_str(level: Level) -> &'static str { + match level { + Level::ERROR => 
"error", + Level::WARN => "warn ", + Level::INFO => "info ", + Level::DEBUG => "debug", + Level::TRACE => "trace", + } +} + +pub struct Formatter; + +impl FormatEvent for Formatter +where + S: Subscriber + for<'a> LookupSpan<'a>, + N: for<'a> FormatFields<'a> + 'static, +{ + fn format_event( + &self, + _ctx: &FmtContext<'_, S, N>, + mut writer: Writer<'_>, + event: &Event<'_>, + ) -> std::fmt::Result { + let ts = Timestamp::now() + .to_zoned(tz::TimeZone::system()) + .strftime("%Y-%m-%d %H:%M:%S") + .to_string(); + + let level = *event.metadata().level(); + let level_str = level_str(level); + + if ANSI { + let level_colored = match level { + Level::ERROR => level_str.red().to_string(), + Level::WARN => level_str.yellow().to_string(), + Level::INFO => level_str.green().to_string(), + Level::DEBUG => level_str.blue().to_string(), + Level::TRACE => level_str.cyan().to_string(), + }; + if WITH_TARGET { + write!( + writer, + "{} {} {} {level_colored} ", + ts.bright_black(), + event.metadata().target(), + "-".bright_black(), + )?; + } else { + write!( + writer, + "{} {} {level_colored} ", + ts.bright_black(), + "-".bright_black() + )?; + } + } else if WITH_TARGET { + write!(writer, "{ts} {} - {level_str} ", event.metadata().target())?; + } else { + write!(writer, "{ts} - {level_str} ")?; + } + + let mut visitor = FieldVisitor::::new(); + event.record(&mut visitor); + write!(writer, "{}", visitor.finish())?; + writeln!(writer) + } +} + +struct FieldVisitor { + result: String, + status: Option, + uri: Option, + latency: Option, +} + +impl FieldVisitor { + fn new() -> Self { + Self { + result: String::new(), + status: None, + uri: None, + latency: None, + } + } + + fn finish(self) -> String { + if let Some(status) = self.status { + let status_str = if ANSI { + match status { + 200..=299 => status.green().to_string(), + 300..=399 => status.bright_black().to_string(), + _ => status.red().to_string(), + } + } else { + status.to_string() + }; + + let uri = 
self.uri.as_deref().unwrap_or(""); + let latency = self.latency.as_deref().unwrap_or(""); + + if ANSI { + format!("{status_str} {uri} {}", latency.bright_black()) + } else { + format!("{status_str} {uri} {latency}") + } + } else { + self.result + } + } +} + +impl tracing::field::Visit for FieldVisitor { + fn record_u64(&mut self, field: &Field, value: u64) { + let name = field.name(); + if name == "status" { + self.status = Some(value); + } else if !name.starts_with("log.") { + let _ = write!(self.result, "{}={} ", name, value); + } + } + + fn record_i64(&mut self, field: &Field, value: i64) { + let name = field.name(); + if !name.starts_with("log.") { + let _ = write!(self.result, "{}={} ", name, value); + } + } + + fn record_str(&mut self, field: &Field, value: &str) { + let name = field.name(); + if name == "uri" { + self.uri = Some(value.to_string()); + } else if name == "message" { + let _ = write!(self.result, "{value}"); + } else if !name.starts_with("log.") { + let _ = write!(self.result, "{}={} ", name, value); + } + } + + fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { + let name = field.name(); + match name { + "uri" => self.uri = Some(format!("{value:?}")), + "latency" => self.latency = Some(format!("{value:?}")), + "message" => { + let _ = write!(self.result, "{value:?}"); + } + _ if name.starts_with("log.") => {} + _ => { + let _ = write!(self.result, "{}={:?} ", name, value); + } + } + } +} diff --git a/crates/brk_logger/src/hook.rs b/crates/brk_logger/src/hook.rs new file mode 100644 index 000000000..d1acd4f58 --- /dev/null +++ b/crates/brk_logger/src/hook.rs @@ -0,0 +1,30 @@ +use std::{fmt::Write, sync::OnceLock}; + +use tracing::{Event, Subscriber, field::Field}; + +type LogHook = Box; + +pub static LOG_HOOK: OnceLock = OnceLock::new(); + +pub struct HookLayer; + +impl tracing_subscriber::Layer for HookLayer { + fn on_event(&self, event: &Event<'_>, _: tracing_subscriber::layer::Context<'_, S>) { + if let Some(hook) = 
LOG_HOOK.get() { + let mut msg = String::new(); + event.record(&mut MessageVisitor(&mut msg)); + hook(&msg); + } + } +} + +struct MessageVisitor<'a>(&'a mut String); + +impl tracing::field::Visit for MessageVisitor<'_> { + fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { + if field.name() == "message" { + self.0.clear(); + let _ = write!(self.0, "{value:?}"); + } + } +} diff --git a/crates/brk_logger/src/lib.rs b/crates/brk_logger/src/lib.rs index a40e6c29c..3fd0aa8b4 100644 --- a/crates/brk_logger/src/lib.rs +++ b/crates/brk_logger/src/lib.rs @@ -1,215 +1,21 @@ #![doc = include_str!("../README.md")] -use std::{fmt::Write as _, io, path::Path, sync::OnceLock}; +mod format; +mod hook; +mod rate_limit; -use jiff::{Timestamp, tz}; -use logroller::{LogRollerBuilder, Rotation, RotationSize}; -use owo_colors::OwoColorize; -use tracing::{Event, Level, Subscriber, field::Field}; -use tracing_appender::non_blocking::WorkerGuard; -use tracing_subscriber::{ - EnvFilter, - fmt::{self, FmtContext, FormatEvent, FormatFields, format::Writer}, - layer::SubscriberExt, - registry::LookupSpan, - util::SubscriberInitExt, -}; +use std::{io, path::Path, time::Duration}; -type LogHook = Box; +use tracing_subscriber::{EnvFilter, fmt, layer::SubscriberExt, util::SubscriberInitExt}; -static GUARD: OnceLock = OnceLock::new(); -static LOG_HOOK: OnceLock = OnceLock::new(); +use format::Formatter; +use hook::{HookLayer, LOG_HOOK}; +use rate_limit::RateLimitedFile; -const MAX_LOG_FILES: u64 = 5; -const MAX_FILE_SIZE_MB: u64 = 42; - -// Don't remove, used to know the target of unwanted logs -const WITH_TARGET: bool = false; -// const WITH_TARGET: bool = true; - -const fn level_str(level: Level) -> &'static str { - match level { - Level::ERROR => "error", - Level::WARN => "warn ", - Level::INFO => "info ", - Level::DEBUG => "debug", - Level::TRACE => "trace", - } -} - -struct Formatter; - -/// Visitor that collects structured fields for colored formatting -struct 
FieldVisitor { - result: String, - status: Option, - uri: Option, - latency: Option, -} - -impl FieldVisitor { - fn new() -> Self { - Self { - result: String::new(), - status: None, - uri: None, - latency: None, - } - } - - fn finish(self) -> String { - // Format HTTP-style log if we have status - if let Some(status) = self.status { - let status_str = if ANSI { - match status { - 200..=299 => status.green().to_string(), - 300..=399 => status.bright_black().to_string(), - _ => status.red().to_string(), - } - } else { - status.to_string() - }; - - let uri = self.uri.as_deref().unwrap_or(""); - let latency = self.latency.as_deref().unwrap_or(""); - - if ANSI { - format!("{status_str} {uri} {}", latency.bright_black()) - } else { - format!("{status_str} {uri} {latency}") - } - } else { - self.result - } - } -} - -impl tracing::field::Visit for FieldVisitor { - fn record_u64(&mut self, field: &Field, value: u64) { - let name = field.name(); - if name == "status" { - self.status = Some(value); - } else if !name.starts_with("log.") { - let _ = write!(self.result, "{}={} ", name, value); - } - } - - fn record_i64(&mut self, field: &Field, value: i64) { - let name = field.name(); - if !name.starts_with("log.") { - let _ = write!(self.result, "{}={} ", name, value); - } - } - - fn record_str(&mut self, field: &Field, value: &str) { - let name = field.name(); - if name == "uri" { - self.uri = Some(value.to_string()); - } else if name == "message" { - let _ = write!(self.result, "{value}"); - } else if !name.starts_with("log.") { - let _ = write!(self.result, "{}={} ", name, value); - } - } - - fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { - let name = field.name(); - match name { - "uri" => self.uri = Some(format!("{value:?}")), - "latency" => self.latency = Some(format!("{value:?}")), - "message" => { - let _ = write!(self.result, "{value:?}"); - } - _ if name.starts_with("log.") => {} - _ => { - let _ = write!(self.result, "{}={:?} ", name, 
value); - } - } - } -} - -impl FormatEvent for Formatter -where - S: Subscriber + for<'a> LookupSpan<'a>, - N: for<'a> FormatFields<'a> + 'static, -{ - fn format_event( - &self, - _ctx: &FmtContext<'_, S, N>, - mut writer: Writer<'_>, - event: &Event<'_>, - ) -> std::fmt::Result { - let ts = Timestamp::now() - .to_zoned(tz::TimeZone::system()) - .strftime("%Y-%m-%d %H:%M:%S") - .to_string(); - - let level = *event.metadata().level(); - let level_str = level_str(level); - - if ANSI { - let level_colored = match level { - Level::ERROR => level_str.red().to_string(), - Level::WARN => level_str.yellow().to_string(), - Level::INFO => level_str.green().to_string(), - Level::DEBUG => level_str.blue().to_string(), - Level::TRACE => level_str.cyan().to_string(), - }; - if WITH_TARGET { - write!( - writer, - "{} {} {} {level_colored} ", - ts.bright_black(), - event.metadata().target(), - "-".bright_black(), - )?; - } else { - write!( - writer, - "{} {} {level_colored} ", - ts.bright_black(), - "-".bright_black() - )?; - } - } else if WITH_TARGET { - write!(writer, "{ts} {} - {level_str} ", event.metadata().target())?; - } else { - write!(writer, "{ts} - {level_str} ")?; - } - - let mut visitor = FieldVisitor::::new(); - event.record(&mut visitor); - write!(writer, "{}", visitor.finish())?; - writeln!(writer) - } -} - -struct HookLayer; - -impl tracing_subscriber::Layer for HookLayer { - fn on_event(&self, event: &Event<'_>, _: tracing_subscriber::layer::Context<'_, S>) { - if let Some(hook) = LOG_HOOK.get() { - let mut msg = String::new(); - event.record(&mut MessageVisitor(&mut msg)); - hook(&msg); - } - } -} - -struct MessageVisitor<'a>(&'a mut String); - -impl tracing::field::Visit for MessageVisitor<'_> { - fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) { - use std::fmt::Write; - if field.name() == "message" { - self.0.clear(); - let _ = write!(self.0, "{value:?}"); - } - } -} +/// Days to keep log files before cleanup +const 
MAX_LOG_AGE_DAYS: u64 = 7; pub fn init(path: Option<&Path>) -> io::Result<()> { - // Bridge log crate to tracing (for vecdb and other log-based crates) tracing_log::LogTracer::init().ok(); #[cfg(debug_assertions)] @@ -217,12 +23,11 @@ pub fn init(path: Option<&Path>) -> io::Result<()> { #[cfg(not(debug_assertions))] const DEFAULT_LEVEL: &str = "info"; - let default_filter = format!( - "{DEFAULT_LEVEL},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" - ); - - let filter = - EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(default_filter)); + let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| { + EnvFilter::new(format!( + "{DEFAULT_LEVEL},bitcoin=off,bitcoincore-rpc=off,fjall=off,brk_fjall=off,lsm_tree=off,brk_rolldown=off,rolldown=off,tracing=off,aide=off,rustls=off,notify=off,oxc_resolver=off,tower_http=off" + )) + }); let registry = tracing_subscriber::registry() .with(filter) @@ -231,25 +36,20 @@ pub fn init(path: Option<&Path>) -> io::Result<()> { if let Some(path) = path { let dir = path.parent().unwrap_or(Path::new(".")); - let filename = path + let prefix = path .file_name() .and_then(|s| s.to_str()) .unwrap_or("app.log"); - let roller = LogRollerBuilder::new(dir, Path::new(filename)) - .rotation(Rotation::SizeBased(RotationSize::MB(MAX_FILE_SIZE_MB))) - .max_keep_files(MAX_LOG_FILES) - .build() - .map_err(io::Error::other)?; + cleanup_old_logs(dir, prefix); - let (non_blocking, guard) = tracing_appender::non_blocking(roller); - GUARD.set(guard).ok(); + let writer = RateLimitedFile::new(dir, prefix); registry .with( fmt::layer() .event_format(Formatter::) - .with_writer(non_blocking), + .with_writer(writer), ) .init(); } else { @@ -260,7 +60,6 @@ pub fn init(path: Option<&Path>) -> io::Result<()> { } /// Register a hook that gets called for every log message. -/// Can only be called once. 
pub fn register_hook(hook: F) -> Result<(), &'static str> where F: Fn(&str) + Send + Sync + 'static, @@ -269,3 +68,29 @@ where .set(Box::new(hook)) .map_err(|_| "Hook already registered") } + +fn cleanup_old_logs(dir: &Path, prefix: &str) { + let max_age = Duration::from_secs(MAX_LOG_AGE_DAYS * 24 * 60 * 60); + let Ok(entries) = std::fs::read_dir(dir) else { + return; + }; + + for entry in entries.flatten() { + let path = entry.path(); + let Some(name) = path.file_name().and_then(|n| n.to_str()) else { + continue; + }; + + if !name.starts_with(prefix) || name == prefix { + continue; + } + + if let Ok(meta) = path.metadata() + && let Ok(modified) = meta.modified() + && let Ok(age) = modified.elapsed() + && age > max_age + { + let _ = std::fs::remove_file(&path); + } + } +} diff --git a/crates/brk_logger/src/rate_limit.rs b/crates/brk_logger/src/rate_limit.rs new file mode 100644 index 000000000..065404e0c --- /dev/null +++ b/crates/brk_logger/src/rate_limit.rs @@ -0,0 +1,90 @@ +use std::{ + fs::OpenOptions, + io::{self, Write}, + path::PathBuf, + sync::{ + Arc, + atomic::{AtomicU64, Ordering}, + }, + time::{SystemTime, UNIX_EPOCH}, +}; + +use jiff::{Timestamp, tz}; +use tracing_subscriber::fmt::MakeWriter; + +const MAX_WRITES_PER_SEC: u64 = 100; + +struct Inner { + dir: PathBuf, + prefix: String, + count: AtomicU64, + last_second: AtomicU64, +} + +impl Inner { + fn can_write(&self) -> bool { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs(); + + let last = self.last_second.load(Ordering::Relaxed); + if now != last { + self.last_second.store(now, Ordering::Relaxed); + self.count.store(1, Ordering::Relaxed); + true + } else { + self.count.fetch_add(1, Ordering::Relaxed) < MAX_WRITES_PER_SEC + } + } + + fn path(&self) -> PathBuf { + let date = Timestamp::now() + .to_zoned(tz::TimeZone::system()) + .strftime("%Y-%m-%d") + .to_string(); + self.dir.join(format!("{}.{}", self.prefix, date)) + } +} + +#[derive(Clone)] +pub struct 
RateLimitedFile(Arc); + +impl RateLimitedFile { + pub fn new(dir: &std::path::Path, prefix: &str) -> Self { + Self(Arc::new(Inner { + dir: dir.to_path_buf(), + prefix: prefix.to_string(), + count: AtomicU64::new(0), + last_second: AtomicU64::new(0), + })) + } +} + +pub struct FileWriter(Arc); + +impl Write for FileWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + if !self.0.can_write() { + return Ok(buf.len()); + } + + OpenOptions::new() + .create(true) + .append(true) + .open(self.0.path())? + .write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl<'a> MakeWriter<'a> for RateLimitedFile { + type Writer = FileWriter; + + fn make_writer(&'a self) -> Self::Writer { + FileWriter(Arc::clone(&self.0)) + } +} diff --git a/crates/brk_playground/src/lib.rs b/crates/brk_playground/src/lib.rs index 4335be26b..3b5f27f1d 100644 --- a/crates/brk_playground/src/lib.rs +++ b/crates/brk_playground/src/lib.rs @@ -14,13 +14,15 @@ pub mod oracle; pub mod render; pub mod signal; -pub use anchors::{get_anchor_ohlc, get_anchor_range, Ohlc}; -pub use conditions::{out_bits, tx_bits, MappedOutputConditions}; -pub use constants::{HeatmapFilter, NUM_BINS, ROUND_USD_AMOUNTS}; +pub use anchors::{Ohlc, get_anchor_ohlc, get_anchor_range}; +pub use conditions::{MappedOutputConditions, out_bits, tx_bits}; +pub use constants::{NUM_BINS, OutputFilter, ROUND_USD_AMOUNTS}; pub use filters::FILTERS; +pub use histogram::load_or_compute_output_conditions; pub use oracle::{ - derive_daily_ohlc, derive_daily_ohlc_with_confidence, derive_height_price, - derive_ohlc_from_height_prices, derive_price_from_histogram, OracleConfig, OracleResult, + HeightPriceResult, OracleConfig, OracleResult, derive_daily_ohlc, + derive_daily_ohlc_with_confidence, derive_height_price, derive_height_price_with_confidence, + derive_ohlc_from_height_prices, derive_ohlc_from_height_prices_with_confidence, + derive_price_from_histogram, }; pub use signal::{compute_expected_bins_per_day, 
usd_to_bin}; -pub use histogram::load_or_compute_output_conditions; diff --git a/crates/brk_server/Cargo.toml b/crates/brk_server/Cargo.toml index 0dfedd5a2..f0dc26f42 100644 --- a/crates/brk_server/Cargo.toml +++ b/crates/brk_server/Cargo.toml @@ -7,10 +7,13 @@ license.workspace = true homepage.workspace = true repository.workspace = true +[features] +bindgen = ["dep:brk_bindgen"] + [dependencies] aide = { workspace = true } axum = { workspace = true } -brk_bindgen = { workspace = true } +brk_bindgen = { workspace = true, optional = true } brk_computer = { workspace = true } brk_error = { workspace = true, features = ["jiff", "serde_json", "tokio", "vecdb"] } brk_fetcher = { workspace = true } diff --git a/crates/brk_server/src/lib.rs b/crates/brk_server/src/lib.rs index 99a612885..f42708969 100644 --- a/crates/brk_server/src/lib.rs +++ b/crates/brk_server/src/lib.rs @@ -1,7 +1,6 @@ #![doc = include_str!("../README.md")] use std::{ - panic, path::PathBuf, sync::Arc, time::{Duration, Instant}, @@ -62,6 +61,9 @@ impl Server { pub async fn serve(self, port: Option) -> brk_error::Result<()> { let state = self.0; + #[cfg(feature = "bindgen")] + let vecs = state.query.inner().vecs(); + let compression_layer = CompressionLayer::new().br(true).gzip(true).zstd(true); let response_uri_layer = axum::middleware::from_fn( @@ -96,8 +98,6 @@ impl Server { ) .on_eos(()); - let vecs = state.query.inner().vecs(); - let website_router = brk_website::router(state.website.clone()); let mut router = ApiRouter::new().add_api_routes(); if !state.website.is_enabled() { @@ -141,28 +141,33 @@ impl Server { let mut openapi = create_openapi(); let router = router.finish_api(&mut openapi); - let workspace_root: PathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .parent() - .and_then(|p| p.parent()) - .unwrap() - .into(); - let output_paths = brk_bindgen::ClientOutputPaths::new() - .rust(workspace_root.join("crates/brk_client/src/lib.rs")) - 
.javascript(workspace_root.join("modules/brk-client/index.js")) - .python(workspace_root.join("packages/brk_client/brk_client/__init__.py")); + #[cfg(feature = "bindgen")] + { + let workspace_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .parent() + .and_then(|p| p.parent()) + .unwrap() + .to_path_buf(); + + let output_paths = brk_bindgen::ClientOutputPaths::new() + .rust(workspace_root.join("crates/brk_client/src/lib.rs")) + .javascript(workspace_root.join("modules/brk-client/index.js")) + .python(workspace_root.join("packages/brk_client/brk_client/__init__.py")); + + let openapi_json = serde_json::to_string(&openapi).unwrap(); + + let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + brk_bindgen::generate_clients(vecs, &openapi_json, &output_paths) + })); + + match result { + Ok(Ok(())) => info!("Generated clients"), + Ok(Err(e)) => error!("Failed to generate clients: {e}"), + Err(_) => error!("Client generation panicked"), + } + } let api_json = Arc::new(ApiJson::new(&openapi)); - let openapi_json = serde_json::to_string(&openapi).unwrap(); - - let result = panic::catch_unwind(panic::AssertUnwindSafe(|| { - brk_bindgen::generate_clients(vecs, &openapi_json, &output_paths) - })); - - match result { - Ok(Ok(())) => info!("Generated clients"), - Ok(Err(e)) => error!("Failed to generate clients: {e}"), - Err(_) => error!("Client generation panicked"), - } let router = router .layer(Extension(Arc::new(openapi))) diff --git a/modules/brk-client/index.js b/modules/brk-client/index.js index 099f40253..aec94d217 100644 --- a/modules/brk-client/index.js +++ b/modules/brk-client/index.js @@ -839,9 +839,11 @@ class BrkError extends Error { /** * @template T * @typedef {Object} MetricData + * @property {number} version - Version of the metric data * @property {number} total - Total number of data points * @property {number} start - Start index (inclusive) * @property {number} end - End index (exclusive) + * @property {string} stamp - ISO 8601 
timestamp of when the response was generated * @property {T[]} data - The metric data */ /** @typedef {MetricData} AnyMetricData */ @@ -2030,17 +2032,17 @@ function createBitcoinPattern(client, acc) { */ function createClassAveragePricePattern(client, acc) { return { - _2015: createMetricPattern4(client, _m(acc, '2015_returns')), - _2016: createMetricPattern4(client, _m(acc, '2016_returns')), - _2017: createMetricPattern4(client, _m(acc, '2017_returns')), - _2018: createMetricPattern4(client, _m(acc, '2018_returns')), - _2019: createMetricPattern4(client, _m(acc, '2019_returns')), - _2020: createMetricPattern4(client, _m(acc, '2020_returns')), - _2021: createMetricPattern4(client, _m(acc, '2021_returns')), - _2022: createMetricPattern4(client, _m(acc, '2022_returns')), - _2023: createMetricPattern4(client, _m(acc, '2023_returns')), - _2024: createMetricPattern4(client, _m(acc, '2024_returns')), - _2025: createMetricPattern4(client, _m(acc, '2025_returns')), + _2015: createMetricPattern4(client, _m(acc, '2015_average_price')), + _2016: createMetricPattern4(client, _m(acc, '2016_average_price')), + _2017: createMetricPattern4(client, _m(acc, '2017_average_price')), + _2018: createMetricPattern4(client, _m(acc, '2018_average_price')), + _2019: createMetricPattern4(client, _m(acc, '2019_average_price')), + _2020: createMetricPattern4(client, _m(acc, '2020_average_price')), + _2021: createMetricPattern4(client, _m(acc, '2021_average_price')), + _2022: createMetricPattern4(client, _m(acc, '2022_average_price')), + _2023: createMetricPattern4(client, _m(acc, '2023_average_price')), + _2024: createMetricPattern4(client, _m(acc, '2024_average_price')), + _2025: createMetricPattern4(client, _m(acc, '2025_average_price')), }; } @@ -2083,41 +2085,6 @@ function createDollarsPattern(client, acc) { }; } -/** - * @typedef {Object} RelativePattern2 - * @property {MetricPattern1} negUnrealizedLossRelToOwnMarketCap - * @property {MetricPattern1} 
negUnrealizedLossRelToOwnTotalUnrealizedPnl - * @property {MetricPattern1} netUnrealizedPnlRelToOwnMarketCap - * @property {MetricPattern1} netUnrealizedPnlRelToOwnTotalUnrealizedPnl - * @property {MetricPattern1} supplyInLossRelToOwnSupply - * @property {MetricPattern1} supplyInProfitRelToOwnSupply - * @property {MetricPattern1} unrealizedLossRelToOwnMarketCap - * @property {MetricPattern1} unrealizedLossRelToOwnTotalUnrealizedPnl - * @property {MetricPattern1} unrealizedProfitRelToOwnMarketCap - * @property {MetricPattern1} unrealizedProfitRelToOwnTotalUnrealizedPnl - */ - -/** - * Create a RelativePattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {RelativePattern2} - */ -function createRelativePattern2(client, acc) { - return { - negUnrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')), - negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')), - netUnrealizedPnlRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')), - netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')), - supplyInLossRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), - supplyInProfitRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), - unrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')), - unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')), - unrealizedProfitRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')), - unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 
'unrealized_profit_rel_to_own_total_unrealized_pnl')), - }; -} - /** * @typedef {Object} RelativePattern * @property {MetricPattern1} negUnrealizedLossRelToMarketCap @@ -2153,6 +2120,41 @@ function createRelativePattern(client, acc) { }; } +/** + * @typedef {Object} RelativePattern2 + * @property {MetricPattern1} negUnrealizedLossRelToOwnMarketCap + * @property {MetricPattern1} negUnrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern1} netUnrealizedPnlRelToOwnMarketCap + * @property {MetricPattern1} netUnrealizedPnlRelToOwnTotalUnrealizedPnl + * @property {MetricPattern1} supplyInLossRelToOwnSupply + * @property {MetricPattern1} supplyInProfitRelToOwnSupply + * @property {MetricPattern1} unrealizedLossRelToOwnMarketCap + * @property {MetricPattern1} unrealizedLossRelToOwnTotalUnrealizedPnl + * @property {MetricPattern1} unrealizedProfitRelToOwnMarketCap + * @property {MetricPattern1} unrealizedProfitRelToOwnTotalUnrealizedPnl + */ + +/** + * Create a RelativePattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {RelativePattern2} + */ +function createRelativePattern2(client, acc) { + return { + negUnrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')), + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')), + netUnrealizedPnlRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')), + supplyInLossRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')), + supplyInProfitRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')), + unrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 
'unrealized_loss_rel_to_own_market_cap')), + unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')), + unrealizedProfitRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')), + }; +} + /** * @template T * @typedef {Object} CountPattern2 @@ -2386,6 +2388,35 @@ function create_10yTo12yPattern(client, acc) { }; } +/** + * @typedef {Object} UnrealizedPattern + * @property {MetricPattern1} negUnrealizedLoss + * @property {MetricPattern1} netUnrealizedPnl + * @property {ActiveSupplyPattern} supplyInLoss + * @property {ActiveSupplyPattern} supplyInProfit + * @property {MetricPattern1} totalUnrealizedPnl + * @property {MetricPattern1} unrealizedLoss + * @property {MetricPattern1} unrealizedProfit + */ + +/** + * Create a UnrealizedPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {UnrealizedPattern} + */ +function createUnrealizedPattern(client, acc) { + return { + negUnrealizedLoss: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss')), + netUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl')), + supplyInLoss: createActiveSupplyPattern(client, _m(acc, 'supply_in_loss')), + supplyInProfit: createActiveSupplyPattern(client, _m(acc, 'supply_in_profit')), + totalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'total_unrealized_pnl')), + unrealizedLoss: createMetricPattern1(client, _m(acc, 'unrealized_loss')), + unrealizedProfit: createMetricPattern1(client, _m(acc, 'unrealized_profit')), + }; +} + /** * @typedef {Object} _10yPattern * @property {ActivityPattern2} activity @@ -2415,35 +2446,6 @@ function create_10yPattern(client, acc) { }; } -/** - * @typedef {Object} _0satsPattern2 - * @property {ActivityPattern2} activity 
- * @property {CostBasisPattern} costBasis - * @property {OutputsPattern} outputs - * @property {RealizedPattern} realized - * @property {RelativePattern4} relative - * @property {SupplyPattern2} supply - * @property {UnrealizedPattern} unrealized - */ - -/** - * Create a _0satsPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {_0satsPattern2} - */ -function create_0satsPattern2(client, acc) { - return { - activity: createActivityPattern2(client, acc), - costBasis: createCostBasisPattern(client, acc), - outputs: createOutputsPattern(client, _m(acc, 'utxo_count')), - realized: createRealizedPattern(client, acc), - relative: createRelativePattern4(client, _m(acc, 'supply_in')), - supply: createSupplyPattern2(client, _m(acc, 'supply')), - unrealized: createUnrealizedPattern(client, acc), - }; -} - /** * @typedef {Object} _100btcPattern * @property {ActivityPattern2} activity @@ -2473,35 +2475,6 @@ function create_100btcPattern(client, acc) { }; } -/** - * @typedef {Object} UnrealizedPattern - * @property {MetricPattern1} negUnrealizedLoss - * @property {MetricPattern1} netUnrealizedPnl - * @property {ActiveSupplyPattern} supplyInLoss - * @property {ActiveSupplyPattern} supplyInProfit - * @property {MetricPattern1} totalUnrealizedPnl - * @property {MetricPattern1} unrealizedLoss - * @property {MetricPattern1} unrealizedProfit - */ - -/** - * Create a UnrealizedPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {UnrealizedPattern} - */ -function createUnrealizedPattern(client, acc) { - return { - negUnrealizedLoss: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss')), - netUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl')), - supplyInLoss: createActiveSupplyPattern(client, _m(acc, 'supply_in_loss')), - supplyInProfit: createActiveSupplyPattern(client, _m(acc, 'supply_in_profit')), - totalUnrealizedPnl: 
createMetricPattern1(client, _m(acc, 'total_unrealized_pnl')), - unrealizedLoss: createMetricPattern1(client, _m(acc, 'unrealized_loss')), - unrealizedProfit: createMetricPattern1(client, _m(acc, 'unrealized_profit')), - }; -} - /** * @typedef {Object} PeriodCagrPattern * @property {MetricPattern4} _10y @@ -2531,6 +2504,35 @@ function createPeriodCagrPattern(client, acc) { }; } +/** + * @typedef {Object} _0satsPattern2 + * @property {ActivityPattern2} activity + * @property {CostBasisPattern} costBasis + * @property {OutputsPattern} outputs + * @property {RealizedPattern} realized + * @property {RelativePattern4} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _0satsPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_0satsPattern2} + */ +function create_0satsPattern2(client, acc) { + return { + activity: createActivityPattern2(client, acc), + costBasis: createCostBasisPattern(client, acc), + outputs: createOutputsPattern(client, _m(acc, 'utxo_count')), + realized: createRealizedPattern(client, acc), + relative: createRelativePattern4(client, _m(acc, 'supply_in')), + supply: createSupplyPattern2(client, _m(acc, 'supply')), + unrealized: createUnrealizedPattern(client, acc), + }; +} + /** * @typedef {Object} ActivityPattern2 * @property {BlockCountPattern} coinblocksDestroyed @@ -2624,44 +2626,23 @@ function createCoinbasePattern2(client, acc) { } /** - * @typedef {Object} ActiveSupplyPattern - * @property {MetricPattern1} bitcoin - * @property {MetricPattern1} dollars - * @property {MetricPattern1} sats + * @typedef {Object} CoinbasePattern + * @property {BitcoinPattern} bitcoin + * @property {DollarsPattern} dollars + * @property {DollarsPattern} sats */ /** - * Create a ActiveSupplyPattern pattern node + * Create a CoinbasePattern pattern node * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * 
@returns {ActiveSupplyPattern} + * @returns {CoinbasePattern} */ -function createActiveSupplyPattern(client, acc) { +function createCoinbasePattern(client, acc) { return { - bitcoin: createMetricPattern1(client, _m(acc, 'btc')), - dollars: createMetricPattern1(client, _m(acc, 'usd')), - sats: createMetricPattern1(client, acc), - }; -} - -/** - * @typedef {Object} SegwitAdoptionPattern - * @property {MetricPattern11} base - * @property {MetricPattern2} cumulative - * @property {MetricPattern2} sum - */ - -/** - * Create a SegwitAdoptionPattern pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {SegwitAdoptionPattern} - */ -function createSegwitAdoptionPattern(client, acc) { - return { - base: createMetricPattern11(client, acc), - cumulative: createMetricPattern2(client, _m(acc, 'cumulative')), - sum: createMetricPattern2(client, _m(acc, 'sum')), + bitcoin: createBitcoinPattern(client, _m(acc, 'btc')), + dollars: createDollarsPattern(client, _m(acc, 'usd')), + sats: createDollarsPattern(client, acc), }; } @@ -2708,23 +2689,63 @@ function create_2015Pattern(client, acc) { } /** - * @typedef {Object} CoinbasePattern - * @property {BitcoinPattern} bitcoin - * @property {DollarsPattern} dollars - * @property {DollarsPattern} sats + * @typedef {Object} SegwitAdoptionPattern + * @property {MetricPattern11} base + * @property {MetricPattern2} cumulative + * @property {MetricPattern2} sum */ /** - * Create a CoinbasePattern pattern node + * Create a SegwitAdoptionPattern pattern node * @param {BrkClientBase} client * @param {string} acc - Accumulated metric name - * @returns {CoinbasePattern} + * @returns {SegwitAdoptionPattern} */ -function createCoinbasePattern(client, acc) { +function createSegwitAdoptionPattern(client, acc) { return { - bitcoin: createBitcoinPattern(client, _m(acc, 'btc')), - dollars: createDollarsPattern(client, _m(acc, 'usd')), - sats: createDollarsPattern(client, acc), + base: 
createMetricPattern11(client, acc), + cumulative: createMetricPattern2(client, _m(acc, 'cumulative')), + sum: createMetricPattern2(client, _m(acc, 'sum')), + }; +} + +/** + * @typedef {Object} ActiveSupplyPattern + * @property {MetricPattern1} bitcoin + * @property {MetricPattern1} dollars + * @property {MetricPattern1} sats + */ + +/** + * Create a ActiveSupplyPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {ActiveSupplyPattern} + */ +function createActiveSupplyPattern(client, acc) { + return { + bitcoin: createMetricPattern1(client, _m(acc, 'btc')), + dollars: createMetricPattern1(client, _m(acc, 'usd')), + sats: createMetricPattern1(client, acc), + }; +} + +/** + * @typedef {Object} SupplyPattern2 + * @property {ActiveSupplyPattern} halved + * @property {ActiveSupplyPattern} total + */ + +/** + * Create a SupplyPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {SupplyPattern2} + */ +function createSupplyPattern2(client, acc) { + return { + halved: createActiveSupplyPattern(client, _m(acc, 'halved')), + total: createActiveSupplyPattern(client, acc), }; } @@ -2785,25 +2806,6 @@ function createRelativePattern4(client, acc) { }; } -/** - * @typedef {Object} SupplyPattern2 - * @property {ActiveSupplyPattern} halved - * @property {ActiveSupplyPattern} total - */ - -/** - * Create a SupplyPattern2 pattern node - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {SupplyPattern2} - */ -function createSupplyPattern2(client, acc) { - return { - halved: createActiveSupplyPattern(client, _m(acc, 'halved')), - total: createActiveSupplyPattern(client, acc), - }; -} - /** * @template T * @typedef {Object} BitcoinPattern2 @@ -2825,27 +2827,6 @@ function createBitcoinPattern2(client, acc) { }; } -/** - * @template T - * @typedef {Object} BlockCountPattern - * @property {MetricPattern1} cumulative - 
* @property {MetricPattern1} sum - */ - -/** - * Create a BlockCountPattern pattern node - * @template T - * @param {BrkClientBase} client - * @param {string} acc - Accumulated metric name - * @returns {BlockCountPattern} - */ -function createBlockCountPattern(client, acc) { - return { - cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), - sum: createMetricPattern1(client, acc), - }; -} - /** * @template T * @typedef {Object} SatsPattern @@ -2867,6 +2848,27 @@ function createSatsPattern(client, acc) { }; } +/** + * @template T + * @typedef {Object} BlockCountPattern + * @property {MetricPattern1} cumulative + * @property {MetricPattern1} sum + */ + +/** + * Create a BlockCountPattern pattern node + * @template T + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {BlockCountPattern} + */ +function createBlockCountPattern(client, acc) { + return { + cumulative: createMetricPattern1(client, _m(acc, 'cumulative')), + sum: createMetricPattern1(client, acc), + }; +} + /** * @typedef {Object} RealizedPriceExtraPattern * @property {MetricPattern4} ratio @@ -3696,8 +3698,8 @@ function createOutputsPattern(client, acc) { /** * @typedef {Object} MetricsTree_Market_Dca - * @property {MetricsTree_Market_Dca_ClassAveragePrice} classAveragePrice - * @property {ClassAveragePricePattern} classReturns + * @property {ClassAveragePricePattern} classAveragePrice + * @property {MetricsTree_Market_Dca_ClassReturns} classReturns * @property {MetricsTree_Market_Dca_ClassStack} classStack * @property {PeriodAveragePricePattern} periodAveragePrice * @property {PeriodCagrPattern} periodCagr @@ -3707,18 +3709,18 @@ function createOutputsPattern(client, acc) { */ /** - * @typedef {Object} MetricsTree_Market_Dca_ClassAveragePrice - * @property {MetricPattern4} _2015 - * @property {MetricPattern4} _2016 - * @property {MetricPattern4} _2017 - * @property {MetricPattern4} _2018 - * @property {MetricPattern4} _2019 - * @property 
{MetricPattern4} _2020 - * @property {MetricPattern4} _2021 - * @property {MetricPattern4} _2022 - * @property {MetricPattern4} _2023 - * @property {MetricPattern4} _2024 - * @property {MetricPattern4} _2025 + * @typedef {Object} MetricsTree_Market_Dca_ClassReturns + * @property {MetricPattern4} _2015 + * @property {MetricPattern4} _2016 + * @property {MetricPattern4} _2017 + * @property {MetricPattern4} _2018 + * @property {MetricPattern4} _2019 + * @property {MetricPattern4} _2020 + * @property {MetricPattern4} _2021 + * @property {MetricPattern4} _2022 + * @property {MetricPattern4} _2023 + * @property {MetricPattern4} _2024 + * @property {MetricPattern4} _2025 */ /** @@ -5726,20 +5728,20 @@ class BrkClient extends BrkClientBase { yearsSincePriceAth: createMetricPattern4(this, 'years_since_price_ath'), }, dca: { - classAveragePrice: { - _2015: createMetricPattern4(this, 'dca_class_2015_average_price'), - _2016: createMetricPattern4(this, 'dca_class_2016_average_price'), - _2017: createMetricPattern4(this, 'dca_class_2017_average_price'), - _2018: createMetricPattern4(this, 'dca_class_2018_average_price'), - _2019: createMetricPattern4(this, 'dca_class_2019_average_price'), - _2020: createMetricPattern4(this, 'dca_class_2020_average_price'), - _2021: createMetricPattern4(this, 'dca_class_2021_average_price'), - _2022: createMetricPattern4(this, 'dca_class_2022_average_price'), - _2023: createMetricPattern4(this, 'dca_class_2023_average_price'), - _2024: createMetricPattern4(this, 'dca_class_2024_average_price'), - _2025: createMetricPattern4(this, 'dca_class_2025_average_price'), + classAveragePrice: createClassAveragePricePattern(this, 'dca_class'), + classReturns: { + _2015: createMetricPattern4(this, 'dca_class_2015_returns'), + _2016: createMetricPattern4(this, 'dca_class_2016_returns'), + _2017: createMetricPattern4(this, 'dca_class_2017_returns'), + _2018: createMetricPattern4(this, 'dca_class_2018_returns'), + _2019: createMetricPattern4(this, 
'dca_class_2019_returns'), + _2020: createMetricPattern4(this, 'dca_class_2020_returns'), + _2021: createMetricPattern4(this, 'dca_class_2021_returns'), + _2022: createMetricPattern4(this, 'dca_class_2022_returns'), + _2023: createMetricPattern4(this, 'dca_class_2023_returns'), + _2024: createMetricPattern4(this, 'dca_class_2024_returns'), + _2025: createMetricPattern4(this, 'dca_class_2025_returns'), }, - classReturns: createClassAveragePricePattern(this, 'dca_class'), classStack: { _2015: create_2015Pattern(this, 'dca_class_2015_stack'), _2016: create_2015Pattern(this, 'dca_class_2016_stack'), diff --git a/packages/brk_client/brk_client/__init__.py b/packages/brk_client/brk_client/__init__.py index 9bf209c65..ef90b8b72 100644 --- a/packages/brk_client/brk_client/__init__.py +++ b/packages/brk_client/brk_client/__init__.py @@ -1041,9 +1041,11 @@ def _p(prefix: str, acc: str) -> str: class MetricData(TypedDict, Generic[T]): """Metric data with range information.""" + version: int total: int start: int end: int + stamp: str data: List[T] @@ -2054,17 +2056,17 @@ class ClassAveragePricePattern(Generic[T]): def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self._2015: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2015_returns')) - self._2016: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2016_returns')) - self._2017: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2017_returns')) - self._2018: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2018_returns')) - self._2019: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2019_returns')) - self._2020: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2020_returns')) - self._2021: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2021_returns')) - self._2022: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2022_returns')) - self._2023: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2023_returns')) - self._2024: MetricPattern4[T] 
= MetricPattern4(client, _m(acc, '2024_returns')) - self._2025: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2025_returns')) + self._2015: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2015_average_price')) + self._2016: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2016_average_price')) + self._2017: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2017_average_price')) + self._2018: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2018_average_price')) + self._2019: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2019_average_price')) + self._2020: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2020_average_price')) + self._2021: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2021_average_price')) + self._2022: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2022_average_price')) + self._2023: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2023_average_price')) + self._2024: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2024_average_price')) + self._2025: MetricPattern4[T] = MetricPattern4(client, _m(acc, '2025_average_price')) class DollarsPattern(Generic[T]): """Pattern struct for repeated tree structure.""" @@ -2083,22 +2085,6 @@ class DollarsPattern(Generic[T]): self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90')) self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) -class RelativePattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')) - self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')) - self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = 
MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')) - self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')) - self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) - self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) - self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')) - self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')) - self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')) - self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')) - class RelativePattern: """Pattern struct for repeated tree structure.""" @@ -2115,6 +2101,22 @@ class RelativePattern: self.unrealized_loss_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) self.unrealized_profit_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) +class RelativePattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')) + self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = 
MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')) + self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')) + self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')) + self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')) + self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')) + self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')) + self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')) + self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')) + self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')) + class CountPattern2(Generic[T]): """Pattern struct for repeated tree structure.""" @@ -2217,6 +2219,19 @@ class _10yTo12yPattern: self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) +class UnrealizedPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.neg_unrealized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss')) + self.net_unrealized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 
'net_unrealized_pnl')) + self.supply_in_loss: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_in_loss')) + self.supply_in_profit: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_in_profit')) + self.total_unrealized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_unrealized_pnl')) + self.unrealized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_loss')) + self.unrealized_profit: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_profit')) + class _10yPattern: """Pattern struct for repeated tree structure.""" @@ -2230,19 +2245,6 @@ class _10yPattern: self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) -class _0satsPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.activity: ActivityPattern2 = ActivityPattern2(client, acc) - self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) - self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count')) - self.realized: RealizedPattern = RealizedPattern(client, acc) - self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in')) - self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) - self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) - class _100btcPattern: """Pattern struct for repeated tree structure.""" @@ -2256,19 +2258,6 @@ class _100btcPattern: self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) -class UnrealizedPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.neg_unrealized_loss: MetricPattern1[Dollars] = 
MetricPattern1(client, _m(acc, 'neg_unrealized_loss')) - self.net_unrealized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl')) - self.supply_in_loss: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_in_loss')) - self.supply_in_profit: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'supply_in_profit')) - self.total_unrealized_pnl: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'total_unrealized_pnl')) - self.unrealized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_loss')) - self.unrealized_profit: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'unrealized_profit')) - class PeriodCagrPattern: """Pattern struct for repeated tree structure.""" @@ -2282,6 +2271,19 @@ class PeriodCagrPattern: self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('6y', acc)) self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('8y', acc)) +class _0satsPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.activity: ActivityPattern2 = ActivityPattern2(client, acc) + self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc) + self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count')) + self.realized: RealizedPattern = RealizedPattern(client, acc) + self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in')) + self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) + self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) + class ActivityPattern2: """Pattern struct for repeated tree structure.""" @@ -2321,23 +2323,14 @@ class CoinbasePattern2: self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc) -class ActiveSupplyPattern: +class CoinbasePattern: """Pattern 
struct for repeated tree structure.""" def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc')) - self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) - self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) - -class SegwitAdoptionPattern: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.base: MetricPattern11[StoredF32] = MetricPattern11(client, acc) - self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative')) - self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum')) + self.bitcoin: BitcoinPattern = BitcoinPattern(client, _m(acc, 'btc')) + self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd')) + self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc) class UnclaimedRewardsPattern: """Pattern struct for repeated tree structure.""" @@ -2357,14 +2350,31 @@ class _2015Pattern: self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd')) self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc) -class CoinbasePattern: +class SegwitAdoptionPattern: """Pattern struct for repeated tree structure.""" def __init__(self, client: BrkClientBase, acc: str): """Create pattern node with accumulated metric name.""" - self.bitcoin: BitcoinPattern = BitcoinPattern(client, _m(acc, 'btc')) - self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd')) - self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc) + self.base: MetricPattern11[StoredF32] = MetricPattern11(client, acc) + self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative')) + self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum')) + +class 
ActiveSupplyPattern: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.bitcoin: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc')) + self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) + self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) + +class SupplyPattern2: + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.halved: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'halved')) + self.total: ActiveSupplyPattern = ActiveSupplyPattern(client, acc) class _1dReturns1mSdPattern: """Pattern struct for repeated tree structure.""" @@ -2390,14 +2400,6 @@ class RelativePattern4: self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'loss_rel_to_own_supply')) self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'profit_rel_to_own_supply')) -class SupplyPattern2: - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.halved: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'halved')) - self.total: ActiveSupplyPattern = ActiveSupplyPattern(client, acc) - class BitcoinPattern2(Generic[T]): """Pattern struct for repeated tree structure.""" @@ -2406,14 +2408,6 @@ class BitcoinPattern2(Generic[T]): self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative')) self.sum: MetricPattern1[T] = MetricPattern1(client, acc) -class BlockCountPattern(Generic[T]): - """Pattern struct for repeated tree structure.""" - - def __init__(self, client: BrkClientBase, acc: str): - """Create pattern node with accumulated metric name.""" - self.cumulative: 
MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) - self.sum: MetricPattern1[T] = MetricPattern1(client, acc) - class SatsPattern(Generic[T]): """Pattern struct for repeated tree structure.""" @@ -2422,6 +2416,14 @@ class SatsPattern(Generic[T]): self.ohlc: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'ohlc_sats')) self.split: SplitPattern2[T] = SplitPattern2(client, _m(acc, 'sats')) +class BlockCountPattern(Generic[T]): + """Pattern struct for repeated tree structure.""" + + def __init__(self, client: BrkClientBase, acc: str): + """Create pattern node with accumulated metric name.""" + self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) + self.sum: MetricPattern1[T] = MetricPattern1(client, acc) + class RealizedPriceExtraPattern: """Pattern struct for repeated tree structure.""" @@ -3269,21 +3271,21 @@ class MetricsTree_Market_Ath: self.price_drawdown: MetricPattern3[StoredF32] = MetricPattern3(client, 'price_drawdown') self.years_since_price_ath: MetricPattern4[StoredF32] = MetricPattern4(client, 'years_since_price_ath') -class MetricsTree_Market_Dca_ClassAveragePrice: +class MetricsTree_Market_Dca_ClassReturns: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): - self._2015: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2015_average_price') - self._2016: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2016_average_price') - self._2017: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2017_average_price') - self._2018: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2018_average_price') - self._2019: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2019_average_price') - self._2020: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2020_average_price') - self._2021: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2021_average_price') - self._2022: MetricPattern4[Dollars] = 
MetricPattern4(client, 'dca_class_2022_average_price') - self._2023: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2023_average_price') - self._2024: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2024_average_price') - self._2025: MetricPattern4[Dollars] = MetricPattern4(client, 'dca_class_2025_average_price') + self._2015: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2015_returns') + self._2016: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2016_returns') + self._2017: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2017_returns') + self._2018: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2018_returns') + self._2019: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2019_returns') + self._2020: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2020_returns') + self._2021: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2021_returns') + self._2022: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2022_returns') + self._2023: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2023_returns') + self._2024: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2024_returns') + self._2025: MetricPattern4[StoredF32] = MetricPattern4(client, 'dca_class_2025_returns') class MetricsTree_Market_Dca_ClassStack: """Metrics tree node.""" @@ -3305,8 +3307,8 @@ class MetricsTree_Market_Dca: """Metrics tree node.""" def __init__(self, client: BrkClientBase, base_path: str = ''): - self.class_average_price: MetricsTree_Market_Dca_ClassAveragePrice = MetricsTree_Market_Dca_ClassAveragePrice(client) - self.class_returns: ClassAveragePricePattern[StoredF32] = ClassAveragePricePattern(client, 'dca_class') + self.class_average_price: ClassAveragePricePattern[Dollars] = ClassAveragePricePattern(client, 'dca_class') + self.class_returns: MetricsTree_Market_Dca_ClassReturns = MetricsTree_Market_Dca_ClassReturns(client) 
self.class_stack: MetricsTree_Market_Dca_ClassStack = MetricsTree_Market_Dca_ClassStack(client) self.period_average_price: PeriodAveragePricePattern[Dollars] = PeriodAveragePricePattern(client, 'dca_average_price') self.period_cagr: PeriodCagrPattern = PeriodCagrPattern(client, 'dca_cagr') diff --git a/website/index.html b/website/index.html index 7397d9ba5..43e3e5554 100644 --- a/website/index.html +++ b/website/index.html @@ -1533,6 +1533,11 @@ "(prefers-color-scheme: dark)", ); + let storedTheme; + try { storedTheme = localStorage.getItem("theme"); } catch (_) {} + const isDark = storedTheme ? storedTheme === "dark" : preferredColorSchemeMatchMedia.matches; + document.documentElement.style.colorScheme = isDark ? "dark" : "light"; + const themeColor = window.document.createElement("meta"); themeColor.name = "theme-color"; window.document.getElementsByTagName("head")[0].appendChild(themeColor); @@ -1545,7 +1550,7 @@ themeColor.content = theme; } - updateThemeColor(preferredColorSchemeMatchMedia.matches); + updateThemeColor(isDark); preferredColorSchemeMatchMedia.addEventListener( "change", ({ matches }) => { @@ -1832,6 +1837,7 @@ + diff --git a/website/scripts/panes/chart/screenshot.js b/website/scripts/chart/capture.js similarity index 62% rename from website/scripts/panes/chart/screenshot.js rename to website/scripts/chart/capture.js index 3ba34a9f0..0c351d3fc 100644 --- a/website/scripts/panes/chart/screenshot.js +++ b/website/scripts/chart/capture.js @@ -1,13 +1,14 @@ -import { ios } from "../../utils/env.js"; -import { domToBlob } from "../../modules/modern-screenshot/4.6.7/dist/index.mjs"; +import { ios, canShare } from "../utils/env.js"; +import { domToBlob } from "../modules/modern-screenshot/4.6.7/dist/index.mjs"; + +export const canCapture = !ios || canShare; /** * @param {Object} args * @param {Element} args.element * @param {string} args.name - * @param {string} args.title */ -export async function screenshot({ element, name, title }) { +export async 
function capture({ element, name }) { const blob = await domToBlob(element, { scale: 2, }); @@ -16,15 +17,13 @@ export async function screenshot({ element, name, title }) { const file = new File( [blob], `bitview-${name}-${new Date().toJSON().split(".")[0]}.png`, - { - type: "image/png", - }, + { type: "image/png" }, ); try { await navigator.share({ files: [file], - title: `${title} on ${window.document.location.hostname}`, + title: `${name} on ${window.document.location.hostname}`, }); return; } catch (err) { diff --git a/website/scripts/chart/colors.js b/website/scripts/chart/colors.js new file mode 100644 index 000000000..803ecd265 --- /dev/null +++ b/website/scripts/chart/colors.js @@ -0,0 +1,98 @@ +import { oklchToRgba } from "./oklch.js"; +import { dark } from "../utils/theme.js"; + +/** @type {Map} */ +const rgbaCache = new Map(); + +/** + * Convert oklch to rgba with caching + * @param {string} color - oklch color string + */ +function toRgba(color) { + if (color === "transparent") return color; + const cached = rgbaCache.get(color); + if (cached) return cached; + const rgba = oklchToRgba(color); + rgbaCache.set(color, rgba); + return rgba; +} + +/** + * Reduce color opacity to 50% for dimming effect + * @param {string} color - oklch color string + */ +function tameColor(color) { + if (color === "transparent") return color; + return `${color.slice(0, -1)} / 25%)`; +} + +/** + * @typedef {Object} ColorMethods + * @property {() => string} tame - Returns tamed (50% opacity) version + * @property {(highlighted: boolean) => string} highlight - Returns normal if highlighted, tamed otherwise + */ + +/** + * @typedef {(() => string) & ColorMethods} Color + */ + +/** + * Creates a Color object that is callable and has utility methods + * @param {() => string} getter + * @returns {Color} + */ +function createColor(getter) { + const color = /** @type {Color} */ (() => toRgba(getter())); + color.tame = () => toRgba(tameColor(getter())); + color.highlight = 
(highlighted) => + highlighted ? toRgba(getter()) : toRgba(tameColor(getter())); + return color; +} + +const globalComputedStyle = getComputedStyle(window.document.documentElement); + +/** + * @param {string} name + */ +function getColor(name) { + return globalComputedStyle.getPropertyValue(`--${name}`); +} + +/** + * @param {string} property + */ +function getLightDarkValue(property) { + const value = globalComputedStyle.getPropertyValue(property); + const [light, _dark] = value.slice(11, -1).split(", "); + return dark ? _dark : light; +} + +export const colors = { + default: createColor(() => getLightDarkValue("--color")), + gray: createColor(() => getColor("gray")), + border: createColor(() => getLightDarkValue("--border-color")), + + red: createColor(() => getColor("red")), + orange: createColor(() => getColor("orange")), + amber: createColor(() => getColor("amber")), + yellow: createColor(() => getColor("yellow")), + avocado: createColor(() => getColor("avocado")), + lime: createColor(() => getColor("lime")), + green: createColor(() => getColor("green")), + emerald: createColor(() => getColor("emerald")), + teal: createColor(() => getColor("teal")), + cyan: createColor(() => getColor("cyan")), + sky: createColor(() => getColor("sky")), + blue: createColor(() => getColor("blue")), + indigo: createColor(() => getColor("indigo")), + violet: createColor(() => getColor("violet")), + purple: createColor(() => getColor("purple")), + fuchsia: createColor(() => getColor("fuchsia")), + pink: createColor(() => getColor("pink")), + rose: createColor(() => getColor("rose")), +}; + +/** + * @typedef {typeof colors} Colors + * @typedef {keyof Colors} ColorName + */ diff --git a/website/scripts/chart/index.js b/website/scripts/chart/index.js index 1a04645a2..3cb3a8b33 100644 --- a/website/scripts/chart/index.js +++ b/website/scripts/chart/index.js @@ -1,20 +1,21 @@ import { - createChart as _createChart, - createSeriesMarkers, + createChart as untypedLcCreateChart, 
CandlestickSeries, HistogramSeries, LineSeries, BaselineSeries, // } from "../modules/lightweight-charts/5.1.0/dist/lightweight-charts.standalone.development.mjs"; } from "../modules/lightweight-charts/5.1.0/dist/lightweight-charts.standalone.production.mjs"; -import { createMinMaxMarkers } from "./markers.js"; import { createLegend } from "./legend.js"; +import { capture, canCapture } from "./capture.js"; +import { colors } from "./colors.js"; -const createChart = /** @type {CreateChart} */ (_createChart); +const lcCreateChart = /** @type {CreateLCChart} */ (untypedLcCreateChart); import { createChoiceField } from "../utils/dom.js"; -import { createOklchToRGBA } from "./oklch.js"; -import { throttle } from "../utils/timing.js"; -import { serdeBool } from "../utils/serde.js"; +import { createPersistedValue } from "../utils/persisted.js"; +import { onChange as onThemeChange } from "../utils/theme.js"; +import { throttle, debounce } from "../utils/timing.js"; +import { serdeBool, serdeChartableIndex } from "../utils/serde.js"; import { stringToId, numberToShortUSFormat } from "../utils/format.js"; import { style } from "../utils/elements.js"; import { resources } from "../resources.js"; @@ -35,16 +36,20 @@ import { resources } from "../resources.js"; /** * @template T * @typedef {Object} Series + * @property {string} key * @property {string} id * @property {number} paneIndex * @property {Signal} active - * @property {Signal} highlighted - * @property {Signal} hasData - * @property {Signal} url + * @property {(value: boolean) => void} setActive + * @property {() => void} show + * @property {() => void} hide + * @property {(order: number) => void} setOrder + * @property {() => void} highlight + * @property {() => void} tame + * @property {() => boolean} hasData + * @property {string | null} url * @property {() => readonly T[]} getData * @property {(data: T) => void} update - * @property {(markers: TimeSeriesMarker[]) => void} setMarkers - * @property {VoidFunction} 
clearMarkers * @property {VoidFunction} remove */ @@ -65,8 +70,6 @@ import { resources } from "../resources.js"; * @property {function(number): void} removeFrom */ -const oklchToRGBA = createOklchToRGBA(); - const lineWidth = /** @type {any} */ (1.5); /** @@ -74,26 +77,63 @@ const lineWidth = /** @type {any} */ (1.5); * @param {string} args.id * @param {HTMLElement} args.parent * @param {Signals} args.signals - * @param {Colors} args.colors * @param {BrkClient} args.brk - * @param {Accessor} args.index - * @param {((unknownTimeScaleCallback: VoidFunction) => void)} [args.timeScaleSetCallback] - * @param {number | null} [args.initialVisibleBarsCount] * @param {true} [args.fitContent] + * @param {HTMLElement} [args.captureElement] * @param {{unit: Unit; blueprints: AnySeriesBlueprint[]}[]} [args.config] */ -export function createChartElement({ +export function createChart({ parent, signals, - colors, id: chartId, - index, brk, - timeScaleSetCallback, - initialVisibleBarsCount, fitContent, + captureElement, config, }) { + // Chart owns its index state + /** @type {Signal} */ + const indexName = signals.createPersistedSignal({ + defaultValue: /** @type {ChartableIndexName} */ ("date"), + storageKey: "chart-index", + urlKey: "i", + serialize: (v) => v, + deserialize: (s) => /** @type {ChartableIndexName} */ (s), + }); + + const index = signals.createMemo(() => + serdeChartableIndex.deserialize(indexName()), + ); + + // Range state: localStorage stores all ranges per-index, URL stores current range only + /** @typedef {{ from: number, to: number }} Range */ + const ranges = createPersistedValue({ + defaultValue: /** @type {Record} */ ({}), + storageKey: "chart-ranges", + serialize: JSON.stringify, + deserialize: JSON.parse, + }); + + const range = createPersistedValue({ + defaultValue: /** @type {Range | null} */ (null), + urlKey: "range", + serialize: (v) => (v ? 
`${v.from.toFixed(2)}_${v.to.toFixed(2)}` : ""), + deserialize: (s) => { + if (!s) return null; + const [from, to] = s.split("_").map(Number); + return !isNaN(from) && !isNaN(to) ? { from, to } : null; + }, + }); + + /** @returns {Range | null} */ + const getRange = () => range.value ?? ranges.value[indexName()] ?? null; + + /** @param {Range} value */ + const setRange = (value) => { + ranges.set({ ...ranges.value, [indexName()]: value }); + range.set(value); + }; + const div = window.document.createElement("div"); div.classList.add("chart"); parent.append(div); @@ -102,17 +142,21 @@ export function createChartElement({ /** @type {Map>} */ const sharedActiveSignals = new Map(); - const legendTop = createLegend(signals); + // Registry for linked series (same key = linked across panes) + /** @type {Map>} */ + const seriesByKey = new Map(); + + const legendTop = createLegend(); div.append(legendTop.element); const chartDiv = window.document.createElement("div"); chartDiv.classList.add("lightweight-chart"); div.append(chartDiv); - const legendBottom = createLegend(signals); + const legendBottom = createLegend(); div.append(legendBottom.element); - const ichart = createChart( + const ichart = lcCreateChart( chartDiv, /** @satisfies {DeepPartial} */ ({ autoSize: true, @@ -120,8 +164,6 @@ export function createChartElement({ fontFamily: style.fontFamily, background: { color: "transparent" }, attributionLogo: false, - colorSpace: "display-p3", - colorParsers: [oklchToRGBA], }, grid: { vertLines: { visible: false }, @@ -133,6 +175,7 @@ export function createChartElement({ timeScale: { borderVisible: false, enableConflation: true, + // conflationThresholdFactor: 8, ...(fitContent ? { minBarSpacing: 0.001, @@ -144,7 +187,7 @@ export function createChartElement({ locale: "en-us", }, crosshair: { - mode: 3, + mode: 0, }, ...(fitContent ? 
{ @@ -162,82 +205,64 @@ export function createChartElement({ ichart.panes().at(0)?.setStretchFactor(1); - /** @param {{ from: number, to: number }} range */ - const setVisibleLogicalRange = (range) => { - // Defer to next frame to ensure chart has rendered - requestAnimationFrame(() => { - ichart.timeScale().setVisibleLogicalRange(range); - }); - }; + /** @typedef {(visibleBarsCount: number) => void} ZoomChangeCallback */ - const seriesList = signals.createSignal( - /** @type {Set} */ (new Set()), - { equals: false }, - ); - const seriesCount = signals.createMemo(() => seriesList().size); - const markers = createMinMaxMarkers({ - chart: ichart, - seriesList, - colors, - formatValue: numberToShortUSFormat, - }); + const initialRange = getRange(); + if (initialRange) { + ichart.timeScale().setVisibleLogicalRange(initialRange); + } - const visibleBarsCount = signals.createSignal( - initialVisibleBarsCount ?? Infinity, - ); - /** @type {() => 0 | 1 | 2 | 3} 0: <=200, 1: <=500, 2: <=1000, 3: >1000 */ - const visibleBarsCountBucket = signals.createMemo(() => { - const count = visibleBarsCount(); - return count > 1000 ? 3 : count > 500 ? 2 : count > 200 ? 1 : 0; - }); - const shouldShowLine = signals.createMemo( - () => visibleBarsCountBucket() >= 2, - ); - const shouldUpdateMarkers = signals.createMemo( - () => visibleBarsCount() * seriesCount() <= 20_000, - ); + let visibleBarsCount = initialRange + ? 
initialRange.to - initialRange.from + : Infinity; - signals.createEffect(shouldUpdateMarkers, (should) => { - if (should) markers.update(); - else markers.clear(); - }); + /** @type {Set} */ + const onZoomChange = new Set(); ichart.timeScale().subscribeVisibleLogicalRangeChange( throttle((range) => { - if (range) { - visibleBarsCount.set(range.to - range.from); - if (shouldUpdateMarkers()) markers.update(); + if (!range) return; + const count = range.to - range.from; + if (count === visibleBarsCount) return; + visibleBarsCount = count; + onZoomChange.forEach((cb) => cb(count)); + }, 100), + ); + + // Debounced range persistence + ichart.timeScale().subscribeVisibleLogicalRangeChange( + debounce((range) => { + if (range && range.from < range.to) { + setRange({ from: range.from, to: range.to }); } }, 100), ); - signals.createEffect( - () => ({ - defaultColor: colors.default(), - offColor: colors.gray(), - borderColor: colors.border(), - }), - ({ defaultColor, offColor, borderColor }) => { - ichart.applyOptions({ - layout: { - textColor: offColor, - panes: { - separatorColor: borderColor, - }, + function applyColors() { + const defaultColor = colors.default(); + const offColor = colors.gray(); + const borderColor = colors.border(); + ichart.applyOptions({ + layout: { + textColor: offColor, + panes: { + separatorColor: borderColor, }, - crosshair: { - horzLine: { - color: offColor, - labelBackgroundColor: defaultColor, - }, - vertLine: { - color: offColor, - labelBackgroundColor: defaultColor, - }, + }, + crosshair: { + horzLine: { + color: offColor, + labelBackgroundColor: defaultColor, }, - }); - }, - ); + vertLine: { + color: offColor, + labelBackgroundColor: defaultColor, + }, + }, + }); + } + applyColors(); + const removeThemeListener = onThemeChange(applyColors); signals.createEffect(index, (index) => { const minBarSpacing = @@ -273,7 +298,7 @@ export function createChartElement({ activeResources.forEach((v) => { v.fetch(); }); - }), + }, 10_000), ); if 
(fitContent) { @@ -345,29 +370,38 @@ export function createChartElement({ paneIndex, position: "sw", createChild(pane) { + /** @type {"lin" | "log"} */ const defaultValue = unit.id === "usd" && seriesType !== "Baseline" ? "log" : "lin"; - const selected = signals.createPersistedSignal({ + + const persisted = createPersistedValue({ defaultValue, storageKey: `${id}-scale-${paneIndex}`, urlKey: paneIndex === 0 ? "price_scale" : "unit_scale", serialize: (v) => v, deserialize: (s) => /** @type {"lin" | "log"} */ (s), }); + + /** @param {"lin" | "log"} value */ + const applyScale = (value) => { + try { + pane.priceScale("right").applyOptions({ + mode: value === "lin" ? 0 : 1, + }); + } catch {} + }; + + // Apply initial value + applyScale(persisted.value); + const field = createChoiceField({ choices: /** @type {const} */ (["lin", "log"]), id: stringToId(`${id} ${paneIndex} ${unit}`), - defaultValue, - selected, - signals, - }); - - signals.createEffect(selected, (selected) => { - try { - pane.priceScale("right").applyOptions({ - mode: selected === "lin" ? 
0 : 1, - }); - } catch {} + initialValue: persisted.value, + onChange(value) { + persisted.set(value); + applyScale(value); + }, }); return field; @@ -386,13 +420,16 @@ export function createChartElement({ * @param {Accessor} [args.data] * @param {number} args.paneIndex * @param {boolean} [args.defaultActive] - * @param {(ctx: { active: Signal, highlighted: Signal }) => void} args.setup + * @param {(order: number) => void} args.setOrder + * @param {() => void} args.show + * @param {() => void} args.hide + * @param {() => void} args.highlight + * @param {() => void} args.tame * @param {() => readonly any[]} args.getData * @param {(data: any[]) => void} args.setData * @param {(data: any) => void} args.update - * @param {(markers: TimeSeriesMarker[]) => void} args.setMarkers - * @param {VoidFunction} args.clearMarkers * @param {() => void} args.onRemove + * @param {() => void} [args.onDataLoaded] */ function addSeries({ metric, @@ -404,40 +441,38 @@ export function createChartElement({ defaultActive, colors, data, - setup, + setOrder, + show, + hide, + highlight, + tame, getData, setData, update, - setMarkers, - clearMarkers, onRemove, + onDataLoaded, }) { return signals.createRoot((dispose) => { - const id = `${stringToId(name)}-${paneIndex}`; - const urlId = stringToId(name); + const key = stringToId(name); + const id = `${key}-${paneIndex}`; // Reuse existing signal if same name (links legends across panes) - let active = sharedActiveSignals.get(urlId); + let active = sharedActiveSignals.get(key); if (!active) { active = signals.createPersistedSignal({ defaultValue: defaultActive ?? true, storageKey: id, - urlKey: urlId, + urlKey: key, ...serdeBool, }); - sharedActiveSignals.set(urlId, active); + sharedActiveSignals.set(key, active); } - const highlighted = signals.createSignal(true); + setOrder(-order); - setup({ active, highlighted }); + active() ? 
show() : hide(); - // Update markers when active changes - signals.createEffect(active, () => { - if (shouldUpdateMarkers()) markers.scheduleUpdate(); - }); - - const hasData = signals.createSignal(false); + let hasData = false; let lastTime = -Infinity; /** @type {MetricResource | undefined} */ @@ -446,28 +481,46 @@ export function createChartElement({ /** @type {AnySeries} */ const series = { active, - highlighted, - hasData, + setActive(value) { + active.set(value); + seriesByKey.get(key)?.forEach((s) => { + value ? s.show() : s.hide(); + }); + document.querySelectorAll(`[data-series="${id}"]`).forEach((el) => { + if (el instanceof HTMLInputElement && el.type === "checkbox") { + el.checked = value; + } + }); + }, + setOrder, + show, + hide, + highlight, + tame, + hasData: () => hasData, + key, id, paneIndex, - url: signals.createSignal(/** @type {string | null} */ (null)), + url: null, getData, update, - setMarkers, - clearMarkers, remove() { dispose(); onRemove(); + seriesByKey.get(key)?.delete(series); if (_valuesResource) { activeResources.delete(_valuesResource); } - seriesList().delete(series); - seriesList.set(seriesList()); }, }; - seriesList().add(series); - seriesList.set(seriesList()); + // Register series for cross-pane linking + let keySet = seriesByKey.get(key); + if (!keySet) { + keySet = new Set(); + seriesByKey.set(key, keySet); + } + keySet.add(series); if (metric) { signals.createScopedEffect(index, (index) => { @@ -489,158 +542,190 @@ export function createChartElement({ const valuesResource = resources.useMetricEndpoint(valuesNode); _valuesResource = valuesResource; - series.url.set(() => { - const base = brk.baseUrl.endsWith("/") - ? brk.baseUrl.slice(0, -1) - : brk.baseUrl; - return `${base}${valuesResource.path}`; + series.url = `${ + brk.baseUrl.endsWith("/") ? brk.baseUrl.slice(0, -1) : brk.baseUrl + }${valuesResource.path}`; + + (paneIndex ? 
legendBottom : legendTop).addOrReplace({ + series, + name, + colors, + order, }); - signals.createScopedEffect(active, (active) => { - if (active) { - timeResource.fetch(); - valuesResource.fetch(); - activeResources.add(valuesResource); + // Create memo outside active check (cheap, just checks data existence) + const timeRange = timeResource.range(); + const valuesRange = valuesResource.range(); + const valuesCacheKey = signals.createMemo(() => { + const res = valuesRange.response(); + if (!res?.data?.length) return null; + if (!timeRange.response()?.data?.length) return null; + return `${res.version}|${res.stamp}|${res.total}|${res.start}|${res.end}`; + }); - const timeRange = timeResource.range(); - const valuesRange = valuesResource.range(); - const valuesCacheKey = signals.createMemo(() => { - const res = valuesRange.response(); - if (!res?.data?.length) return null; - if (!timeRange.response()?.data?.length) return null; - return `${res.version}|${res.stamp}|${res.total}|${res.start}|${res.end}`; - }); - signals.createEffect(valuesCacheKey, (cacheKey) => { - if (!cacheKey) return; - const _indexes = timeRange.response()?.data; - const values = valuesRange.response()?.data; - if (!_indexes?.length || !values?.length) return; + // Combined effect for active + data processing (flat, uses prev comparison) + signals.createEffect( + () => ({ isActive: active(), cacheKey: valuesCacheKey() }), + (curr, prev) => { + const becameActive = curr.isActive && (!prev || !prev.isActive); + const becameInactive = !curr.isActive && prev?.isActive; - const indexes = /** @type {number[]} */ (_indexes); - const length = Math.min(indexes.length, values.length); + if (becameInactive) { + activeResources.delete(valuesResource); + return; + } - // Find start index for processing - let startIdx = 0; - if (hasData()) { - // Binary search to find first index where time >= lastTime - let lo = 0; - let hi = length; - while (lo < hi) { - const mid = (lo + hi) >>> 1; - if (indexes[mid] < 
lastTime) { - lo = mid + 1; - } else { - hi = mid; - } - } - startIdx = lo; - if (startIdx >= length) return; // No new data - } + if (!curr.isActive) return; - /** - * @param {number} i - * @param {(number | null | [number, number, number, number])[]} vals - * @returns {LineData | CandlestickData} - */ - function buildDataPoint(i, vals) { - const time = /** @type {Time} */ (indexes[i]); - const v = vals[i]; - if (v === null) { - return { time, value: NaN }; - } else if (typeof v === "number") { - return { time, value: v }; + if (becameActive) { + timeResource.fetch(); + valuesResource.fetch(); + activeResources.add(valuesResource); + } + + // Process data only if cacheKey changed + if (!curr.cacheKey || curr.cacheKey === prev?.cacheKey) return; + + const _indexes = timeRange.response()?.data; + const values = valuesRange.response()?.data; + if (!_indexes?.length || !values?.length) return; + + const indexes = /** @type {number[]} */ (_indexes); + const length = Math.min(indexes.length, values.length); + + // Find start index for processing + let startIdx = 0; + if (hasData) { + // Binary search to find first index where time >= lastTime + let lo = 0; + let hi = length; + while (lo < hi) { + const mid = (lo + hi) >>> 1; + if (indexes[mid] < lastTime) { + lo = mid + 1; } else { - if (!Array.isArray(v) || v.length !== 4) - throw new Error(`Expected OHLC tuple, got: ${v}`); - const [open, high, low, close] = v; - return { time, open, high, low, close }; + hi = mid; } } + startIdx = lo; + if (startIdx >= length) return; // No new data + } - if (!hasData()) { - // Initial load: build full array - const data = /** @type {LineData[] | CandlestickData[]} */ ( - Array.from({ length }) - ); - - let prevTime = null; - let timeOffset = 0; - - for (let i = 0; i < length; i++) { - const time = indexes[i]; - const sameTime = prevTime === time; - if (sameTime) { - timeOffset += 1; - } - const offsetedI = i - timeOffset; - const point = buildDataPoint(i, values); - if (sameTime && 
"open" in point) { - const prev = /** @type {CandlestickData} */ ( - data[offsetedI] - ); - point.open = prev.open; - point.high = Math.max(prev.high, point.high); - point.low = Math.min(prev.low, point.low); - } - data[offsetedI] = point; - prevTime = time; - } - - data.length -= timeOffset; - - setData(data); - hasData.set(true); - if (shouldUpdateMarkers()) markers.scheduleUpdate(); - lastTime = - /** @type {number} */ (data.at(-1)?.time) ?? -Infinity; - - if (fitContent) { - ichart.timeScale().fitContent(); - } - - timeScaleSetCallback?.(() => { - if ( - index === "quarterindex" || - index === "semesterindex" || - index === "yearindex" || - index === "decadeindex" - ) { - setVisibleLogicalRange({ from: -1, to: data.length }); - } - }); + /** + * @param {number} i + * @param {(number | null | [number, number, number, number])[]} vals + * @returns {LineData | CandlestickData} + */ + function buildDataPoint(i, vals) { + const time = /** @type {Time} */ (indexes[i]); + const v = vals[i]; + if (v === null) { + return { time, value: NaN }; + } else if (typeof v === "number") { + return { time, value: v }; } else { - // Incremental update: only process new data points - for (let i = startIdx; i < length; i++) { - const point = buildDataPoint(i, values); - update(point); - lastTime = /** @type {number} */ (point.time); - } + if (!Array.isArray(v) || v.length !== 4) + throw new Error(`Expected OHLC tuple, got: ${v}`); + const [open, high, low, close] = v; + return { time, open, high, low, close }; } + } + + if (!hasData) { + // Initial load: build full array + const data = /** @type {LineData[] | CandlestickData[]} */ ( + Array.from({ length }) + ); + + let prevTime = null; + let timeOffset = 0; + + for (let i = 0; i < length; i++) { + const time = indexes[i]; + const sameTime = prevTime === time; + if (sameTime) { + timeOffset += 1; + } + const offsetedI = i - timeOffset; + const point = buildDataPoint(i, values); + if (sameTime && "open" in point) { + const prev = /** 
@type {CandlestickData} */ ( + data[offsetedI] + ); + point.open = prev.open; + point.high = Math.max(prev.high, point.high); + point.low = Math.min(prev.low, point.low); + } + data[offsetedI] = point; + prevTime = time; + } + + data.length -= timeOffset; + + setData(data); + hasData = true; + lastTime = + /** @type {number} */ (data.at(-1)?.time) ?? -Infinity; + + // Restore saved range or use defaults + const savedRange = getRange(); + if (savedRange) { + ichart.timeScale().setVisibleLogicalRange({ + from: savedRange.from, + to: savedRange.to, + }); + } else if (fitContent) { + ichart.timeScale().fitContent(); + } else if ( + index === "quarterindex" || + index === "semesterindex" || + index === "yearindex" || + index === "decadeindex" + ) { + ichart + .timeScale() + .setVisibleLogicalRange({ from: -1, to: data.length }); + } + // Delay until chart has applied the range + requestAnimationFrame(() => onDataLoaded?.()); + } else { + // Incremental update: only process new data points + for (let i = startIdx; i < length; i++) { + const point = buildDataPoint(i, values); + update(point); + lastTime = /** @type {number} */ (point.time); + } + } + }, + ); + }); + } else { + (paneIndex ? legendBottom : legendTop).addOrReplace({ + series, + name, + colors, + order, + }); + + if (data) { + signals.createEffect(data, (data) => { + setData(data); + hasData = true; + const savedRange = getRange(); + if (savedRange) { + ichart.timeScale().setVisibleLogicalRange({ + from: savedRange.from, + to: savedRange.to, }); - } else { - activeResources.delete(valuesResource); + } else if (fitContent) { + ichart.timeScale().fitContent(); } + // Delay until chart has applied the range + requestAnimationFrame(() => onDataLoaded?.()); }); - }); - } else if (data) { - signals.createEffect(data, (data) => { - setData(data); - hasData.set(true); - if (shouldUpdateMarkers()) markers.scheduleUpdate(); - - if (fitContent) { - ichart.timeScale().fitContent(); - } - }); + } } - (paneIndex ? 
legendBottom : legendTop).addOrReplace({ - series, - name, - colors, - order, - }); - addPriceScaleSelectorIfNeeded({ paneIndex, seriesType, @@ -652,23 +737,14 @@ export function createChartElement({ } const chart = { + index, + indexName, + legendTop, legendBottom, addFieldsetIfNeeded, - setVisibleLogicalRange, - - /** - * @param {(range: { from: number, to: number } | null) => void} callback - * @param {number} [wait=500] - */ - onVisibleLogicalRangeChange(callback, wait = 500) { - ichart - .timeScale() - .subscribeVisibleLogicalRangeChange(throttle(callback, wait)); - }, - /** * @param {Object} args * @param {string} args.name @@ -698,19 +774,14 @@ export function createChartElement({ const defaultRed = inverse ? colors.green : colors.red; const upColor = customColors?.[0] ?? defaultGreen; const downColor = customColors?.[1] ?? defaultRed; - let showLine = shouldShowLine(); /** @type {CandlestickISeries} */ const candlestickISeries = /** @type {any} */ ( ichart.addSeries( /** @type {SeriesDefinition<'Candlestick'>} */ (CandlestickSeries), { - upColor: upColor(), - downColor: downColor(), - wickUpColor: upColor(), - wickDownColor: downColor(), - borderVisible: false, visible: false, + borderVisible: false, ...options, }, paneIndex, @@ -722,17 +793,45 @@ export function createChartElement({ ichart.addSeries( /** @type {SeriesDefinition<'Line'>} */ (LineSeries), { - color: colors.default(), - lineWidth, visible: false, - priceLineVisible: false, + lineWidth, + priceLineVisible: true, }, paneIndex, ) ); - // Marker plugin always on candlestick (has true min/max via high/low) - const markerPlugin = createSeriesMarkers(candlestickISeries, [], { autoScale: false }); + let active = defaultActive !== false; + let highlighted = true; + let showLine = visibleBarsCount > 500; + let dataLoaded = false; + + function update() { + candlestickISeries.applyOptions({ + visible: active && !showLine, + lastValueVisible: highlighted, + upColor: upColor.highlight(highlighted), + 
downColor: downColor.highlight(highlighted), + wickUpColor: upColor.highlight(highlighted), + wickDownColor: downColor.highlight(highlighted), + }); + lineISeries.applyOptions({ + visible: active && showLine, + lastValueVisible: highlighted, + color: colors.default.highlight(highlighted), + }); + } + + /** @type {ZoomChangeCallback} */ + function handleZoom(count) { + if (!dataLoaded) return; // Ignore zoom changes until data is ready + const newShowLine = count > 500; + if (newShowLine === showLine) return; + showLine = newShowLine; + update(); + } + onZoomChange.add(handleZoom); + const removeSeriesThemeListener = onThemeChange(update); const series = addSeries({ colors: [upColor, downColor], @@ -744,40 +843,29 @@ export function createChartElement({ data, defaultActive, metric, - setup: ({ active, highlighted }) => { + setOrder(order) { candlestickISeries.setSeriesOrder(order); lineISeries.setSeriesOrder(order); - signals.createEffect( - () => ({ - shouldShow: shouldShowLine(), - active: active(), - highlighted: highlighted(), - barsCount: visibleBarsCount(), - }), - ({ shouldShow, active, highlighted, barsCount }) => { - if (barsCount === Infinity) return; - const wasLine = showLine; - showLine = shouldShow; - // Use transparent when showing the other mode, otherwise use highlight - const up = showLine ? "transparent" : upColor.highlight(highlighted); - const down = showLine ? "transparent" : downColor.highlight(highlighted); - const line = showLine ? 
colors.default.highlight(highlighted) : "transparent"; - candlestickISeries.applyOptions({ - visible: active, - upColor: up, - downColor: down, - wickUpColor: up, - wickDownColor: down, - }); - lineISeries.applyOptions({ - visible: active, - color: line, - priceLineVisible: active && showLine, - }); - if (wasLine !== showLine && shouldUpdateMarkers()) - markers.scheduleUpdate(); - }, - ); + }, + show() { + if (active) return; + active = true; + update(); + }, + hide() { + if (!active) return; + active = false; + update(); + }, + highlight() { + if (highlighted) return; + highlighted = true; + update(); + }, + tame() { + if (!highlighted) return; + highlighted = false; + update(); }, setData: (data) => { candlestickISeries.setData(data); @@ -789,12 +877,16 @@ export function createChartElement({ lineISeries.update({ time: data.time, value: data.close }); }, getData: () => candlestickISeries.data(), - setMarkers: (m) => markerPlugin.setMarkers(m), - clearMarkers: () => markerPlugin.setMarkers([]), onRemove: () => { + onZoomChange.delete(handleZoom); + removeSeriesThemeListener(); ichart.removeSeries(candlestickISeries); ichart.removeSeries(lineISeries); }, + onDataLoaded: () => { + dataLoaded = true; + update(); + }, }); return series; }, @@ -830,8 +922,6 @@ export function createChartElement({ ichart.addSeries( /** @type {SeriesDefinition<'Histogram'>} */ (HistogramSeries), { - color: positiveColor(), - visible: defaultActive !== false, priceLineVisible: false, ...options, }, @@ -839,7 +929,18 @@ export function createChartElement({ ) ); - const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false }); + let active = defaultActive !== false; + let highlighted = true; + + function update() { + iseries.applyOptions({ + visible: active, + lastValueVisible: highlighted, + color: positiveColor.highlight(highlighted), + }); + } + update(); + const removeSeriesThemeListener = onThemeChange(update); const series = addSeries({ colors: isDualColor ? 
[positiveColor, negativeColor] : [positiveColor], @@ -851,17 +952,26 @@ export function createChartElement({ data, defaultActive, metric, - setup: ({ active, highlighted }) => { - iseries.setSeriesOrder(order); - signals.createEffect( - () => ({ active: active(), highlighted: highlighted() }), - ({ active, highlighted }) => { - iseries.applyOptions({ - visible: active, - color: positiveColor.highlight(highlighted), - }); - }, - ); + setOrder: (order) => iseries.setSeriesOrder(order), + show() { + if (active) return; + active = true; + update(); + }, + hide() { + if (!active) return; + active = false; + update(); + }, + highlight() { + if (highlighted) return; + highlighted = true; + update(); + }, + tame() { + if (!highlighted) return; + highlighted = false; + update(); }, setData: (data) => { if (isDualColor) { @@ -880,9 +990,10 @@ export function createChartElement({ }, update: (data) => iseries.update(data), getData: () => iseries.data(), - setMarkers: (m) => markerPlugin.setMarkers(m), - clearMarkers: () => markerPlugin.setMarkers([]), - onRemove: () => ichart.removeSeries(iseries), + onRemove: () => { + removeSeriesThemeListener(); + ichart.removeSeries(iseries); + }, }); return series; }, @@ -903,13 +1014,14 @@ export function createChartElement({ name, unit, order, - color, + color: _color, paneIndex = 0, defaultActive, data, options, }) { - color ||= unit.id === "usd" ? colors.green : colors.orange; + const color = + _color ?? (unit.id === "usd" ? 
colors.green : colors.orange); /** @type {LineISeries} */ const iseries = /** @type {any} */ ( @@ -917,16 +1029,25 @@ export function createChartElement({ /** @type {SeriesDefinition<'Line'>} */ (LineSeries), { lineWidth, - visible: defaultActive !== false, priceLineVisible: false, - color: color(), ...options, }, paneIndex, ) ); - const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false }); + let active = defaultActive !== false; + let highlighted = true; + + function update() { + iseries.applyOptions({ + visible: active, + lastValueVisible: highlighted, + color: color.highlight(highlighted), + }); + } + update(); + const removeSeriesThemeListener = onThemeChange(update); const series = addSeries({ colors: [color], @@ -938,24 +1059,34 @@ export function createChartElement({ data, defaultActive, metric, - setup: ({ active, highlighted }) => { - iseries.setSeriesOrder(order); - signals.createEffect( - () => ({ active: active(), highlighted: highlighted() }), - ({ active, highlighted }) => { - iseries.applyOptions({ - visible: active, - color: color.highlight(highlighted), - }); - }, - ); + setOrder: (order) => iseries.setSeriesOrder(order), + show() { + if (active) return; + active = true; + update(); + }, + hide() { + if (!active) return; + active = false; + update(); + }, + highlight() { + if (highlighted) return; + highlighted = true; + update(); + }, + tame() { + if (!highlighted) return; + highlighted = false; + update(); }, setData: (data) => iseries.setData(data), update: (data) => iseries.update(data), getData: () => iseries.data(), - setMarkers: (m) => markerPlugin.setMarkers(m), - clearMarkers: () => markerPlugin.setMarkers([]), - onRemove: () => ichart.removeSeries(iseries), + onRemove: () => { + removeSeriesThemeListener(); + ichart.removeSeries(iseries); + }, }); return series; }, @@ -976,22 +1107,21 @@ export function createChartElement({ name, unit, order, - color, + color: _color, paneIndex = 0, defaultActive, data, options, }) { - 
color ||= unit.id === "usd" ? colors.green : colors.orange; + const color = + _color ?? (unit.id === "usd" ? colors.green : colors.orange); /** @type {LineISeries} */ const iseries = /** @type {any} */ ( ichart.addSeries( /** @type {SeriesDefinition<'Line'>} */ (LineSeries), { - visible: defaultActive !== false, priceLineVisible: false, - color: color(), lineVisible: false, pointMarkersVisible: true, pointMarkersRadius: 1, @@ -1001,7 +1131,29 @@ export function createChartElement({ ) ); - const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false }); + let active = defaultActive !== false; + let highlighted = true; + let radius = + visibleBarsCount > 1000 ? 1 : visibleBarsCount > 200 ? 1.5 : 2; + + function update() { + iseries.applyOptions({ + visible: active, + lastValueVisible: highlighted, + color: color.highlight(highlighted), + }); + } + update(); + + /** @type {ZoomChangeCallback} */ + function handleZoom(count) { + const newRadius = count > 1000 ? 1 : count > 200 ? 1.5 : 2; + if (newRadius === radius) return; + radius = newRadius; + iseries.applyOptions({ pointMarkersRadius: radius }); + } + onZoomChange.add(handleZoom); + const removeSeriesThemeListener = onThemeChange(update); const series = addSeries({ colors: [color], @@ -1013,28 +1165,35 @@ export function createChartElement({ data, defaultActive, metric, - setup: ({ active, highlighted }) => { - iseries.setSeriesOrder(order); - signals.createEffect( - () => ({ active: active(), highlighted: highlighted() }), - ({ active, highlighted }) => { - iseries.applyOptions({ - visible: active, - color: color.highlight(highlighted), - }); - }, - ); - signals.createEffect(visibleBarsCountBucket, (bucket) => { - const radius = bucket === 3 ? 1 : bucket >= 1 ? 
1.5 : 2; - iseries.applyOptions({ pointMarkersRadius: radius }); - }); + setOrder: (order) => iseries.setSeriesOrder(order), + show() { + if (active) return; + active = true; + update(); + }, + hide() { + if (!active) return; + active = false; + update(); + }, + highlight() { + if (highlighted) return; + highlighted = true; + update(); + }, + tame() { + if (!highlighted) return; + highlighted = false; + update(); }, setData: (data) => iseries.setData(data), update: (data) => iseries.update(data), getData: () => iseries.data(), - setMarkers: (m) => markerPlugin.setMarkers(m), - clearMarkers: () => markerPlugin.setMarkers([]), - onRemove: () => ichart.removeSeries(iseries), + onRemove: () => { + onZoomChange.delete(handleZoom); + removeSeriesThemeListener(); + ichart.removeSeries(iseries); + }, }); return series; }, @@ -1071,13 +1230,10 @@ export function createChartElement({ /** @type {SeriesDefinition<'Baseline'>} */ (BaselineSeries), { lineWidth, - visible: defaultActive !== false, baseValue: { price: options?.baseValue?.price ?? 
0, }, ...options, - topLineColor: topColor(), - bottomLineColor: bottomColor(), priceLineVisible: false, bottomFillColor1: "transparent", bottomFillColor2: "transparent", @@ -1089,7 +1245,19 @@ export function createChartElement({ ) ); - const markerPlugin = createSeriesMarkers(iseries, [], { autoScale: false }); + let active = defaultActive !== false; + let highlighted = true; + + function update() { + iseries.applyOptions({ + visible: active, + lastValueVisible: highlighted, + topLineColor: topColor.highlight(highlighted), + bottomLineColor: bottomColor.highlight(highlighted), + }); + } + update(); + const removeSeriesThemeListener = onThemeChange(update); const series = addSeries({ colors: [topColor, bottomColor], @@ -1101,28 +1269,42 @@ export function createChartElement({ data, defaultActive, metric, - setup: ({ active, highlighted }) => { - iseries.setSeriesOrder(order); - signals.createEffect( - () => ({ active: active(), highlighted: highlighted() }), - ({ active, highlighted }) => { - iseries.applyOptions({ - visible: active, - topLineColor: topColor.highlight(highlighted), - bottomLineColor: bottomColor.highlight(highlighted), - }); - }, - ); + setOrder: (order) => iseries.setSeriesOrder(order), + show() { + if (active) return; + active = true; + update(); + }, + hide() { + if (!active) return; + active = false; + update(); + }, + highlight() { + if (highlighted) return; + highlighted = true; + update(); + }, + tame() { + if (!highlighted) return; + highlighted = false; + update(); }, setData: (data) => iseries.setData(data), update: (data) => iseries.update(data), getData: () => iseries.data(), - setMarkers: (m) => markerPlugin.setMarkers(m), - clearMarkers: () => markerPlugin.setMarkers([]), - onRemove: () => ichart.removeSeries(iseries), + onRemove: () => { + removeSeriesThemeListener(); + ichart.removeSeries(iseries); + }, }); return series; }, + + destroy() { + removeThemeListener(); + ichart.remove(); + }, }; config?.forEach(({ unit, blueprints }, 
paneIndex) => { @@ -1179,10 +1361,38 @@ export function createChartElement({ }); }); + if (captureElement && canCapture) { + const domain = window.document.createElement("p"); + domain.innerText = window.location.host; + domain.id = "domain"; + + addFieldsetIfNeeded({ + id: "capture", + paneIndex: 0, + position: "ne", + createChild() { + const button = window.document.createElement("button"); + button.id = "capture"; + button.innerText = "capture"; + button.title = "Capture chart as image"; + button.addEventListener("click", async () => { + captureElement.dataset.screenshot = "true"; + captureElement.append(domain); + try { + await capture({ element: captureElement, name: chartId }); + } catch {} + captureElement.removeChild(domain); + captureElement.dataset.screenshot = "false"; + }); + return button; + }, + }); + } + return chart; } /** - * @typedef {typeof createChartElement} CreateChartElement - * @typedef {ReturnType} Chart + * @typedef {typeof createChart} CreateChart + * @typedef {ReturnType} Chart */ diff --git a/website/scripts/chart/legend.js b/website/scripts/chart/legend.js index 93d5b5efe..b8203fa9e 100644 --- a/website/scripts/chart/legend.js +++ b/website/scripts/chart/legend.js @@ -1,13 +1,26 @@ import { createLabeledInput, createSpanName } from "../utils/dom.js"; import { stringToId } from "../utils/format.js"; -/** - * @param {Signals} signals - */ -export function createLegend(signals) { +export function createLegend() { const element = window.document.createElement("legend"); - const hovered = signals.createSignal(/** @type {AnySeries | null} */ (null)); + /** @type {AnySeries | null} */ + let hoveredSeries = null; + /** @type {Map} */ + const seriesColorSpans = new Map(); + + /** @param {AnySeries | null} series */ + function setHovered(series) { + if (hoveredSeries === series) return; + hoveredSeries = series; + for (const [entrySeries, colorSpans] of seriesColorSpans) { + const shouldHighlight = !hoveredSeries || hoveredSeries === 
entrySeries; + shouldHighlight ? entrySeries.highlight() : entrySeries.tame(); + for (const { span, color } of colorSpans) { + span.style.backgroundColor = color.highlight(shouldHighlight); + } + } + } /** @type {HTMLElement[]} */ const legends = []; @@ -44,15 +57,11 @@ export function createLegend(signals) { title: "Click to toggle", inputChecked: series.active(), onClick: () => { - series.active.set(input.checked); + series.setActive(input.checked); }, type: "checkbox", }); - - // Sync checkbox with signal (for shared signals across panes) - signals.createEffect(series.active, (active) => { - input.checked = active; - }); + input.dataset.series = series.key; const spanMain = window.document.createElement("span"); spanMain.classList.add("main"); @@ -62,49 +71,30 @@ export function createLegend(signals) { spanMain.append(spanName); div.append(label); - label.addEventListener("mouseover", () => { - const h = hovered(); - if (!h || h !== series) { - hovered.set(series); - } - }); - label.addEventListener("mouseleave", () => { - hovered.set(null); - }); - - const shouldHighlight = () => !hovered() || hovered() === series; - - // Update series highlighted state - signals.createEffect(shouldHighlight, (shouldHighlight) => { - series.highlighted.set(shouldHighlight); - }); + label.addEventListener("mouseover", () => setHovered(series)); + label.addEventListener("mouseleave", () => setHovered(null)); const spanColors = window.document.createElement("span"); spanColors.classList.add("colors"); spanMain.prepend(spanColors); + /** @type {{ span: HTMLSpanElement, color: Color }[]} */ + const colorSpans = []; colors.forEach((color) => { const spanColor = window.document.createElement("span"); + spanColor.style.backgroundColor = color.highlight(true); spanColors.append(spanColor); - - signals.createEffect( - () => color.highlight(shouldHighlight()), - (c) => { - spanColor.style.backgroundColor = c; - }, - ); + colorSpans.push({ span: spanColor, color }); }); + 
seriesColorSpans.set(series, colorSpans); - const anchor = window.document.createElement("a"); - - signals.createEffect(series.url, (url) => { - if (url) { - anchor.href = url; - anchor.target = "_blank"; - anchor.rel = "noopener noreferrer"; - anchor.title = "Click to view data"; - div.append(anchor); - } - }); + if (series.url) { + const anchor = window.document.createElement("a"); + anchor.href = series.url; + anchor.target = "_blank"; + anchor.rel = "noopener noreferrer"; + anchor.title = "Click to view data"; + div.append(anchor); + } }, /** * @param {number} start diff --git a/website/scripts/chart/markers.js b/website/scripts/chart/markers.js deleted file mode 100644 index ecdb6c4bc..000000000 --- a/website/scripts/chart/markers.js +++ /dev/null @@ -1,129 +0,0 @@ -import { throttle } from "../utils/timing.js"; - -/** - * @param {Object} args - * @param {IChartApi} args.chart - * @param {Accessor>} args.seriesList - * @param {Colors} args.colors - * @param {(value: number) => string} args.formatValue - */ -export function createMinMaxMarkers({ chart, seriesList, colors, formatValue }) { - /** @type {Set} */ - const prevMarkerSeries = new Set(); - - function update() { - const timeScale = chart.timeScale(); - const width = timeScale.width(); - const range = timeScale.getVisibleRange(); - if (!range) return; - - const tLeft = timeScale.coordinateToTime(30); - const tRight = timeScale.coordinateToTime(width - 30); - const t0 = /** @type {number} */ (tLeft ?? range.from); - const t1 = /** @type {number} */ (tRight ?? 
range.to); - const color = colors.gray(); - - /** @type {Map} */ - const byPane = new Map(); - - for (const series of seriesList()) { - if (!series.active() || !series.hasData()) continue; - - const data = series.getData(); - const len = data.length; - if (!len) continue; - - // Binary search for start - let lo = 0, hi = len; - while (lo < hi) { - const mid = (lo + hi) >>> 1; - if (/** @type {number} */ (data[mid].time) < t0) lo = mid + 1; - else hi = mid; - } - if (lo >= len) continue; - - const paneIndex = series.paneIndex; - let pane = byPane.get(paneIndex); - if (!pane) { - pane = { - minV: Infinity, - minT: /** @type {Time} */ (0), - minS: series, - maxV: -Infinity, - maxT: /** @type {Time} */ (0), - maxS: series, - }; - byPane.set(paneIndex, pane); - } - - for (let i = lo; i < len; i++) { - const pt = data[i]; - if (/** @type {number} */ (pt.time) > t1) break; - const v = pt.low ?? pt.value; - const h = pt.high ?? pt.value; - if (v && v < pane.minV) { - pane.minV = v; - pane.minT = pt.time; - pane.minS = series; - } - if (h && h > pane.maxV) { - pane.maxV = h; - pane.maxT = pt.time; - pane.maxS = series; - } - } - } - - // Set new markers - /** @type {Set} */ - const used = new Set(); - for (const { minV, minT, minS, maxV, maxT, maxS } of byPane.values()) { - if (!Number.isFinite(minV) || !Number.isFinite(maxV) || minT === maxT) - continue; - - const minM = /** @type {TimeSeriesMarker} */ ({ - time: minT, - position: "belowBar", - shape: "arrowUp", - color, - size: 0, - text: formatValue(minV), - }); - const maxM = /** @type {TimeSeriesMarker} */ ({ - time: maxT, - position: "aboveBar", - shape: "arrowDown", - color, - size: 0, - text: formatValue(maxV), - }); - - used.add(minS); - used.add(maxS); - if (minS === maxS) { - minS.setMarkers([minM, maxM]); - } else { - minS.setMarkers([minM]); - maxS.setMarkers([maxM]); - } - } - - // Clear stale - for (const s of prevMarkerSeries) { - if (!used.has(s)) s.clearMarkers(); - } - prevMarkerSeries.clear(); - for 
(const s of used) prevMarkerSeries.add(s); - } - - function clear() { - for (const s of prevMarkerSeries) s.clearMarkers(); - prevMarkerSeries.clear(); - } - - return { - update, - scheduleUpdate: throttle(update, 100), - clear, - }; -} diff --git a/website/scripts/chart/oklch.js b/website/scripts/chart/oklch.js index a2ed84141..b4b42bb5f 100644 --- a/website/scripts/chart/oklch.js +++ b/website/scripts/chart/oklch.js @@ -1,100 +1,107 @@ -export function createOklchToRGBA() { - { - /** - * - * @param {readonly [number, number, number, number, number, number, number, number, number]} A - * @param {readonly [number, number, number]} B - * @returns - */ - function multiplyMatrices(A, B) { - return /** @type {const} */ ([ - A[0] * B[0] + A[1] * B[1] + A[2] * B[2], - A[3] * B[0] + A[4] * B[1] + A[5] * B[2], - A[6] * B[0] + A[7] * B[1] + A[8] * B[2], - ]); - } - /** - * @param {readonly [number, number, number]} param0 - */ - function oklch2oklab([l, c, h]) { - return /** @type {const} */ ([ - l, - isNaN(h) ? 0 : c * Math.cos((h * Math.PI) / 180), - isNaN(h) ? 0 : c * Math.sin((h * Math.PI) / 180), - ]); - } - /** - * @param {readonly [number, number, number]} rgb - */ - function srgbLinear2rgb(rgb) { - return rgb.map((c) => - Math.abs(c) > 0.0031308 - ? (c < 0 ? 
-1 : 1) * (1.055 * Math.abs(c) ** (1 / 2.4) - 0.055) - : 12.92 * c, - ); - } - /** - * @param {readonly [number, number, number]} lab - */ - function oklab2xyz(lab) { - const LMSg = multiplyMatrices( - /** @type {const} */ ([ - 1, 0.3963377773761749, 0.2158037573099136, 1, -0.1055613458156586, - -0.0638541728258133, 1, -0.0894841775298119, -1.2914855480194092, - ]), - lab, - ); - const LMS = /** @type {[number, number, number]} */ ( - LMSg.map((val) => val ** 3) - ); - return multiplyMatrices( - /** @type {const} */ ([ - 1.2268798758459243, -0.5578149944602171, 0.2813910456659647, - -0.0405757452148008, 1.112286803280317, -0.0717110580655164, - -0.0763729366746601, -0.4214933324022432, 1.5869240198367816, - ]), - LMS, - ); - } - /** - * @param {readonly [number, number, number]} xyz - */ - function xyz2rgbLinear(xyz) { - return multiplyMatrices( - [ - 3.2409699419045226, -1.537383177570094, -0.4986107602930034, - -0.9692436362808796, 1.8759675015077202, 0.04155505740717559, - 0.05563007969699366, -0.20397695888897652, 1.0569715142428786, - ], - xyz, - ); - } - - /** @param {string} oklch */ - return function (oklch) { - oklch = oklch.replace("oklch(", ""); - oklch = oklch.replace(")", ""); - let splitOklch = oklch.split(" / "); - let alpha = 1; - if (splitOklch.length === 2) { - alpha = Number(splitOklch.pop()?.replace("%", "")) / 100; - } - splitOklch = oklch.split(" "); - const lch = splitOklch.map((v, i) => { - if (!i && v.includes("%")) { - return Number(v.replace("%", "")) / 100; - } else { - return Number(v); - } - }); - const rgb = srgbLinear2rgb( - xyz2rgbLinear( - oklab2xyz(oklch2oklab(/** @type {[number, number, number]} */ (lch))), - ), - ).map((v) => { - return Math.max(Math.min(Math.round(v * 255), 255), 0); - }); - return [...rgb, alpha]; - }; - } +/** + * @param {readonly [number, number, number, number, number, number, number, number, number]} A + * @param {readonly [number, number, number]} B + */ +function multiplyMatrices(A, B) { + return /** 
@type {const} */ ([ + A[0] * B[0] + A[1] * B[1] + A[2] * B[2], + A[3] * B[0] + A[4] * B[1] + A[5] * B[2], + A[6] * B[0] + A[7] * B[1] + A[8] * B[2], + ]); } + +/** @param {readonly [number, number, number]} param0 */ +function oklch2oklab([l, c, h]) { + return /** @type {const} */ ([ + l, + isNaN(h) ? 0 : c * Math.cos((h * Math.PI) / 180), + isNaN(h) ? 0 : c * Math.sin((h * Math.PI) / 180), + ]); +} + +/** @param {readonly [number, number, number]} rgb */ +function srgbLinear2rgb(rgb) { + return rgb.map((c) => + Math.abs(c) > 0.0031308 + ? (c < 0 ? -1 : 1) * (1.055 * Math.abs(c) ** (1 / 2.4) - 0.055) + : 12.92 * c, + ); +} + +/** @param {readonly [number, number, number]} lab */ +function oklab2xyz(lab) { + const LMSg = multiplyMatrices( + [1, 0.3963377773761749, 0.2158037573099136, 1, -0.1055613458156586, + -0.0638541728258133, 1, -0.0894841775298119, -1.2914855480194092], + lab, + ); + const LMS = /** @type {[number, number, number]} */ (LMSg.map((val) => val ** 3)); + return multiplyMatrices( + [1.2268798758459243, -0.5578149944602171, 0.2813910456659647, + -0.0405757452148008, 1.112286803280317, -0.0717110580655164, + -0.0763729366746601, -0.4214933324022432, 1.5869240198367816], + LMS, + ); +} + +/** @param {readonly [number, number, number]} xyz */ +function xyz2rgbLinear(xyz) { + return multiplyMatrices( + [3.2409699419045226, -1.537383177570094, -0.4986107602930034, + -0.9692436362808796, 1.8759675015077202, 0.04155505740717559, + 0.05563007969699366, -0.20397695888897652, 1.0569715142428786], + xyz, + ); +} + +/** @type {Map} */ +const conversionCache = new Map(); + +/** + * Parse oklch string and return rgba tuple + * @param {string} oklch + * @returns {[number, number, number, number] | null} + */ +function parseOklch(oklch) { + if (!oklch.startsWith("oklch(")) return null; + + const cached = conversionCache.get(oklch); + if (cached) return cached; + + let str = oklch.slice(6, -1); // remove "oklch(" and ")" + let alpha = 1; + + const slashIdx = 
str.indexOf(" / "); + if (slashIdx !== -1) { + const alphaPart = str.slice(slashIdx + 3); + alpha = alphaPart.includes("%") + ? Number(alphaPart.replace("%", "")) / 100 + : Number(alphaPart); + str = str.slice(0, slashIdx); + } + + const parts = str.split(" "); + const l = parts[0].includes("%") ? Number(parts[0].replace("%", "")) / 100 : Number(parts[0]); + const c = Number(parts[1]); + const h = Number(parts[2]); + + const rgb = srgbLinear2rgb(xyz2rgbLinear(oklab2xyz(oklch2oklab([l, c, h])))) + .map((v) => Math.max(Math.min(Math.round(v * 255), 255), 0)); + + const result = /** @type {[number, number, number, number]} */ ([...rgb, alpha]); + conversionCache.set(oklch, result); + return result; +} + +/** + * Convert oklch string to rgba string + * @param {string} oklch + * @returns {string} + */ +export function oklchToRgba(oklch) { + const result = parseOklch(oklch); + if (!result) return oklch; + const [r, g, b, a] = result; + return a === 1 ? `rgb(${r}, ${g}, ${b})` : `rgba(${r}, ${g}, ${b}, ${a})`; +} + diff --git a/website/scripts/chart/state.js b/website/scripts/chart/state.js deleted file mode 100644 index 44b80cea7..000000000 --- a/website/scripts/chart/state.js +++ /dev/null @@ -1,59 +0,0 @@ -import { readParam, writeParam } from "../utils/url.js"; -import { readStored, writeToStorage } from "../utils/storage.js"; - -/** - * @typedef {{ from: number | null, to: number | null }} Range - */ - -const RANGES_KEY = "chart-ranges"; -const RANGE_SEP = "_"; - -/** - * @param {Signals} signals - */ -export function createChartState(signals) { - const index = signals.createPersistedSignal({ - storageKey: "chart-index", - urlKey: "index", - defaultValue: /** @type {ChartableIndexName} */ ("date"), - serialize: (v) => v, - deserialize: (s) => /** @type {ChartableIndexName} */ (s), - }); - - // Ranges stored per-index in localStorage only - /** @type {Record} */ - let ranges = {}; - try { - const stored = readStored(RANGES_KEY); - if (stored) ranges = 
JSON.parse(stored); - } catch {} - - // Initialize from URL if present - const urlRange = readParam("range"); - if (urlRange) { - const [from, to] = urlRange.split(RANGE_SEP).map(Number); - if (!isNaN(from) && !isNaN(to)) { - ranges[index()] = { from, to }; - writeToStorage(RANGES_KEY, JSON.stringify(ranges)); - } - } - - return { - index, - /** @returns {Range} */ - range: () => ranges[index()] ?? { from: null, to: null }, - /** @param {Range} value */ - setRange(value) { - ranges[index()] = value; - writeToStorage(RANGES_KEY, JSON.stringify(ranges)); - if (value.from !== null && value.to !== null) { - // Round to 2 decimals for cleaner URLs - const f = Math.floor(value.from * 100) / 100; - const t = Math.floor(value.to * 100) / 100; - writeParam("range", `${f}${RANGE_SEP}${t}`); - } else { - writeParam("range", null); - } - }, - }; -} diff --git a/website/scripts/entry.js b/website/scripts/entry.js index 1364975bd..6c0be0f62 100644 --- a/website/scripts/entry.js +++ b/website/scripts/entry.js @@ -1,7 +1,7 @@ /** * @import * as _ from "./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.d.ts" * - * @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as _CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType as LCSeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateChart, LineStyle, createSeriesMarkers as CreateSeriesMarkers, SeriesMarker, ISeriesMarkersPluginApi } from './modules/lightweight-charts/5.1.0/dist/typings.js' + * @import { IChartApi, ISeriesApi as _ISeriesApi, SeriesDefinition, SingleValueData as _SingleValueData, CandlestickData as 
_CandlestickData, BaselineData as _BaselineData, HistogramData as _HistogramData, SeriesType as LCSeriesType, IPaneApi, LineSeriesPartialOptions as _LineSeriesPartialOptions, HistogramSeriesPartialOptions as _HistogramSeriesPartialOptions, BaselineSeriesPartialOptions as _BaselineSeriesPartialOptions, CandlestickSeriesPartialOptions as _CandlestickSeriesPartialOptions, WhitespaceData, DeepPartial, ChartOptions, Time, LineData as _LineData, createChart as CreateLCChart, LineStyle, createSeriesMarkers as CreateSeriesMarkers, SeriesMarker, ISeriesMarkersPluginApi } from './modules/lightweight-charts/5.1.0/dist/typings.js' * * @import { Signal, Signals, Accessor } from "./signals.js"; * @@ -10,19 +10,18 @@ * * @import { Resources, MetricResource } from './resources.js' * - * @import { SingleValueData, CandlestickData, Series, AnySeries, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, CreateChartElement, Chart, Legend } from "./chart/index.js" + * @import { SingleValueData, CandlestickData, Series, AnySeries, ISeries, HistogramData, LineData, BaselineData, LineSeriesPartialOptions, BaselineSeriesPartialOptions, HistogramSeriesPartialOptions, CandlestickSeriesPartialOptions, Chart, Legend } from "./chart/index.js" * - * @import { Color, ColorName, Colors } from "./utils/colors.js" + * @import { Color, ColorName, Colors } from "./chart/colors.js" * * @import { WebSockets } from "./utils/ws.js" * * @import { Option, PartialChartOption, ChartOption, AnyPartialOption, ProcessedOptionAddons, OptionsTree, SimulationOption, AnySeriesBlueprint, SeriesType, AnyFetchedSeriesBlueprint, TableOption, ExplorerOption, UrlOption, PartialOptionsGroup, OptionsGroup, PartialOptionsTree, UtxoCohortObject, AddressCohortObject, CohortObject, CohortGroupObject, FetchedLineSeriesBlueprint, FetchedBaselineSeriesBlueprint, FetchedHistogramSeriesBlueprint, PartialContext, 
PatternAll, PatternFull, PatternWithAdjusted, PatternWithPercentiles, PatternBasic, CohortAll, CohortFull, CohortWithAdjusted, CohortWithPercentiles, CohortBasic, CohortGroupFull, CohortGroupWithAdjusted, CohortGroupWithPercentiles, CohortGroupBasic, UtxoCohortGroupObject, AddressCohortGroupObject, FetchedDotsSeriesBlueprint, FetchedCandlestickSeriesBlueprint } from "./options/partial.js" * - * @import { line as LineSeriesFn, dots as DotsSeriesFn, candlestick as CandlestickSeriesFn, baseline as BaselineSeriesFn, histogram as HistogramSeriesFn } from "./options/series.js" * * @import { UnitObject as Unit } from "./utils/units.js" * - * @import { ChartableIndexName } from "./panes/chart/index.js"; + * @import { ChartableIndexName } from "./utils/serde.js"; */ // import uFuzzy = require("./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.d.ts"); diff --git a/website/scripts/main.js b/website/scripts/main.js index db1ab7c13..9b4df9583 100644 --- a/website/scripts/main.js +++ b/website/scripts/main.js @@ -1,4 +1,3 @@ -import { createColors } from "./utils/colors.js"; import { webSockets } from "./utils/ws.js"; import * as formatters from "./utils/format.js"; import { onFirstIntersection, getElementById, isHidden } from "./utils/dom.js"; @@ -8,7 +7,7 @@ import { initOptions } from "./options/full.js"; import ufuzzy from "./modules/leeoniya-ufuzzy/1.0.19/dist/uFuzzy.mjs"; import * as leanQr from "./modules/lean-qr/2.7.1/index.mjs"; import { init as initExplorer } from "./panes/_explorer.js"; -import { init as initChart } from "./panes/chart/index.js"; +import { init as initChart } from "./panes/chart.js"; import { init as initTable } from "./panes/table.js"; import { init as initSimulation } from "./panes/_simulation.js"; import { next } from "./utils/timing.js"; @@ -121,18 +120,6 @@ signals.createRoot(() => { console.log(`VERSION = ${brk.VERSION}`); - function initDark() { - const preferredColorSchemeMatchMedia = window.matchMedia( - "(prefers-color-scheme: dark)", - ); - 
const dark = signals.createSignal(preferredColorSchemeMatchMedia.matches); - preferredColorSchemeMatchMedia.addEventListener("change", ({ matches }) => { - dark.set(matches); - }); - return dark; - } - const dark = initDark(); - const qrcode = signals.createSignal(/** @type {string | null} */ (null)); signals.createEffect(webSockets.kraken1dCandle.latest, (latest) => { @@ -159,10 +146,7 @@ signals.createRoot(() => { // } // const lastHeight = createLastHeightResource(); - const colors = createColors(dark); - const options = initOptions({ - colors, signals, brk, qrcode, @@ -234,7 +218,6 @@ signals.createRoot(() => { if (firstTimeLoadingChart) { signals.runWithOwner(owner, () => initChart({ - colors, option: /** @type {Accessor} */ (chartOption), brk, }), @@ -260,11 +243,7 @@ signals.createRoot(() => { simOption.set(option); if (firstTimeLoadingSimulation) { - signals.runWithOwner(owner, () => - initSimulation({ - colors, - }), - ); + signals.runWithOwner(owner, () => initSimulation()); } firstTimeLoadingSimulation = false; @@ -552,6 +531,7 @@ signals.createRoot(() => { qrcode.set(window.location.href); }); + shareDiv.addEventListener("click", () => { qrcode.set(null); }); diff --git a/website/scripts/options/chain.js b/website/scripts/options/chain.js index 67a60b9ec..c9e1df214 100644 --- a/website/scripts/options/chain.js +++ b/website/scripts/options/chain.js @@ -1,6 +1,7 @@ /** Chain section builder - typed tree-based patterns */ import { Unit } from "../utils/units.js"; +import { line, baseline, dots } from "./series.js"; import { satsBtcUsd } from "./shared.js"; /** @@ -12,9 +13,6 @@ export function createChainSection(ctx) { const { colors, brk, - line, - baseline, - dots, createPriceLine, fromSizePattern, fromFullnessPattern, @@ -240,7 +238,7 @@ export function createChainSection(ctx) { name: "Volume", title: "Transaction Volume", bottom: [ - ...satsBtcUsd(ctx, transactions.volume.sentSum, "Sent"), + ...satsBtcUsd( transactions.volume.sentSum, "Sent"), line({ 
metric: transactions.volume.annualizedVolume.sats, name: "annualized", diff --git a/website/scripts/options/cohorts/address.js b/website/scripts/options/cohorts/address.js index e79f18c8d..eba3b37ab 100644 --- a/website/scripts/options/cohorts/address.js +++ b/website/scripts/options/cohorts/address.js @@ -5,6 +5,7 @@ */ import { Unit } from "../../utils/units.js"; +import { line, baseline } from "../series.js"; import { createSingleSupplySeries, createGroupedSupplyTotalSeries, @@ -54,12 +55,12 @@ export function createAddressCohortFolder(ctx, args) { { name: "in profit", title: `Supply In Profit ${title}`, - bottom: createGroupedSupplyInProfitSeries(ctx, list), + bottom: createGroupedSupplyInProfitSeries(list), }, { name: "in loss", title: `Supply In Loss ${title}`, - bottom: createGroupedSupplyInLossSeries(ctx, list), + bottom: createGroupedSupplyInLossSeries(list), }, ], }, @@ -68,7 +69,7 @@ export function createAddressCohortFolder(ctx, args) { { name: "utxo count", title: `UTXO Count ${title}`, - bottom: createUtxoCountSeries(ctx, list, useGroupName), + bottom: createUtxoCountSeries(list, useGroupName), }, // Address count (ADDRESS COHORTS ONLY - fully type safe!) 
@@ -87,7 +88,7 @@ export function createAddressCohortFolder(ctx, args) { { name: "Price", title: `Realized Price ${title}`, - top: createRealizedPriceSeries(ctx, list), + top: createRealizedPriceSeries(list), }, { name: "Ratio", @@ -96,7 +97,6 @@ export function createAddressCohortFolder(ctx, args) { }, ] : createRealizedPriceOptions( - ctx, /** @type {AddressCohortObject} */ (args), title, )), @@ -119,24 +119,22 @@ export function createAddressCohortFolder(ctx, args) { ...createUnrealizedSection(ctx, list, useGroupName, title), // Cost basis section (no percentiles for address cohorts) - ...createCostBasisSection(ctx, list, useGroupName, title), + ...createCostBasisSection(list, useGroupName, title), // Activity section - ...createActivitySection(ctx, list, useGroupName, title), + ...createActivitySection(list, useGroupName, title), ], }; } /** * Create realized price options for single cohort - * @param {PartialContext} ctx * @param {AddressCohortObject} args * @param {string} title * @returns {PartialOptionsTree} */ -function createRealizedPriceOptions(ctx, args, title) { - const { line } = ctx; - const { tree, color } = args; +function createRealizedPriceOptions(args, title) { + const { tree, color } = args; return [ { @@ -163,7 +161,7 @@ function createRealizedPriceOptions(ctx, args, title) { * @returns {AnyFetchedSeriesBlueprint[]} */ function createRealizedCapWithExtras(ctx, list, args, useGroupName) { - const { line, baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; const isSingle = !("list" in args); return list.flatMap(({ color, name, tree }) => [ @@ -196,7 +194,7 @@ function createRealizedCapWithExtras(ctx, list, args, useGroupName) { * @returns {PartialOptionsTree} */ function createRealizedPnlSection(ctx, args, title) { - const { colors, line } = ctx; + const { colors } = ctx; const { realized } = args.tree; return [ @@ -251,7 +249,7 @@ function createRealizedPnlSection(ctx, args, title) { * @returns {PartialOptionsTree} */ function 
createUnrealizedSection(ctx, list, useGroupName, title) { - const { colors, line, baseline } = ctx; + const { colors } = ctx; return [ { @@ -301,15 +299,12 @@ function createUnrealizedSection(ctx, list, useGroupName, title) { /** * Create cost basis section (no percentiles for address cohorts) - * @param {PartialContext} ctx * @param {readonly AddressCohortObject[]} list * @param {boolean} useGroupName * @param {string} title * @returns {PartialOptionsTree} */ -function createCostBasisSection(ctx, list, useGroupName, title) { - const { line } = ctx; - +function createCostBasisSection(list, useGroupName, title) { return [ { name: "Cost Basis", @@ -345,15 +340,12 @@ function createCostBasisSection(ctx, list, useGroupName, title) { /** * Create activity section - * @param {PartialContext} ctx * @param {readonly AddressCohortObject[]} list * @param {boolean} useGroupName * @param {string} title * @returns {PartialOptionsTree} */ -function createActivitySection(ctx, list, useGroupName, title) { - const { line } = ctx; - +function createActivitySection(list, useGroupName, title) { return [ { name: "Activity", diff --git a/website/scripts/options/cohorts/shared.js b/website/scripts/options/cohorts/shared.js index f8da67d69..2b92d0303 100644 --- a/website/scripts/options/cohorts/shared.js +++ b/website/scripts/options/cohorts/shared.js @@ -1,6 +1,7 @@ /** Shared cohort chart section builders */ import { Unit } from "../../utils/units.js"; +import { line } from "../series.js"; import { satsBtcUsd } from "../shared.js"; /** @@ -10,11 +11,11 @@ import { satsBtcUsd } from "../shared.js"; * @returns {AnyFetchedSeriesBlueprint[]} */ export function createSingleSupplySeries(ctx, cohort) { - const { colors, line, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const { tree } = cohort; return [ - ...satsBtcUsd(ctx, tree.supply.total, "Supply", colors.default), + ...satsBtcUsd( tree.supply.total, "Supply", colors.default), ...("supplyRelToCirculatingSupply" in 
tree.relative ? [ line({ @@ -25,9 +26,9 @@ export function createSingleSupplySeries(ctx, cohort) { }), ] : []), - ...satsBtcUsd(ctx, tree.unrealized.supplyInProfit, "In Profit", colors.green), - ...satsBtcUsd(ctx, tree.unrealized.supplyInLoss, "In Loss", colors.red), - ...satsBtcUsd(ctx, tree.supply.halved, "half", colors.gray).map((s) => ({ + ...satsBtcUsd( tree.unrealized.supplyInProfit, "In Profit", colors.green), + ...satsBtcUsd( tree.unrealized.supplyInLoss, "In Loss", colors.red), + ...satsBtcUsd( tree.supply.halved, "half", colors.gray).map((s) => ({ ...s, options: { lineStyle: 4 }, })), @@ -76,11 +77,11 @@ export function createSingleSupplySeries(ctx, cohort) { * @returns {AnyFetchedSeriesBlueprint[]} */ export function createGroupedSupplyTotalSeries(ctx, list) { - const { line, brk } = ctx; + const { brk } = ctx; const constant100 = brk.metrics.constants.constant100; return list.flatMap(({ color, name, tree }) => [ - ...satsBtcUsd(ctx, tree.supply.total, name, color), + ...satsBtcUsd( tree.supply.total, name, color), line({ metric: "supplyRelToCirculatingSupply" in tree.relative @@ -95,15 +96,13 @@ export function createGroupedSupplyTotalSeries(ctx, list) { /** * Create supply in profit series for grouped cohorts - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createGroupedSupplyInProfitSeries(ctx, list) { - const { line } = ctx; - +export function createGroupedSupplyInProfitSeries(list) { + return list.flatMap(({ color, name, tree }) => [ - ...satsBtcUsd(ctx, tree.unrealized.supplyInProfit, name, color), + ...satsBtcUsd( tree.unrealized.supplyInProfit, name, color), ...("supplyInProfitRelToCirculatingSupply" in tree.relative ? 
[ line({ @@ -119,15 +118,13 @@ export function createGroupedSupplyInProfitSeries(ctx, list) { /** * Create supply in loss series for grouped cohorts - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createGroupedSupplyInLossSeries(ctx, list) { - const { line } = ctx; - +export function createGroupedSupplyInLossSeries(list) { + return list.flatMap(({ color, name, tree }) => [ - ...satsBtcUsd(ctx, tree.unrealized.supplyInLoss, name, color), + ...satsBtcUsd( tree.unrealized.supplyInLoss, name, color), ...("supplyInLossRelToCirculatingSupply" in tree.relative ? [ line({ @@ -143,14 +140,11 @@ export function createGroupedSupplyInLossSeries(ctx, list) { /** * Create UTXO count series - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @param {boolean} useGroupName * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createUtxoCountSeries(ctx, list, useGroupName) { - const { line } = ctx; - +export function createUtxoCountSeries(list, useGroupName) { return list.flatMap(({ color, name, tree }) => [ line({ metric: tree.outputs.utxoCount, @@ -169,7 +163,7 @@ export function createUtxoCountSeries(ctx, list, useGroupName) { * @returns {AnyFetchedSeriesBlueprint[]} */ export function createAddressCountSeries(ctx, list, useGroupName) { - const { line, colors } = ctx; + const { colors } = ctx; return list.flatMap(({ color, name, tree }) => [ line({ @@ -183,13 +177,10 @@ export function createAddressCountSeries(ctx, list, useGroupName) { /** * Create realized price series for grouped cohorts - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createRealizedPriceSeries(ctx, list) { - const { line } = ctx; - +export function createRealizedPriceSeries(list) { return list.map(({ color, name, tree }) => line({ metric: tree.realized.realizedPrice, name, color, unit: Unit.usd }), ); @@ -202,7 
+193,7 @@ export function createRealizedPriceSeries(ctx, list) { * @returns {AnyFetchedSeriesBlueprint[]} */ export function createRealizedPriceRatioSeries(ctx, list) { - const { line, createPriceLine } = ctx; + const { createPriceLine } = ctx; return [ ...list.map(({ color, name, tree }) => @@ -219,14 +210,11 @@ export function createRealizedPriceRatioSeries(ctx, list) { /** * Create realized capitalization series - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @param {boolean} useGroupName * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createRealizedCapSeries(ctx, list, useGroupName) { - const { line } = ctx; - +export function createRealizedCapSeries(list, useGroupName) { return list.flatMap(({ color, name, tree }) => [ line({ metric: tree.realized.realizedCap, @@ -239,14 +227,11 @@ export function createRealizedCapSeries(ctx, list, useGroupName) { /** * Create cost basis min/max series (available on all cohorts) - * @param {PartialContext} ctx * @param {readonly CohortObject[]} list * @param {boolean} useGroupName * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createCostBasisMinMaxSeries(ctx, list, useGroupName) { - const { line } = ctx; - +export function createCostBasisMinMaxSeries(list, useGroupName) { return list.flatMap(({ color, name, tree }) => [ line({ metric: tree.costBasis.min, @@ -265,14 +250,11 @@ export function createCostBasisMinMaxSeries(ctx, list, useGroupName) { /** * Create cost basis percentile series (only for cohorts with CostBasisPattern2) - * @param {PartialContext} ctx * @param {readonly CohortWithCostBasisPercentiles[]} list * @param {boolean} useGroupName * @returns {AnyFetchedSeriesBlueprint[]} */ -export function createCostBasisPercentilesSeries(ctx, list, useGroupName) { - const { line } = ctx; - +export function createCostBasisPercentilesSeries(list, useGroupName) { return list.flatMap(({ color, name, tree }) => { const percentiles = tree.costBasis.percentiles; return [ diff 
--git a/website/scripts/options/cohorts/utxo.js b/website/scripts/options/cohorts/utxo.js index 36e5c8491..393101c74 100644 --- a/website/scripts/options/cohorts/utxo.js +++ b/website/scripts/options/cohorts/utxo.js @@ -34,6 +34,7 @@ import { createCostBasisPercentilesSeries, } from "./shared.js"; import { Unit } from "../../utils/units.js"; +import { line, baseline } from "../series.js"; // ============================================================================ // Folder Builders (4 variants based on pattern capabilities) @@ -51,10 +52,10 @@ export function createCohortFolderAll(ctx, args) { name: args.name || "all", tree: [ createSingleSupplyChart(ctx, args, title), - createSingleUtxoCountChart(ctx, args, title), + createSingleUtxoCountChart(args, title), createSingleRealizedSectionWithAdjusted(ctx, args, title), createSingleUnrealizedSectionAll(ctx, args, title), - createSingleCostBasisSectionWithPercentiles(ctx, args, title), + createSingleCostBasisSectionWithPercentiles(args, title), ...createSingleActivitySectionWithAdjusted(ctx, args, title), ], }; @@ -74,11 +75,11 @@ export function createCohortFolderFull(ctx, args) { name: args.name || "all", tree: [ createGroupedSupplySection(ctx, list, title), - createGroupedUtxoCountChart(ctx, list, title), + createGroupedUtxoCountChart(list, title), createGroupedRealizedSectionWithAdjusted(ctx, list, title), createGroupedUnrealizedSectionFull(ctx, list, title), - createGroupedCostBasisSectionWithPercentiles(ctx, list, title), - ...createGroupedActivitySectionWithAdjusted(ctx, list, title), + createGroupedCostBasisSectionWithPercentiles(list, title), + ...createGroupedActivitySectionWithAdjusted(list, title), ], }; } @@ -87,10 +88,10 @@ export function createCohortFolderFull(ctx, args) { name: args.name || "all", tree: [ createSingleSupplyChart(ctx, args, title), - createSingleUtxoCountChart(ctx, args, title), + createSingleUtxoCountChart(args, title), createSingleRealizedSectionWithAdjusted(ctx, args, title), 
createSingleUnrealizedSectionFull(ctx, args, title), - createSingleCostBasisSectionWithPercentiles(ctx, args, title), + createSingleCostBasisSectionWithPercentiles(args, title), ...createSingleActivitySectionWithAdjusted(ctx, args, title), ], }; @@ -110,11 +111,11 @@ export function createCohortFolderWithAdjusted(ctx, args) { name: args.name || "all", tree: [ createGroupedSupplySection(ctx, list, title), - createGroupedUtxoCountChart(ctx, list, title), + createGroupedUtxoCountChart(list, title), createGroupedRealizedSectionWithAdjusted(ctx, list, title), createGroupedUnrealizedSectionWithMarketCap(ctx, list, title), - createGroupedCostBasisSection(ctx, list, title), - ...createGroupedActivitySectionWithAdjusted(ctx, list, title), + createGroupedCostBasisSection(list, title), + ...createGroupedActivitySectionWithAdjusted(list, title), ], }; } @@ -123,10 +124,10 @@ export function createCohortFolderWithAdjusted(ctx, args) { name: args.name || "all", tree: [ createSingleSupplyChart(ctx, args, title), - createSingleUtxoCountChart(ctx, args, title), + createSingleUtxoCountChart(args, title), createSingleRealizedSectionWithAdjusted(ctx, args, title), createSingleUnrealizedSectionWithMarketCap(ctx, args, title), - createSingleCostBasisSection(ctx, args, title), + createSingleCostBasisSection(args, title), ...createSingleActivitySectionWithAdjusted(ctx, args, title), ], }; @@ -146,11 +147,11 @@ export function createCohortFolderWithPercentiles(ctx, args) { name: args.name || "all", tree: [ createGroupedSupplySection(ctx, list, title), - createGroupedUtxoCountChart(ctx, list, title), + createGroupedUtxoCountChart(list, title), createGroupedRealizedSectionBasic(ctx, list, title), createGroupedUnrealizedSectionWithOwnCaps(ctx, list, title), - createGroupedCostBasisSectionWithPercentiles(ctx, list, title), - ...createGroupedActivitySectionBasic(ctx, list, title), + createGroupedCostBasisSectionWithPercentiles(list, title), + ...createGroupedActivitySectionBasic(list, title), 
], }; } @@ -159,10 +160,10 @@ export function createCohortFolderWithPercentiles(ctx, args) { name: args.name || "all", tree: [ createSingleSupplyChart(ctx, args, title), - createSingleUtxoCountChart(ctx, args, title), + createSingleUtxoCountChart(args, title), createSingleRealizedSectionBasic(ctx, args, title), createSingleUnrealizedSectionWithOwnCaps(ctx, args, title), - createSingleCostBasisSectionWithPercentiles(ctx, args, title), + createSingleCostBasisSectionWithPercentiles(args, title), ...createSingleActivitySectionBasic(ctx, args, title), ], }; @@ -182,11 +183,11 @@ export function createCohortFolderBasic(ctx, args) { name: args.name || "all", tree: [ createGroupedSupplySection(ctx, list, title), - createGroupedUtxoCountChart(ctx, list, title), + createGroupedUtxoCountChart(list, title), createGroupedRealizedSectionBasic(ctx, list, title), createGroupedUnrealizedSectionBase(ctx, list, title), - createGroupedCostBasisSection(ctx, list, title), - ...createGroupedActivitySectionBasic(ctx, list, title), + createGroupedCostBasisSection(list, title), + ...createGroupedActivitySectionBasic(list, title), ], }; } @@ -195,10 +196,10 @@ export function createCohortFolderBasic(ctx, args) { name: args.name || "all", tree: [ createSingleSupplyChart(ctx, args, title), - createSingleUtxoCountChart(ctx, args, title), + createSingleUtxoCountChart(args, title), createSingleRealizedSectionBasic(ctx, args, title), createSingleUnrealizedSectionBase(ctx, args, title), - createSingleCostBasisSection(ctx, args, title), + createSingleCostBasisSection(args, title), ...createSingleActivitySectionBasic(ctx, args, title), ], }; @@ -238,12 +239,12 @@ function createGroupedSupplySection(ctx, list, title) { { name: "in profit", title: `Supply In Profit ${title}`, - bottom: createGroupedSupplyInProfitSeries(ctx, list), + bottom: createGroupedSupplyInProfitSeries(list), }, { name: "in loss", title: `Supply In Loss ${title}`, - bottom: createGroupedSupplyInLossSeries(ctx, list), + bottom: 
createGroupedSupplyInLossSeries(list), }, ], }; @@ -251,31 +252,29 @@ function createGroupedSupplySection(ctx, list, title) { /** * Create UTXO count chart for single cohort - * @param {PartialContext} ctx * @param {UtxoCohortObject} cohort * @param {string} title * @returns {PartialChartOption} */ -function createSingleUtxoCountChart(ctx, cohort, title) { +function createSingleUtxoCountChart(cohort, title) { return { name: "utxo count", title: `UTXO Count ${title}`, - bottom: createUtxoCountSeries(ctx, [cohort], false), + bottom: createUtxoCountSeries( [cohort], false), }; } /** * Create UTXO count chart for grouped cohorts - * @param {PartialContext} ctx * @param {readonly UtxoCohortObject[]} list * @param {string} title * @returns {PartialChartOption} */ -function createGroupedUtxoCountChart(ctx, list, title) { +function createGroupedUtxoCountChart(list, title) { return { name: "utxo count", title: `UTXO Count ${title}`, - bottom: createUtxoCountSeries(ctx, list, true), + bottom: createUtxoCountSeries( list, true), }; } @@ -290,7 +289,7 @@ function createSingleRealizedSectionWithAdjusted(ctx, cohort, title) { return { name: "Realized", tree: [ - createSingleRealizedPriceChart(ctx, cohort, title), + createSingleRealizedPriceChart(cohort, title), { name: "capitalization", title: `Realized Capitalization ${title}`, @@ -316,7 +315,7 @@ function createGroupedRealizedSectionWithAdjusted(ctx, list, title) { { name: "Price", title: `Realized Price ${title}`, - top: createRealizedPriceSeries(ctx, list), + top: createRealizedPriceSeries(list), }, { name: "Ratio", @@ -326,7 +325,7 @@ function createGroupedRealizedSectionWithAdjusted(ctx, list, title) { { name: "capitalization", title: `Realized Capitalization ${title}`, - bottom: createGroupedRealizedCapSeries(ctx, list), + bottom: createGroupedRealizedCapSeries(list), }, ...createGroupedRealizedPnlSections(ctx, list, title), createGroupedSoprSectionWithAdjusted(ctx, list, title), @@ -345,7 +344,7 @@ function 
createSingleRealizedSectionBasic(ctx, cohort, title) { return { name: "Realized", tree: [ - createSingleRealizedPriceChart(ctx, cohort, title), + createSingleRealizedPriceChart(cohort, title), { name: "capitalization", title: `Realized Capitalization ${title}`, @@ -371,7 +370,7 @@ function createGroupedRealizedSectionBasic(ctx, list, title) { { name: "Price", title: `Realized Price ${title}`, - top: createRealizedPriceSeries(ctx, list), + top: createRealizedPriceSeries(list), }, { name: "Ratio", @@ -381,7 +380,7 @@ function createGroupedRealizedSectionBasic(ctx, list, title) { { name: "capitalization", title: `Realized Capitalization ${title}`, - bottom: createGroupedRealizedCapSeries(ctx, list), + bottom: createGroupedRealizedCapSeries(list), }, ...createGroupedRealizedPnlSections(ctx, list, title), createGroupedSoprSectionBasic(ctx, list, title), @@ -391,14 +390,12 @@ function createGroupedRealizedSectionBasic(ctx, list, title) { /** * Create realized price chart for single cohort - * @param {PartialContext} ctx * @param {UtxoCohortObject} cohort * @param {string} title * @returns {PartialChartOption} */ -function createSingleRealizedPriceChart(ctx, cohort, title) { - const { line } = ctx; - const { tree, color } = cohort; +function createSingleRealizedPriceChart(cohort, title) { + const { tree, color } = cohort; return { name: "price", @@ -421,7 +418,7 @@ function createSingleRealizedPriceChart(ctx, cohort, title) { * @returns {AnyFetchedSeriesBlueprint[]} */ function createSingleRealizedCapSeries(ctx, cohort) { - const { colors, line, baseline, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const { color, tree } = cohort; return [ @@ -459,13 +456,10 @@ function createSingleRealizedCapSeries(ctx, cohort) { /** * Create realized cap series for grouped cohorts - * @param {PartialContext} ctx * @param {readonly UtxoCohortObject[]} list * @returns {AnyFetchedSeriesBlueprint[]} */ -function createGroupedRealizedCapSeries(ctx, list) { - const { 
line } = ctx; - +function createGroupedRealizedCapSeries(list) { return list.map(({ color, name, tree }) => line({ metric: tree.realized.realizedCap, @@ -486,8 +480,6 @@ function createGroupedRealizedCapSeries(ctx, list) { function createSingleRealizedPnlSection(ctx, cohort, title) { const { colors, - line, - baseline, createPriceLine, fromBlockCountWithUnit, fromBitcoinPatternWithUnit, @@ -598,7 +590,7 @@ function createSingleRealizedPnlSection(ctx, cohort, title) { * @returns {PartialOptionsTree} */ function createGroupedRealizedPnlSections(ctx, list, title) { - const { line, baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return [ { @@ -795,7 +787,7 @@ function createGroupedRealizedPnlSections(ctx, list, title) { * @returns {PartialChartOption} */ function createSingleBaseSoprChart(ctx, cohort, title) { - const { colors, baseline, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const { tree } = cohort; return { @@ -837,7 +829,7 @@ function createSingleBaseSoprChart(ctx, cohort, title) { * @returns {PartialChartOption} */ function createSingleAdjustedSoprChart(ctx, cohort, title) { - const { colors, baseline, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const { tree } = cohort; return { @@ -880,7 +872,7 @@ function createSingleAdjustedSoprChart(ctx, cohort, title) { * @returns {PartialChartOption} */ function createGroupedBaseSoprChart(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Normal", @@ -924,7 +916,7 @@ function createGroupedBaseSoprChart(ctx, list, title) { * @returns {PartialChartOption} */ function createGroupedAdjustedSoprChart(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Adjusted", @@ -1035,7 +1027,7 @@ function createGroupedSoprSectionBasic(ctx, list, title) { * @param {RelativeWithMarketCap} rel */ function 
createUnrealizedPnlRelToMarketCapMetrics(ctx, rel) { - const { colors, line } = ctx; + const { colors } = ctx; return [ line({ metric: rel.unrealizedProfitRelToMarketCap, @@ -1064,7 +1056,7 @@ function createUnrealizedPnlRelToMarketCapMetrics(ctx, rel) { * @param {RelativeWithOwnMarketCap} rel */ function createUnrealizedPnlRelToOwnMarketCapMetrics(ctx, rel) { - const { colors, line, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return [ line({ metric: rel.unrealizedProfitRelToOwnMarketCap, @@ -1095,7 +1087,7 @@ function createUnrealizedPnlRelToOwnMarketCapMetrics(ctx, rel) { * @param {RelativeWithOwnPnl} rel */ function createUnrealizedPnlRelToOwnPnlMetrics(ctx, rel) { - const { colors, line, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return [ line({ metric: rel.unrealizedProfitRelToOwnTotalUnrealizedPnl, @@ -1122,12 +1114,10 @@ function createUnrealizedPnlRelToOwnPnlMetrics(ctx, rel) { } /** - * @param {PartialContext} ctx * @param {RelativeWithMarketCap} rel */ -function createNetUnrealizedPnlRelToMarketCapMetrics(ctx, rel) { - const { baseline } = ctx; - return [ +function createNetUnrealizedPnlRelToMarketCapMetrics(rel) { + return [ baseline({ metric: rel.netUnrealizedPnlRelToMarketCap, name: "Net", @@ -1141,7 +1131,7 @@ function createNetUnrealizedPnlRelToMarketCapMetrics(ctx, rel) { * @param {RelativeWithOwnMarketCap} rel */ function createNetUnrealizedPnlRelToOwnMarketCapMetrics(ctx, rel) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return [ baseline({ metric: rel.netUnrealizedPnlRelToOwnMarketCap, @@ -1157,7 +1147,7 @@ function createNetUnrealizedPnlRelToOwnMarketCapMetrics(ctx, rel) { * @param {RelativeWithOwnPnl} rel */ function createNetUnrealizedPnlRelToOwnPnlMetrics(ctx, rel) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return [ baseline({ metric: rel.netUnrealizedPnlRelToOwnTotalUnrealizedPnl, @@ -1174,7 +1164,7 @@ function 
createNetUnrealizedPnlRelToOwnPnlMetrics(ctx, rel) { * @param {{ unrealized: { totalUnrealizedPnl: AnyMetricPattern, unrealizedProfit: AnyMetricPattern, unrealizedLoss: AnyMetricPattern, negUnrealizedLoss: AnyMetricPattern } }} tree */ function createUnrealizedPnlBaseMetrics(ctx, tree) { - const { colors, line } = ctx; + const { colors } = ctx; return [ line({ metric: tree.unrealized.totalUnrealizedPnl, @@ -1206,12 +1196,10 @@ function createUnrealizedPnlBaseMetrics(ctx, tree) { /** * Base net unrealized metric (always present) - * @param {PartialContext} ctx * @param {{ unrealized: { netUnrealizedPnl: AnyMetricPattern } }} tree */ -function createNetUnrealizedPnlBaseMetric(ctx, tree) { - const { baseline } = ctx; - return baseline({ +function createNetUnrealizedPnlBaseMetric(tree) { + return baseline({ metric: tree.unrealized.netUnrealizedPnl, name: "Net", unit: Unit.ratio, @@ -1249,7 +1237,7 @@ function createSingleUnrealizedSectionAll(ctx, cohort, title) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - createNetUnrealizedPnlBaseMetric(ctx, tree), + createNetUnrealizedPnlBaseMetric(tree), ...createNetUnrealizedPnlRelToOwnPnlMetrics(ctx, tree.relative), createPriceLine({ unit: Unit.usd }), ], @@ -1287,8 +1275,8 @@ function createSingleUnrealizedSectionFull(ctx, cohort, title) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - createNetUnrealizedPnlBaseMetric(ctx, tree), - ...createNetUnrealizedPnlRelToMarketCapMetrics(ctx, tree.relative), + createNetUnrealizedPnlBaseMetric(tree), + ...createNetUnrealizedPnlRelToMarketCapMetrics(tree.relative), ...createNetUnrealizedPnlRelToOwnMarketCapMetrics(ctx, tree.relative), ...createNetUnrealizedPnlRelToOwnPnlMetrics(ctx, tree.relative), createPriceLine({ unit: Unit.usd }), @@ -1326,8 +1314,8 @@ function createSingleUnrealizedSectionWithMarketCap(ctx, cohort, title) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - 
createNetUnrealizedPnlBaseMetric(ctx, tree), - ...createNetUnrealizedPnlRelToMarketCapMetrics(ctx, tree.relative), + createNetUnrealizedPnlBaseMetric(tree), + ...createNetUnrealizedPnlRelToMarketCapMetrics(tree.relative), createPriceLine({ unit: Unit.usd }), createPriceLine({ unit: Unit.pctMcap }), ], @@ -1364,7 +1352,7 @@ function createSingleUnrealizedSectionWithOwnCaps(ctx, cohort, title) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - createNetUnrealizedPnlBaseMetric(ctx, tree), + createNetUnrealizedPnlBaseMetric(tree), ...createNetUnrealizedPnlRelToOwnMarketCapMetrics(ctx, tree.relative), ...createNetUnrealizedPnlRelToOwnPnlMetrics(ctx, tree.relative), createPriceLine({ unit: Unit.usd }), @@ -1400,7 +1388,7 @@ function createSingleUnrealizedSectionBase(ctx, cohort, title) { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, bottom: [ - createNetUnrealizedPnlBaseMetric(ctx, tree), + createNetUnrealizedPnlBaseMetric(tree), createPriceLine({ unit: Unit.usd }), ], }, @@ -1410,13 +1398,11 @@ function createSingleUnrealizedSectionBase(ctx, cohort, title) { /** * Grouped unrealized base charts (profit, loss, total pnl) - * @param {PartialContext} ctx * @param {readonly { color: Color, name: string, tree: { unrealized: PatternAll["unrealized"] } }[]} list * @param {string} title */ -function createGroupedUnrealizedBaseCharts(ctx, list, title) { - const { line } = ctx; - return [ +function createGroupedUnrealizedBaseCharts(list, title) { + return [ { name: "profit", title: `Unrealized Profit ${title}`, @@ -1464,11 +1450,11 @@ function createGroupedUnrealizedBaseCharts(ctx, list, title) { * @returns {PartialOptionsGroup} */ function createGroupedUnrealizedSectionFull(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Unrealized", tree: [ - ...createGroupedUnrealizedBaseCharts(ctx, list, title), + ...createGroupedUnrealizedBaseCharts(list, title), { name: 
"Net pnl", title: `Net Unrealized Profit And Loss ${title}`, @@ -1517,11 +1503,11 @@ function createGroupedUnrealizedSectionFull(ctx, list, title) { * @returns {PartialOptionsGroup} */ function createGroupedUnrealizedSectionWithMarketCap(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Unrealized", tree: [ - ...createGroupedUnrealizedBaseCharts(ctx, list, title), + ...createGroupedUnrealizedBaseCharts(list, title), { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, @@ -1556,11 +1542,11 @@ function createGroupedUnrealizedSectionWithMarketCap(ctx, list, title) { * @returns {PartialOptionsGroup} */ function createGroupedUnrealizedSectionWithOwnCaps(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Unrealized", tree: [ - ...createGroupedUnrealizedBaseCharts(ctx, list, title), + ...createGroupedUnrealizedBaseCharts(list, title), { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, @@ -1602,11 +1588,11 @@ function createGroupedUnrealizedSectionWithOwnCaps(ctx, list, title) { * @returns {PartialOptionsGroup} */ function createGroupedUnrealizedSectionBase(ctx, list, title) { - const { baseline, createPriceLine } = ctx; + const { createPriceLine } = ctx; return { name: "Unrealized", tree: [ - ...createGroupedUnrealizedBaseCharts(ctx, list, title), + ...createGroupedUnrealizedBaseCharts(list, title), { name: "Net pnl", title: `Net Unrealized Profit And Loss ${title}`, @@ -1628,14 +1614,12 @@ function createGroupedUnrealizedSectionBase(ctx, list, title) { /** * Create cost basis section for single cohort WITH percentiles - * @param {PartialContext} ctx * @param {CohortAll | CohortFull | CohortWithPercentiles} cohort * @param {string} title * @returns {PartialOptionsGroup} */ -function createSingleCostBasisSectionWithPercentiles(ctx, cohort, title) { - const { line } = ctx; - const { color, tree } = cohort; 
+function createSingleCostBasisSectionWithPercentiles(cohort, title) { + const { color, tree } = cohort; return { name: "Cost Basis", @@ -1668,7 +1652,7 @@ function createSingleCostBasisSectionWithPercentiles(ctx, cohort, title) { { name: "percentiles", title: `Cost Basis Percentiles ${title}`, - top: createCostBasisPercentilesSeries(ctx, [cohort], false), + top: createCostBasisPercentilesSeries([cohort], false), }, ], }; @@ -1676,14 +1660,11 @@ function createSingleCostBasisSectionWithPercentiles(ctx, cohort, title) { /** * Create cost basis section for grouped cohorts WITH percentiles - * @param {PartialContext} ctx * @param {readonly (CohortFull | CohortWithPercentiles)[]} list * @param {string} title * @returns {PartialOptionsGroup} */ -function createGroupedCostBasisSectionWithPercentiles(ctx, list, title) { - const { line } = ctx; - +function createGroupedCostBasisSectionWithPercentiles(list, title) { return { name: "Cost Basis", tree: [ @@ -1719,14 +1700,12 @@ function createGroupedCostBasisSectionWithPercentiles(ctx, list, title) { /** * Create cost basis section for single cohort (no percentiles) - * @param {PartialContext} ctx * @param {CohortWithAdjusted | CohortBasic} cohort * @param {string} title * @returns {PartialOptionsGroup} */ -function createSingleCostBasisSection(ctx, cohort, title) { - const { line } = ctx; - const { color, tree } = cohort; +function createSingleCostBasisSection(cohort, title) { + const { color, tree } = cohort; return { name: "Cost Basis", @@ -1762,14 +1741,11 @@ function createSingleCostBasisSection(ctx, cohort, title) { /** * Create cost basis section for grouped cohorts (no percentiles) - * @param {PartialContext} ctx * @param {readonly (CohortWithAdjusted | CohortBasic)[]} list * @param {string} title * @returns {PartialOptionsGroup} */ -function createGroupedCostBasisSection(ctx, list, title) { - const { line } = ctx; - +function createGroupedCostBasisSection(list, title) { return { name: "Cost Basis", tree: [ @@ 
-1811,7 +1787,7 @@ function createGroupedCostBasisSection(ctx, list, title) { * @returns {PartialOptionsTree} */ function createSingleActivitySectionWithAdjusted(ctx, cohort, title) { - const { colors, line } = ctx; + const { colors } = ctx; const { tree, color } = cohort; return [ @@ -1925,7 +1901,7 @@ function createSingleActivitySectionWithAdjusted(ctx, cohort, title) { * @returns {PartialOptionsTree} */ function createSingleActivitySectionBasic(ctx, cohort, title) { - const { colors, line } = ctx; + const { colors } = ctx; const { tree, color } = cohort; return [ @@ -2021,14 +1997,11 @@ function createSingleActivitySectionBasic(ctx, cohort, title) { /** * Create activity section for grouped cohorts with adjusted values (for cohorts with RealizedPattern3/4) - * @param {PartialContext} ctx * @param {readonly (CohortFull | CohortWithAdjusted)[]} list * @param {string} title * @returns {PartialOptionsTree} */ -function createGroupedActivitySectionWithAdjusted(ctx, list, title) { - const { line } = ctx; - +function createGroupedActivitySectionWithAdjusted(list, title) { return [ { name: "Sell Side Risk", @@ -2151,14 +2124,11 @@ function createGroupedActivitySectionWithAdjusted(ctx, list, title) { /** * Create activity section for grouped cohorts without adjusted values (for cohorts with RealizedPattern/2) - * @param {PartialContext} ctx * @param {readonly (CohortWithPercentiles | CohortBasic)[]} list * @param {string} title * @returns {PartialOptionsTree} */ -function createGroupedActivitySectionBasic(ctx, list, title) { - const { line } = ctx; - +function createGroupedActivitySectionBasic(list, title) { return [ { name: "Sell Side Risk", diff --git a/website/scripts/options/cointime.js b/website/scripts/options/cointime.js index 0b84dca70..675b39f52 100644 --- a/website/scripts/options/cointime.js +++ b/website/scripts/options/cointime.js @@ -1,6 +1,7 @@ /** Cointime section builder - typed tree-based patterns */ import { Unit } from "../utils/units.js"; +import { 
line, baseline } from "./series.js"; import { satsBtcUsd, priceLines, @@ -25,7 +26,7 @@ function createCointimePriceWithRatioOptions( ctx, { title, legend, price, ratio, color }, ) { - const { line, colors, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const pctUsdMap = percentileUsdMap(colors, ratio); const pctMap = percentileMap(colors, ratio); @@ -54,42 +55,53 @@ function createCointimePriceWithRatioOptions( ), ], bottom: [ - line({ metric: ratio.ratio, name: "Ratio", color, unit: Unit.ratio }), + baseline({ + metric: ratio.ratio, + name: "Ratio", + color, + unit: Unit.ratio, + }), line({ metric: ratio.ratio1wSma, name: "1w SMA", color: colors.lime, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio1mSma, name: "1m SMA", color: colors.teal, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio1ySd.sma, name: "1y SMA", color: colors.sky, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio2ySd.sma, name: "2y SMA", color: colors.indigo, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio4ySd.sma, name: "4y SMA", color: colors.purple, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratioSd.sma, name: "All SMA", color: colors.rose, unit: Unit.ratio, + defaultActive: false, }), ...pctMap.map(({ name: pctName, prop, color: pctColor }) => line({ @@ -174,13 +186,14 @@ function createCointimePriceWithRatioOptions( ...sdPats.map(({ nameAddon, titleAddon, sd }) => ({ name: nameAddon, title: `${title} ${titleAddon} Z-Score`, - top: sdBands(colors, sd).map(({ name: bandName, prop, color: bandColor }) => - line({ - metric: prop, - name: bandName, - color: bandColor, - unit: Unit.usd, - }), + top: sdBands(colors, sd).map( + ({ name: bandName, prop, color: bandColor }) => + line({ + metric: prop, + name: bandName, + color: bandColor, + unit: Unit.usd, + }), ), bottom: [ line({ metric: sd.zscore, name: "Z-Score", color, unit: Unit.sd }), @@ -198,7 +211,7 @@ 
function createCointimePriceWithRatioOptions( * @returns {PartialOptionsGroup} */ export function createCointimeSection(ctx) { - const { colors, brk, line } = ctx; + const { colors, brk } = ctx; const { cointime, distribution, supply } = brk.metrics; const { pricing, cap, activity, supply: cointimeSupply, adjusted } = cointime; const { all } = distribution.utxoCohorts; @@ -348,9 +361,17 @@ export function createCointimeSection(ctx) { name: "Supply", title: "Cointime Supply", bottom: [ - ...satsBtcUsd(ctx, all.supply.total, "All", colors.orange), - ...satsBtcUsd(ctx, cointimeSupply.vaultedSupply, "Vaulted", colors.lime), - ...satsBtcUsd(ctx, cointimeSupply.activeSupply, "Active", colors.rose), + ...satsBtcUsd( all.supply.total, "All", colors.orange), + ...satsBtcUsd( + cointimeSupply.vaultedSupply, + "Vaulted", + colors.lime, + ), + ...satsBtcUsd( + cointimeSupply.activeSupply, + "Active", + colors.rose, + ), ], }, diff --git a/website/scripts/options/context.js b/website/scripts/options/context.js index 011bfbf5a..977310313 100644 --- a/website/scripts/options/context.js +++ b/website/scripts/options/context.js @@ -1,9 +1,4 @@ import { - line, - dots, - candlestick, - baseline, - histogram, fromBlockCount, fromBitcoin, fromBlockSize, @@ -22,27 +17,21 @@ import { createPriceLines, constantLine, } from "./constants.js"; +import { colors } from "../chart/colors.js"; /** * Create a context object with all dependencies for building partial options * @param {Object} args - * @param {Colors} args.colors * @param {BrkClient} args.brk * @returns {PartialContext} */ -export function createContext({ colors, brk }) { +export function createContext({ brk }) { const constants = brk.metrics.constants; return { colors, brk, - // Series helpers - line, - dots, - candlestick, - baseline, - histogram, fromBlockCount: (pattern, title, color) => fromBlockCount(colors, pattern, title, color), fromBitcoin: (pattern, title, color) => diff --git a/website/scripts/options/full.js 
b/website/scripts/options/full.js index 215917ae0..dced42d97 100644 --- a/website/scripts/options/full.js +++ b/website/scripts/options/full.js @@ -10,12 +10,11 @@ import { collect, markUsed, logUnused } from "./unused.js"; /** * @param {Object} args - * @param {Colors} args.colors * @param {Signals} args.signals * @param {BrkClient} args.brk * @param {Signal} args.qrcode */ -export function initOptions({ colors, signals, brk, qrcode }) { +export function initOptions({ signals, brk, qrcode }) { collect(brk.metrics); const LS_SELECTED_KEY = `selected_path`; @@ -33,7 +32,6 @@ export function initOptions({ colors, signals, brk, qrcode }) { const selected = signals.createSignal(/** @type {any} */ (undefined)); const partialOptions = createPartialOptions({ - colors, brk, }); diff --git a/website/scripts/options/market/averages.js b/website/scripts/options/market/averages.js index f6029114f..d1b9da692 100644 --- a/website/scripts/options/market/averages.js +++ b/website/scripts/options/market/averages.js @@ -1,6 +1,7 @@ /** Moving averages section */ import { Unit } from "../../utils/units.js"; +import { line, baseline } from "../series.js"; import { priceLines, percentileUsdMap, @@ -55,7 +56,7 @@ export function createPriceWithRatioOptions( ctx, { title, legend, ratio, color }, ) { - const { line, colors, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; const priceMetric = ratio.price; const pctUsdMap = percentileUsdMap(colors, ratio); @@ -85,42 +86,54 @@ export function createPriceWithRatioOptions( ), ], bottom: [ - line({ metric: ratio.ratio, name: "Ratio", color, unit: Unit.ratio }), + baseline({ + metric: ratio.ratio, + name: "Ratio", + base: 1, + color, + unit: Unit.ratio, + }), line({ metric: ratio.ratio1wSma, name: "1w SMA", color: colors.lime, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio1mSma, name: "1m SMA", color: colors.teal, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio1ySd.sma, name: 
"1y SMA", color: colors.sky, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio2ySd.sma, name: "2y SMA", color: colors.indigo, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratio4ySd.sma, name: "4y SMA", color: colors.purple, unit: Unit.ratio, + defaultActive: false, }), line({ metric: ratio.ratioSd.sma, name: "All SMA", color: colors.rose, unit: Unit.ratio, + defaultActive: false, }), ...pctMap.map(({ name: pctName, prop, color: pctColor }) => line({ @@ -140,13 +153,14 @@ export function createPriceWithRatioOptions( tree: sdPats.map(({ nameAddon, titleAddon, sd }) => ({ name: nameAddon, title: `${title} ${titleAddon} Z-Score`, - top: sdBands(colors, sd).map(({ name: bandName, prop, color: bandColor }) => - line({ - metric: prop, - name: bandName, - color: bandColor, - unit: Unit.usd, - }), + top: sdBands(colors, sd).map( + ({ name: bandName, prop, color: bandColor }) => + line({ + metric: prop, + name: bandName, + color: bandColor, + unit: Unit.usd, + }), ), bottom: [ line({ metric: sd.zscore, name: "Z-Score", color, unit: Unit.sd }), @@ -163,8 +177,7 @@ export function createPriceWithRatioOptions( * @param {ReturnType} averages */ export function createAveragesSection(ctx, averages) { - const { line } = ctx; - + return { name: "Averages", tree: [ diff --git a/website/scripts/options/market/index.js b/website/scripts/options/market/index.js index fc3440552..921fccc6a 100644 --- a/website/scripts/options/market/index.js +++ b/website/scripts/options/market/index.js @@ -2,6 +2,7 @@ import { localhost } from "../../utils/env.js"; import { Unit } from "../../utils/units.js"; +import { line } from "../series.js"; import { buildAverages, createAveragesSection } from "./averages.js"; import { createPerformanceSection } from "./performance.js"; import { createIndicatorsSection } from "./indicators/index.js"; @@ -13,7 +14,7 @@ import { createInvestingSection } from "./investing.js"; * @returns {PartialOptionsGroup} */ export 
function createMarketSection(ctx) { - const { colors, brk, line } = ctx; + const { colors, brk } = ctx; const { market, supply, price } = brk.metrics; const { movingAverage, diff --git a/website/scripts/options/market/indicators/bands.js b/website/scripts/options/market/indicators/bands.js index 3a52bec57..21eeac599 100644 --- a/website/scripts/options/market/indicators/bands.js +++ b/website/scripts/options/market/indicators/bands.js @@ -1,6 +1,7 @@ /** Bands indicators (MinMax, Mayer Multiple) */ import { Unit } from "../../../utils/units.js"; +import { line } from "../../series.js"; /** * Create Bands section @@ -10,7 +11,7 @@ import { Unit } from "../../../utils/units.js"; * @param {Market["movingAverage"]} args.movingAverage */ export function createBandsSection(ctx, { range, movingAverage }) { - const { line, colors } = ctx; + const { colors } = ctx; return { name: "Bands", diff --git a/website/scripts/options/market/indicators/momentum.js b/website/scripts/options/market/indicators/momentum.js index 87ae6c215..f032fc0df 100644 --- a/website/scripts/options/market/indicators/momentum.js +++ b/website/scripts/options/market/indicators/momentum.js @@ -1,6 +1,7 @@ /** Momentum indicators (RSI, StochRSI, Stochastic, MACD) */ import { Unit } from "../../../utils/units.js"; +import { line, histogram } from "../../series.js"; /** * Create Momentum section @@ -8,7 +9,7 @@ import { Unit } from "../../../utils/units.js"; * @param {Market["indicators"]} indicators */ export function createMomentumSection(ctx, indicators) { - const { line, histogram, colors, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return { name: "Momentum", diff --git a/website/scripts/options/market/indicators/onchain.js b/website/scripts/options/market/indicators/onchain.js index b72d00319..52c4c1a5a 100644 --- a/website/scripts/options/market/indicators/onchain.js +++ b/website/scripts/options/market/indicators/onchain.js @@ -1,6 +1,7 @@ /** On-chain indicators (Pi Cycle, 
Puell, NVT, Gini) */ import { Unit } from "../../../utils/units.js"; +import { line } from "../../series.js"; /** * Create On-chain section @@ -10,7 +11,7 @@ import { Unit } from "../../../utils/units.js"; * @param {Market["movingAverage"]} args.movingAverage */ export function createOnchainSection(ctx, { indicators, movingAverage }) { - const { line, colors, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return { name: "On-chain", diff --git a/website/scripts/options/market/indicators/volatility.js b/website/scripts/options/market/indicators/volatility.js index a0d44c522..78ad8a5b7 100644 --- a/website/scripts/options/market/indicators/volatility.js +++ b/website/scripts/options/market/indicators/volatility.js @@ -1,6 +1,7 @@ /** Volatility indicators (Index, True Range, Choppiness, Sharpe, Sortino) */ import { Unit } from "../../../utils/units.js"; +import { line } from "../../series.js"; /** * Create Volatility section @@ -10,7 +11,7 @@ import { Unit } from "../../../utils/units.js"; * @param {Market["range"]} args.range */ export function createVolatilitySection(ctx, { volatility, range }) { - const { line, colors, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return { name: "Volatility", diff --git a/website/scripts/options/market/investing.js b/website/scripts/options/market/investing.js index e790ef894..61d671e53 100644 --- a/website/scripts/options/market/investing.js +++ b/website/scripts/options/market/investing.js @@ -1,6 +1,7 @@ /** Investing section (DCA) */ import { Unit } from "../../utils/units.js"; +import { line, baseline } from "../series.js"; import { satsBtcUsd } from "../shared.js"; import { periodIdToName } from "./utils.js"; @@ -41,7 +42,7 @@ export function buildDcaClasses(colors, dca) { * @param {Market["returns"]} args.returns */ export function createInvestingSection(ctx, { dca, lookback, returns }) { - const { line, baseline, colors, createPriceLine } = ctx; + const { colors, createPriceLine } 
= ctx; const dcaClasses = buildDcaClasses(colors, dca); return { @@ -114,8 +115,8 @@ export function createInvestingSection(ctx, { dca, lookback, returns }) { name: "Stack", title: `${name} DCA vs Lump Sum Stack ($100/day)`, bottom: [ - ...satsBtcUsd(ctx, dcaStack, "DCA", colors.green), - ...satsBtcUsd(ctx, lumpSumStack, "Lump sum", colors.orange), + ...satsBtcUsd(dcaStack, "DCA", colors.green), + ...satsBtcUsd(lumpSumStack, "Lump sum", colors.orange), ], }, ], @@ -164,7 +165,7 @@ export function createInvestingSection(ctx, { dca, lookback, returns }) { title: "DCA Stack by Year ($100/day)", bottom: dcaClasses.flatMap( ({ year, color, defaultActive, stack }) => - satsBtcUsd(ctx, stack, `${year}`, color, { defaultActive }), + satsBtcUsd(stack, `${year}`, color, { defaultActive }), ), }, ], @@ -200,7 +201,7 @@ export function createInvestingSection(ctx, { dca, lookback, returns }) { { name: "Stack", title: `DCA Class ${year} Stack ($100/day)`, - bottom: satsBtcUsd(ctx, stack, "Stack", color), + bottom: satsBtcUsd(stack, "Stack", color), }, ], })), diff --git a/website/scripts/options/market/performance.js b/website/scripts/options/market/performance.js index d6e84ff59..20c045920 100644 --- a/website/scripts/options/market/performance.js +++ b/website/scripts/options/market/performance.js @@ -1,6 +1,7 @@ /** Performance section */ import { Unit } from "../../utils/units.js"; +import { baseline } from "../series.js"; import { periodIdToName } from "./utils.js"; /** @@ -9,7 +10,7 @@ import { periodIdToName } from "./utils.js"; * @param {Market["returns"]} returns */ export function createPerformanceSection(ctx, returns) { - const { colors, baseline, createPriceLine } = ctx; + const { colors, createPriceLine } = ctx; return { name: "Performance", diff --git a/website/scripts/options/partial.js b/website/scripts/options/partial.js index 5378bc5eb..626d40853 100644 --- a/website/scripts/options/partial.js +++ b/website/scripts/options/partial.js @@ -13,6 +13,7 @@ 
import { import { createMarketSection } from "./market/index.js"; import { createChainSection } from "./chain.js"; import { createCointimeSection } from "./cointime.js"; +import { colors } from "../chart/colors.js"; // Re-export types for external consumers export * from "./types.js"; @@ -20,13 +21,12 @@ export * from "./types.js"; /** * Create partial options tree * @param {Object} args - * @param {Colors} args.colors * @param {BrkClient} args.brk * @returns {PartialOptionsTree} */ -export function createPartialOptions({ colors, brk }) { +export function createPartialOptions({ brk }) { // Create context with all helpers - const ctx = createContext({ colors, brk }); + const ctx = createContext({ brk }); // Build cohort data const { diff --git a/website/scripts/options/series.js b/website/scripts/options/series.js index f7212adf8..54cc8a4c0 100644 --- a/website/scripts/options/series.js +++ b/website/scripts/options/series.js @@ -85,6 +85,7 @@ export function candlestick({ * @param {Unit} args.unit * @param {Color | [Color, Color]} [args.color] * @param {boolean} [args.defaultActive] + * @param {number | undefined} [args.base] * @param {BaselineSeriesPartialOptions} [args.options] * @returns {FetchedBaselineSeriesBlueprint} */ @@ -94,6 +96,7 @@ export function baseline({ color, defaultActive, unit, + base, options, }) { const isTuple = Array.isArray(color); @@ -105,7 +108,12 @@ export function baseline({ colors: isTuple ?
color : undefined, unit, defaultActive, - options, + options: { + baseValue: { + price: base, + }, + ...options, + }, }; } diff --git a/website/scripts/options/shared.js b/website/scripts/options/shared.js index 9bd5da545..36a195937 100644 --- a/website/scripts/options/shared.js +++ b/website/scripts/options/shared.js @@ -1,22 +1,22 @@ /** Shared helpers for options */ import { Unit } from "../utils/units.js"; +import { line } from "./series.js"; /** * Create sats/btc/usd line series from a pattern with .sats/.bitcoin/.dollars - * @param {PartialContext} ctx * @param {{ sats: AnyMetricPattern, bitcoin: AnyMetricPattern, dollars: AnyMetricPattern }} pattern * @param {string} name * @param {Color} [color] * @param {{ defaultActive?: boolean }} [options] * @returns {FetchedLineSeriesBlueprint[]} */ -export function satsBtcUsd(ctx, pattern, name, color, options) { +export function satsBtcUsd(pattern, name, color, options) { const { defaultActive } = options || {}; return [ - ctx.line({ metric: pattern.sats, name, color, unit: Unit.sats, defaultActive }), - ctx.line({ metric: pattern.bitcoin, name, color, unit: Unit.btc, defaultActive }), - ctx.line({ metric: pattern.dollars, name, color, unit: Unit.usd, defaultActive }), + line({ metric: pattern.sats, name, color, unit: Unit.sats, defaultActive }), + line({ metric: pattern.bitcoin, name, color, unit: Unit.btc, defaultActive }), + line({ metric: pattern.dollars, name, color, unit: Unit.usd, defaultActive }), ]; } diff --git a/website/scripts/options/types.js b/website/scripts/options/types.js index d7eb0a058..641e1b26f 100644 --- a/website/scripts/options/types.js +++ b/website/scripts/options/types.js @@ -241,11 +241,6 @@ * @typedef {Object} PartialContext * @property {Colors} colors * @property {BrkClient} brk - * @property {LineSeriesFn} line - * @property {DotsSeriesFn} dots - * @property {CandlestickSeriesFn} candlestick - * @property {BaselineSeriesFn} baseline - * @property {HistogramSeriesFn} histogram * 
@property {(pattern: BlockCountPattern, title: string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBlockCount * @property {(pattern: FullnessPattern, title: string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBitcoin * @property {(pattern: AnyStatsPattern, title: string, color?: Color) => AnyFetchedSeriesBlueprint[]} fromBlockSize diff --git a/website/scripts/panes/_simulation.js b/website/scripts/panes/_simulation.js index 60c85e447..ebca9e567 100644 --- a/website/scripts/panes/_simulation.js +++ b/website/scripts/panes/_simulation.js @@ -19,14 +19,11 @@ import { } from "../utils/format.js"; import { serdeDate, serdeOptDate, serdeOptNumber } from "../utils/serde.js"; import signals from "../signals.js"; -import { createChartElement } from "../chart/index.js"; +import { createChart } from "../chart/index.js"; import { resources } from "../resources.js"; +import { colors } from "../chart/colors.js"; -/** - * @param {Object} args - * @param {Colors} args.colors - */ -export function init({ colors }) { +export function init() { /** * @typedef {Object} Frequency * @property {string} name @@ -684,7 +681,7 @@ export function init({ colors }) { /** @type {() => IndexName} */ const index = () => "dateindex"; - createChartElement({ + createChart({ index, parent: resultsElement, signals, @@ -727,7 +724,7 @@ export function init({ colors }) { ], }); - createChartElement({ + createChart({ index, parent: resultsElement, signals, @@ -750,7 +747,7 @@ export function init({ colors }) { ], }); - createChartElement({ + createChart({ index, parent: resultsElement, signals, @@ -779,7 +776,7 @@ export function init({ colors }) { ], }); - createChartElement({ + createChart({ index, parent: resultsElement, signals, @@ -801,7 +798,7 @@ export function init({ colors }) { ], }); - createChartElement({ + createChart({ index, parent: resultsElement, signals, diff --git a/website/scripts/panes/chart.js b/website/scripts/panes/chart.js new file mode 100644 index 
000000000..47a900392 --- /dev/null +++ b/website/scripts/panes/chart.js @@ -0,0 +1,454 @@ +import { + createShadow, + createReactiveChoiceField, + createHeader, +} from "../utils/dom.js"; +import { chartElement } from "../utils/elements.js"; +import { serdeChartableIndex } from "../utils/serde.js"; +import { Unit } from "../utils/units.js"; +import signals from "../signals.js"; +import { createChart } from "../chart/index.js"; +import { webSockets } from "../utils/ws.js"; + +const keyPrefix = "chart"; +const ONE_BTC_IN_SATS = 100_000_000; + +/** + * @param {Object} args + * @param {Accessor} args.option + * @param {BrkClient} args.brk + */ +export function init({ option, brk }) { + chartElement.append(createShadow("left")); + chartElement.append(createShadow("right")); + + const { headerElement, headingElement } = createHeader(); + chartElement.append(headerElement); + + const chart = createChart({ + parent: chartElement, + signals, + id: "charts", + brk, + captureElement: chartElement, + }); + + // Create index selector using chart's index state + const fieldset = createIndexSelector(option, chart); + chartElement.append(fieldset); + + const unitChoices = /** @type {const} */ ([Unit.usd, Unit.sats]); + /** @type {Signal} */ + const topUnit = signals.createPersistedSignal({ + defaultValue: /** @type {Unit} */ (Unit.usd), + storageKey: `${keyPrefix}-price`, + urlKey: "price", + serialize: (u) => u.id, + deserialize: (s) => + /** @type {Unit} */ (unitChoices.find((u) => u.id === s) ?? 
Unit.usd), + }); + const topUnitField = createReactiveChoiceField({ + defaultValue: Unit.usd, + choices: unitChoices, + toKey: (u) => u.id, + toLabel: (u) => u.name, + selected: topUnit, + signals, + sorted: true, + type: "select", + }); + + chart.addFieldsetIfNeeded({ + id: "charts-unit-0", + paneIndex: 0, + position: "nw", + createChild() { + return topUnitField; + }, + }); + + const seriesListTop = /** @type {AnySeries[]} */ ([]); + const seriesListBottom = /** @type {AnySeries[]} */ ([]); + + /** + * @param {Object} params + * @param {AnySeries} params.series + * @param {Unit} params.unit + * @param {IndexName} params.index + */ + function printLatest({ series, unit, index }) { + const _latest = webSockets.kraken1dCandle.latest(); + + if (!_latest) return; + + const latest = { ..._latest }; + + if (unit === Unit.sats) { + latest.open = Math.floor(ONE_BTC_IN_SATS / latest.open); + latest.high = Math.floor(ONE_BTC_IN_SATS / latest.high); + latest.low = Math.floor(ONE_BTC_IN_SATS / latest.low); + latest.close = Math.floor(ONE_BTC_IN_SATS / latest.close); + } + + const last_ = series.getData().at(-1); + if (!last_) return; + const last = { ...last_ }; + + if ("close" in last) { + last.close = latest.close; + } + if ("value" in last) { + last.value = latest.close; + } + const date = new Date(/** @type {number} */ (latest.time) * 1000); + + switch (index) { + case "height": + case "difficultyepoch": + case "halvingepoch": { + if ("close" in last) { + last.low = Math.min(last.low, latest.close); + last.high = Math.max(last.high, latest.close); + } + series.update(last); + break; + } + default: { + if (index === "weekindex") { + date.setUTCDate(date.getUTCDate() - ((date.getUTCDay() + 6) % 7)); + } else if (index === "monthindex") { + date.setUTCDate(1); + } else if (index === "quarterindex") { + const month = date.getUTCMonth(); + date.setUTCMonth(month - (month % 3), 1); + } else if (index === "semesterindex") { + const month = date.getUTCMonth(); + 
date.setUTCMonth(month - (month % 6), 1); + } else if (index === "yearindex") { + date.setUTCMonth(0, 1); + } else if (index === "decadeindex") { + date.setUTCFullYear( + Math.floor(date.getUTCFullYear() / 10) * 10, + 0, + 1, + ); + } else if (index !== "dateindex") { + throw Error("Unsupported"); + } + + const time = date.valueOf() / 1000; + + if (time === last.time) { + if ("close" in last) { + last.low = Math.min(last.low, latest.low); + last.high = Math.max(last.high, latest.high); + } + series.update(last); + } else { + last.time = time; + series.update(last); + } + } + } + } + + signals.createScopedEffect(option, (option) => { + headingElement.innerHTML = option.title; + + const bottomUnits = Array.from(option.bottom.keys()); + + /** @type {Signal | undefined} */ + let bottomUnit; + + if (bottomUnits.length) { + // Storage key based on unit group (sorted unit IDs) so each group remembers its selection + const unitGroupKey = bottomUnits + .map((u) => u.id) + .sort() + .join("-"); + bottomUnit = signals.createPersistedSignal({ + defaultValue: bottomUnits[0], + storageKey: `${keyPrefix}-unit-${unitGroupKey}`, + urlKey: "unit", + serialize: (u) => u.id, + deserialize: (s) => + bottomUnits.find((u) => u.id === s) ?? 
bottomUnits[0], + }); + const field = createReactiveChoiceField({ + defaultValue: bottomUnits[0], + choices: bottomUnits, + toKey: (u) => u.id, + toLabel: (u) => u.name, + selected: bottomUnit, + signals, + sorted: true, + type: "select", + }); + chart.addFieldsetIfNeeded({ + id: "charts-unit-1", + paneIndex: 1, + position: "nw", + createChild() { + return field; + }, + }); + } else { + // Clean up bottom pane when new option has no bottom series + seriesListBottom.forEach((series) => series.remove()); + seriesListBottom.length = 0; + chart.legendBottom.removeFrom(0); + } + + /** + * @param {Object} args + * @param {Map} args.blueprints + * @param {number} args.paneIndex + * @param {Unit} args.unit + * @param {IndexName} args.idx + * @param {AnySeries[]} args.seriesList + * @param {number} args.orderStart + * @param {Legend} args.legend + */ + function createSeriesFromBlueprints({ + blueprints, + paneIndex, + unit, + idx, + seriesList, + orderStart, + legend, + }) { + legend.removeFrom(orderStart); + seriesList.splice(orderStart).forEach((series) => series.remove()); + + blueprints.get(unit)?.forEach((blueprint, order) => { + order += orderStart; + const options = blueprint.options; + const indexes = Object.keys(blueprint.metric.by); + + if (indexes.includes(idx)) { + switch (blueprint.type) { + case "Baseline": { + seriesList.push( + chart.addBaselineSeries({ + metric: blueprint.metric, + name: blueprint.title, + unit, + defaultActive: blueprint.defaultActive, + paneIndex, + options: { + ...options, + topLineColor: + blueprint.color?.() ?? blueprint.colors?.[0](), + bottomLineColor: + blueprint.color?.() ?? 
blueprint.colors?.[1](), + }, + order, + }), + ); + break; + } + case "Histogram": { + seriesList.push( + chart.addHistogramSeries({ + metric: blueprint.metric, + name: blueprint.title, + unit, + color: blueprint.color, + defaultActive: blueprint.defaultActive, + paneIndex, + options, + order, + }), + ); + break; + } + case "Candlestick": { + seriesList.push( + chart.addCandlestickSeries({ + metric: blueprint.metric, + name: blueprint.title, + unit, + colors: blueprint.colors, + defaultActive: blueprint.defaultActive, + paneIndex, + options, + order, + }), + ); + break; + } + case "Dots": { + seriesList.push( + chart.addDotsSeries({ + metric: blueprint.metric, + color: blueprint.color, + name: blueprint.title, + unit, + defaultActive: blueprint.defaultActive, + paneIndex, + options, + order, + }), + ); + break; + } + case "Line": + case undefined: + seriesList.push( + chart.addLineSeries({ + metric: blueprint.metric, + color: blueprint.color, + name: blueprint.title, + unit, + defaultActive: blueprint.defaultActive, + paneIndex, + options, + order, + }), + ); + } + } + }); + } + + // Price series + top pane blueprints: combined effect on index + topUnit + signals.createScopedEffect( + () => ({ idx: chart.index(), unit: topUnit() }), + ({ idx, unit }) => { + // Create price series + /** @type {AnySeries | undefined} */ + let series; + switch (unit) { + case Unit.usd: { + series = chart.addCandlestickSeries({ + metric: brk.metrics.price.usd.ohlc, + name: "Price", + unit, + order: 0, + }); + break; + } + case Unit.sats: { + series = chart.addCandlestickSeries({ + metric: brk.metrics.price.sats.ohlc, + name: "Price", + unit, + inverse: true, + order: 0, + }); + break; + } + } + if (!series) throw Error("Unreachable"); + + seriesListTop[0]?.remove(); + seriesListTop[0] = series; + + // Live price update effect + signals.createEffect( + () => ({ + latest: webSockets.kraken1dCandle.latest(), + hasData: series.hasData(), + }), + ({ latest, hasData }) => { + if (!series || 
!latest || !hasData) return; + printLatest({ series, unit, index: idx }); + }, + ); + + // Top pane blueprint series + createSeriesFromBlueprints({ + blueprints: option.top, + paneIndex: 0, + unit, + idx, + seriesList: seriesListTop, + orderStart: 1, + legend: chart.legendTop, + }); + }, + ); + + // Bottom pane blueprints: combined effect on index + bottomUnit + if (bottomUnit) { + signals.createScopedEffect( + () => ({ idx: chart.index(), unit: bottomUnit() }), + ({ idx, unit }) => { + createSeriesFromBlueprints({ + blueprints: option.bottom, + paneIndex: 1, + unit, + idx, + seriesList: seriesListBottom, + orderStart: 0, + legend: chart.legendBottom, + }); + }, + ); + } + }); +} + +/** + * @param {Accessor} option + * @param {Chart} chart + */ +function createIndexSelector(option, chart) { + const choices_ = /** @satisfies {ChartableIndexName[]} */ ([ + "timestamp", + "date", + "week", + "month", + "quarter", + "semester", + "year", + "decade", + ]); + + /** @type {Accessor} */ + const choices = signals.createMemo(() => { + const o = option(); + + if (!o.top.size && !o.bottom.size) { + return [...choices_]; + } + const rawIndexes = new Set( + [Array.from(o.top.values()), Array.from(o.bottom.values())] + .flat(2) + .filter((blueprint) => { + const path = Object.values(blueprint.metric.by)[0]?.path ?? 
""; + return !path.includes("constant_"); + }) + .flatMap((blueprint) => blueprint.metric.indexes()), + ); + + return /** @type {any} */ ( + choices_.filter((choice) => + rawIndexes.has(serdeChartableIndex.deserialize(choice)), + ) + ); + }); + + /** @type {ChartableIndexName} */ + const defaultIndex = "date"; + const field = createReactiveChoiceField({ + defaultValue: defaultIndex, + selected: chart.indexName, + choices, + id: "index", + signals, + }); + + const fieldset = window.document.createElement("fieldset"); + fieldset.id = "interval"; + + const screenshotSpan = window.document.createElement("span"); + screenshotSpan.innerText = "interval:"; + fieldset.append(screenshotSpan); + + fieldset.append(field); + fieldset.dataset.size = "sm"; + + return fieldset; +} diff --git a/website/scripts/panes/chart/index.js b/website/scripts/panes/chart/index.js deleted file mode 100644 index 40d6617bd..000000000 --- a/website/scripts/panes/chart/index.js +++ /dev/null @@ -1,518 +0,0 @@ -import { - createShadow, - createChoiceField, - createHeader, -} from "../../utils/dom.js"; -import { chartElement } from "../../utils/elements.js"; -import { ios, canShare } from "../../utils/env.js"; -import { serdeChartableIndex } from "../../utils/serde.js"; -import { Unit } from "../../utils/units.js"; -import signals from "../../signals.js"; -import { createChartElement } from "../../chart/index.js"; -import { createChartState } from "../../chart/state.js"; -import { webSockets } from "../../utils/ws.js"; -import { screenshot } from "./screenshot.js"; -import { debounce } from "../../utils/timing.js"; - -const keyPrefix = "chart"; -const ONE_BTC_IN_SATS = 100_000_000; - -/** - * @typedef {"timestamp" | "date" | "week" | "month" | "quarter" | "semester" | "year" | "decade" } ChartableIndexName - */ - -/** - * @param {Object} args - * @param {Colors} args.colors - * @param {Accessor} args.option - * @param {BrkClient} args.brk - */ -export function init({ colors, option, brk }) { - 
chartElement.append(createShadow("left")); - chartElement.append(createShadow("right")); - - const { headerElement, headingElement } = createHeader(); - chartElement.append(headerElement); - - const state = createChartState(signals); - const { fieldset, index } = createIndexSelector(option, state); - - const { from, to } = state.range(); - - const chart = createChartElement({ - parent: chartElement, - signals, - colors, - id: "charts", - brk, - index, - initialVisibleBarsCount: - from !== null && to !== null ? to - from : null, - timeScaleSetCallback: (unknownTimeScaleCallback) => { - const { from, to } = state.range(); - if (from !== null && to !== null) { - chart.setVisibleLogicalRange({ from, to }); - } else { - unknownTimeScaleCallback(); - } - }, - }); - - if (!(ios && !canShare)) { - const domain = window.document.createElement("p"); - domain.innerText = `${window.location.host}`; - domain.id = "domain"; - - chart.addFieldsetIfNeeded({ - id: "capture", - paneIndex: 0, - position: "ne", - createChild() { - const button = window.document.createElement("button"); - button.id = "capture"; - button.innerText = "capture"; - button.title = "Capture chart as image"; - button.addEventListener("click", async () => { - chartElement.dataset.screenshot = "true"; - chartElement.append(domain); - try { - await screenshot({ - element: chartElement, - name: option().path.join("-"), - title: option().title, - }); - } catch {} - chartElement.removeChild(domain); - chartElement.dataset.screenshot = "false"; - }); - return button; - }, - }); - } - - // Sync chart → state.range on user pan/zoom - // Debounce to avoid rapid URL updates while panning - const debouncedSetRange = debounce( - (/** @type {{ from: number, to: number }} */ range) => state.setRange(range), - 500, - ); - chart.onVisibleLogicalRangeChange((t) => { - if (!t || t.from >= t.to) return; - debouncedSetRange({ from: t.from, to: t.to }); - }); - - chartElement.append(fieldset); - - const unitChoices = /** @type 
{const} */ ([Unit.usd, Unit.sats]); - /** @type {Signal} */ - const topUnit = signals.createPersistedSignal({ - defaultValue: /** @type {Unit} */ (Unit.usd), - storageKey: `${keyPrefix}-price`, - urlKey: "price", - serialize: (u) => u.id, - deserialize: (s) => /** @type {Unit} */ (unitChoices.find((u) => u.id === s) ?? Unit.usd), - }); - const topUnitField = createChoiceField({ - defaultValue: Unit.usd, - choices: unitChoices, - toKey: (u) => u.id, - toLabel: (u) => u.name, - selected: topUnit, - signals, - sorted: true, - type: "select", - }); - - chart.addFieldsetIfNeeded({ - id: "charts-unit-0", - paneIndex: 0, - position: "nw", - createChild() { - return topUnitField; - }, - }); - - const seriesListTop = /** @type {AnySeries[]} */ ([]); - const seriesListBottom = /** @type {AnySeries[]} */ ([]); - - /** - * @param {Object} params - * @param {AnySeries} params.series - * @param {Unit} params.unit - * @param {IndexName} params.index - */ - function printLatest({ series, unit, index }) { - const _latest = webSockets.kraken1dCandle.latest(); - - if (!_latest) return; - - const latest = { ..._latest }; - - if (unit === Unit.sats) { - latest.open = Math.floor(ONE_BTC_IN_SATS / latest.open); - latest.high = Math.floor(ONE_BTC_IN_SATS / latest.high); - latest.low = Math.floor(ONE_BTC_IN_SATS / latest.low); - latest.close = Math.floor(ONE_BTC_IN_SATS / latest.close); - } - - const last_ = series.getData().at(-1); - if (!last_) return; - const last = { ...last_ }; - - if ("close" in last) { - last.close = latest.close; - } - if ("value" in last) { - last.value = latest.close; - } - const date = new Date(/** @type {number} */ (latest.time) * 1000); - - switch (index) { - case "height": - case "difficultyepoch": - case "halvingepoch": { - if ("close" in last) { - last.low = Math.min(last.low, latest.close); - last.high = Math.max(last.high, latest.close); - } - series.update(last); - break; - } - default: { - if (index === "weekindex") { - date.setUTCDate(date.getUTCDate() 
- ((date.getUTCDay() + 6) % 7)); - } else if (index === "monthindex") { - date.setUTCDate(1); - } else if (index === "quarterindex") { - const month = date.getUTCMonth(); - date.setUTCMonth(month - (month % 3), 1); - } else if (index === "semesterindex") { - const month = date.getUTCMonth(); - date.setUTCMonth(month - (month % 6), 1); - } else if (index === "yearindex") { - date.setUTCMonth(0, 1); - } else if (index === "decadeindex") { - date.setUTCFullYear( - Math.floor(date.getUTCFullYear() / 10) * 10, - 0, - 1, - ); - } else if (index !== "dateindex") { - throw Error("Unsupported"); - } - - const time = date.valueOf() / 1000; - - if (time === last.time) { - if ("close" in last) { - last.low = Math.min(last.low, latest.low); - last.high = Math.max(last.high, latest.high); - } - series.update(last); - } else { - last.time = time; - series.update(last); - } - } - } - } - - signals.createScopedEffect(option, (option) => { - headingElement.innerHTML = option.title; - - const bottomUnits = Array.from(option.bottom.keys()); - - /** @type {{ field: HTMLDivElement, selected: Signal } | undefined} */ - let bottomUnitSelector; - - if (bottomUnits.length) { - const selected = signals.createPersistedSignal({ - defaultValue: bottomUnits[0], - storageKey: `${keyPrefix}-unit`, - urlKey: "unit", - serialize: (u) => u.id, - deserialize: (s) => bottomUnits.find((u) => u.id === s) ?? 
bottomUnits[0], - }); - const field = createChoiceField({ - defaultValue: bottomUnits[0], - choices: bottomUnits, - toKey: (u) => u.id, - toLabel: (u) => u.name, - selected, - signals, - sorted: true, - type: "select", - }); - bottomUnitSelector = { field, selected }; - chart.addFieldsetIfNeeded({ - id: "charts-unit-1", - paneIndex: 1, - position: "nw", - createChild() { - return field; - }, - }); - } else { - // Clean up bottom pane when new option has no bottom series - seriesListBottom.forEach((series) => series.remove()); - seriesListBottom.length = 0; - chart.legendBottom.removeFrom(0); - } - - signals.createScopedEffect(index, (index) => { - signals.createScopedEffect(topUnit, (topUnit) => { - /** @type {AnySeries | undefined} */ - let series; - - switch (topUnit) { - case Unit.usd: { - series = chart.addCandlestickSeries({ - metric: brk.metrics.price.usd.ohlc, - name: "Price", - unit: topUnit, - order: 0, - }); - break; - } - case Unit.sats: { - series = chart.addCandlestickSeries({ - metric: brk.metrics.price.sats.ohlc, - name: "Price", - unit: topUnit, - inverse: true, - order: 0, - }); - break; - } - } - - if (!series) throw Error("Unreachable"); - - seriesListTop[0]?.remove(); - seriesListTop[0] = series; - - signals.createEffect( - () => ({ - latest: webSockets.kraken1dCandle.latest(), - hasData: series.hasData(), - }), - ({ latest, hasData }) => { - if (!series || !latest || !hasData) return; - printLatest({ series, unit: topUnit, index }); - }, - ); - }); - - /** - * @param {Object} args - * @param {Map} args.blueprints - * @param {number} args.paneIndex - * @param {Accessor} args.unit - * @param {AnySeries[]} args.seriesList - * @param {number} args.orderStart - * @param {Legend} args.legend - */ - function processPane({ - blueprints, - paneIndex, - unit, - seriesList, - orderStart, - legend, - }) { - signals.createScopedEffect(unit, (unit) => { - legend.removeFrom(orderStart); - - seriesList.splice(orderStart).forEach((series) => { - 
series.remove(); - }); - - blueprints.get(unit)?.forEach((blueprint, order) => { - order += orderStart; - - const options = blueprint.options; - - // Tree-first: metric is now an accessor with .by property - const indexes = Object.keys(blueprint.metric.by); - - if (indexes.includes(index)) { - switch (blueprint.type) { - case "Baseline": { - seriesList.push( - chart.addBaselineSeries({ - metric: blueprint.metric, - name: blueprint.title, - unit, - defaultActive: blueprint.defaultActive, - paneIndex, - options: { - ...options, - topLineColor: - blueprint.color?.() ?? blueprint.colors?.[0](), - bottomLineColor: - blueprint.color?.() ?? blueprint.colors?.[1](), - }, - order, - }), - ); - break; - } - case "Histogram": { - seriesList.push( - chart.addHistogramSeries({ - metric: blueprint.metric, - name: blueprint.title, - unit, - color: blueprint.color, - defaultActive: blueprint.defaultActive, - paneIndex, - options, - order, - }), - ); - break; - } - case "Candlestick": { - seriesList.push( - chart.addCandlestickSeries({ - metric: blueprint.metric, - name: blueprint.title, - unit, - colors: blueprint.colors, - defaultActive: blueprint.defaultActive, - paneIndex, - options, - order, - }), - ); - break; - } - case "Dots": { - seriesList.push( - chart.addDotsSeries({ - metric: blueprint.metric, - color: blueprint.color, - name: blueprint.title, - unit, - defaultActive: blueprint.defaultActive, - paneIndex, - options, - order, - }), - ); - break; - } - case "Line": - case undefined: - seriesList.push( - chart.addLineSeries({ - metric: blueprint.metric, - color: blueprint.color, - name: blueprint.title, - unit, - defaultActive: blueprint.defaultActive, - paneIndex, - options, - order, - }), - ); - } - } - }); - }); - } - - processPane({ - blueprints: option.top, - paneIndex: 0, - unit: topUnit, - seriesList: seriesListTop, - orderStart: 1, - legend: chart.legendTop, - }); - - if (bottomUnitSelector) { - processPane({ - blueprints: option.bottom, - paneIndex: 1, - unit: 
bottomUnitSelector.selected, - seriesList: seriesListBottom, - orderStart: 0, - legend: chart.legendBottom, - }); - } - - }); - }); -} - -/** - * @param {Accessor} option - * @param {ReturnType} state - */ -function createIndexSelector(option, state) { - const choices_ = /** @satisfies {ChartableIndexName[]} */ ([ - "timestamp", - "date", - "week", - "month", - "quarter", - "semester", - "year", - "decade", - ]); - - /** @type {Accessor} */ - const choices = signals.createMemo(() => { - const o = option(); - - if (!o.top.size && !o.bottom.size) { - return [...choices_]; - } - const rawIndexes = new Set( - [Array.from(o.top.values()), Array.from(o.bottom.values())] - .flat(2) - .filter((blueprint) => { - const path = Object.values(blueprint.metric.by)[0]?.path ?? ""; - return !path.includes("constant_"); - }) - .flatMap((blueprint) => blueprint.metric.indexes()), - ); - - const serializedIndexes = [...rawIndexes].flatMap((index) => { - const c = serdeChartableIndex.serialize(index); - return c ? 
[c] : []; - }); - - return /** @type {any} */ ( - choices_.filter((choice) => serializedIndexes.includes(choice)) - ); - }); - - /** @type {ChartableIndexName} */ - const defaultIndex = "date"; - const field = createChoiceField({ - defaultValue: defaultIndex, - selected: state.index, - choices, - id: "index", - signals, - }); - - const fieldset = window.document.createElement("fieldset"); - fieldset.id = "interval"; - - const screenshotSpan = window.document.createElement("span"); - screenshotSpan.innerText = "interval:"; - fieldset.append(screenshotSpan); - - fieldset.append(field); - fieldset.dataset.size = "sm"; - - // Convert short name to internal name - const index = signals.createMemo(() => - serdeChartableIndex.deserialize(state.index()), - ); - - return { fieldset, index }; -} diff --git a/website/scripts/resources.js b/website/scripts/resources.js index f5cffc40b..93780cf51 100644 --- a/website/scripts/resources.js +++ b/website/scripts/resources.js @@ -87,7 +87,7 @@ function useMetricEndpoint(endpoint) { * @param {number} [to] * @returns {RangeState} */ - function range(from, to) { + function range(from = -10000, to) { const key = `${from}-${to ?? 
""}`; const existing = ranges.get(key); if (existing) return existing; @@ -111,7 +111,7 @@ function useMetricEndpoint(endpoint) { * @param {number} [start=-10000] * @param {number} [end] */ - async fetch(start, end) { + async fetch(start = -10000, end) { const r = range(start, end); r.loading.set(true); try { diff --git a/website/scripts/signals.js b/website/scripts/signals.js index 9762a027f..e5cac6061 100644 --- a/website/scripts/signals.js +++ b/website/scripts/signals.js @@ -23,11 +23,9 @@ import { runWithOwner, onCleanup, } from "./modules/solidjs-signals/0.6.3/dist/prod.js"; -import { debounce } from "./utils/timing.js"; -import { writeParam, readParam } from "./utils/url.js"; -import { readStored, writeToStorage } from "./utils/storage.js"; +import { createPersistedValue } from "./utils/persisted.js"; -let effectCount = 0; +// let effectCount = 0; const signals = { createSolidSignal: /** @type {typeof CreateSignal} */ (createSignal), @@ -45,13 +43,13 @@ const signals = { if (dispose) { dispose(); dispose = null; - console.log("effectCount = ", --effectCount); + // console.log("effectCount = ", --effectCount); } } // @ts-ignore createEffect(compute, (v, oldV) => { - console.log("effectCount = ", ++effectCount); + // console.log("effectCount = ", ++effectCount); cleanup(); signals.createRoot((_dispose) => { dispose = _dispose; @@ -74,7 +72,10 @@ const signals = { * @returns {Signal} */ createSignal(initialValue, options) { - const [get, set] = this.createSolidSignal(/** @type {any} */ (initialValue), options); + const [get, set] = this.createSolidSignal( + /** @type {any} */ (initialValue), + options, + ); // @ts-ignore get.set = set; @@ -104,42 +105,24 @@ const signals = { deserialize, saveDefaultValue = false, }) { - const defaultSerialized = serialize(defaultValue); + const persisted = createPersistedValue({ + defaultValue, + storageKey, + urlKey, + serialize, + deserialize, + saveDefaultValue, + }); - // Read: URL > localStorage > default - let serialized 
= urlKey ? readParam(urlKey) : null; - if (serialized === null) { - serialized = readStored(storageKey); - } - const initialValue = serialized !== null ? deserialize(serialized) : defaultValue; - - const signal = this.createSignal(initialValue); - - /** @param {T} value */ - const write = (value) => { - const s = serialize(value); - const isDefault = s === defaultSerialized; - - if (!isDefault || saveDefaultValue) { - writeToStorage(storageKey, s); - } else { - writeToStorage(storageKey, null); - } - - if (urlKey) { - writeParam(urlKey, !isDefault || saveDefaultValue ? s : null); - } - }; - - const debouncedWrite = debounce(write, 250); + const signal = this.createSignal(persisted.value); + // Sync signal changes to persisted storage let firstRun = true; this.createEffect(signal, (value) => { if (firstRun) { - write(value); firstRun = false; } else { - debouncedWrite(value); + persisted.set(value); } }); diff --git a/website/scripts/utils/colors.js b/website/scripts/utils/colors.js deleted file mode 100644 index b29ee77bc..000000000 --- a/website/scripts/utils/colors.js +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Reduce color opacity to 50% for dimming effect - * @param {string} color - oklch color string - */ -export function tameColor(color) { - if (color === "transparent") return color; - return `${color.slice(0, -1)} / 50%)`; -} - -/** - * @typedef {Object} ColorMethods - * @property {() => string} tame - Returns tamed (50% opacity) version - * @property {(highlighted: boolean) => string} highlight - Returns normal if highlighted, tamed otherwise - */ - -/** - * @typedef {(() => string) & ColorMethods} Color - */ - -/** - * Creates a Color object that is callable and has utility methods - * @param {() => string} getter - * @returns {Color} - */ -function createColor(getter) { - const color = /** @type {Color} */ (() => getter()); - color.tame = () => tameColor(getter()); - color.highlight = (highlighted) => highlighted ? 
getter() : tameColor(getter()); - return color; -} - -/** - * @param {Accessor} dark - */ -export function createColors(dark) { - const globalComputedStyle = getComputedStyle(window.document.documentElement); - - /** - * @param {string} name - */ - function getColor(name) { - return globalComputedStyle.getPropertyValue(`--${name}`); - } - - /** - * @param {string} property - */ - function getLightDarkValue(property) { - const value = globalComputedStyle.getPropertyValue(property); - const [light, _dark] = value.slice(11, -1).split(", "); - return dark() ? _dark : light; - } - - return { - default: createColor(() => getLightDarkValue("--color")), - gray: createColor(() => getColor("gray")), - border: createColor(() => getLightDarkValue("--border-color")), - - red: createColor(() => getColor("red")), - orange: createColor(() => getColor("orange")), - amber: createColor(() => getColor("amber")), - yellow: createColor(() => getColor("yellow")), - avocado: createColor(() => getColor("avocado")), - lime: createColor(() => getColor("lime")), - green: createColor(() => getColor("green")), - emerald: createColor(() => getColor("emerald")), - teal: createColor(() => getColor("teal")), - cyan: createColor(() => getColor("cyan")), - sky: createColor(() => getColor("sky")), - blue: createColor(() => getColor("blue")), - indigo: createColor(() => getColor("indigo")), - violet: createColor(() => getColor("violet")), - purple: createColor(() => getColor("purple")), - fuchsia: createColor(() => getColor("fuchsia")), - pink: createColor(() => getColor("pink")), - rose: createColor(() => getColor("rose")), - }; -} - -/** - * @typedef {ReturnType} Colors - * @typedef {keyof Colors} ColorName - */ diff --git a/website/scripts/utils/dom.js b/website/scripts/utils/dom.js index 4bcf8671f..579752fb1 100644 --- a/website/scripts/utils/dom.js +++ b/website/scripts/utils/dom.js @@ -225,7 +225,7 @@ export function importStyle(href) { * @param {(choice: T) => string} [args.toLabel] - Extract 
display label (defaults to identity for strings) * @param {"radio" | "select"} [args.type] - Render as radio buttons or select dropdown */ -export function createChoiceField({ +export function createReactiveChoiceField({ id, choices: unsortedChoices, defaultValue, @@ -257,7 +257,8 @@ export function createChoiceField({ }); /** @param {string} key */ - const fromKey = (key) => choices().find((c) => toKey(c) === key) ?? defaultValue; + const fromKey = (key) => + choices().find((c) => toKey(c) === key) ?? defaultValue; const field = window.document.createElement("div"); field.classList.add("field"); @@ -354,6 +355,85 @@ export function createChoiceField({ return field; } +/** + * @template T + * @param {Object} args + * @param {T} args.initialValue + * @param {string} [args.id] + * @param {readonly T[]} args.choices + * @param {(value: T) => void} [args.onChange] + * @param {(choice: T) => string} [args.toKey] + * @param {(choice: T) => string} [args.toLabel] + * @param {"radio" | "select"} [args.type] + */ +export function createChoiceField({ + id, + choices, + initialValue, + onChange, + toKey = /** @type {(choice: T) => string} */ ((/** @type {any} */ c) => c), + toLabel = /** @type {(choice: T) => string} */ ((/** @type {any} */ c) => c), + type = "radio", +}) { + const field = window.document.createElement("div"); + field.classList.add("field"); + + const div = window.document.createElement("div"); + field.append(div); + + const initialKey = toKey(initialValue); + + /** @param {string} key */ + const fromKey = (key) => + choices.find((c) => toKey(c) === key) ?? initialValue; + + if (type === "select") { + const select = window.document.createElement("select"); + select.id = id ?? ""; + select.name = id ?? 
""; + + choices.forEach((choice) => { + const option = window.document.createElement("option"); + option.value = toKey(choice); + option.textContent = toLabel(choice); + if (toKey(choice) === initialKey) { + option.selected = true; + } + select.append(option); + }); + + select.addEventListener("change", () => { + onChange?.(fromKey(select.value)); + }); + + div.append(select); + } else { + const fieldId = id ?? ""; + choices.forEach((choice) => { + const choiceKey = toKey(choice); + const choiceLabel = toLabel(choice); + const { label } = createLabeledInput({ + inputId: `${fieldId}-${choiceKey.toLowerCase()}`, + inputName: fieldId, + inputValue: choiceKey, + inputChecked: choiceKey === initialKey, + type: "radio", + }); + + const text = window.document.createTextNode(choiceLabel); + label.append(text); + div.append(label); + }); + + field.addEventListener("change", (event) => { + // @ts-ignore + onChange?.(fromKey(event.target.value)); + }); + } + + return field; +} + /** * @param {string} [title] * @param {1 | 2 | 3} [level] diff --git a/website/scripts/utils/persisted.js b/website/scripts/utils/persisted.js new file mode 100644 index 000000000..7f90147b2 --- /dev/null +++ b/website/scripts/utils/persisted.js @@ -0,0 +1,72 @@ +import { readParam, writeParam } from "./url.js"; +import { readStored, writeToStorage } from "./storage.js"; +import { debounce } from "./timing.js"; + +/** + * @template T + * @param {Object} args + * @param {T} args.defaultValue + * @param {string} [args.storageKey] + * @param {string} [args.urlKey] + * @param {(v: T) => string} args.serialize + * @param {(s: string) => T} args.deserialize + * @param {boolean} [args.saveDefaultValue] + */ +export function createPersistedValue({ + defaultValue, + storageKey, + urlKey, + serialize, + deserialize, + saveDefaultValue = false, +}) { + const defaultSerialized = serialize(defaultValue); + + // Read: URL > localStorage > default + let serialized = urlKey ? 
readParam(urlKey) : null; + if (serialized === null && storageKey) { + serialized = readStored(storageKey); + } + let value = serialized !== null ? deserialize(serialized) : defaultValue; + + /** @param {T} v */ + const write = (v) => { + const s = serialize(v); + const isDefault = s === defaultSerialized; + + if (storageKey) { + if (!isDefault || saveDefaultValue) { + writeToStorage(storageKey, s); + } else { + writeToStorage(storageKey, null); + } + } + + if (urlKey) { + writeParam(urlKey, !isDefault || saveDefaultValue ? s : null); + } + }; + + const debouncedWrite = debounce(write, 250); + + // Write initial value + write(value); + + return { + get value() { + return value; + }, + /** @param {T} v */ + set(v) { + value = v; + debouncedWrite(v); + }, + /** @param {T} v */ + setImmediate(v) { + value = v; + write(v); + }, + }; +} + +/** @typedef {ReturnType} PersistedValue */ diff --git a/website/scripts/utils/serde.js b/website/scripts/utils/serde.js index 3a88fdb60..bbc82fdd7 100644 --- a/website/scripts/utils/serde.js +++ b/website/scripts/utils/serde.js @@ -110,6 +110,10 @@ export const serdeBool = { }, }; +/** + * @typedef {"timestamp" | "date" | "week" | "month" | "quarter" | "semester" | "year" | "decade"} ChartableIndexName + */ + export const serdeChartableIndex = { /** * @param {IndexName} v diff --git a/website/scripts/utils/theme.js b/website/scripts/utils/theme.js new file mode 100644 index 000000000..824fcd90e --- /dev/null +++ b/website/scripts/utils/theme.js @@ -0,0 +1,50 @@ +import { readStored, removeStored, writeToStorage } from "./storage.js"; + +const preferredColorSchemeMatchMedia = window.matchMedia( + "(prefers-color-scheme: dark)", +); +const stored = readStored("theme"); +const initial = stored ? 
stored === "dark" : preferredColorSchemeMatchMedia.matches; + +export let dark = initial; + +/** @type {Set<() => void>} */ +const callbacks = new Set(); + +/** @param {() => void} callback */ +export function onChange(callback) { + callbacks.add(callback); + return () => callbacks.delete(callback); +} + +/** @param {boolean} value */ +export function setDark(value) { + if (dark === value) return; + dark = value; + apply(value); + callbacks.forEach((cb) => cb()); +} + +/** @param {boolean} isDark */ +function apply(isDark) { + document.documentElement.style.colorScheme = isDark ? "dark" : "light"; +} +apply(initial); + +preferredColorSchemeMatchMedia.addEventListener("change", ({ matches }) => { + if (!readStored("theme")) { + setDark(matches); + } +}); + +function invert() { + const newValue = !dark; + setDark(newValue); + if (newValue === preferredColorSchemeMatchMedia.matches) { + removeStored("theme"); + } else { + writeToStorage("theme", newValue ? "dark" : "light"); + } +} + +document.getElementById("invert-button")?.addEventListener("click", invert); diff --git a/website/scripts/utils/timing.js b/website/scripts/utils/timing.js index 88182818e..47b9a9bcc 100644 --- a/website/scripts/utils/timing.js +++ b/website/scripts/utils/timing.js @@ -22,18 +22,22 @@ export function throttle(callback, wait = 1000) { let timeoutId = null; /** @type {Parameters} */ let latestArgs; + let hasTrailing = false; return (/** @type {Parameters} */ ...args) => { latestArgs = args; - - if (!timeoutId) { - // Otherwise it optimizes away timeoutId in Chrome and FF - timeoutId = timeoutId; - timeoutId = setTimeout(() => { - callback(...latestArgs); // Execute with latest args - timeoutId = null; - }, wait); + if (timeoutId) { + hasTrailing = true; + return; } + callback(...latestArgs); + timeoutId = setTimeout(() => { + timeoutId = null; + if (hasTrailing) { + hasTrailing = false; + callback(...latestArgs); + } + }, wait); }; }