global: snapshot

This commit is contained in:
nym21
2026-01-16 23:49:49 +01:00
parent 3b00a92fa4
commit 6bb1a2a311
34 changed files with 2600 additions and 5071 deletions

4
.gitignore vendored
View File

@@ -6,6 +6,7 @@ target
websites/dist websites/dist
bridge/ bridge/
/ids.txt /ids.txt
rust_out
# Copies # Copies
*\ copy* *\ copy*
@@ -15,9 +16,10 @@ _*
!__*.py !__*.py
/*.md /*.md
/*.py /*.py
/api.json
/*.json /*.json
/*.html /*.html
/research
/filter_*
# Logs # Logs
*.log* *.log*

56
Cargo.lock generated
View File

@@ -41,9 +41,9 @@ dependencies = [
[[package]] [[package]]
name = "aide" name = "aide"
version = "0.16.0-alpha.1" version = "0.16.0-alpha.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc156b16d6d8e9bb84a7cba8b93fc399c0654bfbb927832ea7ab326d5d6895e2" checksum = "29e03dae8ee60626675f62a8d0c503fdc2721b01ae7feb2fd1e233b6bb16223a"
dependencies = [ dependencies = [
"axum", "axum",
"bytes", "bytes",
@@ -831,9 +831,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.42" version = "0.4.43"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
dependencies = [ dependencies = [
"iana-time-zone", "iana-time-zone",
"js-sys", "js-sys",
@@ -1957,9 +1957,9 @@ checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07"
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.83" version = "0.3.85"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"wasm-bindgen", "wasm-bindgen",
@@ -2596,9 +2596,9 @@ dependencies = [
[[package]] [[package]]
name = "rawdb" name = "rawdb"
version = "0.5.10" version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12f4f316acfc1844da8b3c84163aabd98f155df82d3b57ce4075dda550e1752d" checksum = "133cd3a1d92510fe902efcdf70a7f45aa83e1cd47173d8cc013fef82af0b8f8e"
dependencies = [ dependencies = [
"libc", "libc",
"log", "log",
@@ -2723,9 +2723,9 @@ dependencies = [
[[package]] [[package]]
name = "rustc-demangle" name = "rustc-demangle"
version = "0.1.26" version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d"
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
@@ -3505,9 +3505,9 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]] [[package]]
name = "vecdb" name = "vecdb"
version = "0.5.10" version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d56bd525ca319c5772dcc91b40caa6947dfb6c56b63c4a9b2268ee24254c05" checksum = "0a9b98950cd49f718ec32f3b91282b9d0c033ea15430436ddb0cc286c77aa5bb"
dependencies = [ dependencies = [
"ctrlc", "ctrlc",
"log", "log",
@@ -3526,9 +3526,9 @@ dependencies = [
[[package]] [[package]]
name = "vecdb_derive" name = "vecdb_derive"
version = "0.5.10" version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17c227a593e7a08c3d00babaa81aec624052d2c6a16ba127fb8f491f273c9751" checksum = "043380b6bd5d519b63def192ff310f7a599feaca7f5b12cc86e5d9dd7eabf750"
dependencies = [ dependencies = [
"quote", "quote",
"syn", "syn",
@@ -3558,18 +3558,18 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]] [[package]]
name = "wasip2" name = "wasip2"
version = "1.0.1+wasi-0.2.4" version = "1.0.2+wasi-0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
dependencies = [ dependencies = [
"wit-bindgen", "wit-bindgen",
] ]
[[package]] [[package]]
name = "wasm-bindgen" name = "wasm-bindgen"
version = "0.2.106" version = "0.2.108"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"once_cell", "once_cell",
@@ -3580,9 +3580,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.106" version = "0.2.108"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608"
dependencies = [ dependencies = [
"quote", "quote",
"wasm-bindgen-macro-support", "wasm-bindgen-macro-support",
@@ -3590,9 +3590,9 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-macro-support" name = "wasm-bindgen-macro-support"
version = "0.2.106" version = "0.2.108"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55"
dependencies = [ dependencies = [
"bumpalo", "bumpalo",
"proc-macro2", "proc-macro2",
@@ -3603,18 +3603,18 @@ dependencies = [
[[package]] [[package]]
name = "wasm-bindgen-shared" name = "wasm-bindgen-shared"
version = "0.2.106" version = "0.2.108"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "web-sys" name = "web-sys"
version = "0.3.83" version = "0.3.85"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598"
dependencies = [ dependencies = [
"js-sys", "js-sys",
"wasm-bindgen", "wasm-bindgen",
@@ -3898,9 +3898,9 @@ dependencies = [
[[package]] [[package]]
name = "wit-bindgen" name = "wit-bindgen"
version = "0.46.0" version = "0.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
[[package]] [[package]]
name = "writeable" name = "writeable"

View File

@@ -36,7 +36,7 @@ inherits = "release"
debug = true debug = true
[workspace.dependencies] [workspace.dependencies]
aide = { version = "0.16.0-alpha.1", features = ["axum-json", "axum-query"] } aide = { version = "0.16.0-alpha.2", features = ["axum-json", "axum-query"] }
axum = "0.8.8" axum = "0.8.8"
bitcoin = { version = "0.32.8", features = ["serde"] } bitcoin = { version = "0.32.8", features = ["serde"] }
bitcoincore-rpc = "0.19.0" bitcoincore-rpc = "0.19.0"
@@ -78,7 +78,7 @@ serde_json = { version = "1.0.149", features = ["float_roundtrip", "preserve_ord
smallvec = "1.15.1" smallvec = "1.15.1"
tokio = { version = "1.49.0", features = ["rt-multi-thread"] } tokio = { version = "1.49.0", features = ["rt-multi-thread"] }
tracing = { version = "0.1", default-features = false, features = ["std"] } tracing = { version = "0.1", default-features = false, features = ["std"] }
vecdb = { version = "0.5.10", features = ["derive", "serde_json", "pco", "schemars"] } vecdb = { version = "0.5.11", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } # vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
[workspace.metadata.release] [workspace.metadata.release]

View File

@@ -13,10 +13,10 @@ fn load_catalog() -> TreeNode {
serde_json::from_str(&catalog_json).expect("Failed to parse catalog.json") serde_json::from_str(&catalog_json).expect("Failed to parse catalog.json")
} }
/// Load OpenAPI spec from api.json. /// Load OpenAPI spec from openapi.json.
fn load_openapi_json() -> String { fn load_openapi_json() -> String {
let path = concat!(env!("CARGO_MANIFEST_DIR"), "/api.json"); let path = concat!(env!("CARGO_MANIFEST_DIR"), "/openapi.json");
std::fs::read_to_string(path).expect("Failed to read api.json") std::fs::read_to_string(path).expect("Failed to read openapi.json")
} }
/// Load metadata from the catalog. /// Load metadata from the catalog.

View File

@@ -11,7 +11,6 @@ use serde::{Deserialize, Deserializer, Serialize};
use crate::{default_brk_path, dot_brk_path, fix_user_path, website::Website}; use crate::{default_brk_path, dot_brk_path, fix_user_path, website::Website};
#[derive(Parser, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] #[derive(Parser, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
#[command(version, about)] #[command(version, about)]
pub struct Config { pub struct Config {
@@ -40,9 +39,9 @@ pub struct Config {
#[arg(long, value_name = "BOOL")] #[arg(long, value_name = "BOOL")]
exchanges: Option<bool>, exchanges: Option<bool>,
/// Website served by the server, default: default, saved /// Website served by the server: true (default), false, or PATH, saved
#[serde(default, deserialize_with = "default_on_error")] #[serde(default, deserialize_with = "default_on_error")]
#[arg(short, long)] #[arg(short, long, value_name = "BOOL|PATH")]
website: Option<Website>, website: Option<Website>,
/// Bitcoin RPC ip, default: localhost, saved /// Bitcoin RPC ip, default: localhost, saved
@@ -232,9 +231,7 @@ Finally, you can run the program with '-h' for help."
pub fn bitcoindir(&self) -> PathBuf { pub fn bitcoindir(&self) -> PathBuf {
self.bitcoindir self.bitcoindir
.as_ref() .as_ref()
.map_or_else(Client::default_bitcoin_path, |s| { .map_or_else(Client::default_bitcoin_path, |s| fix_user_path(s.as_ref()))
fix_user_path(s.as_ref())
})
} }
pub fn blocksdir(&self) -> PathBuf { pub fn blocksdir(&self) -> PathBuf {

View File

@@ -2,7 +2,6 @@
use std::{ use std::{
fs, fs,
path::PathBuf,
thread::{self, sleep}, thread::{self, sleep},
time::Duration, time::Duration,
}; };
@@ -68,14 +67,17 @@ pub fn run() -> color_eyre::Result<()> {
let data_path = config.brkdir(); let data_path = config.brkdir();
let website_source = match config.website() { let website_source = match config.website() {
Website::Enabled(false) => WebsiteSource::Disabled, Website::Enabled(false) => {
Website::Path(p) => WebsiteSource::Filesystem(p), info!("Website: disabled");
Website::Enabled(true) => { WebsiteSource::Disabled
// Prefer local filesystem if available, otherwise use embedded
match find_local_website_dir() {
Some(path) => WebsiteSource::Filesystem(path),
None => WebsiteSource::Embedded,
} }
Website::Path(p) => {
info!("Website: filesystem ({})", p.display());
WebsiteSource::Filesystem(p)
}
Website::Enabled(true) => {
info!("Website: embedded");
WebsiteSource::Embedded
} }
}; };
@@ -119,12 +121,3 @@ pub fn run() -> color_eyre::Result<()> {
} }
} }
} }
/// Path to website directory relative to this crate (only valid at dev machine)
const DEV_WEBSITE_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../../website");
/// Returns local website path if it exists (dev mode)
fn find_local_website_dir() -> Option<PathBuf> {
let path = PathBuf::from(DEV_WEBSITE_DIR);
path.exists().then_some(path)
}

View File

@@ -21,7 +21,6 @@ impl Default for Website {
} }
} }
impl FromStr for Website { impl FromStr for Website {
type Err = std::convert::Infallible; type Err = std::convert::Infallible;

View File

@@ -1268,56 +1268,6 @@ impl Price111dSmaPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct PercentilesPattern {
pub pct05: MetricPattern4<Dollars>,
pub pct10: MetricPattern4<Dollars>,
pub pct15: MetricPattern4<Dollars>,
pub pct20: MetricPattern4<Dollars>,
pub pct25: MetricPattern4<Dollars>,
pub pct30: MetricPattern4<Dollars>,
pub pct35: MetricPattern4<Dollars>,
pub pct40: MetricPattern4<Dollars>,
pub pct45: MetricPattern4<Dollars>,
pub pct50: MetricPattern4<Dollars>,
pub pct55: MetricPattern4<Dollars>,
pub pct60: MetricPattern4<Dollars>,
pub pct65: MetricPattern4<Dollars>,
pub pct70: MetricPattern4<Dollars>,
pub pct75: MetricPattern4<Dollars>,
pub pct80: MetricPattern4<Dollars>,
pub pct85: MetricPattern4<Dollars>,
pub pct90: MetricPattern4<Dollars>,
pub pct95: MetricPattern4<Dollars>,
}
impl PercentilesPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
pct05: MetricPattern4::new(client.clone(), _m(&acc, "pct05")),
pct10: MetricPattern4::new(client.clone(), _m(&acc, "pct10")),
pct15: MetricPattern4::new(client.clone(), _m(&acc, "pct15")),
pct20: MetricPattern4::new(client.clone(), _m(&acc, "pct20")),
pct25: MetricPattern4::new(client.clone(), _m(&acc, "pct25")),
pct30: MetricPattern4::new(client.clone(), _m(&acc, "pct30")),
pct35: MetricPattern4::new(client.clone(), _m(&acc, "pct35")),
pct40: MetricPattern4::new(client.clone(), _m(&acc, "pct40")),
pct45: MetricPattern4::new(client.clone(), _m(&acc, "pct45")),
pct50: MetricPattern4::new(client.clone(), _m(&acc, "pct50")),
pct55: MetricPattern4::new(client.clone(), _m(&acc, "pct55")),
pct60: MetricPattern4::new(client.clone(), _m(&acc, "pct60")),
pct65: MetricPattern4::new(client.clone(), _m(&acc, "pct65")),
pct70: MetricPattern4::new(client.clone(), _m(&acc, "pct70")),
pct75: MetricPattern4::new(client.clone(), _m(&acc, "pct75")),
pct80: MetricPattern4::new(client.clone(), _m(&acc, "pct80")),
pct85: MetricPattern4::new(client.clone(), _m(&acc, "pct85")),
pct90: MetricPattern4::new(client.clone(), _m(&acc, "pct90")),
pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct ActivePriceRatioPattern { pub struct ActivePriceRatioPattern {
pub ratio: MetricPattern4<StoredF32>, pub ratio: MetricPattern4<StoredF32>,
@@ -1368,6 +1318,56 @@ impl ActivePriceRatioPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct PercentilesPattern {
pub pct05: MetricPattern4<Dollars>,
pub pct10: MetricPattern4<Dollars>,
pub pct15: MetricPattern4<Dollars>,
pub pct20: MetricPattern4<Dollars>,
pub pct25: MetricPattern4<Dollars>,
pub pct30: MetricPattern4<Dollars>,
pub pct35: MetricPattern4<Dollars>,
pub pct40: MetricPattern4<Dollars>,
pub pct45: MetricPattern4<Dollars>,
pub pct50: MetricPattern4<Dollars>,
pub pct55: MetricPattern4<Dollars>,
pub pct60: MetricPattern4<Dollars>,
pub pct65: MetricPattern4<Dollars>,
pub pct70: MetricPattern4<Dollars>,
pub pct75: MetricPattern4<Dollars>,
pub pct80: MetricPattern4<Dollars>,
pub pct85: MetricPattern4<Dollars>,
pub pct90: MetricPattern4<Dollars>,
pub pct95: MetricPattern4<Dollars>,
}
impl PercentilesPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
pct05: MetricPattern4::new(client.clone(), _m(&acc, "pct05")),
pct10: MetricPattern4::new(client.clone(), _m(&acc, "pct10")),
pct15: MetricPattern4::new(client.clone(), _m(&acc, "pct15")),
pct20: MetricPattern4::new(client.clone(), _m(&acc, "pct20")),
pct25: MetricPattern4::new(client.clone(), _m(&acc, "pct25")),
pct30: MetricPattern4::new(client.clone(), _m(&acc, "pct30")),
pct35: MetricPattern4::new(client.clone(), _m(&acc, "pct35")),
pct40: MetricPattern4::new(client.clone(), _m(&acc, "pct40")),
pct45: MetricPattern4::new(client.clone(), _m(&acc, "pct45")),
pct50: MetricPattern4::new(client.clone(), _m(&acc, "pct50")),
pct55: MetricPattern4::new(client.clone(), _m(&acc, "pct55")),
pct60: MetricPattern4::new(client.clone(), _m(&acc, "pct60")),
pct65: MetricPattern4::new(client.clone(), _m(&acc, "pct65")),
pct70: MetricPattern4::new(client.clone(), _m(&acc, "pct70")),
pct75: MetricPattern4::new(client.clone(), _m(&acc, "pct75")),
pct80: MetricPattern4::new(client.clone(), _m(&acc, "pct80")),
pct85: MetricPattern4::new(client.clone(), _m(&acc, "pct85")),
pct90: MetricPattern4::new(client.clone(), _m(&acc, "pct90")),
pct95: MetricPattern4::new(client.clone(), _m(&acc, "pct95")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct RelativePattern5 { pub struct RelativePattern5 {
pub neg_unrealized_loss_rel_to_market_cap: MetricPattern1<StoredF32>, pub neg_unrealized_loss_rel_to_market_cap: MetricPattern1<StoredF32>,
@@ -1668,38 +1668,6 @@ impl<T: DeserializeOwned> DollarsPattern<T> {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct RelativePattern2 {
pub neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub supply_in_loss_rel_to_own_supply: MetricPattern1<StoredF64>,
pub supply_in_profit_rel_to_own_supply: MetricPattern1<StoredF64>,
pub unrealized_loss_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub unrealized_profit_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
}
impl RelativePattern2 {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap")),
neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl")),
net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")),
net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")),
supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")),
supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")),
unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")),
unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")),
unrealized_profit_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_market_cap")),
unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct RelativePattern { pub struct RelativePattern {
pub neg_unrealized_loss_rel_to_market_cap: MetricPattern1<StoredF32>, pub neg_unrealized_loss_rel_to_market_cap: MetricPattern1<StoredF32>,
@@ -1732,6 +1700,38 @@ impl RelativePattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct RelativePattern2 {
pub neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub supply_in_loss_rel_to_own_supply: MetricPattern1<StoredF64>,
pub supply_in_profit_rel_to_own_supply: MetricPattern1<StoredF64>,
pub unrealized_loss_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
pub unrealized_profit_rel_to_own_market_cap: MetricPattern1<StoredF32>,
pub unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1<StoredF32>,
}
impl RelativePattern2 {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_market_cap")),
neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss_rel_to_own_total_unrealized_pnl")),
net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_market_cap")),
net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "net_unrealized_pnl_rel_to_own_total_unrealized_pnl")),
supply_in_loss_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_loss_rel_to_own_supply")),
supply_in_profit_rel_to_own_supply: MetricPattern1::new(client.clone(), _m(&acc, "supply_in_profit_rel_to_own_supply")),
unrealized_loss_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_market_cap")),
unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_loss_rel_to_own_total_unrealized_pnl")),
unrealized_profit_rel_to_own_market_cap: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_market_cap")),
unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1::new(client.clone(), _m(&acc, "unrealized_profit_rel_to_own_total_unrealized_pnl")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct CountPattern2<T> { pub struct CountPattern2<T> {
pub average: MetricPattern1<T>, pub average: MetricPattern1<T>,
@@ -1794,36 +1794,6 @@ impl AddrCountPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct FullnessPattern<T> {
pub average: MetricPattern2<T>,
pub base: MetricPattern11<T>,
pub max: MetricPattern2<T>,
pub median: MetricPattern6<T>,
pub min: MetricPattern2<T>,
pub pct10: MetricPattern6<T>,
pub pct25: MetricPattern6<T>,
pub pct75: MetricPattern6<T>,
pub pct90: MetricPattern6<T>,
}
impl<T: DeserializeOwned> FullnessPattern<T> {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
average: MetricPattern2::new(client.clone(), _m(&acc, "average")),
base: MetricPattern11::new(client.clone(), acc.clone()),
max: MetricPattern2::new(client.clone(), _m(&acc, "max")),
median: MetricPattern6::new(client.clone(), _m(&acc, "median")),
min: MetricPattern2::new(client.clone(), _m(&acc, "min")),
pct10: MetricPattern6::new(client.clone(), _m(&acc, "pct10")),
pct25: MetricPattern6::new(client.clone(), _m(&acc, "pct25")),
pct75: MetricPattern6::new(client.clone(), _m(&acc, "pct75")),
pct90: MetricPattern6::new(client.clone(), _m(&acc, "pct90")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct FeeRatePattern<T> { pub struct FeeRatePattern<T> {
pub average: MetricPattern1<T>, pub average: MetricPattern1<T>,
@@ -1854,6 +1824,36 @@ impl<T: DeserializeOwned> FeeRatePattern<T> {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct FullnessPattern<T> {
pub average: MetricPattern2<T>,
pub base: MetricPattern11<T>,
pub max: MetricPattern2<T>,
pub median: MetricPattern6<T>,
pub min: MetricPattern2<T>,
pub pct10: MetricPattern6<T>,
pub pct25: MetricPattern6<T>,
pub pct75: MetricPattern6<T>,
pub pct90: MetricPattern6<T>,
}
impl<T: DeserializeOwned> FullnessPattern<T> {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
average: MetricPattern2::new(client.clone(), _m(&acc, "average")),
base: MetricPattern11::new(client.clone(), acc.clone()),
max: MetricPattern2::new(client.clone(), _m(&acc, "max")),
median: MetricPattern6::new(client.clone(), _m(&acc, "median")),
min: MetricPattern2::new(client.clone(), _m(&acc, "min")),
pct10: MetricPattern6::new(client.clone(), _m(&acc, "pct10")),
pct25: MetricPattern6::new(client.clone(), _m(&acc, "pct25")),
pct75: MetricPattern6::new(client.clone(), _m(&acc, "pct75")),
pct90: MetricPattern6::new(client.clone(), _m(&acc, "pct90")),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct _0satsPattern { pub struct _0satsPattern {
pub activity: ActivityPattern2, pub activity: ActivityPattern2,
@@ -1910,6 +1910,84 @@ impl<T: DeserializeOwned> PhaseDailyCentsPattern<T> {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct PeriodCagrPattern {
pub _10y: MetricPattern4<StoredF32>,
pub _2y: MetricPattern4<StoredF32>,
pub _3y: MetricPattern4<StoredF32>,
pub _4y: MetricPattern4<StoredF32>,
pub _5y: MetricPattern4<StoredF32>,
pub _6y: MetricPattern4<StoredF32>,
pub _8y: MetricPattern4<StoredF32>,
}
impl PeriodCagrPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
_10y: MetricPattern4::new(client.clone(), _p("10y", &acc)),
_2y: MetricPattern4::new(client.clone(), _p("2y", &acc)),
_3y: MetricPattern4::new(client.clone(), _p("3y", &acc)),
_4y: MetricPattern4::new(client.clone(), _p("4y", &acc)),
_5y: MetricPattern4::new(client.clone(), _p("5y", &acc)),
_6y: MetricPattern4::new(client.clone(), _p("6y", &acc)),
_8y: MetricPattern4::new(client.clone(), _p("8y", &acc)),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct _0satsPattern2 {
pub activity: ActivityPattern2,
pub cost_basis: CostBasisPattern,
pub outputs: OutputsPattern,
pub realized: RealizedPattern,
pub relative: RelativePattern4,
pub supply: SupplyPattern2,
pub unrealized: UnrealizedPattern,
}
impl _0satsPattern2 {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
activity: ActivityPattern2::new(client.clone(), acc.clone()),
cost_basis: CostBasisPattern::new(client.clone(), acc.clone()),
outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")),
realized: RealizedPattern::new(client.clone(), acc.clone()),
relative: RelativePattern4::new(client.clone(), _m(&acc, "supply_in")),
supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")),
unrealized: UnrealizedPattern::new(client.clone(), acc.clone()),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct _100btcPattern {
pub activity: ActivityPattern2,
pub cost_basis: CostBasisPattern,
pub outputs: OutputsPattern,
pub realized: RealizedPattern,
pub relative: RelativePattern,
pub supply: SupplyPattern2,
pub unrealized: UnrealizedPattern,
}
impl _100btcPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
activity: ActivityPattern2::new(client.clone(), acc.clone()),
cost_basis: CostBasisPattern::new(client.clone(), acc.clone()),
outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")),
realized: RealizedPattern::new(client.clone(), acc.clone()),
relative: RelativePattern::new(client.clone(), acc.clone()),
supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")),
unrealized: UnrealizedPattern::new(client.clone(), acc.clone()),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct UnrealizedPattern { pub struct UnrealizedPattern {
pub neg_unrealized_loss: MetricPattern1<Dollars>, pub neg_unrealized_loss: MetricPattern1<Dollars>,
@@ -1962,32 +2040,6 @@ impl _10yTo12yPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct PeriodCagrPattern {
pub _10y: MetricPattern4<StoredF32>,
pub _2y: MetricPattern4<StoredF32>,
pub _3y: MetricPattern4<StoredF32>,
pub _4y: MetricPattern4<StoredF32>,
pub _5y: MetricPattern4<StoredF32>,
pub _6y: MetricPattern4<StoredF32>,
pub _8y: MetricPattern4<StoredF32>,
}
impl PeriodCagrPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
_10y: MetricPattern4::new(client.clone(), _p("10y", &acc)),
_2y: MetricPattern4::new(client.clone(), _p("2y", &acc)),
_3y: MetricPattern4::new(client.clone(), _p("3y", &acc)),
_4y: MetricPattern4::new(client.clone(), _p("4y", &acc)),
_5y: MetricPattern4::new(client.clone(), _p("5y", &acc)),
_6y: MetricPattern4::new(client.clone(), _p("6y", &acc)),
_8y: MetricPattern4::new(client.clone(), _p("8y", &acc)),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct _10yPattern { pub struct _10yPattern {
pub activity: ActivityPattern2, pub activity: ActivityPattern2,
@@ -2014,58 +2066,6 @@ impl _10yPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct _100btcPattern {
pub activity: ActivityPattern2,
pub cost_basis: CostBasisPattern,
pub outputs: OutputsPattern,
pub realized: RealizedPattern,
pub relative: RelativePattern,
pub supply: SupplyPattern2,
pub unrealized: UnrealizedPattern,
}
impl _100btcPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
activity: ActivityPattern2::new(client.clone(), acc.clone()),
cost_basis: CostBasisPattern::new(client.clone(), acc.clone()),
outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")),
realized: RealizedPattern::new(client.clone(), acc.clone()),
relative: RelativePattern::new(client.clone(), acc.clone()),
supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")),
unrealized: UnrealizedPattern::new(client.clone(), acc.clone()),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct _0satsPattern2 {
pub activity: ActivityPattern2,
pub cost_basis: CostBasisPattern,
pub outputs: OutputsPattern,
pub realized: RealizedPattern,
pub relative: RelativePattern4,
pub supply: SupplyPattern2,
pub unrealized: UnrealizedPattern,
}
impl _0satsPattern2 {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
activity: ActivityPattern2::new(client.clone(), acc.clone()),
cost_basis: CostBasisPattern::new(client.clone(), acc.clone()),
outputs: OutputsPattern::new(client.clone(), _m(&acc, "utxo_count")),
realized: RealizedPattern::new(client.clone(), acc.clone()),
relative: RelativePattern4::new(client.clone(), _m(&acc, "supply_in")),
supply: SupplyPattern2::new(client.clone(), _m(&acc, "supply")),
unrealized: UnrealizedPattern::new(client.clone(), acc.clone()),
}
}
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct ActivityPattern2 { pub struct ActivityPattern2 {
pub coinblocks_destroyed: BlockCountPattern<StoredF64>, pub coinblocks_destroyed: BlockCountPattern<StoredF64>,
@@ -2108,24 +2108,6 @@ impl<T: DeserializeOwned> SplitPattern2<T> {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct _2015Pattern {
    pub bitcoin: MetricPattern4<Bitcoin>,
    pub dollars: MetricPattern4<Dollars>,
    pub sats: MetricPattern4<Sats>,
}
impl _2015Pattern {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // `sats` keeps the bare accumulated name; btc/usd append a unit suffix.
        let sats = MetricPattern4::new(client.clone(), acc.clone());
        let bitcoin = MetricPattern4::new(client.clone(), _m(&acc, "btc"));
        let dollars = MetricPattern4::new(client.clone(), _m(&acc, "usd"));
        Self { bitcoin, dollars, sats }
    }
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct ActiveSupplyPattern { pub struct ActiveSupplyPattern {
pub bitcoin: MetricPattern1<Bitcoin>, pub bitcoin: MetricPattern1<Bitcoin>,
@@ -2145,19 +2127,19 @@ impl ActiveSupplyPattern {
} }
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct CostBasisPattern2 { pub struct _2015Pattern {
pub max: MetricPattern1<Dollars>, pub bitcoin: MetricPattern4<Bitcoin>,
pub min: MetricPattern1<Dollars>, pub dollars: MetricPattern4<Dollars>,
pub percentiles: PercentilesPattern, pub sats: MetricPattern4<Sats>,
} }
impl CostBasisPattern2 { impl _2015Pattern {
/// Create a new pattern node with accumulated metric name. /// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self { pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self { Self {
max: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")), bitcoin: MetricPattern4::new(client.clone(), _m(&acc, "btc")),
min: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")), dollars: MetricPattern4::new(client.clone(), _m(&acc, "usd")),
percentiles: PercentilesPattern::new(client.clone(), _m(&acc, "cost_basis")), sats: MetricPattern4::new(client.clone(), acc.clone()),
} }
} }
} }
@@ -2180,6 +2162,24 @@ impl CoinbasePattern2 {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct CostBasisPattern2 {
    pub max: MetricPattern1<Dollars>,
    pub min: MetricPattern1<Dollars>,
    pub percentiles: PercentilesPattern,
}
impl CostBasisPattern2 {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // All children extend the accumulated name with a fixed suffix.
        let max = MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis"));
        let min = MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis"));
        let percentiles = PercentilesPattern::new(client.clone(), _m(&acc, "cost_basis"));
        Self { max, min, percentiles }
    }
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct SegwitAdoptionPattern { pub struct SegwitAdoptionPattern {
pub base: MetricPattern11<StoredF32>, pub base: MetricPattern11<StoredF32>,
@@ -2198,24 +2198,6 @@ impl SegwitAdoptionPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct CoinbasePattern {
    pub bitcoin: BitcoinPattern,
    pub dollars: DollarsPattern<Dollars>,
    pub sats: DollarsPattern<Sats>,
}
impl CoinbasePattern {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // `sats` keeps the bare accumulated name; btc/usd append a unit suffix.
        let sats = DollarsPattern::new(client.clone(), acc.clone());
        let bitcoin = BitcoinPattern::new(client.clone(), _m(&acc, "btc"));
        let dollars = DollarsPattern::new(client.clone(), _m(&acc, "usd"));
        Self { bitcoin, dollars, sats }
    }
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct UnclaimedRewardsPattern { pub struct UnclaimedRewardsPattern {
pub bitcoin: BitcoinPattern2<Bitcoin>, pub bitcoin: BitcoinPattern2<Bitcoin>,
@@ -2235,17 +2217,19 @@ impl UnclaimedRewardsPattern {
} }
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct SupplyPattern2 { pub struct CoinbasePattern {
pub halved: ActiveSupplyPattern, pub bitcoin: BitcoinPattern,
pub total: ActiveSupplyPattern, pub dollars: DollarsPattern<Dollars>,
pub sats: DollarsPattern<Sats>,
} }
impl SupplyPattern2 { impl CoinbasePattern {
/// Create a new pattern node with accumulated metric name. /// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self { pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self { Self {
halved: ActiveSupplyPattern::new(client.clone(), _m(&acc, "halved")), bitcoin: BitcoinPattern::new(client.clone(), _m(&acc, "btc")),
total: ActiveSupplyPattern::new(client.clone(), acc.clone()), dollars: DollarsPattern::new(client.clone(), _m(&acc, "usd")),
sats: DollarsPattern::new(client.clone(), acc.clone()),
} }
} }
} }
@@ -2266,22 +2250,6 @@ impl RelativePattern4 {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct CostBasisPattern {
    pub max: MetricPattern1<Dollars>,
    pub min: MetricPattern1<Dollars>,
}
impl CostBasisPattern {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // Both children extend the accumulated name with a fixed suffix.
        let max = MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis"));
        let min = MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis"));
        Self { max, min }
    }
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct _1dReturns1mSdPattern { pub struct _1dReturns1mSdPattern {
pub sd: MetricPattern4<StoredF32>, pub sd: MetricPattern4<StoredF32>,
@@ -2299,17 +2267,33 @@ impl _1dReturns1mSdPattern {
} }
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct BitcoinPattern2<T> { pub struct CostBasisPattern {
pub cumulative: MetricPattern2<T>, pub max: MetricPattern1<Dollars>,
pub sum: MetricPattern1<T>, pub min: MetricPattern1<Dollars>,
} }
impl<T: DeserializeOwned> BitcoinPattern2<T> { impl CostBasisPattern {
/// Create a new pattern node with accumulated metric name. /// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self { pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self { Self {
cumulative: MetricPattern2::new(client.clone(), _m(&acc, "cumulative")), max: MetricPattern1::new(client.clone(), _m(&acc, "max_cost_basis")),
sum: MetricPattern1::new(client.clone(), acc.clone()), min: MetricPattern1::new(client.clone(), _m(&acc, "min_cost_basis")),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct SupplyPattern2 {
pub halved: ActiveSupplyPattern,
pub total: ActiveSupplyPattern,
}
impl SupplyPattern2 {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
halved: ActiveSupplyPattern::new(client.clone(), _m(&acc, "halved")),
total: ActiveSupplyPattern::new(client.clone(), acc.clone()),
} }
} }
} }
@@ -2330,6 +2314,22 @@ impl<T: DeserializeOwned> BlockCountPattern<T> {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct BitcoinPattern2<T> {
    pub cumulative: MetricPattern2<T>,
    pub sum: MetricPattern1<T>,
}
impl<T: DeserializeOwned> BitcoinPattern2<T> {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // `sum` keeps the bare accumulated name; `cumulative` appends a suffix.
        let cumulative = MetricPattern2::new(client.clone(), _m(&acc, "cumulative"));
        let sum = MetricPattern1::new(client.clone(), acc.clone());
        Self { cumulative, sum }
    }
}
/// Pattern struct for repeated tree structure. /// Pattern struct for repeated tree structure.
pub struct SatsPattern<T> { pub struct SatsPattern<T> {
pub ohlc: MetricPattern1<T>, pub ohlc: MetricPattern1<T>,
@@ -2340,22 +2340,8 @@ impl<T: DeserializeOwned> SatsPattern<T> {
/// Create a new pattern node with accumulated metric name. /// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self { pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self { Self {
ohlc: MetricPattern1::new(client.clone(), _m(&acc, "ohlc")), ohlc: MetricPattern1::new(client.clone(), _m(&acc, "ohlc_sats")),
split: SplitPattern2::new(client.clone(), acc.clone()), split: SplitPattern2::new(client.clone(), _m(&acc, "sats")),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct RealizedPriceExtraPattern {
pub ratio: MetricPattern4<StoredF32>,
}
impl RealizedPriceExtraPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
ratio: MetricPattern4::new(client.clone(), acc.clone()),
} }
} }
} }
@@ -2374,6 +2360,20 @@ impl OutputsPattern {
} }
} }
/// Pattern struct for repeated tree structure.
pub struct RealizedPriceExtraPattern {
    pub ratio: MetricPattern4<StoredF32>,
}
impl RealizedPriceExtraPattern {
    /// Create a new pattern node with accumulated metric name.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        // Single child: reuses the accumulated name unchanged.
        let ratio = MetricPattern4::new(client.clone(), acc.clone());
        Self { ratio }
    }
}
// Metrics tree // Metrics tree
/// Metrics tree node. /// Metrics tree node.
@@ -4942,8 +4942,8 @@ impl MetricsTree_Positions {
pub struct MetricsTree_Price { pub struct MetricsTree_Price {
pub cents: MetricsTree_Price_Cents, pub cents: MetricsTree_Price_Cents,
pub oracle: MetricsTree_Price_Oracle, pub oracle: MetricsTree_Price_Oracle,
pub sats: MetricsTree_Price_Sats, pub sats: SatsPattern<OHLCSats>,
pub usd: SatsPattern<OHLCDollars>, pub usd: MetricsTree_Price_Usd,
} }
impl MetricsTree_Price { impl MetricsTree_Price {
@@ -4951,8 +4951,8 @@ impl MetricsTree_Price {
Self { Self {
cents: MetricsTree_Price_Cents::new(client.clone(), format!("{base_path}_cents")), cents: MetricsTree_Price_Cents::new(client.clone(), format!("{base_path}_cents")),
oracle: MetricsTree_Price_Oracle::new(client.clone(), format!("{base_path}_oracle")), oracle: MetricsTree_Price_Oracle::new(client.clone(), format!("{base_path}_oracle")),
sats: MetricsTree_Price_Sats::new(client.clone(), format!("{base_path}_sats")), sats: SatsPattern::new(client.clone(), "price".to_string()),
usd: SatsPattern::new(client.clone(), "price".to_string()), usd: MetricsTree_Price_Usd::new(client.clone(), format!("{base_path}_usd")),
} }
} }
} }
@@ -5003,6 +5003,20 @@ pub struct MetricsTree_Price_Oracle {
pub phase_daily_dollars: PhaseDailyCentsPattern<Dollars>, pub phase_daily_dollars: PhaseDailyCentsPattern<Dollars>,
pub phase_histogram: MetricPattern11<OracleBins>, pub phase_histogram: MetricPattern11<OracleBins>,
pub phase_price_cents: MetricPattern11<Cents>, pub phase_price_cents: MetricPattern11<Cents>,
pub phase_v2_daily_cents: PhaseDailyCentsPattern<Cents>,
pub phase_v2_daily_dollars: PhaseDailyCentsPattern<Dollars>,
pub phase_v2_histogram: MetricPattern11<OracleBinsV2>,
pub phase_v2_peak_daily_cents: PhaseDailyCentsPattern<Cents>,
pub phase_v2_peak_daily_dollars: PhaseDailyCentsPattern<Dollars>,
pub phase_v2_peak_price_cents: MetricPattern11<Cents>,
pub phase_v2_price_cents: MetricPattern11<Cents>,
pub phase_v3_daily_cents: PhaseDailyCentsPattern<Cents>,
pub phase_v3_daily_dollars: PhaseDailyCentsPattern<Dollars>,
pub phase_v3_histogram: MetricPattern11<OracleBinsV2>,
pub phase_v3_peak_daily_cents: PhaseDailyCentsPattern<Cents>,
pub phase_v3_peak_daily_dollars: PhaseDailyCentsPattern<Dollars>,
pub phase_v3_peak_price_cents: MetricPattern11<Cents>,
pub phase_v3_price_cents: MetricPattern11<Cents>,
pub price_cents: MetricPattern11<Cents>, pub price_cents: MetricPattern11<Cents>,
pub tx_count: MetricPattern6<StoredU32>, pub tx_count: MetricPattern6<StoredU32>,
} }
@@ -5020,6 +5034,20 @@ impl MetricsTree_Price_Oracle {
phase_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_daily_dollars".to_string()), phase_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_daily_dollars".to_string()),
phase_histogram: MetricPattern11::new(client.clone(), "phase_histogram".to_string()), phase_histogram: MetricPattern11::new(client.clone(), "phase_histogram".to_string()),
phase_price_cents: MetricPattern11::new(client.clone(), "phase_price_cents".to_string()), phase_price_cents: MetricPattern11::new(client.clone(), "phase_price_cents".to_string()),
phase_v2_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_daily".to_string()),
phase_v2_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_daily_dollars".to_string()),
phase_v2_histogram: MetricPattern11::new(client.clone(), "phase_v2_histogram".to_string()),
phase_v2_peak_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_peak_daily".to_string()),
phase_v2_peak_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v2_peak_daily_dollars".to_string()),
phase_v2_peak_price_cents: MetricPattern11::new(client.clone(), "phase_v2_peak_price_cents".to_string()),
phase_v2_price_cents: MetricPattern11::new(client.clone(), "phase_v2_price_cents".to_string()),
phase_v3_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_daily".to_string()),
phase_v3_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_daily_dollars".to_string()),
phase_v3_histogram: MetricPattern11::new(client.clone(), "phase_v3_histogram".to_string()),
phase_v3_peak_daily_cents: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_peak_daily".to_string()),
phase_v3_peak_daily_dollars: PhaseDailyCentsPattern::new(client.clone(), "phase_v3_peak_daily_dollars".to_string()),
phase_v3_peak_price_cents: MetricPattern11::new(client.clone(), "phase_v3_peak_price_cents".to_string()),
phase_v3_price_cents: MetricPattern11::new(client.clone(), "phase_v3_price_cents".to_string()),
price_cents: MetricPattern11::new(client.clone(), "oracle_price_cents".to_string()), price_cents: MetricPattern11::new(client.clone(), "oracle_price_cents".to_string()),
tx_count: MetricPattern6::new(client.clone(), "oracle_tx_count".to_string()), tx_count: MetricPattern6::new(client.clone(), "oracle_tx_count".to_string()),
} }
@@ -5027,16 +5055,16 @@ impl MetricsTree_Price_Oracle {
} }
/// Metrics tree node. /// Metrics tree node.
pub struct MetricsTree_Price_Sats { pub struct MetricsTree_Price_Usd {
pub ohlc: MetricPattern1<OHLCSats>, pub ohlc: MetricPattern1<OHLCDollars>,
pub split: SplitPattern2<Sats>, pub split: SplitPattern2<Dollars>,
} }
impl MetricsTree_Price_Sats { impl MetricsTree_Price_Usd {
pub fn new(client: Arc<BrkClientBase>, base_path: String) -> Self { pub fn new(client: Arc<BrkClientBase>, base_path: String) -> Self {
Self { Self {
ohlc: MetricPattern1::new(client.clone(), "price_ohlc_sats".to_string()), ohlc: MetricPattern1::new(client.clone(), "price_ohlc".to_string()),
split: SplitPattern2::new(client.clone(), "price_sats".to_string()), split: SplitPattern2::new(client.clone(), "price".to_string()),
} }
} }
} }
@@ -5422,24 +5450,15 @@ impl BrkClient {
) )
} }
/// OpenAPI specification /// Compact OpenAPI specification
/// ///
/// Full OpenAPI 3.1 specification for this API. /// Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information. Full spec available at `/openapi.json`.
/// ///
/// Endpoint: `GET /api.json` /// Endpoint: `GET /api.json`
pub fn get_openapi(&self) -> Result<serde_json::Value> { pub fn get_api(&self) -> Result<serde_json::Value> {
self.base.get_json(&format!("/api.json")) self.base.get_json(&format!("/api.json"))
} }
/// Trimmed OpenAPI specification
///
/// Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information.
///
/// Endpoint: `GET /api.trimmed.json`
pub fn get_openapi_trimmed(&self) -> Result<serde_json::Value> {
self.base.get_json(&format!("/api.trimmed.json"))
}
/// Address information /// Address information
/// ///
/// Retrieve address information including balance and transaction counts. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR). /// Retrieve address information including balance and transaction counts. Supports all standard Bitcoin address types (P2PKH, P2SH, P2WPKH, P2WSH, P2TR).
@@ -5999,6 +6018,15 @@ impl BrkClient {
self.base.get_json(&format!("/health")) self.base.get_json(&format!("/health"))
} }
/// OpenAPI specification
///
/// Full OpenAPI 3.1 specification for this API.
///
/// Endpoint: `GET /openapi.json`
pub fn get_openapi(&self) -> Result<serde_json::Value> {
self.base.get_json(&format!("/openapi.json"))
}
/// API version /// API version
/// ///
/// Returns the current version of the API server /// Returns the current version of the API server

View File

@@ -26,7 +26,8 @@ impl Vecs {
info!("Computing oracle prices..."); info!("Computing oracle prices...");
let i = Instant::now(); let i = Instant::now();
self.oracle.compute(indexer, indexes, starting_indexes, exit)?; self.oracle
.compute(indexer, indexes, &self.cents, starting_indexes, exit)?;
info!("Computed oracle prices in {:?}", i.elapsed()); info!("Computed oracle prices in {:?}", i.elapsed());
} }

View File

@@ -53,8 +53,8 @@ use std::collections::VecDeque;
use brk_error::Result; use brk_error::Result;
use brk_indexer::Indexer; use brk_indexer::Indexer;
use brk_types::{ use brk_types::{
Cents, Close, Date, DateIndex, Height, High, Low, OHLCCents, Open, OracleBins, OutputType, Cents, Close, Date, DateIndex, Height, High, Low, OHLCCents, Open, OracleBins, OracleBinsV2,
PHASE_BINS, PairOutputIndex, Sats, StoredU32, StoredU64, TxIndex, OutputType, PHASE_BINS, PairOutputIndex, Sats, StoredU32, StoredU64, TxIndex,
}; };
use tracing::info; use tracing::info;
use vecdb::{ use vecdb::{
@@ -66,9 +66,10 @@ use super::{
Vecs, Vecs,
config::OracleConfig, config::OracleConfig,
histogram::{Histogram, TOTAL_BINS}, histogram::{Histogram, TOTAL_BINS},
phase_v2::{PhaseHistogramV2, find_best_phase, phase_range_from_anchor, phase_to_price},
stencil::{find_best_price, is_round_sats, refine_price}, stencil::{find_best_price, is_round_sats, refine_price},
}; };
use crate::{ComputeIndexes, indexes}; use crate::{ComputeIndexes, indexes, price::cents};
/// Flush interval for periodic writes during oracle computation. /// Flush interval for periodic writes during oracle computation.
const FLUSH_INTERVAL: usize = 10_000; const FLUSH_INTERVAL: usize = 10_000;
@@ -79,6 +80,7 @@ impl Vecs {
&mut self, &mut self,
indexer: &Indexer, indexer: &Indexer,
indexes: &indexes::Vecs, indexes: &indexes::Vecs,
price_cents: &cents::Vecs,
starting_indexes: &ComputeIndexes, starting_indexes: &ComputeIndexes,
exit: &Exit, exit: &Exit,
) -> Result<()> { ) -> Result<()> {
@@ -100,6 +102,32 @@ impl Vecs {
// Step 7: Aggregate to daily OHLC // Step 7: Aggregate to daily OHLC
self.compute_daily_ohlc(indexes, starting_indexes, exit)?; self.compute_daily_ohlc(indexes, starting_indexes, exit)?;
// Step 8: Compute Phase Oracle V2 (round USD template matching)
// 8a: Per-block 200-bin histograms (uses ALL outputs, not pair-filtered)
self.compute_phase_v2_histograms(indexer, indexes, starting_indexes, exit)?;
// 8b: Per-block prices using cross-correlation with weekly anchors
self.compute_phase_v2_prices(indexes, price_cents, starting_indexes, exit)?;
// 8c: Per-block prices using direct peak finding (like V1)
self.compute_phase_v2_peak_prices(indexes, price_cents, starting_indexes, exit)?;
// 8d: Daily distributions from per-block prices
self.compute_phase_v2_daily(indexes, starting_indexes, exit)?;
// Step 9: Compute Phase Oracle V3 (BASE + uniqueVal filter)
// 9a: Per-block histograms with uniqueVal filtering (only outputs with unique values in tx)
self.compute_phase_v3_histograms(indexer, indexes, starting_indexes, exit)?;
// 9b: Per-block prices using cross-correlation
self.compute_phase_v3_prices(indexes, price_cents, starting_indexes, exit)?;
// 9c: Per-block prices using direct peak finding (like V1)
self.compute_phase_v3_peak_prices(indexes, price_cents, starting_indexes, exit)?;
// 9d: Daily distributions from per-block prices
self.compute_phase_v3_daily(indexes, starting_indexes, exit)?;
Ok(()) Ok(())
} }
@@ -1091,4 +1119,898 @@ impl Vecs {
Ok(()) Ok(())
} }
/// Compute Phase Oracle V2 - Step 1: Per-block 200-bin phase histograms
///
/// Uses ALL outputs (like Python test), filtered only by sats range (1k-100k BTC).
/// This is different from the pair-filtered approach used by UTXOracle.
///
/// Resumable: picks up from the last stored height (or the caller's requested
/// start, whichever is earlier) and flushes periodically under the exit lock.
fn compute_phase_v2_histograms(
    &mut self,
    indexer: &Indexer,
    indexes: &indexes::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Reset our computed vec if the upstream output-value vec changed version.
    let source_version = indexer.vecs.outputs.value.version();
    self.phase_v2_histogram
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = indexer.vecs.blocks.timestamp.len();
    // Resume point: min of what we already stored and the requested start.
    let start_height = self
        .phase_v2_histogram
        .len()
        .min(starting_indexes.height.to_usize());
    // Drop any entries at/after the resume point so we recompute them.
    self.phase_v2_histogram
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V2 histograms from height {} to {}",
        start_height, total_heights
    );
    // Sequential cursors over the indexer/index vecs; advanced in height order.
    let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter();
    let mut txindex_to_first_txoutindex_iter =
        indexer.vecs.transactions.first_txoutindex.into_iter();
    let mut txindex_to_output_count_iter = indexes.txindex.output_count.iter();
    let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.into_iter();
    let total_txs = indexer.vecs.transactions.height.len();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    for height in start_height..total_heights {
        // Get transaction range for this block
        let first_txindex = height_to_first_txindex_iter.get_at_unwrap(height);
        // Last block has no successor: fall back to the total tx count.
        let next_first_txindex = height_to_first_txindex_iter
            .get_at(height + 1)
            .unwrap_or(TxIndex::from(total_txs));
        // Build phase histogram from ALL outputs in this block
        let mut histogram = OracleBinsV2::ZERO;
        for txindex in first_txindex.to_usize()..next_first_txindex.to_usize() {
            // Get output count and first output for this transaction
            let first_txoutindex = txindex_to_first_txoutindex_iter.get_at_unwrap(txindex);
            let output_count: StoredU64 =
                txindex_to_output_count_iter.get_unwrap(TxIndex::from(txindex));
            for i in 0..*output_count as usize {
                // Outputs of a tx are contiguous starting at first_txoutindex.
                let txoutindex = first_txoutindex.to_usize() + i;
                let sats: Sats = txoutindex_to_value_iter.get_at_unwrap(txoutindex);
                // OracleBinsV2::add already filters by sats range (1k to 100k BTC)
                histogram.add(sats);
            }
        }
        // One histogram per block, pushed in height order.
        self.phase_v2_histogram.push(histogram);
        // Progress logging
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V2 histogram computation: {}%", progress);
            // Periodic flush at each whole-percent boundary, guarded by the
            // exit lock — NOTE(review): presumably this defers shutdown while
            // a write is in flight; confirm Exit semantics.
            let _lock = exit.lock();
            self.phase_v2_histogram.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v2_histogram.write()?;
    }
    info!(
        "Phase V2 histograms complete: {} blocks",
        self.phase_v2_histogram.len()
    );
    Ok(())
}
/// Compute Phase Oracle V2 - Step 2: Per-block prices using cross-correlation
///
/// For each block: reconstruct a phase histogram from the stored bins, anchor
/// the search with that week's OHLC low/high when available, and fall back to
/// the previous block's price when the block has too few outputs or no anchor.
fn compute_phase_v2_prices(
    &mut self,
    indexes: &indexes::Vecs,
    price_cents: &cents::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Invalidate previously computed prices if the histogram vec changed version.
    let source_version = self.phase_v2_histogram.version();
    self.phase_v2_price_cents
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = self.phase_v2_histogram.len();
    // Resume point: min of what we already stored and the requested start.
    let start_height = self
        .phase_v2_price_cents
        .len()
        .min(starting_indexes.height.to_usize());
    self.phase_v2_price_cents
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V2 prices from height {} to {}",
        start_height, total_heights
    );
    let mut histogram_iter = self.phase_v2_histogram.iter()?;
    let mut height_to_dateindex_iter = indexes.height.dateindex.iter();
    // For weekly OHLC anchors
    let mut price_ohlc_iter = price_cents.ohlc.dateindex.iter()?;
    let mut dateindex_to_weekindex_iter = indexes.dateindex.weekindex.iter();
    let mut weekindex_to_first_dateindex_iter = indexes.weekindex.first_dateindex.iter();
    let mut weekindex_dateindex_count_iter = indexes.weekindex.dateindex_count.iter();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    // Track previous price for fallback
    let mut prev_price_cents = if start_height > 0 {
        // Resuming: seed the fallback with the last stored price.
        self.phase_v2_price_cents
            .iter()?
            .get(Height::from(start_height - 1))
            .unwrap_or(Cents::from(10_000_000i64))
    } else {
        Cents::from(10_000_000i64) // Default ~$100k
    };
    for height in start_height..total_heights {
        let height_idx = Height::from(height);
        let histogram: OracleBinsV2 = histogram_iter.get_unwrap(height_idx);
        // Get weekly anchor for this block's date
        let dateindex = height_to_dateindex_iter.get(height_idx);
        // Weekly (low, high) in dollars, computed by scanning the week's daily
        // OHLC rows. None if any index lookup fails or no positive prices found.
        let weekly_bounds: Option<(f64, f64)> = dateindex.and_then(|di| {
            let wi = dateindex_to_weekindex_iter.get(di)?;
            let first_di = weekindex_to_first_dateindex_iter.get(wi)?;
            let count = weekindex_dateindex_count_iter
                .get(wi)
                .map(|c| *c as usize)?;
            // Fold the week's daily lows/highs into one range.
            let mut low = Cents::from(i64::MAX);
            let mut high = Cents::from(0i64);
            for i in 0..count {
                let di = DateIndex::from(first_di.to_usize() + i);
                if let Some(ohlc) = price_ohlc_iter.get(di) {
                    if *ohlc.low < low {
                        low = *ohlc.low;
                    }
                    if *ohlc.high > high {
                        high = *ohlc.high;
                    }
                }
            }
            if i64::from(low) > 0 && i64::from(high) > 0 {
                // Convert cents to dollars.
                Some((
                    i64::from(low) as f64 / 100.0,
                    i64::from(high) as f64 / 100.0,
                ))
            } else {
                None
            }
        });
        // Compute price using cross-correlation
        let price_cents = if histogram.total_count() >= 10 {
            // Convert OracleBinsV2 to PhaseHistogramV2
            // NOTE(review): this maps each bin center back into the
            // 10^6..10^7 sats decade (log_sats = 6.0 + phase) and re-adds one
            // sample per count — presumably PhaseHistogramV2 only uses the
            // fractional log (phase), so the decade choice is arbitrary;
            // confirm. Also O(total_count) — fine if counts stay small.
            let mut phase_hist = PhaseHistogramV2::new();
            for (i, &count) in histogram.bins.iter().enumerate() {
                if count > 0 {
                    let phase = (i as f64 + 0.5) / 200.0;
                    let log_sats = 6.0 + phase;
                    let sats = 10.0_f64.powf(log_sats);
                    for _ in 0..count {
                        phase_hist.add(Sats::from(sats as u64));
                    }
                }
            }
            if let Some((low, high)) = weekly_bounds {
                // Have weekly anchor - constrained search
                let (phase_min, phase_max) = phase_range_from_anchor(low, high, 0.05);
                let (best_phase, _corr) =
                    find_best_phase(&phase_hist, 2, Some(phase_min), Some(phase_max));
                let price = phase_to_price(best_phase, low, high);
                Cents::from((price * 100.0) as i64)
            } else {
                // No anchor - use previous price as reference
                // Bracket the previous price by [0.5x, 2x] and search unconstrained.
                let anchor_low = (i64::from(prev_price_cents) as f64 / 100.0) * 0.5;
                let anchor_high = (i64::from(prev_price_cents) as f64 / 100.0) * 2.0;
                let (best_phase, _corr) = find_best_phase(&phase_hist, 2, None, None);
                let price = phase_to_price(best_phase, anchor_low, anchor_high);
                Cents::from((price * 100.0) as i64)
            }
        } else {
            // Too few outputs - use previous price
            prev_price_cents
        };
        prev_price_cents = price_cents;
        self.phase_v2_price_cents.push(price_cents);
        // Progress logging
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V2 price computation: {}%", progress);
            // Periodic flush under the exit lock.
            let _lock = exit.lock();
            self.phase_v2_price_cents.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v2_price_cents.write()?;
    }
    info!(
        "Phase V2 prices complete: {} blocks",
        self.phase_v2_price_cents.len()
    );
    Ok(())
}
/// Compute Phase Oracle V2 - Peak prices using direct peak finding (like V1)
///
/// Instead of cross-correlation, downsamples the 200-bin histogram to 100
/// bins, takes the most-populated bin (skipping bin 0), and chooses the
/// power-of-ten decade whose candidate price is closest to the weekly anchor
/// (or the previous block's price when no anchor exists).
fn compute_phase_v2_peak_prices(
    &mut self,
    indexes: &indexes::Vecs,
    price_cents: &cents::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Invalidate previously computed prices if the histogram vec changed version.
    let source_version = self.phase_v2_histogram.version();
    self.phase_v2_peak_price_cents
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = self.phase_v2_histogram.len();
    // Resume point: min of what we already stored and the requested start.
    let start_height = self
        .phase_v2_peak_price_cents
        .len()
        .min(starting_indexes.height.to_usize());
    self.phase_v2_peak_price_cents
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V2 peak prices from height {} to {}",
        start_height, total_heights
    );
    let mut histogram_iter = self.phase_v2_histogram.iter()?;
    let mut height_to_dateindex_iter = indexes.height.dateindex.iter();
    // For weekly OHLC anchors
    let mut price_ohlc_iter = price_cents.ohlc.dateindex.iter()?;
    let mut dateindex_to_weekindex_iter = indexes.dateindex.weekindex.iter();
    let mut weekindex_to_first_dateindex_iter = indexes.weekindex.first_dateindex.iter();
    let mut weekindex_dateindex_count_iter = indexes.weekindex.dateindex_count.iter();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    // Track previous price for fallback
    let mut prev_price_cents = if start_height > 0 {
        // Resuming: seed the fallback with the last stored price.
        self.phase_v2_peak_price_cents
            .iter()?
            .get(Height::from(start_height - 1))
            .unwrap_or(Cents::from(10_000_000i64))
    } else {
        Cents::from(10_000_000i64)
    };
    for height in start_height..total_heights {
        let height_idx = Height::from(height);
        let histogram: OracleBinsV2 = histogram_iter.get_unwrap(height_idx);
        // Get weekly anchor for decade selection
        let dateindex = height_to_dateindex_iter.get(height_idx);
        // Anchor = mean of the week's daily closes, in dollars.
        let anchor_price: Option<f64> = dateindex.and_then(|di| {
            let wi = dateindex_to_weekindex_iter.get(di)?;
            let first_di = weekindex_to_first_dateindex_iter.get(wi)?;
            let count = weekindex_dateindex_count_iter
                .get(wi)
                .map(|c| *c as usize)?;
            let mut sum = 0i64;
            let mut cnt = 0;
            for i in 0..count {
                let di = DateIndex::from(first_di.to_usize() + i);
                if let Some(ohlc) = price_ohlc_iter.get(di) {
                    sum += i64::from(*ohlc.close);
                    cnt += 1;
                }
            }
            if cnt > 0 {
                // Mean close in cents, converted to dollars.
                Some(sum as f64 / cnt as f64 / 100.0)
            } else {
                None
            }
        });
        // Use anchor or previous price for decade selection
        let anchor = anchor_price.unwrap_or(i64::from(prev_price_cents) as f64 / 100.0);
        // Find peak bin directly (like V1) using 100 bins (downsample from 200)
        let price_cents = if histogram.total_count() >= 10 {
            // Downsample 200 bins to 100 bins
            let mut bins100 = [0u32; 100];
            for i in 0..100 {
                bins100[i] = histogram.bins[i * 2] as u32 + histogram.bins[i * 2 + 1] as u32;
            }
            // Find peak bin, skipping bin 0 (round BTC amounts cluster there)
            let peak_bin = bins100
                .iter()
                .enumerate()
                .filter(|(bin, _)| *bin != 0)
                .max_by_key(|(_, count)| *count)
                .map(|(bin, _)| bin)
                .unwrap_or(0);
            // Convert bin to price using anchor for decade (100 bins)
            let phase = (peak_bin as f64 + 0.5) / 100.0;
            let base_price = 10.0_f64.powf(phase);
            // Find best decade
            // Try base_price * 10^d for d in -2..=6 and keep the candidate
            // closest to the anchor.
            let mut best_price = base_price;
            let mut best_dist = f64::MAX;
            for decade in -2..=6 {
                let candidate = base_price * 10.0_f64.powi(decade);
                let dist = (candidate - anchor).abs();
                if dist < best_dist {
                    best_dist = dist;
                    best_price = candidate;
                }
            }
            // Clamp to a sane dollar range before converting to cents.
            Cents::from((best_price.clamp(0.01, 10_000_000.0) * 100.0) as i64)
        } else {
            // Too few outputs - reuse the previous block's price.
            prev_price_cents
        };
        prev_price_cents = price_cents;
        self.phase_v2_peak_price_cents.push(price_cents);
        // Progress logging
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V2 peak price computation: {}%", progress);
            // Periodic flush under the exit lock.
            let _lock = exit.lock();
            self.phase_v2_peak_price_cents.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v2_peak_price_cents.write()?;
    }
    info!(
        "Phase V2 peak prices complete: {} blocks",
        self.phase_v2_peak_price_cents.len()
    );
    Ok(())
}
/// Compute Phase Oracle V2 - Daily distributions from per-block prices
fn compute_phase_v2_daily(
    &mut self,
    indexes: &indexes::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    info!("Computing phase V2 daily distributions");
    // Both aggregations share the same starting date and day→height mapping.
    let start = starting_indexes.dateindex;
    let first_height = &indexes.dateindex.first_height;
    let height_count = &indexes.dateindex.height_count;
    // Cross-correlation based
    self.phase_v2_daily_cents.compute(
        start,
        &self.phase_v2_price_cents,
        first_height,
        height_count,
        exit,
    )?;
    // Peak-based
    self.phase_v2_peak_daily_cents.compute(
        start,
        &self.phase_v2_peak_price_cents,
        first_height,
        height_count,
        exit,
    )?;
    info!(
        "Phase V2 daily distributions complete: {} days",
        self.phase_v2_daily_cents.len()
    );
    Ok(())
}
/// Compute Phase Oracle V3 - Step 1: Per-block histograms with uniqueVal filtering
///
/// Filters: >= 1000 sats, only outputs with unique values within their transaction.
/// This reduces spurious peaks from exchange batched payouts and inscription spam.
///
/// Resumable: resumes at `min(already-computed len, starting height)`, truncates
/// any stale tail beyond that point, and persists progress roughly every 1% of
/// heights as well as once at the end.
///
/// # Errors
/// Propagates version-validation, truncation, and write errors from the
/// underlying vec storage.
fn compute_phase_v3_histograms(
    &mut self,
    indexer: &Indexer,
    indexes: &indexes::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Invalidate previously-computed data if the source (output values) changed version.
    let source_version = indexer.vecs.outputs.value.version();
    self.phase_v3_histogram
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = indexer.vecs.blocks.timestamp.len();
    // Resume point: never past what the caller requested, never past what exists.
    let start_height = self
        .phase_v3_histogram
        .len()
        .min(starting_indexes.height.to_usize());
    self.phase_v3_histogram
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V3 histograms from height {} to {}",
        start_height, total_heights
    );
    // Sequential iterators over the indexed data; advanced in lock-step with
    // the height loop below (positions carry over between iterations).
    let mut height_to_first_txindex_iter = indexer.vecs.transactions.first_txindex.into_iter();
    let mut txindex_to_first_txoutindex_iter =
        indexer.vecs.transactions.first_txoutindex.into_iter();
    let mut txindex_to_output_count_iter = indexes.txindex.output_count.iter();
    let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.into_iter();
    let total_txs = indexer.vecs.transactions.height.len();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    // Reusable buffer for collecting output values per transaction
    // (avoids a fresh allocation for every tx).
    let mut tx_values: Vec<Sats> = Vec::with_capacity(16);
    for height in start_height..total_heights {
        // Get transaction range for this block; the last block's range is
        // closed by the total transaction count.
        let first_txindex = height_to_first_txindex_iter.get_at_unwrap(height);
        let next_first_txindex = height_to_first_txindex_iter
            .get_at(height + 1)
            .unwrap_or(TxIndex::from(total_txs));
        // Build phase histogram with uniqueVal filtering
        let mut histogram = OracleBinsV2::ZERO;
        // Skip coinbase (first tx in block)
        for txindex in (first_txindex.to_usize() + 1)..next_first_txindex.to_usize() {
            // Get output count and first output for this transaction
            let first_txoutindex = txindex_to_first_txoutindex_iter.get_at_unwrap(txindex);
            let output_count: StoredU64 =
                txindex_to_output_count_iter.get_unwrap(TxIndex::from(txindex));
            // Collect all output values for this transaction
            tx_values.clear();
            for i in 0..*output_count as usize {
                let txoutindex = first_txoutindex.to_usize() + i;
                let sats: Sats = txoutindex_to_value_iter.get_at_unwrap(txoutindex);
                tx_values.push(sats);
            }
            // Count occurrences of each value to determine uniqueness
            // For small output counts, simple nested loop is faster than HashMap
            // (O(n^2) but n is typically tiny; batched payouts with many equal
            // outputs are exactly what gets filtered out here).
            for (i, &sats) in tx_values.iter().enumerate() {
                // Skip if below minimum (BASE filter: >= 1000 sats)
                if sats < Sats::_1K {
                    continue;
                }
                // Check if this value is unique within the transaction
                let mut is_unique = true;
                for (j, &other_sats) in tx_values.iter().enumerate() {
                    if i != j && sats == other_sats {
                        is_unique = false;
                        break;
                    }
                }
                // Only add unique values to histogram
                if is_unique {
                    histogram.add(sats);
                }
            }
        }
        self.phase_v3_histogram.push(histogram);
        // Progress logging + periodic persistence (at each whole-percent step).
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V3 histogram computation: {}%", progress);
            // Hold the exit lock while flushing so shutdown can't interleave
            // with a partial write.
            let _lock = exit.lock();
            self.phase_v3_histogram.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v3_histogram.write()?;
    }
    info!(
        "Phase V3 histograms complete: {} blocks",
        self.phase_v3_histogram.len()
    );
    Ok(())
}
/// Compute Phase Oracle V3 - Step 2: Per-block prices using cross-correlation
///
/// For each block: rebuilds a `PhaseHistogramV2` from the stored 200-bin
/// histogram, derives a weekly OHLC low/high anchor for the block's date
/// (when available), and runs the round-USD template cross-correlation to
/// pick the best phase, converted to a price via the anchor. Blocks with
/// fewer than 10 qualifying outputs fall back to the previous block's price.
///
/// Resumable and periodically persisted, like the histogram step.
///
/// # Errors
/// Propagates version-validation, truncation, iterator, and write errors.
fn compute_phase_v3_prices(
    &mut self,
    indexes: &indexes::Vecs,
    price_cents: &cents::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Derived from the step-1 histograms; reset if their version changed.
    let source_version = self.phase_v3_histogram.version();
    self.phase_v3_price_cents
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = self.phase_v3_histogram.len();
    let start_height = self
        .phase_v3_price_cents
        .len()
        .min(starting_indexes.height.to_usize());
    self.phase_v3_price_cents
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V3 prices from height {} to {}",
        start_height, total_heights
    );
    let mut histogram_iter = self.phase_v3_histogram.iter()?;
    let mut height_to_dateindex_iter = indexes.height.dateindex.iter();
    // For weekly OHLC anchors (date -> week -> per-day OHLC range).
    let mut price_ohlc_iter = price_cents.ohlc.dateindex.iter()?;
    let mut dateindex_to_weekindex_iter = indexes.dateindex.weekindex.iter();
    let mut weekindex_to_first_dateindex_iter = indexes.weekindex.first_dateindex.iter();
    let mut weekindex_dateindex_count_iter = indexes.weekindex.dateindex_count.iter();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    // Track previous price for fallback; seeded from the last computed value
    // when resuming, else a default of 10_000_000 cents (~$100k).
    let mut prev_price_cents = if start_height > 0 {
        self.phase_v3_price_cents
            .iter()?
            .get(Height::from(start_height - 1))
            .unwrap_or(Cents::from(10_000_000i64))
    } else {
        Cents::from(10_000_000i64) // Default ~$100k
    };
    for height in start_height..total_heights {
        let height_idx = Height::from(height);
        let histogram: OracleBinsV2 = histogram_iter.get_unwrap(height_idx);
        // Get weekly anchor for this block's date: the min low / max high
        // in cents across all days of the block's week, as dollars.
        let dateindex = height_to_dateindex_iter.get(height_idx);
        let weekly_bounds: Option<(f64, f64)> = dateindex.and_then(|di| {
            let wi = dateindex_to_weekindex_iter.get(di)?;
            let first_di = weekindex_to_first_dateindex_iter.get(wi)?;
            let count = weekindex_dateindex_count_iter
                .get(wi)
                .map(|c| *c as usize)?;
            let mut low = Cents::from(i64::MAX);
            let mut high = Cents::from(0i64);
            for i in 0..count {
                let di = DateIndex::from(first_di.to_usize() + i);
                if let Some(ohlc) = price_ohlc_iter.get(di) {
                    if *ohlc.low < low {
                        low = *ohlc.low;
                    }
                    if *ohlc.high > high {
                        high = *ohlc.high;
                    }
                }
            }
            // Both bounds must have been updated with real (positive) data.
            if i64::from(low) > 0 && i64::from(high) > 0 {
                Some((
                    i64::from(low) as f64 / 100.0,
                    i64::from(high) as f64 / 100.0,
                ))
            } else {
                None
            }
        });
        // Compute price using cross-correlation.
        // NOTE: `price_cents` here shadows the `price_cents: &cents::Vecs`
        // parameter for the rest of the loop body.
        let price_cents = if histogram.total_count() >= 10 {
            // Convert OracleBinsV2 to PhaseHistogramV2 by re-adding each
            // bin's count at a representative sats value for the bin center
            // (only the phase matters, so the 10^6..10^7 decade is arbitrary).
            let mut phase_hist = PhaseHistogramV2::new();
            for (i, &count) in histogram.bins.iter().enumerate() {
                if count > 0 {
                    let phase = (i as f64 + 0.5) / 200.0;
                    let log_sats = 6.0 + phase;
                    let sats = 10.0_f64.powf(log_sats);
                    for _ in 0..count {
                        phase_hist.add(Sats::from(sats as u64));
                    }
                }
            }
            if let Some((low, high)) = weekly_bounds {
                // Have weekly anchor - constrained search (±5% tolerance,
                // ±2 bins = ±1% template match tolerance).
                let (phase_min, phase_max) = phase_range_from_anchor(low, high, 0.05);
                let (best_phase, _corr) =
                    find_best_phase(&phase_hist, 2, Some(phase_min), Some(phase_max));
                let price = phase_to_price(best_phase, low, high);
                Cents::from((price * 100.0) as i64)
            } else {
                // No anchor - use previous price as reference, allowing a
                // 0.5x..2x band for decade disambiguation.
                let anchor_low = (i64::from(prev_price_cents) as f64 / 100.0) * 0.5;
                let anchor_high = (i64::from(prev_price_cents) as f64 / 100.0) * 2.0;
                let (best_phase, _corr) = find_best_phase(&phase_hist, 2, None, None);
                let price = phase_to_price(best_phase, anchor_low, anchor_high);
                Cents::from((price * 100.0) as i64)
            }
        } else {
            // Too few outputs - use previous price
            prev_price_cents
        };
        prev_price_cents = price_cents;
        self.phase_v3_price_cents.push(price_cents);
        // Progress logging + periodic persistence under the exit lock.
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V3 price computation: {}%", progress);
            let _lock = exit.lock();
            self.phase_v3_price_cents.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v3_price_cents.write()?;
    }
    info!(
        "Phase V3 prices complete: {} blocks",
        self.phase_v3_price_cents.len()
    );
    Ok(())
}
/// Compute Phase Oracle V3 - Peak prices using direct peak finding (like V1)
///
/// Instead of template cross-correlation, picks the single most populated
/// phase bin (after downsampling 200 -> 100 bins) and converts it to a price,
/// choosing the power-of-ten decade closest to a weekly average-close anchor
/// (or the previous block's price when no anchor exists).
///
/// Resumable and periodically persisted, like the other phase steps.
///
/// # Errors
/// Propagates version-validation, truncation, iterator, and write errors.
fn compute_phase_v3_peak_prices(
    &mut self,
    indexes: &indexes::Vecs,
    price_cents: &cents::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    // Derived from the step-1 histograms; reset if their version changed.
    let source_version = self.phase_v3_histogram.version();
    self.phase_v3_peak_price_cents
        .validate_computed_version_or_reset(source_version)?;
    let total_heights = self.phase_v3_histogram.len();
    let start_height = self
        .phase_v3_peak_price_cents
        .len()
        .min(starting_indexes.height.to_usize());
    self.phase_v3_peak_price_cents
        .truncate_if_needed_at(start_height)?;
    if start_height >= total_heights {
        return Ok(());
    }
    info!(
        "Computing phase V3 peak prices from height {} to {}",
        start_height, total_heights
    );
    let mut histogram_iter = self.phase_v3_histogram.iter()?;
    let mut height_to_dateindex_iter = indexes.height.dateindex.iter();
    // For weekly OHLC anchors (date -> week -> per-day closes).
    let mut price_ohlc_iter = price_cents.ohlc.dateindex.iter()?;
    let mut dateindex_to_weekindex_iter = indexes.dateindex.weekindex.iter();
    let mut weekindex_to_first_dateindex_iter = indexes.weekindex.first_dateindex.iter();
    let mut weekindex_dateindex_count_iter = indexes.weekindex.dateindex_count.iter();
    let mut last_progress = (start_height * 100 / total_heights.max(1)) as u8;
    // Track previous price for fallback; seeded from the last computed value
    // when resuming, else a default of 10_000_000 cents (~$100k).
    let mut prev_price_cents = if start_height > 0 {
        self.phase_v3_peak_price_cents
            .iter()?
            .get(Height::from(start_height - 1))
            .unwrap_or(Cents::from(10_000_000i64))
    } else {
        Cents::from(10_000_000i64)
    };
    for height in start_height..total_heights {
        let height_idx = Height::from(height);
        let histogram: OracleBinsV2 = histogram_iter.get_unwrap(height_idx);
        // Get weekly anchor for decade selection: average of the week's
        // daily closes, in dollars.
        let dateindex = height_to_dateindex_iter.get(height_idx);
        let anchor_price: Option<f64> = dateindex.and_then(|di| {
            let wi = dateindex_to_weekindex_iter.get(di)?;
            let first_di = weekindex_to_first_dateindex_iter.get(wi)?;
            let count = weekindex_dateindex_count_iter
                .get(wi)
                .map(|c| *c as usize)?;
            let mut sum = 0i64;
            let mut cnt = 0;
            for i in 0..count {
                let di = DateIndex::from(first_di.to_usize() + i);
                if let Some(ohlc) = price_ohlc_iter.get(di) {
                    sum += i64::from(*ohlc.close);
                    cnt += 1;
                }
            }
            if cnt > 0 {
                Some(sum as f64 / cnt as f64 / 100.0)
            } else {
                None
            }
        });
        // Use anchor or previous price for decade selection
        let anchor = anchor_price.unwrap_or(i64::from(prev_price_cents) as f64 / 100.0);
        // Find peak bin directly (like V1) using 100 bins (downsample from 200)
        let price_cents = if histogram.total_count() >= 10 {
            // Downsample 200 bins to 100 bins (adjacent pairs summed).
            let mut bins100 = [0u32; 100];
            (0..100).for_each(|i| {
                bins100[i] = histogram.bins[i * 2] as u32 + histogram.bins[i * 2 + 1] as u32;
            });
            // Find peak bin, skipping bin 0 (round BTC amounts cluster there).
            // NOTE(review): `max_by_key` keeps the LAST bin on ties.
            let peak_bin = bins100
                .iter()
                .enumerate()
                .filter(|(bin, _)| *bin != 0)
                .max_by_key(|(_, count)| *count)
                .map(|(bin, _)| bin)
                .unwrap_or(0);
            // Convert bin to price using anchor for decade (100 bins)
            let phase = (peak_bin as f64 + 0.5) / 100.0;
            let base_price = 10.0_f64.powf(phase);
            // Find best decade: candidate closest to the anchor wins.
            let mut best_price = base_price;
            let mut best_dist = f64::MAX;
            for decade in -2..=6 {
                let candidate = base_price * 10.0_f64.powi(decade);
                let dist = (candidate - anchor).abs();
                if dist < best_dist {
                    best_dist = dist;
                    best_price = candidate;
                }
            }
            // Clamp to $0.01 .. $10M and store as cents.
            Cents::from((best_price.clamp(0.01, 10_000_000.0) * 100.0) as i64)
        } else {
            // Too few outputs - carry the previous price forward.
            prev_price_cents
        };
        prev_price_cents = price_cents;
        self.phase_v3_peak_price_cents.push(price_cents);
        // Progress logging + periodic persistence under the exit lock.
        let progress = (height * 100 / total_heights.max(1)) as u8;
        if progress > last_progress {
            last_progress = progress;
            info!("Phase V3 peak price computation: {}%", progress);
            let _lock = exit.lock();
            self.phase_v3_peak_price_cents.write()?;
        }
    }
    // Final write
    {
        let _lock = exit.lock();
        self.phase_v3_peak_price_cents.write()?;
    }
    info!(
        "Phase V3 peak prices complete: {} blocks",
        self.phase_v3_peak_price_cents.len()
    );
    Ok(())
}
/// Compute Phase Oracle V3 - Daily distributions from per-block prices
///
/// Aggregates the two per-block V3 price series (cross-correlation and
/// peak-based) into daily distributions via `compute`, grouping blocks by
/// date using the date-index → height mappings.
///
/// # Errors
/// Propagates any error returned by the underlying `compute` calls.
fn compute_phase_v3_daily(
    &mut self,
    indexes: &indexes::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
) -> Result<()> {
    info!("Computing phase V3 daily distributions");
    let date_start = starting_indexes.dateindex;
    let first_height = &indexes.dateindex.first_height;
    let height_count = &indexes.dateindex.height_count;
    // Daily distribution of the cross-correlation prices.
    self.phase_v3_daily_cents.compute(
        date_start,
        &self.phase_v3_price_cents,
        first_height,
        height_count,
        exit,
    )?;
    // Daily distribution of the peak-based prices.
    self.phase_v3_peak_daily_cents.compute(
        date_start,
        &self.phase_v3_peak_price_cents,
        first_height,
        height_count,
        exit,
    )?;
    info!(
        "Phase V3 daily distributions complete: {} days",
        self.phase_v3_daily_cents.len()
    );
    Ok(())
}
} }

View File

@@ -46,6 +46,58 @@ impl Vecs {
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| OHLCDollars::from(o)), |di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| OHLCDollars::from(o)),
); );
// Phase Oracle V2 (round USD template matching)
// v3: Peak prices use 100 bins (downsampled from 200)
let phase_v2_version = version + Version::new(3);
let phase_v2_histogram =
BytesVec::forced_import(db, "phase_v2_histogram", phase_v2_version)?;
let phase_v2_price_cents =
PcoVec::forced_import(db, "phase_v2_price_cents", phase_v2_version)?;
let phase_v2_peak_price_cents =
PcoVec::forced_import(db, "phase_v2_peak_price_cents", phase_v2_version)?;
let phase_v2_daily_cents =
Distribution::forced_import(db, "phase_v2_daily", phase_v2_version)?;
let phase_v2_daily_dollars =
LazyTransformDistribution::from_distribution::<CentsToDollars>(
"phase_v2_daily_dollars",
phase_v2_version,
&phase_v2_daily_cents,
);
let phase_v2_peak_daily_cents =
Distribution::forced_import(db, "phase_v2_peak_daily", phase_v2_version)?;
let phase_v2_peak_daily_dollars =
LazyTransformDistribution::from_distribution::<CentsToDollars>(
"phase_v2_peak_daily_dollars",
phase_v2_version,
&phase_v2_peak_daily_cents,
);
// Phase Oracle V3 (BASE + uniqueVal filter)
// v4: Peak prices use 100 bins (downsampled from 200)
let phase_v3_version = version + Version::new(4);
let phase_v3_histogram =
BytesVec::forced_import(db, "phase_v3_histogram", phase_v3_version)?;
let phase_v3_price_cents =
PcoVec::forced_import(db, "phase_v3_price_cents", phase_v3_version)?;
let phase_v3_peak_price_cents =
PcoVec::forced_import(db, "phase_v3_peak_price_cents", phase_v3_version)?;
let phase_v3_daily_cents =
Distribution::forced_import(db, "phase_v3_daily", phase_v3_version)?;
let phase_v3_daily_dollars =
LazyTransformDistribution::from_distribution::<CentsToDollars>(
"phase_v3_daily_dollars",
phase_v3_version,
&phase_v3_daily_cents,
);
let phase_v3_peak_daily_cents =
Distribution::forced_import(db, "phase_v3_peak_daily", phase_v3_version)?;
let phase_v3_peak_daily_dollars =
LazyTransformDistribution::from_distribution::<CentsToDollars>(
"phase_v3_peak_daily_dollars",
phase_v3_version,
&phase_v3_peak_daily_cents,
);
Ok(Self { Ok(Self {
pairoutputindex_to_txindex, pairoutputindex_to_txindex,
height_to_first_pairoutputindex, height_to_first_pairoutputindex,
@@ -59,6 +111,20 @@ impl Vecs {
ohlc_cents, ohlc_cents,
ohlc_dollars, ohlc_dollars,
tx_count, tx_count,
phase_v2_histogram,
phase_v2_price_cents,
phase_v2_peak_price_cents,
phase_v2_daily_cents,
phase_v2_daily_dollars,
phase_v2_peak_daily_cents,
phase_v2_peak_daily_dollars,
phase_v3_histogram,
phase_v3_price_cents,
phase_v3_peak_price_cents,
phase_v3_daily_cents,
phase_v3_daily_dollars,
phase_v3_peak_daily_cents,
phase_v3_peak_daily_dollars,
}) })
} }
} }

View File

@@ -158,6 +158,7 @@ mod compute;
mod config; mod config;
mod histogram; mod histogram;
mod import; mod import;
mod phase_v2;
mod stencil; mod stencil;
mod vecs; mod vecs;

View File

@@ -0,0 +1,296 @@
//! Phase Oracle V2 - Round USD Template Cross-Correlation
//!
//! Detects Bitcoin prices by finding where round USD amounts ($1, $5, $10, etc.)
//! cluster in the phase histogram. Uses weekly OHLC anchors to constrain search.
//!
//! ## Algorithm
//!
//! 1. Build 200-bin phase histogram: bin = frac(log10(sats)) * 200
//! 2. Cross-correlate with weighted round USD template
//! 3. Use weekly OHLC anchor to constrain phase search range
//! 4. Return best-matching phase, convert to price
//!
//! ## Key Insight
//!
//! Round USD amounts create a fixed "fingerprint" pattern in phase space:
//! - $1, $10, $100, $1000 → phase 0.00 (weight 10)
//! - $5, $50, $500 → phase 0.70 (weight 9)
//! - $2, $20, $200 → phase 0.30 (weight 7)
//! - etc.
//!
//! The pattern shifts based on price: sats_phase = usd_phase - price_phase (mod 1)
//! Finding the shift that best matches the template reveals the price phase.
use brk_types::Sats;
/// Number of phase bins (0.5% resolution)
pub const PHASE_BINS_V2: usize = 200;
/// Round USD template: (phase, weight) pairs
/// Phase = frac(log10(usd_cents)) for round USD values
/// Weight reflects expected popularity (higher = more common)
pub const ROUND_USD_TEMPLATE: [(f64, u32); 11] = [
    (0.00, 10), // $1, $10, $100, $1000 - VERY common
    (0.18, 3),  // $1.50, $15, $150 - uncommon
    (0.30, 7),  // $2, $20, $200 - common
    (0.40, 4),  // $2.50, $25, $250 - moderate
    (0.48, 5),  // $3, $30, $300 - moderate
    (0.60, 4),  // $4, $40, $400 - moderate
    (0.70, 9),  // $5, $50, $500 - VERY common
    (0.78, 2),  // $6, $60, $600 - rare
    (0.85, 2),  // $7, $70, $700 - rare
    (0.90, 2),  // $8, $80, $800 - rare
    (0.95, 2),  // $9, $90, $900 - rare
];
/// Pre-computed template bins: (bin_index, weight)
///
/// Maps each template phase onto its discrete histogram bin
/// (`phase * PHASE_BINS_V2`, truncated, wrapped into range).
pub fn template_bins() -> Vec<(usize, u32)> {
    let scale = PHASE_BINS_V2 as f64;
    let mut bins = Vec::with_capacity(ROUND_USD_TEMPLATE.len());
    for &(phase, weight) in ROUND_USD_TEMPLATE.iter() {
        let bin = ((phase * scale) as usize) % PHASE_BINS_V2;
        bins.push((bin, weight));
    }
    bins
}
/// Phase histogram for V2 oracle (200 bins)
///
/// Each bin covers 1/200 of a decade in log10(sats) space (0.5% resolution).
#[derive(Clone)]
pub struct PhaseHistogramV2 {
    // Per-bin occurrence counts.
    bins: [u32; PHASE_BINS_V2],
    // Total values added across all bins (saturating).
    total: u32,
}
impl Default for PhaseHistogramV2 {
    fn default() -> Self {
        Self::new()
    }
}
impl PhaseHistogramV2 {
    /// Create an empty histogram.
    pub fn new() -> Self {
        Self {
            bins: [0; PHASE_BINS_V2],
            total: 0,
        }
    }
    /// Convert sats value to phase bin index
    ///
    /// Returns `None` for values outside the accepted range
    /// (filters: min 1k sats, max 100k BTC).
    #[inline]
    pub fn sats_to_bin(sats: Sats) -> Option<usize> {
        if sats < Sats::_1K || sats > Sats::_100K_BTC {
            return None;
        }
        let log_sats = f64::from(sats).log10();
        let phase = log_sats.fract();
        // `fract` keeps the sign of its input; normalize into [0, 1).
        let phase = if phase < 0.0 { phase + 1.0 } else { phase };
        Some(((phase * PHASE_BINS_V2 as f64) as usize).min(PHASE_BINS_V2 - 1))
    }
    /// Add a sats value to the histogram
    ///
    /// Values outside the accepted range are silently ignored.
    #[inline]
    pub fn add(&mut self, sats: Sats) {
        if let Some(bin) = Self::sats_to_bin(sats) {
            self.bins[bin] = self.bins[bin].saturating_add(1);
            // Fix: saturate like the bins and `add_histogram` do. The previous
            // unchecked `+= 1` could panic in debug builds (or wrap in release)
            // while the per-bin counts silently saturated.
            self.total = self.total.saturating_add(1);
        }
    }
    /// Add another histogram to this one (bin-wise saturating sum).
    pub fn add_histogram(&mut self, other: &PhaseHistogramV2) {
        for (i, &count) in other.bins.iter().enumerate() {
            self.bins[i] = self.bins[i].saturating_add(count);
        }
        self.total = self.total.saturating_add(other.total);
    }
    /// Get total count
    pub fn total(&self) -> u32 {
        self.total
    }
    /// Get bins array
    pub fn bins(&self) -> &[u32; PHASE_BINS_V2] {
        &self.bins
    }
    /// Clear the histogram
    pub fn clear(&mut self) {
        self.bins.fill(0);
        self.total = 0;
    }
}
/// Find the best price phase using cross-correlation with weighted template
///
/// Slides the round-USD template across the histogram and scores each
/// candidate shift as the weighted sum of histogram counts within
/// `tolerance_bins` of every (shifted) template bin.
///
/// # Arguments
/// * `histogram` - Phase histogram to analyze
/// * `tolerance_bins` - Number of bins tolerance for template matching (e.g., 4 = ±2%)
/// * `phase_min` - Optional minimum phase from anchor (0.0-1.0)
/// * `phase_max` - Optional maximum phase from anchor (0.0-1.0)
///
/// # Returns
/// `(best_phase, best_correlation)` - best matching phase (0.0-1.0) and its
/// score. On ties the earliest candidate shift wins; with no signal at all
/// the result is `(0.0, 0)`.
pub fn find_best_phase(
    histogram: &PhaseHistogramV2,
    tolerance_bins: usize,
    phase_min: Option<f64>,
    phase_max: Option<f64>,
) -> (f64, u64) {
    let template = template_bins();
    let counts = histogram.bins();
    // Candidate shifts: the anchor-constrained window when both bounds are
    // given (handling wrap-around past phase 0), otherwise every bin.
    let candidates: Vec<usize> = match (phase_min, phase_max) {
        (Some(lo), Some(hi)) => {
            let lo_bin = ((lo * PHASE_BINS_V2 as f64) as usize) % PHASE_BINS_V2;
            let hi_bin = ((hi * PHASE_BINS_V2 as f64) as usize) % PHASE_BINS_V2;
            if lo_bin <= hi_bin {
                (lo_bin..=hi_bin).collect()
            } else {
                // Window wraps around phase 0.
                (lo_bin..PHASE_BINS_V2).chain(0..=hi_bin).collect()
            }
        }
        _ => (0..PHASE_BINS_V2).collect(),
    };
    // Score one candidate shift: weighted histogram mass near each shifted
    // template bin.
    let score = |shift: usize| -> u64 {
        let mut corr = 0u64;
        for &(template_bin, weight) in &template {
            // Where this template bin lands at the candidate price phase.
            let expected = (template_bin + PHASE_BINS_V2 - shift) % PHASE_BINS_V2;
            for offset in 0..=(2 * tolerance_bins) {
                let bin = (expected + PHASE_BINS_V2 - tolerance_bins + offset) % PHASE_BINS_V2;
                corr += counts[bin] as u64 * weight as u64;
            }
        }
        corr
    };
    // Keep the first shift achieving the maximum score (strict `>` preserves
    // the left-to-right tie-breaking of a sequential scan).
    candidates
        .into_iter()
        .fold((0.0_f64, 0_u64), |(best_phase, best_corr), shift| {
            let corr = score(shift);
            if corr > best_corr {
                (shift as f64 / PHASE_BINS_V2 as f64, corr)
            } else {
                (best_phase, best_corr)
            }
        })
}
/// Get phase range from price anchor (low, high)
///
/// Widens `[price_low, price_high]` by `tolerance_pct` on each side, then
/// maps both bounds to phase space via `frac(log10(price))`, normalized into
/// [0, 1). The returned pair may "wrap" (min > max) when the widened range
/// crosses a power of ten.
pub fn phase_range_from_anchor(price_low: f64, price_high: f64, tolerance_pct: f64) -> (f64, f64) {
    // Map a price to its phase; `fract` preserves sign for prices below 1.0,
    // so shift negative results into [0, 1).
    fn to_phase(price: f64) -> f64 {
        let phase = price.log10().fract();
        if phase < 0.0 { phase + 1.0 } else { phase }
    }
    (
        to_phase(price_low * (1.0 - tolerance_pct)),
        to_phase(price_high * (1.0 + tolerance_pct)),
    )
}
/// Convert detected phase to price using anchor for decade selection
///
/// A phase alone is decade-ambiguous ($6.3, $63, $630, $6300 all share the
/// same phase). The candidate decade whose price lands closest to the anchor
/// midpoint wins, and the result is clamped to [$0.01, $10,000,000].
pub fn phase_to_price(phase: f64, anchor_low: f64, anchor_high: f64) -> f64 {
    // phase = frac(log10(price)), so price = 10^(decade + phase);
    // start from decade 0 (prices in [1, 10)).
    let base = 10.0_f64.powf(phase);
    let target = (anchor_low + anchor_high) / 2.0;
    // Scan decades -2..=6 ($0.01 up to $10M before clamping) and keep the
    // candidate nearest the anchor midpoint (first wins on exact ties).
    let (_, best) = (-2_i32..=6).fold(
        (f64::MAX, base),
        |(best_dist, best_price), decade| {
            let candidate = base * 10.0_f64.powi(decade);
            let dist = (candidate - target).abs();
            if dist < best_dist {
                (dist, candidate)
            } else {
                (best_dist, best_price)
            }
        },
    );
    // Clamp to reasonable range
    best.clamp(0.01, 10_000_000.0)
}
#[cfg(test)]
mod tests {
    use super::*;
    // The template must land on the expected bins for the most common
    // round-USD anchors: the $1-family at bin 0, the $5-family at bin 140.
    #[test]
    fn test_template_bins() {
        let template = template_bins();
        assert_eq!(template.len(), 11);
        // Check $1/$10/$100 maps to bin 0
        assert_eq!(template[0].0, 0);
        assert_eq!(template[0].1, 10);
        // Check $5/$50 maps to bin 140 (0.70 * 200)
        assert_eq!(template[6].0, 140);
        assert_eq!(template[6].1, 9);
    }
    // Whole powers of ten in sats have phase 0; 5M sats sits near the
    // $5-family phase (~0.699 of a decade).
    #[test]
    fn test_sats_to_bin() {
        // 1 BTC = 100M sats, log10(100M) = 8.0, frac = 0.0 → bin 0
        let bin = PhaseHistogramV2::sats_to_bin(Sats::_1BTC).unwrap();
        assert_eq!(bin, 0);
        // 10M sats, log10(10M) = 7.0, frac = 0.0 → bin 0
        let bin = PhaseHistogramV2::sats_to_bin(Sats::_10M).unwrap();
        assert_eq!(bin, 0);
        // 5M sats, log10(5M) ≈ 6.699, frac ≈ 0.699 → bin ~140
        let bin = PhaseHistogramV2::sats_to_bin(Sats::from(5_000_000u64)).unwrap();
        assert!((138..=142).contains(&bin), "5M sats bin = {}", bin);
    }
    // Anchor widening by 5% should keep the phases within the expected
    // sub-ranges of the $6000-$8000 decade.
    #[test]
    fn test_phase_range_from_anchor() {
        // $6000-$8000 range
        let (p_min, p_max) = phase_range_from_anchor(6000.0, 8000.0, 0.05);
        // $6000 → log10 = 3.778, phase = 0.778
        // $8000 → log10 = 3.903, phase = 0.903
        assert!(p_min > 0.7 && p_min < 0.8, "p_min = {}", p_min);
        assert!(p_max > 0.85 && p_max < 0.95, "p_max = {}", p_max);
    }
    // Decade selection must pick the candidate inside the anchor band.
    #[test]
    fn test_phase_to_price() {
        // Phase 0.0 with anchor $50-150 should give ~$100
        let price = phase_to_price(0.0, 50.0, 150.0);
        assert!(price > 80.0 && price < 120.0, "price = {}", price);
        // Phase 0.70 with anchor $4000-6000 should give ~$5000
        let price = phase_to_price(0.70, 4000.0, 6000.0);
        assert!(price > 4000.0 && price < 6000.0, "price = {}", price);
    }
}

View File

@@ -1,7 +1,7 @@
use brk_traversable::Traversable; use brk_traversable::Traversable;
use brk_types::{ use brk_types::{
Cents, DateIndex, Dollars, Height, OHLCCents, OHLCDollars, OracleBins, PairOutputIndex, Sats, Cents, DateIndex, Dollars, Height, OHLCCents, OHLCDollars, OracleBins, OracleBinsV2,
StoredU32, TxIndex, PairOutputIndex, Sats, StoredU32, TxIndex,
}; };
use vecdb::{BytesVec, LazyVecFrom1, PcoVec}; use vecdb::{BytesVec, LazyVecFrom1, PcoVec};
@@ -55,4 +55,49 @@ pub struct Vecs {
/// Number of qualifying transactions per day (for confidence) /// Number of qualifying transactions per day (for confidence)
pub tx_count: PcoVec<DateIndex, StoredU32>, pub tx_count: PcoVec<DateIndex, StoredU32>,
// ========== Phase Oracle V2 (round USD template matching) ==========
/// Per-block 200-bin phase histogram
pub phase_v2_histogram: BytesVec<Height, OracleBinsV2>,
/// Per-block price in cents from phase oracle V2 (cross-correlation with round USD template)
pub phase_v2_price_cents: PcoVec<Height, Cents>,
/// Per-block price in cents using direct peak finding (like V1)
pub phase_v2_peak_price_cents: PcoVec<Height, Cents>,
/// Daily distribution (min, max, average, percentiles) from phase oracle V2
pub phase_v2_daily_cents: Distribution<DateIndex, Cents>,
/// Daily distribution in dollars (lazy conversion from cents)
pub phase_v2_daily_dollars: LazyTransformDistribution<DateIndex, Dollars, Cents>,
/// Daily distribution from peak-based prices
pub phase_v2_peak_daily_cents: Distribution<DateIndex, Cents>,
/// Daily distribution in dollars (lazy conversion from cents)
pub phase_v2_peak_daily_dollars: LazyTransformDistribution<DateIndex, Dollars, Cents>,
// ========== Phase Oracle V3 (BASE + uniqueVal filter) ==========
/// Per-block 200-bin phase histogram with uniqueVal filtering
/// Only includes outputs with unique values within their transaction
pub phase_v3_histogram: BytesVec<Height, OracleBinsV2>,
/// Per-block price in cents from phase oracle V3 (cross-correlation)
pub phase_v3_price_cents: PcoVec<Height, Cents>,
/// Per-block price in cents using direct peak finding (like V1)
pub phase_v3_peak_price_cents: PcoVec<Height, Cents>,
/// Daily distribution from phase oracle V3
pub phase_v3_daily_cents: Distribution<DateIndex, Cents>,
/// Daily distribution in dollars (lazy conversion from cents)
pub phase_v3_daily_dollars: LazyTransformDistribution<DateIndex, Dollars, Cents>,
/// Daily distribution from peak-based prices
pub phase_v3_peak_daily_cents: Distribution<DateIndex, Cents>,
/// Daily distribution in dollars (lazy conversion from cents)
pub phase_v3_peak_daily_dollars: LazyTransformDistribution<DateIndex, Dollars, Cents>,
} }

View File

@@ -4,8 +4,8 @@ HTTP API server for Bitcoin on-chain analytics.
## Features ## Features
- **OpenAPI spec**: Auto-generated docs at `/api` with full spec at `/api.json` - **OpenAPI spec**: Auto-generated docs at `/api` with full spec at `/openapi.json`
- **LLM-optimized**: Compact spec at `/api.trimmed.json` for AI tools - **LLM-optimized**: Compact spec at `/api.json` for AI tools
- **Response caching**: ETag-based with LRU cache (5000 entries) - **Response caching**: ETag-based with LRU cache (5000 entries)
- **Compression**: Brotli, gzip, deflate, zstd - **Compression**: Brotli, gzip, deflate, zstd
- **Static files**: Optional web interface hosting - **Static files**: Optional web interface hosting
@@ -23,8 +23,8 @@ server.serve().await?;
| Path | Description | | Path | Description |
|------|-------------| |------|-------------|
| `/api` | Interactive API documentation | | `/api` | Interactive API documentation |
| `/api.json` | Full OpenAPI specification | | `/openapi.json` | Full OpenAPI specification |
| `/api.trimmed.json` | Compact OpenAPI for LLMs | | `/api.json` | Compact OpenAPI for LLMs |
| `/api/address/{address}` | Address stats, transactions, UTXOs | | `/api/address/{address}` | Address stats, transactions, UTXOs |
| `/api/block/{hash}` | Block info, transactions, status | | `/api/block/{hash}` | Block info, transactions, status |
| `/api/block-height/{height}` | Block by height | | `/api/block-height/{height}` | Block by height |

View File

@@ -5,19 +5,35 @@ fn main() {
// Generate importmap for website (updates index.html in place) // Generate importmap for website (updates index.html in place)
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let website_path = Path::new(&manifest_dir).join("../../website"); // Use ./website (symlink in repo, real dir in published crate)
let website_path = Path::new(&manifest_dir).join("website");
println!("cargo:rerun-if-changed=../../website"); println!("cargo:rerun-if-changed=website");
println!("cargo::warning=build.rs: website_path={website_path:?}, exists={}", website_path.exists());
if website_path.exists() { if website_path.exists() {
// Skip importmap hashing in dev mode (files change often) // Skip importmap hashing in dev mode (files change often)
let map = if is_dev { let map = if is_dev {
println!("cargo::warning=build.rs: dev mode, skipping importmap");
importmap::ImportMap::empty() importmap::ImportMap::empty()
} else { } else {
importmap::ImportMap::scan(&website_path, "") match importmap::ImportMap::scan(&website_path, "") {
.unwrap_or_else(|_| importmap::ImportMap::empty()) Ok(map) => {
println!("cargo::warning=build.rs: importmap scanned {} entries", map.imports.len());
map
}
Err(e) => {
println!("cargo::warning=build.rs: importmap scan failed: {e}");
importmap::ImportMap::empty()
}
}
}; };
let _ = map.update_html_file(&website_path.join("index.html")); let index_path = website_path.join("index.html");
if let Err(e) = map.update_html_file(&index_path) {
println!("cargo::warning=build.rs: failed to update index.html: {e}");
}
} else {
println!("cargo::warning=build.rs: website path does not exist!");
} }
} }

View File

@@ -6,11 +6,11 @@ use axum::{
http::{HeaderMap, StatusCode, Uri}, http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use brk_error::Result;
use brk_types::{Format, MetricSelection, Output}; use brk_types::{Format, MetricSelection, Output};
use quick_cache::sync::GuardResult; use quick_cache::sync::GuardResult;
use crate::{ use crate::{
Result,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT}, api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended, extended::HeaderMapExtended,
}; };
@@ -18,22 +18,10 @@ use crate::{
use super::AppState; use super::AppState;
pub async fn handler( pub async fn handler(
uri: Uri,
headers: HeaderMap,
query: Query<MetricSelection>,
State(state): State<AppState>,
) -> Response {
match req_to_response_res(uri, headers, query, state).await {
Ok(response) => response,
Err(error) => (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response(),
}
}
async fn req_to_response_res(
uri: Uri, uri: Uri,
headers: HeaderMap, headers: HeaderMap,
Query(params): Query<MetricSelection>, Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState, State(AppState { query, cache, .. }): State<AppState>,
) -> Result<Response> { ) -> Result<Response> {
// Phase 1: Search and resolve metadata (cheap) // Phase 1: Search and resolve metadata (cheap)
let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?; let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?;

View File

@@ -6,11 +6,11 @@ use axum::{
http::{HeaderMap, StatusCode, Uri}, http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use brk_error::Result;
use brk_types::{Format, MetricSelection, Output}; use brk_types::{Format, MetricSelection, Output};
use quick_cache::sync::GuardResult; use quick_cache::sync::GuardResult;
use crate::{ use crate::{
Result,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT}, api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended, extended::HeaderMapExtended,
}; };
@@ -18,22 +18,10 @@ use crate::{
use super::AppState; use super::AppState;
pub async fn handler( pub async fn handler(
uri: Uri,
headers: HeaderMap,
query: Query<MetricSelection>,
State(state): State<AppState>,
) -> Response {
match req_to_response_res(uri, headers, query, state).await {
Ok(response) => response,
Err(error) => (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response(),
}
}
async fn req_to_response_res(
uri: Uri, uri: Uri,
headers: HeaderMap, headers: HeaderMap,
Query(params): Query<MetricSelection>, Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState, State(AppState { query, cache, .. }): State<AppState>,
) -> Result<Response> { ) -> Result<Response> {
// Phase 1: Search and resolve metadata (cheap) // Phase 1: Search and resolve metadata (cheap)
let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?; let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?;

View File

@@ -6,11 +6,11 @@ use axum::{
http::{HeaderMap, StatusCode, Uri}, http::{HeaderMap, StatusCode, Uri},
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use brk_error::Result;
use brk_types::{Format, MetricSelection, OutputLegacy}; use brk_types::{Format, MetricSelection, OutputLegacy};
use quick_cache::sync::GuardResult; use quick_cache::sync::GuardResult;
use crate::{ use crate::{
Result,
api::metrics::{CACHE_CONTROL, MAX_WEIGHT}, api::metrics::{CACHE_CONTROL, MAX_WEIGHT},
extended::HeaderMapExtended, extended::HeaderMapExtended,
}; };
@@ -18,22 +18,10 @@ use crate::{
use super::AppState; use super::AppState;
pub async fn handler( pub async fn handler(
uri: Uri,
headers: HeaderMap,
query: Query<MetricSelection>,
State(state): State<AppState>,
) -> Response {
match req_to_response_res(uri, headers, query, state).await {
Ok(response) => response,
Err(error) => (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response(),
}
}
async fn req_to_response_res(
uri: Uri, uri: Uri,
headers: HeaderMap, headers: HeaderMap,
Query(params): Query<MetricSelection>, Query(params): Query<MetricSelection>,
AppState { query, cache, .. }: AppState, State(AppState { query, cache, .. }): State<AppState>,
) -> Result<Response> { ) -> Result<Response> {
// Phase 1: Search and resolve metadata (cheap) // Phase 1: Search and resolve metadata (cheap)
let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?; let resolved = query.run(move |q| q.resolve(params, MAX_WEIGHT)).await?;

View File

@@ -170,6 +170,7 @@ impl ApiMetricsRoutes for ApiRouter<AppState> {
state, state,
) )
.await .await
.into_response()
}, },
|op| op |op| op
.id("get_metric") .id("get_metric")
@@ -188,7 +189,9 @@ impl ApiMetricsRoutes for ApiRouter<AppState> {
.api_route( .api_route(
"/api/metrics/bulk", "/api/metrics/bulk",
get_with( get_with(
bulk::handler, |uri, headers, query, state| async move {
bulk::handler(uri, headers, query, state).await.into_response()
},
|op| op |op| op
.id("get_metrics") .id("get_metrics")
.metrics_tag() .metrics_tag()
@@ -225,7 +228,9 @@ impl ApiMetricsRoutes for ApiRouter<AppState> {
Metrics::from(split.collect::<Vec<_>>().join(separator)), Metrics::from(split.collect::<Vec<_>>().join(separator)),
range, range,
)); ));
legacy::handler(uri, headers, Query(params), state).await legacy::handler(uri, headers, Query(params), state)
.await
.into_response()
}, },
|op| op |op| op
.metrics_tag() .metrics_tag()
@@ -250,7 +255,9 @@ impl ApiMetricsRoutes for ApiRouter<AppState> {
state: State<AppState>| state: State<AppState>|
-> Response { -> Response {
let params: MetricSelection = params.into(); let params: MetricSelection = params.into();
legacy::handler(uri, headers, Query(params), state).await legacy::handler(uri, headers, Query(params), state)
.await
.into_response()
}, },
|op| op |op| op
.metrics_tag() .metrics_tag()

View File

@@ -48,7 +48,7 @@ impl ApiRoutes for ApiRouter<AppState> {
.add_server_routes() .add_server_routes()
.route("/api/server", get(Redirect::temporary("/api#tag/server"))) .route("/api/server", get(Redirect::temporary("/api#tag/server")))
.api_route( .api_route(
"/api.json", "/openapi.json",
get_with( get_with(
async |headers: HeaderMap, async |headers: HeaderMap,
Extension(api): Extension<Arc<OpenApi>>| Extension(api): Extension<Arc<OpenApi>>|
@@ -62,7 +62,7 @@ impl ApiRoutes for ApiRouter<AppState> {
), ),
) )
.api_route( .api_route(
"/api.trimmed.json", "/api.json",
get_with( get_with(
async |headers: HeaderMap, async |headers: HeaderMap,
Extension(api_trimmed): Extension<Arc<String>>| Extension(api_trimmed): Extension<Arc<String>>|
@@ -72,12 +72,13 @@ impl ApiRoutes for ApiRouter<AppState> {
Response::static_json(&headers, &value) Response::static_json(&headers, &value)
}, },
|op| { |op| {
op.id("get_openapi_trimmed") op.id("get_api")
.server_tag() .server_tag()
.summary("Trimmed OpenAPI specification") .summary("Compact OpenAPI specification")
.description( .description(
"Compact OpenAPI specification optimized for LLM consumption. \ "Compact OpenAPI specification optimized for LLM consumption. \
Removes redundant fields while preserving essential API information.", Removes redundant fields while preserving essential API information. \
Full spec available at `/openapi.json`.",
) )
.ok_response::<serde_json::Value>() .ok_response::<serde_json::Value>()
}, },

View File

@@ -29,7 +29,7 @@ pub fn create_openapi() -> OpenApi {
- **Metrics**: Thousands of time-series metrics across multiple indexes (date, block height, etc.) - **Metrics**: Thousands of time-series metrics across multiple indexes (date, block height, etc.)
- **[Mempool.space](https://mempool.space/docs/api/rest) compatible** (WIP): Most non-metrics endpoints follow the mempool.space API format - **[Mempool.space](https://mempool.space/docs/api/rest) compatible** (WIP): Most non-metrics endpoints follow the mempool.space API format
- **Multiple formats**: JSON and CSV output - **Multiple formats**: JSON and CSV output
- **LLM-optimized**: Compact OpenAPI spec at [`/api.trimmed.json`](/api.trimmed.json) for AI tools - **LLM-optimized**: Compact OpenAPI spec at [`/api.json`](/api.json) for AI tools (full spec at [`/openapi.json`](/openapi.json))
### Client Libraries ### Client Libraries

View File

@@ -18,7 +18,7 @@
<script> <script>
Scalar.createApiReference("#app", { Scalar.createApiReference("#app", {
url: "/api.json", url: "/openapi.json",
hideClientButton: true, hideClientButton: true,
telemetry: false, telemetry: false,
// showToolbar: "never", // showToolbar: "never",

View File

@@ -0,0 +1,58 @@
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use brk_error::Error as BrkError;
/// Server result type with Error that implements IntoResponse.
pub type Result<T> = std::result::Result<T, Error>;

/// Server error type that maps to HTTP status codes.
pub struct Error(StatusCode, String);

impl Error {
    /// Build an error with an explicit status code and message.
    fn with_status(status: StatusCode, msg: impl Into<String>) -> Self {
        Self(status, msg.into())
    }

    /// 400 Bad Request.
    pub fn bad_request(msg: impl Into<String>) -> Self {
        Self::with_status(StatusCode::BAD_REQUEST, msg)
    }

    /// 403 Forbidden.
    pub fn forbidden(msg: impl Into<String>) -> Self {
        Self::with_status(StatusCode::FORBIDDEN, msg)
    }

    /// 404 Not Found.
    pub fn not_found(msg: impl Into<String>) -> Self {
        Self::with_status(StatusCode::NOT_FOUND, msg)
    }

    /// 500 Internal Server Error.
    pub fn internal(msg: impl Into<String>) -> Self {
        Self::with_status(StatusCode::INTERNAL_SERVER_ERROR, msg)
    }
}
impl From<BrkError> for Error {
    /// Translate a domain-level `BrkError` into an HTTP error.
    fn from(e: BrkError) -> Self {
        // Pick the status from the error category; the body is the error's
        // Display text.
        let status = match &e {
            // Malformed or unsupported client input -> 400 Bad Request.
            BrkError::InvalidTxid
            | BrkError::InvalidNetwork
            | BrkError::InvalidAddress
            | BrkError::UnsupportedType(_)
            | BrkError::Parse(_)
            | BrkError::NoMetrics
            | BrkError::MetricUnsupportedIndex { .. }
            | BrkError::WeightExceeded { .. } => StatusCode::BAD_REQUEST,
            // Lookups that found nothing -> 404 Not Found.
            BrkError::UnknownAddress
            | BrkError::UnknownTxid
            | BrkError::NotFound(_)
            | BrkError::MetricNotFound { .. } => StatusCode::NOT_FOUND,
            // Everything else is treated as a server-side failure.
            _ => StatusCode::INTERNAL_SERVER_ERROR,
        };
        Self(status, e.to_string())
    }
}
impl IntoResponse for Error {
fn into_response(self) -> Response {
(self.0, self.1).into_response()
}
}

View File

@@ -7,30 +7,30 @@ use std::{
use axum::{ use axum::{
body::Body, body::Body,
extract::{self, State}, extract::{self, State},
http::{HeaderMap, StatusCode}, http::HeaderMap,
response::{IntoResponse, Response}, response::Response,
}; };
use brk_error::Result;
use quick_cache::sync::GuardResult; use quick_cache::sync::GuardResult;
use tracing::{error, info}; use tracing::{error, info};
use crate::{ use crate::{
AppState, EMBEDDED_WEBSITE, HeaderMapExtended, ModifiedState, ResponseExtended, WebsiteSource, AppState, EMBEDDED_WEBSITE, Error, HeaderMapExtended, ModifiedState, ResponseExtended, Result,
WebsiteSource,
}; };
pub async fn file_handler( pub async fn file_handler(
headers: HeaderMap, headers: HeaderMap,
State(state): State<AppState>, State(state): State<AppState>,
path: extract::Path<String>, path: extract::Path<String>,
) -> Response { ) -> Result<Response> {
any_handler(headers, state, Some(path.0)) any_handler(headers, state, Some(path.0))
} }
pub async fn index_handler(headers: HeaderMap, State(state): State<AppState>) -> Response { pub async fn index_handler(headers: HeaderMap, State(state): State<AppState>) -> Result<Response> {
any_handler(headers, state, None) any_handler(headers, state, None)
} }
fn any_handler(headers: HeaderMap, state: AppState, path: Option<String>) -> Response { fn any_handler(headers: HeaderMap, state: AppState, path: Option<String>) -> Result<Response> {
match &state.website { match &state.website {
WebsiteSource::Disabled => unreachable!("routes not added when disabled"), WebsiteSource::Disabled => unreachable!("routes not added when disabled"),
WebsiteSource::Embedded => embedded_handler(&state, path), WebsiteSource::Embedded => embedded_handler(&state, path),
@@ -92,7 +92,7 @@ fn build_response(state: &AppState, path: &Path, content: Vec<u8>, cache_key: &s
response response
} }
fn embedded_handler(state: &AppState, path: Option<String>) -> Response { fn embedded_handler(state: &AppState, path: Option<String>) -> Result<Response> {
let path = path.unwrap_or_else(|| "index.html".to_string()); let path = path.unwrap_or_else(|| "index.html".to_string());
let sanitized = sanitize_path(&path); let sanitized = sanitize_path(&path);
@@ -113,17 +113,15 @@ fn embedded_handler(state: &AppState, path: Option<String>) -> Response {
}); });
let Some(file) = file else { let Some(file) = file else {
let response: Response<Body> = return Err(Error::not_found("File not found"));
(StatusCode::NOT_FOUND, "File not found".to_string()).into_response();
return response;
}; };
build_response( Ok(build_response(
state, state,
Path::new(file.path()), Path::new(file.path()),
file.contents().to_vec(), file.contents().to_vec(),
&file.path().to_string_lossy(), &file.path().to_string_lossy(),
) ))
} }
fn filesystem_handler( fn filesystem_handler(
@@ -131,7 +129,7 @@ fn filesystem_handler(
state: &AppState, state: &AppState,
files_path: &Path, files_path: &Path,
path: Option<String>, path: Option<String>,
) -> Response { ) -> Result<Response> {
let path = if let Some(path) = path { let path = if let Some(path) = path {
let sanitized = sanitize_path(&path); let sanitized = sanitize_path(&path);
let mut path = files_path.join(&sanitized); let mut path = files_path.join(&sanitized);
@@ -145,9 +143,7 @@ fn filesystem_handler(
let allowed = canonical.starts_with(&canonical_base) let allowed = canonical.starts_with(&canonical_base)
|| project_root.is_some_and(|root| canonical.starts_with(root)); || project_root.is_some_and(|root| canonical.starts_with(root));
if !allowed { if !allowed {
let response: Response<Body> = return Err(Error::forbidden("Access denied"));
(StatusCode::FORBIDDEN, "Access denied".to_string()).into_response();
return response;
} }
} }
@@ -162,12 +158,7 @@ fn filesystem_handler(
// SPA fallback // SPA fallback
if !path.exists() || path.is_dir() { if !path.exists() || path.is_dir() {
if path.extension().is_some() { if path.extension().is_some() {
let response: Response<Body> = ( return Err(Error::not_found("File doesn't exist"));
StatusCode::INTERNAL_SERVER_ERROR,
"File doesn't exist".to_string(),
)
.into_response();
return response;
} else { } else {
path = files_path.join("index.html"); path = files_path.join("index.html");
} }
@@ -181,14 +172,7 @@ fn filesystem_handler(
path_to_response(&headers, state, &path) path_to_response(&headers, state, &path)
} }
fn path_to_response(headers: &HeaderMap, state: &AppState, path: &Path) -> Response { fn path_to_response(headers: &HeaderMap, state: &AppState, path: &Path) -> Result<Response> {
match path_to_response_(headers, state, path) {
Ok(response) => response,
Err(error) => (StatusCode::INTERNAL_SERVER_ERROR, error.to_string()).into_response(),
}
}
fn path_to_response_(headers: &HeaderMap, state: &AppState, path: &Path) -> Result<Response> {
let (modified, date) = headers.check_if_modified_since(path)?; let (modified, date) = headers.check_if_modified_since(path)?;
if !cfg!(debug_assertions) && modified == ModifiedState::NotModifiedSince { if !cfg!(debug_assertions) && modified == ModifiedState::NotModifiedSince {
return Ok(Response::new_not_modified()); return Ok(Response::new_not_modified());

View File

@@ -17,7 +17,6 @@ use axum::{
routing::get, routing::get,
serve, serve,
}; };
use brk_error::Result;
use brk_query::AsyncQuery; use brk_query::AsyncQuery;
use include_dir::{Dir, include_dir}; use include_dir::{Dir, include_dir};
use quick_cache::sync::Cache; use quick_cache::sync::Cache;
@@ -48,12 +47,14 @@ impl WebsiteSource {
mod api; mod api;
pub mod cache; pub mod cache;
mod error;
mod extended; mod extended;
mod files; mod files;
mod state; mod state;
use api::*; use api::*;
pub use cache::{CacheParams, CacheStrategy}; pub use cache::{CacheParams, CacheStrategy};
pub use error::{Error, Result};
use extended::*; use extended::*;
use files::FilesRoutes; use files::FilesRoutes;
use state::*; use state::*;
@@ -75,7 +76,7 @@ impl Server {
}) })
} }
pub async fn serve(self) -> Result<()> { pub async fn serve(self) -> brk_error::Result<()> {
let state = self.0; let state = self.0;
let compression_layer = CompressionLayer::new() let compression_layer = CompressionLayer::new()

View File

@@ -148,3 +148,157 @@ impl Formattable for OracleBins {
false false
} }
} }
// ============================================================================
// OracleBinsV2: 200-bin phase histogram for V2 phase oracle
// ============================================================================
/// Number of bins for V2 phase histogram (0.5% resolution, i.e. 1/200 of the
/// unit interval)
pub const PHASE_BINS_V2: usize = 200;
/// V2 Phase histogram: counts per bin for frac(log10(sats))
///
/// Used for phase oracle V2 with round USD template matching.
/// Each bin represents 0.5% of the log10 fractional range [0, 1).
/// Values are u16 (max 65535 per bin).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct OracleBinsV2 {
    /// Per-bin counts; index `i` covers phase [i/200, (i+1)/200).
    pub bins: [u16; PHASE_BINS_V2],
}
impl Default for OracleBinsV2 {
    /// Defaults to the all-zero histogram.
    fn default() -> Self {
        Self::ZERO
    }
}

impl Display for OracleBinsV2 {
    /// Compact rendering that shows only the peak bin index.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let peak = self.peak_bin();
        write!(f, "OracleBinsV2(peak={})", peak)
    }
}
impl Serialize for OracleBinsV2 {
    /// Serialize as a flat sequence of PHASE_BINS_V2 u16 counts.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        self.bins.as_slice().serialize(serializer)
    }
}
impl<'de> Deserialize<'de> for OracleBinsV2 {
    /// Deserialize from a sequence of PHASE_BINS_V2 u16 values.
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // Visitor that fills the fixed-size array element by element and
        // errors with `invalid_length` if the sequence is too short.
        struct BinsVisitor;
        impl<'de> Visitor<'de> for BinsVisitor {
            type Value = OracleBinsV2;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "an array of {} u16 values", PHASE_BINS_V2)
            }
            fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
                let mut bins = [0u16; PHASE_BINS_V2];
                for (i, bin) in bins.iter_mut().enumerate() {
                    *bin = seq
                        .next_element()?
                        .ok_or_else(|| serde::de::Error::invalid_length(i, &self))?;
                }
                Ok(OracleBinsV2 { bins })
            }
        }
        deserializer.deserialize_seq(BinsVisitor)
    }
}
impl JsonSchema for OracleBinsV2 {
    fn schema_name() -> std::borrow::Cow<'static, str> {
        std::borrow::Cow::Borrowed("OracleBinsV2")
    }

    /// Schema-wise this type is just an array of u16 values.
    fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
        Vec::<u16>::json_schema(generator)
    }
}
impl OracleBinsV2 {
    /// Histogram with every bin at zero.
    pub const ZERO: Self = Self {
        bins: [0; PHASE_BINS_V2],
    };

    /// Map a sats amount to its phase bin.
    ///
    /// Returns `None` for values outside [1k sats, 100k BTC]; otherwise the
    /// bin index of frac(log10(sats)), clamped to the last bin.
    #[inline]
    pub fn sats_to_bin(sats: Sats) -> Option<usize> {
        if sats < Sats::_1K || sats > Sats::_100K_BTC {
            return None;
        }
        let mut phase = f64::from(sats).log10().fract();
        if phase < 0.0 {
            phase += 1.0;
        }
        let bin = (phase * PHASE_BINS_V2 as f64) as usize;
        Some(bin.min(PHASE_BINS_V2 - 1))
    }

    /// Count one occurrence of `sats` (saturating; out-of-range is a no-op).
    #[inline]
    pub fn add(&mut self, sats: Sats) {
        if let Some(idx) = Self::sats_to_bin(sats) {
            self.bins[idx] = self.bins[idx].saturating_add(1);
        }
    }

    /// Merge `other` into `self`, saturating each bin at u16::MAX.
    pub fn add_histogram(&mut self, other: &OracleBinsV2) {
        for (dst, &src) in self.bins.iter_mut().zip(other.bins.iter()) {
            *dst = dst.saturating_add(src);
        }
    }

    /// Index of the most populated bin (ties resolve to the highest index).
    pub fn peak_bin(&self) -> usize {
        self.bins
            .iter()
            .enumerate()
            .max_by_key(|&(_, count)| count)
            .map_or(0, |(idx, _)| idx)
    }

    /// Sum of all bin counts.
    pub fn total_count(&self) -> u32 {
        self.bins.iter().map(|&c| u32::from(c)).sum()
    }
}
impl Bytes for OracleBinsV2 {
    type Array = [u8; size_of::<Self>()];

    /// Encode every bin as a little-endian u16, densely packed.
    fn to_bytes(&self) -> Self::Array {
        let mut arr = [0u8; size_of::<Self>()];
        for (chunk, count) in arr.chunks_exact_mut(2).zip(self.bins.iter()) {
            chunk.copy_from_slice(&count.to_le_bytes());
        }
        arr
    }

    /// Decode from little-endian u16s; extra trailing bytes are ignored.
    fn from_bytes(bytes: &[u8]) -> vecdb::Result<Self> {
        if bytes.len() < size_of::<Self>() {
            return Err(vecdb::Error::WrongLength {
                received: bytes.len(),
                expected: size_of::<Self>(),
            });
        }
        let mut bins = [0u16; PHASE_BINS_V2];
        for (bin, chunk) in bins.iter_mut().zip(bytes.chunks_exact(2)) {
            *bin = u16::from_le_bytes([chunk[0], chunk[1]]);
        }
        Ok(Self { bins })
    }
}
impl Formattable for OracleBinsV2 {
    // Serialized form is a plain numeric array (see the Serialize impl), so
    // values never need escaping in formatted output.
    fn may_need_escaping() -> bool {
        false
    }
}

View File

@@ -456,6 +456,7 @@
* @typedef {Cents} Open * @typedef {Cents} Open
*/ */
/** @typedef {number[]} OracleBins */ /** @typedef {number[]} OracleBins */
/** @typedef {number[]} OracleBinsV2 */
/** @typedef {number} OutPoint */ /** @typedef {number} OutPoint */
/** /**
* Type (P2PKH, P2WPKH, P2SH, P2TR, etc.) * Type (P2PKH, P2WPKH, P2SH, P2TR, etc.)
@@ -1644,59 +1645,6 @@ function createPrice111dSmaPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} PercentilesPattern
* @property {MetricPattern4<Dollars>} pct05
* @property {MetricPattern4<Dollars>} pct10
* @property {MetricPattern4<Dollars>} pct15
* @property {MetricPattern4<Dollars>} pct20
* @property {MetricPattern4<Dollars>} pct25
* @property {MetricPattern4<Dollars>} pct30
* @property {MetricPattern4<Dollars>} pct35
* @property {MetricPattern4<Dollars>} pct40
* @property {MetricPattern4<Dollars>} pct45
* @property {MetricPattern4<Dollars>} pct50
* @property {MetricPattern4<Dollars>} pct55
* @property {MetricPattern4<Dollars>} pct60
* @property {MetricPattern4<Dollars>} pct65
* @property {MetricPattern4<Dollars>} pct70
* @property {MetricPattern4<Dollars>} pct75
* @property {MetricPattern4<Dollars>} pct80
* @property {MetricPattern4<Dollars>} pct85
* @property {MetricPattern4<Dollars>} pct90
* @property {MetricPattern4<Dollars>} pct95
*/
/**
* Create a PercentilesPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {PercentilesPattern}
*/
function createPercentilesPattern(client, acc) {
return {
pct05: createMetricPattern4(client, _m(acc, 'pct05')),
pct10: createMetricPattern4(client, _m(acc, 'pct10')),
pct15: createMetricPattern4(client, _m(acc, 'pct15')),
pct20: createMetricPattern4(client, _m(acc, 'pct20')),
pct25: createMetricPattern4(client, _m(acc, 'pct25')),
pct30: createMetricPattern4(client, _m(acc, 'pct30')),
pct35: createMetricPattern4(client, _m(acc, 'pct35')),
pct40: createMetricPattern4(client, _m(acc, 'pct40')),
pct45: createMetricPattern4(client, _m(acc, 'pct45')),
pct50: createMetricPattern4(client, _m(acc, 'pct50')),
pct55: createMetricPattern4(client, _m(acc, 'pct55')),
pct60: createMetricPattern4(client, _m(acc, 'pct60')),
pct65: createMetricPattern4(client, _m(acc, 'pct65')),
pct70: createMetricPattern4(client, _m(acc, 'pct70')),
pct75: createMetricPattern4(client, _m(acc, 'pct75')),
pct80: createMetricPattern4(client, _m(acc, 'pct80')),
pct85: createMetricPattern4(client, _m(acc, 'pct85')),
pct90: createMetricPattern4(client, _m(acc, 'pct90')),
pct95: createMetricPattern4(client, _m(acc, 'pct95')),
};
}
/** /**
* @typedef {Object} ActivePriceRatioPattern * @typedef {Object} ActivePriceRatioPattern
* @property {MetricPattern4<StoredF32>} ratio * @property {MetricPattern4<StoredF32>} ratio
@@ -1750,6 +1698,59 @@ function createActivePriceRatioPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} PercentilesPattern
* @property {MetricPattern4<Dollars>} pct05
* @property {MetricPattern4<Dollars>} pct10
* @property {MetricPattern4<Dollars>} pct15
* @property {MetricPattern4<Dollars>} pct20
* @property {MetricPattern4<Dollars>} pct25
* @property {MetricPattern4<Dollars>} pct30
* @property {MetricPattern4<Dollars>} pct35
* @property {MetricPattern4<Dollars>} pct40
* @property {MetricPattern4<Dollars>} pct45
* @property {MetricPattern4<Dollars>} pct50
* @property {MetricPattern4<Dollars>} pct55
* @property {MetricPattern4<Dollars>} pct60
* @property {MetricPattern4<Dollars>} pct65
* @property {MetricPattern4<Dollars>} pct70
* @property {MetricPattern4<Dollars>} pct75
* @property {MetricPattern4<Dollars>} pct80
* @property {MetricPattern4<Dollars>} pct85
* @property {MetricPattern4<Dollars>} pct90
* @property {MetricPattern4<Dollars>} pct95
*/
/**
 * Create a PercentilesPattern pattern node
 * @param {BrkClientBase} client
 * @param {string} acc - Accumulated metric name
 * @returns {PercentilesPattern}
 */
function createPercentilesPattern(client, acc) {
    // Percentile keys run pct05, pct10, ..., pct95 in 5-point steps; each key
    // doubles as the metric-name suffix.
    const node = {};
    for (let p = 5; p <= 95; p += 5) {
        const key = `pct${String(p).padStart(2, '0')}`;
        node[key] = createMetricPattern4(client, _m(acc, key));
    }
    return node;
}
/** /**
* @typedef {Object} RelativePattern5 * @typedef {Object} RelativePattern5
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToMarketCap * @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToMarketCap
@@ -2082,41 +2083,6 @@ function createDollarsPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} RelativePattern2
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF32>} netUnrealizedPnlRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} netUnrealizedPnlRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF64>} supplyInLossRelToOwnSupply
* @property {MetricPattern1<StoredF64>} supplyInProfitRelToOwnSupply
* @property {MetricPattern1<StoredF32>} unrealizedLossRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} unrealizedLossRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF32>} unrealizedProfitRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} unrealizedProfitRelToOwnTotalUnrealizedPnl
*/
/**
* Create a RelativePattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {RelativePattern2}
*/
function createRelativePattern2(client, acc) {
return {
negUnrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')),
negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')),
netUnrealizedPnlRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')),
netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')),
supplyInLossRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')),
supplyInProfitRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')),
unrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')),
unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')),
unrealizedProfitRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')),
unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')),
};
}
/** /**
* @typedef {Object} RelativePattern * @typedef {Object} RelativePattern
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToMarketCap * @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToMarketCap
@@ -2152,6 +2118,41 @@ function createRelativePattern(client, acc) {
}; };
} }
/**
* @typedef {Object} RelativePattern2
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} negUnrealizedLossRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF32>} netUnrealizedPnlRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} netUnrealizedPnlRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF64>} supplyInLossRelToOwnSupply
* @property {MetricPattern1<StoredF64>} supplyInProfitRelToOwnSupply
* @property {MetricPattern1<StoredF32>} unrealizedLossRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} unrealizedLossRelToOwnTotalUnrealizedPnl
* @property {MetricPattern1<StoredF32>} unrealizedProfitRelToOwnMarketCap
* @property {MetricPattern1<StoredF32>} unrealizedProfitRelToOwnTotalUnrealizedPnl
*/
/**
* Create a RelativePattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {RelativePattern2}
*/
function createRelativePattern2(client, acc) {
    // Each camelCase property resolves the matching snake_case metric name
    // under the accumulated prefix.
    return {
        negUnrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap')),
        negUnrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl')),
        netUnrealizedPnlRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap')),
        netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl')),
        supplyInLossRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply')),
        supplyInProfitRelToOwnSupply: createMetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply')),
        unrealizedLossRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap')),
        unrealizedLossRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl')),
        unrealizedProfitRelToOwnMarketCap: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap')),
        unrealizedProfitRelToOwnTotalUnrealizedPnl: createMetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl')),
    };
}
/** /**
* @template T * @template T
* @typedef {Object} CountPattern2 * @typedef {Object} CountPattern2
@@ -2222,41 +2223,6 @@ function createAddrCountPattern(client, acc) {
}; };
} }
/**
* @template T
* @typedef {Object} FullnessPattern
* @property {MetricPattern2<T>} average
* @property {MetricPattern11<T>} base
* @property {MetricPattern2<T>} max
* @property {MetricPattern6<T>} median
* @property {MetricPattern2<T>} min
* @property {MetricPattern6<T>} pct10
* @property {MetricPattern6<T>} pct25
* @property {MetricPattern6<T>} pct75
* @property {MetricPattern6<T>} pct90
*/
/**
* Create a FullnessPattern pattern node
* @template T
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {FullnessPattern<T>}
*/
function createFullnessPattern(client, acc) {
return {
average: createMetricPattern2(client, _m(acc, 'average')),
base: createMetricPattern11(client, acc),
max: createMetricPattern2(client, _m(acc, 'max')),
median: createMetricPattern6(client, _m(acc, 'median')),
min: createMetricPattern2(client, _m(acc, 'min')),
pct10: createMetricPattern6(client, _m(acc, 'pct10')),
pct25: createMetricPattern6(client, _m(acc, 'pct25')),
pct75: createMetricPattern6(client, _m(acc, 'pct75')),
pct90: createMetricPattern6(client, _m(acc, 'pct90')),
};
}
/** /**
* @template T * @template T
* @typedef {Object} FeeRatePattern * @typedef {Object} FeeRatePattern
@@ -2292,6 +2258,41 @@ function createFeeRatePattern(client, acc) {
}; };
} }
/**
* @template T
* @typedef {Object} FullnessPattern
* @property {MetricPattern2<T>} average
* @property {MetricPattern11<T>} base
* @property {MetricPattern2<T>} max
* @property {MetricPattern6<T>} median
* @property {MetricPattern2<T>} min
* @property {MetricPattern6<T>} pct10
* @property {MetricPattern6<T>} pct25
* @property {MetricPattern6<T>} pct75
* @property {MetricPattern6<T>} pct90
*/
/**
* Create a FullnessPattern pattern node
* @template T
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {FullnessPattern<T>}
*/
function createFullnessPattern(client, acc) {
    return {
        average: createMetricPattern2(client, _m(acc, 'average')),
        // 'base' reuses the accumulated name as-is (no stat suffix).
        base: createMetricPattern11(client, acc),
        max: createMetricPattern2(client, _m(acc, 'max')),
        median: createMetricPattern6(client, _m(acc, 'median')),
        min: createMetricPattern2(client, _m(acc, 'min')),
        pct10: createMetricPattern6(client, _m(acc, 'pct10')),
        pct25: createMetricPattern6(client, _m(acc, 'pct25')),
        pct75: createMetricPattern6(client, _m(acc, 'pct75')),
        pct90: createMetricPattern6(client, _m(acc, 'pct90')),
    };
}
/** /**
* @typedef {Object} _0satsPattern * @typedef {Object} _0satsPattern
* @property {ActivityPattern2} activity * @property {ActivityPattern2} activity
@@ -2356,6 +2357,93 @@ function createPhaseDailyCentsPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} PeriodCagrPattern
* @property {MetricPattern4<StoredF32>} _10y
* @property {MetricPattern4<StoredF32>} _2y
* @property {MetricPattern4<StoredF32>} _3y
* @property {MetricPattern4<StoredF32>} _4y
* @property {MetricPattern4<StoredF32>} _5y
* @property {MetricPattern4<StoredF32>} _6y
* @property {MetricPattern4<StoredF32>} _8y
*/
/**
 * Create a PeriodCagrPattern pattern node
 * @param {BrkClientBase} client
 * @param {string} acc - Accumulated metric name
 * @returns {PeriodCagrPattern}
 */
function createPeriodCagrPattern(client, acc) {
    // Property names are the periods prefixed with '_' because identifiers
    // cannot start with a digit.
    const node = {};
    for (const period of ['10y', '2y', '3y', '4y', '5y', '6y', '8y']) {
        node[`_${period}`] = createMetricPattern4(client, _p(period, acc));
    }
    return node;
}
/**
* @typedef {Object} _0satsPattern2
* @property {ActivityPattern2} activity
* @property {CostBasisPattern} costBasis
* @property {OutputsPattern} outputs
* @property {RealizedPattern} realized
* @property {RelativePattern4} relative
* @property {SupplyPattern2} supply
* @property {UnrealizedPattern} unrealized
*/
/**
* Create a _0satsPattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {_0satsPattern2}
*/
function create_0satsPattern2(client, acc) {
return {
activity: createActivityPattern2(client, acc),
costBasis: createCostBasisPattern(client, acc),
outputs: createOutputsPattern(client, _m(acc, 'utxo_count')),
realized: createRealizedPattern(client, acc),
relative: createRelativePattern4(client, _m(acc, 'supply_in')),
supply: createSupplyPattern2(client, _m(acc, 'supply')),
unrealized: createUnrealizedPattern(client, acc),
};
}
/**
* @typedef {Object} _100btcPattern
* @property {ActivityPattern2} activity
* @property {CostBasisPattern} costBasis
* @property {OutputsPattern} outputs
* @property {RealizedPattern} realized
* @property {RelativePattern} relative
* @property {SupplyPattern2} supply
* @property {UnrealizedPattern} unrealized
*/
/**
* Create a _100btcPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {_100btcPattern}
*/
function create_100btcPattern(client, acc) {
return {
activity: createActivityPattern2(client, acc),
costBasis: createCostBasisPattern(client, acc),
outputs: createOutputsPattern(client, _m(acc, 'utxo_count')),
realized: createRealizedPattern(client, acc),
relative: createRelativePattern(client, acc),
supply: createSupplyPattern2(client, _m(acc, 'supply')),
unrealized: createUnrealizedPattern(client, acc),
};
}
/** /**
* @typedef {Object} UnrealizedPattern * @typedef {Object} UnrealizedPattern
* @property {MetricPattern1<Dollars>} negUnrealizedLoss * @property {MetricPattern1<Dollars>} negUnrealizedLoss
@@ -2414,35 +2502,6 @@ function create_10yTo12yPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} PeriodCagrPattern
* @property {MetricPattern4<StoredF32>} _10y
* @property {MetricPattern4<StoredF32>} _2y
* @property {MetricPattern4<StoredF32>} _3y
* @property {MetricPattern4<StoredF32>} _4y
* @property {MetricPattern4<StoredF32>} _5y
* @property {MetricPattern4<StoredF32>} _6y
* @property {MetricPattern4<StoredF32>} _8y
*/
/**
* Create a PeriodCagrPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {PeriodCagrPattern}
*/
function createPeriodCagrPattern(client, acc) {
return {
_10y: createMetricPattern4(client, _p('10y', acc)),
_2y: createMetricPattern4(client, _p('2y', acc)),
_3y: createMetricPattern4(client, _p('3y', acc)),
_4y: createMetricPattern4(client, _p('4y', acc)),
_5y: createMetricPattern4(client, _p('5y', acc)),
_6y: createMetricPattern4(client, _p('6y', acc)),
_8y: createMetricPattern4(client, _p('8y', acc)),
};
}
/** /**
* @typedef {Object} _10yPattern * @typedef {Object} _10yPattern
* @property {ActivityPattern2} activity * @property {ActivityPattern2} activity
@@ -2472,64 +2531,6 @@ function create_10yPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} _100btcPattern
* @property {ActivityPattern2} activity
* @property {CostBasisPattern} costBasis
* @property {OutputsPattern} outputs
* @property {RealizedPattern} realized
* @property {RelativePattern} relative
* @property {SupplyPattern2} supply
* @property {UnrealizedPattern} unrealized
*/
/**
* Create a _100btcPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {_100btcPattern}
*/
function create_100btcPattern(client, acc) {
return {
activity: createActivityPattern2(client, acc),
costBasis: createCostBasisPattern(client, acc),
outputs: createOutputsPattern(client, _m(acc, 'utxo_count')),
realized: createRealizedPattern(client, acc),
relative: createRelativePattern(client, acc),
supply: createSupplyPattern2(client, _m(acc, 'supply')),
unrealized: createUnrealizedPattern(client, acc),
};
}
/**
* @typedef {Object} _0satsPattern2
* @property {ActivityPattern2} activity
* @property {CostBasisPattern} costBasis
* @property {OutputsPattern} outputs
* @property {RealizedPattern} realized
* @property {RelativePattern4} relative
* @property {SupplyPattern2} supply
* @property {UnrealizedPattern} unrealized
*/
/**
* Create a _0satsPattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {_0satsPattern2}
*/
function create_0satsPattern2(client, acc) {
return {
activity: createActivityPattern2(client, acc),
costBasis: createCostBasisPattern(client, acc),
outputs: createOutputsPattern(client, _m(acc, 'utxo_count')),
realized: createRealizedPattern(client, acc),
relative: createRelativePattern4(client, _m(acc, 'supply_in')),
supply: createSupplyPattern2(client, _m(acc, 'supply')),
unrealized: createUnrealizedPattern(client, acc),
};
}
/** /**
* @typedef {Object} ActivityPattern2 * @typedef {Object} ActivityPattern2
* @property {BlockCountPattern<StoredF64>} coinblocksDestroyed * @property {BlockCountPattern<StoredF64>} coinblocksDestroyed
@@ -2580,27 +2581,6 @@ function createSplitPattern2(client, acc) {
}; };
} }
/**
* @typedef {Object} _2015Pattern
* @property {MetricPattern4<Bitcoin>} bitcoin
* @property {MetricPattern4<Dollars>} dollars
* @property {MetricPattern4<Sats>} sats
*/
/**
* Create a _2015Pattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {_2015Pattern}
*/
function create_2015Pattern(client, acc) {
return {
bitcoin: createMetricPattern4(client, _m(acc, 'btc')),
dollars: createMetricPattern4(client, _m(acc, 'usd')),
sats: createMetricPattern4(client, acc),
};
}
/** /**
* @typedef {Object} ActiveSupplyPattern * @typedef {Object} ActiveSupplyPattern
* @property {MetricPattern1<Bitcoin>} bitcoin * @property {MetricPattern1<Bitcoin>} bitcoin
@@ -2623,23 +2603,23 @@ function createActiveSupplyPattern(client, acc) {
} }
/** /**
* @typedef {Object} CostBasisPattern2 * @typedef {Object} _2015Pattern
* @property {MetricPattern1<Dollars>} max * @property {MetricPattern4<Bitcoin>} bitcoin
* @property {MetricPattern1<Dollars>} min * @property {MetricPattern4<Dollars>} dollars
* @property {PercentilesPattern} percentiles * @property {MetricPattern4<Sats>} sats
*/ */
/** /**
* Create a CostBasisPattern2 pattern node * Create a _2015Pattern pattern node
* @param {BrkClientBase} client * @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name * @param {string} acc - Accumulated metric name
* @returns {CostBasisPattern2} * @returns {_2015Pattern}
*/ */
function createCostBasisPattern2(client, acc) { function create_2015Pattern(client, acc) {
return { return {
max: createMetricPattern1(client, _m(acc, 'max_cost_basis')), bitcoin: createMetricPattern4(client, _m(acc, 'btc')),
min: createMetricPattern1(client, _m(acc, 'min_cost_basis')), dollars: createMetricPattern4(client, _m(acc, 'usd')),
percentiles: createPercentilesPattern(client, _m(acc, 'cost_basis')), sats: createMetricPattern4(client, acc),
}; };
} }
@@ -2664,6 +2644,27 @@ function createCoinbasePattern2(client, acc) {
}; };
} }
/**
* @typedef {Object} CostBasisPattern2
* @property {MetricPattern1<Dollars>} max
* @property {MetricPattern1<Dollars>} min
* @property {PercentilesPattern} percentiles
*/
/**
* Create a CostBasisPattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {CostBasisPattern2}
*/
function createCostBasisPattern2(client, acc) {
return {
max: createMetricPattern1(client, _m(acc, 'max_cost_basis')),
min: createMetricPattern1(client, _m(acc, 'min_cost_basis')),
percentiles: createPercentilesPattern(client, _m(acc, 'cost_basis')),
};
}
/** /**
* @typedef {Object} SegwitAdoptionPattern * @typedef {Object} SegwitAdoptionPattern
* @property {MetricPattern11<StoredF32>} base * @property {MetricPattern11<StoredF32>} base
@@ -2685,27 +2686,6 @@ function createSegwitAdoptionPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} CoinbasePattern
* @property {BitcoinPattern} bitcoin
* @property {DollarsPattern<Dollars>} dollars
* @property {DollarsPattern<Sats>} sats
*/
/**
* Create a CoinbasePattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {CoinbasePattern}
*/
function createCoinbasePattern(client, acc) {
return {
bitcoin: createBitcoinPattern(client, _m(acc, 'btc')),
dollars: createDollarsPattern(client, _m(acc, 'usd')),
sats: createDollarsPattern(client, acc),
};
}
/** /**
* @typedef {Object} UnclaimedRewardsPattern * @typedef {Object} UnclaimedRewardsPattern
* @property {BitcoinPattern2<Bitcoin>} bitcoin * @property {BitcoinPattern2<Bitcoin>} bitcoin
@@ -2728,21 +2708,23 @@ function createUnclaimedRewardsPattern(client, acc) {
} }
/** /**
* @typedef {Object} SupplyPattern2 * @typedef {Object} CoinbasePattern
* @property {ActiveSupplyPattern} halved * @property {BitcoinPattern} bitcoin
* @property {ActiveSupplyPattern} total * @property {DollarsPattern<Dollars>} dollars
* @property {DollarsPattern<Sats>} sats
*/ */
/** /**
* Create a SupplyPattern2 pattern node * Create a CoinbasePattern pattern node
* @param {BrkClientBase} client * @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name * @param {string} acc - Accumulated metric name
* @returns {SupplyPattern2} * @returns {CoinbasePattern}
*/ */
function createSupplyPattern2(client, acc) { function createCoinbasePattern(client, acc) {
return { return {
halved: createActiveSupplyPattern(client, _m(acc, 'halved')), bitcoin: createBitcoinPattern(client, _m(acc, 'btc')),
total: createActiveSupplyPattern(client, acc), dollars: createDollarsPattern(client, _m(acc, 'usd')),
sats: createDollarsPattern(client, acc),
}; };
} }
@@ -2765,25 +2747,6 @@ function createRelativePattern4(client, acc) {
}; };
} }
/**
* @typedef {Object} CostBasisPattern
* @property {MetricPattern1<Dollars>} max
* @property {MetricPattern1<Dollars>} min
*/
/**
* Create a CostBasisPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {CostBasisPattern}
*/
function createCostBasisPattern(client, acc) {
return {
max: createMetricPattern1(client, _m(acc, 'max_cost_basis')),
min: createMetricPattern1(client, _m(acc, 'min_cost_basis')),
};
}
/** /**
* @typedef {Object} _1dReturns1mSdPattern * @typedef {Object} _1dReturns1mSdPattern
* @property {MetricPattern4<StoredF32>} sd * @property {MetricPattern4<StoredF32>} sd
@@ -2804,23 +2767,40 @@ function create_1dReturns1mSdPattern(client, acc) {
} }
/** /**
* @template T * @typedef {Object} CostBasisPattern
* @typedef {Object} BitcoinPattern2 * @property {MetricPattern1<Dollars>} max
* @property {MetricPattern2<T>} cumulative * @property {MetricPattern1<Dollars>} min
* @property {MetricPattern1<T>} sum
*/ */
/** /**
* Create a BitcoinPattern2 pattern node * Create a CostBasisPattern pattern node
* @template T
* @param {BrkClientBase} client * @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name * @param {string} acc - Accumulated metric name
* @returns {BitcoinPattern2<T>} * @returns {CostBasisPattern}
*/ */
function createBitcoinPattern2(client, acc) { function createCostBasisPattern(client, acc) {
return { return {
cumulative: createMetricPattern2(client, _m(acc, 'cumulative')), max: createMetricPattern1(client, _m(acc, 'max_cost_basis')),
sum: createMetricPattern1(client, acc), min: createMetricPattern1(client, _m(acc, 'min_cost_basis')),
};
}
/**
* @typedef {Object} SupplyPattern2
* @property {ActiveSupplyPattern} halved
* @property {ActiveSupplyPattern} total
*/
/**
* Create a SupplyPattern2 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {SupplyPattern2}
*/
function createSupplyPattern2(client, acc) {
return {
halved: createActiveSupplyPattern(client, _m(acc, 'halved')),
total: createActiveSupplyPattern(client, acc),
}; };
} }
@@ -2845,6 +2825,27 @@ function createBlockCountPattern(client, acc) {
}; };
} }
/**
* @template T
* @typedef {Object} BitcoinPattern2
* @property {MetricPattern2<T>} cumulative
* @property {MetricPattern1<T>} sum
*/
/**
* Create a BitcoinPattern2 pattern node
* @template T
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {BitcoinPattern2<T>}
*/
function createBitcoinPattern2(client, acc) {
return {
cumulative: createMetricPattern2(client, _m(acc, 'cumulative')),
sum: createMetricPattern1(client, acc),
};
}
/** /**
* @template T * @template T
* @typedef {Object} SatsPattern * @typedef {Object} SatsPattern
@@ -2861,25 +2862,8 @@ function createBlockCountPattern(client, acc) {
*/ */
function createSatsPattern(client, acc) { function createSatsPattern(client, acc) {
return { return {
ohlc: createMetricPattern1(client, _m(acc, 'ohlc')), ohlc: createMetricPattern1(client, _m(acc, 'ohlc_sats')),
split: createSplitPattern2(client, acc), split: createSplitPattern2(client, _m(acc, 'sats')),
};
}
/**
* @typedef {Object} RealizedPriceExtraPattern
* @property {MetricPattern4<StoredF32>} ratio
*/
/**
* Create a RealizedPriceExtraPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {RealizedPriceExtraPattern}
*/
function createRealizedPriceExtraPattern(client, acc) {
return {
ratio: createMetricPattern4(client, acc),
}; };
} }
@@ -2900,6 +2884,23 @@ function createOutputsPattern(client, acc) {
}; };
} }
/**
* @typedef {Object} RealizedPriceExtraPattern
* @property {MetricPattern4<StoredF32>} ratio
*/
/**
* Create a RealizedPriceExtraPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {RealizedPriceExtraPattern}
*/
function createRealizedPriceExtraPattern(client, acc) {
return {
ratio: createMetricPattern4(client, acc),
};
}
// Catalog tree typedefs // Catalog tree typedefs
/** /**
@@ -4055,8 +4056,8 @@ function createOutputsPattern(client, acc) {
* @typedef {Object} MetricsTree_Price * @typedef {Object} MetricsTree_Price
* @property {MetricsTree_Price_Cents} cents * @property {MetricsTree_Price_Cents} cents
* @property {MetricsTree_Price_Oracle} oracle * @property {MetricsTree_Price_Oracle} oracle
* @property {MetricsTree_Price_Sats} sats * @property {SatsPattern<OHLCSats>} sats
* @property {SatsPattern<OHLCDollars>} usd * @property {MetricsTree_Price_Usd} usd
*/ */
/** /**
@@ -4085,14 +4086,28 @@ function createOutputsPattern(client, acc) {
* @property {PhaseDailyCentsPattern<Dollars>} phaseDailyDollars * @property {PhaseDailyCentsPattern<Dollars>} phaseDailyDollars
* @property {MetricPattern11<OracleBins>} phaseHistogram * @property {MetricPattern11<OracleBins>} phaseHistogram
* @property {MetricPattern11<Cents>} phasePriceCents * @property {MetricPattern11<Cents>} phasePriceCents
* @property {PhaseDailyCentsPattern<Cents>} phaseV2DailyCents
* @property {PhaseDailyCentsPattern<Dollars>} phaseV2DailyDollars
* @property {MetricPattern11<OracleBinsV2>} phaseV2Histogram
* @property {PhaseDailyCentsPattern<Cents>} phaseV2PeakDailyCents
* @property {PhaseDailyCentsPattern<Dollars>} phaseV2PeakDailyDollars
* @property {MetricPattern11<Cents>} phaseV2PeakPriceCents
* @property {MetricPattern11<Cents>} phaseV2PriceCents
* @property {PhaseDailyCentsPattern<Cents>} phaseV3DailyCents
* @property {PhaseDailyCentsPattern<Dollars>} phaseV3DailyDollars
* @property {MetricPattern11<OracleBinsV2>} phaseV3Histogram
* @property {PhaseDailyCentsPattern<Cents>} phaseV3PeakDailyCents
* @property {PhaseDailyCentsPattern<Dollars>} phaseV3PeakDailyDollars
* @property {MetricPattern11<Cents>} phaseV3PeakPriceCents
* @property {MetricPattern11<Cents>} phaseV3PriceCents
* @property {MetricPattern11<Cents>} priceCents * @property {MetricPattern11<Cents>} priceCents
* @property {MetricPattern6<StoredU32>} txCount * @property {MetricPattern6<StoredU32>} txCount
*/ */
/** /**
* @typedef {Object} MetricsTree_Price_Sats * @typedef {Object} MetricsTree_Price_Usd
* @property {MetricPattern1<OHLCSats>} ohlc * @property {MetricPattern1<OHLCDollars>} ohlc
* @property {SplitPattern2<Sats>} split * @property {SplitPattern2<Dollars>} split
*/ */
/** /**
@@ -6055,14 +6070,28 @@ class BrkClient extends BrkClientBase {
phaseDailyDollars: createPhaseDailyCentsPattern(this, 'phase_daily_dollars'), phaseDailyDollars: createPhaseDailyCentsPattern(this, 'phase_daily_dollars'),
phaseHistogram: createMetricPattern11(this, 'phase_histogram'), phaseHistogram: createMetricPattern11(this, 'phase_histogram'),
phasePriceCents: createMetricPattern11(this, 'phase_price_cents'), phasePriceCents: createMetricPattern11(this, 'phase_price_cents'),
phaseV2DailyCents: createPhaseDailyCentsPattern(this, 'phase_v2_daily'),
phaseV2DailyDollars: createPhaseDailyCentsPattern(this, 'phase_v2_daily_dollars'),
phaseV2Histogram: createMetricPattern11(this, 'phase_v2_histogram'),
phaseV2PeakDailyCents: createPhaseDailyCentsPattern(this, 'phase_v2_peak_daily'),
phaseV2PeakDailyDollars: createPhaseDailyCentsPattern(this, 'phase_v2_peak_daily_dollars'),
phaseV2PeakPriceCents: createMetricPattern11(this, 'phase_v2_peak_price_cents'),
phaseV2PriceCents: createMetricPattern11(this, 'phase_v2_price_cents'),
phaseV3DailyCents: createPhaseDailyCentsPattern(this, 'phase_v3_daily'),
phaseV3DailyDollars: createPhaseDailyCentsPattern(this, 'phase_v3_daily_dollars'),
phaseV3Histogram: createMetricPattern11(this, 'phase_v3_histogram'),
phaseV3PeakDailyCents: createPhaseDailyCentsPattern(this, 'phase_v3_peak_daily'),
phaseV3PeakDailyDollars: createPhaseDailyCentsPattern(this, 'phase_v3_peak_daily_dollars'),
phaseV3PeakPriceCents: createMetricPattern11(this, 'phase_v3_peak_price_cents'),
phaseV3PriceCents: createMetricPattern11(this, 'phase_v3_price_cents'),
priceCents: createMetricPattern11(this, 'oracle_price_cents'), priceCents: createMetricPattern11(this, 'oracle_price_cents'),
txCount: createMetricPattern6(this, 'oracle_tx_count'), txCount: createMetricPattern6(this, 'oracle_tx_count'),
}, },
sats: { sats: createSatsPattern(this, 'price'),
ohlc: createMetricPattern1(this, 'price_ohlc_sats'), usd: {
split: createSplitPattern2(this, 'price_sats'), ohlc: createMetricPattern1(this, 'price_ohlc'),
split: createSplitPattern2(this, 'price'),
}, },
usd: createSatsPattern(this, 'price'),
}, },
scripts: { scripts: {
count: { count: {
@@ -6184,29 +6213,17 @@ class BrkClient extends BrkClientBase {
} }
/** /**
* OpenAPI specification * Compact OpenAPI specification
* *
* Full OpenAPI 3.1 specification for this API. * Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information. Full spec available at `/openapi.json`.
* *
* Endpoint: `GET /api.json` * Endpoint: `GET /api.json`
* @returns {Promise<*>} * @returns {Promise<*>}
*/ */
async getOpenapi() { async getApi() {
return this.getJson(`/api.json`); return this.getJson(`/api.json`);
} }
/**
* Trimmed OpenAPI specification
*
* Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information.
*
* Endpoint: `GET /api.trimmed.json`
* @returns {Promise<*>}
*/
async getOpenapiTrimmed() {
return this.getJson(`/api.trimmed.json`);
}
/** /**
* Address information * Address information
* *
@@ -6997,6 +7014,18 @@ class BrkClient extends BrkClientBase {
return this.getJson(`/health`); return this.getJson(`/health`);
} }
/**
* OpenAPI specification
*
* Full OpenAPI 3.1 specification for this API.
*
* Endpoint: `GET /openapi.json`
* @returns {Promise<*>}
*/
async getOpenapi() {
return this.getJson(`/openapi.json`);
}
/** /**
* API version * API version
* *

View File

@@ -70,6 +70,7 @@ MonthIndex = int
Open = Cents Open = Cents
OpReturnIndex = TypeIndex OpReturnIndex = TypeIndex
OracleBins = List[int] OracleBins = List[int]
OracleBinsV2 = List[int]
OutPoint = int OutPoint = int
# Type (P2PKH, P2WPKH, P2SH, P2TR, etc.) # Type (P2PKH, P2WPKH, P2SH, P2TR, etc.)
OutputType = Literal["p2pk65", "p2pk33", "p2pkh", "p2ms", "p2sh", "opreturn", "p2wpkh", "p2wsh", "p2tr", "p2a", "empty", "unknown"] OutputType = Literal["p2pk65", "p2pk33", "p2pkh", "p2ms", "p2sh", "opreturn", "p2wpkh", "p2wsh", "p2tr", "p2a", "empty", "unknown"]
@@ -1882,31 +1883,6 @@ class Price111dSmaPattern:
self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct99_usd')) self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'ratio_pct99_usd'))
self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio')) self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, _m(acc, 'ratio'))
class PercentilesPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05'))
self.pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10'))
self.pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15'))
self.pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20'))
self.pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25'))
self.pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30'))
self.pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35'))
self.pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40'))
self.pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45'))
self.pct50: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct50'))
self.pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55'))
self.pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60'))
self.pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65'))
self.pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70'))
self.pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75'))
self.pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80'))
self.pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85'))
self.pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90'))
self.pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95'))
class ActivePriceRatioPattern: class ActivePriceRatioPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -1932,6 +1908,31 @@ class ActivePriceRatioPattern:
self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct99_usd')) self.ratio_pct99_usd: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct99_usd'))
self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, acc) self.ratio_sd: Ratio1ySdPattern = Ratio1ySdPattern(client, acc)
class PercentilesPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.pct05: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct05'))
self.pct10: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct10'))
self.pct15: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct15'))
self.pct20: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct20'))
self.pct25: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct25'))
self.pct30: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct30'))
self.pct35: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct35'))
self.pct40: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct40'))
self.pct45: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct45'))
self.pct50: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct50'))
self.pct55: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct55'))
self.pct60: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct60'))
self.pct65: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct65'))
self.pct70: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct70'))
self.pct75: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct75'))
self.pct80: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct80'))
self.pct85: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct85'))
self.pct90: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct90'))
self.pct95: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'pct95'))
class RelativePattern5: class RelativePattern5:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2082,22 +2083,6 @@ class DollarsPattern(Generic[T]):
self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90')) self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90'))
self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum')) self.sum: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'sum'))
class RelativePattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap'))
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap'))
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'))
self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply'))
self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap'))
self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap'))
self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl'))
class RelativePattern: class RelativePattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2114,6 +2099,22 @@ class RelativePattern:
self.unrealized_loss_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_market_cap')) self.unrealized_loss_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_market_cap'))
self.unrealized_profit_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_market_cap')) self.unrealized_profit_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_market_cap'))
class RelativePattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.neg_unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_market_cap'))
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.net_unrealized_pnl_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_market_cap'))
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_unrealized_pnl_rel_to_own_total_unrealized_pnl'))
self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'supply_in_profit_rel_to_own_supply'))
self.unrealized_loss_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_market_cap'))
self.unrealized_loss_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_loss_rel_to_own_total_unrealized_pnl'))
self.unrealized_profit_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_market_cap'))
self.unrealized_profit_rel_to_own_total_unrealized_pnl: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'unrealized_profit_rel_to_own_total_unrealized_pnl'))
class CountPattern2(Generic[T]): class CountPattern2(Generic[T]):
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2145,21 +2146,6 @@ class AddrCountPattern:
self.p2wpkh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wpkh', acc)) self.p2wpkh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wpkh', acc))
self.p2wsh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wsh', acc)) self.p2wsh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wsh', acc))
class FullnessPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average'))
self.base: MetricPattern11[T] = MetricPattern11(client, acc)
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct10'))
self.pct25: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct25'))
self.pct75: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct75'))
self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90'))
class FeeRatePattern(Generic[T]): class FeeRatePattern(Generic[T]):
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2175,6 +2161,21 @@ class FeeRatePattern(Generic[T]):
self.pct90: MetricPattern11[T] = MetricPattern11(client, _m(acc, 'pct90')) self.pct90: MetricPattern11[T] = MetricPattern11(client, _m(acc, 'pct90'))
self.txindex: MetricPattern27[T] = MetricPattern27(client, acc) self.txindex: MetricPattern27[T] = MetricPattern27(client, acc)
class FullnessPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'average'))
self.base: MetricPattern11[T] = MetricPattern11(client, acc)
self.max: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'max'))
self.median: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'median'))
self.min: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'min'))
self.pct10: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct10'))
self.pct25: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct25'))
self.pct75: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct75'))
self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90'))
class _0satsPattern: class _0satsPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2203,6 +2204,45 @@ class PhaseDailyCentsPattern(Generic[T]):
self.pct75: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct75')) self.pct75: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct75'))
self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90')) self.pct90: MetricPattern6[T] = MetricPattern6(client, _m(acc, 'pct90'))
class PeriodCagrPattern:
    """Pattern struct for repeated tree structure.

    Exposes one StoredF32 metric per lookback period (2y-6y, 8y, 10y; no
    1y/7y/9y variants are generated).
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name.

        Unlike `_m(acc, suffix)` used elsewhere, `_p(period, acc)`
        presumably PREpends the period to the accumulated name — TODO
        confirm against `_p`.
        """
        self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('10y', acc))
        self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('2y', acc))
        self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('3y', acc))
        self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('4y', acc))
        self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('5y', acc))
        self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('6y', acc))
        self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('8y', acc))
class _0satsPattern2:
    """Pattern struct for repeated tree structure.

    Cohort bundle: activity, cost basis, output counts, realized/unrealized
    figures, relative ratios and supply, all rooted at the same accumulated
    name. Differs from `_100btcPattern` only in the `relative` member, which
    here is `RelativePattern4` rooted at the 'supply_in' sub-name.
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
        self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
        self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count'))
        self.realized: RealizedPattern = RealizedPattern(client, acc)
        self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in'))
        self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
        self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _100btcPattern:
    """Pattern struct for repeated tree structure.

    Cohort bundle: activity, cost basis, output counts, realized/unrealized
    figures, relative ratios and supply, all rooted at the same accumulated
    name. Differs from `_0satsPattern2` only in the `relative` member, which
    here is a plain `RelativePattern` rooted at `acc` itself.
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
        self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
        self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count'))
        self.realized: RealizedPattern = RealizedPattern(client, acc)
        self.relative: RelativePattern = RelativePattern(client, acc)
        self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
        self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class UnrealizedPattern: class UnrealizedPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2229,19 +2269,6 @@ class _10yTo12yPattern:
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class PeriodCagrPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._10y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('10y', acc))
self._2y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('2y', acc))
self._3y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('3y', acc))
self._4y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('4y', acc))
self._5y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('5y', acc))
self._6y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('6y', acc))
self._8y: MetricPattern4[StoredF32] = MetricPattern4(client, _p('8y', acc))
class _10yPattern: class _10yPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2255,32 +2282,6 @@ class _10yPattern:
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply')) self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc) self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _100btcPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count'))
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern = RelativePattern(client, acc)
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class _0satsPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.activity: ActivityPattern2 = ActivityPattern2(client, acc)
self.cost_basis: CostBasisPattern = CostBasisPattern(client, acc)
self.outputs: OutputsPattern = OutputsPattern(client, _m(acc, 'utxo_count'))
self.realized: RealizedPattern = RealizedPattern(client, acc)
self.relative: RelativePattern4 = RelativePattern4(client, _m(acc, 'supply_in'))
self.supply: SupplyPattern2 = SupplyPattern2(client, _m(acc, 'supply'))
self.unrealized: UnrealizedPattern = UnrealizedPattern(client, acc)
class ActivityPattern2: class ActivityPattern2:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2302,15 +2303,6 @@ class SplitPattern2(Generic[T]):
self.low: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'low')) self.low: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'low'))
self.open: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'open')) self.open: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'open'))
class _2015Pattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc'))
self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd'))
self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc)
class ActiveSupplyPattern: class ActiveSupplyPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2320,14 +2312,14 @@ class ActiveSupplyPattern:
self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd')) self.dollars: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd'))
self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc) self.sats: MetricPattern1[Sats] = MetricPattern1(client, acc)
class CostBasisPattern2: class _2015Pattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str): def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name.""" """Create pattern node with accumulated metric name."""
self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis')) self.bitcoin: MetricPattern4[Bitcoin] = MetricPattern4(client, _m(acc, 'btc'))
self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis')) self.dollars: MetricPattern4[Dollars] = MetricPattern4(client, _m(acc, 'usd'))
self.percentiles: PercentilesPattern = PercentilesPattern(client, _m(acc, 'cost_basis')) self.sats: MetricPattern4[Sats] = MetricPattern4(client, acc)
class CoinbasePattern2: class CoinbasePattern2:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2338,6 +2330,15 @@ class CoinbasePattern2:
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc) self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc)
class CostBasisPattern2:
    """Pattern struct for repeated tree structure.

    Cost-basis extrema plus percentile breakdown. The `CostBasisPattern`
    variant elsewhere in this file carries only max/min (no percentiles).
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
        self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
        self.percentiles: PercentilesPattern = PercentilesPattern(client, _m(acc, 'cost_basis'))
class SegwitAdoptionPattern: class SegwitAdoptionPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2347,15 +2348,6 @@ class SegwitAdoptionPattern:
self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative')) self.cumulative: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'cumulative'))
self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum')) self.sum: MetricPattern2[StoredF32] = MetricPattern2(client, _m(acc, 'sum'))
class CoinbasePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.bitcoin: BitcoinPattern = BitcoinPattern(client, _m(acc, 'btc'))
self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd'))
self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc)
class UnclaimedRewardsPattern: class UnclaimedRewardsPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2365,13 +2357,14 @@ class UnclaimedRewardsPattern:
self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd')) self.dollars: BlockCountPattern[Dollars] = BlockCountPattern(client, _m(acc, 'usd'))
self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc) self.sats: BlockCountPattern[Sats] = BlockCountPattern(client, acc)
class SupplyPattern2: class CoinbasePattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str): def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name.""" """Create pattern node with accumulated metric name."""
self.halved: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'halved')) self.bitcoin: BitcoinPattern = BitcoinPattern(client, _m(acc, 'btc'))
self.total: ActiveSupplyPattern = ActiveSupplyPattern(client, acc) self.dollars: DollarsPattern[Dollars] = DollarsPattern(client, _m(acc, 'usd'))
self.sats: DollarsPattern[Sats] = DollarsPattern(client, acc)
class RelativePattern4: class RelativePattern4:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2381,14 +2374,6 @@ class RelativePattern4:
self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'loss_rel_to_own_supply')) self.supply_in_loss_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'loss_rel_to_own_supply'))
self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'profit_rel_to_own_supply')) self.supply_in_profit_rel_to_own_supply: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'profit_rel_to_own_supply'))
class CostBasisPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
class _1dReturns1mSdPattern: class _1dReturns1mSdPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2397,13 +2382,21 @@ class _1dReturns1mSdPattern:
self.sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sd')) self.sd: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sd'))
self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma')) self.sma: MetricPattern4[StoredF32] = MetricPattern4(client, _m(acc, 'sma'))
class BitcoinPattern2(Generic[T]): class CostBasisPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str): def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name.""" """Create pattern node with accumulated metric name."""
self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative')) self.max: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'max_cost_basis'))
self.sum: MetricPattern1[T] = MetricPattern1(client, acc) self.min: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'min_cost_basis'))
class SupplyPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.halved: ActiveSupplyPattern = ActiveSupplyPattern(client, _m(acc, 'halved'))
self.total: ActiveSupplyPattern = ActiveSupplyPattern(client, acc)
class BlockCountPattern(Generic[T]): class BlockCountPattern(Generic[T]):
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2413,20 +2406,21 @@ class BlockCountPattern(Generic[T]):
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative')) self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.sum: MetricPattern1[T] = MetricPattern1(client, acc) self.sum: MetricPattern1[T] = MetricPattern1(client, acc)
class BitcoinPattern2(Generic[T]):
    """Pattern struct for repeated tree structure.

    A running total ('cumulative' suffix) alongside the unsuffixed
    per-interval sum metric.
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        self.cumulative: MetricPattern2[T] = MetricPattern2(client, _m(acc, 'cumulative'))
        self.sum: MetricPattern1[T] = MetricPattern1(client, acc)  # base metric, no suffix
class SatsPattern(Generic[T]): class SatsPattern(Generic[T]):
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str): def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name.""" """Create pattern node with accumulated metric name."""
self.ohlc: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'ohlc')) self.ohlc: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'ohlc_sats'))
self.split: SplitPattern2[T] = SplitPattern2(client, acc) self.split: SplitPattern2[T] = SplitPattern2(client, _m(acc, 'sats'))
class RealizedPriceExtraPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, acc)
class OutputsPattern: class OutputsPattern:
"""Pattern struct for repeated tree structure.""" """Pattern struct for repeated tree structure."""
@@ -2435,6 +2429,13 @@ class OutputsPattern:
"""Create pattern node with accumulated metric name.""" """Create pattern node with accumulated metric name."""
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, acc) self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, acc)
class RealizedPriceExtraPattern:
    """Pattern struct for repeated tree structure.

    Single-leaf node: exposes only the ratio metric at the accumulated name.
    NOTE(review): generated code — edit the generator, not this file.
    """
    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        self.ratio: MetricPattern4[StoredF32] = MetricPattern4(client, acc)
# Metrics tree classes # Metrics tree classes
class MetricsTree_Addresses: class MetricsTree_Addresses:
@@ -3684,15 +3685,29 @@ class MetricsTree_Price_Oracle:
self.phase_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_daily_dollars') self.phase_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_daily_dollars')
self.phase_histogram: MetricPattern11[OracleBins] = MetricPattern11(client, 'phase_histogram') self.phase_histogram: MetricPattern11[OracleBins] = MetricPattern11(client, 'phase_histogram')
self.phase_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_price_cents') self.phase_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_price_cents')
self.phase_v2_daily_cents: PhaseDailyCentsPattern[Cents] = PhaseDailyCentsPattern(client, 'phase_v2_daily')
self.phase_v2_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_v2_daily_dollars')
self.phase_v2_histogram: MetricPattern11[OracleBinsV2] = MetricPattern11(client, 'phase_v2_histogram')
self.phase_v2_peak_daily_cents: PhaseDailyCentsPattern[Cents] = PhaseDailyCentsPattern(client, 'phase_v2_peak_daily')
self.phase_v2_peak_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_v2_peak_daily_dollars')
self.phase_v2_peak_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_v2_peak_price_cents')
self.phase_v2_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_v2_price_cents')
self.phase_v3_daily_cents: PhaseDailyCentsPattern[Cents] = PhaseDailyCentsPattern(client, 'phase_v3_daily')
self.phase_v3_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_v3_daily_dollars')
self.phase_v3_histogram: MetricPattern11[OracleBinsV2] = MetricPattern11(client, 'phase_v3_histogram')
self.phase_v3_peak_daily_cents: PhaseDailyCentsPattern[Cents] = PhaseDailyCentsPattern(client, 'phase_v3_peak_daily')
self.phase_v3_peak_daily_dollars: PhaseDailyCentsPattern[Dollars] = PhaseDailyCentsPattern(client, 'phase_v3_peak_daily_dollars')
self.phase_v3_peak_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_v3_peak_price_cents')
self.phase_v3_price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'phase_v3_price_cents')
self.price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'oracle_price_cents') self.price_cents: MetricPattern11[Cents] = MetricPattern11(client, 'oracle_price_cents')
self.tx_count: MetricPattern6[StoredU32] = MetricPattern6(client, 'oracle_tx_count') self.tx_count: MetricPattern6[StoredU32] = MetricPattern6(client, 'oracle_tx_count')
class MetricsTree_Price_Sats: class MetricsTree_Price_Usd:
"""Metrics tree node.""" """Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''): def __init__(self, client: BrkClientBase, base_path: str = ''):
self.ohlc: MetricPattern1[OHLCSats] = MetricPattern1(client, 'price_ohlc_sats') self.ohlc: MetricPattern1[OHLCDollars] = MetricPattern1(client, 'price_ohlc')
self.split: SplitPattern2[Sats] = SplitPattern2(client, 'price_sats') self.split: SplitPattern2[Dollars] = SplitPattern2(client, 'price')
class MetricsTree_Price: class MetricsTree_Price:
"""Metrics tree node.""" """Metrics tree node."""
@@ -3700,8 +3715,8 @@ class MetricsTree_Price:
def __init__(self, client: BrkClientBase, base_path: str = ''): def __init__(self, client: BrkClientBase, base_path: str = ''):
self.cents: MetricsTree_Price_Cents = MetricsTree_Price_Cents(client) self.cents: MetricsTree_Price_Cents = MetricsTree_Price_Cents(client)
self.oracle: MetricsTree_Price_Oracle = MetricsTree_Price_Oracle(client) self.oracle: MetricsTree_Price_Oracle = MetricsTree_Price_Oracle(client)
self.sats: MetricsTree_Price_Sats = MetricsTree_Price_Sats(client) self.sats: SatsPattern[OHLCSats] = SatsPattern(client, 'price')
self.usd: SatsPattern[OHLCDollars] = SatsPattern(client, 'price') self.usd: MetricsTree_Price_Usd = MetricsTree_Price_Usd(client)
class MetricsTree_Scripts_Count: class MetricsTree_Scripts_Count:
"""Metrics tree node.""" """Metrics tree node."""
@@ -4791,22 +4806,14 @@ class BrkClient(BrkClientBase):
""" """
return MetricEndpointBuilder(self, metric, index) return MetricEndpointBuilder(self, metric, index)
def get_openapi(self) -> Any: def get_api(self) -> Any:
"""OpenAPI specification. """Compact OpenAPI specification.
Full OpenAPI 3.1 specification for this API. Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information. Full spec available at `/openapi.json`.
Endpoint: `GET /api.json`""" Endpoint: `GET /api.json`"""
return self.get_json('/api.json') return self.get_json('/api.json')
def get_openapi_trimmed(self) -> Any:
"""Trimmed OpenAPI specification.
Compact OpenAPI specification optimized for LLM consumption. Removes redundant fields while preserving essential API information.
Endpoint: `GET /api.trimmed.json`"""
return self.get_json('/api.trimmed.json')
def get_address(self, address: Address) -> AddressStats: def get_address(self, address: Address) -> AddressStats:
"""Address information. """Address information.
@@ -5313,6 +5320,14 @@ class BrkClient(BrkClientBase):
Endpoint: `GET /health`""" Endpoint: `GET /health`"""
return self.get_json('/health') return self.get_json('/health')
def get_openapi(self) -> Any:
"""OpenAPI specification.
Full OpenAPI 3.1 specification for this API.
Endpoint: `GET /openapi.json`"""
return self.get_json('/openapi.json')
def get_version(self) -> str: def get_version(self) -> str:
"""API version. """API version.

File diff suppressed because it is too large Load Diff

View File

@@ -1,174 +0,0 @@
# Oracle Filter Analysis
## Summary
Analysis of ~20M outputs across 2017-2018 to find filters that distinguish accurate price signals from noise.
## Key Finding: Round USD is the Only Reliable Filter
| Filter | Accuracy Advantage | Consistency |
|--------|-------------------|-------------|
| **Round USD = True** | **+20% to +29%** | **12/12 months** |
| Round BTC | +12% to -8% | Flips with price |
| Value range/Decade | varies | Shifts with price |
| Same-day spend | ~3% | Weak |
| Micro-round sats | 0-5% | Inconsistent |
| Tx pattern | <5% | Weak |
| Is smaller output | ~3-4% | Weak |
## Why Other Filters Fail
### Round BTC (Unreliable)
- Jan-Mar 2017 ($1k): Round BTC = True is GOOD (+10-12%)
- Jun-Jul 2017 ($2.5k): Round BTC = True is BAD (-7%)
- Reason: Round BTC only correlates with accuracy when it happens to align with round USD at current price
### Value Range / Decade (Price-Dependent)
- At $1,000/BTC: Decade 5 (100k-1M sats) is good
- At $10,000/BTC: Decade 6 (1M-10M sats) is good
- At $100,000/BTC: Decade 7 (10M-100M sats) would be good
- These shift with price, making them useless as static filters
## The Round USD Insight
Round USD amounts ($1, $5, $10, $20, $50, $100, etc.) map to the **same phase bins** regardless of the price's decade (and, at a fixed price, regardless of their own decade):
```
$100 at $10,000/BTC = 1,000,000 sats → log10 = 6.0 → phase = 0.0 → bin 0
$100 at $100,000/BTC = 100,000 sats → log10 = 5.0 → phase = 0.0 → bin 0
$100 at $1,000/BTC = 10,000,000 sats → log10 = 7.0 → phase = 0.0 → bin 0
```
The phase = `frac(log10(sats))` is **invariant** to price decade!
## Round USD Phase Bins
| USD Amount | log10(USD) | Phase = frac(log10) | Bin (×100) |
|------------|------------|---------------------|------------|
| $1, $10, $100, $1000 | 0, 1, 2, 3 | 0.00 | 0 |
| $1.50, $15, $150 | 0.18, 1.18, 2.18 | 0.18 | 18 |
| $2, $20, $200 | 0.30, 1.30, 2.30 | 0.30 | 30 |
| $2.50, $25, $250 | 0.40, 1.40, 2.40 | 0.40 | 40 |
| $3, $30, $300 | 0.48, 1.48, 2.48 | 0.48 | 48 |
| $4, $40, $400 | 0.60, 1.60, 2.60 | 0.60 | 60 |
| $5, $50, $500 | 0.70, 1.70, 2.70 | 0.70 | 70 |
| $6, $60, $600 | 0.78, 1.78, 2.78 | 0.78 | 78 |
| $7, $70, $700 | 0.85, 1.85, 2.85 | 0.85 | 85 |
| $8, $80, $800 | 0.90, 1.90, 2.90 | 0.90 | 90 |
| $9, $90, $900 | 0.95, 1.95, 2.95 | 0.95 | 95 |
## Implementation Plan
### Approach: Phase-Based Round USD Filtering
Filter outputs to only those whose phase bin corresponds to a round USD amount. No price knowledge needed.
```rust
/// Phase bins where round USD amounts cluster
/// Computed as: bin = round(frac(log10(usd_cents)) * 100)
const ROUND_USD_BINS: &[u8] = &[
0, // $1, $10, $100, $1000 (and $0.10, $0.01)
18, // $1.50, $15, $150
30, // $2, $20, $200
40, // $2.50, $25, $250
48, // $3, $30, $300
60, // $4, $40, $400
70, // $5, $50, $500
78, // $6, $60, $600
85, // $7, $70, $700
90, // $8, $80, $800
95, // $9, $90, $900
];
/// Check if a histogram bin corresponds to a round USD amount
fn is_round_usd_bin(bin: usize, tolerance: u8) -> bool {
let phase_bin = (bin % 100) as u8;
ROUND_USD_BINS.iter().any(|&round_bin| {
let diff = if phase_bin >= round_bin {
phase_bin - round_bin
} else {
round_bin - phase_bin
};
// Handle wraparound (bin 99 is close to bin 0)
diff <= tolerance || (100 - diff) <= tolerance
})
}
```
### Where to Apply Filter
In `compute.rs`, when adding outputs to histogram:
```rust
for sats in values {
if let Some(bin) = Histogram::sats_to_bin(sats) {
// Only include outputs in round-USD phase bins
if is_round_usd_bin(bin, 2) { // ±2 bin tolerance
block_sparse.push((bin as u16, 1.0));
// ... rest of processing
}
}
}
```
### Expected Impact
- Reduces histogram noise by ~60-70% (only ~35% of accurate outputs are round USD)
- Remaining outputs are 2-3x more likely to be accurate signals
- Stencil matching should be more reliable with cleaner signal
- Decade selection via anchors remains unchanged
### Alternative: Weighted Approach
Instead of hard filtering, weight round-USD bins higher:
```rust
let weight = if is_round_usd_bin(bin, 2) { 3.0 } else { 1.0 };
block_sparse.push((bin as u16, weight));
```
This preserves some signal from non-round outputs while emphasizing round USD.
## Bin Resolution: 100 vs 200
UTXOracle uses **200 bins per decade**. Current phase oracle uses 100.
| Resolution | Precision | Round USD cluster |
|------------|-----------|-------------------|
| 100 bins | 1% per bin | Wider, more overlap |
| 200 bins | 0.5% per bin | Tighter, cleaner separation |
**Round USD bins at 200 resolution:**
| USD Amount | Phase = frac(log10) | Bin (×200) |
|------------|---------------------|------------|
| $1, $10, $100 | 0.000 | 0 |
| $1.50, $15, $150 | 0.176 | 35 |
| $2, $20, $200 | 0.301 | 60 |
| $2.50, $25, $250 | 0.398 | 80 |
| $3, $30, $300 | 0.477 | 95 |
| $4, $40, $400 | 0.602 | 120 |
| $5, $50, $500 | 0.699 | 140 |
| $6, $60, $600 | 0.778 | 156 |
| $7, $70, $700 | 0.845 | 169 |
| $8, $80, $800 | 0.903 | 181 |
| $9, $90, $900 | 0.954 | 191 |
**Recommendation**: Use 200 bins for:
1. Compatibility with UTXOracle stencil
2. Tighter round-USD detection
3. Better separation of signal from noise
## Questions to Resolve
1. **Tolerance**: ±2 bins (at 200) = ±1% vs ±4 bins = ±2%
2. **Hard filter vs weight**: Filter completely or just weight higher?
3. **Minimum count threshold**: What if too few outputs pass filter?
4. **Interaction with existing smooth_round_btc()**: Still needed?
5. **Migration**: Update PHASE_BINS constant from 100 to 200
## Validation Plan
1. Implement phase-based filtering
2. Run on 2017-2018 data
3. Compare accuracy vs current approach
4. Tune tolerance parameter

File diff suppressed because it is too large Load Diff

View File

@@ -1,282 +0,0 @@
#!/usr/bin/env python3
"""
Test price phase detection from outputs alone.
The idea: Round USD outputs create a fingerprint pattern that reveals the price phase.
"""
import math
import http.client
import json
import time
from collections import defaultdict
# Local BRK API endpoint queried by fetch().
API_HOST = "localhost"
API_PORT = 3110

# Round USD phases (fixed fingerprint)
# These are frac(log10(usd_cents)) for round USD values, rounded to two
# decimals; the matching tolerance in count_round_usd_matches (0.02)
# absorbs the rounding error.
ROUND_USD_PHASES = [
    0.00,  # $1, $10, $100, $1000
    0.18,  # $1.50, $15, $150
    0.30,  # $2, $20, $200
    0.40,  # $2.50, $25, $250
    0.48,  # $3, $30, $300
    0.60,  # $4, $40, $400
    0.70,  # $5, $50, $500
    0.78,  # $6, $60, $600
    0.85,  # $7, $70, $700
    0.90,  # $8, $80, $800
    0.95,  # $9, $90, $900
]

# Shared HTTP connection, created lazily by get_conn() and dropped by reset_conn().
_conn = None
def get_conn():
    """Return the process-wide HTTP connection, creating it on first use."""
    global _conn
    if _conn is not None:
        return _conn
    _conn = http.client.HTTPConnection(API_HOST, API_PORT, timeout=300)
    return _conn
def reset_conn():
    """Close the shared connection (best effort) and clear the cache.

    The next get_conn() call will then open a fresh connection. Closing
    a stale or half-broken socket may itself raise; that failure is
    irrelevant here, but the catch is narrowed to Exception (the original
    bare `except:` also swallowed KeyboardInterrupt/SystemExit).
    """
    global _conn
    if _conn:
        try:
            _conn.close()
        except Exception:
            pass
    _conn = None
def fetch(path: str, retries: int = 3):
    """GET `path` from the local API and decode the JSON response body.

    Retries up to `retries` times on any failure, resetting the shared
    connection between attempts and sleeping 2s before retrying; the final
    failure is re-raised. Note: retries <= 0 skips the loop entirely and
    returns None. (Fix: dropped the unused `as e` exception binding.)
    """
    for attempt in range(retries):
        try:
            conn = get_conn()
            conn.request("GET", path)
            resp = conn.getresponse()
            data = resp.read().decode('utf-8')
            return json.loads(data)
        except Exception:
            # The connection may be unusable (half-read response, dropped
            # socket) — discard it so the next attempt reconnects.
            reset_conn()
            if attempt < retries - 1:
                time.sleep(2)
            else:
                raise
def fetch_chunked(path_template: str, start: int, end: int, chunk_size: int = 25000) -> list:
    """Fetch the [start, end) range in chunk_size slices and concatenate them.

    `path_template` must contain '{start}' and '{end}' placeholders; each
    response's "data" array is appended to the combined result.
    """
    result = []
    chunk_start = start
    while chunk_start < end:
        chunk_end = min(chunk_start + chunk_size, end)
        chunk = fetch(path_template.format(start=chunk_start, end=chunk_end))["data"]
        result.extend(chunk)
        chunk_start = chunk_end
    return result
def get_sats_phase(sats: int) -> float:
    """Return the phase of a sats amount: the fractional part of log10(sats).

    Non-positive amounts have no logarithm and are mapped to phase 0.0.
    """
    return math.log10(sats) % 1.0 if sats > 0 else 0.0
def count_round_usd_matches(outputs: list, price_phase: float, tolerance: float = 0.02) -> int:
    """
    Count outputs whose sats phase lines up with a round-USD bin at `price_phase`.

    At price phase P, a round USD amount with USD phase U appears at sats
    phase (U - P) mod 1, so the fixed fingerprint is shifted by P and each
    output counts if it lands within `tolerance` of any expected phase
    (with wraparound: phases 0.99 and 0.01 are 0.02 apart).
    Outputs that are None or below 1000 sats are ignored.
    """
    expected = [(usd_phase - price_phase) % 1.0 for usd_phase in ROUND_USD_PHASES]

    def is_match(sats) -> bool:
        if sats is None or sats < 1000:
            return False
        phase = get_sats_phase(sats)
        for exp in expected:
            delta = abs(phase - exp)
            if delta < tolerance or delta > (1.0 - tolerance):
                return True
        return False

    return sum(1 for sats in outputs if is_match(sats))
def find_best_price_phase(outputs: list, tolerance: float = 0.02, resolution: int = 100) -> tuple:
    """
    Scan candidate price phases and pick the one with the most round-USD matches.

    Tries `resolution` evenly spaced phases in [0, 1); on ties the smallest
    phase wins. Returns (best_phase, best_count, all_counts).
    """
    counts = [
        count_round_usd_matches(outputs, i / resolution, tolerance)
        for i in range(resolution)
    ]
    best_phase = 0.0
    best_count = 0
    for i, count in enumerate(counts):
        if count > best_count:
            best_count = count
            best_phase = i / resolution
    return best_phase, best_count, counts
def actual_price_phase(price: float) -> float:
    """Phase of a fiat price: the fractional part of log10(price).

    Assumes price > 0; math.log10 raises ValueError otherwise.
    """
    log_price = math.log10(price)
    return log_price % 1.0
def analyze_day(date_str: str, start_height: int, end_height: int, actual_price: float):
    """Analyze a single day's outputs.

    Pipeline: heights -> txindex range -> txoutindex range -> output values,
    then detect the price phase from the outputs alone and compare it with
    the phase of `actual_price`.

    Returns a result dict, or None when the day yields too little data
    (fewer than 2 height rows, no output indices, or < 1000 usable outputs).
    """
    # Get transaction range for these heights
    first_tx = fetch(f"/api/metric/first_txindex/height?start={start_height}&end={end_height}")
    first_txs = first_tx["data"]
    if not first_txs or len(first_txs) < 2:
        return None
    tx_start = first_txs[0]
    tx_end = first_txs[-1]
    # Get output range
    tx_first_out = fetch_chunked("/api/metric/first_txoutindex/txindex?start={start}&end={end}", tx_start, tx_end)
    if not tx_first_out:
        return None
    out_start = tx_first_out[0]
    # estimate — +10 slack past the last tx's first output index.
    # NOTE(review): may under/over-read the final tx's outputs; confirm
    # against the metric's semantics.
    out_end = tx_first_out[-1] + 10
    # Fetch output values
    out_values = fetch_chunked("/api/metric/value/txoutindex?start={start}&end={end}", out_start, out_end)
    # Filter to reasonable range (1000 sats to 100 BTC); also drops None/0.
    outputs = [v for v in out_values if v and 1000 <= v <= 10_000_000_000]
    if len(outputs) < 1000:
        return None
    # Find best price phase
    detected_phase, match_count, _ = find_best_price_phase(outputs, tolerance=0.02)
    # Compare with actual
    actual_phase = actual_price_phase(actual_price)
    # Phase error (handle wraparound: phases live on a circle of length 1)
    phase_error = abs(detected_phase - actual_phase)
    if phase_error > 0.5:
        phase_error = 1.0 - phase_error
    return {
        'date': date_str,
        'actual_price': actual_price,
        'actual_phase': actual_phase,
        'detected_phase': detected_phase,
        'phase_error': phase_error,
        'match_count': match_count,
        'total_outputs': len(outputs),
        'match_pct': 100 * match_count / len(outputs),
    }
def main():
    """Run the price-phase detection experiment end to end and print a report.

    Samples days from the 2017-2018 era, runs analyze_day() on each, and
    summarizes how close the detected phase is to the phase of the actual
    price. Depends on the module-level fetch()/analyze_day() helpers.
    """
    print("=" * 60)
    print("PRICE PHASE DETECTION TEST")
    print("=" * 60)
    print("\nIdea: Round USD outputs form a fingerprint pattern.")
    print("Sliding this pattern across the histogram reveals the price phase.\n")
    # Fetch the date index (maps dateindex -> date string).
    print("Fetching date index...")
    dates = fetch("/api/metric/date/dateindex?start=0&end=4000")["data"]
    # Fetch daily OHLC prices for the window of interest.
    print("Fetching daily prices...")
    ohlc_data = fetch("/api/metric/price_ohlc/dateindex?start=2800&end=3600")["data"]
    # Fetch the first block height of each day in the same window.
    print("Fetching heights...")
    heights = fetch("/api/metric/first_height/dateindex?start=2800&end=3600")["data"]
    results = []
    # Test on 2017-2018 (roughly dateindex 2900-3600).
    # Sample every 7 days to speed up.
    for di in range(2900, 3550, 7):
        # ohlc_data/heights were fetched starting at dateindex 2800,
        # so list offsets are di - 2800.
        if di - 2800 >= len(ohlc_data) or di - 2800 >= len(heights):
            continue
        ohlc = ohlc_data[di - 2800]
        if not ohlc or len(ohlc) < 4:
            continue
        # Use close price as "actual".
        actual_price = ohlc[3]
        if not actual_price or actual_price <= 0:
            continue
        date_str = dates[di] if di < len(dates) else f"di={di}"
        start_height = heights[di - 2800]
        # Next day's first height bounds the day; fall back to ~one day of blocks (144).
        end_height = heights[di - 2800 + 1] if di - 2800 + 1 < len(heights) else start_height + 144
        if not start_height:
            continue
        print(f"\nAnalyzing {date_str} (${actual_price:.0f})...")
        try:
            result = analyze_day(date_str, start_height, end_height, actual_price)
            if result:
                results.append(result)
                print(f" Actual phase: {result['actual_phase']:.3f}")
                print(f" Detected phase: {result['detected_phase']:.3f}")
                print(f" Phase error: {result['phase_error']:.3f} ({result['phase_error']*100:.1f}%)")
                print(f" Matches: {result['match_count']:,} / {result['total_outputs']:,} ({result['match_pct']:.1f}%)")
        except Exception as e:
            # Best-effort: a single bad day must not abort the whole run.
            print(f" Error: {e}")
            continue
    # Summary over all analyzed days (skipped entirely when nothing succeeded).
    if results:
        print("\n" + "=" * 60)
        print("SUMMARY")
        print("=" * 60)
        errors = [r['phase_error'] for r in results]
        avg_error = sum(errors) / len(errors)
        # Count how many are within various thresholds.
        within_01 = sum(1 for e in errors if e <= 0.01)
        within_02 = sum(1 for e in errors if e <= 0.02)
        within_05 = sum(1 for e in errors if e <= 0.05)
        within_10 = sum(1 for e in errors if e <= 0.10)
        print(f"\nTotal days analyzed: {len(results)}")
        print(f"Average phase error: {avg_error:.3f} ({avg_error*100:.1f}%)")
        print(f"\nPhase error distribution:")
        print(f" ≤1%: {within_01:3d} / {len(results)} ({100*within_01/len(results):.0f}%)")
        print(f" ≤2%: {within_02:3d} / {len(results)} ({100*within_02/len(results):.0f}%)")
        print(f" ≤5%: {within_05:3d} / {len(results)} ({100*within_05/len(results):.0f}%)")
        print(f" ≤10%: {within_10:3d} / {len(results)} ({100*within_10/len(results):.0f}%)")
        # Show worst cases (largest phase error first).
        print(f"\nWorst cases:")
        worst = sorted(results, key=lambda r: -r['phase_error'])[:5]
        for r in worst:
            print(f" {r['date']}: detected {r['detected_phase']:.2f} vs actual {r['actual_phase']:.2f} "
                  f"(error {r['phase_error']:.2f}, ${r['actual_price']:.0f})")
        # Show best cases (smallest phase error first).
        print(f"\nBest cases:")
        best = sorted(results, key=lambda r: r['phase_error'])[:5]
        for r in best:
            print(f" {r['date']}: detected {r['detected_phase']:.2f} vs actual {r['actual_phase']:.2f} "
                  f"(error {r['phase_error']:.3f}, ${r['actual_price']:.0f})")
# Run the experiment when executed as a script (not on import).
if __name__ == "__main__":
    main()

View File

@@ -13,7 +13,7 @@ import { createInvestingSection } from "./investing.js";
* @returns {PartialOptionsGroup} * @returns {PartialOptionsGroup}
*/ */
export function createMarketSection(ctx) { export function createMarketSection(ctx) {
const { colors, brk, line, candlestick } = ctx; const { colors, brk, line } = ctx;
const { market, supply, price } = brk.metrics; const { market, supply, price } = brk.metrics;
const { const {
movingAverage, movingAverage,
@@ -35,35 +35,71 @@ export function createMarketSection(ctx) {
{ {
name: "Price", name: "Price",
title: "Bitcoin Price", title: "Bitcoin Price",
...(localhost && { },
...(localhost
? [
{
name: "Oracle",
title: "Oracle Price",
top: [ top: [
// candlestick({
// metric: price.oracle.ohlcDollars,
// name: "Oracle base",
// unit: Unit.usd,
// colors: [colors.cyan, colors.purple],
// }),
line({ line({
metric: price.oracle.phaseDailyDollars.median, metric: price.oracle.phaseDailyDollars.median,
name: "o. p50", name: "o. p50",
unit: Unit.usd, unit: Unit.usd,
color: colors.yellow, color: colors.yellow,
}), }),
line({
metric: price.oracle.phaseV2DailyDollars.median,
name: "o2. p50",
unit: Unit.usd,
color: colors.orange,
}),
line({
metric: price.oracle.phaseV2PeakDailyDollars.median,
name: "o2.2 p50",
unit: Unit.usd,
color: colors.orange,
}),
line({
metric: price.oracle.phaseV3DailyDollars.median,
name: "o3. p50",
unit: Unit.usd,
color: colors.red,
}),
line({
metric: price.oracle.phaseV3PeakDailyDollars.median,
name: "o3.2 p50",
unit: Unit.usd,
color: colors.red,
}),
line({ line({
metric: price.oracle.phaseDailyDollars.max, metric: price.oracle.phaseDailyDollars.max,
name: "o. max", name: "o. max",
unit: Unit.usd, unit: Unit.usd,
color: colors.lime, color: colors.lime,
}), }),
line({
metric: price.oracle.phaseV2DailyDollars.max,
name: "o.2 max",
unit: Unit.usd,
color: colors.emerald,
}),
line({ line({
metric: price.oracle.phaseDailyDollars.min, metric: price.oracle.phaseDailyDollars.min,
name: "o. min", name: "o. min",
unit: Unit.usd, unit: Unit.usd,
color: colors.rose, color: colors.rose,
}), }),
], line({
metric: price.oracle.phaseV2DailyDollars.min,
name: "o.2 min",
unit: Unit.usd,
color: colors.purple,
}), }),
],
}, },
]
: []),
// Capitalization // Capitalization
{ {