diff --git a/Cargo.lock b/Cargo.lock index 687beb16a..b5ff9c632 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -742,11 +742,12 @@ dependencies = [ name = "brk_mcp" version = "0.1.0-alpha.1" dependencies = [ - "aide", - "brk_query", + "axum", "brk_rmcp", - "brk_types", "log", + "minreq", + "schemars", + "serde", "serde_json", ] @@ -2780,9 +2781,9 @@ checksum = "7ee5b5339afb4c41626dde77b7a611bd4f2c202b897852b4bcf5d03eddc61010" [[package]] name = "jiff" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35" +checksum = "a87d9b8105c23642f50cbbae03d1f75d8422c5cb98ce7ee9271f7ff7505be6b8" dependencies = [ "jiff-static", "jiff-tzdb-platform", @@ -2795,9 +2796,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69" +checksum = "b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58" dependencies = [ "proc-macro2", "quote", @@ -3635,14 +3636,15 @@ dependencies = [ [[package]] name = "oxc_resolver" -version = "11.16.0" +version = "11.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82835b74b32841714c1342b1636992d19622d4ec19666b55edb4c654fb6eb719" +checksum = "5467a6fd6e1b2a0cc25f4f89a5ece8594213427e430ba8f0a8f900808553cb1e" dependencies = [ "cfg-if", "fast-glob", "indexmap", "json-strip-comments", + "nodejs-built-in-modules", "once_cell", "papaya", "parking_lot", @@ -4235,8 +4237,6 @@ dependencies = [ [[package]] name = "rawdb" version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c73aead6409391fb8d52ca74985f75983c61a81247de5d78312b7134dd1818a" dependencies = [ "libc", "log", @@ -5427,8 +5427,6 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23" [[package]] 
name = "vecdb" version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8386c4148b31b9ba931394b0e97f5fc8d5f1644c5b55cdae52611e18227aee5" dependencies = [ "ctrlc", "log", @@ -5436,6 +5434,7 @@ dependencies = [ "parking_lot", "pco", "rawdb", + "schemars", "serde", "serde_json", "thiserror 2.0.17", @@ -5447,8 +5446,6 @@ dependencies = [ [[package]] name = "vecdb_derive" version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a57c4efc56bf5aa76ccf39e52bc4d3154848c13996a1c0779dec4fd21eaf4a" dependencies = [ "quote", "syn 2.0.111", @@ -6042,9 +6039,9 @@ checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" [[package]] name = "zmij" -version = "0.1.7" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e404bcd8afdaf006e529269d3e85a743f9480c3cef60034d77860d02964f3ba" +checksum = "d0095ecd462946aa3927d9297b63ef82fb9a5316d7a37d134eeb36e58228615a" [[package]] name = "zopfli" diff --git a/Cargo.toml b/Cargo.toml index 5a8c70563..4ee8d320f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,7 +66,7 @@ byteview = "0.9.1" color-eyre = "0.6.5" derive_deref = "1.1.1" fjall = "3.0.0-rc.6" -jiff = "0.2.16" +jiff = "0.2.17" log = "0.4.29" mimalloc = { version = "0.1.48", features = ["v3"] } minreq = { version = "2.14.1", features = ["https", "serde_json"] } @@ -80,8 +80,8 @@ serde_derive = "1.0.228" serde_json = { version = "1.0.147", features = ["float_roundtrip"] } smallvec = "1.15.1" tokio = { version = "1.48.0", features = ["rt-multi-thread"] } -vecdb = { version = "0.4.6", features = ["derive", "serde_json", "pco"] } -# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } +# vecdb = { version = "0.4.6", features = ["derive", "serde_json", "pco"] } +vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } # vecdb = { git = 
"https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] } [workspace.metadata.release] diff --git a/crates/brk_binder/src/javascript.rs b/crates/brk_binder/src/javascript.rs index b6c959583..00df370f1 100644 --- a/crates/brk_binder/src/javascript.rs +++ b/crates/brk_binder/src/javascript.rs @@ -13,12 +13,14 @@ use crate::{ get_node_fields, get_pattern_instance_base, to_camel_case, to_pascal_case, }; -/// Generate JavaScript + JSDoc client from metadata and OpenAPI endpoints +/// Generate JavaScript + JSDoc client from metadata and OpenAPI endpoints. +/// +/// `output_path` is the full path to the output file (e.g., "modules/brk-client/index.js"). pub fn generate_javascript_client( metadata: &ClientMetadata, endpoints: &[Endpoint], schemas: &TypeSchemas, - output_dir: &Path, + output_path: &Path, ) -> io::Result<()> { let mut output = String::new(); @@ -44,7 +46,7 @@ pub fn generate_javascript_client( // Generate the main client class with tree and API methods generate_main_client(&mut output, &metadata.catalog, metadata, endpoints); - fs::write(output_dir.join("client.js"), output)?; + fs::write(output_path, output)?; Ok(()) } @@ -446,13 +448,21 @@ fn generate_structural_patterns( // Generate factory function writeln!(output, "/**").unwrap(); writeln!(output, " * Create a {} pattern node", pattern.name).unwrap(); + if pattern.is_generic { + writeln!(output, " * @template T").unwrap(); + } writeln!(output, " * @param {{BrkClientBase}} client").unwrap(); if is_parameterizable { writeln!(output, " * @param {{string}} acc - Accumulated metric name").unwrap(); } else { writeln!(output, " * @param {{string}} basePath").unwrap(); } - writeln!(output, " * @returns {{{}}}", pattern.name).unwrap(); + let return_type = if pattern.is_generic { + format!("{}", pattern.name) + } else { + pattern.name.clone() + }; + writeln!(output, " * @returns {{{}}}", return_type).unwrap(); writeln!(output, " */").unwrap(); let param_name = if is_parameterizable { @@ 
-613,11 +623,17 @@ fn field_to_js_type_with_generic_value( }; if metadata.is_pattern_type(&field.rust_type) { - // Check if this pattern is generic and we have a value type - if metadata.is_pattern_generic(&field.rust_type) - && let Some(vt) = generic_value_type - { - return format!("{}<{}>", field.rust_type, vt); + // Check if this pattern is generic + if metadata.is_pattern_generic(&field.rust_type) { + if let Some(vt) = generic_value_type { + return format!("{}<{}>", field.rust_type, vt); + } else if is_generic { + // Propagate T when inside a generic pattern + return format!("{}", field.rust_type); + } else { + // Generic pattern without known type - use unknown + return format!("{}", field.rust_type); + } } field.rust_type.clone() } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { @@ -629,7 +645,6 @@ fn field_to_js_type_with_generic_value( } } - /// Generate tree typedefs fn generate_tree_typedefs(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) { writeln!(output, "// Catalog tree typedefs\n").unwrap(); @@ -666,7 +681,8 @@ fn generate_tree_typedef( .collect(); // Skip if this matches a pattern (already generated) - if pattern_lookup.contains_key(&fields) && pattern_lookup.get(&fields) != Some(&name.to_string()) + if pattern_lookup.contains_key(&fields) + && pattern_lookup.get(&fields) != Some(&name.to_string()) { return; } @@ -680,15 +696,13 @@ fn generate_tree_typedef( writeln!(output, " * @typedef {{Object}} {}", name).unwrap(); for (field, child_fields) in &fields_with_child_info { + // Look up type parameter for generic patterns let generic_value_type = child_fields .as_ref() - .and_then(|cf| metadata.get_generic_value_type(&field.rust_type, cf)); - let js_type = field_to_js_type_with_generic_value( - field, - metadata, - false, - generic_value_type.as_deref(), - ); + .and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); + let js_type = + field_to_js_type_with_generic_value(field, metadata, 
false, generic_value_type); writeln!( output, " * @property {{{}}} {}", @@ -899,6 +913,11 @@ fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { if let Some(summary) = &endpoint.summary { writeln!(output, " * {}", summary).unwrap(); } + if let Some(desc) = &endpoint.description + && endpoint.summary.as_ref() != Some(desc) + { + writeln!(output, " * @description {}", desc).unwrap(); + } for param in &endpoint.path_params { let desc = param.description.as_deref().unwrap_or(""); diff --git a/crates/brk_binder/src/lib.rs b/crates/brk_binder/src/lib.rs index 6df160f0f..5d6306be8 100644 --- a/crates/brk_binder/src/lib.rs +++ b/crates/brk_binder/src/lib.rs @@ -27,14 +27,57 @@ //! 2. **Schema collection** - Merges OpenAPI schemas with schemars-generated type schemas //! //! 3. **Code generation** - Produces language-specific clients: -//! - Rust: Uses `brk_types` directly, generates structs with lifetimes +//! - Rust: Uses `brk_types` directly, generates structs with Arc-based sharing //! - JavaScript: Generates JSDoc-typed ES modules with factory functions //! - Python: Generates typed classes with TypedDict and Generic support -use std::{collections::btree_map::Entry, fs::create_dir_all, io, path::Path}; +use std::{collections::btree_map::Entry, fs::create_dir_all, io, path::PathBuf}; use brk_query::Vecs; +/// Output path configuration for each language client. +/// +/// Each path should be the full path to the output file, not just a directory. +/// Parent directories will be created automatically if they don't exist. 
+/// +/// # Example +/// ```ignore +/// let paths = ClientOutputPaths::new() +/// .rust("crates/brk_client/src/lib.rs") +/// .javascript("modules/brk-client/index.js") +/// .python("packages/brk_client/__init__.py"); +/// ``` +#[derive(Debug, Clone, Default)] +pub struct ClientOutputPaths { + /// Full path to Rust client file (e.g., "crates/brk_client/src/lib.rs") + pub rust: Option, + /// Full path to JavaScript client file (e.g., "modules/brk-client/index.js") + pub javascript: Option, + /// Full path to Python client file (e.g., "packages/brk_client/__init__.py") + pub python: Option, +} + +impl ClientOutputPaths { + pub fn new() -> Self { + Self::default() + } + + pub fn rust(mut self, path: impl Into) -> Self { + self.rust = Some(path.into()); + self + } + + pub fn javascript(mut self, path: impl Into) -> Self { + self.javascript = Some(path.into()); + self + } + + pub fn python(mut self, path: impl Into) -> Self { + self.python = Some(path.into()); + self + } +} + mod javascript; mod js; mod openapi; @@ -51,8 +94,25 @@ pub use types::*; pub const VERSION: &str = env!("CARGO_PKG_VERSION"); -/// Generate all client libraries from the query vecs and OpenAPI JSON -pub fn generate_clients(vecs: &Vecs, openapi_json: &str, output_dir: &Path) -> io::Result<()> { +/// Generate all client libraries from the query vecs and OpenAPI JSON. +/// +/// Uses `ClientOutputPaths` to specify the output file path for each language. +/// Only languages with a configured path will be generated. 
+/// +/// # Example +/// ```ignore +/// let paths = ClientOutputPaths::new() +/// .rust("crates/brk_client/src/lib.rs") +/// .javascript("modules/brk-client/index.js") +/// .python("packages/brk_client/__init__.py"); +/// +/// generate_clients(&vecs, &openapi_json, &paths)?; +/// ``` +pub fn generate_clients( + vecs: &Vecs, + openapi_json: &str, + output_paths: &ClientOutputPaths, +) -> io::Result<()> { let metadata = ClientMetadata::from_vecs(vecs); // Parse OpenAPI spec @@ -71,19 +131,28 @@ pub fn generate_clients(vecs: &Vecs, openapi_json: &str, output_dir: &Path) -> i } // Generate Rust client (uses real brk_types, no schema conversion needed) - let rust_path = output_dir.join("rust"); - create_dir_all(&rust_path)?; - generate_rust_client(&metadata, &endpoints, &rust_path)?; + if let Some(rust_path) = &output_paths.rust { + if let Some(parent) = rust_path.parent() { + create_dir_all(parent)?; + } + generate_rust_client(&metadata, &endpoints, rust_path)?; + } // Generate JavaScript client (needs schemas for type definitions) - let js_path = output_dir.join("javascript"); - create_dir_all(&js_path)?; - generate_javascript_client(&metadata, &endpoints, &schemas, &js_path)?; + if let Some(js_path) = &output_paths.javascript { + if let Some(parent) = js_path.parent() { + create_dir_all(parent)?; + } + generate_javascript_client(&metadata, &endpoints, &schemas, js_path)?; + } // Generate Python client (needs schemas for type definitions) - let python_path = output_dir.join("python"); - create_dir_all(&python_path)?; - generate_python_client(&metadata, &endpoints, &schemas, &python_path)?; + if let Some(python_path) = &output_paths.python { + if let Some(parent) = python_path.parent() { + create_dir_all(parent)?; + } + generate_python_client(&metadata, &endpoints, &schemas, python_path)?; + } Ok(()) } diff --git a/crates/brk_binder/src/openapi.rs b/crates/brk_binder/src/openapi.rs index c3c96e3b5..94a92b8eb 100644 --- a/crates/brk_binder/src/openapi.rs +++ 
b/crates/brk_binder/src/openapi.rs @@ -17,8 +17,10 @@ pub struct Endpoint { pub path: String, /// Operation ID (e.g., "getBlockByHash") pub operation_id: Option, - /// Summary/description + /// Short summary pub summary: Option, + /// Detailed description + pub description: Option, /// Tags for grouping pub tags: Vec, /// Path parameters @@ -185,10 +187,8 @@ fn extract_endpoint(path: &str, method: &str, operation: &Operation) -> Option io::Result<()> { let mut output = String::new(); @@ -57,7 +59,7 @@ pub fn generate_python_client( // Generate main client with tree and API methods generate_main_client(&mut output, endpoints); - fs::write(output_dir.join("client.py"), output)?; + fs::write(output_path, output)?; Ok(()) } @@ -720,14 +722,16 @@ fn generate_tree_class( for ((field, child_fields_opt), (child_name, child_node)) in fields_with_child_info.iter().zip(children.iter()) { + // Look up type parameter for generic patterns let generic_value_type = child_fields_opt .as_ref() - .and_then(|cf| metadata.get_generic_value_type(&field.rust_type, cf)); + .and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); let py_type = field_to_python_type_with_generic_value( field, metadata, false, - generic_value_type.as_deref(), + generic_value_type, ); let field_name_py = to_snake_case(&field.name); @@ -864,8 +868,19 @@ fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { .unwrap(); // Docstring - if let Some(summary) = &endpoint.summary { - writeln!(output, " \"\"\"{}\"\"\"", summary).unwrap(); + match (&endpoint.summary, &endpoint.description) { + (Some(summary), Some(desc)) if summary != desc => { + writeln!(output, " \"\"\"{}.", summary.trim_end_matches('.')).unwrap(); + writeln!(output).unwrap(); + writeln!(output, " {}\"\"\"", desc).unwrap(); + } + (Some(summary), _) => { + writeln!(output, " \"\"\"{}\"\"\"", summary).unwrap(); + } + (None, Some(desc)) => { + writeln!(output, " \"\"\"{}\"\"\"", desc).unwrap(); + } + (None, None) => {} } // 
Build path diff --git a/crates/brk_binder/src/rust.rs b/crates/brk_binder/src/rust.rs index 631487ccb..05ff32125 100644 --- a/crates/brk_binder/src/rust.rs +++ b/crates/brk_binder/src/rust.rs @@ -12,11 +12,13 @@ use crate::{ to_pascal_case, to_snake_case, }; -/// Generate Rust client from metadata and OpenAPI endpoints +/// Generate Rust client from metadata and OpenAPI endpoints. +/// +/// `output_path` is the full path to the output file (e.g., "crates/brk_client/src/lib.rs"). pub fn generate_rust_client( metadata: &ClientMetadata, endpoints: &[Endpoint], - output_dir: &Path, + output_path: &Path, ) -> io::Result<()> { let mut output = String::new(); @@ -47,7 +49,7 @@ pub fn generate_rust_client( // Generate main client with API methods generate_main_client(&mut output, endpoints); - fs::write(output_dir.join("client.rs"), output)?; + fs::write(output_path, output)?; Ok(()) } @@ -55,7 +57,7 @@ pub fn generate_rust_client( fn generate_imports(output: &mut String) { writeln!( output, - r#"use std::marker::PhantomData; + r#"use std::sync::Arc; use serde::de::DeserializeOwned; use brk_types::*; @@ -88,14 +90,14 @@ pub type Result = std::result::Result; #[derive(Debug, Clone)] pub struct BrkClientOptions {{ pub base_url: String, - pub timeout_ms: u64, + pub timeout_secs: u64, }} impl Default for BrkClientOptions {{ fn default() -> Self {{ Self {{ base_url: "http://localhost:3000".to_string(), - timeout_ms: 30000, + timeout_secs: 30, }} }} }} @@ -104,36 +106,41 @@ impl Default for BrkClientOptions {{ #[derive(Debug, Clone)] pub struct BrkClientBase {{ base_url: String, - client: reqwest::blocking::Client, + timeout_secs: u64, }} impl BrkClientBase {{ /// Create a new client with the given base URL. 
- pub fn new(base_url: impl Into) -> Result {{ - let base_url = base_url.into(); - let client = reqwest::blocking::Client::new(); - Ok(Self {{ base_url, client }}) + pub fn new(base_url: impl Into) -> Self {{ + Self {{ + base_url: base_url.into(), + timeout_secs: 30, + }} }} /// Create a new client with options. - pub fn with_options(options: BrkClientOptions) -> Result {{ - let client = reqwest::blocking::Client::builder() - .timeout(std::time::Duration::from_millis(options.timeout_ms)) - .build() - .map_err(|e| BrkError {{ message: e.to_string() }})?; - Ok(Self {{ + pub fn with_options(options: BrkClientOptions) -> Self {{ + Self {{ base_url: options.base_url, - client, - }}) + timeout_secs: options.timeout_secs, + }} }} /// Make a GET request. pub fn get(&self, path: &str) -> Result {{ let url = format!("{{}}{{}}", self.base_url, path); - self.client - .get(&url) + let response = minreq::get(&url) + .with_timeout(self.timeout_secs) .send() - .map_err(|e| BrkError {{ message: e.to_string() }})? + .map_err(|e| BrkError {{ message: e.to_string() }})?; + + if response.status_code >= 400 {{ + return Err(BrkError {{ + message: format!("HTTP {{}}", response.status_code), + }}); + }} + + response .json() .map_err(|e| BrkError {{ message: e.to_string() }}) }} @@ -148,18 +155,18 @@ fn generate_metric_node(output: &mut String) { writeln!( output, r#"/// A metric node that can fetch data for different indexes. 
-pub struct MetricNode<'a, T> {{ - client: &'a BrkClientBase, +pub struct MetricNode {{ + client: Arc, path: String, - _marker: PhantomData, + _marker: std::marker::PhantomData, }} -impl<'a, T: DeserializeOwned> MetricNode<'a, T> {{ - pub fn new(client: &'a BrkClientBase, path: String) -> Self {{ +impl MetricNode {{ + pub fn new(client: Arc, path: String) -> Self {{ Self {{ client, path, - _marker: PhantomData, + _marker: std::marker::PhantomData, }} }} @@ -168,7 +175,7 @@ impl<'a, T: DeserializeOwned> MetricNode<'a, T> {{ self.client.get(&self.path) }} - /// Fetch data points within a date range. + /// Fetch data points within a range. pub fn get_range(&self, from: &str, to: &str) -> Result> {{ let path = format!("{{}}?from={{}}&to={{}}", self.path, from, to); self.client.get(&path) @@ -195,26 +202,20 @@ fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { pattern.indexes.len() ) .unwrap(); - writeln!(output, "pub struct {}<'a, T> {{", pattern.name).unwrap(); + writeln!(output, "pub struct {} {{", pattern.name).unwrap(); for index in &pattern.indexes { let field_name = index_to_field_name(index); - writeln!(output, " pub {}: MetricNode<'a, T>,", field_name).unwrap(); + writeln!(output, " pub {}: MetricNode,", field_name).unwrap(); } - writeln!(output, " _marker: PhantomData,").unwrap(); writeln!(output, "}}\n").unwrap(); // Generate impl block with constructor + writeln!(output, "impl {} {{", pattern.name).unwrap(); writeln!( output, - "impl<'a, T: DeserializeOwned> {}<'a, T> {{", - pattern.name - ) - .unwrap(); - writeln!( - output, - " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{" + " pub fn new(client: Arc, base_path: &str) -> Self {{" ) .unwrap(); writeln!(output, " Self {{").unwrap(); @@ -224,13 +225,12 @@ fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) { let path_segment = index.serialize_long(); writeln!( output, - " {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),", 
+ " {}: MetricNode::new(client.clone(), format!(\"{{base_path}}/{}\")),", field_name, path_segment ) .unwrap(); } - writeln!(output, " _marker: PhantomData,").unwrap(); writeln!(output, " }}").unwrap(); writeln!(output, " }}").unwrap(); writeln!(output, "}}\n").unwrap(); @@ -256,11 +256,7 @@ fn generate_pattern_structs( for pattern in patterns { let is_parameterizable = pattern.is_parameterizable(); - let generic_params = if pattern.is_generic { - "<'a, T>" - } else { - "<'a>" - }; + let generic_params = if pattern.is_generic { "" } else { "" }; writeln!(output, "/// Pattern struct for repeated tree structure.").unwrap(); writeln!(output, "pub struct {}{} {{", pattern.name, generic_params).unwrap(); @@ -275,10 +271,15 @@ fn generate_pattern_structs( writeln!(output, "}}\n").unwrap(); // Generate impl block with constructor + let impl_generic = if pattern.is_generic { + "" + } else { + "" + }; writeln!( output, "impl{} {}{} {{", - generic_params, pattern.name, generic_params + impl_generic, pattern.name, generic_params ) .unwrap(); @@ -290,13 +291,13 @@ fn generate_pattern_structs( .unwrap(); writeln!( output, - " pub fn new(client: &'a BrkClientBase, acc: &str) -> Self {{" + " pub fn new(client: Arc, acc: &str) -> Self {{" ) .unwrap(); } else { writeln!( output, - " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{" + " pub fn new(client: Arc, base_path: &str) -> Self {{" ) .unwrap(); } @@ -340,7 +341,7 @@ fn generate_parameterized_rust_field( writeln!( output, - " {}: {}::new(client, {}),", + " {}: {}::new(client.clone(), {}),", field_name, field.rust_type, child_acc ) .unwrap(); @@ -363,14 +364,14 @@ fn generate_parameterized_rust_field( let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); writeln!( output, - " {}: {}::new(client, &{}),", + " {}: {}::new(client.clone(), &{}),", field_name, accessor.name, metric_expr ) .unwrap(); } else { writeln!( output, - " {}: MetricNode::new(client, {}),", + " {}: 
MetricNode::new(client.clone(), {}),", field_name, metric_expr ) .unwrap(); @@ -388,7 +389,7 @@ fn generate_tree_path_rust_field( if metadata.is_pattern_type(&field.rust_type) { writeln!( output, - " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),", + " {}: {}::new(client.clone(), &format!(\"{{base_path}}/{}\")),", field_name, field.rust_type, field.name ) .unwrap(); @@ -396,14 +397,14 @@ fn generate_tree_path_rust_field( let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap(); writeln!( output, - " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),", + " {}: {}::new(client.clone(), &format!(\"{{base_path}}/{}\")),", field_name, accessor.name, field.name ) .unwrap(); } else { writeln!( output, - " {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),", + " {}: MetricNode::new(client.clone(), format!(\"{{base_path}}/{}\")),", field_name, field.name ) .unwrap(); @@ -441,15 +442,16 @@ fn field_to_type_annotation_with_generic( if metadata.is_pattern_generic(&field.rust_type) && let Some(vt) = generic_value_type { - return format!("{}<'a, {}>", field.rust_type, vt); + return format!("{}<{}>", field.rust_type, vt); } - format!("{}<'a>", field.rust_type) + // Non-generic pattern has no type params + field.rust_type.clone() } else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) { // Leaf with a reusable accessor pattern - format!("{}<'a, {}>", accessor.name, value_type) + format!("{}<{}>", accessor.name, value_type) } else { // Leaf with unique index set - use MetricNode directly - format!("MetricNode<'a, {}>", value_type) + format!("MetricNode<{}>", value_type) } } @@ -501,29 +503,27 @@ fn generate_tree_node( generated.insert(name.to_string()); writeln!(output, "/// Catalog tree node.").unwrap(); - writeln!(output, "pub struct {}<'a> {{", name).unwrap(); + writeln!(output, "pub struct {} {{", name).unwrap(); for (field, child_fields) in &fields_with_child_info { let field_name = to_snake_case(&field.name); + // Look up 
type parameter for generic patterns let generic_value_type = child_fields .as_ref() - .and_then(|cf| metadata.get_generic_value_type(&field.rust_type, cf)); - let type_annotation = field_to_type_annotation_with_generic( - field, - metadata, - false, - generic_value_type.as_deref(), - ); + .and_then(|cf| metadata.get_type_param(cf)) + .map(String::as_str); + let type_annotation = + field_to_type_annotation_with_generic(field, metadata, false, generic_value_type); writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap(); } writeln!(output, "}}\n").unwrap(); // Generate impl block - writeln!(output, "impl<'a> {}<'a> {{", name).unwrap(); + writeln!(output, "impl {} {{", name).unwrap(); writeln!( output, - " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{" + " pub fn new(client: Arc, base_path: &str) -> Self {{" ) .unwrap(); writeln!(output, " Self {{").unwrap(); @@ -538,14 +538,14 @@ fn generate_tree_node( let metric_base = get_pattern_instance_base(child_node, child_name); writeln!( output, - " {}: {}::new(client, \"{}\"),", + " {}: {}::new(client.clone(), \"{}\"),", field_name, field.rust_type, metric_base ) .unwrap(); } else { writeln!( output, - " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),", + " {}: {}::new(client.clone(), &format!(\"{{base_path}}/{}\")),", field_name, field.rust_type, field.name ) .unwrap(); @@ -560,14 +560,14 @@ fn generate_tree_node( if metric_path.contains("{base_path}") { writeln!( output, - " {}: {}::new(client, &format!(\"{}\")),", + " {}: {}::new(client.clone(), &format!(\"{}\")),", field_name, accessor.name, metric_path ) .unwrap(); } else { writeln!( output, - " {}: {}::new(client, \"{}\"),", + " {}: {}::new(client.clone(), \"{}\"),", field_name, accessor.name, metric_path ) .unwrap(); @@ -581,14 +581,14 @@ fn generate_tree_node( if metric_path.contains("{base_path}") { writeln!( output, - " {}: MetricNode::new(client, format!(\"{}\")),", + " {}: MetricNode::new(client.clone(), format!(\"{}\")),", 
field_name, metric_path ) .unwrap(); } else { writeln!( output, - " {}: MetricNode::new(client, \"{}\".to_string()),", + " {}: MetricNode::new(client.clone(), \"{}\".to_string()),", field_name, metric_path ) .unwrap(); @@ -625,27 +625,28 @@ fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) { output, r#"/// Main BRK client with catalog tree and API methods. pub struct BrkClient {{ - base: BrkClientBase, + base: Arc, + tree: CatalogTree, }} impl BrkClient {{ /// Create a new client with the given base URL. - pub fn new(base_url: impl Into) -> Result {{ - Ok(Self {{ - base: BrkClientBase::new(base_url)?, - }}) + pub fn new(base_url: impl Into) -> Self {{ + let base = Arc::new(BrkClientBase::new(base_url)); + let tree = CatalogTree::new(base.clone(), ""); + Self {{ base, tree }} }} /// Create a new client with options. - pub fn with_options(options: BrkClientOptions) -> Result {{ - Ok(Self {{ - base: BrkClientBase::with_options(options)?, - }}) + pub fn with_options(options: BrkClientOptions) -> Self {{ + let base = Arc::new(BrkClientBase::with_options(options)); + let tree = CatalogTree::new(base.clone(), ""); + Self {{ base, tree }} }} /// Get the catalog tree for navigating metrics. 
- pub fn tree(&self) -> CatalogTree<'_> {{ - CatalogTree::new(&self.base, "") + pub fn tree(&self) -> &CatalogTree {{ + &self.tree }} "# ) @@ -678,6 +679,12 @@ fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) { endpoint.summary.as_deref().unwrap_or(&method_name) ) .unwrap(); + if let Some(desc) = &endpoint.description + && endpoint.summary.as_ref() != Some(desc) + { + writeln!(output, " ///").unwrap(); + writeln!(output, " /// {}", desc).unwrap(); + } // Build method signature let params = build_method_params(endpoint); diff --git a/crates/brk_binder/src/types/mod.rs b/crates/brk_binder/src/types/mod.rs index 0c5b9963d..839b2f94f 100644 --- a/crates/brk_binder/src/types/mod.rs +++ b/crates/brk_binder/src/types/mod.rs @@ -39,14 +39,16 @@ pub struct ClientMetadata { /// Index set patterns - sets of indexes that appear together on metrics pub index_set_patterns: Vec, /// Maps concrete field signatures to pattern names - pub concrete_to_pattern: HashMap, String>, + concrete_to_pattern: HashMap, String>, + /// Maps concrete field signatures to their type parameter (for generic patterns) + concrete_to_type_param: HashMap, String>, } impl ClientMetadata { /// Extract metadata from brk_query::Vecs. pub fn from_vecs(vecs: &Vecs) -> Self { let catalog = vecs.catalog().clone(); - let (structural_patterns, concrete_to_pattern) = + let (structural_patterns, concrete_to_pattern, concrete_to_type_param) = patterns::detect_structural_patterns(&catalog); let (used_indexes, index_set_patterns) = tree::detect_index_patterns(&catalog); @@ -56,6 +58,7 @@ impl ClientMetadata { used_indexes, index_set_patterns, concrete_to_pattern, + concrete_to_type_param, } } @@ -81,19 +84,9 @@ impl ClientMetadata { self.find_pattern(name).is_some_and(|p| p.is_generic) } - /// Extract the value type from concrete fields for a generic pattern. 
- pub fn get_generic_value_type( - &self, - pattern_name: &str, - fields: &[PatternField], - ) -> Option { - if !self.is_pattern_generic(pattern_name) { - return None; - } - fields - .iter() - .find(|f| f.is_leaf()) - .map(|f| extract_inner_type(&f.rust_type)) + /// Get the type parameter for a generic pattern given its concrete fields. + pub fn get_type_param(&self, fields: &[PatternField]) -> Option<&String> { + self.concrete_to_type_param.get(fields) } /// Build a lookup map from field signatures to pattern names. diff --git a/crates/brk_binder/src/types/patterns.rs b/crates/brk_binder/src/types/patterns.rs index b8c22ad91..b12271a8d 100644 --- a/crates/brk_binder/src/types/patterns.rs +++ b/crates/brk_binder/src/types/patterns.rs @@ -1,19 +1,24 @@ //! Pattern detection for structural patterns in the metric tree. -use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::collections::{BTreeSet, HashMap}; use brk_types::TreeNode; use super::{ - case::to_pascal_case, schema::schema_to_json_type, FieldNamePosition, PatternField, - StructuralPattern, + FieldNamePosition, PatternField, StructuralPattern, case::to_pascal_case, + schema::schema_to_json_type, + tree::{get_first_leaf_name, get_node_fields}, }; /// Detect structural patterns in the tree using a bottom-up approach. -/// Returns (patterns, concrete_to_pattern_mapping). +/// Returns (patterns, concrete_to_pattern, concrete_to_type_param). 
pub fn detect_structural_patterns( tree: &TreeNode, -) -> (Vec, HashMap, String>) { +) -> ( + Vec, + HashMap, String>, + HashMap, String>, +) { let mut signature_to_pattern: HashMap, String> = HashMap::new(); let mut signature_counts: HashMap, usize> = HashMap::new(); let mut normalized_to_name: HashMap, String> = HashMap::new(); @@ -29,8 +34,9 @@ pub fn detect_structural_patterns( &mut name_counts, ); - // Identify generic patterns - let (generic_patterns, generic_mappings) = detect_generic_patterns(&signature_to_pattern); + // Identify generic patterns (also extracts type params) + let (generic_patterns, generic_mappings, type_mappings) = + detect_generic_patterns(&signature_to_pattern); // Build non-generic patterns: signatures appearing 2+ times that weren't merged into generics let mut patterns: Vec = signature_to_pattern @@ -64,33 +70,43 @@ pub fn detect_structural_patterns( analyze_pattern_field_positions(tree, &mut patterns, &pattern_lookup); patterns.sort_by(|a, b| b.fields.len().cmp(&a.fields.len())); - (patterns, concrete_to_pattern) + (patterns, concrete_to_pattern, type_mappings) } /// Detect generic patterns by grouping signatures by their normalized form. +/// Returns (patterns, concrete_to_pattern, concrete_to_type_param). 
fn detect_generic_patterns( signature_to_pattern: &HashMap, String>, -) -> (Vec, HashMap, String>) { - let mut normalized_groups: HashMap, Vec<(Vec, String)>> = - HashMap::new(); +) -> ( + Vec, + HashMap, String>, + HashMap, String>, +) { + // Group by normalized form, tracking the extracted type for each concrete signature + let mut normalized_groups: HashMap< + Vec, + Vec<(Vec, String, String)>, + > = HashMap::new(); for (fields, name) in signature_to_pattern { - if let Some(normalized) = normalize_fields_for_generic(fields) { + if let Some((normalized, extracted_type)) = normalize_fields_for_generic(fields) { normalized_groups .entry(normalized) .or_default() - .push((fields.clone(), name.clone())); + .push((fields.clone(), name.clone(), extracted_type)); } } let mut patterns = Vec::new(); - let mut mappings: HashMap, String> = HashMap::new(); + let mut pattern_mappings: HashMap, String> = HashMap::new(); + let mut type_mappings: HashMap, String> = HashMap::new(); for (normalized_fields, group) in normalized_groups { if group.len() >= 2 { let generic_name = group[0].1.clone(); - for (concrete_fields, _) in &group { - mappings.insert(concrete_fields.clone(), generic_name.clone()); + for (concrete_fields, _, extracted_type) in &group { + pattern_mappings.insert(concrete_fields.clone(), generic_name.clone()); + type_mappings.insert(concrete_fields.clone(), extracted_type.clone()); } patterns.push(StructuralPattern { name: generic_name, @@ -101,11 +117,12 @@ fn detect_generic_patterns( } } - (patterns, mappings) + (patterns, pattern_mappings, type_mappings) } /// Normalize fields by replacing concrete value types with "T". -fn normalize_fields_for_generic(fields: &[PatternField]) -> Option> { +/// Returns (normalized_fields, extracted_type) where extracted_type is the concrete type replaced. 
+fn normalize_fields_for_generic(fields: &[PatternField]) -> Option<(Vec, String)> { let leaf_types: Vec<&str> = fields .iter() .filter(|f| f.is_leaf()) @@ -137,7 +154,7 @@ fn normalize_fields_for_generic(fields: &[PatternField]) -> Option leaf.name().to_string(), TreeNode::Branch(_) => { - if let Some(desc_leaf_name) = get_descendant_leaf_name(child_node) { + if let Some(desc_leaf_name) = get_first_leaf_name(child_node) { infer_accumulated_name(accumulated_name, field_name, &desc_leaf_name) } else if accumulated_name.is_empty() { field_name.clone() @@ -296,13 +313,6 @@ fn collect_pattern_instances( } } -fn get_descendant_leaf_name(node: &TreeNode) -> Option { - match node { - TreeNode::Leaf(leaf) => Some(leaf.name().to_string()), - TreeNode::Branch(children) => children.values().find_map(get_descendant_leaf_name), - } -} - fn infer_accumulated_name(parent_acc: &str, field_name: &str, descendant_leaf: &str) -> String { if let Some(pos) = descendant_leaf.find(field_name) { if pos == 0 { @@ -324,40 +334,6 @@ fn infer_accumulated_name(parent_acc: &str, field_name: &str, descendant_leaf: & } } -fn get_node_fields_for_analysis( - children: &BTreeMap, - pattern_lookup: &HashMap, String>, -) -> Vec { - let mut fields: Vec = children - .iter() - .map(|(name, node)| { - let (rust_type, json_type, indexes) = match node { - TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), - schema_to_json_type(&leaf.schema), - leaf.indexes().clone(), - ), - TreeNode::Branch(grandchildren) => { - let child_fields = get_node_fields_for_analysis(grandchildren, pattern_lookup); - let pattern_name = pattern_lookup - .get(&child_fields) - .cloned() - .unwrap_or_else(|| "Unknown".to_string()); - (pattern_name.clone(), pattern_name, BTreeSet::new()) - } - }; - PatternField { - name: name.clone(), - rust_type, - json_type, - indexes, - } - }) - .collect(); - fields.sort_by(|a, b| a.name.cmp(&b.name)); - fields -} - fn analyze_field_positions_from_instances( instances: &[(String, String, 
String)], ) -> HashMap { diff --git a/crates/brk_binder/src/types/tree.rs b/crates/brk_binder/src/types/tree.rs index 8b46f5811..1d01291cf 100644 --- a/crates/brk_binder/src/types/tree.rs +++ b/crates/brk_binder/src/types/tree.rs @@ -59,42 +59,6 @@ pub fn get_node_fields( fields } -/// Like get_node_fields but takes a parent name for generating child pattern names. -pub fn get_node_fields_with_parent( - children: &BTreeMap, - parent_name: &str, - pattern_lookup: &HashMap, String>, -) -> Vec { - let mut fields: Vec = children - .iter() - .map(|(name, node)| { - let (rust_type, json_type, indexes) = match node { - TreeNode::Leaf(leaf) => ( - leaf.value_type().to_string(), - schema_to_json_type(&leaf.schema), - leaf.indexes().clone(), - ), - TreeNode::Branch(grandchildren) => { - let child_fields = get_node_fields(grandchildren, pattern_lookup); - let pattern_name = pattern_lookup - .get(&child_fields) - .cloned() - .unwrap_or_else(|| format!("{}_{}", parent_name, to_pascal_case(name))); - (pattern_name.clone(), pattern_name, BTreeSet::new()) - } - }; - PatternField { - name: name.clone(), - rust_type, - json_type, - indexes, - } - }) - .collect(); - fields.sort_by(|a, b| a.name.cmp(&b.name)); - fields -} - /// Get fields with child field information for generic pattern lookup. /// Returns (field, child_fields) pairs where child_fields is Some for branches. 
pub fn get_fields_with_child_info( diff --git a/crates/brk_computer/examples/computer_read.rs b/crates/brk_computer/examples/computer_read.rs index 280e1723c..8be7a92f7 100644 --- a/crates/brk_computer/examples/computer_read.rs +++ b/crates/brk_computer/examples/computer_read.rs @@ -35,7 +35,7 @@ fn run() -> Result<()> { let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?; let _a = dbg!(computer.chain.txinindex_to_value.region().meta()); - let _b = dbg!(indexer.vecs.txout.txoutindex_to_value.region().meta()); + let _b = dbg!(indexer.vecs.txout.txoutindex_to_txoutdata.region().meta()); Ok(()) } diff --git a/crates/brk_computer/src/chain/compute.rs b/crates/brk_computer/src/chain/compute.rs index 9ebc299ba..0c2414523 100644 --- a/crates/brk_computer/src/chain/compute.rs +++ b/crates/brk_computer/src/chain/compute.rs @@ -4,7 +4,7 @@ use brk_types::{ CheckedSub, FeeRate, HalvingEpoch, Height, ONE_DAY_IN_SEC_F64, Sats, StoredF32, StoredF64, StoredU32, StoredU64, Timestamp, TxOutIndex, TxVersion, }; -use vecdb::{Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex, unlikely}; +use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex, unlikely}; use crate::{grouped::ComputedVecsFromHeight, indexes, price, utils::OptionExt, Indexes}; @@ -275,39 +275,11 @@ impl Vecs { // TxInIndex // --- - let txindex_to_first_txoutindex = &indexer.vecs.tx.txindex_to_first_txoutindex; - let txindex_to_first_txoutindex_reader = txindex_to_first_txoutindex.create_reader(); - let txoutindex_to_value = &indexer.vecs.txout.txoutindex_to_value; - let txoutindex_to_value_reader = indexer.vecs.txout.txoutindex_to_value.create_reader(); - self.txinindex_to_value.compute_transform( - starting_indexes.txinindex, - &indexer.vecs.txin.txinindex_to_outpoint, - |(txinindex, outpoint, ..)| { - if unlikely(outpoint.is_coinbase()) { - return (txinindex, Sats::MAX); - } - let txoutindex = txindex_to_first_txoutindex - .read_unwrap(outpoint.txindex(), 
&txindex_to_first_txoutindex_reader) - + outpoint.vout(); - - let value = if unlikely(txoutindex == TxOutIndex::COINBASE) { - unreachable!() - } else { - txoutindex_to_value - .unchecked_read(txoutindex, &txoutindex_to_value_reader) - .unwrap() - }; - - (txinindex, value) - }, - exit, - )?; - self.txindex_to_input_value.compute_sum_from_indexes( starting_indexes.txindex, &indexer.vecs.tx.txindex_to_first_txinindex, &indexes.txindex_to_input_count, - &self.txinindex_to_value, + &indexer.vecs.txin.txinindex_to_value, exit, )?; @@ -393,7 +365,8 @@ impl Vecs { let mut txindex_to_first_txoutindex_iter = indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter(); - let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?; + let mut txoutindex_to_txoutdata_iter = + indexer.vecs.txout.txoutindex_to_txoutdata.iter()?; vec.compute_transform( starting_indexes.height, &indexer.vecs.tx.height_to_first_txindex, @@ -405,8 +378,9 @@ impl Vecs { let mut sats = Sats::ZERO; (first_txoutindex..first_txoutindex + usize::from(output_count)).for_each( |txoutindex| { - sats += txoutindex_to_value_iter - .get_unwrap(TxOutIndex::from(txoutindex)); + sats += txoutindex_to_txoutdata_iter + .get_unwrap(TxOutIndex::from(txoutindex)) + .value; }, ); (height, sats) diff --git a/crates/brk_computer/src/pools/mod.rs b/crates/brk_computer/src/pools/mod.rs index 445fcfaa0..8f7fa5456 100644 --- a/crates/brk_computer/src/pools/mod.rs +++ b/crates/brk_computer/src/pools/mod.rs @@ -126,9 +126,8 @@ impl Vecs { let mut txindex_to_first_txoutindex_iter = indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter(); - let mut txoutindex_to_outputtype_iter = - indexer.vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = indexer.vecs.txout.txoutindex_to_typeindex.iter()?; + let mut txoutindex_to_txoutdata_iter = + 
indexer.vecs.txout.txoutindex_to_txoutdata.iter()?; let mut p2pk65addressindex_to_p2pk65bytes_iter = indexer .vecs .address @@ -181,8 +180,9 @@ impl Vecs { let pool = (*txoutindex..(*txoutindex + *outputcount)) .map(TxOutIndex::from) .find_map(|txoutindex| { - let outputtype = txoutindex_to_outputtype_iter.get_unwrap(txoutindex); - let typeindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex); + let txoutdata = txoutindex_to_txoutdata_iter.get_unwrap(txoutindex); + let outputtype = txoutdata.outputtype; + let typeindex = txoutdata.typeindex; match outputtype { OutputType::P2PK65 => Some(AddressBytes::from( diff --git a/crates/brk_computer/src/stateful/compute/block_loop.rs b/crates/brk_computer/src/stateful/compute/block_loop.rs index f5899d271..389035bba 100644 --- a/crates/brk_computer/src/stateful/compute/block_loop.rs +++ b/crates/brk_computer/src/stateful/compute/block_loop.rs @@ -24,8 +24,8 @@ use crate::{ address::AddressTypeToAddressCount, compute::write::{process_address_updates, write}, process::{ - AddressCache, InputsResult, build_txoutindex_to_height_map, process_inputs, - process_outputs, process_received, process_sent, + AddressCache, InputsResult, process_inputs, process_outputs, process_received, + process_sent, }, states::{BlockState, Transacted}, }, @@ -38,8 +38,8 @@ use super::{ vecs::Vecs, }, BIP30_DUPLICATE_HEIGHT_1, BIP30_DUPLICATE_HEIGHT_2, BIP30_ORIGINAL_HEIGHT_1, - BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, IndexerReaders, TxInIterators, - TxOutIterators, VecsReaders, build_txinindex_to_txindex, build_txoutindex_to_txindex, + BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, TxInIterators, TxOutIterators, + VecsReaders, build_txinindex_to_txindex, build_txoutindex_to_txindex, }; /// Process all blocks from starting_height to last_height. 
@@ -124,15 +124,8 @@ pub fn process_blocks( let mut height_to_price_iter = height_to_price.map(|v| v.into_iter()); let mut dateindex_to_price_iter = dateindex_to_price.map(|v| v.into_iter()); - info!("Building txoutindex_to_height map..."); - - // Build txoutindex -> height map for input processing - let txoutindex_to_height = build_txoutindex_to_height_map(height_to_first_txoutindex); - info!("Creating readers..."); - // Create readers for parallel data access - let ir = IndexerReaders::new(indexer); let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data); // Create reusable iterators for sequential txout/txin reads (16KB buffered) @@ -273,14 +266,14 @@ pub fn process_blocks( // Collect output/input data using reusable iterators (16KB buffered reads) // Must be done before thread::scope since iterators aren't Send - let (output_values, output_types, output_typeindexes) = - txout_iters.collect_block_outputs(first_txoutindex, output_count); + let txoutdata_vec = txout_iters.collect_block_outputs(first_txoutindex, output_count); - let input_outpoints = if input_count > 1 { - txin_iters.collect_block_outpoints(first_txinindex + 1, input_count - 1) - } else { - Vec::new() - }; + let (input_values, input_prev_heights, input_outputtypes, input_typeindexes) = + if input_count > 1 { + txin_iters.collect_block_inputs(first_txinindex + 1, input_count - 1) + } else { + (Vec::new(), Vec::new(), Vec::new(), Vec::new()) + }; // Process outputs and inputs in parallel with tick-tock let (outputs_result, inputs_result) = thread::scope(|scope| { @@ -293,11 +286,8 @@ pub fn process_blocks( let outputs_handle = scope.spawn(|| { // Process outputs (receive) process_outputs( - output_count, &txoutindex_to_txindex, - &output_values, - &output_types, - &output_typeindexes, + &txoutdata_vec, &first_addressindexes, &cache, &vr, @@ -309,16 +299,12 @@ pub fn process_blocks( // Process inputs (send) - skip coinbase input let inputs_result = if input_count > 1 { 
process_inputs( - first_txinindex + 1, // Skip coinbase input_count - 1, &txinindex_to_txindex[1..], // Skip coinbase - &input_outpoints, - &indexer.vecs.tx.txindex_to_first_txoutindex, - &indexer.vecs.txout.txoutindex_to_value, - &indexer.vecs.txout.txoutindex_to_outputtype, - &indexer.vecs.txout.txoutindex_to_typeindex, - &txoutindex_to_height, - &ir, + &input_values, + &input_outputtypes, + &input_typeindexes, + &input_prev_heights, &first_addressindexes, &cache, &vr, @@ -331,7 +317,6 @@ pub fn process_blocks( sent_data: Default::default(), address_data: Default::default(), txindex_vecs: Default::default(), - txoutindex_to_txinindex_updates: Default::default(), } }; @@ -426,12 +411,6 @@ pub fn process_blocks( vecs.utxo_cohorts.send(height_to_sent, chain_state); }); - // Update txoutindex_to_txinindex - vecs.update_txoutindex_to_txinindex( - output_count, - inputs_result.txoutindex_to_txinindex_updates, - )?; - // Push to height-indexed vectors vecs.height_to_unspendable_supply .truncate_push(height, unspendable_supply)?; diff --git a/crates/brk_computer/src/stateful/compute/mod.rs b/crates/brk_computer/src/stateful/compute/mod.rs index 3d993109a..828259c47 100644 --- a/crates/brk_computer/src/stateful/compute/mod.rs +++ b/crates/brk_computer/src/stateful/compute/mod.rs @@ -17,7 +17,7 @@ mod write; pub use block_loop::process_blocks; pub use context::ComputeContext; pub use readers::{ - IndexerReaders, TxInIterators, TxOutIterators, VecsReaders, build_txinindex_to_txindex, + TxInIterators, TxOutIterators, VecsReaders, build_txinindex_to_txindex, build_txoutindex_to_txindex, }; pub use recover::{StartMode, determine_start_mode, recover_state, reset_state}; diff --git a/crates/brk_computer/src/stateful/compute/readers.rs b/crates/brk_computer/src/stateful/compute/readers.rs index 123327ce7..b3c8594cf 100644 --- a/crates/brk_computer/src/stateful/compute/readers.rs +++ b/crates/brk_computer/src/stateful/compute/readers.rs @@ -4,7 +4,9 @@ use 
brk_grouper::{ByAddressType, ByAnyAddress}; use brk_indexer::Indexer; -use brk_types::{OutPoint, OutputType, Sats, StoredU64, TxInIndex, TxIndex, TxOutIndex, TypeIndex}; +use brk_types::{ + Height, OutputType, Sats, StoredU64, TxInIndex, TxIndex, TxOutData, TxOutIndex, TypeIndex, +}; use vecdb::{ BoxedVecIterator, BytesVecIterator, GenericStoredVec, PcodecVecIterator, Reader, VecIndex, VecIterator, @@ -12,45 +14,18 @@ use vecdb::{ use crate::stateful::address::{AddressesDataVecs, AnyAddressIndexesVecs}; -/// Cached readers for indexer vectors. -pub struct IndexerReaders { - pub txindex_to_first_txoutindex: Reader, - pub txoutindex_to_value: Reader, - pub txoutindex_to_outputtype: Reader, - pub txoutindex_to_typeindex: Reader, -} - -impl IndexerReaders { - pub fn new(indexer: &Indexer) -> Self { - Self { - txindex_to_first_txoutindex: indexer - .vecs - .tx - .txindex_to_first_txoutindex - .create_reader(), - txoutindex_to_value: indexer.vecs.txout.txoutindex_to_value.create_reader(), - txoutindex_to_outputtype: indexer.vecs.txout.txoutindex_to_outputtype.create_reader(), - txoutindex_to_typeindex: indexer.vecs.txout.txoutindex_to_typeindex.create_reader(), - } - } -} - /// Reusable iterators for txout vectors (16KB buffered reads). /// /// Iterators are created once and re-positioned each block to avoid /// creating new file handles repeatedly. 
pub struct TxOutIterators<'a> { - value_iter: BytesVecIterator<'a, TxOutIndex, Sats>, - outputtype_iter: BytesVecIterator<'a, TxOutIndex, OutputType>, - typeindex_iter: BytesVecIterator<'a, TxOutIndex, TypeIndex>, + txoutdata_iter: BytesVecIterator<'a, TxOutIndex, TxOutData>, } impl<'a> TxOutIterators<'a> { pub fn new(indexer: &'a Indexer) -> Self { Self { - value_iter: indexer.vecs.txout.txoutindex_to_value.into_iter(), - outputtype_iter: indexer.vecs.txout.txoutindex_to_outputtype.into_iter(), - typeindex_iter: indexer.vecs.txout.txoutindex_to_typeindex.into_iter(), + txoutdata_iter: indexer.vecs.txout.txoutindex_to_txoutdata.into_iter(), } } @@ -59,43 +34,50 @@ impl<'a> TxOutIterators<'a> { &mut self, first_txoutindex: usize, output_count: usize, - ) -> (Vec, Vec, Vec) { - let mut values = Vec::with_capacity(output_count); - let mut output_types = Vec::with_capacity(output_count); - let mut type_indexes = Vec::with_capacity(output_count); - - for i in first_txoutindex..first_txoutindex + output_count { - values.push(self.value_iter.get_at_unwrap(i)); - output_types.push(self.outputtype_iter.get_at_unwrap(i)); - type_indexes.push(self.typeindex_iter.get_at_unwrap(i)); - } - - (values, output_types, type_indexes) + ) -> Vec { + (first_txoutindex..first_txoutindex + output_count) + .map(|i| self.txoutdata_iter.get_at_unwrap(i)) + .collect() } } -/// Reusable iterator for txin outpoints (PcoVec - avoids repeated page decompression). +/// Reusable iterators for txin vectors (PcoVec - avoids repeated page decompression). 
pub struct TxInIterators<'a> { - outpoint_iter: PcodecVecIterator<'a, TxInIndex, OutPoint>, + value_iter: PcodecVecIterator<'a, TxInIndex, Sats>, + prev_height_iter: PcodecVecIterator<'a, TxInIndex, Height>, + outputtype_iter: PcodecVecIterator<'a, TxInIndex, OutputType>, + typeindex_iter: PcodecVecIterator<'a, TxInIndex, TypeIndex>, } impl<'a> TxInIterators<'a> { pub fn new(indexer: &'a Indexer) -> Self { Self { - outpoint_iter: indexer.vecs.txin.txinindex_to_outpoint.into_iter(), + value_iter: indexer.vecs.txin.txinindex_to_value.into_iter(), + prev_height_iter: indexer.vecs.txin.txinindex_to_prev_height.into_iter(), + outputtype_iter: indexer.vecs.txin.txinindex_to_outputtype.into_iter(), + typeindex_iter: indexer.vecs.txin.txinindex_to_typeindex.into_iter(), } } - /// Collect outpoints for a block range using buffered iteration. - /// This avoids repeated PcoVec page decompression (~1000x speedup). - pub fn collect_block_outpoints( + /// Collect input data for a block range using buffered iteration. 
+ pub fn collect_block_inputs( &mut self, first_txinindex: usize, input_count: usize, - ) -> Vec { - (first_txinindex..first_txinindex + input_count) - .map(|i| self.outpoint_iter.get_at_unwrap(i)) - .collect() + ) -> (Vec, Vec, Vec, Vec) { + let mut values = Vec::with_capacity(input_count); + let mut prev_heights = Vec::with_capacity(input_count); + let mut outputtypes = Vec::with_capacity(input_count); + let mut typeindexes = Vec::with_capacity(input_count); + + for i in first_txinindex..first_txinindex + input_count { + values.push(self.value_iter.get_at_unwrap(i)); + prev_heights.push(self.prev_height_iter.get_at_unwrap(i)); + outputtypes.push(self.outputtype_iter.get_at_unwrap(i)); + typeindexes.push(self.typeindex_iter.get_at_unwrap(i)); + } + + (values, prev_heights, outputtypes, typeindexes) } } diff --git a/crates/brk_computer/src/stateful/compute/recover.rs b/crates/brk_computer/src/stateful/compute/recover.rs index a1f62b17a..db7c6ef86 100644 --- a/crates/brk_computer/src/stateful/compute/recover.rs +++ b/crates/brk_computer/src/stateful/compute/recover.rs @@ -27,7 +27,6 @@ pub struct RecoveredState { pub fn recover_state( height: Height, chain_state_rollback: vecdb::Result, - txoutindex_rollback: vecdb::Result, any_address_indexes: &mut AnyAddressIndexesVecs, addresses_data: &mut AddressesDataVecs, utxo_cohorts: &mut UTXOCohorts, @@ -42,7 +41,6 @@ pub fn recover_state( // Verify rollback consistency - all must agree on the same height let consistent_height = rollback_states( chain_state_rollback, - txoutindex_rollback, address_indexes_rollback, address_data_rollback, ); @@ -127,7 +125,6 @@ pub enum StartMode { /// otherwise returns Height::ZERO (need fresh start). 
fn rollback_states( chain_state_rollback: vecdb::Result, - txoutindex_rollback: vecdb::Result, address_indexes_rollbacks: Result>, address_data_rollbacks: Result<[Stamp; 2]>, ) -> Height { @@ -139,11 +136,6 @@ fn rollback_states( }; heights.insert(Height::from(s).incremented()); - let Ok(s) = txoutindex_rollback else { - return Height::ZERO; - }; - heights.insert(Height::from(s).incremented()); - let Ok(stamps) = address_indexes_rollbacks else { return Height::ZERO; }; diff --git a/crates/brk_computer/src/stateful/compute/write.rs b/crates/brk_computer/src/stateful/compute/write.rs index c219a6692..d07325706 100644 --- a/crates/brk_computer/src/stateful/compute/write.rs +++ b/crates/brk_computer/src/stateful/compute/write.rs @@ -89,9 +89,6 @@ pub fn write( vecs.addresstype_to_height_to_empty_addr_count .par_iter_mut(), ) - .chain(rayon::iter::once( - &mut vecs.txoutindex_to_txinindex as &mut dyn AnyStoredVec, - )) .chain(rayon::iter::once( &mut vecs.chain_state as &mut dyn AnyStoredVec, )) diff --git a/crates/brk_computer/src/stateful/mod.rs b/crates/brk_computer/src/stateful/mod.rs index cd06f7489..b900fc861 100644 --- a/crates/brk_computer/src/stateful/mod.rs +++ b/crates/brk_computer/src/stateful/mod.rs @@ -39,8 +39,3 @@ pub use address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexe // Cohort re-exports pub use cohorts::{AddressCohorts, CohortVecs, DynCohortVecs, UTXOCohorts}; - -// Compute re-exports -pub use compute::IndexerReaders; - -// Metrics re-exports diff --git a/crates/brk_computer/src/stateful/process/inputs.rs b/crates/brk_computer/src/stateful/process/inputs.rs index 9481b84f4..0df89c242 100644 --- a/crates/brk_computer/src/stateful/process/inputs.rs +++ b/crates/brk_computer/src/stateful/process/inputs.rs @@ -1,24 +1,20 @@ //! Parallel input processing. -//! -//! Processes a block's inputs (spent UTXOs) in parallel, building: -//! - height_to_sent: map from creation height -> Transacted for sends -//! 
- Address data for address cohort tracking (optional) use brk_grouper::ByAddressType; -use brk_types::{Height, OutPoint, OutputType, Sats, TxInIndex, TxIndex, TxOutIndex, TypeIndex}; +use brk_types::{Height, OutputType, Sats, TxIndex, TypeIndex}; use rayon::prelude::*; use rustc_hash::FxHashMap; -use vecdb::{BytesVec, GenericStoredVec}; -use crate::stateful::address::{ - AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs, +use crate::stateful::{ + address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs}, + compute::VecsReaders, + states::Transacted, }; -use crate::stateful::compute::VecsReaders; -use crate::stateful::states::Transacted; -use crate::stateful::{IndexerReaders, process::RangeMap}; -use super::super::address::HeightToAddressTypeToVec; -use super::{load_uncached_address_data, AddressCache, LoadedAddressDataWithSource, TxIndexVec}; +use super::{ + super::address::HeightToAddressTypeToVec, AddressCache, LoadedAddressDataWithSource, + TxIndexVec, load_uncached_address_data, +}; /// Result of processing inputs for a block. pub struct InputsResult { @@ -30,8 +26,6 @@ pub struct InputsResult { pub address_data: AddressTypeToTypeIndexMap, /// Transaction indexes per address for tx_count tracking. pub txindex_vecs: AddressTypeToTypeIndexMap, - /// Updates to txoutindex_to_txinindex: (spent txoutindex, spending txinindex). - pub txoutindex_to_txinindex_updates: Vec<(TxOutIndex, TxInIndex)>, } /// Process inputs (spent UTXOs) for a block. @@ -49,52 +43,32 @@ pub struct InputsResult { /// expensive merge overhead from rayon's fold/reduce pattern. 
#[allow(clippy::too_many_arguments)] pub fn process_inputs( - first_txinindex: usize, input_count: usize, txinindex_to_txindex: &[TxIndex], - // Pre-collected outpoints (from reusable iterator with page caching) - outpoints: &[OutPoint], - txindex_to_first_txoutindex: &BytesVec, - txoutindex_to_value: &BytesVec, - txoutindex_to_outputtype: &BytesVec, - txoutindex_to_typeindex: &BytesVec, - txoutindex_to_height: &RangeMap, - ir: &IndexerReaders, - // Address lookup parameters + txinindex_to_value: &[Sats], + txinindex_to_outputtype: &[OutputType], + txinindex_to_typeindex: &[TypeIndex], + txinindex_to_prev_height: &[Height], first_addressindexes: &ByAddressType, cache: &AddressCache, vr: &VecsReaders, any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, ) -> InputsResult { - // Parallel reads - collect all input data (outpoints already in memory) let items: Vec<_> = (0..input_count) .into_par_iter() .map(|local_idx| { - let txinindex = TxInIndex::from(first_txinindex + local_idx); let txindex = txinindex_to_txindex[local_idx]; - // Get outpoint from pre-collected vec and resolve to txoutindex - let outpoint = outpoints[local_idx]; - let first_txoutindex = txindex_to_first_txoutindex - .read_unwrap(outpoint.txindex(), &ir.txindex_to_first_txoutindex); - let txoutindex = first_txoutindex + outpoint.vout(); + let prev_height = *txinindex_to_prev_height.get(local_idx).unwrap(); + let value = *txinindex_to_value.get(local_idx).unwrap(); + let input_type = *txinindex_to_outputtype.get(local_idx).unwrap(); - // Get creation height - let prev_height = *txoutindex_to_height.get(txoutindex).unwrap(); - - // Get value and type from the output being spent - let value = txoutindex_to_value.read_unwrap(txoutindex, &ir.txoutindex_to_value); - let input_type = - txoutindex_to_outputtype.read_unwrap(txoutindex, &ir.txoutindex_to_outputtype); - - // Non-address inputs don't need typeindex or address lookup if input_type.is_not_address() { - return 
(txinindex, txoutindex, prev_height, value, input_type, None); + return (prev_height, value, input_type, None); } - let typeindex = - txoutindex_to_typeindex.read_unwrap(txoutindex, &ir.txoutindex_to_typeindex); + let typeindex = *txinindex_to_typeindex.get(local_idx).unwrap(); // Look up address data let addr_data_opt = load_uncached_address_data( @@ -108,8 +82,6 @@ pub fn process_inputs( ); ( - txinindex, - txoutindex, prev_height, value, input_type, @@ -131,16 +103,13 @@ pub fn process_inputs( AddressTypeToTypeIndexMap::::with_capacity(estimated_per_type); let mut txindex_vecs = AddressTypeToTypeIndexMap::::with_capacity(estimated_per_type); - let mut txoutindex_to_txinindex_updates = Vec::with_capacity(input_count); - for (txinindex, txoutindex, prev_height, value, output_type, addr_info) in items { + for (prev_height, value, output_type, addr_info) in items { height_to_sent .entry(prev_height) .or_default() .iterate(value, output_type); - txoutindex_to_txinindex_updates.push((txoutindex, txinindex)); - if let Some((typeindex, txindex, value, addr_data_opt)) = addr_info { sent_data .entry(prev_height) @@ -167,7 +136,5 @@ pub fn process_inputs( sent_data, address_data, txindex_vecs, - txoutindex_to_txinindex_updates, } } - diff --git a/crates/brk_computer/src/stateful/process/mod.rs b/crates/brk_computer/src/stateful/process/mod.rs index c85e84644..00ee345ce 100644 --- a/crates/brk_computer/src/stateful/process/mod.rs +++ b/crates/brk_computer/src/stateful/process/mod.rs @@ -3,7 +3,6 @@ mod cache; mod inputs; mod lookup; mod outputs; -mod range_map; mod received; mod sent; mod tx_counts; @@ -14,7 +13,6 @@ pub use cache::*; pub use inputs::*; pub use lookup::*; pub use outputs::*; -pub use range_map::*; pub use received::*; pub use sent::*; pub use tx_counts::*; diff --git a/crates/brk_computer/src/stateful/process/outputs.rs b/crates/brk_computer/src/stateful/process/outputs.rs index e8b21e8cd..9e8a245a7 100644 --- 
a/crates/brk_computer/src/stateful/process/outputs.rs +++ b/crates/brk_computer/src/stateful/process/outputs.rs @@ -5,7 +5,7 @@ //! - Address data for address cohort tracking (optional) use brk_grouper::ByAddressType; -use brk_types::{OutputType, Sats, TxIndex, TypeIndex}; +use brk_types::{Sats, TxIndex, TxOutData, TypeIndex}; use crate::stateful::address::{ AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs, @@ -37,19 +37,16 @@ pub struct OutputsResult { /// 4. Track address-specific data for address cohort processing #[allow(clippy::too_many_arguments)] pub fn process_outputs( - output_count: usize, txoutindex_to_txindex: &[TxIndex], - // Pre-collected output data (from reusable iterators with 16KB buffered reads) - values: &[Sats], - output_types: &[OutputType], - typeindexes: &[TypeIndex], - // Address lookup parameters + txoutdata_vec: &[TxOutData], first_addressindexes: &ByAddressType, cache: &AddressCache, vr: &VecsReaders, any_address_indexes: &AnyAddressIndexesVecs, addresses_data: &AddressesDataVecs, ) -> OutputsResult { + let output_count = txoutdata_vec.len(); + // Pre-allocate result structures let estimated_per_type = (output_count / 8).max(8); let mut transacted = Transacted::default(); @@ -60,10 +57,10 @@ pub fn process_outputs( AddressTypeToTypeIndexMap::::with_capacity(estimated_per_type); // Single pass: read from pre-collected vecs and accumulate - for local_idx in 0..output_count { + for (local_idx, txoutdata) in txoutdata_vec.iter().enumerate() { let txindex = txoutindex_to_txindex[local_idx]; - let value = values[local_idx]; - let output_type = output_types[local_idx]; + let value = txoutdata.value; + let output_type = txoutdata.outputtype; transacted.iterate(value, output_type); @@ -71,7 +68,7 @@ pub fn process_outputs( continue; } - let typeindex = typeindexes[local_idx]; + let typeindex = txoutdata.typeindex; received_data .get_mut(output_type) diff --git a/crates/brk_computer/src/stateful/process/range_map.rs 
b/crates/brk_computer/src/stateful/process/range_map.rs deleted file mode 100644 index 0d33bf052..000000000 --- a/crates/brk_computer/src/stateful/process/range_map.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Range-based lookup map. -//! -//! Maps ranges of indices to values for efficient reverse lookups. - -use std::collections::BTreeMap; - -use brk_types::{Height, TxOutIndex}; -use vecdb::{BytesVec, BytesVecValue, PcoVec, PcoVecValue, VecIndex}; - -/// Maps ranges of indices to their corresponding height. -/// Used to efficiently look up which block a txoutindex belongs to. -#[derive(Debug)] -pub struct RangeMap(BTreeMap); - -impl RangeMap -where - I: VecIndex, - T: VecIndex, -{ - /// Look up value for a key using range search. - /// Returns the value associated with the largest key <= given key. - #[inline] - pub fn get(&self, key: I) -> Option<&T> { - self.0.range(..=key).next_back().map(|(_, value)| value) - } -} - -impl From<&BytesVec> for RangeMap -where - I: VecIndex, - T: VecIndex + BytesVecValue, -{ - #[inline] - fn from(vec: &BytesVec) -> Self { - Self( - vec.into_iter() - .enumerate() - .map(|(i, v)| (v, I::from(i))) - .collect(), - ) - } -} - -impl From<&PcoVec> for RangeMap -where - I: VecIndex, - T: VecIndex + PcoVecValue, -{ - #[inline] - fn from(vec: &PcoVec) -> Self { - Self( - vec.into_iter() - .enumerate() - .map(|(i, v)| (v, I::from(i))) - .collect(), - ) - } -} - -/// Creates a RangeMap from height_to_first_txoutindex for fast txoutindex -> height lookups. 
-pub fn build_txoutindex_to_height_map( - height_to_first_txoutindex: &PcoVec, -) -> RangeMap { - RangeMap::from(height_to_first_txoutindex) -} diff --git a/crates/brk_computer/src/stateful/vecs.rs b/crates/brk_computer/src/stateful/vecs.rs index 21576a268..8d8757ac7 100644 --- a/crates/brk_computer/src/stateful/vecs.rs +++ b/crates/brk_computer/src/stateful/vecs.rs @@ -7,11 +7,11 @@ use brk_indexer::Indexer; use brk_traversable::Traversable; use brk_types::{ Dollars, EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex, - Sats, StoredU64, TxInIndex, TxOutIndex, Version, + Sats, StoredU64, Version, }; use log::info; use vecdb::{ - AnyStoredVec, AnyVec, BytesVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, + AnyVec, BytesVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, PAGE_SIZE, PcoVec, Stamp, TypedVecIterator, VecIndex, }; @@ -47,7 +47,6 @@ pub struct Vecs { // States // --- pub chain_state: BytesVec, - pub txoutindex_to_txinindex: BytesVec, pub any_address_indexes: AnyAddressIndexesVecs, pub addresses_data: AddressesDataVecs, pub utxo_cohorts: UTXOCohorts, @@ -126,10 +125,6 @@ impl Vecs { vecdb::ImportOptions::new(&db, "chain", v0) .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), )?, - txoutindex_to_txinindex: BytesVec::forced_import_with( - vecdb::ImportOptions::new(&db, "txinindex", v0) - .with_saved_stamped_changes(SAVED_STAMPED_CHANGES), - )?, height_to_unspendable_supply: EagerVec::forced_import(&db, "unspendable_supply", v0)?, indexes_to_unspendable_supply: ComputedValueVecsFromHeight::forced_import( @@ -265,12 +260,13 @@ impl Vecs { let stateful_min = utxo_min .min(address_min) .min(Height::from(self.chain_state.len())) - .min(Height::from(self.txoutindex_to_txinindex.stamp()).incremented()) .min(self.any_address_indexes.min_stamped_height()) .min(self.addresses_data.min_stamped_height()) .min(Height::from(self.height_to_unspendable_supply.len())) 
.min(Height::from(self.height_to_opreturn_supply.len())) - .min(Height::from(self.addresstype_to_height_to_addr_count.min_len())) + .min(Height::from( + self.addresstype_to_height_to_addr_count.min_len(), + )) .min(Height::from( self.addresstype_to_height_to_empty_addr_count.min_len(), )); @@ -285,13 +281,11 @@ impl Vecs { // Rollback BytesVec state and capture results for validation let chain_state_rollback = self.chain_state.rollback_before(stamp); - let txoutindex_rollback = self.txoutindex_to_txinindex.rollback_before(stamp); // Validate all rollbacks and imports are consistent let recovered = recover_state( height, chain_state_rollback, - txoutindex_rollback, &mut self.any_address_indexes, &mut self.addresses_data, &mut self.utxo_cohorts, @@ -309,7 +303,6 @@ impl Vecs { // Fresh start: reset all state let (starting_height, mut chain_state) = if recovered_height.is_zero() { self.chain_state.reset()?; - self.txoutindex_to_txinindex.reset()?; self.height_to_unspendable_supply.reset()?; self.height_to_opreturn_supply.reset()?; self.addresstype_to_height_to_addr_count.reset()?; @@ -505,24 +498,4 @@ impl Vecs { self.db.compact()?; Ok(()) } - - /// Update txoutindex_to_txinindex for a block. - /// - /// 1. Push UNSPENT for all new outputs in the block - /// 2. 
Update spent outputs with their spending txinindex - pub fn update_txoutindex_to_txinindex( - &mut self, - output_count: usize, - updates: Vec<(TxOutIndex, TxInIndex)>, - ) -> Result<()> { - // Push UNSPENT for all new outputs in this block - for _ in 0..output_count { - self.txoutindex_to_txinindex.push(TxInIndex::UNSPENT); - } - // Update spent outputs with their spending txinindex - for (txoutindex, txinindex) in updates { - self.txoutindex_to_txinindex.update(txoutindex, txinindex)?; - } - Ok(()) - } } diff --git a/crates/brk_error/src/lib.rs b/crates/brk_error/src/lib.rs index 27a3cf852..e73da4316 100644 --- a/crates/brk_error/src/lib.rs +++ b/crates/brk_error/src/lib.rs @@ -1,6 +1,6 @@ #![doc = include_str!("../README.md")] -use std::{io, result, time}; +use std::{io, path::PathBuf, result, time}; use thiserror::Error; @@ -123,6 +123,13 @@ pub enum Error { #[error("Fetch failed after retries: {0}")] FetchFailed(String), + + #[error("Version mismatch at {path:?}: expected {expected}, found {found}")] + VersionMismatch { + path: PathBuf, + expected: usize, + found: usize, + }, } diff --git a/crates/brk_grouper/src/by_type.rs b/crates/brk_grouper/src/by_type.rs index 672acdc92..9b8743dc3 100644 --- a/crates/brk_grouper/src/by_type.rs +++ b/crates/brk_grouper/src/by_type.rs @@ -25,7 +25,6 @@ impl GroupedByType { OutputType::Empty => &self.spendable.empty, OutputType::Unknown => &self.spendable.unknown, OutputType::OpReturn => &self.unspendable.opreturn, - _ => unreachable!(), } } @@ -43,7 +42,6 @@ impl GroupedByType { OutputType::Unknown => &mut self.spendable.unknown, OutputType::Empty => &mut self.spendable.empty, OutputType::OpReturn => &mut self.unspendable.opreturn, - _ => unreachable!(), } } } diff --git a/crates/brk_indexer/examples/indexer_read.rs b/crates/brk_indexer/examples/indexer_read.rs index 2577a2b17..982d42000 100644 --- a/crates/brk_indexer/examples/indexer_read.rs +++ b/crates/brk_indexer/examples/indexer_read.rs @@ -29,7 +29,7 @@ fn main() -> 
Result<()> { indexer .vecs .txout - .txoutindex_to_value + .txoutindex_to_txoutdata .iter()? .enumerate() .take(200) diff --git a/crates/brk_indexer/examples/indexer_read_speed.rs b/crates/brk_indexer/examples/indexer_read_speed.rs index b14dab7e4..471479774 100644 --- a/crates/brk_indexer/examples/indexer_read_speed.rs +++ b/crates/brk_indexer/examples/indexer_read_speed.rs @@ -13,9 +13,8 @@ fn run_benchmark(indexer: &Indexer) -> (Sats, std::time::Duration, usize) { let mut sum = Sats::ZERO; let mut count = 0; - for value in indexer.vecs.txout.txoutindex_to_value.clean_iter().unwrap() { - // for value in indexer.vecs.txoutindex_to_value.values() { - sum += value; + for txoutdata in indexer.vecs.txout.txoutindex_to_txoutdata.clean_iter().unwrap() { + sum += txoutdata.value; count += 1; } diff --git a/crates/brk_indexer/src/constants.rs b/crates/brk_indexer/src/constants.rs index 39aa1a082..1f04bea41 100644 --- a/crates/brk_indexer/src/constants.rs +++ b/crates/brk_indexer/src/constants.rs @@ -4,7 +4,7 @@ use brk_types::{Height, TxIndex, Txid, TxidPrefix, Version}; // One version for all data sources // Increment on **change _OR_ addition** -pub const VERSION: Version = Version::new(23); +pub const VERSION: Version = Version::new(24); pub const SNAPSHOT_BLOCK_RANGE: usize = 1_000; pub const COLLISIONS_CHECKED_UP_TO: Height = Height::new(0); diff --git a/crates/brk_indexer/src/indexes.rs b/crates/brk_indexer/src/indexes.rs index 378ace4d3..2f763f56e 100644 --- a/crates/brk_indexer/src/indexes.rs +++ b/crates/brk_indexer/src/indexes.rs @@ -45,7 +45,6 @@ impl Indexes { OutputType::P2WPKH => *self.p2wpkhaddressindex, OutputType::P2WSH => *self.p2wshaddressindex, OutputType::Unknown => *self.unknownoutputindex, - _ => unreachable!(), } } @@ -225,7 +224,7 @@ impl From<(Height, &mut Vecs, &Stores)> for Indexes { let txoutindex = starting_index( &vecs.txout.height_to_first_txoutindex, - &vecs.txout.txoutindex_to_value, + &vecs.txout.txoutindex_to_txoutdata, height, ) 
.unwrap(); diff --git a/crates/brk_indexer/src/lib.rs b/crates/brk_indexer/src/lib.rs index 8e56d6e14..b02533690 100644 --- a/crates/brk_indexer/src/lib.rs +++ b/crates/brk_indexer/src/lib.rs @@ -1,8 +1,8 @@ #![doc = include_str!("../README.md")] -use std::{path::Path, thread, time::Instant}; +use std::{fs, path::Path, thread, time::Instant}; -use brk_error::Result; +use brk_error::{Error, Result}; use brk_iterator::Blocks; use brk_rpc::Client; use brk_types::Height; @@ -11,6 +11,7 @@ use vecdb::Exit; mod constants; mod indexes; mod processor; +mod range_map; mod readers; mod stores; mod vecs; @@ -18,6 +19,7 @@ mod vecs; use constants::*; pub use indexes::*; pub use processor::*; +pub use range_map::*; pub use readers::*; pub use stores::*; pub use vecs::*; @@ -30,6 +32,19 @@ pub struct Indexer { impl Indexer { pub fn forced_import(outputs_dir: &Path) -> Result { + match Self::forced_import_inner(outputs_dir) { + Ok(result) => Ok(result), + Err(Error::VersionMismatch { path, .. }) => { + let indexed_path = outputs_dir.join("indexed"); + info!("Version mismatch at {path:?}, deleting {indexed_path:?} and retrying"); + fs::remove_dir_all(&indexed_path)?; + Self::forced_import(outputs_dir) + } + Err(e) => Err(e), + } + } + + fn forced_import_inner(outputs_dir: &Path) -> Result { info!("Increasing number of open files limit..."); let no_file_limit = rlimit::getrlimit(rlimit::Resource::NOFILE)?; rlimit::setrlimit( @@ -129,6 +144,13 @@ impl Indexer { let mut readers = Readers::new(&self.vecs); + // Build txindex -> height map from existing data for efficient lookups + let mut txindex_to_height = RangeMap::new(); + for (height, first_txindex) in self.vecs.tx.height_to_first_txindex.into_iter().enumerate() + { + txindex_to_height.insert(first_txindex, Height::from(height)); + } + let vecs = &mut self.vecs; let stores = &mut self.stores; @@ -139,6 +161,9 @@ impl Indexer { indexes.height = height; + // Insert current block's first_txindex -> height before processing inputs + 
txindex_to_height.insert(indexes.txindex, height); + // Used to check rapidhash collisions let block_check_collisions = check_collisions && height > COLLISIONS_CHECKED_UP_TO; @@ -150,6 +175,7 @@ impl Indexer { vecs, stores, readers: &readers, + txindex_to_height: &txindex_to_height, }; // Phase 1: Process block metadata diff --git a/crates/brk_indexer/src/processor.rs b/crates/brk_indexer/src/processor.rs deleted file mode 100644 index 02ed1323d..000000000 --- a/crates/brk_indexer/src/processor.rs +++ /dev/null @@ -1,717 +0,0 @@ -use bitcoin::{Transaction, TxIn, TxOut}; -use brk_error::{Error, Result}; -use brk_grouper::ByAddressType; -use brk_types::{ - AddressBytes, AddressHash, AddressIndexOutPoint, AddressIndexTxIndex, Block, BlockHashPrefix, - Height, OutPoint, OutputType, Sats, StoredBool, Timestamp, TxInIndex, TxIndex, TxOutIndex, - Txid, TxidPrefix, TypeIndex, Unit, Vin, Vout, -}; -use log::error; -use rayon::prelude::*; -use rustc_hash::{FxHashMap, FxHashSet}; -use vecdb::{AnyVec, GenericStoredVec, TypedVecIterator, likely}; - -use crate::{Indexes, Readers, Stores, Vecs, constants::*}; - -/// Input source for tracking where an input came from. -#[derive(Debug)] -pub enum InputSource<'a> { - PreviousBlock { - vin: Vin, - txindex: TxIndex, - outpoint: OutPoint, - address_info: Option<(OutputType, TypeIndex)>, - }, - SameBlock { - txindex: TxIndex, - txin: &'a TxIn, - vin: Vin, - outpoint: OutPoint, - }, -} - -/// Processed output data from parallel output processing. -pub struct ProcessedOutput<'a> { - pub txoutindex: TxOutIndex, - pub txout: &'a TxOut, - pub txindex: TxIndex, - pub vout: Vout, - pub outputtype: OutputType, - pub address_info: Option<(AddressBytes, AddressHash)>, - pub existing_typeindex: Option, -} - -/// Computed transaction data from parallel TXID computation. 
-pub struct ComputedTx<'a> { - pub txindex: TxIndex, - pub tx: &'a Transaction, - pub txid: Txid, - pub txid_prefix: TxidPrefix, - pub prev_txindex_opt: Option, -} - -/// Processes a single block, extracting and storing all indexed data. -pub struct BlockProcessor<'a> { - pub block: &'a Block, - pub height: Height, - pub check_collisions: bool, - pub indexes: &'a mut Indexes, - pub vecs: &'a mut Vecs, - pub stores: &'a mut Stores, - pub readers: &'a Readers, -} - -impl<'a> BlockProcessor<'a> { - /// Process block metadata (blockhash, difficulty, timestamp, etc.) - pub fn process_block_metadata(&mut self) -> Result<()> { - let height = self.height; - let blockhash = self.block.hash(); - let blockhash_prefix = BlockHashPrefix::from(blockhash); - - // Check for blockhash prefix collision - if self - .stores - .blockhashprefix_to_height - .get(&blockhash_prefix)? - .is_some_and(|prev_height| *prev_height != height) - { - error!("BlockHash: {blockhash}"); - return Err(Error::Internal("BlockHash prefix collision")); - } - - self.indexes.checked_push(self.vecs)?; - - self.stores - .blockhashprefix_to_height - .insert_if_needed(blockhash_prefix, height, height); - - self.stores.height_to_coinbase_tag.insert_if_needed( - height, - self.block.coinbase_tag().into(), - height, - ); - - self.vecs - .block - .height_to_blockhash - .checked_push(height, blockhash.clone())?; - self.vecs - .block - .height_to_difficulty - .checked_push(height, self.block.header.difficulty_float().into())?; - self.vecs - .block - .height_to_timestamp - .checked_push(height, Timestamp::from(self.block.header.time))?; - self.vecs - .block - .height_to_total_size - .checked_push(height, self.block.total_size().into())?; - self.vecs - .block - .height_to_weight - .checked_push(height, self.block.weight().into())?; - - Ok(()) - } - - /// Compute TXIDs in parallel (CPU-intensive operation). 
- pub fn compute_txids(&self) -> Result>> { - let will_check_collisions = - self.check_collisions && self.stores.txidprefix_to_txindex.needs(self.height); - let base_txindex = self.indexes.txindex; - - self.block - .txdata - .par_iter() - .enumerate() - .map(|(index, tx)| { - let txid = Txid::from(tx.compute_txid()); - let txid_prefix = TxidPrefix::from(&txid); - - let prev_txindex_opt = if will_check_collisions { - self.stores - .txidprefix_to_txindex - .get(&txid_prefix)? - .map(|v| *v) - } else { - None - }; - - Ok(ComputedTx { - txindex: base_txindex + TxIndex::from(index), - tx, - txid, - txid_prefix, - prev_txindex_opt, - }) - }) - .collect() - } - - /// Process inputs in parallel. - /// - /// Uses collect().into_par_iter() pattern because: - /// 1. The inner work (store lookups, vector reads) is expensive - /// 2. We want to parallelize across ALL inputs, not just per-transaction - /// 3. The intermediate allocation (~8KB per block) is negligible compared to parallelism gains - pub fn process_inputs<'c>( - &self, - txs: &[ComputedTx<'c>], - ) -> Result)>> { - let txid_prefix_to_txindex: FxHashMap<_, _> = - txs.iter().map(|ct| (ct.txid_prefix, &ct.txindex)).collect(); - - let base_txindex = self.indexes.txindex; - let base_txinindex = self.indexes.txinindex; - - let txins = self - .block - .txdata - .iter() - .enumerate() - .flat_map(|(index, tx)| { - tx.input - .iter() - .enumerate() - .map(move |(vin, txin)| (TxIndex::from(index), Vin::from(vin), txin, tx)) - }) - .collect::>() - .into_par_iter() - .enumerate() - .map( - |(block_txinindex, (block_txindex, vin, txin, tx))| -> Result<(TxInIndex, InputSource)> { - let txindex = base_txindex + block_txindex; - let txinindex = base_txinindex + TxInIndex::from(block_txinindex); - - if tx.is_coinbase() { - return Ok(( - txinindex, - InputSource::SameBlock { - txindex, - txin, - vin, - outpoint: OutPoint::COINBASE, - }, - )); - } - - let outpoint = txin.previous_output; - let txid = Txid::from(outpoint.txid); - let 
txid_prefix = TxidPrefix::from(&txid); - let vout = Vout::from(outpoint.vout); - - if let Some(&&same_block_txindex) = txid_prefix_to_txindex - .get(&txid_prefix) { - let outpoint = OutPoint::new(same_block_txindex, vout); - return Ok(( - txinindex, - InputSource::SameBlock { - txindex, - txin, - vin, - outpoint, - }, - )); - } - - let prev_txindex = if let Some(txindex) = self - .stores - .txidprefix_to_txindex - .get(&txid_prefix)? - .map(|v| *v) - .and_then(|txindex| { - (txindex < self.indexes.txindex).then_some(txindex) - }) - { - txindex - } else { - return Err(Error::UnknownTxid); - }; - - let txoutindex = self - .vecs - .tx - .txindex_to_first_txoutindex - .get_pushed_or_read(prev_txindex, &self.readers.txindex_to_first_txoutindex)? - .ok_or(Error::Internal("Missing txoutindex"))? - + vout; - - let outpoint = OutPoint::new(prev_txindex, vout); - let outputtype = self - .vecs - .txout - .txoutindex_to_outputtype - .get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_outputtype)? - .ok_or(Error::Internal("Missing outputtype"))?; - - let address_info = if outputtype.is_address() { - let typeindex = self - .vecs - .txout - .txoutindex_to_typeindex - .get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_typeindex)? - .ok_or(Error::Internal("Missing typeindex"))?; - Some((outputtype, typeindex)) - } else { - None - }; - - Ok(( - txinindex, - InputSource::PreviousBlock { - vin, - txindex, - outpoint, - address_info, - }, - )) - }, - ) - .collect::>>()?; - - Ok(txins) - } - - /// Collect same-block spent outpoints. - pub fn collect_same_block_spent_outpoints( - txins: &[(TxInIndex, InputSource)], - ) -> FxHashSet { - txins - .iter() - .filter_map(|(_, input_source)| { - let InputSource::SameBlock { outpoint, .. } = input_source else { - return None; - }; - if !outpoint.is_coinbase() { - Some(*outpoint) - } else { - None - } - }) - .collect() - } - - /// Process outputs in parallel. 
- pub fn process_outputs(&self) -> Result>> { - let height = self.height; - let check_collisions = self.check_collisions; - - let base_txindex = self.indexes.txindex; - let base_txoutindex = self.indexes.txoutindex; - - // Same pattern as inputs: collect then parallelize for maximum parallelism - self.block - .txdata - .iter() - .enumerate() - .flat_map(|(index, tx)| { - tx.output - .iter() - .enumerate() - .map(move |(vout, txout)| (TxIndex::from(index), Vout::from(vout), txout, tx)) - }) - .collect::>() - .into_par_iter() - .enumerate() - .map( - |(block_txoutindex, (block_txindex, vout, txout, tx))| -> Result { - let txindex = base_txindex + block_txindex; - let txoutindex = base_txoutindex + TxOutIndex::from(block_txoutindex); - - let script = &txout.script_pubkey; - let outputtype = OutputType::from(script); - - if outputtype.is_not_address() { - return Ok(ProcessedOutput { - txoutindex, - txout, - txindex, - vout, - outputtype, - address_info: None, - existing_typeindex: None, - }); - } - - let addresstype = outputtype; - let address_bytes = AddressBytes::try_from((script, addresstype)).unwrap(); - let address_hash = AddressHash::from(&address_bytes); - - let existing_typeindex = self - .stores - .addresstype_to_addresshash_to_addressindex - .get_unwrap(addresstype) - .get(&address_hash) - .unwrap() - .map(|v| *v) - .and_then(|typeindex_local| { - (typeindex_local < self.indexes.to_typeindex(addresstype)) - .then_some(typeindex_local) - }); - - if check_collisions && let Some(typeindex) = existing_typeindex { - let prev_addressbytes_opt = self.vecs.get_addressbytes_by_type( - addresstype, - typeindex, - self.readers.addressbytes.get_unwrap(addresstype), - )?; - let prev_addressbytes = prev_addressbytes_opt - .as_ref() - .ok_or(Error::Internal("Missing addressbytes"))?; - - if self - .stores - .addresstype_to_addresshash_to_addressindex - .get_unwrap(addresstype) - .needs(height) - && prev_addressbytes != &address_bytes - { - let txid = tx.compute_txid(); - 
dbg!( - height, - txid, - vout, - block_txindex, - addresstype, - prev_addressbytes, - &address_bytes, - &self.indexes, - typeindex, - txout, - AddressHash::from(&address_bytes), - ); - panic!() - } - } - - Ok(ProcessedOutput { - txoutindex, - txout, - txindex, - vout, - outputtype, - address_info: Some((address_bytes, address_hash)), - existing_typeindex, - }) - }, - ) - .collect() - } - - /// Finalize outputs sequentially (stores addresses, tracks UTXOs). - pub fn finalize_outputs( - &mut self, - txouts: Vec, - same_block_spent_outpoints: &FxHashSet, - ) -> Result> { - let height = self.height; - let mut already_added_addresshash: ByAddressType> = - ByAddressType::default(); - // Pre-size based on the number of same-block spent outpoints - let mut same_block_output_info: FxHashMap = - FxHashMap::with_capacity_and_hasher( - same_block_spent_outpoints.len(), - Default::default(), - ); - - for ProcessedOutput { - txoutindex, - txout, - txindex, - vout, - outputtype, - address_info, - existing_typeindex, - } in txouts - { - let sats = Sats::from(txout.value); - - if vout.is_zero() { - self.vecs - .tx - .txindex_to_first_txoutindex - .checked_push(txindex, txoutindex)?; - } - - self.vecs - .txout - .txoutindex_to_value - .checked_push(txoutindex, sats)?; - self.vecs - .txout - .txoutindex_to_txindex - .checked_push(txoutindex, txindex)?; - self.vecs - .txout - .txoutindex_to_outputtype - .checked_push(txoutindex, outputtype)?; - - let typeindex = if let Some(ti) = existing_typeindex { - ti - } else if let Some((address_bytes, address_hash)) = address_info { - let addresstype = outputtype; - if let Some(&ti) = already_added_addresshash - .get_unwrap(addresstype) - .get(&address_hash) - { - ti - } else { - let ti = self.indexes.increment_address_index(addresstype); - - already_added_addresshash - .get_mut_unwrap(addresstype) - .insert(address_hash, ti); - self.stores - .addresstype_to_addresshash_to_addressindex - .get_mut_unwrap(addresstype) - 
.insert_if_needed(address_hash, ti, height); - self.vecs.push_bytes_if_needed(ti, address_bytes)?; - - ti - } - } else { - match outputtype { - OutputType::P2MS => { - self.vecs - .output - .p2msoutputindex_to_txindex - .checked_push(self.indexes.p2msoutputindex, txindex)?; - self.indexes.p2msoutputindex.copy_then_increment() - } - OutputType::OpReturn => { - self.vecs - .output - .opreturnindex_to_txindex - .checked_push(self.indexes.opreturnindex, txindex)?; - self.indexes.opreturnindex.copy_then_increment() - } - OutputType::Empty => { - self.vecs - .output - .emptyoutputindex_to_txindex - .checked_push(self.indexes.emptyoutputindex, txindex)?; - self.indexes.emptyoutputindex.copy_then_increment() - } - OutputType::Unknown => { - self.vecs - .output - .unknownoutputindex_to_txindex - .checked_push(self.indexes.unknownoutputindex, txindex)?; - self.indexes.unknownoutputindex.copy_then_increment() - } - _ => unreachable!(), - } - }; - - self.vecs - .txout - .txoutindex_to_typeindex - .checked_push(txoutindex, typeindex)?; - - if outputtype.is_unspendable() { - continue; - } else if outputtype.is_address() { - let addresstype = outputtype; - let addressindex = typeindex; - - self.stores - .addresstype_to_addressindex_and_txindex - .get_mut_unwrap(addresstype) - .insert_if_needed( - AddressIndexTxIndex::from((addressindex, txindex)), - Unit, - height, - ); - } - - let outpoint = OutPoint::new(txindex, vout); - - if same_block_spent_outpoints.contains(&outpoint) { - same_block_output_info.insert(outpoint, (outputtype, typeindex)); - } else if outputtype.is_address() { - let addresstype = outputtype; - let addressindex = typeindex; - - self.stores - .addresstype_to_addressindex_and_unspentoutpoint - .get_mut_unwrap(addresstype) - .insert_if_needed( - AddressIndexOutPoint::from((addressindex, outpoint)), - Unit, - height, - ); - } - } - - Ok(same_block_output_info) - } - - /// Finalize inputs sequentially (stores outpoints, updates address UTXOs). 
- pub fn finalize_inputs( - &mut self, - txins: Vec<(TxInIndex, InputSource)>, - same_block_output_info: &mut FxHashMap, - ) -> Result<()> { - let height = self.height; - - for (txinindex, input_source) in txins { - let (vin, txindex, outpoint, address_info) = match input_source { - InputSource::PreviousBlock { - vin, - txindex, - outpoint, - address_info, - } => (vin, txindex, outpoint, address_info), - InputSource::SameBlock { - txindex, - txin, - vin, - outpoint, - } => { - if outpoint.is_coinbase() { - (vin, txindex, outpoint, None) - } else { - let outputtype_typeindex = same_block_output_info - .remove(&outpoint) - .ok_or(Error::Internal("Same-block addressindex not found")) - .inspect_err(|_| { - dbg!(&same_block_output_info, txin); - })?; - let address_info = if outputtype_typeindex.0.is_address() { - Some(outputtype_typeindex) - } else { - None - }; - (vin, txindex, outpoint, address_info) - } - } - }; - - if vin.is_zero() { - self.vecs - .tx - .txindex_to_first_txinindex - .checked_push(txindex, txinindex)?; - } - - self.vecs - .txin - .txinindex_to_txindex - .checked_push(txinindex, txindex)?; - self.vecs - .txin - .txinindex_to_outpoint - .checked_push(txinindex, outpoint)?; - - let Some((addresstype, addressindex)) = address_info else { - continue; - }; - - self.stores - .addresstype_to_addressindex_and_txindex - .get_mut_unwrap(addresstype) - .insert_if_needed( - AddressIndexTxIndex::from((addressindex, txindex)), - Unit, - height, - ); - - self.stores - .addresstype_to_addressindex_and_unspentoutpoint - .get_mut_unwrap(addresstype) - .remove_if_needed(AddressIndexOutPoint::from((addressindex, outpoint)), height); - } - - Ok(()) - } - - /// Check for TXID collisions (only for known duplicate TXIDs). 
- pub fn check_txid_collisions(&self, txs: &[ComputedTx]) -> Result<()> { - if likely(!self.check_collisions) { - return Ok(()); - } - - let mut txindex_to_txid_iter = self.vecs.tx.txindex_to_txid.into_iter(); - for ct in txs.iter() { - let Some(prev_txindex) = ct.prev_txindex_opt else { - continue; - }; - - // In case if we start at an already parsed height - if ct.txindex == prev_txindex { - continue; - } - - let len = self.vecs.tx.txindex_to_txid.len(); - let prev_txid = txindex_to_txid_iter - .get(prev_txindex) - .ok_or(Error::Internal("Missing txid for txindex")) - .inspect_err(|_| { - dbg!(ct.txindex, len); - })?; - - let is_dup = DUPLICATE_TXIDS.contains(&prev_txid); - - if !is_dup { - dbg!(self.height, ct.txindex, prev_txid, prev_txindex); - return Err(Error::Internal("Unexpected TXID collision")); - } - } - - Ok(()) - } - - /// Store transaction metadata. - pub fn store_transaction_metadata(&mut self, txs: Vec) -> Result<()> { - let height = self.height; - - for ct in txs { - if ct.prev_txindex_opt.is_none() { - self.stores.txidprefix_to_txindex.insert_if_needed( - ct.txid_prefix, - ct.txindex, - height, - ); - } - - self.vecs - .tx - .txindex_to_height - .checked_push(ct.txindex, height)?; - self.vecs - .tx - .txindex_to_txversion - .checked_push(ct.txindex, ct.tx.version.into())?; - self.vecs - .tx - .txindex_to_txid - .checked_push(ct.txindex, ct.txid)?; - self.vecs - .tx - .txindex_to_rawlocktime - .checked_push(ct.txindex, ct.tx.lock_time.into())?; - self.vecs - .tx - .txindex_to_base_size - .checked_push(ct.txindex, ct.tx.base_size().into())?; - self.vecs - .tx - .txindex_to_total_size - .checked_push(ct.txindex, ct.tx.total_size().into())?; - self.vecs - .tx - .txindex_to_is_explicitly_rbf - .checked_push(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?; - } - - Ok(()) - } - - /// Update global indexes after processing a block. 
- pub fn update_indexes(&mut self, tx_count: usize, input_count: usize, output_count: usize) { - self.indexes.txindex += TxIndex::from(tx_count); - self.indexes.txinindex += TxInIndex::from(input_count); - self.indexes.txoutindex += TxOutIndex::from(output_count); - } -} diff --git a/crates/brk_indexer/src/processor/metadata.rs b/crates/brk_indexer/src/processor/metadata.rs new file mode 100644 index 000000000..0afb0bf70 --- /dev/null +++ b/crates/brk_indexer/src/processor/metadata.rs @@ -0,0 +1,63 @@ +//! Block metadata processing. + +use brk_error::{Error, Result}; +use brk_types::{BlockHashPrefix, Timestamp}; +use log::error; +use vecdb::GenericStoredVec; + +use super::BlockProcessor; + +impl BlockProcessor<'_> { + /// Process block metadata (blockhash, difficulty, timestamp, etc.) + pub fn process_block_metadata(&mut self) -> Result<()> { + let height = self.height; + let blockhash = self.block.hash(); + let blockhash_prefix = BlockHashPrefix::from(blockhash); + + // Check for blockhash prefix collision + if self + .stores + .blockhashprefix_to_height + .get(&blockhash_prefix)? 
+ .is_some_and(|prev_height| *prev_height != height) + { + error!("BlockHash: {blockhash}"); + return Err(Error::Internal("BlockHash prefix collision")); + } + + self.indexes.checked_push(self.vecs)?; + + self.stores + .blockhashprefix_to_height + .insert_if_needed(blockhash_prefix, height, height); + + self.stores.height_to_coinbase_tag.insert_if_needed( + height, + self.block.coinbase_tag().into(), + height, + ); + + self.vecs + .block + .height_to_blockhash + .checked_push(height, blockhash.clone())?; + self.vecs + .block + .height_to_difficulty + .checked_push(height, self.block.header.difficulty_float().into())?; + self.vecs + .block + .height_to_timestamp + .checked_push(height, Timestamp::from(self.block.header.time))?; + self.vecs + .block + .height_to_total_size + .checked_push(height, self.block.total_size().into())?; + self.vecs + .block + .height_to_weight + .checked_push(height, self.block.weight().into())?; + + Ok(()) + } +} diff --git a/crates/brk_indexer/src/processor/mod.rs b/crates/brk_indexer/src/processor/mod.rs new file mode 100644 index 000000000..a4d6b6fbe --- /dev/null +++ b/crates/brk_indexer/src/processor/mod.rs @@ -0,0 +1,37 @@ +//! Block processing for indexing. +//! +//! This module handles the extraction and storage of all indexed data from blocks. +//! Processing is split into phases that can be parallelized where possible. + +mod metadata; +mod tx; +mod txin; +mod txout; +mod types; + +pub use types::*; + +use brk_types::{Block, Height, TxInIndex, TxIndex, TxOutIndex}; + +use crate::{Indexes, RangeMap, Readers, Stores, Vecs}; + +/// Processes a single block, extracting and storing all indexed data. 
+pub struct BlockProcessor<'a> { + pub block: &'a Block, + pub height: Height, + pub check_collisions: bool, + pub indexes: &'a mut Indexes, + pub vecs: &'a mut Vecs, + pub stores: &'a mut Stores, + pub readers: &'a Readers, + pub txindex_to_height: &'a RangeMap, +} + +impl BlockProcessor<'_> { + /// Update global indexes after processing a block. + pub fn update_indexes(&mut self, tx_count: usize, input_count: usize, output_count: usize) { + self.indexes.txindex += TxIndex::from(tx_count); + self.indexes.txinindex += TxInIndex::from(input_count); + self.indexes.txoutindex += TxOutIndex::from(output_count); + } +} diff --git a/crates/brk_indexer/src/processor/tx.rs b/crates/brk_indexer/src/processor/tx.rs new file mode 100644 index 000000000..679d405ec --- /dev/null +++ b/crates/brk_indexer/src/processor/tx.rs @@ -0,0 +1,128 @@ +//! TXID computation and collision checking. + +use brk_error::{Error, Result}; +use brk_types::{StoredBool, TxIndex, Txid, TxidPrefix}; +use rayon::prelude::*; +use vecdb::{AnyVec, GenericStoredVec, TypedVecIterator, likely}; + +use crate::constants::DUPLICATE_TXIDS; + +use super::{BlockProcessor, ComputedTx}; + +impl<'a> BlockProcessor<'a> { + /// Compute TXIDs in parallel (CPU-intensive operation). + pub fn compute_txids(&self) -> Result>> { + let will_check_collisions = + self.check_collisions && self.stores.txidprefix_to_txindex.needs(self.height); + let base_txindex = self.indexes.txindex; + + self.block + .txdata + .par_iter() + .enumerate() + .map(|(index, tx)| { + let txid = Txid::from(tx.compute_txid()); + let txid_prefix = TxidPrefix::from(&txid); + + let prev_txindex_opt = if will_check_collisions { + self.stores + .txidprefix_to_txindex + .get(&txid_prefix)? + .map(|v| *v) + } else { + None + }; + + Ok(ComputedTx { + txindex: base_txindex + TxIndex::from(index), + tx, + txid, + txid_prefix, + prev_txindex_opt, + }) + }) + .collect() + } + + /// Check for TXID collisions (only for known duplicate TXIDs). 
+ pub fn check_txid_collisions(&self, txs: &[ComputedTx]) -> Result<()> { + if likely(!self.check_collisions) { + return Ok(()); + } + + let mut txindex_to_txid_iter = self.vecs.tx.txindex_to_txid.into_iter(); + for ct in txs.iter() { + let Some(prev_txindex) = ct.prev_txindex_opt else { + continue; + }; + + // In case if we start at an already parsed height + if ct.txindex == prev_txindex { + continue; + } + + let len = self.vecs.tx.txindex_to_txid.len(); + let prev_txid = txindex_to_txid_iter + .get(prev_txindex) + .ok_or(Error::Internal("Missing txid for txindex")) + .inspect_err(|_| { + dbg!(ct.txindex, len); + })?; + + let is_dup = DUPLICATE_TXIDS.contains(&prev_txid); + + if !is_dup { + dbg!(self.height, ct.txindex, prev_txid, prev_txindex); + return Err(Error::Internal("Unexpected TXID collision")); + } + } + + Ok(()) + } + + /// Store transaction metadata. + pub fn store_transaction_metadata(&mut self, txs: Vec) -> Result<()> { + let height = self.height; + + for ct in txs { + if ct.prev_txindex_opt.is_none() { + self.stores.txidprefix_to_txindex.insert_if_needed( + ct.txid_prefix, + ct.txindex, + height, + ); + } + + self.vecs + .tx + .txindex_to_height + .checked_push(ct.txindex, height)?; + self.vecs + .tx + .txindex_to_txversion + .checked_push(ct.txindex, ct.tx.version.into())?; + self.vecs + .tx + .txindex_to_txid + .checked_push(ct.txindex, ct.txid)?; + self.vecs + .tx + .txindex_to_rawlocktime + .checked_push(ct.txindex, ct.tx.lock_time.into())?; + self.vecs + .tx + .txindex_to_base_size + .checked_push(ct.txindex, ct.tx.base_size().into())?; + self.vecs + .tx + .txindex_to_total_size + .checked_push(ct.txindex, ct.tx.total_size().into())?; + self.vecs + .tx + .txindex_to_is_explicitly_rbf + .checked_push(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?; + } + + Ok(()) + } +} diff --git a/crates/brk_indexer/src/processor/txin.rs b/crates/brk_indexer/src/processor/txin.rs new file mode 100644 index 000000000..0ba7c0f46 --- /dev/null +++ 
b/crates/brk_indexer/src/processor/txin.rs @@ -0,0 +1,284 @@ +//! Input processing for block indexing. + +use brk_error::{Error, Result}; +use brk_types::{ + AddressIndexOutPoint, AddressIndexTxIndex, OutPoint, OutputType, Sats, TxInIndex, TxIndex, + TxOutIndex, Txid, TxidPrefix, TypeIndex, Unit, Vin, Vout, +}; +use rayon::prelude::*; +use rustc_hash::{FxHashMap, FxHashSet}; +use vecdb::GenericStoredVec; + +use super::{BlockProcessor, ComputedTx, InputSource, SameBlockOutputInfo}; + +impl<'a> BlockProcessor<'a> { + /// Process inputs in parallel. + /// + /// Uses collect().into_par_iter() pattern because: + /// 1. The inner work (store lookups, vector reads) is expensive + /// 2. We want to parallelize across ALL inputs, not just per-transaction + /// 3. The intermediate allocation (~8KB per block) is negligible compared to parallelism gains + pub fn process_inputs<'c>( + &self, + txs: &[ComputedTx<'c>], + ) -> Result)>> { + let txid_prefix_to_txindex: FxHashMap<_, _> = + txs.iter().map(|ct| (ct.txid_prefix, &ct.txindex)).collect(); + + let base_txindex = self.indexes.txindex; + let base_txinindex = self.indexes.txinindex; + + let txins = self + .block + .txdata + .iter() + .enumerate() + .flat_map(|(index, tx)| { + tx.input + .iter() + .enumerate() + .map(move |(vin, txin)| (TxIndex::from(index), Vin::from(vin), txin, tx)) + }) + .collect::>() + .into_par_iter() + .enumerate() + .map( + |(block_txinindex, (block_txindex, vin, txin, tx))| -> Result<(TxInIndex, InputSource)> { + let txindex = base_txindex + block_txindex; + let txinindex = base_txinindex + TxInIndex::from(block_txinindex); + + if tx.is_coinbase() { + return Ok(( + txinindex, + InputSource::SameBlock { + txindex, + txin, + vin, + outpoint: OutPoint::COINBASE, + }, + )); + } + + let outpoint = txin.previous_output; + let txid = Txid::from(outpoint.txid); + let txid_prefix = TxidPrefix::from(&txid); + let vout = Vout::from(outpoint.vout); + + if let Some(&&same_block_txindex) = txid_prefix_to_txindex + 
.get(&txid_prefix) { + let outpoint = OutPoint::new(same_block_txindex, vout); + return Ok(( + txinindex, + InputSource::SameBlock { + txindex, + txin, + vin, + outpoint, + }, + )); + } + + let prev_txindex = if let Some(txindex) = self + .stores + .txidprefix_to_txindex + .get(&txid_prefix)? + .map(|v| *v) + .and_then(|txindex| { + (txindex < self.indexes.txindex).then_some(txindex) + }) + { + txindex + } else { + return Err(Error::UnknownTxid); + }; + + let txoutindex = self + .vecs + .tx + .txindex_to_first_txoutindex + .get_pushed_or_read(prev_txindex, &self.readers.txindex_to_first_txoutindex)? + .ok_or(Error::Internal("Missing txoutindex"))? + + vout; + + let outpoint = OutPoint::new(prev_txindex, vout); + + let txoutdata = self + .vecs + .txout + .txoutindex_to_txoutdata + .get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_txoutdata)? + .ok_or(Error::Internal("Missing txout data"))?; + + let value = txoutdata.value; + let outputtype = txoutdata.outputtype; + let typeindex = txoutdata.typeindex; + + let height = self + .txindex_to_height + .get(prev_txindex) + .ok_or(Error::Internal("Missing height in txindex_to_height map"))?; + + Ok(( + txinindex, + InputSource::PreviousBlock { + vin, + value, + height, + txindex, + txoutindex, + outpoint, + outputtype, + typeindex, + }, + )) + }, + ) + .collect::>>()?; + + Ok(txins) + } + + /// Collect same-block spent outpoints. + pub fn collect_same_block_spent_outpoints( + txins: &[(TxInIndex, InputSource)], + ) -> FxHashSet { + txins + .iter() + .filter_map(|(_, input_source)| { + let InputSource::SameBlock { outpoint, .. } = input_source else { + return None; + }; + if !outpoint.is_coinbase() { + Some(*outpoint) + } else { + None + } + }) + .collect() + } + + /// Finalize inputs sequentially (stores outpoints, updates address UTXOs). 
+ pub fn finalize_inputs( + &mut self, + txins: Vec<(TxInIndex, InputSource)>, + same_block_output_info: &mut FxHashMap, + ) -> Result<()> { + let height = self.height; + + for (txinindex, input_source) in txins { + let (prev_height, vin, txindex, value, outpoint, txoutindex, outputtype, typeindex) = + match input_source { + InputSource::PreviousBlock { + height, + vin, + txindex, + txoutindex, + value, + outpoint, + outputtype, + typeindex, + } => ( + height, vin, txindex, value, outpoint, txoutindex, outputtype, typeindex, + ), + InputSource::SameBlock { + txindex, + txin, + vin, + outpoint, + } => { + if outpoint.is_coinbase() { + ( + height, + vin, + txindex, + Sats::COINBASE, + outpoint, + TxOutIndex::COINBASE, + OutputType::Unknown, + TypeIndex::COINBASE, + ) + } else { + let info = same_block_output_info + .remove(&outpoint) + .ok_or(Error::Internal("Same-block output not found")) + .inspect_err(|_| { + dbg!(&same_block_output_info, txin); + })?; + ( + height, + vin, + txindex, + info.value, + outpoint, + info.txoutindex, + info.outputtype, + info.typeindex, + ) + } + } + }; + + if vin.is_zero() { + self.vecs + .tx + .txindex_to_first_txinindex + .checked_push(txindex, txinindex)?; + } + + self.vecs + .txin + .txinindex_to_txindex + .checked_push(txinindex, txindex)?; + self.vecs + .txin + .txinindex_to_outpoint + .checked_push(txinindex, outpoint)?; + self.vecs + .txin + .txinindex_to_value + .checked_push(txinindex, value)?; + self.vecs + .txin + .txinindex_to_prev_height + .checked_push(txinindex, prev_height)?; + self.vecs + .txin + .txinindex_to_outputtype + .checked_push(txinindex, outputtype)?; + self.vecs + .txin + .txinindex_to_typeindex + .checked_push(txinindex, typeindex)?; + + // Update txoutindex_to_txinindex for non-coinbase inputs + if !txoutindex.is_coinbase() { + self.vecs + .txout + .txoutindex_to_txinindex + .update(txoutindex, txinindex)?; + } + + if !outputtype.is_address() { + continue; + } + let addresstype = outputtype; + let 
addressindex = typeindex; + + self.stores + .addresstype_to_addressindex_and_txindex + .get_mut_unwrap(addresstype) + .insert_if_needed( + AddressIndexTxIndex::from((addressindex, txindex)), + Unit, + height, + ); + + self.stores + .addresstype_to_addressindex_and_unspentoutpoint + .get_mut_unwrap(addresstype) + .remove_if_needed(AddressIndexOutPoint::from((addressindex, outpoint)), height); + } + + Ok(()) + } +} diff --git a/crates/brk_indexer/src/processor/txout.rs b/crates/brk_indexer/src/processor/txout.rs new file mode 100644 index 000000000..04ed9cd40 --- /dev/null +++ b/crates/brk_indexer/src/processor/txout.rs @@ -0,0 +1,275 @@ +//! Output processing for block indexing. + +use brk_error::{Error, Result}; +use brk_grouper::ByAddressType; +use brk_types::{ + AddressBytes, AddressHash, AddressIndexOutPoint, AddressIndexTxIndex, OutPoint, OutputType, + Sats, TxInIndex, TxIndex, TxOutData, TxOutIndex, TypeIndex, Unit, Vout, +}; +use rayon::prelude::*; +use rustc_hash::{FxHashMap, FxHashSet}; +use vecdb::GenericStoredVec; + +use super::{BlockProcessor, ProcessedOutput, SameBlockOutputInfo}; + +impl<'a> BlockProcessor<'a> { + /// Process outputs in parallel. 
+ pub fn process_outputs(&self) -> Result>> { + let height = self.height; + let check_collisions = self.check_collisions; + + let base_txindex = self.indexes.txindex; + let base_txoutindex = self.indexes.txoutindex; + + // Same pattern as inputs: collect then parallelize for maximum parallelism + self.block + .txdata + .iter() + .enumerate() + .flat_map(|(index, tx)| { + tx.output + .iter() + .enumerate() + .map(move |(vout, txout)| (TxIndex::from(index), Vout::from(vout), txout, tx)) + }) + .collect::>() + .into_par_iter() + .enumerate() + .map( + |(block_txoutindex, (block_txindex, vout, txout, tx))| -> Result { + let txindex = base_txindex + block_txindex; + let txoutindex = base_txoutindex + TxOutIndex::from(block_txoutindex); + + let script = &txout.script_pubkey; + let outputtype = OutputType::from(script); + + if outputtype.is_not_address() { + return Ok(ProcessedOutput { + txoutindex, + txout, + txindex, + vout, + outputtype, + address_info: None, + existing_typeindex: None, + }); + } + + let addresstype = outputtype; + let address_bytes = AddressBytes::try_from((script, addresstype)).unwrap(); + let address_hash = AddressHash::from(&address_bytes); + + let existing_typeindex = self + .stores + .addresstype_to_addresshash_to_addressindex + .get_unwrap(addresstype) + .get(&address_hash) + .unwrap() + .map(|v| *v) + .and_then(|typeindex_local| { + (typeindex_local < self.indexes.to_typeindex(addresstype)) + .then_some(typeindex_local) + }); + + if check_collisions && let Some(typeindex) = existing_typeindex { + let prev_addressbytes_opt = self.vecs.get_addressbytes_by_type( + addresstype, + typeindex, + self.readers.addressbytes.get_unwrap(addresstype), + )?; + let prev_addressbytes = prev_addressbytes_opt + .as_ref() + .ok_or(Error::Internal("Missing addressbytes"))?; + + if self + .stores + .addresstype_to_addresshash_to_addressindex + .get_unwrap(addresstype) + .needs(height) + && prev_addressbytes != &address_bytes + { + let txid = tx.compute_txid(); + 
dbg!( + height, + txid, + vout, + block_txindex, + addresstype, + prev_addressbytes, + &address_bytes, + &self.indexes, + typeindex, + txout, + AddressHash::from(&address_bytes), + ); + panic!() + } + } + + Ok(ProcessedOutput { + txoutindex, + txout, + txindex, + vout, + outputtype, + address_info: Some((address_bytes, address_hash)), + existing_typeindex, + }) + }, + ) + .collect() + } + + /// Finalize outputs sequentially (stores addresses, tracks UTXOs). + pub fn finalize_outputs( + &mut self, + txouts: Vec, + same_block_spent_outpoints: &FxHashSet, + ) -> Result> { + let height = self.height; + let mut already_added_addresshash: ByAddressType> = + ByAddressType::default(); + // Pre-size based on the number of same-block spent outpoints + let mut same_block_output_info: FxHashMap = + FxHashMap::with_capacity_and_hasher( + same_block_spent_outpoints.len(), + Default::default(), + ); + + for ProcessedOutput { + txoutindex, + txout, + txindex, + vout, + outputtype, + address_info, + existing_typeindex, + } in txouts + { + let sats = Sats::from(txout.value); + + if vout.is_zero() { + self.vecs + .tx + .txindex_to_first_txoutindex + .checked_push(txindex, txoutindex)?; + } + + self.vecs + .txout + .txoutindex_to_txindex + .checked_push(txoutindex, txindex)?; + + let typeindex = if let Some(ti) = existing_typeindex { + ti + } else if let Some((address_bytes, address_hash)) = address_info { + let addresstype = outputtype; + if let Some(&ti) = already_added_addresshash + .get_unwrap(addresstype) + .get(&address_hash) + { + ti + } else { + let ti = self.indexes.increment_address_index(addresstype); + + already_added_addresshash + .get_mut_unwrap(addresstype) + .insert(address_hash, ti); + self.stores + .addresstype_to_addresshash_to_addressindex + .get_mut_unwrap(addresstype) + .insert_if_needed(address_hash, ti, height); + self.vecs.push_bytes_if_needed(ti, address_bytes)?; + + ti + } + } else { + match outputtype { + OutputType::P2MS => { + self.vecs + .output + 
.p2msoutputindex_to_txindex + .checked_push(self.indexes.p2msoutputindex, txindex)?; + self.indexes.p2msoutputindex.copy_then_increment() + } + OutputType::OpReturn => { + self.vecs + .output + .opreturnindex_to_txindex + .checked_push(self.indexes.opreturnindex, txindex)?; + self.indexes.opreturnindex.copy_then_increment() + } + OutputType::Empty => { + self.vecs + .output + .emptyoutputindex_to_txindex + .checked_push(self.indexes.emptyoutputindex, txindex)?; + self.indexes.emptyoutputindex.copy_then_increment() + } + OutputType::Unknown => { + self.vecs + .output + .unknownoutputindex_to_txindex + .checked_push(self.indexes.unknownoutputindex, txindex)?; + self.indexes.unknownoutputindex.copy_then_increment() + } + _ => unreachable!(), + } + }; + + let txoutdata = TxOutData::new(sats, outputtype, typeindex); + self.vecs + .txout + .txoutindex_to_txoutdata + .checked_push(txoutindex, txoutdata)?; + self.vecs + .txout + .txoutindex_to_txinindex + .checked_push(txoutindex, TxInIndex::UNSPENT)?; + + if outputtype.is_unspendable() { + continue; + } else if outputtype.is_address() { + let addresstype = outputtype; + let addressindex = typeindex; + + self.stores + .addresstype_to_addressindex_and_txindex + .get_mut_unwrap(addresstype) + .insert_if_needed( + AddressIndexTxIndex::from((addressindex, txindex)), + Unit, + height, + ); + } + + let outpoint = OutPoint::new(txindex, vout); + + if same_block_spent_outpoints.contains(&outpoint) { + same_block_output_info.insert( + outpoint, + SameBlockOutputInfo { + outputtype, + typeindex, + value: sats, + txoutindex, + }, + ); + } else if outputtype.is_address() { + let addresstype = outputtype; + let addressindex = typeindex; + + self.stores + .addresstype_to_addressindex_and_unspentoutpoint + .get_mut_unwrap(addresstype) + .insert_if_needed( + AddressIndexOutPoint::from((addressindex, outpoint)), + Unit, + height, + ); + } + } + + Ok(same_block_output_info) + } +} diff --git a/crates/brk_indexer/src/processor/types.rs 
b/crates/brk_indexer/src/processor/types.rs new file mode 100644 index 000000000..b51157c79 --- /dev/null +++ b/crates/brk_indexer/src/processor/types.rs @@ -0,0 +1,57 @@ +//! Type definitions for block processing. + +use bitcoin::{Transaction, TxIn, TxOut}; +use brk_types::{ + AddressBytes, AddressHash, Height, OutPoint, OutputType, Sats, TxIndex, TxOutIndex, Txid, + TxidPrefix, TypeIndex, Vin, Vout, +}; + +/// Input source for tracking where an input came from. +#[derive(Debug)] +pub enum InputSource<'a> { + PreviousBlock { + vin: Vin, + value: Sats, + height: Height, + txindex: TxIndex, + txoutindex: TxOutIndex, + outpoint: OutPoint, + outputtype: OutputType, + typeindex: TypeIndex, + }, + SameBlock { + txindex: TxIndex, + txin: &'a TxIn, + vin: Vin, + outpoint: OutPoint, + }, +} + +/// Output info for same-block spends (output created and spent in the same block). +#[derive(Debug, Clone, Copy)] +pub struct SameBlockOutputInfo { + pub outputtype: OutputType, + pub typeindex: TypeIndex, + pub value: Sats, + pub txoutindex: TxOutIndex, +} + +/// Processed output data from parallel output processing. +pub struct ProcessedOutput<'a> { + pub txoutindex: TxOutIndex, + pub txout: &'a TxOut, + pub txindex: TxIndex, + pub vout: Vout, + pub outputtype: OutputType, + pub address_info: Option<(AddressBytes, AddressHash)>, + pub existing_typeindex: Option, +} + +/// Computed transaction data from parallel TXID computation. +pub struct ComputedTx<'a> { + pub txindex: TxIndex, + pub tx: &'a Transaction, + pub txid: Txid, + pub txid_prefix: TxidPrefix, + pub prev_txindex_opt: Option, +} diff --git a/crates/brk_indexer/src/range_map.rs b/crates/brk_indexer/src/range_map.rs new file mode 100644 index 000000000..02f16d833 --- /dev/null +++ b/crates/brk_indexer/src/range_map.rs @@ -0,0 +1,40 @@ +//! Range-based lookup map for efficient index -> value lookups. +//! +//! Uses the pattern that many indices share the same value (e.g., all txindexes +//! 
in a block have the same height) to provide O(log n) lookups via BTreeMap. + +use std::collections::BTreeMap; + +use vecdb::VecIndex; + +/// Maps ranges of indices to values for efficient reverse lookups. +/// +/// Instead of storing a value for every index, stores (first_index, value) +/// pairs and uses range search to find the value for any index. +#[derive(Debug, Default)] +pub struct RangeMap(BTreeMap); + +impl RangeMap { + /// Create a new empty map. + pub fn new() -> Self { + Self(BTreeMap::new()) + } + + /// Insert a new (first_index, value) mapping. + #[inline] + pub fn insert(&mut self, first_index: I, value: V) { + self.0.insert(first_index, value); + } + + /// Look up value for an index using range search. + /// Returns the value associated with the largest first_index <= given index. + #[inline] + pub fn get(&self, index: I) -> Option { + self.0.range(..=index).next_back().map(|(_, &v)| v) + } + + /// Clear all entries (for reset/rollback). + pub fn clear(&mut self) { + self.0.clear(); + } +} diff --git a/crates/brk_indexer/src/readers.rs b/crates/brk_indexer/src/readers.rs index 52229f1f3..184f0f844 100644 --- a/crates/brk_indexer/src/readers.rs +++ b/crates/brk_indexer/src/readers.rs @@ -7,8 +7,7 @@ use crate::Vecs; /// These provide consistent snapshots for reading while the main vectors are being modified. 
pub struct Readers { pub txindex_to_first_txoutindex: Reader, - pub txoutindex_to_outputtype: Reader, - pub txoutindex_to_typeindex: Reader, + pub txoutindex_to_txoutdata: Reader, pub addressbytes: ByAddressType, } @@ -16,14 +15,22 @@ impl Readers { pub fn new(vecs: &Vecs) -> Self { Self { txindex_to_first_txoutindex: vecs.tx.txindex_to_first_txoutindex.create_reader(), - txoutindex_to_outputtype: vecs.txout.txoutindex_to_outputtype.create_reader(), - txoutindex_to_typeindex: vecs.txout.txoutindex_to_typeindex.create_reader(), + txoutindex_to_txoutdata: vecs.txout.txoutindex_to_txoutdata.create_reader(), addressbytes: ByAddressType { - p2pk65: vecs.address.p2pk65addressindex_to_p2pk65bytes.create_reader(), - p2pk33: vecs.address.p2pk33addressindex_to_p2pk33bytes.create_reader(), + p2pk65: vecs + .address + .p2pk65addressindex_to_p2pk65bytes + .create_reader(), + p2pk33: vecs + .address + .p2pk33addressindex_to_p2pk33bytes + .create_reader(), p2pkh: vecs.address.p2pkhaddressindex_to_p2pkhbytes.create_reader(), p2sh: vecs.address.p2shaddressindex_to_p2shbytes.create_reader(), - p2wpkh: vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.create_reader(), + p2wpkh: vecs + .address + .p2wpkhaddressindex_to_p2wpkhbytes + .create_reader(), p2wsh: vecs.address.p2wshaddressindex_to_p2wshbytes.create_reader(), p2tr: vecs.address.p2traddressindex_to_p2trbytes.create_reader(), p2a: vecs.address.p2aaddressindex_to_p2abytes.create_reader(), diff --git a/crates/brk_indexer/src/stores.rs b/crates/brk_indexer/src/stores.rs index 93c44b1ef..2cca9e5d9 100644 --- a/crates/brk_indexer/src/stores.rs +++ b/crates/brk_indexer/src/stores.rs @@ -5,7 +5,8 @@ use brk_grouper::ByAddressType; use brk_store::{AnyStore, Kind, Mode, Store}; use brk_types::{ AddressHash, AddressIndexOutPoint, AddressIndexTxIndex, BlockHashPrefix, Height, OutPoint, - OutputType, StoredString, TxIndex, TxOutIndex, TxidPrefix, TypeIndex, Unit, Version, Vout, + OutputType, StoredString, TxInIndex, TxIndex, TxOutIndex, 
TxidPrefix, TypeIndex, Unit, Version, + Vout, }; use fjall::{Database, PersistMode}; use log::info; @@ -270,20 +271,14 @@ impl Stores { let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; vecs.txout - .txoutindex_to_outputtype + .txoutindex_to_txoutdata .iter()? .enumerate() .skip(starting_indexes.txoutindex.to_usize()) - .zip( - vecs.txout - .txoutindex_to_typeindex - .iter()? - .skip(starting_indexes.txoutindex.to_usize()), - ) - .filter(|((_, outputtype), _): &((usize, OutputType), TypeIndex)| { - outputtype.is_address() - }) - .for_each(|((txoutindex, addresstype), addressindex)| { + .filter(|(_, txoutdata)| txoutdata.outputtype.is_address()) + .for_each(|(txoutindex, txoutdata)| { + let addresstype = txoutdata.outputtype; + let addressindex = txoutdata.typeindex; let txindex = txoutindex_to_txindex_iter.get_at_unwrap(txoutindex); self.addresstype_to_addressindex_and_txindex @@ -303,20 +298,22 @@ impl Stores { .remove(AddressIndexOutPoint::from((addressindex, outpoint))); }); - // Add back outputs that were spent after the rollback point + // Collect outputs that were spent after the rollback point + // We need to: 1) reset their spend status, 2) restore address stores let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?; - let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?; + let mut txoutindex_to_txoutdata_iter = vecs.txout.txoutindex_to_txoutdata.iter()?; let mut txinindex_to_txindex_iter = vecs.txin.txinindex_to_txindex.iter()?; - vecs.txin + + let outputs_to_unspend: Vec<_> = vecs + .txin .txinindex_to_outpoint .iter()? 
.enumerate() .skip(starting_indexes.txinindex.to_usize()) - .for_each(|(txinindex, outpoint): (usize, OutPoint)| { + .filter_map(|(txinindex, outpoint): (usize, OutPoint)| { if outpoint.is_coinbase() { - return; + return None; } let output_txindex = outpoint.txindex(); @@ -328,29 +325,38 @@ impl Stores { // Only process if this output was created before the rollback point if txoutindex < starting_indexes.txoutindex { - let outputtype = txoutindex_to_outputtype_iter.get_unwrap(txoutindex); + let txoutdata = txoutindex_to_txoutdata_iter.get_unwrap(txoutindex); + let spending_txindex = + txinindex_to_txindex_iter.get_at_unwrap(txinindex); - if outputtype.is_address() { - let addresstype = outputtype; - let addressindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex); - - // Get the SPENDING tx's index (not the output's tx) - let spending_txindex = - txinindex_to_txindex_iter.get_at_unwrap(txinindex); - - self.addresstype_to_addressindex_and_txindex - .get_mut_unwrap(addresstype) - .remove(AddressIndexTxIndex::from(( - addressindex, - spending_txindex, - ))); - - self.addresstype_to_addressindex_and_unspentoutpoint - .get_mut_unwrap(addresstype) - .insert(AddressIndexOutPoint::from((addressindex, outpoint)), Unit); - } + Some((txoutindex, outpoint, txoutdata, spending_txindex)) + } else { + None } - }); + }) + .collect(); + + // Now process the collected outputs (iterators dropped, can mutate vecs) + for (txoutindex, outpoint, txoutdata, spending_txindex) in outputs_to_unspend { + // Reset spend status back to unspent + vecs.txout + .txoutindex_to_txinindex + .update(txoutindex, TxInIndex::UNSPENT)?; + + // Restore address stores if this is an address output + if txoutdata.outputtype.is_address() { + let addresstype = txoutdata.outputtype; + let addressindex = txoutdata.typeindex; + + self.addresstype_to_addressindex_and_txindex + .get_mut_unwrap(addresstype) + .remove(AddressIndexTxIndex::from((addressindex, spending_txindex))); + + 
self.addresstype_to_addressindex_and_unspentoutpoint + .get_mut_unwrap(addresstype) + .insert(AddressIndexOutPoint::from((addressindex, outpoint)), Unit); + } + } } else { unreachable!(); } diff --git a/crates/brk_indexer/src/vecs/address.rs b/crates/brk_indexer/src/vecs/address.rs index e673a4385..a2aa270b5 100644 --- a/crates/brk_indexer/src/vecs/address.rs +++ b/crates/brk_indexer/src/vecs/address.rs @@ -12,6 +12,8 @@ use vecdb::{ TypedVecIterator, }; +use crate::parallel_import; + #[derive(Clone, Traversable)] pub struct AddressVecs { // Height to first address index (per address type) @@ -36,55 +38,58 @@ pub struct AddressVecs { impl AddressVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_first_p2pk65addressindex, + height_to_first_p2pk33addressindex, + height_to_first_p2pkhaddressindex, + height_to_first_p2shaddressindex, + height_to_first_p2wpkhaddressindex, + height_to_first_p2wshaddressindex, + height_to_first_p2traddressindex, + height_to_first_p2aaddressindex, + p2pk65addressindex_to_p2pk65bytes, + p2pk33addressindex_to_p2pk33bytes, + p2pkhaddressindex_to_p2pkhbytes, + p2shaddressindex_to_p2shbytes, + p2wpkhaddressindex_to_p2wpkhbytes, + p2wshaddressindex_to_p2wshbytes, + p2traddressindex_to_p2trbytes, + p2aaddressindex_to_p2abytes, + ) = parallel_import! 
{ + height_to_first_p2pk65addressindex = PcoVec::forced_import(db, "first_p2pk65addressindex", version), + height_to_first_p2pk33addressindex = PcoVec::forced_import(db, "first_p2pk33addressindex", version), + height_to_first_p2pkhaddressindex = PcoVec::forced_import(db, "first_p2pkhaddressindex", version), + height_to_first_p2shaddressindex = PcoVec::forced_import(db, "first_p2shaddressindex", version), + height_to_first_p2wpkhaddressindex = PcoVec::forced_import(db, "first_p2wpkhaddressindex", version), + height_to_first_p2wshaddressindex = PcoVec::forced_import(db, "first_p2wshaddressindex", version), + height_to_first_p2traddressindex = PcoVec::forced_import(db, "first_p2traddressindex", version), + height_to_first_p2aaddressindex = PcoVec::forced_import(db, "first_p2aaddressindex", version), + p2pk65addressindex_to_p2pk65bytes = BytesVec::forced_import(db, "p2pk65bytes", version), + p2pk33addressindex_to_p2pk33bytes = BytesVec::forced_import(db, "p2pk33bytes", version), + p2pkhaddressindex_to_p2pkhbytes = BytesVec::forced_import(db, "p2pkhbytes", version), + p2shaddressindex_to_p2shbytes = BytesVec::forced_import(db, "p2shbytes", version), + p2wpkhaddressindex_to_p2wpkhbytes = BytesVec::forced_import(db, "p2wpkhbytes", version), + p2wshaddressindex_to_p2wshbytes = BytesVec::forced_import(db, "p2wshbytes", version), + p2traddressindex_to_p2trbytes = BytesVec::forced_import(db, "p2trbytes", version), + p2aaddressindex_to_p2abytes = BytesVec::forced_import(db, "p2abytes", version), + }; Ok(Self { - height_to_first_p2pk65addressindex: PcoVec::forced_import( - db, - "first_p2pk65addressindex", - version, - )?, - height_to_first_p2pk33addressindex: PcoVec::forced_import( - db, - "first_p2pk33addressindex", - version, - )?, - height_to_first_p2pkhaddressindex: PcoVec::forced_import( - db, - "first_p2pkhaddressindex", - version, - )?, - height_to_first_p2shaddressindex: PcoVec::forced_import( - db, - "first_p2shaddressindex", - version, - )?, - 
height_to_first_p2wpkhaddressindex: PcoVec::forced_import( - db, - "first_p2wpkhaddressindex", - version, - )?, - height_to_first_p2wshaddressindex: PcoVec::forced_import( - db, - "first_p2wshaddressindex", - version, - )?, - height_to_first_p2traddressindex: PcoVec::forced_import( - db, - "first_p2traddressindex", - version, - )?, - height_to_first_p2aaddressindex: PcoVec::forced_import( - db, - "first_p2aaddressindex", - version, - )?, - p2pk65addressindex_to_p2pk65bytes: BytesVec::forced_import(db, "p2pk65bytes", version)?, - p2pk33addressindex_to_p2pk33bytes: BytesVec::forced_import(db, "p2pk33bytes", version)?, - p2pkhaddressindex_to_p2pkhbytes: BytesVec::forced_import(db, "p2pkhbytes", version)?, - p2shaddressindex_to_p2shbytes: BytesVec::forced_import(db, "p2shbytes", version)?, - p2wpkhaddressindex_to_p2wpkhbytes: BytesVec::forced_import(db, "p2wpkhbytes", version)?, - p2wshaddressindex_to_p2wshbytes: BytesVec::forced_import(db, "p2wshbytes", version)?, - p2traddressindex_to_p2trbytes: BytesVec::forced_import(db, "p2trbytes", version)?, - p2aaddressindex_to_p2abytes: BytesVec::forced_import(db, "p2abytes", version)?, + height_to_first_p2pk65addressindex, + height_to_first_p2pk33addressindex, + height_to_first_p2pkhaddressindex, + height_to_first_p2shaddressindex, + height_to_first_p2wpkhaddressindex, + height_to_first_p2wshaddressindex, + height_to_first_p2traddressindex, + height_to_first_p2aaddressindex, + p2pk65addressindex_to_p2pk65bytes, + p2pk33addressindex_to_p2pk33bytes, + p2pkhaddressindex_to_p2pkhbytes, + p2shaddressindex_to_p2shbytes, + p2wpkhaddressindex_to_p2wpkhbytes, + p2wshaddressindex_to_p2wshbytes, + p2traddressindex_to_p2trbytes, + p2aaddressindex_to_p2abytes, }) } diff --git a/crates/brk_indexer/src/vecs/blocks.rs b/crates/brk_indexer/src/vecs/blocks.rs index a02971067..bcabf1ba5 100644 --- a/crates/brk_indexer/src/vecs/blocks.rs +++ b/crates/brk_indexer/src/vecs/blocks.rs @@ -4,6 +4,8 @@ use brk_types::{BlockHash, Height, StoredF64, 
StoredU64, Timestamp, Version, Wei use rayon::prelude::*; use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; +use crate::parallel_import; + #[derive(Clone, Traversable)] pub struct BlockVecs { pub height_to_blockhash: BytesVec, @@ -16,12 +18,25 @@ pub struct BlockVecs { impl BlockVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_blockhash, + height_to_difficulty, + height_to_timestamp, + height_to_total_size, + height_to_weight, + ) = parallel_import! { + height_to_blockhash = BytesVec::forced_import(db, "blockhash", version), + height_to_difficulty = PcoVec::forced_import(db, "difficulty", version), + height_to_timestamp = PcoVec::forced_import(db, "timestamp", version), + height_to_total_size = PcoVec::forced_import(db, "total_size", version), + height_to_weight = PcoVec::forced_import(db, "weight", version), + }; Ok(Self { - height_to_blockhash: BytesVec::forced_import(db, "blockhash", version)?, - height_to_difficulty: PcoVec::forced_import(db, "difficulty", version)?, - height_to_timestamp: PcoVec::forced_import(db, "timestamp", version)?, - height_to_total_size: PcoVec::forced_import(db, "total_size", version)?, - height_to_weight: PcoVec::forced_import(db, "weight", version)?, + height_to_blockhash, + height_to_difficulty, + height_to_timestamp, + height_to_total_size, + height_to_weight, }) } diff --git a/crates/brk_indexer/src/vecs/macros.rs b/crates/brk_indexer/src/vecs/macros.rs new file mode 100644 index 000000000..6be993eef --- /dev/null +++ b/crates/brk_indexer/src/vecs/macros.rs @@ -0,0 +1,20 @@ +/// Imports multiple items in parallel using thread::scope. +/// Each expression must return Result. +/// +/// # Example +/// ```ignore +/// let (a, b, c) = parallel_import! { +/// a = SomeVec::forced_import(&db, version), +/// b = OtherVec::forced_import(&db, version), +/// c = ThirdVec::forced_import(&db, version), +/// }; +/// ``` +#[macro_export] +macro_rules! 
parallel_import { + ($($name:ident = $expr:expr),+ $(,)?) => {{ + std::thread::scope(|s| -> brk_error::Result<_> { + $(let $name = s.spawn(|| $expr);)+ + Ok(($($name.join().unwrap()?,)+)) + })? + }}; +} diff --git a/crates/brk_indexer/src/vecs/mod.rs b/crates/brk_indexer/src/vecs/mod.rs index e140225a6..d332257f5 100644 --- a/crates/brk_indexer/src/vecs/mod.rs +++ b/crates/brk_indexer/src/vecs/mod.rs @@ -6,8 +6,11 @@ use brk_types::{AddressBytes, AddressHash, Height, OutputType, TypeIndex, Versio use rayon::prelude::*; use vecdb::{AnyStoredVec, Database, PAGE_SIZE, Reader, Stamp}; +use crate::parallel_import; + mod address; mod blocks; +mod macros; mod output; mod tx; mod txin; @@ -35,15 +38,51 @@ pub struct Vecs { impl Vecs { pub fn forced_import(parent: &Path, version: Version) -> Result { + log::debug!("Opening vecs database..."); let db = Database::open(&parent.join("vecs"))?; + log::debug!("Setting min len..."); db.set_min_len(PAGE_SIZE * 50_000_000)?; - let block = BlockVecs::forced_import(&db, version)?; - let tx = TxVecs::forced_import(&db, version)?; - let txin = TxinVecs::forced_import(&db, version)?; - let txout = TxoutVecs::forced_import(&db, version)?; - let address = AddressVecs::forced_import(&db, version)?; - let output = OutputVecs::forced_import(&db, version)?; + log::debug!("Importing sub-vecs in parallel..."); + let (block, tx, txin, txout, address, output) = parallel_import! 
{ + block = { + log::debug!("Importing BlockVecs..."); + let r = BlockVecs::forced_import(&db, version); + log::debug!("BlockVecs imported."); + r + }, + tx = { + log::debug!("Importing TxVecs..."); + let r = TxVecs::forced_import(&db, version); + log::debug!("TxVecs imported."); + r + }, + txin = { + log::debug!("Importing TxinVecs..."); + let r = TxinVecs::forced_import(&db, version); + log::debug!("TxinVecs imported."); + r + }, + txout = { + log::debug!("Importing TxoutVecs..."); + let r = TxoutVecs::forced_import(&db, version); + log::debug!("TxoutVecs imported."); + r + }, + address = { + log::debug!("Importing AddressVecs..."); + let r = AddressVecs::forced_import(&db, version); + log::debug!("AddressVecs imported."); + r + }, + output = { + log::debug!("Importing OutputVecs..."); + let r = OutputVecs::forced_import(&db, version); + log::debug!("OutputVecs imported."); + r + }, + }; + log::debug!("Sub-vecs imported."); let this = Self { db, @@ -55,13 +94,16 @@ impl Vecs { output, }; + log::debug!("Retaining regions..."); this.db.retain_regions( this.iter_any_exportable() .flat_map(|v| v.region_names()) .collect(), )?; + log::debug!("Compacting database..."); this.db.compact()?; + log::debug!("Vecs import complete."); Ok(this) } diff --git a/crates/brk_indexer/src/vecs/output.rs b/crates/brk_indexer/src/vecs/output.rs index 8e3c764d5..8001e6f3e 100644 --- a/crates/brk_indexer/src/vecs/output.rs +++ b/crates/brk_indexer/src/vecs/output.rs @@ -6,6 +6,8 @@ use brk_types::{ use rayon::prelude::*; use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; +use crate::parallel_import; + #[derive(Clone, Traversable)] pub struct OutputVecs { // Height to first output index (per output type) @@ -22,31 +24,34 @@ pub struct OutputVecs { impl OutputVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_first_emptyoutputindex, + height_to_first_opreturnindex, + height_to_first_p2msoutputindex, + 
height_to_first_unknownoutputindex, + emptyoutputindex_to_txindex, + opreturnindex_to_txindex, + p2msoutputindex_to_txindex, + unknownoutputindex_to_txindex, + ) = parallel_import! { + height_to_first_emptyoutputindex = PcoVec::forced_import(db, "first_emptyoutputindex", version), + height_to_first_opreturnindex = PcoVec::forced_import(db, "first_opreturnindex", version), + height_to_first_p2msoutputindex = PcoVec::forced_import(db, "first_p2msoutputindex", version), + height_to_first_unknownoutputindex = PcoVec::forced_import(db, "first_unknownoutputindex", version), + emptyoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + opreturnindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + p2msoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + unknownoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + }; Ok(Self { - height_to_first_emptyoutputindex: PcoVec::forced_import( - db, - "first_emptyoutputindex", - version, - )?, - height_to_first_opreturnindex: PcoVec::forced_import( - db, - "first_opreturnindex", - version, - )?, - height_to_first_p2msoutputindex: PcoVec::forced_import( - db, - "first_p2msoutputindex", - version, - )?, - height_to_first_unknownoutputindex: PcoVec::forced_import( - db, - "first_unknownoutputindex", - version, - )?, - emptyoutputindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, - opreturnindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, - p2msoutputindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, - unknownoutputindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, + height_to_first_emptyoutputindex, + height_to_first_opreturnindex, + height_to_first_p2msoutputindex, + height_to_first_unknownoutputindex, + emptyoutputindex_to_txindex, + opreturnindex_to_txindex, + p2msoutputindex_to_txindex, + unknownoutputindex_to_txindex, }) } diff --git a/crates/brk_indexer/src/vecs/tx.rs 
b/crates/brk_indexer/src/vecs/tx.rs index aae22d9e1..9cc47994d 100644 --- a/crates/brk_indexer/src/vecs/tx.rs +++ b/crates/brk_indexer/src/vecs/tx.rs @@ -7,6 +7,8 @@ use brk_types::{ use rayon::prelude::*; use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; +use crate::parallel_import; + #[derive(Clone, Traversable)] pub struct TxVecs { pub height_to_first_txindex: PcoVec, @@ -23,17 +25,40 @@ pub struct TxVecs { impl TxVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_first_txindex, + txindex_to_height, + txindex_to_txid, + txindex_to_txversion, + txindex_to_rawlocktime, + txindex_to_base_size, + txindex_to_total_size, + txindex_to_is_explicitly_rbf, + txindex_to_first_txinindex, + txindex_to_first_txoutindex, + ) = parallel_import! { + height_to_first_txindex = PcoVec::forced_import(db, "first_txindex", version), + txindex_to_height = PcoVec::forced_import(db, "height", version), + txindex_to_txid = BytesVec::forced_import(db, "txid", version), + txindex_to_txversion = PcoVec::forced_import(db, "txversion", version), + txindex_to_rawlocktime = PcoVec::forced_import(db, "rawlocktime", version), + txindex_to_base_size = PcoVec::forced_import(db, "base_size", version), + txindex_to_total_size = PcoVec::forced_import(db, "total_size", version), + txindex_to_is_explicitly_rbf = PcoVec::forced_import(db, "is_explicitly_rbf", version), + txindex_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), + txindex_to_first_txoutindex = BytesVec::forced_import(db, "first_txoutindex", version), + }; Ok(Self { - height_to_first_txindex: PcoVec::forced_import(db, "first_txindex", version)?, - txindex_to_height: PcoVec::forced_import(db, "height", version)?, - txindex_to_txid: BytesVec::forced_import(db, "txid", version)?, - txindex_to_txversion: PcoVec::forced_import(db, "txversion", version)?, - txindex_to_rawlocktime: PcoVec::forced_import(db, "rawlocktime", version)?, - 
txindex_to_base_size: PcoVec::forced_import(db, "base_size", version)?, - txindex_to_total_size: PcoVec::forced_import(db, "total_size", version)?, - txindex_to_is_explicitly_rbf: PcoVec::forced_import(db, "is_explicitly_rbf", version)?, - txindex_to_first_txinindex: PcoVec::forced_import(db, "first_txinindex", version)?, - txindex_to_first_txoutindex: BytesVec::forced_import(db, "first_txoutindex", version)?, + height_to_first_txindex, + txindex_to_height, + txindex_to_txid, + txindex_to_txversion, + txindex_to_rawlocktime, + txindex_to_base_size, + txindex_to_total_size, + txindex_to_is_explicitly_rbf, + txindex_to_first_txinindex, + txindex_to_first_txoutindex, }) } diff --git a/crates/brk_indexer/src/vecs/txin.rs b/crates/brk_indexer/src/vecs/txin.rs index 140c2b19b..18dcc6f2a 100644 --- a/crates/brk_indexer/src/vecs/txin.rs +++ b/crates/brk_indexer/src/vecs/txin.rs @@ -1,22 +1,49 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Height, OutPoint, TxInIndex, TxIndex, Version}; +use brk_types::{Height, OutPoint, OutputType, Sats, TxInIndex, TxIndex, TypeIndex, Version}; use rayon::prelude::*; use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; +use crate::parallel_import; + #[derive(Clone, Traversable)] pub struct TxinVecs { pub height_to_first_txinindex: PcoVec, pub txinindex_to_outpoint: PcoVec, pub txinindex_to_txindex: PcoVec, + pub txinindex_to_value: PcoVec, + pub txinindex_to_prev_height: PcoVec, + pub txinindex_to_outputtype: PcoVec, + pub txinindex_to_typeindex: PcoVec, } impl TxinVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_first_txinindex, + txinindex_to_outpoint, + txinindex_to_txindex, + txinindex_to_value, + txinindex_to_prev_height, + txinindex_to_outputtype, + txinindex_to_typeindex, + ) = parallel_import! 
{ + height_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version), + txinindex_to_outpoint = PcoVec::forced_import(db, "outpoint", version), + txinindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + txinindex_to_value = PcoVec::forced_import(db, "value", version), + txinindex_to_prev_height = PcoVec::forced_import(db, "prev_height", version), + txinindex_to_outputtype = PcoVec::forced_import(db, "outputtype", version), + txinindex_to_typeindex = PcoVec::forced_import(db, "typeindex", version), + }; Ok(Self { - height_to_first_txinindex: PcoVec::forced_import(db, "first_txinindex", version)?, - txinindex_to_outpoint: PcoVec::forced_import(db, "outpoint", version)?, - txinindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, + height_to_first_txinindex, + txinindex_to_outpoint, + txinindex_to_txindex, + txinindex_to_value, + txinindex_to_prev_height, + txinindex_to_outputtype, + txinindex_to_typeindex, }) } @@ -27,6 +54,14 @@ impl TxinVecs { .truncate_if_needed_with_stamp(txinindex, stamp)?; self.txinindex_to_txindex .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.txinindex_to_value + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.txinindex_to_prev_height + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.txinindex_to_outputtype + .truncate_if_needed_with_stamp(txinindex, stamp)?; + self.txinindex_to_typeindex + .truncate_if_needed_with_stamp(txinindex, stamp)?; Ok(()) } @@ -35,6 +70,10 @@ impl TxinVecs { &mut self.height_to_first_txinindex as &mut dyn AnyStoredVec, &mut self.txinindex_to_outpoint, &mut self.txinindex_to_txindex, + &mut self.txinindex_to_value, + &mut self.txinindex_to_prev_height, + &mut self.txinindex_to_outputtype, + &mut self.txinindex_to_typeindex, ] .into_par_iter() } diff --git a/crates/brk_indexer/src/vecs/txout.rs b/crates/brk_indexer/src/vecs/txout.rs index 66e764218..119662e83 100644 --- a/crates/brk_indexer/src/vecs/txout.rs +++ 
b/crates/brk_indexer/src/vecs/txout.rs @@ -1,50 +1,69 @@ use brk_error::Result; use brk_traversable::Traversable; -use brk_types::{Height, OutputType, Sats, TxIndex, TxOutIndex, TypeIndex, Version}; +use brk_types::{Height, Sats, TxInIndex, TxIndex, TxOutData, TxOutIndex, Version}; use rayon::prelude::*; -use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp}; +use vecdb::{ + AnyStoredVec, AnyVec, BytesVec, Database, GenericStoredVec, ImportableVec, IterableCloneableVec, + LazyVecFrom1, PcoVec, Stamp, +}; + +use crate::parallel_import; #[derive(Clone, Traversable)] pub struct TxoutVecs { pub height_to_first_txoutindex: PcoVec, - pub txoutindex_to_value: BytesVec, - pub txoutindex_to_outputtype: BytesVec, - pub txoutindex_to_typeindex: BytesVec, + pub txoutindex_to_txoutdata: BytesVec, pub txoutindex_to_txindex: PcoVec, + pub txoutindex_to_txinindex: BytesVec, + pub txoutindex_to_value: LazyVecFrom1, } impl TxoutVecs { pub fn forced_import(db: &Database, version: Version) -> Result { + let ( + height_to_first_txoutindex, + txoutindex_to_txoutdata, + txoutindex_to_txindex, + txoutindex_to_txinindex, + ) = parallel_import! 
{ + height_to_first_txoutindex = PcoVec::forced_import(db, "first_txoutindex", version), + txoutindex_to_txoutdata = BytesVec::forced_import(db, "txoutdata", version), + txoutindex_to_txindex = PcoVec::forced_import(db, "txindex", version), + txoutindex_to_txinindex = BytesVec::forced_import(db, "txinindex", version), + }; + let txoutindex_to_value = LazyVecFrom1::init( + "value", + txoutindex_to_txoutdata.version(), + txoutindex_to_txoutdata.boxed_clone(), + |index, iter| iter.get(index).map(|txoutdata: TxOutData| txoutdata.value), + ); Ok(Self { - height_to_first_txoutindex: PcoVec::forced_import(db, "first_txoutindex", version)?, - txoutindex_to_value: BytesVec::forced_import(db, "value", version)?, - txoutindex_to_outputtype: BytesVec::forced_import(db, "outputtype", version)?, - txoutindex_to_typeindex: BytesVec::forced_import(db, "typeindex", version)?, - txoutindex_to_txindex: PcoVec::forced_import(db, "txindex", version)?, + height_to_first_txoutindex, + txoutindex_to_txoutdata, + txoutindex_to_txindex, + txoutindex_to_txinindex, + txoutindex_to_value, }) } pub fn truncate(&mut self, height: Height, txoutindex: TxOutIndex, stamp: Stamp) -> Result<()> { self.height_to_first_txoutindex .truncate_if_needed_with_stamp(height, stamp)?; - self.txoutindex_to_value - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - self.txoutindex_to_outputtype - .truncate_if_needed_with_stamp(txoutindex, stamp)?; - self.txoutindex_to_typeindex + self.txoutindex_to_txoutdata .truncate_if_needed_with_stamp(txoutindex, stamp)?; self.txoutindex_to_txindex .truncate_if_needed_with_stamp(txoutindex, stamp)?; + self.txoutindex_to_txinindex + .truncate_if_needed_with_stamp(txoutindex, stamp)?; Ok(()) } pub fn par_iter_mut_any(&mut self) -> impl ParallelIterator { [ &mut self.height_to_first_txoutindex as &mut dyn AnyStoredVec, - &mut self.txoutindex_to_value, - &mut self.txoutindex_to_outputtype, - &mut self.txoutindex_to_typeindex, + &mut self.txoutindex_to_txoutdata, &mut 
self.txoutindex_to_txindex, + &mut self.txoutindex_to_txinindex, ] .into_par_iter() } diff --git a/crates/brk_mcp/Cargo.toml b/crates/brk_mcp/Cargo.toml index d71bbb262..381df6098 100644 --- a/crates/brk_mcp/Cargo.toml +++ b/crates/brk_mcp/Cargo.toml @@ -9,14 +9,15 @@ repository.workspace = true build = "build.rs" [dependencies] -aide = { workspace = true } -brk_query = { workspace = true } +axum = { workspace = true } brk_rmcp = { version = "0.8.0", features = [ "transport-worker", "transport-streamable-http-server", ] } -brk_types = { workspace = true } log = { workspace = true } +minreq = { workspace = true } +schemars = { workspace = true } +serde = { workspace = true } serde_json = { workspace = true } [package.metadata.cargo-machete] diff --git a/crates/brk_mcp/README.md b/crates/brk_mcp/README.md index 64b35354f..318b91e95 100644 --- a/crates/brk_mcp/README.md +++ b/crates/brk_mcp/README.md @@ -4,42 +4,31 @@ Model Context Protocol (MCP) server for Bitcoin on-chain data. ## What It Enables -Expose BRK's query capabilities to AI assistants via the MCP standard. LLMs can browse metrics, fetch datasets, and analyze on-chain data through structured tool calls. - -## Key Features - -- **Tool-based API**: 8 tools for metric discovery and data retrieval -- **Pagination support**: Browse large metric catalogs in chunks -- **Self-documenting**: Built-in instructions explain available capabilities -- **Async**: Full tokio integration via `AsyncQuery` +Expose BRK's REST API to AI assistants via MCP. The LLM reads the OpenAPI spec and calls any endpoint through a generic fetch tool. 
## Available Tools | Tool | Description | |------|-------------| -| `get_metric_count` | Count of unique metrics | -| `get_vec_count` | Total metric × index combinations | -| `get_indexes` | List all index types and variants | -| `get_vecids` | Paginated list of metric IDs | -| `get_index_to_vecids` | Metrics supporting a given index | -| `get_vecid_to_indexes` | Indexes supported by a metric | -| `get_vecs` | Fetch metric data with range selection | -| `get_version` | BRK version string | +| `get_openapi` | Get the OpenAPI specification for all REST endpoints | +| `fetch` | Call any REST API endpoint by path and query | + +## Workflow + +1. LLM calls `get_openapi` to discover available endpoints +2. LLM calls `fetch` with the desired path and query parameters ## Usage ```rust,ignore -let mcp = MCP::new(&async_query); - -// The MCP server implements ServerHandler for use with rmcp -// Tools are auto-registered via the #[tool_router] macro +let mcp = MCP::new("http://127.0.0.1:3110", openapi_json); ``` ## Integration -The MCP server is integrated into `brk_server` and exposed at `/mcp` endpoint for MCP transport. +The MCP server is integrated into `brk_server` and exposed at `/mcp` endpoint. 
## Built On -- `brk_query` for data access - `brk_rmcp` for MCP protocol implementation +- `minreq` for HTTP requests diff --git a/crates/brk_mcp/src/lib.rs b/crates/brk_mcp/src/lib.rs index 4a45b43a6..147e6df34 100644 --- a/crates/brk_mcp/src/lib.rs +++ b/crates/brk_mcp/src/lib.rs @@ -1,6 +1,7 @@ #![doc = include_str!("../README.md")] -use brk_query::{AsyncQuery, MetricSelection, Pagination, PaginationIndex}; +use std::sync::Arc; + use brk_rmcp::{ ErrorData as McpError, RoleServer, ServerHandler, handler::server::{router::tool::ToolRouter, wrapper::Parameters}, @@ -8,135 +9,68 @@ use brk_rmcp::{ service::RequestContext, tool, tool_handler, tool_router, }; -use brk_types::Metric; use log::info; +use schemars::JsonSchema; +use serde::Deserialize; pub mod route; #[derive(Clone)] pub struct MCP { - query: AsyncQuery, + base_url: Arc, + openapi_json: Arc, tool_router: ToolRouter, } -const VERSION: &str = env!("CARGO_PKG_VERSION"); +/// Parameters for fetching from the REST API. +#[derive(Deserialize, JsonSchema)] +pub struct FetchParams { + /// API path (e.g., "/api/blocks" or "/api/metrics/list") + pub path: String, + /// Optional query string (e.g., "page=0" or "from=-1&to=-10") + pub query: Option, +} #[tool_router] impl MCP { - pub fn new(query: &AsyncQuery) -> Self { + pub fn new(base_url: impl Into, openapi_json: impl Into) -> Self { Self { - query: query.clone(), + base_url: Arc::new(base_url.into()), + openapi_json: Arc::new(openapi_json.into()), tool_router: Self::tool_router(), } } - #[tool(description = " -Get the count of unique metrics. -")] - async fn get_metric_count(&self) -> Result { - info!("mcp: distinct_metric_count"); - Ok(CallToolResult::success(vec![ - Content::json(self.query.sync(|q| q.distinct_metric_count())).unwrap(), - ])) - } - - #[tool(description = " -Get the count of all metrics. 
(distinct metrics multiplied by the number of indexes supported by each one) -")] - async fn get_vec_count(&self) -> Result { - info!("mcp: total_metric_count"); - Ok(CallToolResult::success(vec![ - Content::json(self.query.sync(|q| q.total_metric_count())).unwrap(), - ])) - } - - #[tool(description = " -Get the list of all existing indexes and their accepted variants. -")] - async fn get_indexes(&self) -> Result { - info!("mcp: get_indexes"); - Ok(CallToolResult::success(vec![ - Content::json(self.query.inner().indexes()).unwrap(), - ])) - } - - #[tool(description = " -Get a paginated list of all existing vec ids. -There are up to 1,000 values per page. -If the `page` param is omitted, it will default to the first page. -")] - async fn get_vecids( - &self, - Parameters(pagination): Parameters, - ) -> Result { - info!("mcp: get_metrics"); - Ok(CallToolResult::success(vec![ - Content::json(self.query.sync(|q| q.metrics(pagination))).unwrap(), - ])) - } - - #[tool(description = " -Get a paginated list of all vec ids which support a given index. -There are up to 1,000 values per page. -If the `page` param is omitted, it will default to the first page. -")] - async fn get_index_to_vecids( - &self, - Parameters(paginated_index): Parameters, - ) -> Result { - info!("mcp: get_index_to_vecids"); - let result = self - .query - .inner() - .index_to_vecids(paginated_index) - .unwrap_or_default(); - Ok(CallToolResult::success(vec![ - Content::json(result).unwrap(), - ])) - } - - #[tool(description = " -Get a list of all indexes supported by a given vec id. -The list will be empty if the vec id isn't correct. -")] - async fn get_vecid_to_indexes( - &self, - Parameters(metric): Parameters, - ) -> Result { - info!("mcp: get_vecid_to_indexes"); - Ok(CallToolResult::success(vec![ - Content::json(self.query.inner().metric_to_indexes(metric)).unwrap(), - ])) - } - - #[tool(description = " -Get one or multiple vecs depending on given parameters. 
-The response's format will depend on the given parameters, it will be: -- A value: If requested only one vec and the given range returns one value (for example: `from=-1`) -- A list: If requested only one vec and the given range returns multiple values (for example: `from=-1000&count=100` or `from=-444&to=-333`) -- A matrix: When multiple vecs are requested, even if they each return one value. -")] - async fn get_vecs( - &self, - Parameters(params): Parameters, - ) -> Result { - info!("mcp: get_vecs"); + #[tool(description = "Get the OpenAPI specification describing all available REST API endpoints.")] + async fn get_openapi(&self) -> Result { + info!("mcp: get_openapi"); Ok(CallToolResult::success(vec![Content::text( - match self.query.run(move |q| q.search_and_format_legacy(params)).await { - Ok(output) => output.to_string(), - Err(e) => format!("Error:\n{e}"), - }, + self.openapi_json.as_str(), )])) } - #[tool(description = " -Get the running version of the Bitcoin Research Kit. -")] - async fn get_version(&self) -> Result { - info!("mcp: get_version"); - Ok(CallToolResult::success(vec![Content::text(format!( - "v{VERSION}" - ))])) + #[tool(description = "Call a REST API endpoint. 
Use get_openapi first to discover available endpoints.")] + async fn fetch( + &self, + Parameters(params): Parameters, + ) -> Result { + info!("mcp: fetch {}", params.path); + + let url = match ¶ms.query { + Some(q) if !q.is_empty() => format!("{}{}?{}", self.base_url, params.path, q), + _ => format!("{}{}", self.base_url, params.path), + }; + + match minreq::get(&url).send() { + Ok(response) => { + let body = response.as_str().unwrap_or("").to_string(); + Ok(CallToolResult::success(vec![Content::text(body)])) + } + Err(e) => Err(McpError::internal_error( + format!("HTTP request failed: {e}"), + None, + )), + } } } @@ -149,17 +83,13 @@ impl ServerHandler for MCP { server_info: Implementation::from_build_env(), instructions: Some( " -This server provides an interface to communicate with a running instance of the Bitcoin Research Kit (also called brk or BRK). +Bitcoin Research Kit (BRK) - Bitcoin on-chain metrics and market data. -Multiple tools are at your disposal including ones to fetch all sorts of Bitcoin on-chain metrics and market prices. +Workflow: +1. Call get_openapi to get the full API specification +2. Use fetch to call any endpoint described in the spec -If you're unsure which datasets are available, try out different tools before browsing the web. Each tool gives important information about BRK's capabilities. - -Vectors can also be called 'Vecs', 'Arrays' or 'Datasets', they can all be used interchangeably. - -An 'Index' (or indexes) is the timeframe of a dataset. - -'VecId' (or vecids) are the name of the dataset and what it represents. +Example: fetch with path=\"/api/metrics/list\" to list metrics. 
" .to_string(), ), diff --git a/crates/brk_mcp/src/route.rs b/crates/brk_mcp/src/route.rs index 347c98ef2..f16b89b58 100644 --- a/crates/brk_mcp/src/route.rs +++ b/crates/brk_mcp/src/route.rs @@ -1,39 +1,26 @@ -use aide::axum::ApiRouter; -use brk_query::AsyncQuery; +use std::sync::Arc; + +use axum::Router; use brk_rmcp::transport::{ StreamableHttpServerConfig, streamable_http_server::{StreamableHttpService, session::local::LocalSessionManager}, }; - use log::info; use crate::MCP; -pub trait MCPRoutes { - fn add_mcp_routes(self, query: &AsyncQuery, mcp: bool) -> Self; -} - -impl MCPRoutes for ApiRouter -where - T: Clone + Send + Sync + 'static, -{ - fn add_mcp_routes(self, query: &AsyncQuery, mcp: bool) -> Self { - if !mcp { - return self; - } - - let query = query.clone(); - let service = StreamableHttpService::new( - move || Ok(MCP::new(&query)), - LocalSessionManager::default().into(), - StreamableHttpServerConfig { - stateful_mode: false, - ..Default::default() - }, - ); - - info!("Setting MCP..."); - - self.nest_service("/mcp", service) - } +/// Create an MCP service router. 
+pub fn mcp_router(base_url: String, openapi_json: Arc) -> Router { + info!("Setting up MCP..."); + + let service = StreamableHttpService::new( + move || Ok(MCP::new(base_url.clone(), openapi_json.as_str())), + LocalSessionManager::default().into(), + StreamableHttpServerConfig { + stateful_mode: false, + ..Default::default() + }, + ); + + Router::new().nest_service("/mcp", service) } diff --git a/crates/brk_query/src/impl/address.rs b/crates/brk_query/src/impl/address.rs index 51c78d00f..1d3e3e403 100644 --- a/crates/brk_query/src/impl/address.rs +++ b/crates/brk_query/src/impl/address.rs @@ -7,7 +7,7 @@ use brk_types::{ AddressIndexTxIndex, AddressStats, AnyAddressDataIndexEnum, OutputType, Sats, TxIndex, TxStatus, Txid, TypeIndex, Unit, Utxo, Vout, }; -use vecdb::TypedVecIterator; +use vecdb::{IterableVec, TypedVecIterator}; use crate::Query; @@ -169,7 +169,7 @@ impl Query { let mut txindex_to_txid_iter = vecs.tx.txindex_to_txid.iter()?; let mut txindex_to_height_iter = vecs.tx.txindex_to_height.iter()?; let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; - let mut txoutindex_to_value_iter = vecs.txout.txoutindex_to_value.iter()?; + let mut txoutindex_to_value_iter = vecs.txout.txoutindex_to_value.iter(); let mut height_to_blockhash_iter = vecs.block.height_to_blockhash.iter()?; let mut height_to_timestamp_iter = vecs.block.height_to_timestamp.iter()?; diff --git a/crates/brk_query/src/impl/transaction.rs b/crates/brk_query/src/impl/transaction.rs index 6f609edde..6c3bf1af3 100644 --- a/crates/brk_query/src/impl/transaction.rs +++ b/crates/brk_query/src/impl/transaction.rs @@ -6,7 +6,7 @@ use brk_types::{ Sats, Transaction, TxIn, TxInIndex, TxIndex, TxOut, TxOutspend, TxStatus, Txid, TxidParam, TxidPrefix, Vin, Vout, Weight, }; -use vecdb::{GenericStoredVec, TypedVecIterator}; +use vecdb::{GenericStoredVec, IterableVec, TypedVecIterator}; use crate::Query; @@ -119,9 +119,10 @@ impl Query { let txoutindex = first_txoutindex + 
vout; // Look up spend status - let computer = self.computer(); - let txinindex = computer - .stateful + let indexer = self.indexer(); + let txinindex = indexer + .vecs + .txout .txoutindex_to_txinindex .read_once(txoutindex)?; @@ -167,8 +168,7 @@ impl Query { let output_count = usize::from(next_first_txoutindex) - usize::from(first_txoutindex); // Get spend status for each output - let computer = self.computer(); - let mut txoutindex_to_txinindex_iter = computer.stateful.txoutindex_to_txinindex.iter()?; + let mut txoutindex_to_txinindex_iter = indexer.vecs.txout.txoutindex_to_txinindex.iter()?; let mut outspends = Vec::with_capacity(output_count); for i in 0..output_count { @@ -220,7 +220,7 @@ impl Query { let mut txindex_to_first_txoutindex_iter = indexer.vecs.tx.txindex_to_first_txoutindex.iter()?; let mut txinindex_to_outpoint_iter = indexer.vecs.txin.txinindex_to_outpoint.iter()?; - let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?; + let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter(); // Build inputs with prevout information let input: Vec = tx diff --git a/crates/brk_server/src/lib.rs b/crates/brk_server/src/lib.rs index 0d6a97f02..b8fa7b7f0 100644 --- a/crates/brk_server/src/lib.rs +++ b/crates/brk_server/src/lib.rs @@ -14,7 +14,7 @@ use axum::{ }; use brk_error::Result; use brk_logger::OwoColorize; -use brk_mcp::route::MCPRoutes; +use brk_mcp::route::mcp_router; use brk_query::AsyncQuery; use log::{error, info}; use quick_cache::sync::Cache; @@ -92,7 +92,6 @@ impl Server { let vecs = state.query.inner().vecs(); let router = ApiRouter::new() .add_api_routes() - .add_mcp_routes(&state.query, mcp) .add_files_routes(state.path.as_ref()) .route( "/discord", @@ -136,24 +135,34 @@ impl Server { let mut openapi = create_openapi(); let router = router.finish_api(&mut openapi); - let clients_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + let workspace_root: PathBuf = 
PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() + .and_then(|p| p.parent()) .unwrap() - .join("brk_binder") - .join("clients"); - if clients_path.exists() { - let openapi_json = serde_json::to_string(&openapi).unwrap(); - let result = panic::catch_unwind(panic::AssertUnwindSafe(|| { - brk_binder::generate_clients(vecs, &openapi_json, &clients_path) - })); + .into(); + let output_paths = brk_binder::ClientOutputPaths::new() + .rust(workspace_root.join("crates/brk_client/src/lib.rs")) + .javascript(workspace_root.join("modules/brk-client/index.js")) + .python(workspace_root.join("packages/brk_client/__init__.py")); - match result { - Ok(Ok(())) => info!("Generated clients at {}", clients_path.display()), - Ok(Err(e)) => error!("Failed to generate clients: {e}"), - Err(_) => error!("Client generation panicked"), - } + let openapi_json = Arc::new(serde_json::to_string(&openapi).unwrap()); + let result = panic::catch_unwind(panic::AssertUnwindSafe(|| { + brk_binder::generate_clients(vecs, &openapi_json, &output_paths) + })); + + match result { + Ok(Ok(())) => info!("Generated clients"), + Ok(Err(e)) => error!("Failed to generate clients: {e}"), + Err(_) => error!("Client generation panicked"), } + let router = if mcp { + let base_url = format!("http://127.0.0.1:{port}"); + router.merge(mcp_router(base_url, openapi_json)) + } else { + router + }; + serve( listener, router diff --git a/crates/brk_store/src/lib.rs b/crates/brk_store/src/lib.rs index d4f8eeace..685d5fe4d 100644 --- a/crates/brk_store/src/lib.rs +++ b/crates/brk_store/src/lib.rs @@ -277,6 +277,7 @@ where ingestion.write(ByteView::from(key), ByteView::from(value))?; } Item::Tomb(key) => { + // TODO: switch to write_weak_tombstone when lsm-tree ingestion API supports it ingestion.write_tombstone(ByteView::from(key))?; } } diff --git a/crates/brk_store/src/meta.rs b/crates/brk_store/src/meta.rs index 58bab452c..2ee788673 100644 --- a/crates/brk_store/src/meta.rs +++ b/crates/brk_store/src/meta.rs @@ -3,7 
+3,7 @@ use std::{ path::{Path, PathBuf}, }; -use brk_error::Result; +use brk_error::{Error, Result}; use brk_types::Version; use fjall::{Database, Keyspace}; @@ -30,16 +30,14 @@ impl StoreMeta { let partition = open_partition_handle()?; - if Version::try_from(Self::path_version_(path).as_path()) - .is_ok_and(|prev_version| version != prev_version) + if let Ok(prev_version) = Version::try_from(Self::path_version_(path).as_path()) + && version != prev_version { - todo!(); - // fs::remove_dir_all(path)?; - // // Doesn't exist - // // database.delete_partition(partition)?; - // fs::create_dir(path)?; - // database.persist(PersistMode::SyncAll)?; - // partition = open_partition_handle()?; + return Err(Error::VersionMismatch { + path: path.to_path_buf(), + expected: u64::from(version) as usize, + found: u64::from(prev_version) as usize, + }); } let slf = Self { diff --git a/crates/brk_types/src/addressbytes.rs b/crates/brk_types/src/addressbytes.rs index 20c81f05b..8681b8d3d 100644 --- a/crates/brk_types/src/addressbytes.rs +++ b/crates/brk_types/src/addressbytes.rs @@ -142,11 +142,10 @@ impl TryFrom<(&ScriptBuf, OutputType)> for AddressBytes { let bytes = &script.as_bytes()[2..]; Ok(Self::P2A(Box::new(P2ABytes::from(bytes)))) } - OutputType::P2MS => Err(Error::WrongAddressType), - OutputType::Unknown => Err(Error::WrongAddressType), - OutputType::Empty => Err(Error::WrongAddressType), - OutputType::OpReturn => Err(Error::WrongAddressType), - _ => unreachable!(), + OutputType::P2MS + | OutputType::Unknown + | OutputType::Empty + | OutputType::OpReturn => Err(Error::WrongAddressType), } } } diff --git a/crates/brk_types/src/lib.rs b/crates/brk_types/src/lib.rs index d3dc3cd17..d18a2279d 100644 --- a/crates/brk_types/src/lib.rs +++ b/crates/brk_types/src/lib.rs @@ -138,6 +138,7 @@ mod txin; mod txindex; mod txinindex; mod txout; +mod txoutdata; mod txoutindex; mod txoutspend; mod txstatus; @@ -292,6 +293,7 @@ pub use txin::*; pub use txindex::*; pub use txinindex::*; pub 
use txout::*; +pub use txoutdata::*; pub use txoutindex::*; pub use txoutspend::*; pub use txstatus::*; diff --git a/crates/brk_types/src/outputtype.rs b/crates/brk_types/src/outputtype.rs index f928b289a..554bf10f8 100644 --- a/crates/brk_types/src/outputtype.rs +++ b/crates/brk_types/src/outputtype.rs @@ -3,765 +3,35 @@ use brk_error::Error; use schemars::JsonSchema; use serde::Serialize; use strum::Display; -use vecdb::{Bytes, Formattable}; +use vecdb::{Bytes, Formattable, Pco, TransparentPco}; use crate::AddressBytes; -#[derive( - Debug, Clone, Copy, Display, PartialEq, Eq, PartialOrd, Ord, Serialize, JsonSchema, Hash, -)] +#[derive(Debug, Clone, Copy, Display, PartialEq, Eq, PartialOrd, Ord, Serialize, JsonSchema, Hash)] #[serde(rename_all = "lowercase")] #[strum(serialize_all = "lowercase")] -#[repr(u8)] +#[repr(u16)] /// Type (P2PKH, P2WPKH, P2SH, P2TR, etc.) pub enum OutputType { - P2PK65, - P2PK33, - P2PKH, - P2MS, - P2SH, - OpReturn, - P2WPKH, - P2WSH, - P2TR, - P2A, - #[doc(hidden)] - #[schemars(skip)] - Dummy10, - #[doc(hidden)] - #[schemars(skip)] - Dummy11, - #[doc(hidden)] - #[schemars(skip)] - Dummy12, - #[doc(hidden)] - #[schemars(skip)] - Dummy13, - #[doc(hidden)] - #[schemars(skip)] - Dummy14, - #[doc(hidden)] - #[schemars(skip)] - Dummy15, - #[doc(hidden)] - #[schemars(skip)] - Dummy16, - #[doc(hidden)] - #[schemars(skip)] - Dummy17, - #[doc(hidden)] - #[schemars(skip)] - Dummy18, - #[doc(hidden)] - #[schemars(skip)] - Dummy19, - #[doc(hidden)] - #[schemars(skip)] - Dummy20, - #[doc(hidden)] - #[schemars(skip)] - Dummy21, - #[doc(hidden)] - #[schemars(skip)] - Dummy22, - #[doc(hidden)] - #[schemars(skip)] - Dummy23, - #[doc(hidden)] - #[schemars(skip)] - Dummy24, - #[doc(hidden)] - #[schemars(skip)] - Dummy25, - #[doc(hidden)] - #[schemars(skip)] - Dummy26, - #[doc(hidden)] - #[schemars(skip)] - Dummy27, - #[doc(hidden)] - #[schemars(skip)] - Dummy28, - #[doc(hidden)] - #[schemars(skip)] - Dummy29, - #[doc(hidden)] - #[schemars(skip)] - Dummy30, 
- #[doc(hidden)] - #[schemars(skip)] - Dummy31, - #[doc(hidden)] - #[schemars(skip)] - Dummy32, - #[doc(hidden)] - #[schemars(skip)] - Dummy33, - #[doc(hidden)] - #[schemars(skip)] - Dummy34, - #[doc(hidden)] - #[schemars(skip)] - Dummy35, - #[doc(hidden)] - #[schemars(skip)] - Dummy36, - #[doc(hidden)] - #[schemars(skip)] - Dummy37, - #[doc(hidden)] - #[schemars(skip)] - Dummy38, - #[doc(hidden)] - #[schemars(skip)] - Dummy39, - #[doc(hidden)] - #[schemars(skip)] - Dummy40, - #[doc(hidden)] - #[schemars(skip)] - Dummy41, - #[doc(hidden)] - #[schemars(skip)] - Dummy42, - #[doc(hidden)] - #[schemars(skip)] - Dummy43, - #[doc(hidden)] - #[schemars(skip)] - Dummy44, - #[doc(hidden)] - #[schemars(skip)] - Dummy45, - #[doc(hidden)] - #[schemars(skip)] - Dummy46, - #[doc(hidden)] - #[schemars(skip)] - Dummy47, - #[doc(hidden)] - #[schemars(skip)] - Dummy48, - #[doc(hidden)] - #[schemars(skip)] - Dummy49, - #[doc(hidden)] - #[schemars(skip)] - Dummy50, - #[doc(hidden)] - #[schemars(skip)] - Dummy51, - #[doc(hidden)] - #[schemars(skip)] - Dummy52, - #[doc(hidden)] - #[schemars(skip)] - Dummy53, - #[doc(hidden)] - #[schemars(skip)] - Dummy54, - #[doc(hidden)] - #[schemars(skip)] - Dummy55, - #[doc(hidden)] - #[schemars(skip)] - Dummy56, - #[doc(hidden)] - #[schemars(skip)] - Dummy57, - #[doc(hidden)] - #[schemars(skip)] - Dummy58, - #[doc(hidden)] - #[schemars(skip)] - Dummy59, - #[doc(hidden)] - #[schemars(skip)] - Dummy60, - #[doc(hidden)] - #[schemars(skip)] - Dummy61, - #[doc(hidden)] - #[schemars(skip)] - Dummy62, - #[doc(hidden)] - #[schemars(skip)] - Dummy63, - #[doc(hidden)] - #[schemars(skip)] - Dummy64, - #[doc(hidden)] - #[schemars(skip)] - Dummy65, - #[doc(hidden)] - #[schemars(skip)] - Dummy66, - #[doc(hidden)] - #[schemars(skip)] - Dummy67, - #[doc(hidden)] - #[schemars(skip)] - Dummy68, - #[doc(hidden)] - #[schemars(skip)] - Dummy69, - #[doc(hidden)] - #[schemars(skip)] - Dummy70, - #[doc(hidden)] - #[schemars(skip)] - Dummy71, - #[doc(hidden)] - 
#[schemars(skip)] - Dummy72, - #[doc(hidden)] - #[schemars(skip)] - Dummy73, - #[doc(hidden)] - #[schemars(skip)] - Dummy74, - #[doc(hidden)] - #[schemars(skip)] - Dummy75, - #[doc(hidden)] - #[schemars(skip)] - Dummy76, - #[doc(hidden)] - #[schemars(skip)] - Dummy77, - #[doc(hidden)] - #[schemars(skip)] - Dummy78, - #[doc(hidden)] - #[schemars(skip)] - Dummy79, - #[doc(hidden)] - #[schemars(skip)] - Dummy80, - #[doc(hidden)] - #[schemars(skip)] - Dummy81, - #[doc(hidden)] - #[schemars(skip)] - Dummy82, - #[doc(hidden)] - #[schemars(skip)] - Dummy83, - #[doc(hidden)] - #[schemars(skip)] - Dummy84, - #[doc(hidden)] - #[schemars(skip)] - Dummy85, - #[doc(hidden)] - #[schemars(skip)] - Dummy86, - #[doc(hidden)] - #[schemars(skip)] - Dummy87, - #[doc(hidden)] - #[schemars(skip)] - Dummy88, - #[doc(hidden)] - #[schemars(skip)] - Dummy89, - #[doc(hidden)] - #[schemars(skip)] - Dummy90, - #[doc(hidden)] - #[schemars(skip)] - Dummy91, - #[doc(hidden)] - #[schemars(skip)] - Dummy92, - #[doc(hidden)] - #[schemars(skip)] - Dummy93, - #[doc(hidden)] - #[schemars(skip)] - Dummy94, - #[doc(hidden)] - #[schemars(skip)] - Dummy95, - #[doc(hidden)] - #[schemars(skip)] - Dummy96, - #[doc(hidden)] - #[schemars(skip)] - Dummy97, - #[doc(hidden)] - #[schemars(skip)] - Dummy98, - #[doc(hidden)] - #[schemars(skip)] - Dummy99, - #[doc(hidden)] - #[schemars(skip)] - Dummy100, - #[doc(hidden)] - #[schemars(skip)] - Dummy101, - #[doc(hidden)] - #[schemars(skip)] - Dummy102, - #[doc(hidden)] - #[schemars(skip)] - Dummy103, - #[doc(hidden)] - #[schemars(skip)] - Dummy104, - #[doc(hidden)] - #[schemars(skip)] - Dummy105, - #[doc(hidden)] - #[schemars(skip)] - Dummy106, - #[doc(hidden)] - #[schemars(skip)] - Dummy107, - #[doc(hidden)] - #[schemars(skip)] - Dummy108, - #[doc(hidden)] - #[schemars(skip)] - Dummy109, - #[doc(hidden)] - #[schemars(skip)] - Dummy110, - #[doc(hidden)] - #[schemars(skip)] - Dummy111, - #[doc(hidden)] - #[schemars(skip)] - Dummy112, - #[doc(hidden)] - #[schemars(skip)] 
- Dummy113, - #[doc(hidden)] - #[schemars(skip)] - Dummy114, - #[doc(hidden)] - #[schemars(skip)] - Dummy115, - #[doc(hidden)] - #[schemars(skip)] - Dummy116, - #[doc(hidden)] - #[schemars(skip)] - Dummy117, - #[doc(hidden)] - #[schemars(skip)] - Dummy118, - #[doc(hidden)] - #[schemars(skip)] - Dummy119, - #[doc(hidden)] - #[schemars(skip)] - Dummy120, - #[doc(hidden)] - #[schemars(skip)] - Dummy121, - #[doc(hidden)] - #[schemars(skip)] - Dummy122, - #[doc(hidden)] - #[schemars(skip)] - Dummy123, - #[doc(hidden)] - #[schemars(skip)] - Dummy124, - #[doc(hidden)] - #[schemars(skip)] - Dummy125, - #[doc(hidden)] - #[schemars(skip)] - Dummy126, - #[doc(hidden)] - #[schemars(skip)] - Dummy127, - #[doc(hidden)] - #[schemars(skip)] - Dummy128, - #[doc(hidden)] - #[schemars(skip)] - Dummy129, - #[doc(hidden)] - #[schemars(skip)] - Dummy130, - #[doc(hidden)] - #[schemars(skip)] - Dummy131, - #[doc(hidden)] - #[schemars(skip)] - Dummy132, - #[doc(hidden)] - #[schemars(skip)] - Dummy133, - #[doc(hidden)] - #[schemars(skip)] - Dummy134, - #[doc(hidden)] - #[schemars(skip)] - Dummy135, - #[doc(hidden)] - #[schemars(skip)] - Dummy136, - #[doc(hidden)] - #[schemars(skip)] - Dummy137, - #[doc(hidden)] - #[schemars(skip)] - Dummy138, - #[doc(hidden)] - #[schemars(skip)] - Dummy139, - #[doc(hidden)] - #[schemars(skip)] - Dummy140, - #[doc(hidden)] - #[schemars(skip)] - Dummy141, - #[doc(hidden)] - #[schemars(skip)] - Dummy142, - #[doc(hidden)] - #[schemars(skip)] - Dummy143, - #[doc(hidden)] - #[schemars(skip)] - Dummy144, - #[doc(hidden)] - #[schemars(skip)] - Dummy145, - #[doc(hidden)] - #[schemars(skip)] - Dummy146, - #[doc(hidden)] - #[schemars(skip)] - Dummy147, - #[doc(hidden)] - #[schemars(skip)] - Dummy148, - #[doc(hidden)] - #[schemars(skip)] - Dummy149, - #[doc(hidden)] - #[schemars(skip)] - Dummy150, - #[doc(hidden)] - #[schemars(skip)] - Dummy151, - #[doc(hidden)] - #[schemars(skip)] - Dummy152, - #[doc(hidden)] - #[schemars(skip)] - Dummy153, - #[doc(hidden)] - 
#[schemars(skip)] - Dummy154, - #[doc(hidden)] - #[schemars(skip)] - Dummy155, - #[doc(hidden)] - #[schemars(skip)] - Dummy156, - #[doc(hidden)] - #[schemars(skip)] - Dummy157, - #[doc(hidden)] - #[schemars(skip)] - Dummy158, - #[doc(hidden)] - #[schemars(skip)] - Dummy159, - #[doc(hidden)] - #[schemars(skip)] - Dummy160, - #[doc(hidden)] - #[schemars(skip)] - Dummy161, - #[doc(hidden)] - #[schemars(skip)] - Dummy162, - #[doc(hidden)] - #[schemars(skip)] - Dummy163, - #[doc(hidden)] - #[schemars(skip)] - Dummy164, - #[doc(hidden)] - #[schemars(skip)] - Dummy165, - #[doc(hidden)] - #[schemars(skip)] - Dummy166, - #[doc(hidden)] - #[schemars(skip)] - Dummy167, - #[doc(hidden)] - #[schemars(skip)] - Dummy168, - #[doc(hidden)] - #[schemars(skip)] - Dummy169, - #[doc(hidden)] - #[schemars(skip)] - Dummy170, - #[doc(hidden)] - #[schemars(skip)] - Dummy171, - #[doc(hidden)] - #[schemars(skip)] - Dummy172, - #[doc(hidden)] - #[schemars(skip)] - Dummy173, - #[doc(hidden)] - #[schemars(skip)] - Dummy174, - #[doc(hidden)] - #[schemars(skip)] - Dummy175, - #[doc(hidden)] - #[schemars(skip)] - Dummy176, - #[doc(hidden)] - #[schemars(skip)] - Dummy177, - #[doc(hidden)] - #[schemars(skip)] - Dummy178, - #[doc(hidden)] - #[schemars(skip)] - Dummy179, - #[doc(hidden)] - #[schemars(skip)] - Dummy180, - #[doc(hidden)] - #[schemars(skip)] - Dummy181, - #[doc(hidden)] - #[schemars(skip)] - Dummy182, - #[doc(hidden)] - #[schemars(skip)] - Dummy183, - #[doc(hidden)] - #[schemars(skip)] - Dummy184, - #[doc(hidden)] - #[schemars(skip)] - Dummy185, - #[doc(hidden)] - #[schemars(skip)] - Dummy186, - #[doc(hidden)] - #[schemars(skip)] - Dummy187, - #[doc(hidden)] - #[schemars(skip)] - Dummy188, - #[doc(hidden)] - #[schemars(skip)] - Dummy189, - #[doc(hidden)] - #[schemars(skip)] - Dummy190, - #[doc(hidden)] - #[schemars(skip)] - Dummy191, - #[doc(hidden)] - #[schemars(skip)] - Dummy192, - #[doc(hidden)] - #[schemars(skip)] - Dummy193, - #[doc(hidden)] - #[schemars(skip)] - Dummy194, - 
#[doc(hidden)] - #[schemars(skip)] - Dummy195, - #[doc(hidden)] - #[schemars(skip)] - Dummy196, - #[doc(hidden)] - #[schemars(skip)] - Dummy197, - #[doc(hidden)] - #[schemars(skip)] - Dummy198, - #[doc(hidden)] - #[schemars(skip)] - Dummy199, - #[doc(hidden)] - #[schemars(skip)] - Dummy200, - #[doc(hidden)] - #[schemars(skip)] - Dummy201, - #[doc(hidden)] - #[schemars(skip)] - Dummy202, - #[doc(hidden)] - #[schemars(skip)] - Dummy203, - #[doc(hidden)] - #[schemars(skip)] - Dummy204, - #[doc(hidden)] - #[schemars(skip)] - Dummy205, - #[doc(hidden)] - #[schemars(skip)] - Dummy206, - #[doc(hidden)] - #[schemars(skip)] - Dummy207, - #[doc(hidden)] - #[schemars(skip)] - Dummy208, - #[doc(hidden)] - #[schemars(skip)] - Dummy209, - #[doc(hidden)] - #[schemars(skip)] - Dummy210, - #[doc(hidden)] - #[schemars(skip)] - Dummy211, - #[doc(hidden)] - #[schemars(skip)] - Dummy212, - #[doc(hidden)] - #[schemars(skip)] - Dummy213, - #[doc(hidden)] - #[schemars(skip)] - Dummy214, - #[doc(hidden)] - #[schemars(skip)] - Dummy215, - #[doc(hidden)] - #[schemars(skip)] - Dummy216, - #[doc(hidden)] - #[schemars(skip)] - Dummy217, - #[doc(hidden)] - #[schemars(skip)] - Dummy218, - #[doc(hidden)] - #[schemars(skip)] - Dummy219, - #[doc(hidden)] - #[schemars(skip)] - Dummy220, - #[doc(hidden)] - #[schemars(skip)] - Dummy221, - #[doc(hidden)] - #[schemars(skip)] - Dummy222, - #[doc(hidden)] - #[schemars(skip)] - Dummy223, - #[doc(hidden)] - #[schemars(skip)] - Dummy224, - #[doc(hidden)] - #[schemars(skip)] - Dummy225, - #[doc(hidden)] - #[schemars(skip)] - Dummy226, - #[doc(hidden)] - #[schemars(skip)] - Dummy227, - #[doc(hidden)] - #[schemars(skip)] - Dummy228, - #[doc(hidden)] - #[schemars(skip)] - Dummy229, - #[doc(hidden)] - #[schemars(skip)] - Dummy230, - #[doc(hidden)] - #[schemars(skip)] - Dummy231, - #[doc(hidden)] - #[schemars(skip)] - Dummy232, - #[doc(hidden)] - #[schemars(skip)] - Dummy233, - #[doc(hidden)] - #[schemars(skip)] - Dummy234, - #[doc(hidden)] - #[schemars(skip)] - 
Dummy235, - #[doc(hidden)] - #[schemars(skip)] - Dummy236, - #[doc(hidden)] - #[schemars(skip)] - Dummy237, - #[doc(hidden)] - #[schemars(skip)] - Dummy238, - #[doc(hidden)] - #[schemars(skip)] - Dummy239, - #[doc(hidden)] - #[schemars(skip)] - Dummy240, - #[doc(hidden)] - #[schemars(skip)] - Dummy241, - #[doc(hidden)] - #[schemars(skip)] - Dummy242, - #[doc(hidden)] - #[schemars(skip)] - Dummy243, - #[doc(hidden)] - #[schemars(skip)] - Dummy244, - #[doc(hidden)] - #[schemars(skip)] - Dummy245, - #[doc(hidden)] - #[schemars(skip)] - Dummy246, - #[doc(hidden)] - #[schemars(skip)] - Dummy247, - #[doc(hidden)] - #[schemars(skip)] - Dummy248, - #[doc(hidden)] - #[schemars(skip)] - Dummy249, - #[doc(hidden)] - #[schemars(skip)] - Dummy250, - #[doc(hidden)] - #[schemars(skip)] - Dummy251, - #[doc(hidden)] - #[schemars(skip)] - Dummy252, - #[doc(hidden)] - #[schemars(skip)] - Dummy253, - Empty = 254, - Unknown = 255, + P2PK65 = 0, + P2PK33 = 1, + P2PKH = 2, + P2MS = 3, + P2SH = 4, + OpReturn = 5, + P2WPKH = 6, + P2WSH = 7, + P2TR = 8, + P2A = 9, + Empty = u16::MAX - 1, + Unknown = u16::MAX, } impl OutputType { + fn is_valid(value: u16) -> bool { + value <= Self::P2A as u16 || value >= Self::Empty as u16 + } + pub fn is_spendable(&self) -> bool { match self { Self::P2PK65 => true, @@ -776,7 +46,6 @@ impl OutputType { Self::P2A => true, Self::Empty => true, Self::Unknown => true, - _ => unreachable!(), } } @@ -794,7 +63,6 @@ impl OutputType { Self::P2A => true, Self::Empty => false, Self::Unknown => false, - _ => unreachable!(), } } @@ -924,7 +192,7 @@ impl Bytes for OutputType { #[inline] fn to_bytes(&self) -> Self::Array { - [*self as u8] + (*self as u16).to_le_bytes() } #[inline] @@ -935,9 +203,18 @@ impl Bytes for OutputType { received: bytes.len(), }); }; - // SAFETY: OutputType is repr(u8) and we're transmuting from u8 - // All values 0-255 are valid (includes dummy variants) - let s: Self = unsafe { std::mem::transmute(bytes[0]) }; + let value = 
u16::from_le_bytes([bytes[0], bytes[1]]); + if !Self::is_valid(value) { + return Err(vecdb::Error::InvalidArgument("invalid OutputType")); + } + // SAFETY: We validated that value is a valid variant + let s: Self = unsafe { std::mem::transmute(value) }; Ok(s) } } + +impl Pco for OutputType { + type NumberType = u16; +} + +impl TransparentPco for OutputType {} diff --git a/crates/brk_types/src/sats.rs b/crates/brk_types/src/sats.rs index 820b00acb..93edd064d 100644 --- a/crates/brk_types/src/sats.rs +++ b/crates/brk_types/src/sats.rs @@ -50,6 +50,7 @@ impl Sats { pub const _100K_BTC: Self = Self(100_000_00_000_000); pub const ONE_BTC: Self = Self(1_00_000_000); pub const MAX: Self = Self(u64::MAX); + pub const COINBASE: Self = Self(u64::MAX); pub const FIFTY_BTC: Self = Self(50_00_000_000); pub const ONE_BTC_U128: u128 = 1_00_000_000; diff --git a/crates/brk_types/src/txoutdata.rs b/crates/brk_types/src/txoutdata.rs new file mode 100644 index 000000000..d4701e0b5 --- /dev/null +++ b/crates/brk_types/src/txoutdata.rs @@ -0,0 +1,100 @@ +//! Combined transaction output data for efficient access. + +use std::fmt::{self, Display}; +use std::mem::size_of; + +use schemars::JsonSchema; +use serde::Serialize; +use vecdb::{Bytes, Formattable}; + +use crate::{OutputType, Sats, TypeIndex}; + +/// Core transaction output data: value, type, and type index. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, JsonSchema)] +#[repr(C)] +pub struct TxOutData { + pub value: Sats, + pub typeindex: TypeIndex, + pub outputtype: OutputType, + _padding: u16, +} + +impl TxOutData { + #[inline] + pub const fn new(value: Sats, outputtype: OutputType, typeindex: TypeIndex) -> Self { + Self { + value, + typeindex, + outputtype, + _padding: 0, + } + } +} + +impl Bytes for TxOutData { + type Array = [u8; size_of::()]; + + #[inline] + fn to_bytes(&self) -> Self::Array { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.value.to_bytes()); + bytes[8..12].copy_from_slice(&self.typeindex.to_bytes()); + bytes[12..14].copy_from_slice(&self.outputtype.to_bytes()); + // bytes[14..16] is padding, already zero + bytes + } + + #[inline] + fn from_bytes(bytes: &[u8]) -> vecdb::Result { + if bytes.len() != size_of::() { + return Err(vecdb::Error::WrongLength { + expected: size_of::(), + received: bytes.len(), + }); + } + Ok(Self { + value: Sats::from_bytes(&bytes[0..8])?, + typeindex: TypeIndex::from_bytes(&bytes[8..12])?, + outputtype: OutputType::from_bytes(&bytes[12..14])?, + _padding: 0, + }) + } +} + +impl Display for TxOutData { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "value: {}, outputtype: {}, typeindex: {}", + self.value, self.outputtype, self.typeindex + ) + } +} + +impl Formattable for TxOutData { + fn may_need_escaping() -> bool { + true + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_size() { + assert_eq!(size_of::(), 16); + } + + #[test] + fn test_roundtrip() { + let data = TxOutData::new( + Sats::from(123456789u64), + OutputType::P2TR, + TypeIndex::from(42u32), + ); + let bytes = data.to_bytes(); + let decoded = TxOutData::from_bytes(&bytes).unwrap(); + assert_eq!(data, decoded); + } +} diff --git a/crates/brk_types/src/typeindex.rs b/crates/brk_types/src/typeindex.rs index 10a56bf4e..2320304f6 100644 --- a/crates/brk_types/src/typeindex.rs +++ 
b/crates/brk_types/src/typeindex.rs @@ -24,6 +24,8 @@ use vecdb::{CheckedSub, Formattable, Pco}; pub struct TypeIndex(u32); impl TypeIndex { + pub const COINBASE: Self = Self(u32::MAX); + pub fn new(i: u32) -> Self { Self(i) } diff --git a/modules/brk-client/.gitignore b/modules/brk-client/.gitignore index 86d4c2dd3..435c24fa8 100644 --- a/modules/brk-client/.gitignore +++ b/modules/brk-client/.gitignore @@ -1 +1,2 @@ generated +index.js diff --git a/modules/brk-client/idle.js b/modules/brk-client/idle.js deleted file mode 100644 index 22dcee191..000000000 --- a/modules/brk-client/idle.js +++ /dev/null @@ -1,11 +0,0 @@ -/** - * @param {VoidFunction} callback - * @param {number} [timeout = 1] - */ -export function runWhenIdle(callback, timeout = 1) { - if ("requestIdleCallback" in window) { - requestIdleCallback(callback); - } else { - setTimeout(callback, timeout); - } -} diff --git a/modules/brk-client/index.js b/modules/brk-client/index.js index 7a9dd388b..dc728ac43 100644 --- a/modules/brk-client/index.js +++ b/modules/brk-client/index.js @@ -1,132 +1,4587 @@ +// Auto-generated BRK JavaScript client +// Do not edit manually + +// Type definitions + +/** @typedef {string} Address */ /** - * @import { IndexName } from "./generated/metrics" - * @import { Metric } from './metrics' - * - * @typedef {ReturnType} BRK + * @typedef {Object} AddressChainStats + * @property {number} funded_txo_count + * @property {Sats} funded_txo_sum + * @property {number} spent_txo_count + * @property {Sats} spent_txo_sum + * @property {number} tx_count + * @property {TypeIndex} type_index */ - -// client.metrics.catalog.a.b.c() -> string (uncompress inside) - -import { runWhenIdle } from "./idle"; - -import { POOL_ID_TO_POOL_NAME } from "./generated/pools"; -import { INDEXES } from "./generated/metrics"; -import { hasMetric, getIndexesFromMetric } from "./metrics"; -import { VERSION } from "./generated/version"; - -const CACHE_NAME = "__BRK_CLIENT__"; +/** + * @typedef {Object} 
AddressMempoolStats + * @property {number} funded_txo_count + * @property {Sats} funded_txo_sum + * @property {number} spent_txo_count + * @property {Sats} spent_txo_sum + * @property {number} tx_count + */ +/** + * @typedef {Object} AddressParam + * @property {Address} address + */ +/** + * @typedef {Object} AddressStats + * @property {Address} address + * @property {AddressChainStats} chain_stats + * @property {(AddressMempoolStats|null)=} mempool_stats + */ +/** + * @typedef {Object} AddressTxidsParam + * @property {(Txid|null)=} after_txid + * @property {number=} limit + */ +/** + * @typedef {Object} AddressValidation + * @property {boolean} isvalid + * @property {?string=} address + * @property {?string=} scriptPubKey + * @property {?boolean=} isscript + * @property {?boolean=} iswitness + * @property {?number=} witness_version + * @property {?string=} witness_program + */ +/** @typedef {TypeIndex} AnyAddressIndex */ +/** @typedef {number} Bitcoin */ +/** @typedef {number} BlkPosition */ +/** + * @typedef {Object} BlockCountParam + * @property {number} block_count + */ +/** + * @typedef {Object} BlockFeesEntry + * @property {Height} avgHeight + * @property {Timestamp} timestamp + * @property {Sats} avgFees + */ +/** @typedef {string} BlockHash */ +/** + * @typedef {Object} BlockHashParam + * @property {BlockHash} hash + */ +/** + * @typedef {Object} BlockHashStartIndex + * @property {BlockHash} hash + * @property {TxIndex} start_index + */ +/** + * @typedef {Object} BlockHashTxIndex + * @property {BlockHash} hash + * @property {TxIndex} index + */ +/** + * @typedef {Object} BlockInfo + * @property {BlockHash} id + * @property {Height} height + * @property {number} tx_count + * @property {number} size + * @property {Weight} weight + * @property {Timestamp} timestamp + * @property {number} difficulty + */ +/** + * @typedef {Object} BlockRewardsEntry + * @property {number} avgHeight + * @property {number} timestamp + * @property {number} avgRewards + */ +/** + * 
@typedef {Object} BlockSizeEntry + * @property {number} avgHeight + * @property {number} timestamp + * @property {number} avgSize + */ +/** + * @typedef {Object} BlockSizesWeights + * @property {BlockSizeEntry[]} sizes + * @property {BlockWeightEntry[]} weights + */ +/** + * @typedef {Object} BlockStatus + * @property {boolean} in_best_chain + * @property {(Height|null)=} height + * @property {(BlockHash|null)=} next_best + */ +/** + * @typedef {Object} BlockTimestamp + * @property {Height} height + * @property {BlockHash} hash + * @property {string} timestamp + */ +/** + * @typedef {Object} BlockWeightEntry + * @property {number} avgHeight + * @property {number} timestamp + * @property {number} avgWeight + */ +/** @typedef {number} Cents */ +/** @typedef {Cents} Close */ +/** + * @typedef {Object} DataRangeFormat + * @property {?number=} from + * @property {?number=} to + * @property {?number=} count + * @property {Format=} format + */ +/** @typedef {number} Date */ +/** @typedef {number} DateIndex */ +/** @typedef {number} DecadeIndex */ +/** + * @typedef {Object} DifficultyAdjustment + * @property {number} progressPercent + * @property {number} difficultyChange + * @property {number} estimatedRetargetDate + * @property {number} remainingBlocks + * @property {number} remainingTime + * @property {number} previousRetarget + * @property {Height} nextRetargetHeight + * @property {number} timeAvg + * @property {number} adjustedTimeAvg + * @property {number} timeOffset + */ +/** + * @typedef {Object} DifficultyAdjustmentEntry + * @property {Timestamp} timestamp + * @property {Height} height + * @property {number} difficulty + * @property {number} change_percent + */ +/** + * @typedef {Object} DifficultyEntry + * @property {Timestamp} timestamp + * @property {number} difficulty + * @property {Height} height + */ +/** @typedef {number} DifficultyEpoch */ +/** @typedef {number} Dollars */ +/** + * @typedef {Object} EmptyAddressData + * @property {number} tx_count + * 
@property {number} funded_txo_count + * @property {Sats} transfered + */ +/** @typedef {TypeIndex} EmptyAddressIndex */ +/** @typedef {TypeIndex} EmptyOutputIndex */ +/** @typedef {number} FeeRate */ +/** @typedef {("json"|"csv")} Format */ +/** @typedef {number} HalvingEpoch */ +/** + * @typedef {Object} HashrateEntry + * @property {Timestamp} timestamp + * @property {number} avgHashrate + */ +/** + * @typedef {Object} HashrateSummary + * @property {HashrateEntry[]} hashrates + * @property {DifficultyEntry[]} difficulty + * @property {number} currentHashrate + * @property {number} currentDifficulty + */ +/** + * @typedef {Object} Health + * @property {string} status + * @property {string} service + * @property {string} timestamp + */ +/** @typedef {number} Height */ +/** + * @typedef {Object} HeightParam + * @property {Height} height + */ +/** @typedef {Cents} High */ +/** @typedef {("dateindex"|"decadeindex"|"difficultyepoch"|"emptyoutputindex"|"halvingepoch"|"height"|"txinindex"|"monthindex"|"opreturnindex"|"txoutindex"|"p2aaddressindex"|"p2msoutputindex"|"p2pk33addressindex"|"p2pk65addressindex"|"p2pkhaddressindex"|"p2shaddressindex"|"p2traddressindex"|"p2wpkhaddressindex"|"p2wshaddressindex"|"quarterindex"|"semesterindex"|"txindex"|"unknownoutputindex"|"weekindex"|"yearindex"|"loadedaddressindex"|"emptyaddressindex")} Index */ +/** + * @typedef {Object} IndexInfo + * @property {Index} index + * @property {string[]} aliases + */ +/** @typedef {number} Limit */ +/** + * @typedef {Object} LimitParam + * @property {Limit=} limit + */ +/** + * @typedef {Object} LoadedAddressData + * @property {number} tx_count + * @property {number} funded_txo_count + * @property {number} spent_txo_count + * @property {Sats} received + * @property {Sats} sent + * @property {Dollars} realized_cap + */ +/** @typedef {TypeIndex} LoadedAddressIndex */ +/** @typedef {Cents} Low */ +/** + * @typedef {Object} MempoolBlock + * @property {number} blockSize + * @property {number} blockVSize 
+ * @property {number} nTx + * @property {Sats} totalFees + * @property {FeeRate} medianFee + * @property {FeeRate[]} feeRange + */ +/** + * @typedef {Object} MempoolInfo + * @property {number} count + * @property {VSize} vsize + * @property {Sats} total_fee + */ +/** @typedef {string} Metric */ +/** + * @typedef {Object} MetricCount + * @property {number} distinct_metrics + * @property {number} total_endpoints + */ +/** + * @typedef {Object} MetricData + * @property {number} total + * @property {number} from + * @property {number} to + * @property {*[]} data + */ +/** + * @typedef {Object} MetricLeafWithSchema + * @property {string} name + * @property {string} value_type + * @property {Index[]} indexes + */ +/** + * @typedef {Object} MetricParam + * @property {Metric} metric + */ +/** + * @typedef {Object} MetricSelection + * @property {Metrics} metrics + * @property {Index} index + * @property {?number=} from + * @property {?number=} to + * @property {?number=} count + * @property {Format=} format + */ +/** + * @typedef {Object} MetricSelectionLegacy + * @property {Index} index + * @property {Metrics} ids + * @property {?number=} from + * @property {?number=} to + * @property {?number=} count + * @property {Format=} format + */ +/** + * @typedef {Object} MetricWithIndex + * @property {Metric} metric + * @property {Index} index + */ +/** @typedef {string} Metrics */ +/** @typedef {number} MonthIndex */ +/** + * @typedef {Object} OHLCCents + * @property {Open} open + * @property {High} high + * @property {Low} low + * @property {Close} close + */ +/** + * @typedef {Object} OHLCDollars + * @property {Open} open + * @property {High} high + * @property {Low} low + * @property {Close} close + */ +/** + * @typedef {Object} OHLCSats + * @property {Open} open + * @property {High} high + * @property {Low} low + * @property {Close} close + */ +/** @typedef {TypeIndex} OpReturnIndex */ +/** @typedef {Cents} Open */ +/** @typedef {number} OutPoint */ +/** @typedef 
{("p2pk65"|"p2pk33"|"p2pkh"|"p2ms"|"p2sh"|"opreturn"|"p2wpkh"|"p2wsh"|"p2tr"|"p2a"|"empty"|"unknown")} OutputType */ +/** @typedef {TypeIndex} P2AAddressIndex */ +/** @typedef {U8x2} P2ABytes */ +/** @typedef {TypeIndex} P2MSOutputIndex */ +/** @typedef {TypeIndex} P2PK33AddressIndex */ +/** @typedef {U8x33} P2PK33Bytes */ +/** @typedef {TypeIndex} P2PK65AddressIndex */ +/** @typedef {U8x65} P2PK65Bytes */ +/** @typedef {TypeIndex} P2PKHAddressIndex */ +/** @typedef {U8x20} P2PKHBytes */ +/** @typedef {TypeIndex} P2SHAddressIndex */ +/** @typedef {U8x20} P2SHBytes */ +/** @typedef {TypeIndex} P2TRAddressIndex */ +/** @typedef {U8x32} P2TRBytes */ +/** @typedef {TypeIndex} P2WPKHAddressIndex */ +/** @typedef {U8x20} P2WPKHBytes */ +/** @typedef {TypeIndex} P2WSHAddressIndex */ +/** @typedef {U8x32} P2WSHBytes */ +/** + * @typedef {Object} PaginatedMetrics + * @property {number} current_page + * @property {number} max_page + * @property {string[]} metrics + */ +/** + * @typedef {Object} Pagination + * @property {?number=} page + */ +/** + * @typedef {Object} PoolBlockCounts + * @property {number} all + * @property {number} 24h + * @property {number} 1w + */ +/** + * @typedef {Object} PoolBlockShares + * @property {number} all + * @property {number} 24h + * @property {number} 1w + */ +/** + * @typedef {Object} PoolDetail + * @property {PoolDetailInfo} pool + * @property {PoolBlockCounts} blockCount + * @property {PoolBlockShares} blockShare + * @property {number} estimatedHashrate + * @property {?number=} reportedHashrate + */ +/** + * @typedef {Object} PoolDetailInfo + * @property {number} id + * @property {string} name + * @property {string} link + * @property {string[]} addresses + * @property {string[]} regexes + * @property {PoolSlug} slug + */ +/** + * @typedef {Object} PoolInfo + * @property {string} name + * @property {PoolSlug} slug + * @property {number} unique_id + */ +/** @typedef 
{("unknown"|"blockfills"|"ultimuspool"|"terrapool"|"luxor"|"onethash"|"btccom"|"bitfarms"|"huobipool"|"wayicn"|"canoepool"|"btctop"|"bitcoincom"|"pool175btc"|"gbminers"|"axbt"|"asicminer"|"bitminter"|"bitcoinrussia"|"btcserv"|"simplecoinus"|"btcguild"|"eligius"|"ozcoin"|"eclipsemc"|"maxbtc"|"triplemining"|"coinlab"|"pool50btc"|"ghashio"|"stminingcorp"|"bitparking"|"mmpool"|"polmine"|"kncminer"|"bitalo"|"f2pool"|"hhtt"|"megabigpower"|"mtred"|"nmcbit"|"yourbtcnet"|"givemecoins"|"braiinspool"|"antpool"|"multicoinco"|"bcpoolio"|"cointerra"|"kanopool"|"solock"|"ckpool"|"nicehash"|"bitclub"|"bitcoinaffiliatenetwork"|"btcc"|"bwpool"|"exxbw"|"bitsolo"|"bitfury"|"twentyoneinc"|"digitalbtc"|"eightbaochi"|"mybtccoinpool"|"tbdice"|"hashpool"|"nexious"|"bravomining"|"hotpool"|"okexpool"|"bcmonster"|"onehash"|"bixin"|"tatmaspool"|"viabtc"|"connectbtc"|"batpool"|"waterhole"|"dcexploration"|"dcex"|"btpool"|"fiftyeightcoin"|"bitcoinindia"|"shawnp0wers"|"phashio"|"rigpool"|"haozhuzhu"|"sevenpool"|"miningkings"|"hashbx"|"dpool"|"rawpool"|"haominer"|"helix"|"bitcoinukraine"|"poolin"|"secretsuperstar"|"tigerpoolnet"|"sigmapoolcom"|"okpooltop"|"hummerpool"|"tangpool"|"bytepool"|"spiderpool"|"novablock"|"miningcity"|"binancepool"|"minerium"|"lubiancom"|"okkong"|"aaopool"|"emcdpool"|"foundryusa"|"sbicrypto"|"arkpool"|"purebtccom"|"marapool"|"kucoinpool"|"entrustcharitypool"|"okminer"|"titan"|"pegapool"|"btcnuggets"|"cloudhashing"|"digitalxmintsy"|"telco214"|"btcpoolparty"|"multipool"|"transactioncoinmining"|"btcdig"|"trickysbtcpool"|"btcmp"|"eobot"|"unomp"|"patels"|"gogreenlight"|"ekanembtc"|"canoe"|"tiger"|"onem1x"|"zulupool"|"secpool"|"ocean"|"whitepool"|"wk057"|"futurebitapollosolo"|"carbonnegative"|"portlandhodl"|"phoenix"|"neopool"|"maxipool"|"bitfufupool"|"luckypool"|"miningdutch"|"publicpool"|"miningsquared"|"innopolistech"|"btclab"|"parasite")} PoolSlug */ +/** + * @typedef {Object} PoolSlugParam + * @property {PoolSlug} slug + */ +/** + * @typedef {Object} PoolStats + * @property 
{number} poolId + * @property {string} name + * @property {string} link + * @property {number} blockCount + * @property {number} rank + * @property {number} emptyBlocks + * @property {PoolSlug} slug + * @property {number} share + */ +/** + * @typedef {Object} PoolsSummary + * @property {PoolStats[]} pools + * @property {number} blockCount + * @property {number} lastEstimatedHashrate + */ +/** @typedef {number} QuarterIndex */ +/** @typedef {number} RawLockTime */ +/** + * @typedef {Object} RecommendedFees + * @property {FeeRate} fastestFee + * @property {FeeRate} halfHourFee + * @property {FeeRate} hourFee + * @property {FeeRate} economyFee + * @property {FeeRate} minimumFee + */ +/** + * @typedef {Object} RewardStats + * @property {Height} startBlock + * @property {Height} endBlock + * @property {Sats} totalReward + * @property {Sats} totalFee + * @property {number} totalTx + */ +/** @typedef {number} Sats */ +/** @typedef {number} SemesterIndex */ +/** @typedef {number} StoredBool */ +/** @typedef {number} StoredF32 */ +/** @typedef {number} StoredF64 */ +/** @typedef {number} StoredI16 */ +/** @typedef {number} StoredU16 */ +/** @typedef {number} StoredU32 */ +/** @typedef {number} StoredU64 */ +/** + * @typedef {Object} SupplyState + * @property {number} utxo_count + * @property {Sats} value + */ +/** @typedef {("24h"|"3d"|"1w"|"1m"|"3m"|"6m"|"1y"|"2y"|"3y")} TimePeriod */ +/** + * @typedef {Object} TimePeriodParam + * @property {TimePeriod} time_period + */ +/** @typedef {number} Timestamp */ +/** + * @typedef {Object} TimestampParam + * @property {Timestamp} timestamp + */ +/** + * @typedef {Object} Transaction + * @property {(TxIndex|null)=} index + * @property {Txid} txid + * @property {TxVersion} version + * @property {RawLockTime} locktime + * @property {number} size + * @property {Weight} weight + * @property {number} sigops + * @property {Sats} fee + * @property {TxIn[]} vin + * @property {TxOut[]} vout + * @property {TxStatus} status + */ +/** @typedef 
{({ [key: string]: TreeNode }|MetricLeafWithSchema)} TreeNode */ +/** + * @typedef {Object} TxIn + * @property {Txid} txid + * @property {Vout} vout + * @property {(TxOut|null)=} prevout + * @property {string} scriptsig + * @property {string} scriptsig_asm + * @property {boolean} is_coinbase + * @property {number} sequence + * @property {?string=} inner_redeemscript_asm + */ +/** @typedef {number} TxInIndex */ +/** @typedef {number} TxIndex */ +/** + * @typedef {Object} TxOut + * @property {string} scriptpubkey + * @property {Sats} value + */ +/** @typedef {number} TxOutIndex */ +/** + * @typedef {Object} TxOutspend + * @property {boolean} spent + * @property {(Txid|null)=} txid + * @property {(Vin|null)=} vin + * @property {(TxStatus|null)=} status + */ +/** + * @typedef {Object} TxStatus + * @property {boolean} confirmed + * @property {(Height|null)=} block_height + * @property {(BlockHash|null)=} block_hash + * @property {(Timestamp|null)=} block_time + */ +/** @typedef {number} TxVersion */ +/** @typedef {string} Txid */ +/** + * @typedef {Object} TxidParam + * @property {Txid} txid + */ +/** + * @typedef {Object} TxidVout + * @property {Txid} txid + * @property {Vout} vout + */ +/** @typedef {number} TypeIndex */ +/** @typedef {number[]} U8x2 */ +/** @typedef {number[]} U8x20 */ +/** @typedef {number[]} U8x32 */ +/** @typedef {string} U8x33 */ +/** @typedef {string} U8x65 */ +/** @typedef {TypeIndex} UnknownOutputIndex */ +/** + * @typedef {Object} Utxo + * @property {Txid} txid + * @property {Vout} vout + * @property {TxStatus} status + * @property {Sats} value + */ +/** @typedef {number} VSize */ +/** + * @typedef {Object} ValidateAddressParam + * @property {string} address + */ +/** @typedef {number} Vin */ +/** @typedef {number} Vout */ +/** @typedef {number} WeekIndex */ +/** @typedef {number} Weight */ +/** @typedef {number} YearIndex */ /** - * @param {string} origin + * @typedef {Object} BrkClientOptions + * @property {string} baseUrl - Base URL for 
the API + * @property {number} [timeout] - Request timeout in milliseconds */ -export function createClient(origin) { + +const _isBrowser = typeof window !== 'undefined' && 'caches' in window; +const _runIdle = (fn) => (globalThis.requestIdleCallback ?? setTimeout)(fn); + +/** @type {Promise} */ +const _cachePromise = _isBrowser + ? caches.open('__BRK_CLIENT__').catch(() => null) + : Promise.resolve(null); + +/** + * Custom error class for BRK client errors + */ +class BrkError extends Error { /** - * @template T - * @param {(value: T) => void} callback - * @param {string} url + * @param {string} message + * @param {number} [status] */ - async function fetchJson(callback, url) { - /** @type {T | null} */ - let cachedJson = null; + constructor(message, status) { + super(message); + this.name = 'BrkError'; + this.status = status; + } +} - /** @type {Cache | undefined} */ - let cache; - /** @type {Response | undefined} */ - let cachedResponse; - try { - cache = await caches.open(CACHE_NAME); - cachedResponse = await cache.match(url); - if (cachedResponse) { - console.debug(`cache: ${url}`); - const json = /** @type {T} */ (await cachedResponse.json()); - cachedJson = json; - callback(json); - } - } catch {} +/** + * A metric node that can fetch data for different indexes. + * @template T + */ +class MetricNode { + /** + * @param {BrkClientBase} client + * @param {string} path + */ + constructor(client, path) { + this._client = client; + this._path = path; + } + + /** + * Fetch all data points for this metric. + * @param {(value: T[]) => void} [onUpdate] - Called when data is available (may be called twice: cache then fresh) + * @returns {Promise} + */ + get(onUpdate) { + return this._client.get(this._path, onUpdate); + } + + /** + * Fetch data points within a range. 
+ * @param {string | number} from + * @param {string | number} to + * @param {(value: T[]) => void} [onUpdate] - Called when data is available (may be called twice: cache then fresh) + * @returns {Promise} + */ + getRange(from, to, onUpdate) { + return this._client.get(`${this._path}?from=${from}&to=${to}`, onUpdate); + } +} + +/** + * Base HTTP client for making requests with caching support + */ +class BrkClientBase { + /** + * @param {BrkClientOptions|string} options + */ + constructor(options) { + const isString = typeof options === 'string'; + this.baseUrl = isString ? options : options.baseUrl; + this.timeout = isString ? 5000 : (options.timeout ?? 5000); + } + + /** + * Make a GET request with stale-while-revalidate caching + * @template T + * @param {string} path + * @param {(value: T) => void} [onUpdate] - Called when data is available + * @returns {Promise} + */ + async get(path, onUpdate) { + const url = `${this.baseUrl}${path}`; + const cache = await _cachePromise; + const cachedRes = await cache?.match(url); + const cachedJson = cachedRes ? 
await cachedRes.json() : null; + + if (cachedJson) onUpdate?.(cachedJson); + if (!globalThis.navigator?.onLine) { + if (cachedJson) return cachedJson; + throw new BrkError('Offline and no cached data', 0); + } try { - if (!navigator.onLine) { - throw "Offline"; - } + const res = await fetch(url, { signal: AbortSignal.timeout(this.timeout) }); + if (!res.ok) throw new BrkError(`HTTP ${res.status}`, res.status); + if (cachedRes?.headers.get('ETag') === res.headers.get('ETag')) return cachedJson; - console.debug(`fetch: ${url}`); - - const fetchedResponse = await fetch(url, { - signal: AbortSignal.timeout(5000), - }); - if (!fetchedResponse.ok) { - throw `Bad response: ${fetchedResponse}`; - } - - if ( - cachedResponse?.headers.get("ETag") === - fetchedResponse.headers.get("ETag") - ) { - return cachedJson; - } - - const clonedResponse = fetchedResponse.clone(); - - const fetchedJson = /** @type {T} */ (await fetchedResponse.json()); - if (!fetchedJson) throw `JSON is false`; - - callback(fetchedJson); - - runWhenIdle(async function () { - try { - await cache?.put(url, clonedResponse); - } catch (_) {} - }); - - return fetchedJson; + const cloned = res.clone(); + const json = await res.json(); + onUpdate?.(json); + if (cache) _runIdle(() => cache.put(url, cloned)); + return json; } catch (e) { - console.error(e); - return cachedJson; + if (cachedJson) return cachedJson; + throw e; } } +} - /** - * @param {Metric} metric - * @param {IndexName} index - * @param {number} [from] - * @param {number} [to] - */ - function genMetricURL(metric, index, from, to) { - let path = `${origin}api/metrics/${metric.replaceAll("_", "-")}/${index}?`; - if (from !== undefined) { - path += `from=${from}`; - } - if (to !== undefined) { - if (!path.endsWith("?")) { - path += `&`; - } - path += `to=${to}`; - } - return path; - } +// Index accessor factory functions - /** - * @template T - * @param {(v: T[]) => void} callback - * @param {IndexName} index - * @param {Metric} metric - * @param 
{number} [from] - * @param {number} [to] - */ - function fetchMetric(callback, index, metric, from, to) { - return fetchJson(callback, genMetricURL(metric, index, from, to)); - } +/** + * @template T + * @typedef {Object} Indexes3 + * @property {MetricNode} byDateindex + * @property {MetricNode} byDecadeindex + * @property {MetricNode} byDifficultyepoch + * @property {MetricNode} byHeight + * @property {MetricNode} byMonthindex + * @property {MetricNode} byQuarterindex + * @property {MetricNode} bySemesterindex + * @property {MetricNode} byWeekindex + * @property {MetricNode} byYearindex + */ +/** + * Create a Indexes3 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes3} + */ +function createIndexes3(client, basePath) { return { - VERSION, - POOL_ID_TO_POOL_NAME, - INDEXES, - - hasMetric, - getIndexesFromMetric, - - genMetricURL, - fetchMetric, + byDateindex: new MetricNode(client, `${basePath}/dateindex`), + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`), + byDifficultyepoch: new MetricNode(client, `${basePath}/difficultyepoch`), + byHeight: new MetricNode(client, `${basePath}/height`), + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) }; } + +/** + * @template T + * @typedef {Object} Indexes4 + * @property {MetricNode} byDateindex + * @property {MetricNode} byDecadeindex + * @property {MetricNode} byDifficultyepoch + * @property {MetricNode} byMonthindex + * @property {MetricNode} byQuarterindex + * @property {MetricNode} bySemesterindex + * @property {MetricNode} byWeekindex + * @property {MetricNode} byYearindex + */ + +/** + * Create a Indexes4 accessor + * @template T + * @param {BrkClientBase} client + 
* @param {string} basePath + * @returns {Indexes4} + */ +function createIndexes4(client, basePath) { + return { + byDateindex: new MetricNode(client, `${basePath}/dateindex`), + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`), + byDifficultyepoch: new MetricNode(client, `${basePath}/difficultyepoch`), + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes21 + * @property {MetricNode} byDateindex + * @property {MetricNode} byDecadeindex + * @property {MetricNode} byHeight + * @property {MetricNode} byMonthindex + * @property {MetricNode} byQuarterindex + * @property {MetricNode} bySemesterindex + * @property {MetricNode} byWeekindex + * @property {MetricNode} byYearindex + */ + +/** + * Create a Indexes21 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes21} + */ +function createIndexes21(client, basePath) { + return { + byDateindex: new MetricNode(client, `${basePath}/dateindex`), + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`), + byHeight: new MetricNode(client, `${basePath}/height`), + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes + * @property {MetricNode} byDateindex + * @property {MetricNode} byDecadeindex + * @property {MetricNode} byMonthindex + * @property {MetricNode} byQuarterindex + * 
@property {MetricNode} bySemesterindex + * @property {MetricNode} byWeekindex + * @property {MetricNode} byYearindex + */ + +/** + * Create a Indexes accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes} + */ +function createIndexes(client, basePath) { + return { + byDateindex: new MetricNode(client, `${basePath}/dateindex`), + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`), + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes22 + * @property {MetricNode} byDecadeindex + * @property {MetricNode} byMonthindex + * @property {MetricNode} byQuarterindex + * @property {MetricNode} bySemesterindex + * @property {MetricNode} byWeekindex + * @property {MetricNode} byYearindex + */ + +/** + * Create a Indexes22 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes22} + */ +function createIndexes22(client, basePath) { + return { + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`), + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes10 + * @property {MetricNode} byQuarterindex + * @property {MetricNode} bySemesterindex + * @property {MetricNode} byYearindex + */ + +/** + * Create a Indexes10 accessor + * @template T + * @param {BrkClientBase} client + * @param 
{string} basePath + * @returns {Indexes10} + */ +function createIndexes10(client, basePath) { + return { + byQuarterindex: new MetricNode(client, `${basePath}/quarterindex`), + bySemesterindex: new MetricNode(client, `${basePath}/semesterindex`), + byYearindex: new MetricNode(client, `${basePath}/yearindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes8 + * @property {MetricNode} byDateindex + * @property {MetricNode} byHeight + */ + +/** + * Create a Indexes8 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes8} + */ +function createIndexes8(client, basePath) { + return { + byDateindex: new MetricNode(client, `${basePath}/dateindex`), + byHeight: new MetricNode(client, `${basePath}/height`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes9 + * @property {MetricNode} byMonthindex + * @property {MetricNode} byWeekindex + */ + +/** + * Create a Indexes9 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes9} + */ +function createIndexes9(client, basePath) { + return { + byMonthindex: new MetricNode(client, `${basePath}/monthindex`), + byWeekindex: new MetricNode(client, `${basePath}/weekindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes2 + * @property {MetricNode} byHeight + */ + +/** + * Create a Indexes2 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes2} + */ +function createIndexes2(client, basePath) { + return { + byHeight: new MetricNode(client, `${basePath}/height`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes5 + * @property {MetricNode} byDateindex + */ + +/** + * Create a Indexes5 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes5} + */ +function createIndexes5(client, basePath) { + return { + byDateindex: new MetricNode(client, `${basePath}/dateindex`) + }; +} + +/** 
+ * @template T + * @typedef {Object} Indexes6 + * @property {MetricNode} byTxindex + */ + +/** + * Create a Indexes6 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes6} + */ +function createIndexes6(client, basePath) { + return { + byTxindex: new MetricNode(client, `${basePath}/txindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes7 + * @property {MetricNode} byTxinindex + */ + +/** + * Create a Indexes7 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes7} + */ +function createIndexes7(client, basePath) { + return { + byTxinindex: new MetricNode(client, `${basePath}/txinindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes11 + * @property {MetricNode} byDecadeindex + */ + +/** + * Create a Indexes11 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes11} + */ +function createIndexes11(client, basePath) { + return { + byDecadeindex: new MetricNode(client, `${basePath}/decadeindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes12 + * @property {MetricNode} byP2aaddressindex + */ + +/** + * Create a Indexes12 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes12} + */ +function createIndexes12(client, basePath) { + return { + byP2aaddressindex: new MetricNode(client, `${basePath}/p2aaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes13 + * @property {MetricNode} byP2pk33addressindex + */ + +/** + * Create a Indexes13 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes13} + */ +function createIndexes13(client, basePath) { + return { + byP2pk33addressindex: new MetricNode(client, `${basePath}/p2pk33addressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes14 + * @property {MetricNode} 
byP2pk65addressindex + */ + +/** + * Create a Indexes14 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes14} + */ +function createIndexes14(client, basePath) { + return { + byP2pk65addressindex: new MetricNode(client, `${basePath}/p2pk65addressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes15 + * @property {MetricNode} byP2pkhaddressindex + */ + +/** + * Create a Indexes15 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes15} + */ +function createIndexes15(client, basePath) { + return { + byP2pkhaddressindex: new MetricNode(client, `${basePath}/p2pkhaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes16 + * @property {MetricNode} byP2shaddressindex + */ + +/** + * Create a Indexes16 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes16} + */ +function createIndexes16(client, basePath) { + return { + byP2shaddressindex: new MetricNode(client, `${basePath}/p2shaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes17 + * @property {MetricNode} byP2traddressindex + */ + +/** + * Create a Indexes17 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes17} + */ +function createIndexes17(client, basePath) { + return { + byP2traddressindex: new MetricNode(client, `${basePath}/p2traddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes18 + * @property {MetricNode} byP2wpkhaddressindex + */ + +/** + * Create a Indexes18 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes18} + */ +function createIndexes18(client, basePath) { + return { + byP2wpkhaddressindex: new MetricNode(client, `${basePath}/p2wpkhaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes19 + * @property {MetricNode} 
byP2wshaddressindex + */ + +/** + * Create a Indexes19 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes19} + */ +function createIndexes19(client, basePath) { + return { + byP2wshaddressindex: new MetricNode(client, `${basePath}/p2wshaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes20 + * @property {MetricNode} byTxoutindex + */ + +/** + * Create a Indexes20 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes20} + */ +function createIndexes20(client, basePath) { + return { + byTxoutindex: new MetricNode(client, `${basePath}/txoutindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes23 + * @property {MetricNode} byEmptyaddressindex + */ + +/** + * Create a Indexes23 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes23} + */ +function createIndexes23(client, basePath) { + return { + byEmptyaddressindex: new MetricNode(client, `${basePath}/emptyaddressindex`) + }; +} + +/** + * @template T + * @typedef {Object} Indexes24 + * @property {MetricNode} byLoadedaddressindex + */ + +/** + * Create a Indexes24 accessor + * @template T + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Indexes24} + */ +function createIndexes24(client, basePath) { + return { + byLoadedaddressindex: new MetricNode(client, `${basePath}/loadedaddressindex`) + }; +} + +// Reusable structural pattern factories + +/** + * @typedef {Object} RealizedPattern3 + * @property {Indexes5} adjustedSopr + * @property {Indexes5} adjustedSopr30dEma + * @property {Indexes5} adjustedSopr7dEma + * @property {Indexes3} adjustedValueCreated + * @property {Indexes3} adjustedValueDestroyed + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {Indexes} netRealizedPnlCumulative30dDelta + * @property {Indexes} 
netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {Indexes3} netRealizedPnlRelToRealizedCap + * @property {Indexes3} realizedCap + * @property {Indexes} realizedCap30dDelta + * @property {Indexes3} realizedCapRelToOwnMarketCap + * @property {BlockCountPattern} realizedLoss + * @property {Indexes3} realizedLossRelToRealizedCap + * @property {Indexes3} realizedPrice + * @property {ActivePriceRatioPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {Indexes3} realizedProfitRelToRealizedCap + * @property {Indexes5} realizedProfitToLossRatio + * @property {Indexes3} realizedValue + * @property {Indexes5} sellSideRiskRatio + * @property {Indexes5} sellSideRiskRatio30dEma + * @property {Indexes5} sellSideRiskRatio7dEma + * @property {Indexes5} sopr + * @property {Indexes5} sopr30dEma + * @property {Indexes5} sopr7dEma + * @property {Indexes21} totalRealizedPnl + * @property {Indexes3} valueCreated + * @property {Indexes3} valueDestroyed + */ + +/** + * Create a RealizedPattern3 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RealizedPattern3} + */ +function createRealizedPattern3(client, basePath) { + return { + adjustedSopr: createIndexes5(client, `${basePath}/adjusted_sopr`), + adjustedSopr30dEma: createIndexes5(client, `${basePath}/adjusted_sopr_30d_ema`), + adjustedSopr7dEma: createIndexes5(client, `${basePath}/adjusted_sopr_7d_ema`), + adjustedValueCreated: createIndexes3(client, `${basePath}/adjusted_value_created`), + adjustedValueDestroyed: createIndexes3(client, `${basePath}/adjusted_value_destroyed`), + negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), + netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), + netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), + 
netRealizedPnlCumulative30dDeltaRelToMarketCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), + netRealizedPnlRelToRealizedCap: createIndexes3(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), + realizedCap: createIndexes3(client, `${basePath}/realized_cap`), + realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), + realizedCapRelToOwnMarketCap: createIndexes3(client, `${basePath}/realized_cap_rel_to_own_market_cap`), + realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), + realizedLossRelToRealizedCap: createIndexes3(client, `${basePath}/realized_loss_rel_to_realized_cap`), + realizedPrice: createIndexes3(client, `${basePath}/realized_price`), + realizedPriceExtra: createActivePriceRatioPattern(client, `${basePath}/realized_price_extra`), + realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), + realizedProfitRelToRealizedCap: createIndexes3(client, `${basePath}/realized_profit_rel_to_realized_cap`), + realizedProfitToLossRatio: createIndexes5(client, `${basePath}/realized_profit_to_loss_ratio`), + realizedValue: createIndexes3(client, `${basePath}/realized_value`), + sellSideRiskRatio: createIndexes5(client, `${basePath}/sell_side_risk_ratio`), + sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), + sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), + sopr: createIndexes5(client, `${basePath}/sopr`), + sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), + sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), + totalRealizedPnl: createIndexes21(client, `${basePath}/total_realized_pnl`), + valueCreated: createIndexes3(client, `${basePath}/value_created`), + valueDestroyed: 
createIndexes3(client, `${basePath}/value_destroyed`) + }; +} + +/** + * @typedef {Object} Ratio1ySdPattern + * @property {Indexes} _0sdUsd + * @property {Indexes} m05sd + * @property {Indexes} m05sdUsd + * @property {Indexes} m15sd + * @property {Indexes} m15sdUsd + * @property {Indexes} m1sd + * @property {Indexes} m1sdUsd + * @property {Indexes} m25sd + * @property {Indexes} m25sdUsd + * @property {Indexes} m2sd + * @property {Indexes} m2sdUsd + * @property {Indexes} m3sd + * @property {Indexes} m3sdUsd + * @property {Indexes} p05sd + * @property {Indexes} p05sdUsd + * @property {Indexes} p15sd + * @property {Indexes} p15sdUsd + * @property {Indexes} p1sd + * @property {Indexes} p1sdUsd + * @property {Indexes} p25sd + * @property {Indexes} p25sdUsd + * @property {Indexes} p2sd + * @property {Indexes} p2sdUsd + * @property {Indexes} p3sd + * @property {Indexes} p3sdUsd + * @property {Indexes} sd + * @property {Indexes} sma + * @property {Indexes} zscore + */ + +/** + * Create a Ratio1ySdPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {Ratio1ySdPattern} + */ +function createRatio1ySdPattern(client, basePath) { + return { + _0sdUsd: createIndexes(client, `${basePath}/_0sd_usd`), + m05sd: createIndexes(client, `${basePath}/m0_5sd`), + m05sdUsd: createIndexes(client, `${basePath}/m0_5sd_usd`), + m15sd: createIndexes(client, `${basePath}/m1_5sd`), + m15sdUsd: createIndexes(client, `${basePath}/m1_5sd_usd`), + m1sd: createIndexes(client, `${basePath}/m1sd`), + m1sdUsd: createIndexes(client, `${basePath}/m1sd_usd`), + m25sd: createIndexes(client, `${basePath}/m2_5sd`), + m25sdUsd: createIndexes(client, `${basePath}/m2_5sd_usd`), + m2sd: createIndexes(client, `${basePath}/m2sd`), + m2sdUsd: createIndexes(client, `${basePath}/m2sd_usd`), + m3sd: createIndexes(client, `${basePath}/m3sd`), + m3sdUsd: createIndexes(client, `${basePath}/m3sd_usd`), + p05sd: createIndexes(client, `${basePath}/p0_5sd`), + p05sdUsd: 
createIndexes(client, `${basePath}/p0_5sd_usd`), + p15sd: createIndexes(client, `${basePath}/p1_5sd`), + p15sdUsd: createIndexes(client, `${basePath}/p1_5sd_usd`), + p1sd: createIndexes(client, `${basePath}/p1sd`), + p1sdUsd: createIndexes(client, `${basePath}/p1sd_usd`), + p25sd: createIndexes(client, `${basePath}/p2_5sd`), + p25sdUsd: createIndexes(client, `${basePath}/p2_5sd_usd`), + p2sd: createIndexes(client, `${basePath}/p2sd`), + p2sdUsd: createIndexes(client, `${basePath}/p2sd_usd`), + p3sd: createIndexes(client, `${basePath}/p3sd`), + p3sdUsd: createIndexes(client, `${basePath}/p3sd_usd`), + sd: createIndexes(client, `${basePath}/sd`), + sma: createIndexes(client, `${basePath}/sma`), + zscore: createIndexes(client, `${basePath}/zscore`) + }; +} + +/** + * @typedef {Object} RealizedPattern2 + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {Indexes} netRealizedPnlCumulative30dDelta + * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {Indexes3} netRealizedPnlRelToRealizedCap + * @property {Indexes3} realizedCap + * @property {Indexes} realizedCap30dDelta + * @property {Indexes3} realizedCapRelToOwnMarketCap + * @property {BlockCountPattern} realizedLoss + * @property {Indexes3} realizedLossRelToRealizedCap + * @property {Indexes3} realizedPrice + * @property {ActivePriceRatioPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {Indexes3} realizedProfitRelToRealizedCap + * @property {Indexes5} realizedProfitToLossRatio + * @property {Indexes3} realizedValue + * @property {Indexes5} sellSideRiskRatio + * @property {Indexes5} sellSideRiskRatio30dEma + * @property {Indexes5} sellSideRiskRatio7dEma + * @property {Indexes5} sopr + * @property {Indexes5} sopr30dEma + * @property {Indexes5} sopr7dEma + * @property {Indexes21} totalRealizedPnl + * @property 
{Indexes3} valueCreated + * @property {Indexes3} valueDestroyed + */ + +/** + * Create a RealizedPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RealizedPattern2} + */ +function createRealizedPattern2(client, basePath) { + return { + negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), + netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), + netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), + netRealizedPnlRelToRealizedCap: createIndexes3(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), + realizedCap: createIndexes3(client, `${basePath}/realized_cap`), + realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), + realizedCapRelToOwnMarketCap: createIndexes3(client, `${basePath}/realized_cap_rel_to_own_market_cap`), + realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), + realizedLossRelToRealizedCap: createIndexes3(client, `${basePath}/realized_loss_rel_to_realized_cap`), + realizedPrice: createIndexes3(client, `${basePath}/realized_price`), + realizedPriceExtra: createActivePriceRatioPattern(client, `${basePath}/realized_price_extra`), + realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), + realizedProfitRelToRealizedCap: createIndexes3(client, `${basePath}/realized_profit_rel_to_realized_cap`), + realizedProfitToLossRatio: createIndexes5(client, `${basePath}/realized_profit_to_loss_ratio`), + realizedValue: createIndexes3(client, `${basePath}/realized_value`), + sellSideRiskRatio: createIndexes5(client, 
`${basePath}/sell_side_risk_ratio`), + sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), + sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), + sopr: createIndexes5(client, `${basePath}/sopr`), + sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), + sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), + totalRealizedPnl: createIndexes21(client, `${basePath}/total_realized_pnl`), + valueCreated: createIndexes3(client, `${basePath}/value_created`), + valueDestroyed: createIndexes3(client, `${basePath}/value_destroyed`) + }; +} + +/** + * @typedef {Object} RealizedPattern + * @property {BlockCountPattern} negRealizedLoss + * @property {BlockCountPattern} netRealizedPnl + * @property {Indexes} netRealizedPnlCumulative30dDelta + * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToMarketCap + * @property {Indexes} netRealizedPnlCumulative30dDeltaRelToRealizedCap + * @property {Indexes3} netRealizedPnlRelToRealizedCap + * @property {Indexes3} realizedCap + * @property {Indexes} realizedCap30dDelta + * @property {BlockCountPattern} realizedLoss + * @property {Indexes3} realizedLossRelToRealizedCap + * @property {Indexes3} realizedPrice + * @property {RealizedPriceExtraPattern} realizedPriceExtra + * @property {BlockCountPattern} realizedProfit + * @property {Indexes3} realizedProfitRelToRealizedCap + * @property {Indexes3} realizedValue + * @property {Indexes5} sellSideRiskRatio + * @property {Indexes5} sellSideRiskRatio30dEma + * @property {Indexes5} sellSideRiskRatio7dEma + * @property {Indexes5} sopr + * @property {Indexes5} sopr30dEma + * @property {Indexes5} sopr7dEma + * @property {Indexes21} totalRealizedPnl + * @property {Indexes3} valueCreated + * @property {Indexes3} valueDestroyed + */ + +/** + * Create a RealizedPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RealizedPattern} + */ +function 
createRealizedPattern(client, basePath) { + return { + negRealizedLoss: createBlockCountPattern(client, `${basePath}/neg_realized_loss`), + netRealizedPnl: createBlockCountPattern(client, `${basePath}/net_realized_pnl`), + netRealizedPnlCumulative30dDelta: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta`), + netRealizedPnlCumulative30dDeltaRelToMarketCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_market_cap`), + netRealizedPnlCumulative30dDeltaRelToRealizedCap: createIndexes(client, `${basePath}/net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap`), + netRealizedPnlRelToRealizedCap: createIndexes3(client, `${basePath}/net_realized_pnl_rel_to_realized_cap`), + realizedCap: createIndexes3(client, `${basePath}/realized_cap`), + realizedCap30dDelta: createIndexes(client, `${basePath}/realized_cap_30d_delta`), + realizedLoss: createBlockCountPattern(client, `${basePath}/realized_loss`), + realizedLossRelToRealizedCap: createIndexes3(client, `${basePath}/realized_loss_rel_to_realized_cap`), + realizedPrice: createIndexes3(client, `${basePath}/realized_price`), + realizedPriceExtra: createRealizedPriceExtraPattern(client, `${basePath}/realized_price_extra`), + realizedProfit: createBlockCountPattern(client, `${basePath}/realized_profit`), + realizedProfitRelToRealizedCap: createIndexes3(client, `${basePath}/realized_profit_rel_to_realized_cap`), + realizedValue: createIndexes3(client, `${basePath}/realized_value`), + sellSideRiskRatio: createIndexes5(client, `${basePath}/sell_side_risk_ratio`), + sellSideRiskRatio30dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_30d_ema`), + sellSideRiskRatio7dEma: createIndexes5(client, `${basePath}/sell_side_risk_ratio_7d_ema`), + sopr: createIndexes5(client, `${basePath}/sopr`), + sopr30dEma: createIndexes5(client, `${basePath}/sopr_30d_ema`), + sopr7dEma: createIndexes5(client, `${basePath}/sopr_7d_ema`), + totalRealizedPnl: createIndexes21(client, 
`${basePath}/total_realized_pnl`), + valueCreated: createIndexes3(client, `${basePath}/value_created`), + valueDestroyed: createIndexes3(client, `${basePath}/value_destroyed`) + }; +} + +/** + * @typedef {Object} Price13dEmaPattern + * @property {Indexes} price + * @property {Indexes} ratio + * @property {Indexes} ratio1mSma + * @property {Indexes} ratio1wSma + * @property {Ratio1ySdPattern} ratio1ySd + * @property {Ratio1ySdPattern} ratio2ySd + * @property {Ratio1ySdPattern} ratio4ySd + * @property {Indexes} ratioPct1 + * @property {Indexes} ratioPct1Usd + * @property {Indexes} ratioPct2 + * @property {Indexes} ratioPct2Usd + * @property {Indexes} ratioPct5 + * @property {Indexes} ratioPct5Usd + * @property {Indexes} ratioPct95 + * @property {Indexes} ratioPct95Usd + * @property {Indexes} ratioPct98 + * @property {Indexes} ratioPct98Usd + * @property {Indexes} ratioPct99 + * @property {Indexes} ratioPct99Usd + * @property {Ratio1ySdPattern} ratioSd + */ + +/** + * Create a Price13dEmaPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {Price13dEmaPattern} + */ +function createPrice13dEmaPattern(client, acc) { + return { + price: createIndexes(client, `/${acc}`), + ratio: createIndexes(client, `/${acc}_ratio`), + ratio1mSma: createIndexes(client, `/${acc}_ratio_1m_sma`), + ratio1wSma: createIndexes(client, `/${acc}_ratio_1w_sma`), + ratio1ySd: createRatio1ySdPattern(client, `${acc}_ratio_1y_sd`), + ratio2ySd: createRatio1ySdPattern(client, `${acc}_ratio_2y_sd`), + ratio4ySd: createRatio1ySdPattern(client, `${acc}_ratio_4y_sd`), + ratioPct1: createIndexes(client, `/${acc}_ratio_pct1`), + ratioPct1Usd: createIndexes(client, `/${acc}_ratio_pct1_usd`), + ratioPct2: createIndexes(client, `/${acc}_ratio_pct2`), + ratioPct2Usd: createIndexes(client, `/${acc}_ratio_pct2_usd`), + ratioPct5: createIndexes(client, `/${acc}_ratio_pct5`), + ratioPct5Usd: createIndexes(client, `/${acc}_ratio_pct5_usd`), + 
ratioPct95: createIndexes(client, `/${acc}_ratio_pct95`), + ratioPct95Usd: createIndexes(client, `/${acc}_ratio_pct95_usd`), + ratioPct98: createIndexes(client, `/${acc}_ratio_pct98`), + ratioPct98Usd: createIndexes(client, `/${acc}_ratio_pct98_usd`), + ratioPct99: createIndexes(client, `/${acc}_ratio_pct99`), + ratioPct99Usd: createIndexes(client, `/${acc}_ratio_pct99_usd`), + ratioSd: createRatio1ySdPattern(client, `${acc}_ratio_sd`) + }; +} + +/** + * @typedef {Object} PricePercentilesPattern + * @property {Indexes} pct05 + * @property {Indexes} pct10 + * @property {Indexes} pct15 + * @property {Indexes} pct20 + * @property {Indexes} pct25 + * @property {Indexes} pct30 + * @property {Indexes} pct35 + * @property {Indexes} pct40 + * @property {Indexes} pct45 + * @property {Indexes} pct50 + * @property {Indexes} pct55 + * @property {Indexes} pct60 + * @property {Indexes} pct65 + * @property {Indexes} pct70 + * @property {Indexes} pct75 + * @property {Indexes} pct80 + * @property {Indexes} pct85 + * @property {Indexes} pct90 + * @property {Indexes} pct95 + */ + +/** + * Create a PricePercentilesPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {PricePercentilesPattern} + */ +function createPricePercentilesPattern(client, basePath) { + return { + pct05: createIndexes(client, `${basePath}/pct05`), + pct10: createIndexes(client, `${basePath}/pct10`), + pct15: createIndexes(client, `${basePath}/pct15`), + pct20: createIndexes(client, `${basePath}/pct20`), + pct25: createIndexes(client, `${basePath}/pct25`), + pct30: createIndexes(client, `${basePath}/pct30`), + pct35: createIndexes(client, `${basePath}/pct35`), + pct40: createIndexes(client, `${basePath}/pct40`), + pct45: createIndexes(client, `${basePath}/pct45`), + pct50: createIndexes(client, `${basePath}/pct50`), + pct55: createIndexes(client, `${basePath}/pct55`), + pct60: createIndexes(client, `${basePath}/pct60`), + pct65: createIndexes(client, `${basePath}/pct65`), 
+ pct70: createIndexes(client, `${basePath}/pct70`), + pct75: createIndexes(client, `${basePath}/pct75`), + pct80: createIndexes(client, `${basePath}/pct80`), + pct85: createIndexes(client, `${basePath}/pct85`), + pct90: createIndexes(client, `${basePath}/pct90`), + pct95: createIndexes(client, `${basePath}/pct95`) + }; +} + +/** + * @typedef {Object} ActivePriceRatioPattern + * @property {Indexes} ratio + * @property {Indexes} ratio1mSma + * @property {Indexes} ratio1wSma + * @property {Ratio1ySdPattern} ratio1ySd + * @property {Ratio1ySdPattern} ratio2ySd + * @property {Ratio1ySdPattern} ratio4ySd + * @property {Indexes} ratioPct1 + * @property {Indexes} ratioPct1Usd + * @property {Indexes} ratioPct2 + * @property {Indexes} ratioPct2Usd + * @property {Indexes} ratioPct5 + * @property {Indexes} ratioPct5Usd + * @property {Indexes} ratioPct95 + * @property {Indexes} ratioPct95Usd + * @property {Indexes} ratioPct98 + * @property {Indexes} ratioPct98Usd + * @property {Indexes} ratioPct99 + * @property {Indexes} ratioPct99Usd + * @property {Ratio1ySdPattern} ratioSd + */ + +/** + * Create a ActivePriceRatioPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {ActivePriceRatioPattern} + */ +function createActivePriceRatioPattern(client, basePath) { + return { + ratio: createIndexes(client, `${basePath}/ratio`), + ratio1mSma: createIndexes(client, `${basePath}/ratio_1m_sma`), + ratio1wSma: createIndexes(client, `${basePath}/ratio_1w_sma`), + ratio1ySd: createRatio1ySdPattern(client, `${basePath}/ratio_1y_sd`), + ratio2ySd: createRatio1ySdPattern(client, `${basePath}/ratio_2y_sd`), + ratio4ySd: createRatio1ySdPattern(client, `${basePath}/ratio_4y_sd`), + ratioPct1: createIndexes(client, `${basePath}/ratio_pct1`), + ratioPct1Usd: createIndexes(client, `${basePath}/ratio_pct1_usd`), + ratioPct2: createIndexes(client, `${basePath}/ratio_pct2`), + ratioPct2Usd: createIndexes(client, `${basePath}/ratio_pct2_usd`), + ratioPct5: 
createIndexes(client, `${basePath}/ratio_pct5`), + ratioPct5Usd: createIndexes(client, `${basePath}/ratio_pct5_usd`), + ratioPct95: createIndexes(client, `${basePath}/ratio_pct95`), + ratioPct95Usd: createIndexes(client, `${basePath}/ratio_pct95_usd`), + ratioPct98: createIndexes(client, `${basePath}/ratio_pct98`), + ratioPct98Usd: createIndexes(client, `${basePath}/ratio_pct98_usd`), + ratioPct99: createIndexes(client, `${basePath}/ratio_pct99`), + ratioPct99Usd: createIndexes(client, `${basePath}/ratio_pct99_usd`), + ratioSd: createRatio1ySdPattern(client, `${basePath}/ratio_sd`) + }; +} + +/** + * @typedef {Object} RelativePattern2 + * @property {Indexes21} negUnrealizedLossRelToMarketCap + * @property {Indexes21} negUnrealizedLossRelToOwnMarketCap + * @property {Indexes21} negUnrealizedLossRelToOwnTotalUnrealizedPnl + * @property {Indexes21} netUnrealizedPnlRelToMarketCap + * @property {Indexes21} netUnrealizedPnlRelToOwnMarketCap + * @property {Indexes21} netUnrealizedPnlRelToOwnTotalUnrealizedPnl + * @property {Indexes21} supplyInLossRelToCirculatingSupply + * @property {Indexes21} supplyInLossRelToOwnSupply + * @property {Indexes21} supplyInProfitRelToCirculatingSupply + * @property {Indexes21} supplyInProfitRelToOwnSupply + * @property {Indexes3} supplyRelToCirculatingSupply + * @property {Indexes21} unrealizedLossRelToMarketCap + * @property {Indexes21} unrealizedLossRelToOwnMarketCap + * @property {Indexes21} unrealizedLossRelToOwnTotalUnrealizedPnl + * @property {Indexes21} unrealizedProfitRelToMarketCap + * @property {Indexes21} unrealizedProfitRelToOwnMarketCap + * @property {Indexes21} unrealizedProfitRelToOwnTotalUnrealizedPnl + */ + +/** + * Create a RelativePattern2 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RelativePattern2} + */ +function createRelativePattern2(client, basePath) { + return { + negUnrealizedLossRelToMarketCap: createIndexes21(client, 
`${basePath}/neg_unrealized_loss_rel_to_market_cap`), + negUnrealizedLossRelToOwnMarketCap: createIndexes21(client, `${basePath}/neg_unrealized_loss_rel_to_own_market_cap`), + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes21(client, `${basePath}/neg_unrealized_loss_rel_to_own_total_unrealized_pnl`), + netUnrealizedPnlRelToMarketCap: createIndexes21(client, `${basePath}/net_unrealized_pnl_rel_to_market_cap`), + netUnrealizedPnlRelToOwnMarketCap: createIndexes21(client, `${basePath}/net_unrealized_pnl_rel_to_own_market_cap`), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createIndexes21(client, `${basePath}/net_unrealized_pnl_rel_to_own_total_unrealized_pnl`), + supplyInLossRelToCirculatingSupply: createIndexes21(client, `${basePath}/supply_in_loss_rel_to_circulating_supply`), + supplyInLossRelToOwnSupply: createIndexes21(client, `${basePath}/supply_in_loss_rel_to_own_supply`), + supplyInProfitRelToCirculatingSupply: createIndexes21(client, `${basePath}/supply_in_profit_rel_to_circulating_supply`), + supplyInProfitRelToOwnSupply: createIndexes21(client, `${basePath}/supply_in_profit_rel_to_own_supply`), + supplyRelToCirculatingSupply: createIndexes3(client, `${basePath}/supply_rel_to_circulating_supply`), + unrealizedLossRelToMarketCap: createIndexes21(client, `${basePath}/unrealized_loss_rel_to_market_cap`), + unrealizedLossRelToOwnMarketCap: createIndexes21(client, `${basePath}/unrealized_loss_rel_to_own_market_cap`), + unrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes21(client, `${basePath}/unrealized_loss_rel_to_own_total_unrealized_pnl`), + unrealizedProfitRelToMarketCap: createIndexes21(client, `${basePath}/unrealized_profit_rel_to_market_cap`), + unrealizedProfitRelToOwnMarketCap: createIndexes21(client, `${basePath}/unrealized_profit_rel_to_own_market_cap`), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createIndexes21(client, `${basePath}/unrealized_profit_rel_to_own_total_unrealized_pnl`) + }; +} + +/** + * @typedef {Object} AXbtPattern + 
* @property {Indexes} _1dDominance + * @property {Indexes} _1mBlocksMined + * @property {Indexes} _1mDominance + * @property {Indexes} _1wBlocksMined + * @property {Indexes} _1wDominance + * @property {Indexes} _1yBlocksMined + * @property {Indexes} _1yDominance + * @property {BlockCountPattern} blocksMined + * @property {UnclaimedRewardsPattern} coinbase + * @property {Indexes} daysSinceBlock + * @property {Indexes} dominance + * @property {UnclaimedRewardsPattern} fee + * @property {UnclaimedRewardsPattern} subsidy + */ + +/** + * Create a AXbtPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {AXbtPattern} + */ +function createAXbtPattern(client, basePath) { + return { + _1dDominance: createIndexes(client, `${basePath}/1d_dominance`), + _1mBlocksMined: createIndexes(client, `${basePath}/1m_blocks_mined`), + _1mDominance: createIndexes(client, `${basePath}/1m_dominance`), + _1wBlocksMined: createIndexes(client, `${basePath}/1w_blocks_mined`), + _1wDominance: createIndexes(client, `${basePath}/1w_dominance`), + _1yBlocksMined: createIndexes(client, `${basePath}/1y_blocks_mined`), + _1yDominance: createIndexes(client, `${basePath}/1y_dominance`), + blocksMined: createBlockCountPattern(client, `${basePath}/blocks_mined`), + coinbase: createUnclaimedRewardsPattern(client, `${basePath}/coinbase`), + daysSinceBlock: createIndexes(client, `${basePath}/days_since_block`), + dominance: createIndexes(client, `${basePath}/dominance`), + fee: createUnclaimedRewardsPattern(client, `${basePath}/fee`), + subsidy: createUnclaimedRewardsPattern(client, `${basePath}/subsidy`) + }; +} + +/** + * @template T + * @typedef {Object} BitcoinPattern + * @property {Indexes4} average + * @property {Indexes2} base + * @property {Indexes3} cumulative + * @property {Indexes4} max + * @property {Indexes5} median + * @property {Indexes4} min + * @property {Indexes5} pct10 + * @property {Indexes5} pct25 + * @property {Indexes5} pct75 + * @property 
{Indexes5} pct90 + * @property {Indexes4} sum + */ + +/** + * Create a BitcoinPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {BitcoinPattern} + */ +function createBitcoinPattern(client, basePath) { + return { + average: createIndexes4(client, `${basePath}/average`), + base: createIndexes2(client, `${basePath}/base`), + cumulative: createIndexes3(client, `${basePath}/cumulative`), + max: createIndexes4(client, `${basePath}/max`), + median: createIndexes5(client, `${basePath}/median`), + min: createIndexes4(client, `${basePath}/min`), + pct10: createIndexes5(client, `${basePath}/pct10`), + pct25: createIndexes5(client, `${basePath}/pct25`), + pct75: createIndexes5(client, `${basePath}/pct75`), + pct90: createIndexes5(client, `${basePath}/pct90`), + sum: createIndexes4(client, `${basePath}/sum`) + }; +} + +/** + * @template T + * @typedef {Object} BlockSizePattern + * @property {Indexes3} average + * @property {Indexes3} cumulative + * @property {Indexes3} max + * @property {Indexes2} median + * @property {Indexes3} min + * @property {Indexes2} pct10 + * @property {Indexes2} pct25 + * @property {Indexes2} pct75 + * @property {Indexes2} pct90 + * @property {Indexes3} sum + */ + +/** + * Create a BlockSizePattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {BlockSizePattern} + */ +function createBlockSizePattern(client, basePath) { + return { + average: createIndexes3(client, `${basePath}/average`), + cumulative: createIndexes3(client, `${basePath}/cumulative`), + max: createIndexes3(client, `${basePath}/max`), + median: createIndexes2(client, `${basePath}/median`), + min: createIndexes3(client, `${basePath}/min`), + pct10: createIndexes2(client, `${basePath}/pct10`), + pct25: createIndexes2(client, `${basePath}/pct25`), + pct75: createIndexes2(client, `${basePath}/pct75`), + pct90: createIndexes2(client, `${basePath}/pct90`), + sum: createIndexes3(client, `${basePath}/sum`) + 
}; +} + +/** + * @typedef {Object} RelativePattern + * @property {Indexes21} negUnrealizedLossRelToMarketCap + * @property {Indexes21} netUnrealizedPnlRelToMarketCap + * @property {Indexes21} supplyInLossRelToCirculatingSupply + * @property {Indexes21} supplyInLossRelToOwnSupply + * @property {Indexes21} supplyInProfitRelToCirculatingSupply + * @property {Indexes21} supplyInProfitRelToOwnSupply + * @property {Indexes3} supplyRelToCirculatingSupply + * @property {Indexes21} unrealizedLossRelToMarketCap + * @property {Indexes21} unrealizedProfitRelToMarketCap + */ + +/** + * Create a RelativePattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RelativePattern} + */ +function createRelativePattern(client, basePath) { + return { + negUnrealizedLossRelToMarketCap: createIndexes21(client, `${basePath}/neg_unrealized_loss_rel_to_market_cap`), + netUnrealizedPnlRelToMarketCap: createIndexes21(client, `${basePath}/net_unrealized_pnl_rel_to_market_cap`), + supplyInLossRelToCirculatingSupply: createIndexes21(client, `${basePath}/supply_in_loss_rel_to_circulating_supply`), + supplyInLossRelToOwnSupply: createIndexes21(client, `${basePath}/supply_in_loss_rel_to_own_supply`), + supplyInProfitRelToCirculatingSupply: createIndexes21(client, `${basePath}/supply_in_profit_rel_to_circulating_supply`), + supplyInProfitRelToOwnSupply: createIndexes21(client, `${basePath}/supply_in_profit_rel_to_own_supply`), + supplyRelToCirculatingSupply: createIndexes3(client, `${basePath}/supply_rel_to_circulating_supply`), + unrealizedLossRelToMarketCap: createIndexes21(client, `${basePath}/unrealized_loss_rel_to_market_cap`), + unrealizedProfitRelToMarketCap: createIndexes21(client, `${basePath}/unrealized_profit_rel_to_market_cap`) + }; +} + +/** + * @typedef {Object} UnrealizedPattern + * @property {Indexes21} negUnrealizedLoss + * @property {Indexes21} netUnrealizedPnl + * @property {SupplyPattern} supplyInLoss + * @property {SupplyValuePattern} 
supplyInLossValue + * @property {SupplyPattern} supplyInProfit + * @property {SupplyValuePattern} supplyInProfitValue + * @property {Indexes21} totalUnrealizedPnl + * @property {Indexes21} unrealizedLoss + * @property {Indexes21} unrealizedProfit + */ + +/** + * Create a UnrealizedPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {UnrealizedPattern} + */ +function createUnrealizedPattern(client, basePath) { + return { + negUnrealizedLoss: createIndexes21(client, `${basePath}/neg_unrealized_loss`), + netUnrealizedPnl: createIndexes21(client, `${basePath}/net_unrealized_pnl`), + supplyInLoss: createSupplyPattern(client, `${basePath}/supply_in_loss`), + supplyInLossValue: createSupplyValuePattern(client, `${basePath}/supply_in_loss_value`), + supplyInProfit: createSupplyPattern(client, `${basePath}/supply_in_profit`), + supplyInProfitValue: createSupplyValuePattern(client, `${basePath}/supply_in_profit_value`), + totalUnrealizedPnl: createIndexes21(client, `${basePath}/total_unrealized_pnl`), + unrealizedLoss: createIndexes21(client, `${basePath}/unrealized_loss`), + unrealizedProfit: createIndexes21(client, `${basePath}/unrealized_profit`) + }; +} + +/** + * @template T + * @typedef {Object} BlockIntervalPattern + * @property {Indexes4} average + * @property {Indexes4} max + * @property {Indexes5} median + * @property {Indexes4} min + * @property {Indexes5} pct10 + * @property {Indexes5} pct25 + * @property {Indexes5} pct75 + * @property {Indexes5} pct90 + */ + +/** + * Create a BlockIntervalPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {BlockIntervalPattern} + */ +function createBlockIntervalPattern(client, acc) { + return { + average: createIndexes4(client, `/${acc}_avg`), + max: createIndexes4(client, `/${acc}_max`), + median: createIndexes5(client, `/${acc}_median`), + min: createIndexes4(client, `/${acc}_min`), + pct10: createIndexes5(client, 
`/${acc}_pct10`), + pct25: createIndexes5(client, `/${acc}_pct25`), + pct75: createIndexes5(client, `/${acc}_pct75`), + pct90: createIndexes5(client, `/${acc}_pct90`) + }; +} + +/** + * @template T + * @typedef {Object} AddresstypeToHeightToAddrCountPattern + * @property {Indexes2} p2a + * @property {Indexes2} p2pk33 + * @property {Indexes2} p2pk65 + * @property {Indexes2} p2pkh + * @property {Indexes2} p2sh + * @property {Indexes2} p2tr + * @property {Indexes2} p2wpkh + * @property {Indexes2} p2wsh + */ + +/** + * Create a AddresstypeToHeightToAddrCountPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {AddresstypeToHeightToAddrCountPattern} + */ +function createAddresstypeToHeightToAddrCountPattern(client, basePath) { + return { + p2a: createIndexes2(client, `${basePath}/p2a`), + p2pk33: createIndexes2(client, `${basePath}/p2pk33`), + p2pk65: createIndexes2(client, `${basePath}/p2pk65`), + p2pkh: createIndexes2(client, `${basePath}/p2pkh`), + p2sh: createIndexes2(client, `${basePath}/p2sh`), + p2tr: createIndexes2(client, `${basePath}/p2tr`), + p2wpkh: createIndexes2(client, `${basePath}/p2wpkh`), + p2wsh: createIndexes2(client, `${basePath}/p2wsh`) + }; +} + +/** + * @typedef {Object} _0satsPattern + * @property {ActivityPattern} activity + * @property {Indexes3} addrCount + * @property {PricePaidPattern} pricePaid + * @property {RealizedPattern} realized + * @property {RelativePattern} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _0satsPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {_0satsPattern} + */ +function create_0satsPattern(client, basePath) { + return { + activity: createActivityPattern(client, `${basePath}/activity`), + addrCount: createIndexes3(client, `${basePath}/addr_count`), + pricePaid: createPricePaidPattern(client, `${basePath}/price_paid`), + realized: 
createRealizedPattern(client, `${basePath}/realized`), + relative: createRelativePattern(client, `${basePath}/relative`), + supply: createSupplyPattern2(client, `${basePath}/supply`), + unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + }; +} + +/** + * @typedef {Object} _10yTo12yPattern + * @property {ActivityPattern} activity + * @property {PricePaidPattern2} pricePaid + * @property {RealizedPattern2} realized + * @property {RelativePattern2} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _10yTo12yPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {_10yTo12yPattern} + */ +function create_10yTo12yPattern(client, basePath) { + return { + activity: createActivityPattern(client, `${basePath}/activity`), + pricePaid: createPricePaidPattern2(client, `${basePath}/price_paid`), + realized: createRealizedPattern2(client, `${basePath}/realized`), + relative: createRelativePattern2(client, `${basePath}/relative`), + supply: createSupplyPattern2(client, `${basePath}/supply`), + unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + }; +} + +/** + * @typedef {Object} _0satsPattern2 + * @property {ActivityPattern} activity + * @property {PricePaidPattern} pricePaid + * @property {RealizedPattern} realized + * @property {RelativePattern} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a _0satsPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {_0satsPattern2} + */ +function create_0satsPattern2(client, basePath) { + return { + activity: createActivityPattern(client, `${basePath}/activity`), + pricePaid: createPricePaidPattern(client, `${basePath}/price_paid`), + realized: createRealizedPattern(client, `${basePath}/realized`), + relative: createRelativePattern(client, `${basePath}/relative`), + supply: 
createSupplyPattern2(client, `${basePath}/supply`), + unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + }; +} + +/** + * @typedef {Object} UpTo1dPattern + * @property {ActivityPattern} activity + * @property {PricePaidPattern2} pricePaid + * @property {RealizedPattern3} realized + * @property {RelativePattern2} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * Create a UpTo1dPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {UpTo1dPattern} + */ +function createUpTo1dPattern(client, basePath) { + return { + activity: createActivityPattern(client, `${basePath}/activity`), + pricePaid: createPricePaidPattern2(client, `${basePath}/price_paid`), + realized: createRealizedPattern3(client, `${basePath}/realized`), + relative: createRelativePattern2(client, `${basePath}/relative`), + supply: createSupplyPattern2(client, `${basePath}/supply`), + unrealized: createUnrealizedPattern(client, `${basePath}/unrealized`) + }; +} + +/** + * @typedef {Object} ActivityPattern + * @property {BlockCountPattern} coinblocksDestroyed + * @property {BlockCountPattern} coindaysDestroyed + * @property {Indexes2} satblocksDestroyed + * @property {Indexes2} satdaysDestroyed + * @property {SentPattern} sent + */ + +/** + * Create a ActivityPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {ActivityPattern} + */ +function createActivityPattern(client, basePath) { + return { + coinblocksDestroyed: createBlockCountPattern(client, `${basePath}/coinblocks_destroyed`), + coindaysDestroyed: createBlockCountPattern(client, `${basePath}/coindays_destroyed`), + satblocksDestroyed: createIndexes2(client, `${basePath}/satblocks_destroyed`), + satdaysDestroyed: createIndexes2(client, `${basePath}/satdays_destroyed`), + sent: createSentPattern(client, `${basePath}/sent`) + }; +} + +/** + * @typedef {Object} SupplyPattern2 + * @property 
{SupplyPattern} supply + * @property {SentSumPattern} supplyHalf + * @property {SentSumPattern} supplyHalfValue + * @property {SupplyValuePattern} supplyValue + * @property {Indexes3} utxoCount + */ + +/** + * Create a SupplyPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {SupplyPattern2} + */ +function createSupplyPattern2(client, basePath) { + return { + supply: createSupplyPattern(client, `${basePath}/supply`), + supplyHalf: createSentSumPattern(client, `${basePath}/supply_half`), + supplyHalfValue: createSentSumPattern(client, `${basePath}/supply_half_value`), + supplyValue: createSupplyValuePattern(client, `${basePath}/supply_value`), + utxoCount: createIndexes3(client, `${basePath}/utxo_count`) + }; +} + +/** + * @typedef {Object} SupplyPattern + * @property {Indexes2} base + * @property {Indexes} bitcoin + * @property {Indexes} dollars + * @property {Indexes} sats + */ + +/** + * Create a SupplyPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {SupplyPattern} + */ +function createSupplyPattern(client, basePath) { + return { + base: createIndexes2(client, `${basePath}/base`), + bitcoin: createIndexes(client, `${basePath}/bitcoin`), + dollars: createIndexes(client, `${basePath}/dollars`), + sats: createIndexes(client, `${basePath}/sats`) + }; +} + +/** + * @typedef {Object} SentPattern + * @property {Indexes2} base + * @property {BlockCountPattern} bitcoin + * @property {BlockCountPattern} dollars + * @property {SatsPattern} sats + */ + +/** + * Create a SentPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {SentPattern} + */ +function createSentPattern(client, basePath) { + return { + base: createIndexes2(client, `${basePath}/base`), + bitcoin: createBlockCountPattern(client, `${basePath}/bitcoin`), + dollars: createBlockCountPattern(client, `${basePath}/dollars`), + sats: createSatsPattern(client, `${basePath}/sats`) 
+ }; +} + +/** + * @typedef {Object} PricePaidPattern2 + * @property {Indexes3} maxPricePaid + * @property {Indexes3} minPricePaid + * @property {PricePercentilesPattern} pricePercentiles + */ + +/** + * Create a PricePaidPattern2 pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {PricePaidPattern2} + */ +function createPricePaidPattern2(client, basePath) { + return { + maxPricePaid: createIndexes3(client, `${basePath}/max_price_paid`), + minPricePaid: createIndexes3(client, `${basePath}/min_price_paid`), + pricePercentiles: createPricePercentilesPattern(client, `${basePath}/price_percentiles`) + }; +} + +/** + * @typedef {Object} CoinbasePattern + * @property {BitcoinPattern} bitcoin + * @property {BitcoinPattern} dollars + * @property {BitcoinPattern} sats + */ + +/** + * Create a CoinbasePattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {CoinbasePattern} + */ +function createCoinbasePattern(client, basePath) { + return { + bitcoin: createBitcoinPattern(client, `${basePath}/bitcoin`), + dollars: createBitcoinPattern(client, `${basePath}/dollars`), + sats: createBitcoinPattern(client, `${basePath}/sats`) + }; +} + +/** + * @typedef {Object} SentSumPattern + * @property {Indexes3} bitcoin + * @property {Indexes3} dollars + * @property {Indexes3} sats + */ + +/** + * Create a SentSumPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {SentSumPattern} + */ +function createSentSumPattern(client, basePath) { + return { + bitcoin: createIndexes3(client, `${basePath}/bitcoin`), + dollars: createIndexes3(client, `${basePath}/dollars`), + sats: createIndexes3(client, `${basePath}/sats`) + }; +} + +/** + * @typedef {Object} UnclaimedRewardsPattern + * @property {BlockCountPattern} bitcoin + * @property {BlockCountPattern} dollars + * @property {BlockCountPattern} sats + */ + +/** + * Create a UnclaimedRewardsPattern pattern node + * @param 
{BrkClientBase} client + * @param {string} basePath + * @returns {UnclaimedRewardsPattern} + */ +function createUnclaimedRewardsPattern(client, basePath) { + return { + bitcoin: createBlockCountPattern(client, `${basePath}/bitcoin`), + dollars: createBlockCountPattern(client, `${basePath}/dollars`), + sats: createBlockCountPattern(client, `${basePath}/sats`) + }; +} + +/** + * @template T + * @typedef {Object} BlockCountPattern + * @property {Indexes2} base + * @property {Indexes3} cumulative + * @property {Indexes4} sum + */ + +/** + * Create a BlockCountPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {BlockCountPattern} + */ +function createBlockCountPattern(client, basePath) { + return { + base: createIndexes2(client, `${basePath}/base`), + cumulative: createIndexes3(client, `${basePath}/cumulative`), + sum: createIndexes4(client, `${basePath}/sum`) + }; +} + +/** + * @typedef {Object} _1dReturns1mSdPattern + * @property {Indexes} sd + * @property {Indexes} sma + */ + +/** + * Create a _1dReturns1mSdPattern pattern node + * @param {BrkClientBase} client + * @param {string} acc - Accumulated metric name + * @returns {_1dReturns1mSdPattern} + */ +function create_1dReturns1mSdPattern(client, acc) { + return { + sd: createIndexes(client, `/${acc}_sd`), + sma: createIndexes(client, `/${acc}_sma`) + }; +} + +/** + * @typedef {Object} SupplyValuePattern + * @property {Indexes2} bitcoin + * @property {Indexes2} dollars + */ + +/** + * Create a SupplyValuePattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {SupplyValuePattern} + */ +function createSupplyValuePattern(client, basePath) { + return { + bitcoin: createIndexes2(client, `${basePath}/bitcoin`), + dollars: createIndexes2(client, `${basePath}/dollars`) + }; +} + +/** + * @typedef {Object} SatsPattern + * @property {Indexes3} cumulative + * @property {Indexes4} sum + */ + +/** + * Create a SatsPattern pattern node + * 
@param {BrkClientBase} client + * @param {string} basePath + * @returns {SatsPattern} + */ +function createSatsPattern(client, basePath) { + return { + cumulative: createIndexes3(client, `${basePath}/cumulative`), + sum: createIndexes4(client, `${basePath}/sum`) + }; +} + +/** + * @typedef {Object} PricePaidPattern + * @property {Indexes3} maxPricePaid + * @property {Indexes3} minPricePaid + */ + +/** + * Create a PricePaidPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {PricePaidPattern} + */ +function createPricePaidPattern(client, basePath) { + return { + maxPricePaid: createIndexes3(client, `${basePath}/max_price_paid`), + minPricePaid: createIndexes3(client, `${basePath}/min_price_paid`) + }; +} + +/** + * @typedef {Object} RealizedPriceExtraPattern + * @property {Indexes} ratio + */ + +/** + * Create a RealizedPriceExtraPattern pattern node + * @param {BrkClientBase} client + * @param {string} basePath + * @returns {RealizedPriceExtraPattern} + */ +function createRealizedPriceExtraPattern(client, basePath) { + return { + ratio: createIndexes(client, `${basePath}/ratio`) + }; +} + +// Catalog tree typedefs + +/** + * @typedef {Object} CatalogTree + * @property {MetricNode} computed + * @property {MetricNode} indexed + */ + +/** + * @typedef {Object} CatalogTree_Computed + * @property {MetricNode} blks + * @property {MetricNode} chain + * @property {MetricNode} cointime + * @property {MetricNode} constants + * @property {MetricNode} fetched + * @property {MetricNode} indexes + * @property {MetricNode} market + * @property {MetricNode} pools + * @property {MetricNode} price + * @property {MetricNode} stateful + */ + +/** + * @typedef {Object} CatalogTree_Computed_Blks + * @property {MetricNode} position + */ + +/** + * @typedef {Object} CatalogTree_Computed_Chain + * @property {Indexes} _1mBlockCount + * @property {Indexes} _1wBlockCount + * @property {Indexes} _1yBlockCount + * @property {Indexes2} _24hBlockCount 
+ * @property {Indexes2} _24hCoinbaseSum + * @property {Indexes2} _24hCoinbaseUsdSum + * @property {Indexes} annualizedVolume + * @property {Indexes} annualizedVolumeBtc + * @property {Indexes} annualizedVolumeUsd + * @property {BlockCountPattern} blockCount + * @property {Indexes} blockCountTarget + * @property {BlockIntervalPattern} blockInterval + * @property {BlockSizePattern} blockSize + * @property {BlockSizePattern} blockVbytes + * @property {BlockSizePattern} blockWeight + * @property {Indexes3} blocksBeforeNextDifficultyAdjustment + * @property {Indexes3} blocksBeforeNextHalving + * @property {CoinbasePattern} coinbase + * @property {Indexes3} daysBeforeNextDifficultyAdjustment + * @property {Indexes3} daysBeforeNextHalving + * @property {Indexes4} difficulty + * @property {Indexes3} difficultyAdjustment + * @property {Indexes3} difficultyAsHash + * @property {Indexes} difficultyepoch + * @property {BitcoinPattern} emptyoutputCount + * @property {Indexes3} exactUtxoCount + * @property {MetricNode} fee + * @property {Indexes5} feeDominance + * @property {MetricNode} feeRate + * @property {Indexes} halvingepoch + * @property {Indexes3} hashPricePhs + * @property {Indexes3} hashPricePhsMin + * @property {Indexes3} hashPriceRebound + * @property {Indexes3} hashPriceThs + * @property {Indexes3} hashPriceThsMin + * @property {Indexes3} hashRate + * @property {Indexes} hashRate1mSma + * @property {Indexes} hashRate1wSma + * @property {Indexes} hashRate1ySma + * @property {Indexes} hashRate2mSma + * @property {Indexes3} hashValuePhs + * @property {Indexes3} hashValuePhsMin + * @property {Indexes3} hashValueRebound + * @property {Indexes3} hashValueThs + * @property {Indexes3} hashValueThsMin + * @property {Indexes} inflationRate + * @property {BlockSizePattern} inputCount + * @property {Indexes6} inputValue + * @property {Indexes} inputsPerSec + * @property {Indexes2} interval + * @property {Indexes6} isCoinbase + * @property {BitcoinPattern} opreturnCount + * 
@property {BlockSizePattern} outputCount + * @property {Indexes6} outputValue + * @property {Indexes} outputsPerSec + * @property {BitcoinPattern} p2aCount + * @property {BitcoinPattern} p2msCount + * @property {BitcoinPattern} p2pk33Count + * @property {BitcoinPattern} p2pk65Count + * @property {BitcoinPattern} p2pkhCount + * @property {BitcoinPattern} p2shCount + * @property {BitcoinPattern} p2trCount + * @property {BitcoinPattern} p2wpkhCount + * @property {BitcoinPattern} p2wshCount + * @property {Indexes} puellMultiple + * @property {SentSumPattern} sentSum + * @property {CoinbasePattern} subsidy + * @property {Indexes5} subsidyDominance + * @property {Indexes} subsidyUsd1ySma + * @property {MetricNode} timestamp + * @property {Indexes} txBtcVelocity + * @property {BitcoinPattern} txCount + * @property {Indexes} txPerSec + * @property {Indexes} txUsdVelocity + * @property {BlockCountPattern} txV1 + * @property {BlockCountPattern} txV2 + * @property {BlockCountPattern} txV3 + * @property {BlockIntervalPattern} txVsize + * @property {BlockIntervalPattern} txWeight + * @property {UnclaimedRewardsPattern} unclaimedRewards + * @property {BitcoinPattern} unknownoutputCount + * @property {Indexes7} value + * @property {Indexes2} vbytes + * @property {Indexes6} vsize + * @property {Indexes6} weight + */ + +/** + * @typedef {Object} CatalogTree_Computed_Chain_Fee + * @property {Indexes6} base + * @property {BlockSizePattern} bitcoin + * @property {Indexes6} bitcoinTxindex + * @property {BlockSizePattern} dollars + * @property {Indexes6} dollarsTxindex + * @property {BlockSizePattern} sats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Chain_FeeRate + * @property {Indexes3} average + * @property {Indexes6} base + * @property {Indexes3} max + * @property {Indexes2} median + * @property {Indexes3} min + * @property {Indexes2} pct10 + * @property {Indexes2} pct25 + * @property {Indexes2} pct75 + * @property {Indexes2} pct90 + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Cointime + * @property {Indexes3} activeCap + * @property {Indexes3} activePrice + * @property {ActivePriceRatioPattern} activePriceRatio + * @property {SentSumPattern} activeSupply + * @property {Indexes3} activityToVaultednessRatio + * @property {BlockCountPattern} coinblocksCreated + * @property {BlockCountPattern} coinblocksStored + * @property {Indexes} cointimeAdjInflationRate + * @property {Indexes} cointimeAdjTxBtcVelocity + * @property {Indexes} cointimeAdjTxUsdVelocity + * @property {Indexes3} cointimeCap + * @property {Indexes3} cointimePrice + * @property {ActivePriceRatioPattern} cointimePriceRatio + * @property {BlockCountPattern} cointimeValueCreated + * @property {BlockCountPattern} cointimeValueDestroyed + * @property {BlockCountPattern} cointimeValueStored + * @property {Indexes3} investorCap + * @property {Indexes3} liveliness + * @property {Indexes3} thermoCap + * @property {Indexes3} trueMarketMean + * @property {ActivePriceRatioPattern} trueMarketMeanRatio + * @property {Indexes3} vaultedCap + * @property {Indexes3} vaultedPrice + * @property {ActivePriceRatioPattern} vaultedPriceRatio + * @property {SentSumPattern} vaultedSupply + * @property {Indexes3} vaultedness + */ + +/** + * @typedef {Object} CatalogTree_Computed_Constants + * @property {Indexes3} constant0 + * @property {Indexes3} constant1 + * @property {Indexes3} constant100 + * @property {Indexes3} constant2 + * @property {Indexes3} constant3 + * @property {Indexes3} constant382 + * @property {Indexes3} constant4 + * @property {Indexes3} constant50 + * @property {Indexes3} constant600 + * @property {Indexes3} constant618 + * @property {Indexes3} constantMinus1 + * @property {Indexes3} constantMinus2 + * @property {Indexes3} constantMinus3 + * @property {Indexes3} constantMinus4 + */ + +/** + * @typedef {Object} CatalogTree_Computed_Fetched + * @property {Indexes8} priceOhlcInCents + */ + +/** + * @typedef {Object} CatalogTree_Computed_Indexes + * @property 
{Indexes8} date + * @property {Indexes2} dateFixed + * @property {Indexes8} dateindex + * @property {Indexes9} dateindexCount + * @property {MetricNode} decadeindex + * @property {MetricNode} difficultyepoch + * @property {MetricNode} emptyoutputindex + * @property {Indexes9} firstDateindex + * @property {MetricNode} firstHeight + * @property {Indexes10} firstMonthindex + * @property {Indexes11} firstYearindex + * @property {MetricNode} halvingepoch + * @property {Indexes2} height + * @property {MetricNode} heightCount + * @property {Indexes6} inputCount + * @property {MetricNode} monthindex + * @property {Indexes10} monthindexCount + * @property {MetricNode} opreturnindex + * @property {Indexes6} outputCount + * @property {Indexes12} p2aaddressindex + * @property {MetricNode} p2msoutputindex + * @property {Indexes13} p2pk33addressindex + * @property {Indexes14} p2pk65addressindex + * @property {Indexes15} p2pkhaddressindex + * @property {Indexes16} p2shaddressindex + * @property {Indexes17} p2traddressindex + * @property {Indexes18} p2wpkhaddressindex + * @property {Indexes19} p2wshaddressindex + * @property {MetricNode} quarterindex + * @property {MetricNode} semesterindex + * @property {Indexes2} timestampFixed + * @property {Indexes6} txindex + * @property {Indexes2} txindexCount + * @property {Indexes7} txinindex + * @property {Indexes20} txoutindex + * @property {MetricNode} unknownoutputindex + * @property {MetricNode} weekindex + * @property {MetricNode} yearindex + * @property {Indexes11} yearindexCount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Market + * @property {_1dReturns1mSdPattern} _1dReturns1mSd + * @property {_1dReturns1mSdPattern} _1dReturns1wSd + * @property {_1dReturns1mSdPattern} _1dReturns1ySd + * @property {Indexes} _10yCagr + * @property {Indexes} _10yDcaAvgPrice + * @property {Indexes} _10yDcaCagr + * @property {Indexes} _10yDcaReturns + * @property {Indexes} _10yDcaStack + * @property {Indexes} _10yPriceReturns + * @property 
{Indexes} _1dPriceReturns + * @property {Indexes} _1mDcaAvgPrice + * @property {Indexes} _1mDcaReturns + * @property {Indexes} _1mDcaStack + * @property {Indexes} _1mPriceReturns + * @property {Indexes} _1wDcaAvgPrice + * @property {Indexes} _1wDcaReturns + * @property {Indexes} _1wDcaStack + * @property {Indexes} _1wPriceReturns + * @property {Indexes} _1yDcaAvgPrice + * @property {Indexes} _1yDcaReturns + * @property {Indexes} _1yDcaStack + * @property {Indexes} _1yPriceReturns + * @property {Indexes} _2yCagr + * @property {Indexes} _2yDcaAvgPrice + * @property {Indexes} _2yDcaCagr + * @property {Indexes} _2yDcaReturns + * @property {Indexes} _2yDcaStack + * @property {Indexes} _2yPriceReturns + * @property {Indexes} _3mDcaAvgPrice + * @property {Indexes} _3mDcaReturns + * @property {Indexes} _3mDcaStack + * @property {Indexes} _3mPriceReturns + * @property {Indexes} _3yCagr + * @property {Indexes} _3yDcaAvgPrice + * @property {Indexes} _3yDcaCagr + * @property {Indexes} _3yDcaReturns + * @property {Indexes} _3yDcaStack + * @property {Indexes} _3yPriceReturns + * @property {Indexes} _4yCagr + * @property {Indexes} _4yDcaAvgPrice + * @property {Indexes} _4yDcaCagr + * @property {Indexes} _4yDcaReturns + * @property {Indexes} _4yDcaStack + * @property {Indexes} _4yPriceReturns + * @property {Indexes} _5yCagr + * @property {Indexes} _5yDcaAvgPrice + * @property {Indexes} _5yDcaCagr + * @property {Indexes} _5yDcaReturns + * @property {Indexes} _5yDcaStack + * @property {Indexes} _5yPriceReturns + * @property {Indexes} _6mDcaAvgPrice + * @property {Indexes} _6mDcaReturns + * @property {Indexes} _6mDcaStack + * @property {Indexes} _6mPriceReturns + * @property {Indexes} _6yCagr + * @property {Indexes} _6yDcaAvgPrice + * @property {Indexes} _6yDcaCagr + * @property {Indexes} _6yDcaReturns + * @property {Indexes} _6yDcaStack + * @property {Indexes} _6yPriceReturns + * @property {Indexes} _8yCagr + * @property {Indexes} _8yDcaAvgPrice + * @property {Indexes} _8yDcaCagr + 
* @property {Indexes} _8yDcaReturns + * @property {Indexes} _8yDcaStack + * @property {Indexes} _8yPriceReturns + * @property {Indexes} daysSincePriceAth + * @property {Indexes} dcaClass2015AvgPrice + * @property {Indexes} dcaClass2015Returns + * @property {Indexes} dcaClass2015Stack + * @property {Indexes} dcaClass2016AvgPrice + * @property {Indexes} dcaClass2016Returns + * @property {Indexes} dcaClass2016Stack + * @property {Indexes} dcaClass2017AvgPrice + * @property {Indexes} dcaClass2017Returns + * @property {Indexes} dcaClass2017Stack + * @property {Indexes} dcaClass2018AvgPrice + * @property {Indexes} dcaClass2018Returns + * @property {Indexes} dcaClass2018Stack + * @property {Indexes} dcaClass2019AvgPrice + * @property {Indexes} dcaClass2019Returns + * @property {Indexes} dcaClass2019Stack + * @property {Indexes} dcaClass2020AvgPrice + * @property {Indexes} dcaClass2020Returns + * @property {Indexes} dcaClass2020Stack + * @property {Indexes} dcaClass2021AvgPrice + * @property {Indexes} dcaClass2021Returns + * @property {Indexes} dcaClass2021Stack + * @property {Indexes} dcaClass2022AvgPrice + * @property {Indexes} dcaClass2022Returns + * @property {Indexes} dcaClass2022Stack + * @property {Indexes} dcaClass2023AvgPrice + * @property {Indexes} dcaClass2023Returns + * @property {Indexes} dcaClass2023Stack + * @property {Indexes} dcaClass2024AvgPrice + * @property {Indexes} dcaClass2024Returns + * @property {Indexes} dcaClass2024Stack + * @property {Indexes} dcaClass2025AvgPrice + * @property {Indexes} dcaClass2025Returns + * @property {Indexes} dcaClass2025Stack + * @property {Indexes} maxDaysBetweenPriceAths + * @property {Indexes} maxYearsBetweenPriceAths + * @property {Indexes} price10yAgo + * @property {Price13dEmaPattern} price13dEma + * @property {Price13dEmaPattern} price13dSma + * @property {Price13dEmaPattern} price144dEma + * @property {Price13dEmaPattern} price144dSma + * @property {Indexes} price1dAgo + * @property {Indexes} price1mAgo + * 
@property {Price13dEmaPattern} price1mEma + * @property {Indexes} price1mMax + * @property {Indexes} price1mMin + * @property {Price13dEmaPattern} price1mSma + * @property {Indexes} price1mVolatility + * @property {Indexes} price1wAgo + * @property {Price13dEmaPattern} price1wEma + * @property {Indexes} price1wMax + * @property {Indexes} price1wMin + * @property {Price13dEmaPattern} price1wSma + * @property {Indexes} price1wVolatility + * @property {Indexes} price1yAgo + * @property {Price13dEmaPattern} price1yEma + * @property {Indexes} price1yMax + * @property {Indexes} price1yMin + * @property {Price13dEmaPattern} price1ySma + * @property {Indexes} price1yVolatility + * @property {Price13dEmaPattern} price200dEma + * @property {Price13dEmaPattern} price200dSma + * @property {Indexes} price200dSmaX08 + * @property {Indexes} price200dSmaX24 + * @property {Price13dEmaPattern} price200wEma + * @property {Price13dEmaPattern} price200wSma + * @property {Price13dEmaPattern} price21dEma + * @property {Price13dEmaPattern} price21dSma + * @property {Indexes} price2wChoppinessIndex + * @property {Indexes} price2wMax + * @property {Indexes} price2wMin + * @property {Indexes} price2yAgo + * @property {Price13dEmaPattern} price2yEma + * @property {Price13dEmaPattern} price2ySma + * @property {Price13dEmaPattern} price34dEma + * @property {Price13dEmaPattern} price34dSma + * @property {Indexes} price3mAgo + * @property {Indexes} price3yAgo + * @property {Indexes} price4yAgo + * @property {Price13dEmaPattern} price4yEma + * @property {Price13dEmaPattern} price4ySma + * @property {Price13dEmaPattern} price55dEma + * @property {Price13dEmaPattern} price55dSma + * @property {Indexes} price5yAgo + * @property {Indexes} price6mAgo + * @property {Indexes} price6yAgo + * @property {Price13dEmaPattern} price89dEma + * @property {Price13dEmaPattern} price89dSma + * @property {Price13dEmaPattern} price8dEma + * @property {Price13dEmaPattern} price8dSma + * @property {Indexes} price8yAgo 
+ * @property {Indexes21} priceAth + * @property {Indexes21} priceDrawdown + * @property {Indexes5} priceTrueRange + * @property {Indexes5} priceTrueRange2wSum + */ + +/** + * @typedef {Object} CatalogTree_Computed_Pools + * @property {Indexes2} pool + * @property {MetricNode} vecs + */ + +/** + * @typedef {Object} CatalogTree_Computed_Pools_Vecs + * @property {AXbtPattern} aXbt + * @property {AXbtPattern} aaoPool + * @property {AXbtPattern} antPool + * @property {AXbtPattern} arkPool + * @property {AXbtPattern} asicMiner + * @property {AXbtPattern} batPool + * @property {AXbtPattern} bcMonster + * @property {AXbtPattern} bcpoolIo + * @property {AXbtPattern} binancePool + * @property {AXbtPattern} bitClub + * @property {AXbtPattern} bitFuFuPool + * @property {AXbtPattern} bitFury + * @property {AXbtPattern} bitMinter + * @property {AXbtPattern} bitalo + * @property {AXbtPattern} bitcoinAffiliateNetwork + * @property {AXbtPattern} bitcoinCom + * @property {AXbtPattern} bitcoinIndia + * @property {AXbtPattern} bitcoinRussia + * @property {AXbtPattern} bitcoinUkraine + * @property {AXbtPattern} bitfarms + * @property {AXbtPattern} bitparking + * @property {AXbtPattern} bitsolo + * @property {AXbtPattern} bixin + * @property {AXbtPattern} blockFills + * @property {AXbtPattern} braiinsPool + * @property {AXbtPattern} bravoMining + * @property {AXbtPattern} btPool + * @property {AXbtPattern} btcCom + * @property {AXbtPattern} btcDig + * @property {AXbtPattern} btcGuild + * @property {AXbtPattern} btcLab + * @property {AXbtPattern} btcMp + * @property {AXbtPattern} btcNuggets + * @property {AXbtPattern} btcPoolParty + * @property {AXbtPattern} btcServ + * @property {AXbtPattern} btcTop + * @property {AXbtPattern} btcc + * @property {AXbtPattern} bwPool + * @property {AXbtPattern} bytePool + * @property {AXbtPattern} canoe + * @property {AXbtPattern} canoePool + * @property {AXbtPattern} carbonNegative + * @property {AXbtPattern} ckPool + * @property {AXbtPattern} 
cloudHashing + * @property {AXbtPattern} coinLab + * @property {AXbtPattern} cointerra + * @property {AXbtPattern} connectBtc + * @property {AXbtPattern} dPool + * @property {AXbtPattern} dcExploration + * @property {AXbtPattern} dcex + * @property {AXbtPattern} digitalBtc + * @property {AXbtPattern} digitalXMintsy + * @property {AXbtPattern} eclipseMc + * @property {AXbtPattern} eightBaochi + * @property {AXbtPattern} ekanemBtc + * @property {AXbtPattern} eligius + * @property {AXbtPattern} emcdPool + * @property {AXbtPattern} entrustCharityPool + * @property {AXbtPattern} eobot + * @property {AXbtPattern} exxBw + * @property {AXbtPattern} f2Pool + * @property {AXbtPattern} fiftyEightCoin + * @property {AXbtPattern} foundryUsa + * @property {AXbtPattern} futureBitApolloSolo + * @property {AXbtPattern} gbMiners + * @property {AXbtPattern} ghashIo + * @property {AXbtPattern} giveMeCoins + * @property {AXbtPattern} goGreenLight + * @property {AXbtPattern} haoZhuZhu + * @property {AXbtPattern} haominer + * @property {AXbtPattern} hashBx + * @property {AXbtPattern} hashPool + * @property {AXbtPattern} helix + * @property {AXbtPattern} hhtt + * @property {AXbtPattern} hotPool + * @property {AXbtPattern} hummerpool + * @property {AXbtPattern} huobiPool + * @property {AXbtPattern} innopolisTech + * @property {AXbtPattern} kanoPool + * @property {AXbtPattern} kncMiner + * @property {AXbtPattern} kuCoinPool + * @property {AXbtPattern} lubianCom + * @property {AXbtPattern} luckyPool + * @property {AXbtPattern} luxor + * @property {AXbtPattern} maraPool + * @property {AXbtPattern} maxBtc + * @property {AXbtPattern} maxiPool + * @property {AXbtPattern} megaBigPower + * @property {AXbtPattern} minerium + * @property {AXbtPattern} miningCity + * @property {AXbtPattern} miningDutch + * @property {AXbtPattern} miningKings + * @property {AXbtPattern} miningSquared + * @property {AXbtPattern} mmpool + * @property {AXbtPattern} mtRed + * @property {AXbtPattern} multiCoinCo + * 
@property {AXbtPattern} multipool + * @property {AXbtPattern} myBtcCoinPool + * @property {AXbtPattern} neopool + * @property {AXbtPattern} nexious + * @property {AXbtPattern} niceHash + * @property {AXbtPattern} nmcBit + * @property {AXbtPattern} novaBlock + * @property {AXbtPattern} ocean + * @property {AXbtPattern} okExPool + * @property {AXbtPattern} okMiner + * @property {AXbtPattern} okkong + * @property {AXbtPattern} okpoolTop + * @property {AXbtPattern} oneHash + * @property {AXbtPattern} oneM1x + * @property {AXbtPattern} oneThash + * @property {AXbtPattern} ozCoin + * @property {AXbtPattern} pHashIo + * @property {AXbtPattern} parasite + * @property {AXbtPattern} patels + * @property {AXbtPattern} pegaPool + * @property {AXbtPattern} phoenix + * @property {AXbtPattern} polmine + * @property {AXbtPattern} pool175btc + * @property {AXbtPattern} pool50btc + * @property {AXbtPattern} poolin + * @property {AXbtPattern} portlandHodl + * @property {AXbtPattern} publicPool + * @property {AXbtPattern} pureBtcCom + * @property {AXbtPattern} rawpool + * @property {AXbtPattern} rigPool + * @property {AXbtPattern} sbiCrypto + * @property {AXbtPattern} secPool + * @property {AXbtPattern} secretSuperstar + * @property {AXbtPattern} sevenPool + * @property {AXbtPattern} shawnP0wers + * @property {AXbtPattern} sigmapoolCom + * @property {AXbtPattern} simplecoinUs + * @property {AXbtPattern} soloCk + * @property {AXbtPattern} spiderPool + * @property {AXbtPattern} stMiningCorp + * @property {AXbtPattern} tangpool + * @property {AXbtPattern} tatmasPool + * @property {AXbtPattern} tbDice + * @property {AXbtPattern} telco214 + * @property {AXbtPattern} terraPool + * @property {AXbtPattern} tiger + * @property {AXbtPattern} tigerpoolNet + * @property {AXbtPattern} titan + * @property {AXbtPattern} transactionCoinMining + * @property {AXbtPattern} trickysBtcPool + * @property {AXbtPattern} tripleMining + * @property {AXbtPattern} twentyOneInc + * @property {AXbtPattern} 
ultimusPool + * @property {AXbtPattern} unknown + * @property {AXbtPattern} unomp + * @property {AXbtPattern} viaBtc + * @property {AXbtPattern} waterhole + * @property {AXbtPattern} wayiCn + * @property {AXbtPattern} whitePool + * @property {AXbtPattern} wk057 + * @property {AXbtPattern} yourbtcNet + * @property {AXbtPattern} zulupool + */ + +/** + * @typedef {Object} CatalogTree_Computed_Price + * @property {Indexes3} priceClose + * @property {Indexes8} priceCloseInCents + * @property {Indexes3} priceCloseInSats + * @property {Indexes3} priceHigh + * @property {Indexes8} priceHighInCents + * @property {Indexes3} priceHighInSats + * @property {Indexes3} priceLow + * @property {Indexes8} priceLowInCents + * @property {Indexes3} priceLowInSats + * @property {Indexes3} priceOhlc + * @property {Indexes3} priceOhlcInSats + * @property {Indexes3} priceOpen + * @property {Indexes8} priceOpenInCents + * @property {Indexes3} priceOpenInSats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful + * @property {Indexes3} addrCount + * @property {MetricNode} addressCohorts + * @property {MetricNode} addressesData + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToHeightToEmptyAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} addresstypeToIndexesToEmptyAddrCount + * @property {AddresstypeToHeightToAddrCountPattern} anyAddressIndexes + * @property {Indexes2} chainState + * @property {Indexes3} emptyAddrCount + * @property {Indexes23} emptyaddressindex + * @property {Indexes24} loadedaddressindex + * @property {Indexes21} marketCap + * @property {SupplyPattern} opreturnSupply + * @property {Indexes20} txinindex + * @property {SupplyPattern} unspendableSupply + * @property {MetricNode} utxoCohorts + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Stateful_AddressCohorts + * @property {MetricNode} amountRange + * @property {MetricNode} geAmount + * @property {MetricNode} ltAmount + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_AmountRange + * @property {_0satsPattern} _0sats + * @property {_0satsPattern} _100btcTo1kBtc + * @property {_0satsPattern} _100kBtcOrMore + * @property {_0satsPattern} _100kSatsTo1mSats + * @property {_0satsPattern} _100satsTo1kSats + * @property {_0satsPattern} _10btcTo100btc + * @property {_0satsPattern} _10kBtcTo100kBtc + * @property {_0satsPattern} _10kSatsTo100kSats + * @property {_0satsPattern} _10mSatsTo1btc + * @property {_0satsPattern} _10satsTo100sats + * @property {_0satsPattern} _1btcTo10btc + * @property {_0satsPattern} _1kBtcTo10kBtc + * @property {_0satsPattern} _1kSatsTo10kSats + * @property {_0satsPattern} _1mSatsTo10mSats + * @property {_0satsPattern} _1satTo10sats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_GeAmount + * @property {_0satsPattern} _100btc + * @property {_0satsPattern} _100kSats + * @property {_0satsPattern} _100sats + * @property {_0satsPattern} _10btc + * @property {_0satsPattern} _10kBtc + * @property {_0satsPattern} _10kSats + * @property {_0satsPattern} _10mSats + * @property {_0satsPattern} _10sats + * @property {_0satsPattern} _1btc + * @property {_0satsPattern} _1kBtc + * @property {_0satsPattern} _1kSats + * @property {_0satsPattern} _1mSats + * @property {_0satsPattern} _1sat + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_AddressCohorts_LtAmount + * @property {_0satsPattern} _100btc + * @property {_0satsPattern} _100kBtc + * @property {_0satsPattern} _100kSats + * @property {_0satsPattern} _100sats + * @property {_0satsPattern} _10btc + * @property {_0satsPattern} _10kBtc + * @property {_0satsPattern} _10kSats + * @property {_0satsPattern} _10mSats + * @property {_0satsPattern} _10sats + * @property {_0satsPattern} _1btc + * @property 
{_0satsPattern} _1kBtc + * @property {_0satsPattern} _1kSats + * @property {_0satsPattern} _1mSats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_AddressesData + * @property {Indexes23} empty + * @property {Indexes24} loaded + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts + * @property {MetricNode} ageRange + * @property {MetricNode} all + * @property {MetricNode} amountRange + * @property {MetricNode} epoch + * @property {MetricNode} geAmount + * @property {MetricNode} ltAmount + * @property {MetricNode} maxAge + * @property {MetricNode} minAge + * @property {MetricNode} term + * @property {MetricNode} type + * @property {MetricNode} year + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_AgeRange + * @property {_10yTo12yPattern} _10yTo12y + * @property {_10yTo12yPattern} _12yTo15y + * @property {_10yTo12yPattern} _1dTo1w + * @property {_10yTo12yPattern} _1mTo2m + * @property {_10yTo12yPattern} _1wTo1m + * @property {_10yTo12yPattern} _1yTo2y + * @property {_10yTo12yPattern} _2mTo3m + * @property {_10yTo12yPattern} _2yTo3y + * @property {_10yTo12yPattern} _3mTo4m + * @property {_10yTo12yPattern} _3yTo4y + * @property {_10yTo12yPattern} _4mTo5m + * @property {_10yTo12yPattern} _4yTo5y + * @property {_10yTo12yPattern} _5mTo6m + * @property {_10yTo12yPattern} _5yTo6y + * @property {_10yTo12yPattern} _6mTo1y + * @property {_10yTo12yPattern} _6yTo7y + * @property {_10yTo12yPattern} _7yTo8y + * @property {_10yTo12yPattern} _8yTo10y + * @property {_10yTo12yPattern} from15y + * @property {UpTo1dPattern} upTo1d + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_All + * @property {ActivityPattern} activity + * @property {PricePaidPattern2} pricePaid + * @property {RealizedPattern3} realized + * @property {MetricNode} relative + * @property {SupplyPattern2} supply + * @property {UnrealizedPattern} unrealized + */ + +/** + * @typedef {Object} 
CatalogTree_Computed_Stateful_UtxoCohorts_All_Relative + * @property {Indexes21} negUnrealizedLossRelToMarketCap + * @property {Indexes21} negUnrealizedLossRelToOwnTotalUnrealizedPnl + * @property {Indexes21} netUnrealizedPnlRelToMarketCap + * @property {Indexes21} netUnrealizedPnlRelToOwnTotalUnrealizedPnl + * @property {Indexes21} supplyInLossRelToOwnSupply + * @property {Indexes21} supplyInProfitRelToOwnSupply + * @property {Indexes21} unrealizedLossRelToMarketCap + * @property {Indexes21} unrealizedLossRelToOwnTotalUnrealizedPnl + * @property {Indexes21} unrealizedProfitRelToMarketCap + * @property {Indexes21} unrealizedProfitRelToOwnTotalUnrealizedPnl + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_AmountRange + * @property {_0satsPattern2} _0sats + * @property {_0satsPattern2} _100btcTo1kBtc + * @property {_0satsPattern2} _100kBtcOrMore + * @property {_0satsPattern2} _100kSatsTo1mSats + * @property {_0satsPattern2} _100satsTo1kSats + * @property {_0satsPattern2} _10btcTo100btc + * @property {_0satsPattern2} _10kBtcTo100kBtc + * @property {_0satsPattern2} _10kSatsTo100kSats + * @property {_0satsPattern2} _10mSatsTo1btc + * @property {_0satsPattern2} _10satsTo100sats + * @property {_0satsPattern2} _1btcTo10btc + * @property {_0satsPattern2} _1kBtcTo10kBtc + * @property {_0satsPattern2} _1kSatsTo10kSats + * @property {_0satsPattern2} _1mSatsTo10mSats + * @property {_0satsPattern2} _1satTo10sats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Epoch + * @property {_10yTo12yPattern} _0 + * @property {_10yTo12yPattern} _1 + * @property {_10yTo12yPattern} _2 + * @property {_10yTo12yPattern} _3 + * @property {_10yTo12yPattern} _4 + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_GeAmount + * @property {_0satsPattern2} _100btc + * @property {_0satsPattern2} _100kSats + * @property {_0satsPattern2} _100sats + * @property {_0satsPattern2} _10btc + * @property {_0satsPattern2} _10kBtc + * 
@property {_0satsPattern2} _10kSats + * @property {_0satsPattern2} _10mSats + * @property {_0satsPattern2} _10sats + * @property {_0satsPattern2} _1btc + * @property {_0satsPattern2} _1kBtc + * @property {_0satsPattern2} _1kSats + * @property {_0satsPattern2} _1mSats + * @property {_0satsPattern2} _1sat + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_LtAmount + * @property {_0satsPattern2} _100btc + * @property {_0satsPattern2} _100kBtc + * @property {_0satsPattern2} _100kSats + * @property {_0satsPattern2} _100sats + * @property {_0satsPattern2} _10btc + * @property {_0satsPattern2} _10kBtc + * @property {_0satsPattern2} _10kSats + * @property {_0satsPattern2} _10mSats + * @property {_0satsPattern2} _10sats + * @property {_0satsPattern2} _1btc + * @property {_0satsPattern2} _1kBtc + * @property {_0satsPattern2} _1kSats + * @property {_0satsPattern2} _1mSats + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_MaxAge + * @property {UpTo1dPattern} _10y + * @property {UpTo1dPattern} _12y + * @property {UpTo1dPattern} _15y + * @property {UpTo1dPattern} _1m + * @property {UpTo1dPattern} _1w + * @property {UpTo1dPattern} _1y + * @property {UpTo1dPattern} _2m + * @property {UpTo1dPattern} _2y + * @property {UpTo1dPattern} _3m + * @property {UpTo1dPattern} _3y + * @property {UpTo1dPattern} _4m + * @property {UpTo1dPattern} _4y + * @property {UpTo1dPattern} _5m + * @property {UpTo1dPattern} _5y + * @property {UpTo1dPattern} _6m + * @property {UpTo1dPattern} _6y + * @property {UpTo1dPattern} _7y + * @property {UpTo1dPattern} _8y + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_MinAge + * @property {_10yTo12yPattern} _10y + * @property {_10yTo12yPattern} _12y + * @property {_10yTo12yPattern} _1d + * @property {_10yTo12yPattern} _1m + * @property {_10yTo12yPattern} _1w + * @property {_10yTo12yPattern} _1y + * @property {_10yTo12yPattern} _2m + * @property {_10yTo12yPattern} _2y + * @property 
{_10yTo12yPattern} _3m + * @property {_10yTo12yPattern} _3y + * @property {_10yTo12yPattern} _4m + * @property {_10yTo12yPattern} _4y + * @property {_10yTo12yPattern} _5m + * @property {_10yTo12yPattern} _5y + * @property {_10yTo12yPattern} _6m + * @property {_10yTo12yPattern} _6y + * @property {_10yTo12yPattern} _7y + * @property {_10yTo12yPattern} _8y + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Term + * @property {UpTo1dPattern} long + * @property {UpTo1dPattern} short + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Type + * @property {_0satsPattern2} empty + * @property {_0satsPattern2} p2a + * @property {_0satsPattern2} p2ms + * @property {_0satsPattern2} p2pk33 + * @property {_0satsPattern2} p2pk65 + * @property {_0satsPattern2} p2pkh + * @property {_0satsPattern2} p2sh + * @property {_0satsPattern2} p2tr + * @property {_0satsPattern2} p2wpkh + * @property {_0satsPattern2} p2wsh + * @property {_0satsPattern2} unknown + */ + +/** + * @typedef {Object} CatalogTree_Computed_Stateful_UtxoCohorts_Year + * @property {_10yTo12yPattern} _2009 + * @property {_10yTo12yPattern} _2010 + * @property {_10yTo12yPattern} _2011 + * @property {_10yTo12yPattern} _2012 + * @property {_10yTo12yPattern} _2013 + * @property {_10yTo12yPattern} _2014 + * @property {_10yTo12yPattern} _2015 + * @property {_10yTo12yPattern} _2016 + * @property {_10yTo12yPattern} _2017 + * @property {_10yTo12yPattern} _2018 + * @property {_10yTo12yPattern} _2019 + * @property {_10yTo12yPattern} _2020 + * @property {_10yTo12yPattern} _2021 + * @property {_10yTo12yPattern} _2022 + * @property {_10yTo12yPattern} _2023 + * @property {_10yTo12yPattern} _2024 + * @property {_10yTo12yPattern} _2025 + * @property {_10yTo12yPattern} _2026 + */ + +/** + * @typedef {Object} CatalogTree_Indexed + * @property {MetricNode} address + * @property {MetricNode} block + * @property {MetricNode} output + * @property {MetricNode} tx + * @property {MetricNode} txin + 
* @property {MetricNode} txout + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Address + * @property {Indexes2} firstP2aaddressindex + * @property {Indexes2} firstP2pk33addressindex + * @property {Indexes2} firstP2pk65addressindex + * @property {Indexes2} firstP2pkhaddressindex + * @property {Indexes2} firstP2shaddressindex + * @property {Indexes2} firstP2traddressindex + * @property {Indexes2} firstP2wpkhaddressindex + * @property {Indexes2} firstP2wshaddressindex + * @property {Indexes12} p2abytes + * @property {Indexes13} p2pk33bytes + * @property {Indexes14} p2pk65bytes + * @property {Indexes15} p2pkhbytes + * @property {Indexes16} p2shbytes + * @property {Indexes17} p2trbytes + * @property {Indexes18} p2wpkhbytes + * @property {Indexes19} p2wshbytes + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Block + * @property {Indexes2} blockhash + * @property {Indexes2} difficulty + * @property {Indexes2} timestamp + * @property {Indexes2} totalSize + * @property {Indexes2} weight + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Output + * @property {Indexes2} firstEmptyoutputindex + * @property {Indexes2} firstOpreturnindex + * @property {Indexes2} firstP2msoutputindex + * @property {Indexes2} firstUnknownoutputindex + * @property {MetricNode} txindex + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Tx + * @property {Indexes6} baseSize + * @property {Indexes2} firstTxindex + * @property {Indexes6} firstTxinindex + * @property {Indexes6} firstTxoutindex + * @property {Indexes6} height + * @property {Indexes6} isExplicitlyRbf + * @property {Indexes6} rawlocktime + * @property {Indexes6} totalSize + * @property {Indexes6} txid + * @property {Indexes6} txversion + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Txin + * @property {Indexes2} firstTxinindex + * @property {Indexes7} outpoint + * @property {Indexes7} txindex + */ + +/** + * @typedef {Object} CatalogTree_Indexed_Txout + * @property {Indexes2} firstTxoutindex + * @property {Indexes20} 
outputtype + * @property {Indexes20} txindex + * @property {Indexes20} typeindex + * @property {Indexes20} value + */ + +/** + * Main BRK client with catalog tree and API methods + * @extends BrkClientBase + */ +class BrkClient extends BrkClientBase { + /** + * @param {BrkClientOptions|string} options + */ + constructor(options) { + super(options); + /** @type {CatalogTree} */ + this.tree = this._buildTree(''); + } + + /** + * @private + * @param {string} basePath + * @returns {CatalogTree} + */ + _buildTree(basePath) { + return { + computed: { + blks: { + position: new MetricNode(this, '/position') + }, + chain: { + _1mBlockCount: createIndexes(this, '/1m_block_count'), + _1wBlockCount: createIndexes(this, '/1w_block_count'), + _1yBlockCount: createIndexes(this, '/1y_block_count'), + _24hBlockCount: createIndexes2(this, '/24h_block_count'), + _24hCoinbaseSum: createIndexes2(this, '/24h_coinbase_sum'), + _24hCoinbaseUsdSum: createIndexes2(this, '/24h_coinbase_usd_sum'), + annualizedVolume: createIndexes(this, '/annualized_volume'), + annualizedVolumeBtc: createIndexes(this, '/annualized_volume_btc'), + annualizedVolumeUsd: createIndexes(this, '/annualized_volume_usd'), + blockCount: createBlockCountPattern(this, 'computed_chain/block_count'), + blockCountTarget: createIndexes(this, '/block_count_target'), + blockInterval: createBlockIntervalPattern(this, 'block_interval'), + blockSize: createBlockSizePattern(this, 'computed_chain/block_size'), + blockVbytes: createBlockSizePattern(this, 'computed_chain/block_vbytes'), + blockWeight: createBlockSizePattern(this, 'computed_chain/block_weight'), + blocksBeforeNextDifficultyAdjustment: createIndexes3(this, '/blocks_before_next_difficulty_adjustment'), + blocksBeforeNextHalving: createIndexes3(this, '/blocks_before_next_halving'), + coinbase: createCoinbasePattern(this, 'computed_chain/coinbase'), + daysBeforeNextDifficultyAdjustment: createIndexes3(this, '/days_before_next_difficulty_adjustment'), + 
daysBeforeNextHalving: createIndexes3(this, '/days_before_next_halving'), + difficulty: createIndexes4(this, '/difficulty'), + difficultyAdjustment: createIndexes3(this, '/difficulty_adjustment'), + difficultyAsHash: createIndexes3(this, '/difficulty_as_hash'), + difficultyepoch: createIndexes(this, '/difficultyepoch'), + emptyoutputCount: createBitcoinPattern(this, 'computed_chain/emptyoutput_count'), + exactUtxoCount: createIndexes3(this, '/exact_utxo_count'), + fee: { + base: createIndexes6(this, '/fee'), + bitcoin: createBlockSizePattern(this, 'fee/bitcoin'), + bitcoinTxindex: createIndexes6(this, '/fee_btc'), + dollars: createBlockSizePattern(this, 'fee/dollars'), + dollarsTxindex: createIndexes6(this, '/fee_usd'), + sats: createBlockSizePattern(this, 'fee/sats') + }, + feeDominance: createIndexes5(this, '/fee_dominance'), + feeRate: { + average: createIndexes3(this, '/fee_rate_avg'), + base: createIndexes6(this, '/fee_rate'), + max: createIndexes3(this, '/fee_rate_max'), + median: createIndexes2(this, '/fee_rate_median'), + min: createIndexes3(this, '/fee_rate_min'), + pct10: createIndexes2(this, '/fee_rate_pct10'), + pct25: createIndexes2(this, '/fee_rate_pct25'), + pct75: createIndexes2(this, '/fee_rate_pct75'), + pct90: createIndexes2(this, '/fee_rate_pct90') + }, + halvingepoch: createIndexes(this, '/halvingepoch'), + hashPricePhs: createIndexes3(this, '/hash_price_phs'), + hashPricePhsMin: createIndexes3(this, '/hash_price_phs_min'), + hashPriceRebound: createIndexes3(this, '/hash_price_rebound'), + hashPriceThs: createIndexes3(this, '/hash_price_ths'), + hashPriceThsMin: createIndexes3(this, '/hash_price_ths_min'), + hashRate: createIndexes3(this, '/hash_rate'), + hashRate1mSma: createIndexes(this, '/hash_rate_1m_sma'), + hashRate1wSma: createIndexes(this, '/hash_rate_1w_sma'), + hashRate1ySma: createIndexes(this, '/hash_rate_1y_sma'), + hashRate2mSma: createIndexes(this, '/hash_rate_2m_sma'), + hashValuePhs: createIndexes3(this, '/hash_value_phs'), + 
hashValuePhsMin: createIndexes3(this, '/hash_value_phs_min'), + hashValueRebound: createIndexes3(this, '/hash_value_rebound'), + hashValueThs: createIndexes3(this, '/hash_value_ths'), + hashValueThsMin: createIndexes3(this, '/hash_value_ths_min'), + inflationRate: createIndexes(this, '/inflation_rate'), + inputCount: createBlockSizePattern(this, 'computed_chain/input_count'), + inputValue: createIndexes6(this, '/input_value'), + inputsPerSec: createIndexes(this, '/inputs_per_sec'), + interval: createIndexes2(this, '/interval'), + isCoinbase: createIndexes6(this, '/is_coinbase'), + opreturnCount: createBitcoinPattern(this, 'computed_chain/opreturn_count'), + outputCount: createBlockSizePattern(this, 'computed_chain/output_count'), + outputValue: createIndexes6(this, '/output_value'), + outputsPerSec: createIndexes(this, '/outputs_per_sec'), + p2aCount: createBitcoinPattern(this, 'computed_chain/p2a_count'), + p2msCount: createBitcoinPattern(this, 'computed_chain/p2ms_count'), + p2pk33Count: createBitcoinPattern(this, 'computed_chain/p2pk33_count'), + p2pk65Count: createBitcoinPattern(this, 'computed_chain/p2pk65_count'), + p2pkhCount: createBitcoinPattern(this, 'computed_chain/p2pkh_count'), + p2shCount: createBitcoinPattern(this, 'computed_chain/p2sh_count'), + p2trCount: createBitcoinPattern(this, 'computed_chain/p2tr_count'), + p2wpkhCount: createBitcoinPattern(this, 'computed_chain/p2wpkh_count'), + p2wshCount: createBitcoinPattern(this, 'computed_chain/p2wsh_count'), + puellMultiple: createIndexes(this, '/puell_multiple'), + sentSum: createSentSumPattern(this, 'computed_chain/sent_sum'), + subsidy: createCoinbasePattern(this, 'computed_chain/subsidy'), + subsidyDominance: createIndexes5(this, '/subsidy_dominance'), + subsidyUsd1ySma: createIndexes(this, '/subsidy_usd_1y_sma'), + timestamp: new MetricNode(this, '/timestamp'), + txBtcVelocity: createIndexes(this, '/tx_btc_velocity'), + txCount: createBitcoinPattern(this, 'computed_chain/tx_count'), + txPerSec: 
createIndexes(this, '/tx_per_sec'), + txUsdVelocity: createIndexes(this, '/tx_usd_velocity'), + txV1: createBlockCountPattern(this, 'computed_chain/tx_v1'), + txV2: createBlockCountPattern(this, 'computed_chain/tx_v2'), + txV3: createBlockCountPattern(this, 'computed_chain/tx_v3'), + txVsize: createBlockIntervalPattern(this, 'tx_vsize'), + txWeight: createBlockIntervalPattern(this, 'tx_weight'), + unclaimedRewards: createUnclaimedRewardsPattern(this, 'computed_chain/unclaimed_rewards'), + unknownoutputCount: createBitcoinPattern(this, 'computed_chain/unknownoutput_count'), + value: createIndexes7(this, '/value'), + vbytes: createIndexes2(this, '/vbytes'), + vsize: createIndexes6(this, '/vsize'), + weight: createIndexes6(this, '/weight') + }, + cointime: { + activeCap: createIndexes3(this, '/active_cap'), + activePrice: createIndexes3(this, '/active_price'), + activePriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/active_price_ratio'), + activeSupply: createSentSumPattern(this, 'computed_cointime/active_supply'), + activityToVaultednessRatio: createIndexes3(this, '/activity_to_vaultedness_ratio'), + coinblocksCreated: createBlockCountPattern(this, 'computed_cointime/coinblocks_created'), + coinblocksStored: createBlockCountPattern(this, 'computed_cointime/coinblocks_stored'), + cointimeAdjInflationRate: createIndexes(this, '/cointime_adj_inflation_rate'), + cointimeAdjTxBtcVelocity: createIndexes(this, '/cointime_adj_tx_btc_velocity'), + cointimeAdjTxUsdVelocity: createIndexes(this, '/cointime_adj_tx_usd_velocity'), + cointimeCap: createIndexes3(this, '/cointime_cap'), + cointimePrice: createIndexes3(this, '/cointime_price'), + cointimePriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/cointime_price_ratio'), + cointimeValueCreated: createBlockCountPattern(this, 'computed_cointime/cointime_value_created'), + cointimeValueDestroyed: createBlockCountPattern(this, 'computed_cointime/cointime_value_destroyed'), + cointimeValueStored: 
createBlockCountPattern(this, 'computed_cointime/cointime_value_stored'), + investorCap: createIndexes3(this, '/investor_cap'), + liveliness: createIndexes3(this, '/liveliness'), + thermoCap: createIndexes3(this, '/thermo_cap'), + trueMarketMean: createIndexes3(this, '/true_market_mean'), + trueMarketMeanRatio: createActivePriceRatioPattern(this, 'computed_cointime/true_market_mean_ratio'), + vaultedCap: createIndexes3(this, '/vaulted_cap'), + vaultedPrice: createIndexes3(this, '/vaulted_price'), + vaultedPriceRatio: createActivePriceRatioPattern(this, 'computed_cointime/vaulted_price_ratio'), + vaultedSupply: createSentSumPattern(this, 'computed_cointime/vaulted_supply'), + vaultedness: createIndexes3(this, '/vaultedness') + }, + constants: { + constant0: createIndexes3(this, '/constant_0'), + constant1: createIndexes3(this, '/constant_1'), + constant100: createIndexes3(this, '/constant_100'), + constant2: createIndexes3(this, '/constant_2'), + constant3: createIndexes3(this, '/constant_3'), + constant382: createIndexes3(this, '/constant_38_2'), + constant4: createIndexes3(this, '/constant_4'), + constant50: createIndexes3(this, '/constant_50'), + constant600: createIndexes3(this, '/constant_600'), + constant618: createIndexes3(this, '/constant_61_8'), + constantMinus1: createIndexes3(this, '/constant_minus_1'), + constantMinus2: createIndexes3(this, '/constant_minus_2'), + constantMinus3: createIndexes3(this, '/constant_minus_3'), + constantMinus4: createIndexes3(this, '/constant_minus_4') + }, + fetched: { + priceOhlcInCents: createIndexes8(this, '/price_ohlc_in_cents') + }, + indexes: { + date: createIndexes8(this, '/date'), + dateFixed: createIndexes2(this, '/date_fixed'), + dateindex: createIndexes8(this, '/dateindex'), + dateindexCount: createIndexes9(this, '/dateindex_count'), + decadeindex: new MetricNode(this, '/decadeindex'), + difficultyepoch: new MetricNode(this, '/difficultyepoch'), + emptyoutputindex: new MetricNode(this, '/emptyoutputindex'), + 
firstDateindex: createIndexes9(this, '/first_dateindex'), + firstHeight: new MetricNode(this, '/first_height'), + firstMonthindex: createIndexes10(this, '/first_monthindex'), + firstYearindex: createIndexes11(this, '/first_yearindex'), + halvingepoch: new MetricNode(this, '/halvingepoch'), + height: createIndexes2(this, '/height'), + heightCount: new MetricNode(this, '/height_count'), + inputCount: createIndexes6(this, '/input_count'), + monthindex: new MetricNode(this, '/monthindex'), + monthindexCount: createIndexes10(this, '/monthindex_count'), + opreturnindex: new MetricNode(this, '/opreturnindex'), + outputCount: createIndexes6(this, '/output_count'), + p2aaddressindex: createIndexes12(this, '/p2aaddressindex'), + p2msoutputindex: new MetricNode(this, '/p2msoutputindex'), + p2pk33addressindex: createIndexes13(this, '/p2pk33addressindex'), + p2pk65addressindex: createIndexes14(this, '/p2pk65addressindex'), + p2pkhaddressindex: createIndexes15(this, '/p2pkhaddressindex'), + p2shaddressindex: createIndexes16(this, '/p2shaddressindex'), + p2traddressindex: createIndexes17(this, '/p2traddressindex'), + p2wpkhaddressindex: createIndexes18(this, '/p2wpkhaddressindex'), + p2wshaddressindex: createIndexes19(this, '/p2wshaddressindex'), + quarterindex: new MetricNode(this, '/quarterindex'), + semesterindex: new MetricNode(this, '/semesterindex'), + timestampFixed: createIndexes2(this, '/timestamp_fixed'), + txindex: createIndexes6(this, '/txindex'), + txindexCount: createIndexes2(this, '/txindex_count'), + txinindex: createIndexes7(this, '/txinindex'), + txoutindex: createIndexes20(this, '/txoutindex'), + unknownoutputindex: new MetricNode(this, '/unknownoutputindex'), + weekindex: new MetricNode(this, '/weekindex'), + yearindex: new MetricNode(this, '/yearindex'), + yearindexCount: createIndexes11(this, '/yearindex_count') + }, + market: { + _1dReturns1mSd: create_1dReturns1mSdPattern(this, '1d_returns_1m_sd'), + _1dReturns1wSd: create_1dReturns1mSdPattern(this, 
'1d_returns_1w_sd'), + _1dReturns1ySd: create_1dReturns1mSdPattern(this, '1d_returns_1y_sd'), + _10yCagr: createIndexes(this, '/10y_cagr'), + _10yDcaAvgPrice: createIndexes(this, '/10y_dca_avg_price'), + _10yDcaCagr: createIndexes(this, '/10y_dca_cagr'), + _10yDcaReturns: createIndexes(this, '/10y_dca_returns'), + _10yDcaStack: createIndexes(this, '/10y_dca_stack'), + _10yPriceReturns: createIndexes(this, '/10y_price_returns'), + _1dPriceReturns: createIndexes(this, '/1d_price_returns'), + _1mDcaAvgPrice: createIndexes(this, '/1m_dca_avg_price'), + _1mDcaReturns: createIndexes(this, '/1m_dca_returns'), + _1mDcaStack: createIndexes(this, '/1m_dca_stack'), + _1mPriceReturns: createIndexes(this, '/1m_price_returns'), + _1wDcaAvgPrice: createIndexes(this, '/1w_dca_avg_price'), + _1wDcaReturns: createIndexes(this, '/1w_dca_returns'), + _1wDcaStack: createIndexes(this, '/1w_dca_stack'), + _1wPriceReturns: createIndexes(this, '/1w_price_returns'), + _1yDcaAvgPrice: createIndexes(this, '/1y_dca_avg_price'), + _1yDcaReturns: createIndexes(this, '/1y_dca_returns'), + _1yDcaStack: createIndexes(this, '/1y_dca_stack'), + _1yPriceReturns: createIndexes(this, '/1y_price_returns'), + _2yCagr: createIndexes(this, '/2y_cagr'), + _2yDcaAvgPrice: createIndexes(this, '/2y_dca_avg_price'), + _2yDcaCagr: createIndexes(this, '/2y_dca_cagr'), + _2yDcaReturns: createIndexes(this, '/2y_dca_returns'), + _2yDcaStack: createIndexes(this, '/2y_dca_stack'), + _2yPriceReturns: createIndexes(this, '/2y_price_returns'), + _3mDcaAvgPrice: createIndexes(this, '/3m_dca_avg_price'), + _3mDcaReturns: createIndexes(this, '/3m_dca_returns'), + _3mDcaStack: createIndexes(this, '/3m_dca_stack'), + _3mPriceReturns: createIndexes(this, '/3m_price_returns'), + _3yCagr: createIndexes(this, '/3y_cagr'), + _3yDcaAvgPrice: createIndexes(this, '/3y_dca_avg_price'), + _3yDcaCagr: createIndexes(this, '/3y_dca_cagr'), + _3yDcaReturns: createIndexes(this, '/3y_dca_returns'), + _3yDcaStack: createIndexes(this, 
'/3y_dca_stack'), + _3yPriceReturns: createIndexes(this, '/3y_price_returns'), + _4yCagr: createIndexes(this, '/4y_cagr'), + _4yDcaAvgPrice: createIndexes(this, '/4y_dca_avg_price'), + _4yDcaCagr: createIndexes(this, '/4y_dca_cagr'), + _4yDcaReturns: createIndexes(this, '/4y_dca_returns'), + _4yDcaStack: createIndexes(this, '/4y_dca_stack'), + _4yPriceReturns: createIndexes(this, '/4y_price_returns'), + _5yCagr: createIndexes(this, '/5y_cagr'), + _5yDcaAvgPrice: createIndexes(this, '/5y_dca_avg_price'), + _5yDcaCagr: createIndexes(this, '/5y_dca_cagr'), + _5yDcaReturns: createIndexes(this, '/5y_dca_returns'), + _5yDcaStack: createIndexes(this, '/5y_dca_stack'), + _5yPriceReturns: createIndexes(this, '/5y_price_returns'), + _6mDcaAvgPrice: createIndexes(this, '/6m_dca_avg_price'), + _6mDcaReturns: createIndexes(this, '/6m_dca_returns'), + _6mDcaStack: createIndexes(this, '/6m_dca_stack'), + _6mPriceReturns: createIndexes(this, '/6m_price_returns'), + _6yCagr: createIndexes(this, '/6y_cagr'), + _6yDcaAvgPrice: createIndexes(this, '/6y_dca_avg_price'), + _6yDcaCagr: createIndexes(this, '/6y_dca_cagr'), + _6yDcaReturns: createIndexes(this, '/6y_dca_returns'), + _6yDcaStack: createIndexes(this, '/6y_dca_stack'), + _6yPriceReturns: createIndexes(this, '/6y_price_returns'), + _8yCagr: createIndexes(this, '/8y_cagr'), + _8yDcaAvgPrice: createIndexes(this, '/8y_dca_avg_price'), + _8yDcaCagr: createIndexes(this, '/8y_dca_cagr'), + _8yDcaReturns: createIndexes(this, '/8y_dca_returns'), + _8yDcaStack: createIndexes(this, '/8y_dca_stack'), + _8yPriceReturns: createIndexes(this, '/8y_price_returns'), + daysSincePriceAth: createIndexes(this, '/days_since_price_ath'), + dcaClass2015AvgPrice: createIndexes(this, '/dca_class_2015_avg_price'), + dcaClass2015Returns: createIndexes(this, '/dca_class_2015_returns'), + dcaClass2015Stack: createIndexes(this, '/dca_class_2015_stack'), + dcaClass2016AvgPrice: createIndexes(this, '/dca_class_2016_avg_price'), + dcaClass2016Returns: 
createIndexes(this, '/dca_class_2016_returns'), + dcaClass2016Stack: createIndexes(this, '/dca_class_2016_stack'), + dcaClass2017AvgPrice: createIndexes(this, '/dca_class_2017_avg_price'), + dcaClass2017Returns: createIndexes(this, '/dca_class_2017_returns'), + dcaClass2017Stack: createIndexes(this, '/dca_class_2017_stack'), + dcaClass2018AvgPrice: createIndexes(this, '/dca_class_2018_avg_price'), + dcaClass2018Returns: createIndexes(this, '/dca_class_2018_returns'), + dcaClass2018Stack: createIndexes(this, '/dca_class_2018_stack'), + dcaClass2019AvgPrice: createIndexes(this, '/dca_class_2019_avg_price'), + dcaClass2019Returns: createIndexes(this, '/dca_class_2019_returns'), + dcaClass2019Stack: createIndexes(this, '/dca_class_2019_stack'), + dcaClass2020AvgPrice: createIndexes(this, '/dca_class_2020_avg_price'), + dcaClass2020Returns: createIndexes(this, '/dca_class_2020_returns'), + dcaClass2020Stack: createIndexes(this, '/dca_class_2020_stack'), + dcaClass2021AvgPrice: createIndexes(this, '/dca_class_2021_avg_price'), + dcaClass2021Returns: createIndexes(this, '/dca_class_2021_returns'), + dcaClass2021Stack: createIndexes(this, '/dca_class_2021_stack'), + dcaClass2022AvgPrice: createIndexes(this, '/dca_class_2022_avg_price'), + dcaClass2022Returns: createIndexes(this, '/dca_class_2022_returns'), + dcaClass2022Stack: createIndexes(this, '/dca_class_2022_stack'), + dcaClass2023AvgPrice: createIndexes(this, '/dca_class_2023_avg_price'), + dcaClass2023Returns: createIndexes(this, '/dca_class_2023_returns'), + dcaClass2023Stack: createIndexes(this, '/dca_class_2023_stack'), + dcaClass2024AvgPrice: createIndexes(this, '/dca_class_2024_avg_price'), + dcaClass2024Returns: createIndexes(this, '/dca_class_2024_returns'), + dcaClass2024Stack: createIndexes(this, '/dca_class_2024_stack'), + dcaClass2025AvgPrice: createIndexes(this, '/dca_class_2025_avg_price'), + dcaClass2025Returns: createIndexes(this, '/dca_class_2025_returns'), + dcaClass2025Stack: createIndexes(this, 
'/dca_class_2025_stack'), + maxDaysBetweenPriceAths: createIndexes(this, '/max_days_between_price_aths'), + maxYearsBetweenPriceAths: createIndexes(this, '/max_years_between_price_aths'), + price10yAgo: createIndexes(this, '/price_10y_ago'), + price13dEma: createPrice13dEmaPattern(this, 'price_13d_ema'), + price13dSma: createPrice13dEmaPattern(this, 'price_13d_sma'), + price144dEma: createPrice13dEmaPattern(this, 'price_144d_ema'), + price144dSma: createPrice13dEmaPattern(this, 'price_144d_sma'), + price1dAgo: createIndexes(this, '/price_1d_ago'), + price1mAgo: createIndexes(this, '/price_1m_ago'), + price1mEma: createPrice13dEmaPattern(this, 'price_1m_ema'), + price1mMax: createIndexes(this, '/price_1m_max'), + price1mMin: createIndexes(this, '/price_1m_min'), + price1mSma: createPrice13dEmaPattern(this, 'price_1m_sma'), + price1mVolatility: createIndexes(this, '/price_1m_volatility'), + price1wAgo: createIndexes(this, '/price_1w_ago'), + price1wEma: createPrice13dEmaPattern(this, 'price_1w_ema'), + price1wMax: createIndexes(this, '/price_1w_max'), + price1wMin: createIndexes(this, '/price_1w_min'), + price1wSma: createPrice13dEmaPattern(this, 'price_1w_sma'), + price1wVolatility: createIndexes(this, '/price_1w_volatility'), + price1yAgo: createIndexes(this, '/price_1y_ago'), + price1yEma: createPrice13dEmaPattern(this, 'price_1y_ema'), + price1yMax: createIndexes(this, '/price_1y_max'), + price1yMin: createIndexes(this, '/price_1y_min'), + price1ySma: createPrice13dEmaPattern(this, 'price_1y_sma'), + price1yVolatility: createIndexes(this, '/price_1y_volatility'), + price200dEma: createPrice13dEmaPattern(this, 'price_200d_ema'), + price200dSma: createPrice13dEmaPattern(this, 'price_200d_sma'), + price200dSmaX08: createIndexes(this, '/price_200d_sma_x0_8'), + price200dSmaX24: createIndexes(this, '/price_200d_sma_x2_4'), + price200wEma: createPrice13dEmaPattern(this, 'price_200w_ema'), + price200wSma: createPrice13dEmaPattern(this, 'price_200w_sma'), + price21dEma: 
createPrice13dEmaPattern(this, 'price_21d_ema'), + price21dSma: createPrice13dEmaPattern(this, 'price_21d_sma'), + price2wChoppinessIndex: createIndexes(this, '/price_2w_choppiness_index'), + price2wMax: createIndexes(this, '/price_2w_max'), + price2wMin: createIndexes(this, '/price_2w_min'), + price2yAgo: createIndexes(this, '/price_2y_ago'), + price2yEma: createPrice13dEmaPattern(this, 'price_2y_ema'), + price2ySma: createPrice13dEmaPattern(this, 'price_2y_sma'), + price34dEma: createPrice13dEmaPattern(this, 'price_34d_ema'), + price34dSma: createPrice13dEmaPattern(this, 'price_34d_sma'), + price3mAgo: createIndexes(this, '/price_3m_ago'), + price3yAgo: createIndexes(this, '/price_3y_ago'), + price4yAgo: createIndexes(this, '/price_4y_ago'), + price4yEma: createPrice13dEmaPattern(this, 'price_4y_ema'), + price4ySma: createPrice13dEmaPattern(this, 'price_4y_sma'), + price55dEma: createPrice13dEmaPattern(this, 'price_55d_ema'), + price55dSma: createPrice13dEmaPattern(this, 'price_55d_sma'), + price5yAgo: createIndexes(this, '/price_5y_ago'), + price6mAgo: createIndexes(this, '/price_6m_ago'), + price6yAgo: createIndexes(this, '/price_6y_ago'), + price89dEma: createPrice13dEmaPattern(this, 'price_89d_ema'), + price89dSma: createPrice13dEmaPattern(this, 'price_89d_sma'), + price8dEma: createPrice13dEmaPattern(this, 'price_8d_ema'), + price8dSma: createPrice13dEmaPattern(this, 'price_8d_sma'), + price8yAgo: createIndexes(this, '/price_8y_ago'), + priceAth: createIndexes21(this, '/price_ath'), + priceDrawdown: createIndexes21(this, '/price_drawdown'), + priceTrueRange: createIndexes5(this, '/price_true_range'), + priceTrueRange2wSum: createIndexes5(this, '/price_true_range_2w_sum') + }, + pools: { + pool: createIndexes2(this, '/pool'), + vecs: { + aXbt: createAXbtPattern(this, 'computed_pools_vecs/AXbt'), + aaoPool: createAXbtPattern(this, 'computed_pools_vecs/AaoPool'), + antPool: createAXbtPattern(this, 'computed_pools_vecs/AntPool'), + arkPool: 
createAXbtPattern(this, 'computed_pools_vecs/ArkPool'), + asicMiner: createAXbtPattern(this, 'computed_pools_vecs/AsicMiner'), + batPool: createAXbtPattern(this, 'computed_pools_vecs/BatPool'), + bcMonster: createAXbtPattern(this, 'computed_pools_vecs/BcMonster'), + bcpoolIo: createAXbtPattern(this, 'computed_pools_vecs/BcpoolIo'), + binancePool: createAXbtPattern(this, 'computed_pools_vecs/BinancePool'), + bitClub: createAXbtPattern(this, 'computed_pools_vecs/BitClub'), + bitFuFuPool: createAXbtPattern(this, 'computed_pools_vecs/BitFuFuPool'), + bitFury: createAXbtPattern(this, 'computed_pools_vecs/BitFury'), + bitMinter: createAXbtPattern(this, 'computed_pools_vecs/BitMinter'), + bitalo: createAXbtPattern(this, 'computed_pools_vecs/Bitalo'), + bitcoinAffiliateNetwork: createAXbtPattern(this, 'computed_pools_vecs/BitcoinAffiliateNetwork'), + bitcoinCom: createAXbtPattern(this, 'computed_pools_vecs/BitcoinCom'), + bitcoinIndia: createAXbtPattern(this, 'computed_pools_vecs/BitcoinIndia'), + bitcoinRussia: createAXbtPattern(this, 'computed_pools_vecs/BitcoinRussia'), + bitcoinUkraine: createAXbtPattern(this, 'computed_pools_vecs/BitcoinUkraine'), + bitfarms: createAXbtPattern(this, 'computed_pools_vecs/Bitfarms'), + bitparking: createAXbtPattern(this, 'computed_pools_vecs/Bitparking'), + bitsolo: createAXbtPattern(this, 'computed_pools_vecs/Bitsolo'), + bixin: createAXbtPattern(this, 'computed_pools_vecs/Bixin'), + blockFills: createAXbtPattern(this, 'computed_pools_vecs/BlockFills'), + braiinsPool: createAXbtPattern(this, 'computed_pools_vecs/BraiinsPool'), + bravoMining: createAXbtPattern(this, 'computed_pools_vecs/BravoMining'), + btPool: createAXbtPattern(this, 'computed_pools_vecs/BtPool'), + btcCom: createAXbtPattern(this, 'computed_pools_vecs/BtcCom'), + btcDig: createAXbtPattern(this, 'computed_pools_vecs/BtcDig'), + btcGuild: createAXbtPattern(this, 'computed_pools_vecs/BtcGuild'), + btcLab: createAXbtPattern(this, 'computed_pools_vecs/BtcLab'), + btcMp: 
createAXbtPattern(this, 'computed_pools_vecs/BtcMp'), + btcNuggets: createAXbtPattern(this, 'computed_pools_vecs/BtcNuggets'), + btcPoolParty: createAXbtPattern(this, 'computed_pools_vecs/BtcPoolParty'), + btcServ: createAXbtPattern(this, 'computed_pools_vecs/BtcServ'), + btcTop: createAXbtPattern(this, 'computed_pools_vecs/BtcTop'), + btcc: createAXbtPattern(this, 'computed_pools_vecs/Btcc'), + bwPool: createAXbtPattern(this, 'computed_pools_vecs/BwPool'), + bytePool: createAXbtPattern(this, 'computed_pools_vecs/BytePool'), + canoe: createAXbtPattern(this, 'computed_pools_vecs/Canoe'), + canoePool: createAXbtPattern(this, 'computed_pools_vecs/CanoePool'), + carbonNegative: createAXbtPattern(this, 'computed_pools_vecs/CarbonNegative'), + ckPool: createAXbtPattern(this, 'computed_pools_vecs/CkPool'), + cloudHashing: createAXbtPattern(this, 'computed_pools_vecs/CloudHashing'), + coinLab: createAXbtPattern(this, 'computed_pools_vecs/CoinLab'), + cointerra: createAXbtPattern(this, 'computed_pools_vecs/Cointerra'), + connectBtc: createAXbtPattern(this, 'computed_pools_vecs/ConnectBtc'), + dPool: createAXbtPattern(this, 'computed_pools_vecs/DPool'), + dcExploration: createAXbtPattern(this, 'computed_pools_vecs/DcExploration'), + dcex: createAXbtPattern(this, 'computed_pools_vecs/Dcex'), + digitalBtc: createAXbtPattern(this, 'computed_pools_vecs/DigitalBtc'), + digitalXMintsy: createAXbtPattern(this, 'computed_pools_vecs/DigitalXMintsy'), + eclipseMc: createAXbtPattern(this, 'computed_pools_vecs/EclipseMc'), + eightBaochi: createAXbtPattern(this, 'computed_pools_vecs/EightBaochi'), + ekanemBtc: createAXbtPattern(this, 'computed_pools_vecs/EkanemBtc'), + eligius: createAXbtPattern(this, 'computed_pools_vecs/Eligius'), + emcdPool: createAXbtPattern(this, 'computed_pools_vecs/EmcdPool'), + entrustCharityPool: createAXbtPattern(this, 'computed_pools_vecs/EntrustCharityPool'), + eobot: createAXbtPattern(this, 'computed_pools_vecs/Eobot'), + exxBw: createAXbtPattern(this, 
'computed_pools_vecs/ExxBw'), + f2Pool: createAXbtPattern(this, 'computed_pools_vecs/F2Pool'), + fiftyEightCoin: createAXbtPattern(this, 'computed_pools_vecs/FiftyEightCoin'), + foundryUsa: createAXbtPattern(this, 'computed_pools_vecs/FoundryUsa'), + futureBitApolloSolo: createAXbtPattern(this, 'computed_pools_vecs/FutureBitApolloSolo'), + gbMiners: createAXbtPattern(this, 'computed_pools_vecs/GbMiners'), + ghashIo: createAXbtPattern(this, 'computed_pools_vecs/GhashIo'), + giveMeCoins: createAXbtPattern(this, 'computed_pools_vecs/GiveMeCoins'), + goGreenLight: createAXbtPattern(this, 'computed_pools_vecs/GoGreenLight'), + haoZhuZhu: createAXbtPattern(this, 'computed_pools_vecs/HaoZhuZhu'), + haominer: createAXbtPattern(this, 'computed_pools_vecs/Haominer'), + hashBx: createAXbtPattern(this, 'computed_pools_vecs/HashBx'), + hashPool: createAXbtPattern(this, 'computed_pools_vecs/HashPool'), + helix: createAXbtPattern(this, 'computed_pools_vecs/Helix'), + hhtt: createAXbtPattern(this, 'computed_pools_vecs/Hhtt'), + hotPool: createAXbtPattern(this, 'computed_pools_vecs/HotPool'), + hummerpool: createAXbtPattern(this, 'computed_pools_vecs/Hummerpool'), + huobiPool: createAXbtPattern(this, 'computed_pools_vecs/HuobiPool'), + innopolisTech: createAXbtPattern(this, 'computed_pools_vecs/InnopolisTech'), + kanoPool: createAXbtPattern(this, 'computed_pools_vecs/KanoPool'), + kncMiner: createAXbtPattern(this, 'computed_pools_vecs/KncMiner'), + kuCoinPool: createAXbtPattern(this, 'computed_pools_vecs/KuCoinPool'), + lubianCom: createAXbtPattern(this, 'computed_pools_vecs/LubianCom'), + luckyPool: createAXbtPattern(this, 'computed_pools_vecs/LuckyPool'), + luxor: createAXbtPattern(this, 'computed_pools_vecs/Luxor'), + maraPool: createAXbtPattern(this, 'computed_pools_vecs/MaraPool'), + maxBtc: createAXbtPattern(this, 'computed_pools_vecs/MaxBtc'), + maxiPool: createAXbtPattern(this, 'computed_pools_vecs/MaxiPool'), + megaBigPower: createAXbtPattern(this, 
'computed_pools_vecs/MegaBigPower'), + minerium: createAXbtPattern(this, 'computed_pools_vecs/Minerium'), + miningCity: createAXbtPattern(this, 'computed_pools_vecs/MiningCity'), + miningDutch: createAXbtPattern(this, 'computed_pools_vecs/MiningDutch'), + miningKings: createAXbtPattern(this, 'computed_pools_vecs/MiningKings'), + miningSquared: createAXbtPattern(this, 'computed_pools_vecs/MiningSquared'), + mmpool: createAXbtPattern(this, 'computed_pools_vecs/Mmpool'), + mtRed: createAXbtPattern(this, 'computed_pools_vecs/MtRed'), + multiCoinCo: createAXbtPattern(this, 'computed_pools_vecs/MultiCoinCo'), + multipool: createAXbtPattern(this, 'computed_pools_vecs/Multipool'), + myBtcCoinPool: createAXbtPattern(this, 'computed_pools_vecs/MyBtcCoinPool'), + neopool: createAXbtPattern(this, 'computed_pools_vecs/Neopool'), + nexious: createAXbtPattern(this, 'computed_pools_vecs/Nexious'), + niceHash: createAXbtPattern(this, 'computed_pools_vecs/NiceHash'), + nmcBit: createAXbtPattern(this, 'computed_pools_vecs/NmcBit'), + novaBlock: createAXbtPattern(this, 'computed_pools_vecs/NovaBlock'), + ocean: createAXbtPattern(this, 'computed_pools_vecs/Ocean'), + okExPool: createAXbtPattern(this, 'computed_pools_vecs/OkExPool'), + okMiner: createAXbtPattern(this, 'computed_pools_vecs/OkMiner'), + okkong: createAXbtPattern(this, 'computed_pools_vecs/Okkong'), + okpoolTop: createAXbtPattern(this, 'computed_pools_vecs/OkpoolTop'), + oneHash: createAXbtPattern(this, 'computed_pools_vecs/OneHash'), + oneM1x: createAXbtPattern(this, 'computed_pools_vecs/OneM1x'), + oneThash: createAXbtPattern(this, 'computed_pools_vecs/OneThash'), + ozCoin: createAXbtPattern(this, 'computed_pools_vecs/OzCoin'), + pHashIo: createAXbtPattern(this, 'computed_pools_vecs/PHashIo'), + parasite: createAXbtPattern(this, 'computed_pools_vecs/Parasite'), + patels: createAXbtPattern(this, 'computed_pools_vecs/Patels'), + pegaPool: createAXbtPattern(this, 'computed_pools_vecs/PegaPool'), + phoenix: 
createAXbtPattern(this, 'computed_pools_vecs/Phoenix'), + polmine: createAXbtPattern(this, 'computed_pools_vecs/Polmine'), + pool175btc: createAXbtPattern(this, 'computed_pools_vecs/Pool175btc'), + pool50btc: createAXbtPattern(this, 'computed_pools_vecs/Pool50btc'), + poolin: createAXbtPattern(this, 'computed_pools_vecs/Poolin'), + portlandHodl: createAXbtPattern(this, 'computed_pools_vecs/PortlandHodl'), + publicPool: createAXbtPattern(this, 'computed_pools_vecs/PublicPool'), + pureBtcCom: createAXbtPattern(this, 'computed_pools_vecs/PureBtcCom'), + rawpool: createAXbtPattern(this, 'computed_pools_vecs/Rawpool'), + rigPool: createAXbtPattern(this, 'computed_pools_vecs/RigPool'), + sbiCrypto: createAXbtPattern(this, 'computed_pools_vecs/SbiCrypto'), + secPool: createAXbtPattern(this, 'computed_pools_vecs/SecPool'), + secretSuperstar: createAXbtPattern(this, 'computed_pools_vecs/SecretSuperstar'), + sevenPool: createAXbtPattern(this, 'computed_pools_vecs/SevenPool'), + shawnP0wers: createAXbtPattern(this, 'computed_pools_vecs/ShawnP0wers'), + sigmapoolCom: createAXbtPattern(this, 'computed_pools_vecs/SigmapoolCom'), + simplecoinUs: createAXbtPattern(this, 'computed_pools_vecs/SimplecoinUs'), + soloCk: createAXbtPattern(this, 'computed_pools_vecs/SoloCk'), + spiderPool: createAXbtPattern(this, 'computed_pools_vecs/SpiderPool'), + stMiningCorp: createAXbtPattern(this, 'computed_pools_vecs/StMiningCorp'), + tangpool: createAXbtPattern(this, 'computed_pools_vecs/Tangpool'), + tatmasPool: createAXbtPattern(this, 'computed_pools_vecs/TatmasPool'), + tbDice: createAXbtPattern(this, 'computed_pools_vecs/TbDice'), + telco214: createAXbtPattern(this, 'computed_pools_vecs/Telco214'), + terraPool: createAXbtPattern(this, 'computed_pools_vecs/TerraPool'), + tiger: createAXbtPattern(this, 'computed_pools_vecs/Tiger'), + tigerpoolNet: createAXbtPattern(this, 'computed_pools_vecs/TigerpoolNet'), + titan: createAXbtPattern(this, 'computed_pools_vecs/Titan'), + transactionCoinMining: 
createAXbtPattern(this, 'computed_pools_vecs/TransactionCoinMining'), + trickysBtcPool: createAXbtPattern(this, 'computed_pools_vecs/TrickysBtcPool'), + tripleMining: createAXbtPattern(this, 'computed_pools_vecs/TripleMining'), + twentyOneInc: createAXbtPattern(this, 'computed_pools_vecs/TwentyOneInc'), + ultimusPool: createAXbtPattern(this, 'computed_pools_vecs/UltimusPool'), + unknown: createAXbtPattern(this, 'computed_pools_vecs/Unknown'), + unomp: createAXbtPattern(this, 'computed_pools_vecs/Unomp'), + viaBtc: createAXbtPattern(this, 'computed_pools_vecs/ViaBtc'), + waterhole: createAXbtPattern(this, 'computed_pools_vecs/Waterhole'), + wayiCn: createAXbtPattern(this, 'computed_pools_vecs/WayiCn'), + whitePool: createAXbtPattern(this, 'computed_pools_vecs/WhitePool'), + wk057: createAXbtPattern(this, 'computed_pools_vecs/Wk057'), + yourbtcNet: createAXbtPattern(this, 'computed_pools_vecs/YourbtcNet'), + zulupool: createAXbtPattern(this, 'computed_pools_vecs/Zulupool') + } + }, + price: { + priceClose: createIndexes3(this, '/price_close'), + priceCloseInCents: createIndexes8(this, '/price_close_in_cents'), + priceCloseInSats: createIndexes3(this, '/price_close_in_sats'), + priceHigh: createIndexes3(this, '/price_high'), + priceHighInCents: createIndexes8(this, '/price_high_in_cents'), + priceHighInSats: createIndexes3(this, '/price_high_in_sats'), + priceLow: createIndexes3(this, '/price_low'), + priceLowInCents: createIndexes8(this, '/price_low_in_cents'), + priceLowInSats: createIndexes3(this, '/price_low_in_sats'), + priceOhlc: createIndexes3(this, '/price_ohlc'), + priceOhlcInSats: createIndexes3(this, '/price_ohlc_in_sats'), + priceOpen: createIndexes3(this, '/price_open'), + priceOpenInCents: createIndexes8(this, '/price_open_in_cents'), + priceOpenInSats: createIndexes3(this, '/price_open_in_sats') + }, + stateful: { + addrCount: createIndexes3(this, '/addr_count'), + addressCohorts: { + amountRange: { + _0sats: create_0satsPattern(this, 
'computed_stateful_address_cohorts_amount_range/_0sats'), + _100btcTo1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100btc_to_1k_btc'), + _100kBtcOrMore: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100k_btc_or_more'), + _100kSatsTo1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100k_sats_to_1m_sats'), + _100satsTo1kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_100sats_to_1k_sats'), + _10btcTo100btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10btc_to_100btc'), + _10kBtcTo100kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10k_btc_to_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10k_sats_to_100k_sats'), + _10mSatsTo1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10m_sats_to_1btc'), + _10satsTo100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_10sats_to_100sats'), + _1btcTo10btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1btc_to_10btc'), + _1kBtcTo10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1k_btc_to_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1k_sats_to_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1m_sats_to_10m_sats'), + _1satTo10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_amount_range/_1sat_to_10sats') + }, + geAmount: { + _100btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100btc'), + _100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100k_sats'), + _100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_100sats'), + _10btc: 
create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10btc'), + _10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10k_btc'), + _10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10k_sats'), + _10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10m_sats'), + _10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_10sats'), + _1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1btc'), + _1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1k_btc'), + _1kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1k_sats'), + _1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1m_sats'), + _1sat: create_0satsPattern(this, 'computed_stateful_address_cohorts_ge_amount/_1sat') + }, + ltAmount: { + _100btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100btc'), + _100kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100k_btc'), + _100kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100k_sats'), + _100sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_100sats'), + _10btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10btc'), + _10kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10k_btc'), + _10kSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10k_sats'), + _10mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10m_sats'), + _10sats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_10sats'), + _1btc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1btc'), + _1kBtc: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1k_btc'), + _1kSats: 
create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1k_sats'), + _1mSats: create_0satsPattern(this, 'computed_stateful_address_cohorts_lt_amount/_1m_sats') + } + }, + addressesData: { + empty: createIndexes23(this, '/emptyaddressdata'), + loaded: createIndexes24(this, '/loadedaddressdata') + }, + addresstypeToHeightToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_height_to_addr_count'), + addresstypeToHeightToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_height_to_empty_addr_count'), + addresstypeToIndexesToAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_indexes_to_addr_count'), + addresstypeToIndexesToEmptyAddrCount: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/addresstype_to_indexes_to_empty_addr_count'), + anyAddressIndexes: createAddresstypeToHeightToAddrCountPattern(this, 'computed_stateful/any_address_indexes'), + chainState: createIndexes2(this, '/chain'), + emptyAddrCount: createIndexes3(this, '/empty_addr_count'), + emptyaddressindex: createIndexes23(this, '/emptyaddressindex'), + loadedaddressindex: createIndexes24(this, '/loadedaddressindex'), + marketCap: createIndexes21(this, '/market_cap'), + opreturnSupply: createSupplyPattern(this, 'computed_stateful/opreturn_supply'), + txinindex: createIndexes20(this, '/txinindex'), + unspendableSupply: createSupplyPattern(this, 'computed_stateful/unspendable_supply'), + utxoCohorts: { + ageRange: { + _10yTo12y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_10y_to_12y'), + _12yTo15y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_12y_to_15y'), + _1dTo1w: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1d_to_1w'), + _1mTo2m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1m_to_2m'), + _1wTo1m: create_10yTo12yPattern(this, 
'computed_stateful_utxo_cohorts_age_range/_1w_to_1m'), + _1yTo2y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_1y_to_2y'), + _2mTo3m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_2m_to_3m'), + _2yTo3y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_2y_to_3y'), + _3mTo4m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_3m_to_4m'), + _3yTo4y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_3y_to_4y'), + _4mTo5m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_4m_to_5m'), + _4yTo5y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_4y_to_5y'), + _5mTo6m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_5m_to_6m'), + _5yTo6y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_5y_to_6y'), + _6mTo1y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_6m_to_1y'), + _6yTo7y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_6y_to_7y'), + _7yTo8y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_7y_to_8y'), + _8yTo10y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/_8y_to_10y'), + from15y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_age_range/from_15y'), + upTo1d: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_age_range/up_to_1d') + }, + all: { + activity: createActivityPattern(this, 'computed_stateful_utxo_cohorts_all/activity'), + pricePaid: createPricePaidPattern2(this, 'computed_stateful_utxo_cohorts_all/price_paid'), + realized: createRealizedPattern3(this, 'computed_stateful_utxo_cohorts_all/realized'), + relative: { + negUnrealizedLossRelToMarketCap: createIndexes21(this, '/neg_unrealized_loss_rel_to_market_cap'), + negUnrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes21(this, '/neg_unrealized_loss_rel_to_own_total_unrealized_pnl'), 
+ netUnrealizedPnlRelToMarketCap: createIndexes21(this, '/net_unrealized_pnl_rel_to_market_cap'), + netUnrealizedPnlRelToOwnTotalUnrealizedPnl: createIndexes21(this, '/net_unrealized_pnl_rel_to_own_total_unrealized_pnl'), + supplyInLossRelToOwnSupply: createIndexes21(this, '/supply_in_loss_rel_to_own_supply'), + supplyInProfitRelToOwnSupply: createIndexes21(this, '/supply_in_profit_rel_to_own_supply'), + unrealizedLossRelToMarketCap: createIndexes21(this, '/unrealized_loss_rel_to_market_cap'), + unrealizedLossRelToOwnTotalUnrealizedPnl: createIndexes21(this, '/unrealized_loss_rel_to_own_total_unrealized_pnl'), + unrealizedProfitRelToMarketCap: createIndexes21(this, '/unrealized_profit_rel_to_market_cap'), + unrealizedProfitRelToOwnTotalUnrealizedPnl: createIndexes21(this, '/unrealized_profit_rel_to_own_total_unrealized_pnl') + }, + supply: createSupplyPattern2(this, 'computed_stateful_utxo_cohorts_all/supply'), + unrealized: createUnrealizedPattern(this, 'computed_stateful_utxo_cohorts_all/unrealized') + }, + amountRange: { + _0sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_0sats'), + _100btcTo1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100btc_to_1k_btc'), + _100kBtcOrMore: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100k_btc_or_more'), + _100kSatsTo1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100k_sats_to_1m_sats'), + _100satsTo1kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_100sats_to_1k_sats'), + _10btcTo100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10btc_to_100btc'), + _10kBtcTo100kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10k_btc_to_100k_btc'), + _10kSatsTo100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10k_sats_to_100k_sats'), + _10mSatsTo1btc: create_0satsPattern2(this, 
'computed_stateful_utxo_cohorts_amount_range/_10m_sats_to_1btc'), + _10satsTo100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_10sats_to_100sats'), + _1btcTo10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1btc_to_10btc'), + _1kBtcTo10kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1k_btc_to_10k_btc'), + _1kSatsTo10kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1k_sats_to_10k_sats'), + _1mSatsTo10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1m_sats_to_10m_sats'), + _1satTo10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_amount_range/_1sat_to_10sats') + }, + epoch: { + _0: create_10yTo12yPattern(this, 'epoch/_0'), + _1: create_10yTo12yPattern(this, 'epoch/_1'), + _2: create_10yTo12yPattern(this, 'epoch/_2'), + _3: create_10yTo12yPattern(this, 'epoch/_3'), + _4: create_10yTo12yPattern(this, 'epoch/_4') + }, + geAmount: { + _100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100btc'), + _100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100k_sats'), + _100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_100sats'), + _10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10btc'), + _10kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10k_btc'), + _10kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10k_sats'), + _10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10m_sats'), + _10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_10sats'), + _1btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1btc'), + _1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1k_btc'), + _1kSats: create_0satsPattern2(this, 
'computed_stateful_utxo_cohorts_ge_amount/_1k_sats'), + _1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1m_sats'), + _1sat: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_ge_amount/_1sat') + }, + ltAmount: { + _100btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100btc'), + _100kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100k_btc'), + _100kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100k_sats'), + _100sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_100sats'), + _10btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10btc'), + _10kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10k_btc'), + _10kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10k_sats'), + _10mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10m_sats'), + _10sats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_10sats'), + _1btc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1btc'), + _1kBtc: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1k_btc'), + _1kSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1k_sats'), + _1mSats: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_lt_amount/_1m_sats') + }, + maxAge: { + _10y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_10y'), + _12y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_12y'), + _15y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_15y'), + _1m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1m'), + _1w: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1w'), + _1y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_1y'), + _2m: createUpTo1dPattern(this, 
'computed_stateful_utxo_cohorts_max_age/_2m'), + _2y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_2y'), + _3m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_3m'), + _3y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_3y'), + _4m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_4m'), + _4y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_4y'), + _5m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_5m'), + _5y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_5y'), + _6m: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_6m'), + _6y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_6y'), + _7y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_7y'), + _8y: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_max_age/_8y') + }, + minAge: { + _10y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_10y'), + _12y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_12y'), + _1d: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1d'), + _1m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1m'), + _1w: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1w'), + _1y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_1y'), + _2m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_2m'), + _2y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_2y'), + _3m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_3m'), + _3y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_3y'), + _4m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_4m'), + _4y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_4y'), + _5m: create_10yTo12yPattern(this, 
'computed_stateful_utxo_cohorts_min_age/_5m'), + _5y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_5y'), + _6m: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_6m'), + _6y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_6y'), + _7y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_7y'), + _8y: create_10yTo12yPattern(this, 'computed_stateful_utxo_cohorts_min_age/_8y') + }, + term: { + long: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_term/long'), + short: createUpTo1dPattern(this, 'computed_stateful_utxo_cohorts_term/short') + }, + type: { + empty: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/empty'), + p2a: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2a'), + p2ms: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2ms'), + p2pk33: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pk33'), + p2pk65: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pk65'), + p2pkh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2pkh'), + p2sh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2sh'), + p2tr: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2tr'), + p2wpkh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2wpkh'), + p2wsh: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/p2wsh'), + unknown: create_0satsPattern2(this, 'computed_stateful_utxo_cohorts_type_/unknown') + }, + year: { + _2009: create_10yTo12yPattern(this, 'year/_2009'), + _2010: create_10yTo12yPattern(this, 'year/_2010'), + _2011: create_10yTo12yPattern(this, 'year/_2011'), + _2012: create_10yTo12yPattern(this, 'year/_2012'), + _2013: create_10yTo12yPattern(this, 'year/_2013'), + _2014: create_10yTo12yPattern(this, 'year/_2014'), + _2015: create_10yTo12yPattern(this, 'year/_2015'), + _2016: create_10yTo12yPattern(this, 
'year/_2016'), + _2017: create_10yTo12yPattern(this, 'year/_2017'), + _2018: create_10yTo12yPattern(this, 'year/_2018'), + _2019: create_10yTo12yPattern(this, 'year/_2019'), + _2020: create_10yTo12yPattern(this, 'year/_2020'), + _2021: create_10yTo12yPattern(this, 'year/_2021'), + _2022: create_10yTo12yPattern(this, 'year/_2022'), + _2023: create_10yTo12yPattern(this, 'year/_2023'), + _2024: create_10yTo12yPattern(this, 'year/_2024'), + _2025: create_10yTo12yPattern(this, 'year/_2025'), + _2026: create_10yTo12yPattern(this, 'year/_2026') + } + } + } + }, + indexed: { + address: { + firstP2aaddressindex: createIndexes2(this, '/first_p2aaddressindex'), + firstP2pk33addressindex: createIndexes2(this, '/first_p2pk33addressindex'), + firstP2pk65addressindex: createIndexes2(this, '/first_p2pk65addressindex'), + firstP2pkhaddressindex: createIndexes2(this, '/first_p2pkhaddressindex'), + firstP2shaddressindex: createIndexes2(this, '/first_p2shaddressindex'), + firstP2traddressindex: createIndexes2(this, '/first_p2traddressindex'), + firstP2wpkhaddressindex: createIndexes2(this, '/first_p2wpkhaddressindex'), + firstP2wshaddressindex: createIndexes2(this, '/first_p2wshaddressindex'), + p2abytes: createIndexes12(this, '/p2abytes'), + p2pk33bytes: createIndexes13(this, '/p2pk33bytes'), + p2pk65bytes: createIndexes14(this, '/p2pk65bytes'), + p2pkhbytes: createIndexes15(this, '/p2pkhbytes'), + p2shbytes: createIndexes16(this, '/p2shbytes'), + p2trbytes: createIndexes17(this, '/p2trbytes'), + p2wpkhbytes: createIndexes18(this, '/p2wpkhbytes'), + p2wshbytes: createIndexes19(this, '/p2wshbytes') + }, + block: { + blockhash: createIndexes2(this, '/blockhash'), + difficulty: createIndexes2(this, '/difficulty'), + timestamp: createIndexes2(this, '/timestamp'), + totalSize: createIndexes2(this, '/total_size'), + weight: createIndexes2(this, '/weight') + }, + output: { + firstEmptyoutputindex: createIndexes2(this, '/first_emptyoutputindex'), + firstOpreturnindex: createIndexes2(this, 
'/first_opreturnindex'), + firstP2msoutputindex: createIndexes2(this, '/first_p2msoutputindex'), + firstUnknownoutputindex: createIndexes2(this, '/first_unknownoutputindex'), + txindex: new MetricNode(this, '/txindex') + }, + tx: { + baseSize: createIndexes6(this, '/base_size'), + firstTxindex: createIndexes2(this, '/first_txindex'), + firstTxinindex: createIndexes6(this, '/first_txinindex'), + firstTxoutindex: createIndexes6(this, '/first_txoutindex'), + height: createIndexes6(this, '/height'), + isExplicitlyRbf: createIndexes6(this, '/is_explicitly_rbf'), + rawlocktime: createIndexes6(this, '/rawlocktime'), + totalSize: createIndexes6(this, '/total_size'), + txid: createIndexes6(this, '/txid'), + txversion: createIndexes6(this, '/txversion') + }, + txin: { + firstTxinindex: createIndexes2(this, '/first_txinindex'), + outpoint: createIndexes7(this, '/outpoint'), + txindex: createIndexes7(this, '/txindex') + }, + txout: { + firstTxoutindex: createIndexes2(this, '/first_txoutindex'), + outputtype: createIndexes20(this, '/outputtype'), + txindex: createIndexes20(this, '/txindex'), + typeindex: createIndexes20(this, '/typeindex'), + value: createIndexes20(this, '/value') + } + } + }; + } + + /** + * Address information + * @param {string} address + * @returns {Promise} + */ + async getApiAddressByAddress(address) { + return this.get(`/api/address/${address}`); + } + + /** + * Address transaction IDs + * @param {string} address + * @param {string=} [after_txid] Txid to paginate from (return transactions before this one) + * @param {string=} [limit] Maximum number of results to return. Defaults to 25 if not specified. 
+ * @returns {Promise} + */ + async getApiAddressByAddressTxs(address, after_txid, limit) { + const params = new URLSearchParams(); + if (after_txid !== undefined) params.set('after_txid', String(after_txid)); + if (limit !== undefined) params.set('limit', String(limit)); + const query = params.toString(); + return this.get(`/api/address/${address}/txs${query ? '?' + query : ''}`); + } + + /** + * Address confirmed transactions + * @param {string} address + * @param {string=} [after_txid] Txid to paginate from (return transactions before this one) + * @param {string=} [limit] Maximum number of results to return. Defaults to 25 if not specified. + * @returns {Promise} + */ + async getApiAddressByAddressTxsChain(address, after_txid, limit) { + const params = new URLSearchParams(); + if (after_txid !== undefined) params.set('after_txid', String(after_txid)); + if (limit !== undefined) params.set('limit', String(limit)); + const query = params.toString(); + return this.get(`/api/address/${address}/txs/chain${query ? '?' 
+ query : ''}`); + } + + /** + * Address mempool transactions + * @param {string} address + * @returns {Promise} + */ + async getApiAddressByAddressTxsMempool(address) { + return this.get(`/api/address/${address}/txs/mempool`); + } + + /** + * Address UTXOs + * @param {string} address + * @returns {Promise} + */ + async getApiAddressByAddressUtxo(address) { + return this.get(`/api/address/${address}/utxo`); + } + + /** + * Block by height + * @param {string} height + * @returns {Promise} + */ + async getApiBlockHeightByHeight(height) { + return this.get(`/api/block-height/${height}`); + } + + /** + * Block information + * @param {string} hash + * @returns {Promise} + */ + async getApiBlockByHash(hash) { + return this.get(`/api/block/${hash}`); + } + + /** + * Raw block + * @param {string} hash + * @returns {Promise} + */ + async getApiBlockByHashRaw(hash) { + return this.get(`/api/block/${hash}/raw`); + } + + /** + * Block status + * @param {string} hash + * @returns {Promise} + */ + async getApiBlockByHashStatus(hash) { + return this.get(`/api/block/${hash}/status`); + } + + /** + * Transaction ID at index + * @param {string} hash Bitcoin block hash + * @param {string} index Transaction index within the block (0-based) + * @returns {Promise} + */ + async getApiBlockByHashTxidByIndex(hash, index) { + return this.get(`/api/block/${hash}/txid/${index}`); + } + + /** + * Block transaction IDs + * @param {string} hash + * @returns {Promise} + */ + async getApiBlockByHashTxids(hash) { + return this.get(`/api/block/${hash}/txids`); + } + + /** + * Block transactions (paginated) + * @param {string} hash Bitcoin block hash + * @param {string} start_index Starting transaction index within the block (0-based) + * @returns {Promise} + */ + async getApiBlockByHashTxsByStartIndex(hash, start_index) { + return this.get(`/api/block/${hash}/txs/${start_index}`); + } + + /** + * Recent blocks + * @returns {Promise} + */ + async getApiBlocks() { + return this.get(`/api/blocks`); + } 
+ + /** + * Blocks from height + * @param {string} height + * @returns {Promise} + */ + async getApiBlocksByHeight(height) { + return this.get(`/api/blocks/${height}`); + } + + /** + * Mempool statistics + * @returns {Promise} + */ + async getApiMempoolInfo() { + return this.get(`/api/mempool/info`); + } + + /** + * Mempool transaction IDs + * @returns {Promise} + */ + async getApiMempoolTxids() { + return this.get(`/api/mempool/txids`); + } + + /** + * Get supported indexes for a metric + * @param {string} metric + * @returns {Promise} + */ + async getApiMetricByMetric(metric) { + return this.get(`/api/metric/${metric}`); + } + + /** + * Get metric data + * @param {string} metric Metric name + * @param {string} index Aggregation index + * @param {string=} [from] Inclusive starting index, if negative counts from end + * @param {string=} [to] Exclusive ending index, if negative counts from end + * @param {string=} [count] Number of values to return (ignored if `to` is set) + * @param {string=} [format] Format of the output + * @returns {Promise} + */ + async getApiMetricByMetricByIndex(metric, index, from, to, count, format) { + const params = new URLSearchParams(); + if (from !== undefined) params.set('from', String(from)); + if (to !== undefined) params.set('to', String(to)); + if (count !== undefined) params.set('count', String(count)); + if (format !== undefined) params.set('format', String(format)); + const query = params.toString(); + return this.get(`/api/metric/${metric}/${index}${query ? '?' 
+ query : ''}`); + } + + /** + * Bulk metric data + * @param {string} metrics Requested metrics + * @param {string} index Index to query + * @param {string=} [from] Inclusive starting index, if negative counts from end + * @param {string=} [to] Exclusive ending index, if negative counts from end + * @param {string=} [count] Number of values to return (ignored if `to` is set) + * @param {string=} [format] Format of the output + * @returns {Promise} + */ + async getApiMetricsBulk(metrics, index, from, to, count, format) { + const params = new URLSearchParams(); + params.set('metrics', String(metrics)); + params.set('index', String(index)); + if (from !== undefined) params.set('from', String(from)); + if (to !== undefined) params.set('to', String(to)); + if (count !== undefined) params.set('count', String(count)); + if (format !== undefined) params.set('format', String(format)); + const query = params.toString(); + return this.get(`/api/metrics/bulk${query ? '?' + query : ''}`); + } + + /** + * Metrics catalog + * @returns {Promise} + */ + async getApiMetricsCatalog() { + return this.get(`/api/metrics/catalog`); + } + + /** + * Metric count + * @returns {Promise} + */ + async getApiMetricsCount() { + return this.get(`/api/metrics/count`); + } + + /** + * List available indexes + * @returns {Promise} + */ + async getApiMetricsIndexes() { + return this.get(`/api/metrics/indexes`); + } + + /** + * Metrics list + * @param {string=} [page] Pagination index + * @returns {Promise} + */ + async getApiMetricsList(page) { + const params = new URLSearchParams(); + if (page !== undefined) params.set('page', String(page)); + const query = params.toString(); + return this.get(`/api/metrics/list${query ? '?' 
+ query : ''}`); + } + + /** + * Search metrics + * @param {string} metric + * @param {string=} [limit] + * @returns {Promise} + */ + async getApiMetricsSearchByMetric(metric, limit) { + const params = new URLSearchParams(); + if (limit !== undefined) params.set('limit', String(limit)); + const query = params.toString(); + return this.get(`/api/metrics/search/${metric}${query ? '?' + query : ''}`); + } + + /** + * Transaction information + * @param {string} txid + * @returns {Promise} + */ + async getApiTxByTxid(txid) { + return this.get(`/api/tx/${txid}`); + } + + /** + * Transaction hex + * @param {string} txid + * @returns {Promise} + */ + async getApiTxByTxidHex(txid) { + return this.get(`/api/tx/${txid}/hex`); + } + + /** + * Output spend status + * @param {string} txid Transaction ID + * @param {string} vout Output index + * @returns {Promise} + */ + async getApiTxByTxidOutspendByVout(txid, vout) { + return this.get(`/api/tx/${txid}/outspend/${vout}`); + } + + /** + * All output spend statuses + * @param {string} txid + * @returns {Promise} + */ + async getApiTxByTxidOutspends(txid) { + return this.get(`/api/tx/${txid}/outspends`); + } + + /** + * Transaction status + * @param {string} txid + * @returns {Promise} + */ + async getApiTxByTxidStatus(txid) { + return this.get(`/api/tx/${txid}/status`); + } + + /** + * Difficulty adjustment + * @returns {Promise} + */ + async getApiV1DifficultyAdjustment() { + return this.get(`/api/v1/difficulty-adjustment`); + } + + /** + * Projected mempool blocks + * @returns {Promise} + */ + async getApiV1FeesMempoolBlocks() { + return this.get(`/api/v1/fees/mempool-blocks`); + } + + /** + * Recommended fees + * @returns {Promise} + */ + async getApiV1FeesRecommended() { + return this.get(`/api/v1/fees/recommended`); + } + + /** + * Block fees + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningBlocksFeesByTimePeriod(time_period) { + return this.get(`/api/v1/mining/blocks/fees/${time_period}`); + 
} + + /** + * Block rewards + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningBlocksRewardsByTimePeriod(time_period) { + return this.get(`/api/v1/mining/blocks/rewards/${time_period}`); + } + + /** + * Block sizes and weights + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningBlocksSizesWeightsByTimePeriod(time_period) { + return this.get(`/api/v1/mining/blocks/sizes-weights/${time_period}`); + } + + /** + * Block by timestamp + * @param {string} timestamp + * @returns {Promise} + */ + async getApiV1MiningBlocksTimestampByTimestamp(timestamp) { + return this.get(`/api/v1/mining/blocks/timestamp/${timestamp}`); + } + + /** + * Difficulty adjustments (all time) + * @returns {Promise} + */ + async getApiV1MiningDifficultyAdjustments() { + return this.get(`/api/v1/mining/difficulty-adjustments`); + } + + /** + * Difficulty adjustments + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningDifficultyAdjustmentsByTimePeriod(time_period) { + return this.get(`/api/v1/mining/difficulty-adjustments/${time_period}`); + } + + /** + * Network hashrate (all time) + * @returns {Promise} + */ + async getApiV1MiningHashrate() { + return this.get(`/api/v1/mining/hashrate`); + } + + /** + * Network hashrate + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningHashrateByTimePeriod(time_period) { + return this.get(`/api/v1/mining/hashrate/${time_period}`); + } + + /** + * Mining pool details + * @param {string} slug + * @returns {Promise} + */ + async getApiV1MiningPoolBySlug(slug) { + return this.get(`/api/v1/mining/pool/${slug}`); + } + + /** + * List all mining pools + * @returns {Promise} + */ + async getApiV1MiningPools() { + return this.get(`/api/v1/mining/pools`); + } + + /** + * Mining pool statistics + * @param {string} time_period + * @returns {Promise} + */ + async getApiV1MiningPoolsByTimePeriod(time_period) { + return 
this.get(`/api/v1/mining/pools/${time_period}`); + } + + /** + * Mining reward statistics + * @param {string} block_count Number of recent blocks to include + * @returns {Promise} + */ + async getApiV1MiningRewardStatsByBlockCount(block_count) { + return this.get(`/api/v1/mining/reward-stats/${block_count}`); + } + + /** + * Validate address + * @param {string} address Bitcoin address to validate (can be any string) + * @returns {Promise} + */ + async getApiV1ValidateAddressByAddress(address) { + return this.get(`/api/v1/validate-address/${address}`); + } + + /** + * Health check + * @returns {Promise} + */ + async getHealth() { + return this.get(`/health`); + } + + /** + * API version + * @returns {Promise} + */ + async getVersion() { + return this.get(`/version`); + } + +} + +export { BrkClient, BrkClientBase, BrkError, MetricNode }; diff --git a/modules/brk-client/metrics.js b/modules/brk-client/metrics.js deleted file mode 100644 index f54e6cb7f..000000000 --- a/modules/brk-client/metrics.js +++ /dev/null @@ -1,111 +0,0 @@ -import { - INDEX_TO_WORD, - COMPRESSED_METRIC_TO_INDEXES, -} from "./generated/metrics"; - -/** - * @typedef {typeof import("./generated/metrics")["COMPRESSED_METRIC_TO_INDEXES"]} MetricToIndexes - * @typedef {string} Metric - */ - -/** @type {Record} */ -const WORD_TO_INDEX = {}; - -INDEX_TO_WORD.forEach((word, index) => { - WORD_TO_INDEX[word] = index; -}); - -/** - * @param {Metric} metric - */ -export function getIndexesFromMetric(metric) { - return COMPRESSED_METRIC_TO_INDEXES[compressMetric(metric)]; -} - -/** - * @param {Metric} metric - */ -export function hasMetric(metric) { - return compressMetric(metric) in COMPRESSED_METRIC_TO_INDEXES; -} - -/** - * @param {string} metric - */ -function compressMetric(metric) { - return metric - .split("_") - .map((word) => { - const index = WORD_TO_INDEX[word]; - return index !== undefined ? 
indexToLetters(index) : word; - }) - .join("_"); -} - -/** - * @param {string} compressedMetric - */ -function decompressMetric(compressedMetric) { - return compressedMetric - .split("_") - .map((code) => { - const index = lettersToIndex(code); - return INDEX_TO_WORD[index] || code; // Fallback to original if not found - }) - .join("_"); -} - -/** - * @param {string} letters - */ -function lettersToIndex(letters) { - let result = 0; - for (let i = 0; i < letters.length; i++) { - const value = charToIndex(letters.charCodeAt(i)); - result = result * 52 + value + 1; - } - return result - 1; -} - -/** - * @param {number} byte - */ -function charToIndex(byte) { - if (byte >= 65 && byte <= 90) { - // 'A' to 'Z' - return byte - 65; - } else if (byte >= 97 && byte <= 122) { - // 'a' to 'z' - return byte - 97 + 26; - } else { - return 255; // Invalid - } -} - -/** - * @param {number} index - */ -function indexToLetters(index) { - if (index < 52) { - return indexToChar(index); - } - let result = []; - while (true) { - result.push(indexToChar(index % 52)); - index = Math.floor(index / 52); - if (index === 0) break; - index -= 1; - } - return result.reverse().join(""); -} - -/** - * @param {number} index - */ -function indexToChar(index) { - if (index <= 25) { - return String.fromCharCode(65 + index); // A-Z - } else { - return String.fromCharCode(97 + index - 26); // a-z - } -}