diff --git a/Cargo.lock b/Cargo.lock index c10461b34..270d34709 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -568,6 +568,8 @@ version = "0.1.0-alpha.0" dependencies = [ "brk_query", "brk_types", + "schemars", + "vecdb", ] [[package]] @@ -634,6 +636,7 @@ dependencies = [ "pco", "rayon", "rustc-hash", + "schemars", "serde", "smallvec", "vecdb", @@ -1253,7 +1256,9 @@ version = "0.1.0-alpha.0" dependencies = [ "brk_traversable_derive", "brk_types", + "schemars", "serde", + "serde_json", "vecdb", ] @@ -4193,8 +4198,6 @@ dependencies = [ [[package]] name = "rawdb" version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e76ca8167fb720e8feb864618de99327b628fa0ff0e5221f09ebea1abdffcac2" dependencies = [ "libc", "log", @@ -5385,8 +5388,6 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23" [[package]] name = "vecdb" version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf728869972437c4b600555ed98b8102378a0194010e6399f2430a4f79b6eaa7" dependencies = [ "ctrlc", "log", @@ -5394,6 +5395,7 @@ dependencies = [ "parking_lot", "pco", "rawdb", + "schemars", "serde", "serde_json", "thiserror 2.0.17", @@ -5405,8 +5407,6 @@ dependencies = [ [[package]] name = "vecdb_derive" version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba8822fd1709751e026000483d85211870112d89e8404b33226416c2d9a6de4" dependencies = [ "quote", "syn 2.0.111", diff --git a/Cargo.toml b/Cargo.toml index 3aff21b50..8a7e36a68 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -65,8 +65,6 @@ byteview = "0.9.1" color-eyre = "0.6.5" derive_deref = "1.1.1" fjall = "3.0.0-rc.6" -# fjall3 = { path = "../fjall3", package = "fjall" } -# fjall3 = { git = "https://github.com/fjall-rs/fjall.git", rev = "434979ef59d8fd2b36b91e6ff759a36d19a397ee", package = "fjall" } jiff = "0.2.16" log = "0.4.29" mimalloc = { version = "0.1.48", features = ["v3"] } @@ -81,8 +79,8 @@ serde_derive = 
"1.0.228" serde_json = { version = "1.0.145", features = ["float_roundtrip"] } smallvec = "1.15.1" tokio = { version = "1.48.0", features = ["rt-multi-thread"] } -vecdb = { version = "0.4.3", features = ["derive", "serde_json", "pco"] } -# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco"] } +# vecdb = { version = "0.4.3", features = ["derive", "serde_json", "pco"] } +vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] } # vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] } [workspace.metadata.release] diff --git a/crates/brk_binder/Cargo.toml b/crates/brk_binder/Cargo.toml index 0900f2f80..7d3746345 100644 --- a/crates/brk_binder/Cargo.toml +++ b/crates/brk_binder/Cargo.toml @@ -11,3 +11,5 @@ build = "build.rs" [dependencies] brk_query = { workspace = true } brk_types = { workspace = true } +schemars = { workspace = true } +vecdb = { workspace = true } diff --git a/crates/brk_binder/DESIGN.md b/crates/brk_binder/DESIGN.md new file mode 100644 index 000000000..d6b7c75db --- /dev/null +++ b/crates/brk_binder/DESIGN.md @@ -0,0 +1,336 @@ +# brk_binder Design Document + +## Goal + +Generate typed API clients for **Rust, TypeScript, and Python** with: +- **Discoverability**: Full IDE autocomplete for 20k+ metrics +- **Ease of use**: Fluent API with `.fetch()` on each metric node + +## Current State + +### What Exists + +1. **`js.rs`**: Generates compressed metric catalogs for JS (constants only, no HTTP client) +2. **`tree.rs`**: (kept for reference, not compiled) Brainstorming output for pattern extraction +3. 
**`generator/`**: Module structure for client generation + - `types.rs`: Intermediate representation (`ClientMetadata`, `MetricInfo`, `IndexPattern`) + - `rust.rs`: Rust client generation (stub) + - `typescript.rs`: TypeScript client generation (stub) + - `python.rs`: Python client generation (stub) + +### What's Missing + +- HTTP client integration (`.fetch()` methods) +- OpenAPI as input source +- Rust client using `brk_types` instead of generating types +- Typed response types per metric + +## Target Architecture + +### Input Sources + +``` +┌─────────────────────────────────────────────────────────────┐ +│ Input Sources │ +├─────────────────────────────────────────────────────────────┤ +│ 1. OpenAPI spec (from aide) - endpoint definitions │ +│ 2. brk_query catalog - metric tree structure │ +│ 3. brk_types - Rust types for responses (Rust client only) │ +└─────────────────────────────────────────────────────────────┘ +``` + +### Output: Fluent Client (Option B) + +```typescript +// TypeScript +const client = new BrkClient("http://localhost:3000"); +const data = await client.tree.supply.active.by_date.fetch(); +// ^^^^ autocomplete all the way down +``` + +```python +# Python +client = BrkClient("http://localhost:3000") +data = await client.tree.supply.active.by_date.fetch() +``` + +```rust +// Rust +let client = BrkClient::new("http://localhost:3000"); +let data = client.tree.supply.active.by_date.fetch().await?; +``` + +## Implementation Details + +### Smart Metric Nodes + +Each tree leaf becomes a "smart node" holding a client reference: + +```typescript +// TypeScript +class MetricNode { + constructor(private client: BrkClient, private path: string) {} + async fetch(): Promise { + return this.client.get(this.path); + } +} +``` + +```python +# Python +class MetricNode(Generic[T]): + def __init__(self, client: BrkClient, path: str): + self._client = client + self._path = path + + async def fetch(self) -> T: + return await self._client.get(self._path) +``` + 
+```rust
+// Rust
+pub struct MetricNode<T> {
+    client: Arc<BrkClient>,
+    path: &'static str,
+    _phantom: PhantomData<T>,
+}
+
+impl<T> MetricNode<T> {
+    pub async fn fetch(&self) -> Result<T> {
+        self.client.get(&self.path).await
+    }
+}
+```
+
+### Pattern Reuse (from tree.rs)
+
+To avoid 20k+ individual types, reuse structural patterns:
+
+```rust
+// Shared pattern for metrics with same index groupings
+struct ByDateHeightMonth<T> {
+    by_date: MetricNode<T>,
+    by_height: MetricNode<T>,
+    by_month: MetricNode<T>,
+}
+
+// Composed into full tree
+struct Supply {
+    active: ByDateHeightMonth<Vec<Sats>>,
+    total: ByDateHeightMonth<Vec<Sats>>,
+}
+```
+
+### Rust Client: Using brk_types
+
+The Rust client should import `brk_types` rather than generating duplicate types:
+
+```rust
+use brk_types::{Height, Sats, DateIndex, ...};
+
+// Response types come from brk_types
+pub struct MetricNode<T> { ... }
+```
+
+## Type Discovery Solution ✅ IMPLEMENTED
+
+### The Problem
+
+Type information was erased at runtime because metrics are stored as `&dyn AnyExportableVec` trait objects.
+
+### The Solution
+
+Use `std::any::type_name::<T>()` with caching to extract short type names.
+
+> **Note**: Unlike `PrintableIndex` which needs `to_possible_strings()` for parsing from
+> multiple string representations, for values we only need output, so `type_name` suffices.
+
+#### Implementation (vecdb)
+
+Added `short_type_name<T>()` helper in `traits/printable.rs`:
+
+```rust
+pub fn short_type_name<T>() -> &'static str {
+    static CACHE: OnceLock<Mutex<HashMap<&'static str, &'static str>>> = OnceLock::new();
+
+    let full: &'static str = std::any::type_name::<T>();
+    // ... caching logic, extracts "Sats" from "brk_types::sats::Sats"
+}
+```
+
+Added `value_type_to_string()` to `AnyVec` trait in `traits/any.rs`:
+
+```rust
+pub trait AnyVec: Send + Sync {
+    // ... existing methods
+    fn value_type_to_string(&self) -> &'static str;
+}
+```
+
+Implemented in all vec variants:
+- `variants/eager/mod.rs`
+- `variants/lazy/from1/mod.rs`, `from2/mod.rs`, `from3/mod.rs`
+- `variants/raw/inner/mod.rs`
+- `variants/compressed/inner/mod.rs`
+- `variants/macros.rs` (for wrapper types)
+
+```rust
+fn value_type_to_string(&self) -> &'static str {
+    short_type_name::<T>()
+}
+```
+
+**No changes needed to brk_types** - works automatically for all types.
+
+### Result
+
+`brk_query` now exposes:
+
+```rust
+for (metric_name, index_to_vec) in &vecs.metric_to_index_to_vec {
+    for (index, vec) in index_to_vec {
+        println!("{} @ {} -> {}",
+            metric_name,                 // "difficulty"
+            vec.index_type_to_string(),  // "Height"
+            vec.value_type_to_string(),  // "StoredF64"
+        );
+    }
+}
+```
+
+This enables fully typed client generation.
+
+## TreeNode Enhancement ✅ IMPLEMENTED
+
+### The Problem
+
+`TreeNode::Leaf` originally held just a `String` (the metric name), losing type and index information.
+ +### The Solution + +Changed `TreeNode::Leaf(String)` to `TreeNode::Leaf(MetricLeaf)` where: + +```rust +#[derive(Debug, Clone, Serialize, PartialEq, Eq, JsonSchema)] +pub struct MetricLeaf { + pub name: String, + pub value_type: String, + pub indexes: BTreeSet, +} +``` + +#### Implementation + +**brk_types/src/treenode.rs**: +- Added `MetricLeaf` struct with `name`, `value_type`, and `indexes` +- Added `merge_indexes()` method to union indexes when flattening tree +- Updated `TreeNode` enum to use `Leaf(MetricLeaf)` +- Updated merge logic to handle index merging + +**brk_traversable/src/lib.rs**: +- Added `make_leaf()` helper that creates `MetricLeaf` with proper fields +- Updated all `Traversable::to_tree_node()` implementations + +### Result + +The catalog tree now includes full type information at each leaf: + +```rust +TreeNode::Leaf(MetricLeaf { + name: "difficulty".to_string(), + value_type: "StoredF64".to_string(), + indexes: btreeset![Index::Height, Index::Date], +}) +``` + +When trees are merged/simplified, indexes are unioned together. + +### 2. Async Runtime + +- TypeScript: Native `Promise` +- Python: `asyncio` or sync variant? +- Rust: `tokio` assumed, or feature-flag for other runtimes? + +### 3. Error Handling + +- HTTP errors (4xx, 5xx) +- Deserialization errors +- Network errors +- Should errors be typed per language? + +### 4. 
Additional Client Features + +- Request timeout configuration +- Retry logic +- Rate limiting +- Caching +- Batch requests (fetch multiple metrics at once) + +## Tasks + +### Phase 0: Type Infrastructure ✅ COMPLETE + +- [x] **vecdb**: Add `short_type_name()` helper in `traits/printable.rs` +- [x] **vecdb**: Add `value_type_to_string()` to `AnyVec` trait +- [x] **vecdb**: Implement in all vec variants (eager, lazy, raw, compressed, macros) +- [x] **brk_types**: Enhance `TreeNode::Leaf` to include `MetricLeaf` with name, value_type, indexes +- [x] **brk_traversable**: Update all `to_tree_node()` implementations to populate `MetricLeaf` +- [x] **brk_query**: Export `Vecs` publicly for client generation +- [x] **brk_binder**: Set up generator module structure (types, rust, typescript, python stubs) +- [x] **brk**: Verify compilation + +### Phase 1: Client Foundation + +- [ ] Define `MetricNode` struct/class for each language +- [ ] Define `BrkClient` with base HTTP functionality +- [ ] Implement `ClientMetadata::from_vecs()` to extract metadata from `brk_query::Vecs` +- [ ] Client holds reference, nodes borrow it + +### Phase 2: Type-Aware Generation + +- [ ] Create type mapping: `value_type_string → Rust type / TS type / Python type` +- [ ] Generate typed `MetricNode` with correct `T` per metric +- [ ] For Rust: import from `brk_types` instead of generating + +### Phase 3: OpenAPI Integration + +- [ ] Parse OpenAPI spec with `openapiv3` crate +- [ ] Extract non-metric endpoint definitions +- [ ] Generate methods for health, info, catalog, etc. 
+ +### Phase 4: Polish + +- [ ] Error types per language +- [ ] Documentation generation +- [ ] Tests +- [ ] Example usage in each language + +## File Structure + +``` +crates/brk_binder/ +├── src/ +│ ├── lib.rs +│ ├── js.rs # JS constants generation (existing) +│ ├── tree.rs # Pattern extraction (reference only, not compiled) +│ └── generator/ +│ ├── mod.rs +│ ├── types.rs # ClientMetadata, MetricInfo, IndexPattern +│ ├── rust.rs # Rust client generation +│ ├── typescript.rs +│ └── python.rs +├── Cargo.toml +├── README.md +└── DESIGN.md # This file +``` + +## Dependencies (Proposed) + +```toml +[dependencies] +openapiv3 = "2" # OpenAPI parsing +serde = { version = "1", features = ["derive"] } +serde_json = "1" +serde_yaml = "0.9" # If parsing YAML specs +tera = "1" # Optional: templating +``` diff --git a/crates/brk_binder/src/generator/javascript.rs b/crates/brk_binder/src/generator/javascript.rs new file mode 100644 index 000000000..c6af2bd66 --- /dev/null +++ b/crates/brk_binder/src/generator/javascript.rs @@ -0,0 +1,303 @@ +use std::collections::BTreeMap; +use std::fmt::Write as FmtWrite; +use std::fs; +use std::io; +use std::path::Path; + +use brk_types::{MetricLeaf, TreeNode}; + +use super::{to_camel_case, ClientMetadata, IndexPattern}; + +/// Generate TypeScript client from metadata +pub fn generate_typescript_client(metadata: &ClientMetadata, output_dir: &Path) -> io::Result<()> { + let mut output = String::new(); + + // Header + writeln!(output, "// Auto-generated BRK TypeScript client").unwrap(); + writeln!(output, "// Do not edit manually\n").unwrap(); + + // Generate pattern interfaces for index groupings + generate_pattern_interfaces(&mut output, &metadata.patterns); + + // Generate value type aliases + generate_value_types(&mut output, metadata); + + // Generate the base client class + generate_base_client(&mut output); + + // Generate tree types from catalog + generate_tree_types(&mut output, &metadata.catalog); + + // Generate the main client class 
with tree + generate_main_client(&mut output, &metadata.catalog); + + fs::write(output_dir.join("client.ts"), output)?; + + Ok(()) +} + +/// Generate TypeScript interfaces for common index patterns +fn generate_pattern_interfaces(output: &mut String, patterns: &[IndexPattern]) { + writeln!(output, "// Index pattern interfaces").unwrap(); + writeln!(output, "// Reusable patterns for metrics with the same index groupings\n").unwrap(); + + for pattern in patterns { + let pattern_name = pattern_to_name(pattern); + writeln!(output, "export interface {} {{", pattern_name).unwrap(); + + for index in &pattern.indexes { + let field_name = to_camel_case(&index.serialize_long()); + writeln!(output, " {}: MetricNode;", field_name).unwrap(); + } + + writeln!(output, "}}\n").unwrap(); + } +} + +/// Generate TypeScript type aliases for value types +fn generate_value_types(output: &mut String, metadata: &ClientMetadata) { + writeln!(output, "// Value type aliases").unwrap(); + writeln!(output, "// Maps Rust types to TypeScript types\n").unwrap(); + + // Collect unique value types + let mut value_types: Vec<&str> = metadata + .metrics + .values() + .map(|m| m.value_type.as_str()) + .collect(); + value_types.sort(); + value_types.dedup(); + + for vt in value_types { + let ts_type = rust_type_to_ts(vt); + writeln!(output, "export type {} = {};", vt, ts_type).unwrap(); + } + writeln!(output).unwrap(); +} + +/// Generate the base BrkClient class with HTTP functionality +fn generate_base_client(output: &mut String) { + writeln!( + output, + r#"// Base HTTP client +export interface BrkClientOptions {{ + baseUrl: string; + timeout?: number; +}} + +export class BrkClientBase {{ + private baseUrl: string; + private timeout: number; + + constructor(options: BrkClientOptions | string) {{ + if (typeof options === 'string') {{ + this.baseUrl = options.replace(/\/$/, ''); + this.timeout = 30000; + }} else {{ + this.baseUrl = options.baseUrl.replace(/\/$/, ''); + this.timeout = options.timeout ?? 
30000; + }} + }} + + async get(path: string): Promise {{ + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), this.timeout); + + try {{ + const response = await fetch(`${{this.baseUrl}}${{path}}`, {{ + signal: controller.signal, + headers: {{ 'Accept': 'application/json' }}, + }}); + + if (!response.ok) {{ + throw new BrkError(`HTTP ${{response.status}}: ${{response.statusText}}`, response.status); + }} + + return await response.json(); + }} finally {{ + clearTimeout(timeoutId); + }} + }} +}} + +export class BrkError extends Error {{ + constructor(message: string, public statusCode?: number) {{ + super(message); + this.name = 'BrkError'; + }} +}} + +// Metric node with fetch capability +export class MetricNode {{ + constructor(private client: BrkClientBase, private path: string) {{}} + + async fetch(): Promise {{ + return this.client.get(this.path); + }} + + toString(): string {{ + return this.path; + }} +}} + +"# + ) + .unwrap(); +} + +/// Generate TypeScript types for the catalog tree +fn generate_tree_types(output: &mut String, catalog: &TreeNode) { + writeln!(output, "// Catalog tree types\n").unwrap(); + generate_node_type(output, "CatalogTree", catalog, ""); +} + +/// Recursively generate type for a tree node +fn generate_node_type(output: &mut String, name: &str, node: &TreeNode, path: &str) { + match node { + TreeNode::Leaf(leaf) => { + // Leaf nodes are MetricNode + // No separate interface needed, handled inline + } + TreeNode::Branch(children) => { + writeln!(output, "export interface {} {{", name).unwrap(); + + for (child_name, child_node) in children { + let field_name = to_camel_case(child_name); + let child_path = if path.is_empty() { + format!("/{}", child_name) + } else { + format!("{}/{}", path, child_name) + }; + + match child_node { + TreeNode::Leaf(leaf) => { + let value_type = &leaf.value_type; + writeln!(output, " {}: MetricNode<{}>;", field_name, value_type).unwrap(); + } + TreeNode::Branch(_) 
=> { + let child_type_name = format!("{}_{}", name, to_pascal_case(child_name)); + writeln!(output, " {}: {};", field_name, child_type_name).unwrap(); + } + } + } + + writeln!(output, "}}\n").unwrap(); + + // Generate child types + for (child_name, child_node) in children { + if let TreeNode::Branch(_) = child_node { + let child_type_name = format!("{}_{}", name, to_pascal_case(child_name)); + let child_path = if path.is_empty() { + format!("/{}", child_name) + } else { + format!("{}/{}", path, child_name) + }; + generate_node_type(output, &child_type_name, child_node, &child_path); + } + } + } + } +} + +/// Generate the main client class with initialized tree +fn generate_main_client(output: &mut String, catalog: &TreeNode) { + writeln!(output, "// Main client class with catalog tree").unwrap(); + writeln!(output, "export class BrkClient extends BrkClientBase {{").unwrap(); + writeln!(output, " readonly tree: CatalogTree;\n").unwrap(); + writeln!(output, " constructor(options: BrkClientOptions | string) {{").unwrap(); + writeln!(output, " super(options);").unwrap(); + writeln!(output, " this.tree = this._buildTree();").unwrap(); + writeln!(output, " }}\n").unwrap(); + + // Generate _buildTree method + writeln!(output, " private _buildTree(): CatalogTree {{").unwrap(); + writeln!(output, " return {{").unwrap(); + generate_tree_initializer(output, catalog, "", 3); + writeln!(output, " }};").unwrap(); + writeln!(output, " }}").unwrap(); + writeln!(output, "}}").unwrap(); +} + +/// Generate the tree initializer code +fn generate_tree_initializer(output: &mut String, node: &TreeNode, path: &str, indent: usize) { + let indent_str = " ".repeat(indent); + + if let TreeNode::Branch(children) = node { + for (i, (child_name, child_node)) in children.iter().enumerate() { + let field_name = to_camel_case(child_name); + let child_path = if path.is_empty() { + format!("/{}", child_name) + } else { + format!("{}/{}", path, child_name) + }; + + let comma = if i < children.len() - 
1 { "," } else { "" }; + + match child_node { + TreeNode::Leaf(leaf) => { + writeln!( + output, + "{}{}: new MetricNode(this, '{}'){}", + indent_str, field_name, child_path, comma + ) + .unwrap(); + } + TreeNode::Branch(_) => { + writeln!(output, "{}{}: {{", indent_str, field_name).unwrap(); + generate_tree_initializer(output, child_node, &child_path, indent + 1); + writeln!(output, "{}}}{}", indent_str, comma).unwrap(); + } + } + } + } +} + +/// Convert pattern to a TypeScript interface name +fn pattern_to_name(pattern: &IndexPattern) -> String { + let index_names: Vec = pattern + .indexes + .iter() + .map(|i| to_pascal_case(&i.serialize_long())) + .collect(); + format!("Pattern_{}", index_names.join("_")) +} + +/// Convert Rust type name to TypeScript type +fn rust_type_to_ts(rust_type: &str) -> &'static str { + match rust_type { + // Numeric types + "f32" | "f64" | "StoredF32" | "StoredF64" => "number", + "u8" | "u16" | "u32" | "u64" | "i8" | "i16" | "i32" | "i64" => "number", + "usize" | "isize" => "number", + + // Boolean + "bool" => "boolean", + + // String types + "String" | "str" => "string", + + // Bitcoin types (typically numeric or string representations) + "Sats" | "SatsPerVbyte" | "WU" | "VBytes" => "number", + "Height" | "Timestamp" => "number", + "Blockhash" | "Txid" => "string", + + // Arrays/Vecs become arrays + _ if rust_type.starts_with("Vec<") => "unknown[]", + + // Default to unknown for unmapped types + _ => "unknown", + } +} + +/// Convert string to PascalCase +fn to_pascal_case(s: &str) -> String { + s.split('_') + .map(|word| { + let mut chars = word.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().collect::() + chars.as_str(), + } + }) + .collect() +} diff --git a/crates/brk_binder/src/generator/mod.rs b/crates/brk_binder/src/generator/mod.rs new file mode 100644 index 000000000..60a23f7a1 --- /dev/null +++ b/crates/brk_binder/src/generator/mod.rs @@ -0,0 +1,35 @@ +mod javascript; +mod python; 
+mod rust;
+mod types;
+
+pub use javascript::generate_typescript_client;
+pub use python::generate_python_client;
+pub use rust::generate_rust_client;
+pub use types::*;
+
+use brk_query::Vecs;
+use std::io;
+use std::path::Path;
+
+/// Generate all client libraries from the query vecs
+pub fn generate_clients(vecs: &Vecs, output_dir: &Path) -> io::Result<()> {
+    let metadata = ClientMetadata::from_vecs(vecs);
+
+    // Generate Rust client
+    let rust_path = output_dir.join("rust");
+    std::fs::create_dir_all(&rust_path)?;
+    generate_rust_client(&metadata, &rust_path)?;
+
+    // Generate TypeScript client (javascript.rs defines `generate_typescript_client`,
+    // which emits `client.ts`; the previous `generate_javascript_client` name did not exist)
+    let js_path = output_dir.join("javascript");
+    std::fs::create_dir_all(&js_path)?;
+    generate_typescript_client(&metadata, &js_path)?;
+
+    // Generate Python client
+    let python_path = output_dir.join("python");
+    std::fs::create_dir_all(&python_path)?;
+    generate_python_client(&metadata, &python_path)?;
+
+    Ok(())
+}
diff --git a/crates/brk_binder/src/generator/python.rs b/crates/brk_binder/src/generator/python.rs
new file mode 100644
index 000000000..4f72ef05c
--- /dev/null
+++ b/crates/brk_binder/src/generator/python.rs
@@ -0,0 +1,10 @@
+use std::io;
+use std::path::Path;
+
+use super::ClientMetadata;
+
+/// Generate Python client from metadata
+pub fn generate_python_client(_metadata: &ClientMetadata, _output_dir: &Path) -> io::Result<()> {
+    // TODO: Implement Python client generation
+    Ok(())
+}
diff --git a/crates/brk_binder/src/generator/rust.rs b/crates/brk_binder/src/generator/rust.rs
new file mode 100644
index 000000000..3ca7f8ac6
--- /dev/null
+++ b/crates/brk_binder/src/generator/rust.rs
@@ -0,0 +1,10 @@
+use std::io;
+use std::path::Path;
+
+use super::ClientMetadata;
+
+/// Generate Rust client from metadata
+pub fn generate_rust_client(_metadata: &ClientMetadata, _output_dir: &Path) -> io::Result<()> {
+    // TODO: Implement Rust client generation
+    Ok(())
+}
diff --git a/crates/brk_binder/src/generator/types.rs 
b/crates/brk_binder/src/generator/types.rs new file mode 100644 index 000000000..a2cbd8910 --- /dev/null +++ b/crates/brk_binder/src/generator/types.rs @@ -0,0 +1,171 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use brk_query::Vecs; +use brk_types::{Index, TreeNode}; + +/// Metadata extracted from brk_query for client generation +#[derive(Debug)] +pub struct ClientMetadata { + /// All metrics with their available indexes and value type + pub metrics: BTreeMap, + /// The catalog tree structure (with schemas in leaves) + pub catalog: TreeNode, + /// Discovered patterns (sets of indexes that appear together frequently) + pub patterns: Vec, +} + +/// Information about a single metric +#[derive(Debug, Clone)] +pub struct MetricInfo { + /// Metric name (e.g., "difficulty", "supply_total") + pub name: String, + /// Available indexes for this metric + pub indexes: BTreeSet, + /// Value type name (e.g., "Sats", "StoredF64") + pub value_type: String, +} + +/// A pattern of indexes that appears multiple times across metrics +#[derive(Debug, Clone)] +pub struct IndexPattern { + /// Unique identifier for this pattern + pub id: usize, + /// The set of indexes in this pattern + pub indexes: BTreeSet, + /// How many metrics use this exact pattern + pub usage_count: usize, +} + +impl ClientMetadata { + /// Extract metadata from brk_query::Vecs + pub fn from_vecs(vecs: &Vecs) -> Self { + let mut metrics = BTreeMap::new(); + let mut pattern_counts: BTreeMap, usize> = BTreeMap::new(); + + // Extract metric information + for (name, index_to_vec) in &vecs.metric_to_index_to_vec { + let indexes: BTreeSet = index_to_vec.keys().copied().collect(); + + // Get value type from the first available vec + let value_type = index_to_vec + .values() + .next() + .map(|v| v.value_type_to_string().to_string()) + .unwrap_or_else(|| "unknown".to_string()); + + // Count pattern usage + *pattern_counts.entry(indexes.clone()).or_insert(0) += 1; + + metrics.insert( + name.to_string(), + MetricInfo { + 
name: name.to_string(), + indexes, + value_type, + }, + ); + } + + // Extract patterns that are used by multiple metrics + let mut patterns: Vec = pattern_counts + .into_iter() + .filter(|(_, count)| *count >= 2) // Only patterns used by 2+ metrics + .enumerate() + .map(|(id, (indexes, usage_count))| IndexPattern { + id, + indexes, + usage_count, + }) + .collect(); + + // Sort by usage count descending + patterns.sort_by(|a, b| b.usage_count.cmp(&a.usage_count)); + + ClientMetadata { + metrics, + catalog: vecs.catalog().clone(), + patterns, + } + } + + /// Find the pattern that matches a metric's indexes, if any + pub fn find_pattern_for_metric(&self, metric: &MetricInfo) -> Option<&IndexPattern> { + self.patterns + .iter() + .find(|p| p.indexes == metric.indexes) + } +} + +/// Convert a metric name to PascalCase (for struct/class names) +pub fn to_pascal_case(s: &str) -> String { + s.split('_') + .map(|word| { + let mut chars = word.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().collect::() + chars.as_str(), + } + }) + .collect() +} + +/// Convert a metric name to snake_case (already snake_case, but sanitize) +pub fn to_snake_case(s: &str) -> String { + let sanitized = s.replace('-', "_"); + // Handle Rust keywords + match sanitized.as_str() { + "type" | "const" | "static" | "match" | "if" | "else" | "loop" | "while" | "for" + | "break" | "continue" | "return" | "fn" | "let" | "mut" | "ref" | "self" | "super" + | "mod" | "use" | "pub" | "crate" | "extern" | "impl" | "trait" | "struct" | "enum" + | "where" | "async" | "await" | "dyn" | "move" => format!("r#{}", sanitized), + _ => sanitized, + } +} + +/// Convert a metric name to camelCase (for JS/TS) +pub fn to_camel_case(s: &str) -> String { + let pascal = to_pascal_case(s); + let mut chars = pascal.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_lowercase().collect::() + chars.as_str(), + } +} + +/// Convert a serde_json::Value 
(JSON Schema) to a JSDoc type annotation +pub fn schema_to_jsdoc(schema: &serde_json::Value) -> String { + if let Some(ty) = schema.get("type").and_then(|v| v.as_str()) { + match ty { + "null" => "null".to_string(), + "boolean" => "boolean".to_string(), + "integer" | "number" => "number".to_string(), + "string" => "string".to_string(), + "array" => { + if let Some(items) = schema.get("items") { + format!("{}[]", schema_to_jsdoc(items)) + } else { + "Array<*>".to_string() + } + } + "object" => "Object".to_string(), + _ => "*".to_string(), + } + } else if schema.get("anyOf").is_some() || schema.get("oneOf").is_some() { + let variants = schema + .get("anyOf") + .or_else(|| schema.get("oneOf")) + .and_then(|v| v.as_array()) + .map(|arr| { + arr.iter() + .map(schema_to_jsdoc) + .collect::>() + .join("|") + }) + .unwrap_or_else(|| "*".to_string()); + format!("({})", variants) + } else if let Some(reference) = schema.get("$ref").and_then(|v| v.as_str()) { + reference.rsplit('/').next().unwrap_or("*").to_string() + } else { + "*".to_string() + } +} diff --git a/crates/brk_binder/src/lib.rs b/crates/brk_binder/src/lib.rs index 00e202b57..aa5cc02c6 100644 --- a/crates/brk_binder/src/lib.rs +++ b/crates/brk_binder/src/lib.rs @@ -1,5 +1,10 @@ mod js; +mod generator; + +// tree.rs is kept for reference but not compiled +// mod tree; pub use js::*; +pub use generator::*; pub const VERSION: &str = env!("CARGO_PKG_VERSION"); diff --git a/crates/brk_binder/src/tree.rs b/crates/brk_binder/src/tree.rs new file mode 100644 index 000000000..667397d66 --- /dev/null +++ b/crates/brk_binder/src/tree.rs @@ -0,0 +1,917 @@ +use serde_json::{Map, Value}; +use std::collections::{HashMap, HashSet}; +use std::fs; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct Pattern { + fields: Vec, + field_count: usize, +} + +fn sanitize_name(name: &str) -> String { + // Python identifiers can't start with numbers + if name.chars().next().unwrap().is_numeric() { + format!("_{}", name) + } else { + 
name.replace("-", "_") + } +} + +fn extract_pattern(obj: &Map) -> Pattern { + let mut fields: Vec = obj.keys().cloned().collect(); + fields.sort(); + Pattern { + field_count: fields.len(), + fields, + } +} + +// Calculate similarity between two patterns (0.0 = different, 1.0 = identical) +fn pattern_similarity(p1: &Pattern, p2: &Pattern) -> f64 { + if p1.field_count == 0 || p2.field_count == 0 { + return 0.0; + } + + let set1: HashSet<_> = p1.fields.iter().collect(); + let set2: HashSet<_> = p2.fields.iter().collect(); + + let intersection = set1.intersection(&set2).count(); + let union = set1.union(&set2).count(); + + intersection as f64 / union as f64 +} + +// Group similar patterns together +fn cluster_patterns(patterns: &HashMap>) -> Vec)>> { + let mut clusters: Vec)>> = Vec::new(); + let similarity_threshold = 0.7; // 70% overlap + + for (pattern, paths) in patterns { + let mut found_cluster = false; + + for cluster in clusters.iter_mut() { + let representative = &cluster[0].0; + if pattern_similarity(pattern, representative) >= similarity_threshold { + cluster.push((pattern.clone(), paths.clone())); + found_cluster = true; + break; + } + } + + if !found_cluster { + clusters.push(vec![(pattern.clone(), paths.clone())]); + } + } + + clusters +} + +// Merge similar patterns into a flexible pattern +fn merge_patterns_in_cluster( + cluster: &[(Pattern, Vec)], +) -> (Pattern, HashMap) { + let mut all_fields: HashSet = HashSet::new(); + let mut field_counts: HashMap = HashMap::new(); + let total_patterns = cluster.len(); + + // Collect all fields and count occurrences + for (pattern, _) in cluster { + for field in &pattern.fields { + all_fields.insert(field.clone()); + *field_counts.entry(field.clone()).or_insert(0) += 1; + } + } + + // Sort fields + let mut sorted_fields: Vec = all_fields.into_iter().collect(); + sorted_fields.sort(); + + // Mark which fields are required (present in >80% of patterns) + let mut required_fields: HashMap = HashMap::new(); + for field 
in &sorted_fields { + let count = field_counts.get(field).unwrap_or(&0); + required_fields.insert(field.clone(), *count as f64 / total_patterns as f64 > 0.8); + } + + ( + Pattern { + fields: sorted_fields, + field_count: field_counts.len(), + }, + required_fields, + ) +} + +fn find_patterns(tree: &Value, patterns: &mut HashMap>, path: String) { + match tree { + Value::Object(map) => { + // Check if this is a leaf object (all values are strings) + let is_leaf = map.values().all(|v| v.is_string()); + + if is_leaf && map.len() > 5 { + // This might be a reusable pattern + let pattern = extract_pattern(map); + patterns + .entry(pattern) + .or_insert_with(Vec::new) + .push(path.clone()); + } + + // Recurse into children + for (key, value) in map { + let new_path = if path.is_empty() { + key.clone() + } else { + format!("{}.{}", path, key) + }; + find_patterns(value, patterns, new_path); + } + } + _ => {} + } +} + +fn traverse_to_path<'a>(tree: &'a Value, path: &[&str]) -> Option<&'a Value> { + let mut current = tree; + for segment in path { + if let Value::Object(map) = current { + current = map.get(*segment)?; + } else { + return None; + } + } + Some(current) +} + +fn generate_python_pattern_class( + merged_pattern: &Pattern, + required_fields: &HashMap, + class_name: &str, + example_path: &str, + tree: &Value, +) -> String { + let mut output = String::new(); + + output.push_str(&format!("class {}Namespace:\n", class_name)); + output.push_str(&format!( + " \"\"\"Pattern for {} (supports {} fields)\"\"\"\n", + class_name, merged_pattern.field_count + )); + + let slots: Vec = merged_pattern + .fields + .iter() + .map(|f| sanitize_name(f)) + .collect(); + output.push_str(&format!( + " __slots__ = ({})\n\n", + slots + .iter() + .map(|s| format!("'{}'", s)) + .collect::>() + .join(", ") + )); + + output.push_str(" def __init__(self, path: str, prefix: str):\n"); + + let path_segments: Vec<&str> = example_path.split('.').collect(); + if let Some(obj) = traverse_to_path(tree, 
&path_segments) { + if let Value::Object(map) = obj { + for field in &merged_pattern.fields { + let safe_field = sanitize_name(field); + if let Some(Value::String(metric_name)) = map.get(field) { + output.push_str(&format!( + " self.{} = f\"{{path}}/{{prefix}}_{}\"\n", + safe_field, metric_name + )); + } + } + } + } + + output.push_str("\n\n"); + output +} + +fn generate_python_namespace_class( + name: &str, + obj: &Map, + tree: &Value, + api_path: &str, + pattern_classes: &HashMap, +) -> String { + let mut output = String::new(); + let class_name = format!( + "{}Namespace", + name.split('_') + .map(|s| { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } + }) + .collect::() + ); + + output.push_str(&format!("class {}:\n", class_name)); + output.push_str(&format!(" \"\"\"Namespace for {} metrics\"\"\"\n", name)); + + let mut slots = vec![]; + let mut init_lines = vec![]; + + for (key, value) in obj { + let safe_key = sanitize_name(key); + slots.push(safe_key.clone()); + + match value { + Value::String(metric_name) => { + init_lines.push(format!( + " self.{} = f\"{}/{}\"", + safe_key, api_path, metric_name + )); + } + Value::Object(nested_map) => { + let pattern = extract_pattern(nested_map); + if let Some(pattern_class) = pattern_classes.get(&pattern) { + init_lines.push(format!( + " self.{} = {}Namespace(\"{}\", \"{}\")", + safe_key, pattern_class, api_path, key + )); + } else { + let nested_class = format!( + "{}{}", + class_name.trim_end_matches("Namespace"), + key.split('_') + .map(|s| { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } + }) + .collect::() + ); + init_lines.push(format!(" self.{} = {}Namespace()", safe_key)); + } + } + _ => {} + } + } + + output.push_str(&format!( + " __slots__ = ({})\n\n", + slots + .iter() + .map(|s| format!("'{}'", s)) + .collect::>() + .join(", ") + )); + + 
output.push_str(" def __init__(self):\n"); + for line in init_lines { + output.push_str(&format!("{}\n", line)); + } + + output.push_str("\n\n"); + output +} + +fn generate_python_namespaces_recursive( + obj: &Map, + tree: &Value, + pattern_classes: &HashMap, + path: &str, + output: &mut String, +) { + for (key, value) in obj { + if let Value::Object(nested_map) = value { + let new_path = if path.is_empty() { + key.clone() + } else { + format!("{}/{}", path, key) + }; + + let is_leaf = nested_map.values().all(|v| v.is_string()); + if !is_leaf { + generate_python_namespaces_recursive( + nested_map, + tree, + pattern_classes, + &new_path, + output, + ); + } + } + } + + let api_path = path.replace(".", "/"); + let name = path.split('/').last().unwrap_or("Root"); + output.push_str(&generate_python_namespace_class( + name, + obj, + tree, + &api_path, + pattern_classes, + )); +} + +fn generate_python_client(tree: &Value) -> String { + let mut output = String::new(); + + output.push_str( + r#"""" +BRK API Tree - Auto-generated from config + +Each attribute is a string representing the API path + metric name. +Use these paths with your own fetch implementation. 
+ +DO NOT EDIT - This file is generated by codegen +""" + +"#, + ); + + output.push_str( + "# ============================================================================\n", + ); + output.push_str("# PATTERN CLASSES\n"); + output.push_str( + "# ============================================================================\n\n", + ); + + let mut patterns: HashMap> = HashMap::new(); + find_patterns(tree, &mut patterns, String::new()); + + let clusters = cluster_patterns(&patterns); + let mut pattern_classes: HashMap = HashMap::new(); + let mut cluster_id = 0; + + for cluster in clusters.iter() { + let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum(); + + if total_usage >= 3 && cluster[0].0.field_count >= 8 { + let (merged_pattern, required_fields) = merge_patterns_in_cluster(cluster); + + let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) { + format!("RatioPattern{}", cluster_id) + } else if merged_pattern.fields.iter().any(|f| f.contains("count")) { + format!("CountPattern{}", cluster_id) + } else { + format!("CommonPattern{}", cluster_id) + }; + + output.push_str(&generate_python_pattern_class( + &merged_pattern, + &required_fields, + &class_name, + &cluster[0].1[0], + tree, + )); + + for (pattern, _) in cluster { + pattern_classes.insert(pattern.clone(), class_name.clone()); + } + + cluster_id += 1; + } + } + + output.push_str( + "# ============================================================================\n", + ); + output.push_str("# NAMESPACE CLASSES\n"); + output.push_str( + "# ============================================================================\n\n", + ); + + if let Value::Object(root) = tree { + generate_python_namespaces_recursive(root, tree, &pattern_classes, "", &mut output); + } + + output.push_str( + r#" +class BRKTree: + """ + BRK API Tree + + Usage: + tree = BRKTree() + path = tree.computed.chain.block_count.base + # path is now "computed/chain/block_count" + # Use this path with your own HTTP 
client + """ + __slots__ = ("computed", "cointime", "constants", "fetched", "indexes", "market") + + def __init__(self): +"#, + ); + + if let Value::Object(root) = tree { + for key in root.keys() { + output.push_str(&format!( + " self.{} = {}Namespace()\n", + sanitize_name(key), + key.split('_') + .map(|s| { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } + }) + .collect::() + )); + } + } + + output +} + +fn to_pascal_case(s: &str) -> String { + s.split('_') + .map(|word| { + let mut chars = word.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().collect::() + chars.as_str(), + } + }) + .collect() +} + +fn generate_typescript_pattern_class( + merged_pattern: &Pattern, + class_name: &str, + example_path: &str, + tree: &Value, +) -> String { + let mut output = String::new(); + + output.push_str(&format!("export class {}Namespace {{\n", class_name)); + + for field in &merged_pattern.fields { + let safe_field = sanitize_name(field); + output.push_str(&format!(" readonly {}: string;\n", safe_field)); + } + + output.push_str("\n constructor(path: string, prefix: string) {\n"); + + let path_segments: Vec<&str> = example_path.split('.').collect(); + if let Some(obj) = traverse_to_path(tree, &path_segments) { + if let Value::Object(map) = obj { + for field in &merged_pattern.fields { + let safe_field = sanitize_name(field); + if let Some(Value::String(metric_name)) = map.get(field) { + output.push_str(&format!( + " this.{} = `${{path}}/${{prefix}}_{}`;\n", + safe_field, metric_name + )); + } + } + } + } + + output.push_str(" }\n}\n\n"); + output +} + +fn generate_typescript_namespaces_recursive( + obj: &Map, + tree: &Value, + pattern_classes: &HashMap, + path: &str, + output: &mut String, +) { + for (key, value) in obj { + if let Value::Object(nested_map) = value { + let new_path = if path.is_empty() { + key.clone() + } else { + format!("{}/{}", path, 
key) + }; + + let is_leaf = nested_map.values().all(|v| v.is_string()); + if !is_leaf { + generate_typescript_namespaces_recursive( + nested_map, + tree, + pattern_classes, + &new_path, + output, + ); + } + } + } + + let api_path = path.replace(".", "/"); + let name = path.split('/').last().unwrap_or("Root"); + let class_name = to_pascal_case(name); + + output.push_str(&format!("export class {}Namespace {{\n", class_name)); + + for (key, value) in obj { + let safe_key = sanitize_name(key); + match value { + Value::String(_) => { + output.push_str(&format!(" readonly {}: string;\n", safe_key)); + } + Value::Object(nested_map) => { + let pattern = extract_pattern(nested_map); + if let Some(pattern_class) = pattern_classes.get(&pattern) { + output.push_str(&format!( + " readonly {}: {}Namespace;\n", + safe_key, pattern_class + )); + } else { + let nested_class = format!("{}{}", class_name, to_pascal_case(key)); + output.push_str(&format!( + " readonly {}: {}Namespace;\n", + safe_key, nested_class + )); + } + } + _ => {} + } + } + + output.push_str("\n constructor() {\n"); + + for (key, value) in obj { + let safe_key = sanitize_name(key); + match value { + Value::String(metric_name) => { + output.push_str(&format!( + " this.{} = '{}/{}';\n", + safe_key, api_path, metric_name + )); + } + Value::Object(nested_map) => { + let pattern = extract_pattern(nested_map); + if let Some(pattern_class) = pattern_classes.get(&pattern) { + output.push_str(&format!( + " this.{} = new {}Namespace('{}', '{}');\n", + safe_key, pattern_class, api_path, key + )); + } else { + let nested_class = format!("{}{}", class_name, to_pascal_case(key)); + output.push_str(&format!( + " this.{} = new {}Namespace();\n", + safe_key, nested_class + )); + } + } + _ => {} + } + } + + output.push_str(" }\n}\n\n"); +} + +fn generate_typescript_client(tree: &Value) -> String { + let mut output = String::new(); + + output.push_str( + r#"/** + * BRK API Tree - Auto-generated from config + * + * Each property is 
a string representing the API path + metric name. + * Use these paths with your own fetch implementation. + * + * DO NOT EDIT - This file is generated by codegen + */ + +"#, + ); + + let mut patterns: HashMap> = HashMap::new(); + find_patterns(tree, &mut patterns, String::new()); + let clusters = cluster_patterns(&patterns); + + let mut pattern_classes: HashMap = HashMap::new(); + let mut cluster_id = 0; + + for cluster in clusters.iter() { + let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum(); + + if total_usage >= 3 && cluster[0].0.field_count >= 8 { + let (merged_pattern, _) = merge_patterns_in_cluster(cluster); + + let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) { + format!("RatioPattern{}", cluster_id) + } else if merged_pattern.fields.iter().any(|f| f.contains("count")) { + format!("CountPattern{}", cluster_id) + } else { + format!("CommonPattern{}", cluster_id) + }; + + output.push_str(&generate_typescript_pattern_class( + &merged_pattern, + &class_name, + &cluster[0].1[0], + tree, + )); + + for (pattern, _) in cluster { + pattern_classes.insert(pattern.clone(), class_name.clone()); + } + + cluster_id += 1; + } + } + + if let Value::Object(root) = tree { + generate_typescript_namespaces_recursive(root, tree, &pattern_classes, "", &mut output); + } + + output.push_str( + r#" +export class BRKTree { +"#, + ); + + if let Value::Object(root) = tree { + for key in root.keys() { + let class_name = to_pascal_case(key); + output.push_str(&format!( + " readonly {}: {}Namespace;\n", + sanitize_name(key), + class_name + )); + } + } + + output.push_str("\n constructor() {\n"); + + if let Value::Object(root) = tree { + for key in root.keys() { + let class_name = to_pascal_case(key); + output.push_str(&format!( + " this.{} = new {}Namespace();\n", + sanitize_name(key), + class_name + )); + } + } + + output.push_str(" }\n}\n"); + + output +} + +fn to_snake_case(s: &str) -> String { + let sanitized = s.replace("-", "_"); 
+ match sanitized.as_str() { + "type" | "const" | "static" | "match" | "if" | "else" | "loop" | "while" => { + format!("r#{}", sanitized) + } + _ => sanitized, + } +} + +fn generate_rust_pattern_struct( + merged_pattern: &Pattern, + struct_name: &str, + example_path: &str, + tree: &Value, +) -> String { + let mut output = String::new(); + + output.push_str(&format!("/// Pattern for {} metrics\n", struct_name)); + output.push_str("#[derive(Clone, Debug)]\n"); + output.push_str(&format!("pub struct {}Namespace {{\n", struct_name)); + + for field in &merged_pattern.fields { + let safe_field = to_snake_case(&sanitize_name(field)); + output.push_str(&format!(" pub {}: String,\n", safe_field)); + } + + output.push_str("}\n\n"); + + output.push_str(&format!("impl {}Namespace {{\n", struct_name)); + output.push_str(" fn new(path: &str, prefix: &str) -> Self {\n"); + output.push_str(" Self {\n"); + + let path_segments: Vec<&str> = example_path.split('.').collect(); + if let Some(obj) = traverse_to_path(tree, &path_segments) { + if let Value::Object(map) = obj { + for field in &merged_pattern.fields { + let safe_field = to_snake_case(&sanitize_name(field)); + if let Some(Value::String(metric_name)) = map.get(field) { + output.push_str(&format!( + " {}: format!(\"{{}}/{{}}_{}}\", path, prefix),\n", + safe_field, metric_name + )); + } + } + } + } + + output.push_str(" }\n }\n}\n\n"); + output +} + +fn generate_rust_namespaces_recursive( + obj: &Map, + tree: &Value, + pattern_classes: &HashMap, + path: &str, + output: &mut String, +) { + for (key, value) in obj { + if let Value::Object(nested_map) = value { + let new_path = if path.is_empty() { + key.clone() + } else { + format!("{}/{}", path, key) + }; + + let is_leaf = nested_map.values().all(|v| v.is_string()); + if !is_leaf { + generate_rust_namespaces_recursive( + nested_map, + tree, + pattern_classes, + &new_path, + output, + ); + } + } + } + + let api_path = path.replace(".", "/"); + let name = 
path.split('/').last().unwrap_or("Root"); + let struct_name = to_pascal_case(name); + + output.push_str(&format!("/// Namespace for {} metrics\n", name)); + output.push_str("#[derive(Clone, Debug)]\n"); + output.push_str(&format!("pub struct {}Namespace {{\n", struct_name)); + + for (key, value) in obj { + let safe_key = to_snake_case(&sanitize_name(key)); + match value { + Value::String(_) => { + output.push_str(&format!(" pub {}: String,\n", safe_key)); + } + Value::Object(nested_map) => { + let pattern = extract_pattern(nested_map); + if let Some(pattern_class) = pattern_classes.get(&pattern) { + output.push_str(&format!( + " pub {}: {}Namespace,\n", + safe_key, pattern_class + )); + } else { + let nested_struct = format!("{}{}", struct_name, to_pascal_case(key)); + output.push_str(&format!( + " pub {}: {}Namespace,\n", + safe_key, nested_struct + )); + } + } + _ => {} + } + } + + output.push_str("}\n\n"); + + output.push_str(&format!("impl {}Namespace {{\n", struct_name)); + output.push_str(" fn new() -> Self {\n Self {\n"); + + for (key, value) in obj { + let safe_key = to_snake_case(&sanitize_name(key)); + match value { + Value::String(metric_name) => { + output.push_str(&format!( + " {}: \"{}/{}\".to_string(),\n", + safe_key, api_path, metric_name + )); + } + Value::Object(nested_map) => { + let pattern = extract_pattern(nested_map); + if let Some(pattern_class) = pattern_classes.get(&pattern) { + output.push_str(&format!( + " {}: {}Namespace::new(\"{}\", \"{}\"),\n", + safe_key, pattern_class, api_path, key + )); + } else { + let nested_struct = format!("{}{}", struct_name, to_pascal_case(key)); + output.push_str(&format!( + " {}: {}Namespace::new(),\n", + safe_key, nested_struct + )); + } + } + _ => {} + } + } + + output.push_str(" }\n }\n}\n\n"); +} + +fn generate_rust_client(tree: &Value) -> String { + let mut output = String::new(); + + output.push_str( + r#"//! BRK API Tree - Auto-generated from config +//! +//! 
Each field is a String representing the API path + metric name. +//! Use these paths with your own HTTP client. +//! +//! DO NOT EDIT - This file is generated by codegen + +"#, + ); + + let mut patterns: HashMap> = HashMap::new(); + find_patterns(tree, &mut patterns, String::new()); + let clusters = cluster_patterns(&patterns); + + let mut pattern_classes: HashMap = HashMap::new(); + let mut cluster_id = 0; + + for cluster in clusters.iter() { + let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum(); + + if total_usage >= 3 && cluster[0].0.field_count >= 8 { + let (merged_pattern, _) = merge_patterns_in_cluster(cluster); + + let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) { + format!("RatioPattern{}", cluster_id) + } else if merged_pattern.fields.iter().any(|f| f.contains("count")) { + format!("CountPattern{}", cluster_id) + } else { + format!("CommonPattern{}", cluster_id) + }; + + output.push_str(&generate_rust_pattern_struct( + &merged_pattern, + &class_name, + &cluster[0].1[0], + tree, + )); + + for (pattern, _) in cluster { + pattern_classes.insert(pattern.clone(), class_name.clone()); + } + + cluster_id += 1; + } + } + + if let Value::Object(root) = tree { + generate_rust_namespaces_recursive(root, tree, &pattern_classes, "", &mut output); + } + + output.push_str("/// Main BRK API tree\n"); + output.push_str("#[derive(Clone, Debug)]\n"); + output.push_str("pub struct BRKTree {\n"); + + if let Value::Object(root) = tree { + for key in root.keys() { + let struct_name = to_pascal_case(key); + output.push_str(&format!( + " pub {}: {}Namespace,\n", + to_snake_case(key), + struct_name + )); + } + } + + output.push_str("}\n\nimpl BRKTree {\n pub fn new() -> Self {\n Self {\n"); + + if let Value::Object(root) = tree { + for key in root.keys() { + let struct_name = to_pascal_case(key); + output.push_str(&format!( + " {}: {}Namespace::new(),\n", + to_snake_case(key), + struct_name + )); + } + } + + output.push_str(" 
}\n }\n}\n\nimpl Default for BRKTree {\n fn default() -> Self {\n Self::new()\n }\n}\n"); + + output +} + +fn main() { + let json_str = fs::read_to_string("brk_config.json").expect("Failed to read config file"); + + let tree: Value = serde_json::from_str(&json_str).expect("Failed to parse JSON"); + + // Generate Python tree + let python_code = generate_python_client(&tree); + fs::write("brk_tree_generated.py", python_code).expect("Failed to write Python file"); + println!("✓ Generated brk_tree_generated.py"); + + // Generate TypeScript tree + let ts_code = generate_typescript_client(&tree); + fs::write("brk_tree_generated.ts", ts_code).expect("Failed to write TypeScript file"); + println!("✓ Generated brk_tree_generated.ts"); + + // Generate Rust tree + let rust_code = generate_rust_client(&tree); + fs::write("brk_tree_generated.rs", rust_code).expect("Failed to write Rust file"); + println!("✓ Generated brk_tree_generated.rs"); +} diff --git a/crates/brk_computer/Cargo.toml b/crates/brk_computer/Cargo.toml index 92be1b5ea..fcf080206 100644 --- a/crates/brk_computer/Cargo.toml +++ b/crates/brk_computer/Cargo.toml @@ -27,6 +27,7 @@ log = { workspace = true } pco = "0.4.7" rayon = { workspace = true } rustc-hash = { workspace = true } +schemars = { workspace = true } serde = { workspace = true } smallvec = { workspace = true } vecdb = { workspace = true } diff --git a/crates/brk_computer/src/grouped/builder_eager.rs b/crates/brk_computer/src/grouped/builder_eager.rs index 669d0a13c..5d159652c 100644 --- a/crates/brk_computer/src/grouped/builder_eager.rs +++ b/crates/brk_computer/src/grouped/builder_eager.rs @@ -1,6 +1,7 @@ use brk_error::{Error, Result}; use brk_traversable::Traversable; use brk_types::{CheckedSub, StoredU64, Version}; +use schemars::JsonSchema; use vecdb::{ AnyStoredVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec, VecIndex, VecValue, @@ -16,7 +17,7 @@ const VERSION: Version = Version::ZERO; pub struct 
EagerVecsBuilder where I: VecIndex, - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { pub first: Option>>>, pub average: Option>>>, @@ -35,7 +36,7 @@ where impl EagerVecsBuilder where I: VecIndex, - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { pub fn forced_import( db: &Database, @@ -159,7 +160,12 @@ where /// Compute percentiles from sorted values (assumes values is already sorted) fn compute_percentiles_from_sorted(&mut self, index: usize, values: &[T]) -> Result<()> { if let Some(max) = self.max.as_mut() { - max.truncate_push_at(index, *values.last().ok_or(Error::Internal("Empty values for percentiles"))?)?; + max.truncate_push_at( + index, + *values + .last() + .ok_or(Error::Internal("Empty values for percentiles"))?, + )?; } if let Some(pct90) = self.pct90.as_mut() { pct90.truncate_push_at(index, get_percentile(values, 0.90))?; diff --git a/crates/brk_computer/src/grouped/builder_lazy.rs b/crates/brk_computer/src/grouped/builder_lazy.rs index 698e14341..5f347f336 100644 --- a/crates/brk_computer/src/grouped/builder_lazy.rs +++ b/crates/brk_computer/src/grouped/builder_lazy.rs @@ -1,5 +1,6 @@ use brk_traversable::Traversable; use brk_types::Version; +use schemars::JsonSchema; use vecdb::{FromCoarserIndex, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex}; use crate::grouped::{EagerVecsBuilder, VecBuilderOptions}; @@ -12,7 +13,7 @@ use super::ComputedVecValue; pub struct LazyVecsBuilder where I: VecIndex, - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, S1I: VecIndex, S2T: ComputedVecValue, { @@ -30,7 +31,7 @@ const VERSION: Version = Version::ZERO; impl LazyVecsBuilder where I: VecIndex, - T: ComputedVecValue + 'static, + T: ComputedVecValue + JsonSchema + 'static, S1I: VecIndex + 'static + FromCoarserIndex, S2T: ComputedVecValue, { diff --git a/crates/brk_computer/src/grouped/from_dateindex.rs b/crates/brk_computer/src/grouped/from_dateindex.rs index 4466a660e..9c06f8409 100644 --- 
a/crates/brk_computer/src/grouped/from_dateindex.rs +++ b/crates/brk_computer/src/grouped/from_dateindex.rs @@ -4,6 +4,7 @@ use brk_traversable::Traversable; use brk_types::{ DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, }; +use schemars::JsonSchema; use vecdb::{ AnyExportableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, PcoVec, @@ -16,7 +17,7 @@ use super::{ComputedVecValue, EagerVecsBuilder, Source, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromDateIndex where - T: ComputedVecValue + PartialOrd, + T: ComputedVecValue + PartialOrd + JsonSchema, { pub dateindex: Option>>, pub dateindex_extra: EagerVecsBuilder, @@ -32,7 +33,7 @@ const VERSION: Version = Version::ZERO; impl ComputedVecsFromDateIndex where - T: ComputedVecValue + 'static, + T: ComputedVecValue + JsonSchema + 'static, { #[allow(clippy::too_many_arguments)] pub fn forced_import( @@ -149,7 +150,7 @@ where impl Traversable for ComputedVecsFromDateIndex where - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { fn to_tree_node(&self) -> brk_traversable::TreeNode { let dateindex_extra_node = self.dateindex_extra.to_tree_node(); diff --git a/crates/brk_computer/src/grouped/from_height.rs b/crates/brk_computer/src/grouped/from_height.rs index e8e20506d..528f6d539 100644 --- a/crates/brk_computer/src/grouped/from_height.rs +++ b/crates/brk_computer/src/grouped/from_height.rs @@ -5,6 +5,7 @@ use brk_types::{ DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex, }; +use schemars::JsonSchema; use vecdb::{ AnyExportableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, PcoVec, @@ -22,7 +23,7 @@ use super::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromHeight where - T: ComputedVecValue + PartialOrd, + T: ComputedVecValue + PartialOrd + JsonSchema, { pub 
height: Option>>, pub height_extra: EagerVecsBuilder, @@ -41,7 +42,7 @@ const VERSION: Version = Version::ZERO; impl ComputedVecsFromHeight where - T: ComputedVecValue + Ord + From + 'static, + T: ComputedVecValue + Ord + From + JsonSchema + 'static, f64: From, { #[allow(clippy::too_many_arguments)] @@ -202,7 +203,7 @@ where impl Traversable for ComputedVecsFromHeight where - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { fn to_tree_node(&self) -> brk_traversable::TreeNode { let height_extra_node = self.height_extra.to_tree_node(); diff --git a/crates/brk_computer/src/grouped/from_height_strict.rs b/crates/brk_computer/src/grouped/from_height_strict.rs index 822678a56..6196abcea 100644 --- a/crates/brk_computer/src/grouped/from_height_strict.rs +++ b/crates/brk_computer/src/grouped/from_height_strict.rs @@ -2,6 +2,7 @@ use brk_error::Result; use brk_traversable::Traversable; use brk_types::{DifficultyEpoch, Height, Version}; +use schemars::JsonSchema; use vecdb::{AnyExportableVec, Database, EagerVec, Exit, ImportableVec, PcoVec}; use crate::{Indexes, indexes}; @@ -11,7 +12,7 @@ use super::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromHeightStrict where - T: ComputedVecValue + PartialOrd, + T: ComputedVecValue + PartialOrd + JsonSchema, { pub height: EagerVec>, pub height_extra: EagerVecsBuilder, @@ -23,7 +24,7 @@ const VERSION: Version = Version::ZERO; impl ComputedVecsFromHeightStrict where - T: ComputedVecValue + Ord + From, + T: ComputedVecValue + Ord + From + JsonSchema, f64: From, { pub fn forced_import( @@ -85,7 +86,7 @@ where impl Traversable for ComputedVecsFromHeightStrict where - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { fn to_tree_node(&self) -> brk_traversable::TreeNode { let height_extra_node = self.height_extra.to_tree_node(); diff --git a/crates/brk_computer/src/grouped/from_txindex.rs b/crates/brk_computer/src/grouped/from_txindex.rs index d7f25392e..c22f3a471 100644 
--- a/crates/brk_computer/src/grouped/from_txindex.rs +++ b/crates/brk_computer/src/grouped/from_txindex.rs @@ -5,6 +5,7 @@ use brk_types::{ Bitcoin, DateIndex, DecadeIndex, DifficultyEpoch, Dollars, Height, MonthIndex, QuarterIndex, Sats, SemesterIndex, TxIndex, Version, WeekIndex, YearIndex, }; +use schemars::JsonSchema; use vecdb::{ AnyExportableVec, AnyVec, CollectableVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, PcoVec, TypedVecIterator, VecIndex, @@ -22,7 +23,7 @@ use super::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions}; #[derive(Clone)] pub struct ComputedVecsFromTxindex where - T: ComputedVecValue + PartialOrd, + T: ComputedVecValue + PartialOrd + JsonSchema, { pub txindex: Option>>>, pub height: EagerVecsBuilder, @@ -41,7 +42,7 @@ const VERSION: Version = Version::ZERO; impl ComputedVecsFromTxindex where - T: ComputedVecValue + Ord + From + 'static, + T: ComputedVecValue + Ord + From + JsonSchema + 'static, f64: From, { #[allow(clippy::too_many_arguments)] @@ -256,10 +257,8 @@ impl ComputedVecsFromTxindex { .map(Height::from) .try_for_each(|height| -> Result<()> { if let Some(first) = self.height.first.as_mut() { - first.truncate_push( - height, - Bitcoin::from(first_iter.um().get_unwrap(height)), - )?; + first + .truncate_push(height, Bitcoin::from(first_iter.um().get_unwrap(height)))?; } if let Some(average) = self.height.average.as_mut() { average.truncate_push( @@ -268,28 +267,18 @@ impl ComputedVecsFromTxindex { )?; } if let Some(sum) = self.height.sum.as_mut() { - sum.truncate_push( - height, - Bitcoin::from(sum_iter.um().get_unwrap(height)), - )?; + sum.truncate_push(height, Bitcoin::from(sum_iter.um().get_unwrap(height)))?; } if let Some(max) = self.height.max.as_mut() { - max.truncate_push( - height, - Bitcoin::from(max_iter.um().get_unwrap(height)), - )?; + max.truncate_push(height, Bitcoin::from(max_iter.um().get_unwrap(height)))?; } if let Some(pct90) = self.height.pct90.as_mut() { - 
pct90.truncate_push( - height, - Bitcoin::from(pct90_iter.um().get_unwrap(height)), - )?; + pct90 + .truncate_push(height, Bitcoin::from(pct90_iter.um().get_unwrap(height)))?; } if let Some(pct75) = self.height.pct75.as_mut() { - pct75.truncate_push( - height, - Bitcoin::from(pct75_iter.um().get_unwrap(height)), - )?; + pct75 + .truncate_push(height, Bitcoin::from(pct75_iter.um().get_unwrap(height)))?; } if let Some(median) = self.height.median.as_mut() { median.truncate_push( @@ -298,28 +287,18 @@ impl ComputedVecsFromTxindex { )?; } if let Some(pct25) = self.height.pct25.as_mut() { - pct25.truncate_push( - height, - Bitcoin::from(pct25_iter.um().get_unwrap(height)), - )?; + pct25 + .truncate_push(height, Bitcoin::from(pct25_iter.um().get_unwrap(height)))?; } if let Some(pct10) = self.height.pct10.as_mut() { - pct10.truncate_push( - height, - Bitcoin::from(pct10_iter.um().get_unwrap(height)), - )?; + pct10 + .truncate_push(height, Bitcoin::from(pct10_iter.um().get_unwrap(height)))?; } if let Some(min) = self.height.min.as_mut() { - min.truncate_push( - height, - Bitcoin::from(min_iter.um().get_unwrap(height)), - )?; + min.truncate_push(height, Bitcoin::from(min_iter.um().get_unwrap(height)))?; } if let Some(last) = self.height.last.as_mut() { - last.truncate_push( - height, - Bitcoin::from(last_iter.um().get_unwrap(height)), - )?; + last.truncate_push(height, Bitcoin::from(last_iter.um().get_unwrap(height)))?; } if let Some(cumulative) = self.height.cumulative.as_mut() { cumulative.truncate_push( @@ -381,76 +360,41 @@ impl ComputedVecsFromTxindex { let price = *close_iter.get_unwrap(height); if let Some(first) = self.height.first.as_mut() { - first.truncate_push( - height, - price * first_iter.um().get_unwrap(height), - )?; + first.truncate_push(height, price * first_iter.um().get_unwrap(height))?; } if let Some(average) = self.height.average.as_mut() { - average.truncate_push( - height, - price * average_iter.um().get_unwrap(height), - )?; + 
average.truncate_push(height, price * average_iter.um().get_unwrap(height))?; } if let Some(sum) = self.height.sum.as_mut() { - sum.truncate_push( - height, - price * sum_iter.um().get_unwrap(height), - )?; + sum.truncate_push(height, price * sum_iter.um().get_unwrap(height))?; } if let Some(max) = self.height.max.as_mut() { - max.truncate_push( - height, - price * max_iter.um().get_unwrap(height), - )?; + max.truncate_push(height, price * max_iter.um().get_unwrap(height))?; } if let Some(pct90) = self.height.pct90.as_mut() { - pct90.truncate_push( - height, - price * pct90_iter.um().get_unwrap(height), - )?; + pct90.truncate_push(height, price * pct90_iter.um().get_unwrap(height))?; } if let Some(pct75) = self.height.pct75.as_mut() { - pct75.truncate_push( - height, - price * pct75_iter.um().get_unwrap(height), - )?; + pct75.truncate_push(height, price * pct75_iter.um().get_unwrap(height))?; } if let Some(median) = self.height.median.as_mut() { - median.truncate_push( - height, - price * median_iter.um().get_unwrap(height), - )?; + median.truncate_push(height, price * median_iter.um().get_unwrap(height))?; } if let Some(pct25) = self.height.pct25.as_mut() { - pct25.truncate_push( - height, - price * pct25_iter.um().get_unwrap(height), - )?; + pct25.truncate_push(height, price * pct25_iter.um().get_unwrap(height))?; } if let Some(pct10) = self.height.pct10.as_mut() { - pct10.truncate_push( - height, - price * pct10_iter.um().get_unwrap(height), - )?; + pct10.truncate_push(height, price * pct10_iter.um().get_unwrap(height))?; } if let Some(min) = self.height.min.as_mut() { - min.truncate_push( - height, - price * min_iter.um().get_unwrap(height), - )?; + min.truncate_push(height, price * min_iter.um().get_unwrap(height))?; } if let Some(last) = self.height.last.as_mut() { - last.truncate_push( - height, - price * last_iter.um().get_unwrap(height), - )?; + last.truncate_push(height, price * last_iter.um().get_unwrap(height))?; } if let Some(cumulative) = 
self.height.cumulative.as_mut() { - cumulative.truncate_push( - height, - price * cumulative_iter.um().get_unwrap(height), - )?; + cumulative + .truncate_push(height, price * cumulative_iter.um().get_unwrap(height))?; } Ok(()) })?; @@ -463,7 +407,7 @@ impl ComputedVecsFromTxindex { impl Traversable for ComputedVecsFromTxindex where - T: ComputedVecValue, + T: ComputedVecValue + JsonSchema, { fn to_tree_node(&self) -> brk_traversable::TreeNode { brk_traversable::TreeNode::Branch( diff --git a/crates/brk_computer/src/grouped/sd_from_dateindex.rs b/crates/brk_computer/src/grouped/sd_from_dateindex.rs index 8426e5deb..c41218fe7 100644 --- a/crates/brk_computer/src/grouped/sd_from_dateindex.rs +++ b/crates/brk_computer/src/grouped/sd_from_dateindex.rs @@ -1,9 +1,11 @@ +use std::mem; + use brk_error::Result; use brk_traversable::Traversable; use brk_types::{Date, DateIndex, Dollars, StoredF32, Version}; -use vecdb::{PcoVec, +use vecdb::{ AnyStoredVec, AnyVec, BoxedVecIterator, CollectableVec, Database, EagerVec, Exit, - GenericStoredVec, IterableVec, VecIndex, + GenericStoredVec, IterableVec, PcoVec, VecIndex, }; use crate::{Indexes, grouped::source::Source, indexes, utils::OptionExt}; @@ -109,8 +111,14 @@ impl ComputedStandardDeviationVecsFromDateIndex { macro_rules! 
import { ($suffix:expr) => { ComputedVecsFromDateIndex::forced_import( - db, &format!("{name}_{}", $suffix), Source::Compute, version, indexes, opts, - ).unwrap() + db, + &format!("{name}_{}", $suffix), + Source::Compute, + version, + indexes, + opts, + ) + .unwrap() }; } @@ -183,9 +191,7 @@ impl ComputedStandardDeviationVecsFromDateIndex { source: &impl CollectableVec, price_opt: Option<&impl IterableVec>, ) -> Result<()> { - let sma = sma_opt.unwrap_or_else(|| unsafe { - std::mem::transmute(&self.sma.u().dateindex) - }); + let sma = sma_opt.unwrap_or_else(|| unsafe { mem::transmute(&self.sma.u().dateindex) }); let min_date = DateIndex::try_from(Date::MIN_RATIO).unwrap(); @@ -345,7 +351,11 @@ impl ComputedStandardDeviationVecsFromDateIndex { .try_for_each(|v| v.safe_flush(exit))?; self.mut_stateful_computed().try_for_each(|v| { - v.compute_rest(starting_indexes, exit, None as Option<&EagerVec>>) + v.compute_rest( + starting_indexes, + exit, + None as Option<&EagerVec>>, + ) })?; if let Some(zscore) = self.zscore.as_mut() { @@ -536,7 +546,6 @@ impl ComputedStandardDeviationVecsFromDateIndex { fn mut_stateful_date_vecs( &mut self, ) -> impl Iterator>> { - self.mut_stateful_computed() - .map(|c| c.dateindex.um()) + self.mut_stateful_computed().map(|c| c.dateindex.um()) } } diff --git a/crates/brk_computer/src/pools/mod.rs b/crates/brk_computer/src/pools/mod.rs index 8468391f9..445fcfaa0 100644 --- a/crates/brk_computer/src/pools/mod.rs +++ b/crates/brk_computer/src/pools/mod.rs @@ -217,7 +217,7 @@ impl Vecs { .or_else(|| self.pools.find_from_coinbase_tag(&coinbase_tag)) .unwrap_or(unknown); - self.height_to_pool.push_if_needed(height, pool.slug)?; + self.height_to_pool.truncate_push(height, pool.slug)?; Ok(()) })?; diff --git a/crates/brk_computer/src/stateful/address/address_count.rs b/crates/brk_computer/src/stateful/address/address_count.rs index eeeb63616..51790a047 100644 --- a/crates/brk_computer/src/stateful/address/address_count.rs +++ 
b/crates/brk_computer/src/stateful/address/address_count.rs @@ -6,7 +6,7 @@ use brk_traversable::Traversable; use brk_types::{Height, StoredU64, Version}; use derive_deref::{Deref, DerefMut}; use vecdb::{ - AnyStoredVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec, + AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec, TypedVecIterator, }; @@ -54,6 +54,18 @@ impl From>>> } impl AddressTypeToHeightToAddressCount { + pub fn min_len(&self) -> usize { + self.p2pk65 + .len() + .min(self.p2pk33.len()) + .min(self.p2pkh.len()) + .min(self.p2sh.len()) + .min(self.p2wpkh.len()) + .min(self.p2wsh.len()) + .min(self.p2tr.len()) + .min(self.p2a.len()) + } + pub fn forced_import(db: &Database, name: &str, version: Version) -> Result { Ok(Self::from(ByAddressType::new_with_name(|type_name| { Ok(EagerVec::forced_import( diff --git a/crates/brk_computer/src/stateful/compute/write.rs b/crates/brk_computer/src/stateful/compute/write.rs index f29dffe9b..1b8e405b7 100644 --- a/crates/brk_computer/src/stateful/compute/write.rs +++ b/crates/brk_computer/src/stateful/compute/write.rs @@ -36,6 +36,8 @@ pub fn process_address_updates( empty_updates: AddressTypeToTypeIndexMap, loaded_updates: AddressTypeToTypeIndexMap, ) -> Result<()> { + info!("Processing address updates..."); + let empty_result = process_empty_addresses(addresses_data, empty_updates)?; let loaded_result = process_loaded_addresses(addresses_data, loaded_updates)?; let all_updates = empty_result.merge(loaded_result); diff --git a/crates/brk_computer/src/stateful/process/lookup.rs b/crates/brk_computer/src/stateful/process/lookup.rs index b37d8f7ab..070a37003 100644 --- a/crates/brk_computer/src/stateful/process/lookup.rs +++ b/crates/brk_computer/src/stateful/process/lookup.rs @@ -5,52 +5,52 @@ use brk_types::{LoadedAddressData, OutputType, TypeIndex}; use super::super::address::AddressTypeToTypeIndexMap; use super::{EmptyAddressDataWithSource, 
LoadedAddressDataWithSource, WithAddressDataSource}; +/// Source of an address in lookup - reports where the data came from. +#[derive(Clone, Copy)] +pub enum AddressSource { + /// Brand new address (never seen before) + New, + /// Loaded from disk (has existing balance) + Loaded, + /// Was empty (zero balance), now receiving + FromEmpty, +} + /// Context for looking up and storing address data during block processing. -/// -/// All addresses should be pre-fetched into the cache before using this. -/// - `loaded`: addresses with non-zero balance (wrapped with source info) -/// - `empty`: addresses that became empty this block (wrapped with source info) pub struct AddressLookup<'a> { - /// Loaded addresses touched in current block pub loaded: &'a mut AddressTypeToTypeIndexMap, - /// Empty addresses touched in current block pub empty: &'a mut AddressTypeToTypeIndexMap, } impl<'a> AddressLookup<'a> { - /// Get or create address data for a receive operation. - /// - /// Returns (address_data, is_new, from_empty) pub fn get_or_create_for_receive( &mut self, output_type: OutputType, type_index: TypeIndex, - ) -> (&mut LoadedAddressDataWithSource, bool, bool) { + ) -> (&mut LoadedAddressDataWithSource, AddressSource) { use std::collections::hash_map::Entry; let map = self.loaded.get_mut(output_type).unwrap(); match map.entry(type_index) { Entry::Occupied(entry) => { - // Entry already exists - check its source - let data = entry.into_mut(); - let is_new = data.is_new(); - let from_empty = data.is_from_emptyaddressdata(); - (data, is_new, from_empty) + let source = match entry.get() { + WithAddressDataSource::New(_) => AddressSource::New, + WithAddressDataSource::FromLoaded(..) => AddressSource::Loaded, + WithAddressDataSource::FromEmpty(..) 
=> AddressSource::FromEmpty, + }; + (entry.into_mut(), source) } Entry::Vacant(entry) => { - // Check if it was in empty set if let Some(empty_data) = self.empty.get_mut(output_type).unwrap().remove(&type_index) { - let data = entry.insert(empty_data.into()); - return (data, false, true); + return (entry.insert(empty_data.into()), AddressSource::FromEmpty); } - - // Not found - create new address - let data = - entry.insert(WithAddressDataSource::New(LoadedAddressData::default())); - (data, true, false) + ( + entry.insert(WithAddressDataSource::New(LoadedAddressData::default())), + AddressSource::New, + ) } } } diff --git a/crates/brk_computer/src/stateful/process/received.rs b/crates/brk_computer/src/stateful/process/received.rs index 9b9cac28e..b09bdeb04 100644 --- a/crates/brk_computer/src/stateful/process/received.rs +++ b/crates/brk_computer/src/stateful/process/received.rs @@ -1,23 +1,13 @@ //! Process received outputs for address cohorts. -//! -//! Updates address cohort states when addresses receive funds: -//! - New addresses enter a cohort -//! - Existing addresses may cross cohort boundaries -//! - Empty addresses become non-empty again -use brk_grouper::{ByAddressType, Filtered}; +use brk_grouper::{AmountBucket, ByAddressType}; use brk_types::{Dollars, Sats, TypeIndex}; +use rustc_hash::FxHashMap; use super::super::address::AddressTypeToVec; use super::super::cohorts::AddressCohorts; -use super::lookup::AddressLookup; +use super::lookup::{AddressLookup, AddressSource}; -/// Process received outputs for address cohorts. -/// -/// For each received output: -/// 1. Look up or create address data -/// 2. Update address balance and cohort membership -/// 3. 
Update cohort states (add/subtract for boundary crossings, receive otherwise) pub fn process_received( received_data: AddressTypeToVec<(TypeIndex, Sats)>, cohorts: &mut AddressCohorts, @@ -31,30 +21,47 @@ pub fn process_received( continue; } + // Aggregate receives by address - each address processed exactly once + // Track (total_value, output_count) for correct UTXO counting + let mut aggregated: FxHashMap = FxHashMap::default(); for (type_index, value) in vec { - let (addr_data, is_new, from_empty) = - lookup.get_or_create_for_receive(output_type, type_index); + let entry = aggregated.entry(type_index).or_default(); + entry.0 += value; + entry.1 += 1; + } - // Update address counts - if is_new || from_empty { - *addr_count.get_mut(output_type).unwrap() += 1; - if from_empty { + for (type_index, (total_value, output_count)) in aggregated { + let (addr_data, source) = lookup.get_or_create_for_receive(output_type, type_index); + + match source { + AddressSource::New => { + *addr_count.get_mut(output_type).unwrap() += 1; + } + AddressSource::FromEmpty => { + *addr_count.get_mut(output_type).unwrap() += 1; *empty_addr_count.get_mut(output_type).unwrap() -= 1; } + AddressSource::Loaded => {} } - let prev_balance = addr_data.balance(); - let new_balance = prev_balance + value; + let is_new_entry = matches!(source, AddressSource::New | AddressSource::FromEmpty); - // Check if crossing cohort boundary - let prev_cohort = cohorts.amount_range.get(prev_balance); - let new_cohort = cohorts.amount_range.get(new_balance); - let filters_differ = prev_cohort.filter() != new_cohort.filter(); + if is_new_entry { + // New/from-empty address - just add to cohort + addr_data.receive_outputs(total_value, price, output_count); + cohorts + .amount_range + .get_mut(total_value) // new_balance = 0 + total_value + .state + .as_mut() + .unwrap() + .add(addr_data); + } else { + let prev_balance = addr_data.balance(); + let new_balance = prev_balance + total_value; - if is_new || from_empty 
|| filters_differ { - // Address entering or changing cohorts - if !is_new && !from_empty { - // Subtract from old cohort + if AmountBucket::from(prev_balance) != AmountBucket::from(new_balance) { + // Crossing cohort boundary - subtract from old, add to new cohorts .amount_range .get_mut(prev_balance) @@ -62,28 +69,24 @@ pub fn process_received( .as_mut() .unwrap() .subtract(addr_data); + addr_data.receive_outputs(total_value, price, output_count); + cohorts + .amount_range + .get_mut(new_balance) + .state + .as_mut() + .unwrap() + .add(addr_data); + } else { + // Staying in same cohort - just receive + cohorts + .amount_range + .get_mut(new_balance) + .state + .as_mut() + .unwrap() + .receive_outputs(addr_data, total_value, price, output_count); } - - // Update address data - addr_data.receive(value, price); - - // Add to new cohort - cohorts - .amount_range - .get_mut(new_balance) - .state - .as_mut() - .unwrap() - .add(addr_data); - } else { - // Address staying in same cohort - update in place - cohorts - .amount_range - .get_mut(new_balance) - .state - .as_mut() - .unwrap() - .receive(addr_data, value, price); } } } diff --git a/crates/brk_computer/src/stateful/process/with_source.rs b/crates/brk_computer/src/stateful/process/with_source.rs index 0d8cd64e6..f86e2a6c4 100644 --- a/crates/brk_computer/src/stateful/process/with_source.rs +++ b/crates/brk_computer/src/stateful/process/with_source.rs @@ -26,16 +26,6 @@ pub enum WithAddressDataSource { FromEmpty(EmptyAddressIndex, T), } -impl WithAddressDataSource { - pub fn is_new(&self) -> bool { - matches!(self, Self::New(_)) - } - - pub fn is_from_emptyaddressdata(&self) -> bool { - matches!(self, Self::FromEmpty(..)) - } -} - impl std::ops::Deref for WithAddressDataSource { type Target = T; diff --git a/crates/brk_computer/src/stateful/states/address_cohort.rs b/crates/brk_computer/src/stateful/states/address_cohort.rs index dfb2d8d5f..9e2d8ce98 100644 --- 
a/crates/brk_computer/src/stateful/states/address_cohort.rs +++ b/crates/brk_computer/src/stateful/states/address_cohort.rs @@ -89,6 +89,16 @@ impl AddressCohortState { address_data: &mut LoadedAddressData, value: Sats, price: Option, + ) { + self.receive_outputs(address_data, value, price, 1); + } + + pub fn receive_outputs( + &mut self, + address_data: &mut LoadedAddressData, + value: Sats, + price: Option, + output_count: u32, ) { let compute_price = price.is_some(); @@ -98,7 +108,7 @@ impl AddressCohortState { value: address_data.balance(), }; - address_data.receive(value, price); + address_data.receive_outputs(value, price, output_count); let supply_state = SupplyState { utxo_count: address_data.utxo_count() as u64, @@ -107,7 +117,7 @@ impl AddressCohortState { self.inner.receive_( &SupplyState { - utxo_count: 1, + utxo_count: output_count as u64, value, }, price, diff --git a/crates/brk_computer/src/stateful/states/supply.rs b/crates/brk_computer/src/stateful/states/supply.rs index 4109e76cd..c82c41eee 100644 --- a/crates/brk_computer/src/stateful/states/supply.rs +++ b/crates/brk_computer/src/stateful/states/supply.rs @@ -1,10 +1,11 @@ use std::ops::{Add, AddAssign, SubAssign}; use brk_types::{CheckedSub, LoadedAddressData, Sats}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; -#[derive(Debug, Default, Clone, Serialize)] +#[derive(Debug, Default, Clone, Serialize, JsonSchema)] pub struct SupplyState { pub utxo_count: u64, pub value: Sats, diff --git a/crates/brk_computer/src/stateful/vecs.rs b/crates/brk_computer/src/stateful/vecs.rs index ab9a5ada6..21576a268 100644 --- a/crates/brk_computer/src/stateful/vecs.rs +++ b/crates/brk_computer/src/stateful/vecs.rs @@ -269,7 +269,11 @@ impl Vecs { .min(self.any_address_indexes.min_stamped_height()) .min(self.addresses_data.min_stamped_height()) .min(Height::from(self.height_to_unspendable_supply.len())) - .min(Height::from(self.height_to_opreturn_supply.len())); + 
.min(Height::from(self.height_to_opreturn_supply.len())) + .min(Height::from(self.addresstype_to_height_to_addr_count.min_len())) + .min(Height::from( + self.addresstype_to_height_to_empty_addr_count.min_len(), + )); // 2. Determine start mode and recover/reset state let start_mode = determine_start_mode(stateful_min, chain_state_height); diff --git a/crates/brk_fetcher/src/source.rs b/crates/brk_fetcher/src/source.rs index 5b9899245..8d89724dc 100644 --- a/crates/brk_fetcher/src/source.rs +++ b/crates/brk_fetcher/src/source.rs @@ -64,7 +64,10 @@ impl TrackedSource { } /// Try to fetch, tracking health state - fn try_fetch(&mut self, fetch: impl FnOnce(&mut T) -> Option>) -> Option> { + fn try_fetch( + &mut self, + fetch: impl FnOnce(&mut T) -> Option>, + ) -> Option> { if !self.is_healthy() { return Some(Err(Error::FetchFailed(format!( "{} temporarily disabled (recheck in {}s)", diff --git a/crates/brk_grouper/src/by_amount_range.rs b/crates/brk_grouper/src/by_amount_range.rs index c76236dc0..2d3759a04 100644 --- a/crates/brk_grouper/src/by_amount_range.rs +++ b/crates/brk_grouper/src/by_amount_range.rs @@ -6,6 +6,33 @@ use rayon::prelude::*; use super::{AmountFilter, Filter}; +/// Bucket index for amount ranges. Use for cheap comparisons. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct AmountBucket(u8); + +impl From for AmountBucket { + #[inline(always)] + fn from(value: Sats) -> Self { + Self(match value { + v if v < Sats::_1 => 0, + v if v < Sats::_10 => 1, + v if v < Sats::_100 => 2, + v if v < Sats::_1K => 3, + v if v < Sats::_10K => 4, + v if v < Sats::_100K => 5, + v if v < Sats::_1M => 6, + v if v < Sats::_10M => 7, + v if v < Sats::_1BTC => 8, + v if v < Sats::_10BTC => 9, + v if v < Sats::_100BTC => 10, + v if v < Sats::_1K_BTC => 11, + v if v < Sats::_10K_BTC => 12, + v if v < Sats::_100K_BTC => 13, + _ => 14, + }) + } +} + #[derive(Debug, Default, Clone, Traversable)] pub struct ByAmountRange { pub _0sats: T, @@ -35,87 +62,79 @@ impl ByAmountRange { _1sat_to_10sats: create(Filter::Amount(AmountFilter::Range(Sats::_1..Sats::_10))), _10sats_to_100sats: create(Filter::Amount(AmountFilter::Range(Sats::_10..Sats::_100))), _100sats_to_1k_sats: create(Filter::Amount(AmountFilter::Range(Sats::_100..Sats::_1K))), - _1k_sats_to_10k_sats: create(Filter::Amount(AmountFilter::Range(Sats::_1K..Sats::_10K))), - _10k_sats_to_100k_sats: create(Filter::Amount(AmountFilter::Range(Sats::_10K..Sats::_100K))), - _100k_sats_to_1m_sats: create(Filter::Amount(AmountFilter::Range(Sats::_100K..Sats::_1M))), - _1m_sats_to_10m_sats: create(Filter::Amount(AmountFilter::Range(Sats::_1M..Sats::_10M))), + _1k_sats_to_10k_sats: create(Filter::Amount(AmountFilter::Range( + Sats::_1K..Sats::_10K, + ))), + _10k_sats_to_100k_sats: create(Filter::Amount(AmountFilter::Range( + Sats::_10K..Sats::_100K, + ))), + _100k_sats_to_1m_sats: create(Filter::Amount(AmountFilter::Range( + Sats::_100K..Sats::_1M, + ))), + _1m_sats_to_10m_sats: create(Filter::Amount(AmountFilter::Range( + Sats::_1M..Sats::_10M, + ))), _10m_sats_to_1btc: create(Filter::Amount(AmountFilter::Range(Sats::_10M..Sats::_1BTC))), - _1btc_to_10btc: create(Filter::Amount(AmountFilter::Range(Sats::_1BTC..Sats::_10BTC))), - _10btc_to_100btc: 
create(Filter::Amount(AmountFilter::Range(Sats::_10BTC..Sats::_100BTC))), - _100btc_to_1k_btc: create(Filter::Amount(AmountFilter::Range(Sats::_100BTC..Sats::_1K_BTC))), - _1k_btc_to_10k_btc: create(Filter::Amount(AmountFilter::Range(Sats::_1K_BTC..Sats::_10K_BTC))), - _10k_btc_to_100k_btc: create(Filter::Amount(AmountFilter::Range(Sats::_10K_BTC..Sats::_100K_BTC))), - _100k_btc_or_more: create(Filter::Amount(AmountFilter::Range(Sats::_100K_BTC..Sats::MAX))), + _1btc_to_10btc: create(Filter::Amount(AmountFilter::Range( + Sats::_1BTC..Sats::_10BTC, + ))), + _10btc_to_100btc: create(Filter::Amount(AmountFilter::Range( + Sats::_10BTC..Sats::_100BTC, + ))), + _100btc_to_1k_btc: create(Filter::Amount(AmountFilter::Range( + Sats::_100BTC..Sats::_1K_BTC, + ))), + _1k_btc_to_10k_btc: create(Filter::Amount(AmountFilter::Range( + Sats::_1K_BTC..Sats::_10K_BTC, + ))), + _10k_btc_to_100k_btc: create(Filter::Amount(AmountFilter::Range( + Sats::_10K_BTC..Sats::_100K_BTC, + ))), + _100k_btc_or_more: create(Filter::Amount(AmountFilter::Range( + Sats::_100K_BTC..Sats::MAX, + ))), } } - #[allow(clippy::inconsistent_digit_grouping)] + #[inline(always)] pub fn get(&self, value: Sats) -> &T { - if value == Sats::ZERO { - &self._0sats - } else if value < Sats::_10 { - &self._1sat_to_10sats - } else if value < Sats::_100 { - &self._10sats_to_100sats - } else if value < Sats::_1K { - &self._100sats_to_1k_sats - } else if value < Sats::_10K { - &self._1k_sats_to_10k_sats - } else if value < Sats::_100K { - &self._10k_sats_to_100k_sats - } else if value < Sats::_1M { - &self._100k_sats_to_1m_sats - } else if value < Sats::_10M { - &self._1m_sats_to_10m_sats - } else if value < Sats::_1BTC { - &self._10m_sats_to_1btc - } else if value < Sats::_10BTC { - &self._1btc_to_10btc - } else if value < Sats::_100BTC { - &self._10btc_to_100btc - } else if value < Sats::_1K_BTC { - &self._100btc_to_1k_btc - } else if value < Sats::_10K_BTC { - &self._1k_btc_to_10k_btc - } else if value < 
Sats::_100K_BTC { - &self._10k_btc_to_100k_btc - } else { - &self._100k_btc_or_more + match AmountBucket::from(value).0 { + 0 => &self._0sats, + 1 => &self._1sat_to_10sats, + 2 => &self._10sats_to_100sats, + 3 => &self._100sats_to_1k_sats, + 4 => &self._1k_sats_to_10k_sats, + 5 => &self._10k_sats_to_100k_sats, + 6 => &self._100k_sats_to_1m_sats, + 7 => &self._1m_sats_to_10m_sats, + 8 => &self._10m_sats_to_1btc, + 9 => &self._1btc_to_10btc, + 10 => &self._10btc_to_100btc, + 11 => &self._100btc_to_1k_btc, + 12 => &self._1k_btc_to_10k_btc, + 13 => &self._10k_btc_to_100k_btc, + _ => &self._100k_btc_or_more, } } - #[allow(clippy::inconsistent_digit_grouping)] + #[inline(always)] pub fn get_mut(&mut self, value: Sats) -> &mut T { - if value == Sats::ZERO { - &mut self._0sats - } else if value < Sats::_10 { - &mut self._1sat_to_10sats - } else if value < Sats::_100 { - &mut self._10sats_to_100sats - } else if value < Sats::_1K { - &mut self._100sats_to_1k_sats - } else if value < Sats::_10K { - &mut self._1k_sats_to_10k_sats - } else if value < Sats::_100K { - &mut self._10k_sats_to_100k_sats - } else if value < Sats::_1M { - &mut self._100k_sats_to_1m_sats - } else if value < Sats::_10M { - &mut self._1m_sats_to_10m_sats - } else if value < Sats::_1BTC { - &mut self._10m_sats_to_1btc - } else if value < Sats::_10BTC { - &mut self._1btc_to_10btc - } else if value < Sats::_100BTC { - &mut self._10btc_to_100btc - } else if value < Sats::_1K_BTC { - &mut self._100btc_to_1k_btc - } else if value < Sats::_10K_BTC { - &mut self._1k_btc_to_10k_btc - } else if value < Sats::_100K_BTC { - &mut self._10k_btc_to_100k_btc - } else { - &mut self._100k_btc_or_more + match AmountBucket::from(value).0 { + 0 => &mut self._0sats, + 1 => &mut self._1sat_to_10sats, + 2 => &mut self._10sats_to_100sats, + 3 => &mut self._100sats_to_1k_sats, + 4 => &mut self._1k_sats_to_10k_sats, + 5 => &mut self._10k_sats_to_100k_sats, + 6 => &mut self._100k_sats_to_1m_sats, + 7 => &mut 
self._1m_sats_to_10m_sats, + 8 => &mut self._10m_sats_to_1btc, + 9 => &mut self._1btc_to_10btc, + 10 => &mut self._10btc_to_100btc, + 11 => &mut self._100btc_to_1k_btc, + 12 => &mut self._1k_btc_to_10k_btc, + 13 => &mut self._10k_btc_to_100k_btc, + _ => &mut self._100k_btc_or_more, } } diff --git a/crates/brk_indexer/src/indexes.rs b/crates/brk_indexer/src/indexes.rs index 72db26c33..378ace4d3 100644 --- a/crates/brk_indexer/src/indexes.rs +++ b/crates/brk_indexer/src/indexes.rs @@ -66,53 +66,53 @@ impl Indexes { } } - pub fn push_if_needed(&self, vecs: &mut Vecs) -> Result<()> { + pub fn checked_push(&self, vecs: &mut Vecs) -> Result<()> { let height = self.height; vecs.tx .height_to_first_txindex - .push_if_needed(height, self.txindex)?; + .checked_push(height, self.txindex)?; vecs.txin .height_to_first_txinindex - .push_if_needed(height, self.txinindex)?; + .checked_push(height, self.txinindex)?; vecs.txout .height_to_first_txoutindex - .push_if_needed(height, self.txoutindex)?; + .checked_push(height, self.txoutindex)?; vecs.output .height_to_first_emptyoutputindex - .push_if_needed(height, self.emptyoutputindex)?; + .checked_push(height, self.emptyoutputindex)?; vecs.output .height_to_first_p2msoutputindex - .push_if_needed(height, self.p2msoutputindex)?; + .checked_push(height, self.p2msoutputindex)?; vecs.output .height_to_first_opreturnindex - .push_if_needed(height, self.opreturnindex)?; + .checked_push(height, self.opreturnindex)?; vecs.address .height_to_first_p2aaddressindex - .push_if_needed(height, self.p2aaddressindex)?; + .checked_push(height, self.p2aaddressindex)?; vecs.output .height_to_first_unknownoutputindex - .push_if_needed(height, self.unknownoutputindex)?; + .checked_push(height, self.unknownoutputindex)?; vecs.address .height_to_first_p2pk33addressindex - .push_if_needed(height, self.p2pk33addressindex)?; + .checked_push(height, self.p2pk33addressindex)?; vecs.address .height_to_first_p2pk65addressindex - .push_if_needed(height, 
self.p2pk65addressindex)?; + .checked_push(height, self.p2pk65addressindex)?; vecs.address .height_to_first_p2pkhaddressindex - .push_if_needed(height, self.p2pkhaddressindex)?; + .checked_push(height, self.p2pkhaddressindex)?; vecs.address .height_to_first_p2shaddressindex - .push_if_needed(height, self.p2shaddressindex)?; + .checked_push(height, self.p2shaddressindex)?; vecs.address .height_to_first_p2traddressindex - .push_if_needed(height, self.p2traddressindex)?; + .checked_push(height, self.p2traddressindex)?; vecs.address .height_to_first_p2wpkhaddressindex - .push_if_needed(height, self.p2wpkhaddressindex)?; + .checked_push(height, self.p2wpkhaddressindex)?; vecs.address .height_to_first_p2wshaddressindex - .push_if_needed(height, self.p2wshaddressindex)?; + .checked_push(height, self.p2wshaddressindex)?; Ok(()) } diff --git a/crates/brk_indexer/src/processor.rs b/crates/brk_indexer/src/processor.rs index 5192c5b06..5d7b713b2 100644 --- a/crates/brk_indexer/src/processor.rs +++ b/crates/brk_indexer/src/processor.rs @@ -79,7 +79,7 @@ impl<'a> BlockProcessor<'a> { return Err(Error::Internal("BlockHash prefix collision")); } - self.indexes.push_if_needed(self.vecs)?; + self.indexes.checked_push(self.vecs)?; self.stores .blockhashprefix_to_height @@ -94,23 +94,23 @@ impl<'a> BlockProcessor<'a> { self.vecs .block .height_to_blockhash - .push_if_needed(height, blockhash.clone())?; + .checked_push(height, blockhash.clone())?; self.vecs .block .height_to_difficulty - .push_if_needed(height, self.block.header.difficulty_float().into())?; + .checked_push(height, self.block.header.difficulty_float().into())?; self.vecs .block .height_to_timestamp - .push_if_needed(height, Timestamp::from(self.block.header.time))?; + .checked_push(height, Timestamp::from(self.block.header.time))?; self.vecs .block .height_to_total_size - .push_if_needed(height, self.block.total_size().into())?; + .checked_push(height, self.block.total_size().into())?; self.vecs .block .height_to_weight 
- .push_if_needed(height, self.block.weight().into())?; + .checked_push(height, self.block.weight().into())?; Ok(()) } @@ -431,21 +431,21 @@ impl<'a> BlockProcessor<'a> { self.vecs .tx .txindex_to_first_txoutindex - .push_if_needed(txindex, txoutindex)?; + .checked_push(txindex, txoutindex)?; } self.vecs .txout .txoutindex_to_value - .push_if_needed(txoutindex, sats)?; + .checked_push(txoutindex, sats)?; self.vecs .txout .txoutindex_to_txindex - .push_if_needed(txoutindex, txindex)?; + .checked_push(txoutindex, txindex)?; self.vecs .txout .txoutindex_to_outputtype - .push_if_needed(txoutindex, outputtype)?; + .checked_push(txoutindex, outputtype)?; let typeindex = if let Some(ti) = existing_typeindex { ti @@ -476,28 +476,28 @@ impl<'a> BlockProcessor<'a> { self.vecs .output .p2msoutputindex_to_txindex - .push_if_needed(self.indexes.p2msoutputindex, txindex)?; + .checked_push(self.indexes.p2msoutputindex, txindex)?; self.indexes.p2msoutputindex.copy_then_increment() } OutputType::OpReturn => { self.vecs .output .opreturnindex_to_txindex - .push_if_needed(self.indexes.opreturnindex, txindex)?; + .checked_push(self.indexes.opreturnindex, txindex)?; self.indexes.opreturnindex.copy_then_increment() } OutputType::Empty => { self.vecs .output .emptyoutputindex_to_txindex - .push_if_needed(self.indexes.emptyoutputindex, txindex)?; + .checked_push(self.indexes.emptyoutputindex, txindex)?; self.indexes.emptyoutputindex.copy_then_increment() } OutputType::Unknown => { self.vecs .output .unknownoutputindex_to_txindex - .push_if_needed(self.indexes.unknownoutputindex, txindex)?; + .checked_push(self.indexes.unknownoutputindex, txindex)?; self.indexes.unknownoutputindex.copy_then_increment() } _ => unreachable!(), @@ -507,7 +507,7 @@ impl<'a> BlockProcessor<'a> { self.vecs .txout .txoutindex_to_typeindex - .push_if_needed(txoutindex, typeindex)?; + .checked_push(txoutindex, typeindex)?; if outputtype.is_unspendable() { continue; @@ -592,17 +592,17 @@ impl<'a> BlockProcessor<'a> 
{ self.vecs .tx .txindex_to_first_txinindex - .push_if_needed(txindex, txinindex)?; + .checked_push(txindex, txinindex)?; } self.vecs .txin .txinindex_to_txindex - .push_if_needed(txinindex, txindex)?; + .checked_push(txinindex, txindex)?; self.vecs .txin .txinindex_to_outpoint - .push_if_needed(txinindex, outpoint)?; + .checked_push(txinindex, outpoint)?; let Some((addresstype, addressindex)) = address_info else { continue; @@ -678,31 +678,31 @@ impl<'a> BlockProcessor<'a> { self.vecs .tx .txindex_to_height - .push_if_needed(ct.txindex, height)?; + .checked_push(ct.txindex, height)?; self.vecs .tx .txindex_to_txversion - .push_if_needed(ct.txindex, ct.tx.version.into())?; + .checked_push(ct.txindex, ct.tx.version.into())?; self.vecs .tx .txindex_to_txid - .push_if_needed(ct.txindex, ct.txid)?; + .checked_push(ct.txindex, ct.txid)?; self.vecs .tx .txindex_to_rawlocktime - .push_if_needed(ct.txindex, ct.tx.lock_time.into())?; + .checked_push(ct.txindex, ct.tx.lock_time.into())?; self.vecs .tx .txindex_to_base_size - .push_if_needed(ct.txindex, ct.tx.base_size().into())?; + .checked_push(ct.txindex, ct.tx.base_size().into())?; self.vecs .tx .txindex_to_total_size - .push_if_needed(ct.txindex, ct.tx.total_size().into())?; + .checked_push(ct.txindex, ct.tx.total_size().into())?; self.vecs .tx .txindex_to_is_explicitly_rbf - .push_if_needed(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?; + .checked_push(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?; } Ok(()) diff --git a/crates/brk_indexer/src/stores.rs b/crates/brk_indexer/src/stores.rs index efcf9b815..88722022a 100644 --- a/crates/brk_indexer/src/stores.rs +++ b/crates/brk_indexer/src/stores.rs @@ -12,7 +12,7 @@ use log::info; use rayon::prelude::*; use vecdb::{AnyVec, TypedVecIterator, VecIndex, VecIterator}; -use crate::{Indexes, constants::DUPLICATE_TXID_PREFIXES}; +use crate::{constants::DUPLICATE_TXID_PREFIXES, Indexes}; use super::Vecs; @@ -267,7 +267,8 @@ impl Stores { if 
starting_indexes.txoutindex != TxOutIndex::ZERO { let mut txoutindex_to_txindex_iter = vecs.txout.txoutindex_to_txindex.iter()?; - let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; + let mut txindex_to_first_txoutindex_iter = + vecs.tx.txindex_to_first_txoutindex.iter()?; vecs.txout .txoutindex_to_outputtype .iter()? @@ -303,23 +304,27 @@ impl Stores { }); // Add back outputs that were spent after the rollback point - let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?; + let mut txindex_to_first_txoutindex_iter = + vecs.tx.txindex_to_first_txoutindex.iter()?; let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?; let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?; + let mut txinindex_to_txindex_iter = vecs.txin.txinindex_to_txindex.iter()?; vecs.txin .txinindex_to_outpoint .iter()? + .enumerate() .skip(starting_indexes.txinindex.to_usize()) - .for_each(|outpoint: OutPoint| { + .for_each(|(txinindex, outpoint): (usize, OutPoint)| { if outpoint.is_coinbase() { return; } - let txindex = outpoint.txindex(); + let output_txindex = outpoint.txindex(); let vout = outpoint.vout(); - // Calculate txoutindex from txindex and vout - let txoutindex = txindex_to_first_txoutindex_iter.get_unwrap(txindex) + vout; + // Calculate txoutindex from output's txindex and vout + let txoutindex = + txindex_to_first_txoutindex_iter.get_unwrap(output_txindex) + vout; // Only process if this output was created before the rollback point if txoutindex < starting_indexes.txoutindex { @@ -329,9 +334,16 @@ impl Stores { let addresstype = outputtype; let addressindex = txoutindex_to_typeindex_iter.get_unwrap(txoutindex); + // Get the SPENDING tx's index (not the output's tx) + let spending_txindex = + txinindex_to_txindex_iter.get_at_unwrap(txinindex); + self.addresstype_to_addressindex_and_txindex .get_mut_unwrap(addresstype) - 
.remove(AddressIndexTxIndex::from((addressindex, txindex))); + .remove(AddressIndexTxIndex::from(( + addressindex, + spending_txindex, + ))); self.addresstype_to_addressindex_and_unspentoutpoint .get_mut_unwrap(addresstype) diff --git a/crates/brk_indexer/src/vecs/address.rs b/crates/brk_indexer/src/vecs/address.rs index f8296411c..e41a60360 100644 --- a/crates/brk_indexer/src/vecs/address.rs +++ b/crates/brk_indexer/src/vecs/address.rs @@ -208,28 +208,28 @@ impl AddressVecs { match bytes { AddressBytes::P2PK65(bytes) => self .p2pk65addressindex_to_p2pk65bytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2PK33(bytes) => self .p2pk33addressindex_to_p2pk33bytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2PKH(bytes) => self .p2pkhaddressindex_to_p2pkhbytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2SH(bytes) => self .p2shaddressindex_to_p2shbytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2WPKH(bytes) => self .p2wpkhaddressindex_to_p2wpkhbytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2WSH(bytes) => self .p2wshaddressindex_to_p2wshbytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2TR(bytes) => self .p2traddressindex_to_p2trbytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, AddressBytes::P2A(bytes) => self .p2aaddressindex_to_p2abytes - .push_if_needed(index.into(), *bytes)?, + .checked_push(index.into(), *bytes)?, }; Ok(()) } diff --git a/crates/brk_query/src/lib.rs b/crates/brk_query/src/lib.rs index d28259401..99a76723a 100644 --- a/crates/brk_query/src/lib.rs +++ b/crates/brk_query/src/lib.rs @@ -29,7 +29,7 @@ pub use brk_types::{ pub use r#impl::BLOCK_TXS_PAGE_SIZE; pub use output::{LegacyValue, Output, 
OutputLegacy}; -use vecs::Vecs; +pub use vecs::Vecs; #[derive(Clone)] pub struct Query(Arc>); diff --git a/crates/brk_traversable/Cargo.toml b/crates/brk_traversable/Cargo.toml index dd9eb3c54..420f20929 100644 --- a/crates/brk_traversable/Cargo.toml +++ b/crates/brk_traversable/Cargo.toml @@ -18,5 +18,7 @@ zstd = ["vecdb/zstd"] [dependencies] brk_types = { workspace = true } brk_traversable_derive = { workspace = true, optional = true } +schemars = { workspace = true } serde = { workspace = true } +serde_json = { workspace = true } vecdb = { workspace = true } diff --git a/crates/brk_traversable/src/lib.rs b/crates/brk_traversable/src/lib.rs index 90d163051..8c4c0995f 100644 --- a/crates/brk_traversable/src/lib.rs +++ b/crates/brk_traversable/src/lib.rs @@ -1,9 +1,10 @@ use std::{collections::BTreeMap, fmt::Debug}; -pub use brk_types::TreeNode; +pub use brk_types::{Index, MetricLeaf, MetricLeafWithSchema, TreeNode}; #[cfg(feature = "derive")] pub use brk_traversable_derive::Traversable; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{ AnyExportableVec, AnyVec, BytesVec, BytesVecValue, EagerVec, Formattable, LazyVecFrom1, @@ -15,18 +16,36 @@ pub trait Traversable { fn iter_any_exportable(&self) -> impl Iterator; } +/// Helper to create a MetricLeafWithSchema from a vec +fn make_leaf(vec: &V) -> TreeNode { + let index_str = I::to_string(); + let index = Index::try_from(index_str).ok(); + let indexes = index.into_iter().collect(); + + let leaf = MetricLeaf::new( + vec.name().to_string(), + vec.value_type_to_string().to_string(), + indexes, + ); + + let schema = schemars::SchemaGenerator::default().into_root_schema_for::(); + let schema_json = serde_json::to_value(schema).unwrap_or_default(); + + TreeNode::Leaf(MetricLeafWithSchema::new(leaf, schema_json)) +} + // BytesVec implementation impl Traversable for BytesVec where I: VecIndex, - T: BytesVecValue + Formattable + Serialize, + T: BytesVecValue + Formattable + Serialize + JsonSchema, { fn 
iter_any_exportable(&self) -> impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -35,14 +54,14 @@ where impl Traversable for vecdb::ZeroCopyVec where I: VecIndex, - T: vecdb::ZeroCopyVecValue + Formattable + Serialize, + T: vecdb::ZeroCopyVecValue + Formattable + Serialize + JsonSchema, { fn iter_any_exportable(&self) -> impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -51,14 +70,14 @@ where impl Traversable for vecdb::PcoVec where I: VecIndex, - T: vecdb::PcoVecValue + Formattable + Serialize, + T: vecdb::PcoVecValue + Formattable + Serialize + JsonSchema, { fn iter_any_exportable(&self) -> impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -67,14 +86,14 @@ where impl Traversable for vecdb::LZ4Vec where I: VecIndex, - T: vecdb::LZ4VecValue + Formattable + Serialize, + T: vecdb::LZ4VecValue + Formattable + Serialize + JsonSchema, { fn iter_any_exportable(&self) -> impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -83,14 +102,14 @@ where impl Traversable for vecdb::ZstdVec where I: VecIndex, - T: vecdb::ZstdVecValue + Formattable + Serialize, + T: vecdb::ZstdVecValue + Formattable + Serialize + JsonSchema, { fn iter_any_exportable(&self) -> impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -98,21 +117,21 @@ where impl Traversable for EagerVec where V: StoredVec, - V::T: Formattable + Serialize, + V::T: Formattable + Serialize + JsonSchema, { fn iter_any_exportable(&self) -> 
impl Iterator { std::iter::once(self as &dyn AnyExportableVec) } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } impl Traversable for LazyVecFrom1 where I: VecIndex, - T: VecValue + Formattable + Serialize, + T: VecValue + Formattable + Serialize + JsonSchema, S1I: VecIndex, S1T: VecValue, { @@ -121,14 +140,14 @@ where } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } impl Traversable for LazyVecFrom2 where I: VecIndex, - T: VecValue + Formattable + Serialize, + T: VecValue + Formattable + Serialize + JsonSchema, S1I: VecIndex, S1T: VecValue, S2I: VecIndex, @@ -139,7 +158,7 @@ where } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } @@ -147,7 +166,7 @@ impl Traversable for LazyVecFrom3 where I: VecIndex, - T: VecValue + Formattable + Serialize, + T: VecValue + Formattable + Serialize + JsonSchema, S1I: VecIndex, S1T: VecValue, S2I: VecIndex, @@ -160,7 +179,7 @@ where } fn to_tree_node(&self) -> TreeNode { - TreeNode::Leaf(self.name().to_string()) + make_leaf::(self) } } diff --git a/crates/brk_types/src/anyaddressindex.rs b/crates/brk_types/src/anyaddressindex.rs index 2eb7bee36..7aac9a463 100644 --- a/crates/brk_types/src/anyaddressindex.rs +++ b/crates/brk_types/src/anyaddressindex.rs @@ -1,3 +1,4 @@ +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; @@ -5,7 +6,7 @@ use crate::{EmptyAddressIndex, LoadedAddressIndex, TypeIndex}; const MIN_EMPTY_INDEX: u32 = u32::MAX - 4_000_000_000; -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Bytes)] +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Bytes, JsonSchema)] pub struct AnyAddressIndex(TypeIndex); impl AnyAddressIndex { diff --git a/crates/brk_types/src/bitcoin.rs b/crates/brk_types/src/bitcoin.rs index a6783f3a4..8f3df9713 100644 --- a/crates/brk_types/src/bitcoin.rs +++ 
b/crates/brk_types/src/bitcoin.rs @@ -3,12 +3,13 @@ use std::{ ops::{Add, AddAssign, Div, Mul}, }; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco}; use super::{Sats, StoredF64}; -#[derive(Debug, Default, Clone, Copy, Serialize, Pco)] +#[derive(Debug, Default, Clone, Copy, Serialize, Pco, JsonSchema)] pub struct Bitcoin(f64); impl Add for Bitcoin { diff --git a/crates/brk_types/src/blkposition.rs b/crates/brk_types/src/blkposition.rs index 641ee18eb..f04da9345 100644 --- a/crates/brk_types/src/blkposition.rs +++ b/crates/brk_types/src/blkposition.rs @@ -1,9 +1,10 @@ use std::ops::Add; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Formattable, Pco}; -#[derive(Debug, Clone, Copy, Serialize, Pco)] +#[derive(Debug, Clone, Copy, Serialize, Pco, JsonSchema)] pub struct BlkPosition(u64); impl BlkPosition { diff --git a/crates/brk_types/src/bytes.rs b/crates/brk_types/src/bytes.rs index 420e6208b..caa94866c 100644 --- a/crates/brk_types/src/bytes.rs +++ b/crates/brk_types/src/bytes.rs @@ -1,8 +1,22 @@ use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::Bytes; -#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive( + Debug, + Clone, + Deref, + DerefMut, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Bytes, + Hash, + JsonSchema, +)] pub struct U8x2([u8; 2]); impl From<&[u8]> for U8x2 { #[inline] @@ -13,7 +27,20 @@ impl From<&[u8]> for U8x2 { } } -#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive( + Debug, + Clone, + Deref, + DerefMut, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Bytes, + Hash, + JsonSchema, +)] pub struct U8x20([u8; 20]); impl From<&[u8]> for U8x20 { #[inline] @@ -24,7 +51,20 @@ impl From<&[u8]> for U8x20 { } } -#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive( + Debug, + 
Clone, + Deref, + DerefMut, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Bytes, + Hash, + JsonSchema, +)] pub struct U8x32([u8; 32]); impl From<&[u8]> for U8x32 { #[inline] @@ -35,8 +75,20 @@ impl From<&[u8]> for U8x32 { } } -#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Bytes, Hash, Serialize)] pub struct U8x33(#[serde(with = "serde_bytes")] [u8; 33]); + +impl JsonSchema for U8x33 { + fn schema_name() -> std::borrow::Cow<'static, str> { + "U8x33".into() + } + + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + // Represent as a byte string + String::json_schema(_gen) + } +} + impl From<&[u8]> for U8x33 { #[inline] fn from(slice: &[u8]) -> Self { @@ -46,8 +98,20 @@ impl From<&[u8]> for U8x33 { } } -#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, DerefMut, PartialEq, Eq, PartialOrd, Ord, Bytes, Hash, Serialize)] pub struct U8x65(#[serde(with = "serde_bytes")] [u8; 65]); + +impl JsonSchema for U8x65 { + fn schema_name() -> std::borrow::Cow<'static, str> { + "U8x65".into() + } + + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + // Represent as a byte string + String::json_schema(_gen) + } +} + impl From<&[u8]> for U8x65 { #[inline] fn from(slice: &[u8]) -> Self { diff --git a/crates/brk_types/src/cents.rs b/crates/brk_types/src/cents.rs index 7692f7e6f..df3102edd 100644 --- a/crates/brk_types/src/cents.rs +++ b/crates/brk_types/src/cents.rs @@ -1,11 +1,14 @@ use std::ops::{Add, Div, Mul}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco}; use super::Dollars; -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema, +)] pub struct 
Cents(i64); impl Cents { diff --git a/crates/brk_types/src/date.rs b/crates/brk_types/src/date.rs index 12df41c73..f185609be 100644 --- a/crates/brk_types/src/date.rs +++ b/crates/brk_types/src/date.rs @@ -1,4 +1,5 @@ use jiff::{Span, Zoned, civil::Date as Date_, tz::TimeZone}; +use schemars::JsonSchema; use serde::{Serialize, Serializer}; use vecdb::{Formattable, Pco}; @@ -6,7 +7,7 @@ use crate::ONE_DAY_IN_SEC_F64; use super::{DateIndex, Timestamp}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Pco)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Pco, JsonSchema)] pub struct Date(u32); impl Date { diff --git a/crates/brk_types/src/dateindex.rs b/crates/brk_types/src/dateindex.rs index f79b70c92..fc172d9f1 100644 --- a/crates/brk_types/src/dateindex.rs +++ b/crates/brk_types/src/dateindex.rs @@ -2,6 +2,7 @@ use std::ops::{Add, Rem}; use brk_error::Error; use jiff::Span; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, FromCoarserIndex, Pco, PrintableIndex}; @@ -9,7 +10,9 @@ use crate::{DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, WeekIndex, Yea use super::Date; -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema, +)] pub struct DateIndex(u16); impl DateIndex { diff --git a/crates/brk_types/src/decadeindex.rs b/crates/brk_types/src/decadeindex.rs index 064ded0b9..727cf873c 100644 --- a/crates/brk_types/src/decadeindex.rs +++ b/crates/brk_types/src/decadeindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::{Date, DateIndex, YearIndex}; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + 
Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct DecadeIndex(u16); diff --git a/crates/brk_types/src/difficultyepoch.rs b/crates/brk_types/src/difficultyepoch.rs index e5b73ff28..597d728a7 100644 --- a/crates/brk_types/src/difficultyepoch.rs +++ b/crates/brk_types/src/difficultyepoch.rs @@ -3,6 +3,7 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -11,7 +12,18 @@ use super::Height; pub const BLOCKS_PER_DIFF_EPOCHS: u32 = 2016; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct DifficultyEpoch(u16); diff --git a/crates/brk_types/src/emptyaddressdata.rs b/crates/brk_types/src/emptyaddressdata.rs index 89a8e8457..f225d95c3 100644 --- a/crates/brk_types/src/emptyaddressdata.rs +++ b/crates/brk_types/src/emptyaddressdata.rs @@ -1,10 +1,11 @@ +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::{LoadedAddressData, Sats}; /// Data of an empty address -#[derive(Debug, Default, Clone, Serialize)] +#[derive(Debug, Default, Clone, Serialize, JsonSchema)] #[repr(C)] pub struct EmptyAddressData { /// Total transaction count diff --git a/crates/brk_types/src/emptyaddressindex.rs b/crates/brk_types/src/emptyaddressindex.rs index b6e995faf..96e8a123b 100644 --- a/crates/brk_types/src/emptyaddressindex.rs +++ b/crates/brk_types/src/emptyaddressindex.rs @@ -1,12 +1,15 @@ use std::ops::Add; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deref, Serialize, Pco)] +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deref, 
Serialize, Pco, JsonSchema, +)] pub struct EmptyAddressIndex(TypeIndex); impl From for EmptyAddressIndex { diff --git a/crates/brk_types/src/emptyoutputindex.rs b/crates/brk_types/src/emptyoutputindex.rs index 6420ebf7e..4878082e7 100644 --- a/crates/brk_types/src/emptyoutputindex.rs +++ b/crates/brk_types/src/emptyoutputindex.rs @@ -1,13 +1,14 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, JsonSchema, )] pub struct EmptyOutputIndex(TypeIndex); impl From for EmptyOutputIndex { diff --git a/crates/brk_types/src/halvingepoch.rs b/crates/brk_types/src/halvingepoch.rs index ae46b2b65..377d43471 100644 --- a/crates/brk_types/src/halvingepoch.rs +++ b/crates/brk_types/src/halvingepoch.rs @@ -3,6 +3,7 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -11,7 +12,18 @@ use super::Height; pub const BLOCKS_PER_HALVING: u32 = 210_000; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct HalvingEpoch(u16); diff --git a/crates/brk_types/src/loadedaddressdata.rs b/crates/brk_types/src/loadedaddressdata.rs index 186998b2b..25e9c3acb 100644 --- a/crates/brk_types/src/loadedaddressdata.rs +++ b/crates/brk_types/src/loadedaddressdata.rs @@ -1,11 +1,12 @@ use brk_error::{Error, Result}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, CheckedSub, Formattable}; use crate::{Bitcoin, Dollars, EmptyAddressData, Sats}; /// Data 
for a loaded (non-empty) address with current balance -#[derive(Debug, Default, Clone, Serialize)] +#[derive(Debug, Default, Clone, Serialize, JsonSchema)] #[repr(C)] pub struct LoadedAddressData { /// Total transaction count @@ -64,8 +65,12 @@ impl LoadedAddressData { } pub fn receive(&mut self, amount: Sats, price: Option) { + self.receive_outputs(amount, price, 1); + } + + pub fn receive_outputs(&mut self, amount: Sats, price: Option, output_count: u32) { self.received += amount; - self.funded_txo_count += 1; + self.funded_txo_count += output_count; if let Some(price) = price { let added = price * amount; self.realized_cap += added; diff --git a/crates/brk_types/src/loadedaddressindex.rs b/crates/brk_types/src/loadedaddressindex.rs index 83f928ab2..01789d712 100644 --- a/crates/brk_types/src/loadedaddressindex.rs +++ b/crates/brk_types/src/loadedaddressindex.rs @@ -1,12 +1,15 @@ use std::ops::Add; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, Default, Serialize, Pco)] +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, Default, Serialize, Pco, JsonSchema, +)] pub struct LoadedAddressIndex(TypeIndex); impl From for LoadedAddressIndex { diff --git a/crates/brk_types/src/monthindex.rs b/crates/brk_types/src/monthindex.rs index 83f847ccc..0f05ffa84 100644 --- a/crates/brk_types/src/monthindex.rs +++ b/crates/brk_types/src/monthindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::{Date, DateIndex, YearIndex}; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + 
JsonSchema, )] pub struct MonthIndex(u16); diff --git a/crates/brk_types/src/ohlc.rs b/crates/brk_types/src/ohlc.rs index ae667908f..d90fbdde5 100644 --- a/crates/brk_types/src/ohlc.rs +++ b/crates/brk_types/src/ohlc.rs @@ -5,6 +5,7 @@ use std::{ }; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::{Serialize, Serializer, ser::SerializeTuple}; use vecdb::{Bytes, Formattable, Pco, TransparentPco}; @@ -12,7 +13,7 @@ use crate::StoredF64; use super::{Cents, Dollars, Sats}; -#[derive(Debug, Default, Clone)] +#[derive(Debug, Default, Clone, JsonSchema)] #[repr(C)] pub struct OHLCCents { pub open: Open, @@ -98,7 +99,7 @@ impl Bytes for OHLCCents { } } -#[derive(Debug, Default, Clone, Copy)] +#[derive(Debug, Default, Clone, Copy, JsonSchema)] #[repr(C)] pub struct OHLCDollars { pub open: Open, @@ -210,7 +211,7 @@ impl Bytes for OHLCDollars { } } -#[derive(Debug, Default, Clone, Copy)] +#[derive(Debug, Default, Clone, Copy, JsonSchema)] #[repr(C)] pub struct OHLCSats { pub open: Open, @@ -304,7 +305,19 @@ impl Bytes for OHLCSats { } #[derive( - Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Serialize, Pco, + Debug, + Default, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Deref, + DerefMut, + Serialize, + Pco, + JsonSchema, )] #[repr(transparent)] pub struct Open(T); @@ -421,7 +434,19 @@ where } #[derive( - Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Serialize, Pco, + Debug, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Serialize, + Pco, + JsonSchema, )] #[repr(transparent)] pub struct High(T); @@ -538,7 +563,19 @@ where } #[derive( - Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Serialize, Pco, + Debug, + Default, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Deref, + DerefMut, + Serialize, + Pco, + JsonSchema, )] #[repr(transparent)] pub struct Low(T); @@ -655,7 +692,19 @@ where } 
#[derive( - Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Serialize, Pco, + Debug, + Default, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Deref, + DerefMut, + Serialize, + Pco, + JsonSchema, )] #[repr(transparent)] pub struct Close(T); diff --git a/crates/brk_types/src/opreturnindex.rs b/crates/brk_types/src/opreturnindex.rs index 842dc4b0d..788efbe78 100644 --- a/crates/brk_types/src/opreturnindex.rs +++ b/crates/brk_types/src/opreturnindex.rs @@ -1,13 +1,26 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct OpReturnIndex(TypeIndex); @@ -17,30 +30,35 @@ impl From for OpReturnIndex { Self(value) } } + impl From for usize { #[inline] fn from(value: OpReturnIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: OpReturnIndex) -> Self { Self::from(*value) } } + impl From for OpReturnIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for OpReturnIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for OpReturnIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2aaddressindex.rs b/crates/brk_types/src/p2aaddressindex.rs index ffd4cea76..4d4eb8ef2 100644 --- a/crates/brk_types/src/p2aaddressindex.rs +++ b/crates/brk_types/src/p2aaddressindex.rs @@ -1,68 +1,91 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use 
crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2AAddressIndex(TypeIndex); + impl From for P2AAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2AAddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2AAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2AAddressIndex) -> Self { Self::from(*value) } } + impl From for P2AAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for usize { #[inline] fn from(value: P2AAddressIndex) -> Self { Self::from(*value) } } + impl From for P2AAddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2AAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2AAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) } } + impl PrintableIndex for P2AAddressIndex { fn to_string() -> &'static str { "p2aaddressindex" diff --git a/crates/brk_types/src/p2abytes.rs b/crates/brk_types/src/p2abytes.rs index 8c5f9d021..5c1722060 100644 --- a/crates/brk_types/src/p2abytes.rs +++ b/crates/brk_types/src/p2abytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x2; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2ABytes(U8x2); impl From<&[u8]> for P2ABytes { diff --git a/crates/brk_types/src/p2msoutputindex.rs 
b/crates/brk_types/src/p2msoutputindex.rs index d34a58a1d..1506aad5e 100644 --- a/crates/brk_types/src/p2msoutputindex.rs +++ b/crates/brk_types/src/p2msoutputindex.rs @@ -1,45 +1,64 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2MSOutputIndex(TypeIndex); + impl From for P2MSOutputIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for usize { #[inline] fn from(value: P2MSOutputIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2MSOutputIndex) -> Self { Self::from(*value) } } + impl From for P2MSOutputIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2MSOutputIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2MSOutputIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2pk33addressindex.rs b/crates/brk_types/src/p2pk33addressindex.rs index c4a708081..718c61b78 100644 --- a/crates/brk_types/src/p2pk33addressindex.rs +++ b/crates/brk_types/src/p2pk33addressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct 
P2PK33AddressIndex(TypeIndex); + impl From for P2PK33AddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2PK33AddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2PK33AddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2PK33AddressIndex) -> Self { Self::from(*value) } } + impl From for P2PK33AddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for usize { #[inline] fn from(value: P2PK33AddressIndex) -> Self { Self::from(*value) } } + impl From for P2PK33AddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2PK33AddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2PK33AddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2pk33bytes.rs b/crates/brk_types/src/p2pk33bytes.rs index 559753dc6..712094de7 100644 --- a/crates/brk_types/src/p2pk33bytes.rs +++ b/crates/brk_types/src/p2pk33bytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x33; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2PK33Bytes(U8x33); impl From<&[u8]> for P2PK33Bytes { diff --git a/crates/brk_types/src/p2pk65addressindex.rs b/crates/brk_types/src/p2pk65addressindex.rs index 73b8f433b..8c37f90d8 100644 --- a/crates/brk_types/src/p2pk65addressindex.rs +++ b/crates/brk_types/src/p2pk65addressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, 
Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2PK65AddressIndex(TypeIndex); + impl From for P2PK65AddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2PK65AddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2PK65AddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2PK65AddressIndex) -> Self { Self::from(*value) } } + impl From for usize { #[inline] fn from(value: P2PK65AddressIndex) -> Self { Self::from(*value) } } + impl From for P2PK65AddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for P2PK65AddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2PK65AddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2PK65AddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2pk65bytes.rs b/crates/brk_types/src/p2pk65bytes.rs index 1e665353d..b266065d0 100644 --- a/crates/brk_types/src/p2pk65bytes.rs +++ b/crates/brk_types/src/p2pk65bytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x65; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2PK65Bytes(U8x65); impl From<&[u8]> for P2PK65Bytes { diff --git a/crates/brk_types/src/p2pkhaddressindex.rs 
b/crates/brk_types/src/p2pkhaddressindex.rs index 0f9307719..9bff56bb3 100644 --- a/crates/brk_types/src/p2pkhaddressindex.rs +++ b/crates/brk_types/src/p2pkhaddressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2PKHAddressIndex(TypeIndex); + impl From for P2PKHAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2PKHAddressIndex) -> Self { value.0 } } + impl From for usize { #[inline] fn from(value: P2PKHAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2PKHAddressIndex) -> Self { Self::from(*value) } } + impl From for u32 { #[inline] fn from(value: P2PKHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2PKHAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for P2PKHAddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2PKHAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2PKHAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2pkhbytes.rs b/crates/brk_types/src/p2pkhbytes.rs index e4161105b..b7f1b7e68 100644 --- a/crates/brk_types/src/p2pkhbytes.rs +++ b/crates/brk_types/src/p2pkhbytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x20; -#[derive(Debug, Clone, 
Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2PKHBytes(U8x20); impl From<&[u8]> for P2PKHBytes { diff --git a/crates/brk_types/src/p2shaddressindex.rs b/crates/brk_types/src/p2shaddressindex.rs index 8d63a8c44..7f4f3b6c0 100644 --- a/crates/brk_types/src/p2shaddressindex.rs +++ b/crates/brk_types/src/p2shaddressindex.rs @@ -1,69 +1,92 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2SHAddressIndex(TypeIndex); + impl From for P2SHAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2SHAddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2SHAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2SHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2SHAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for P2SHAddressIndex { #[inline] fn from(value: u64) -> Self { Self(TypeIndex::from(value)) } } + impl From for usize { #[inline] fn from(value: P2SHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2SHAddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2SHAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2SHAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff 
--git a/crates/brk_types/src/p2shbytes.rs b/crates/brk_types/src/p2shbytes.rs index 7dd05fcc6..225ca2734 100644 --- a/crates/brk_types/src/p2shbytes.rs +++ b/crates/brk_types/src/p2shbytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x20; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2SHBytes(U8x20); impl From<&[u8]> for P2SHBytes { diff --git a/crates/brk_types/src/p2traddressindex.rs b/crates/brk_types/src/p2traddressindex.rs index 00de4ac46..773d97dad 100644 --- a/crates/brk_types/src/p2traddressindex.rs +++ b/crates/brk_types/src/p2traddressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2TRAddressIndex(TypeIndex); + impl From for P2TRAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2TRAddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2TRAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2TRAddressIndex) -> Self { Self::from(*value) } } + impl From for P2TRAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for usize { #[inline] fn from(value: P2TRAddressIndex) -> Self { Self::from(*value) } } + impl From for P2TRAddressIndex { #[inline] fn from(value: usize) -> Self { 
Self(TypeIndex::from(value)) } } + impl Add for P2TRAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2TRAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2trbytes.rs b/crates/brk_types/src/p2trbytes.rs index 4fc7f23a1..a23708594 100644 --- a/crates/brk_types/src/p2trbytes.rs +++ b/crates/brk_types/src/p2trbytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x32; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2TRBytes(U8x32); impl From<&[u8]> for P2TRBytes { diff --git a/crates/brk_types/src/p2wpkhaddressindex.rs b/crates/brk_types/src/p2wpkhaddressindex.rs index f6aea3650..955574c03 100644 --- a/crates/brk_types/src/p2wpkhaddressindex.rs +++ b/crates/brk_types/src/p2wpkhaddressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2WPKHAddressIndex(TypeIndex); + impl From for P2WPKHAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl From for TypeIndex { #[inline] fn from(value: P2WPKHAddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2WPKHAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2WPKHAddressIndex) -> Self { Self::from(*value) } } + impl 
From for usize { #[inline] fn from(value: P2WPKHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2WPKHAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for P2WPKHAddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2WPKHAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2WPKHAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2wpkhbytes.rs b/crates/brk_types/src/p2wpkhbytes.rs index b9ce86195..dadd08f8e 100644 --- a/crates/brk_types/src/p2wpkhbytes.rs +++ b/crates/brk_types/src/p2wpkhbytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x20; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2WPKHBytes(U8x20); impl From<&[u8]> for P2WPKHBytes { diff --git a/crates/brk_types/src/p2wshaddressindex.rs b/crates/brk_types/src/p2wshaddressindex.rs index b4009e401..5a758352d 100644 --- a/crates/brk_types/src/p2wshaddressindex.rs +++ b/crates/brk_types/src/p2wshaddressindex.rs @@ -1,63 +1,85 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Deref, + DerefMut, + Default, + Serialize, + Pco, + JsonSchema, )] pub struct P2WSHAddressIndex(TypeIndex); + impl From for P2WSHAddressIndex { #[inline] fn from(value: TypeIndex) -> Self { Self(value) } } + impl 
From for TypeIndex { #[inline] fn from(value: P2WSHAddressIndex) -> Self { value.0 } } + impl From for u32 { #[inline] fn from(value: P2WSHAddressIndex) -> Self { Self::from(*value) } } + impl From for u64 { #[inline] fn from(value: P2WSHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2WSHAddressIndex { #[inline] fn from(value: u32) -> Self { Self(TypeIndex::from(value)) } } + impl From for usize { #[inline] fn from(value: P2WSHAddressIndex) -> Self { Self::from(*value) } } + impl From for P2WSHAddressIndex { #[inline] fn from(value: usize) -> Self { Self(TypeIndex::from(value)) } } + impl Add for P2WSHAddressIndex { type Output = Self; fn add(self, rhs: usize) -> Self::Output { Self(*self + rhs) } } + impl CheckedSub for P2WSHAddressIndex { fn checked_sub(self, rhs: Self) -> Option { self.0.checked_sub(rhs.0).map(Self) diff --git a/crates/brk_types/src/p2wshbytes.rs b/crates/brk_types/src/p2wshbytes.rs index c37c2d742..f55c31a90 100644 --- a/crates/brk_types/src/p2wshbytes.rs +++ b/crates/brk_types/src/p2wshbytes.rs @@ -1,12 +1,13 @@ use std::fmt; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Bytes, Formattable}; use crate::U8x32; -#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash)] +#[derive(Debug, Clone, Deref, PartialEq, Eq, PartialOrd, Ord, Serialize, Bytes, Hash, JsonSchema)] pub struct P2WSHBytes(U8x32); impl From<&[u8]> for P2WSHBytes { diff --git a/crates/brk_types/src/quarterindex.rs b/crates/brk_types/src/quarterindex.rs index 23c7c6108..96d55a61b 100644 --- a/crates/brk_types/src/quarterindex.rs +++ b/crates/brk_types/src/quarterindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::MonthIndex; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + 
Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct QuarterIndex(u16); diff --git a/crates/brk_types/src/semesterindex.rs b/crates/brk_types/src/semesterindex.rs index 316cef7f4..e85b801d1 100644 --- a/crates/brk_types/src/semesterindex.rs +++ b/crates/brk_types/src/semesterindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::MonthIndex; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct SemesterIndex(u16); diff --git a/crates/brk_types/src/stored_bool.rs b/crates/brk_types/src/stored_bool.rs index 940d10d23..503975748 100644 --- a/crates/brk_types/src/stored_bool.rs +++ b/crates/brk_types/src/stored_bool.rs @@ -1,8 +1,9 @@ use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{Formattable, Pco, PrintableIndex}; -#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema)] pub struct StoredBool(u16); impl StoredBool { diff --git a/crates/brk_types/src/stored_f32.rs b/crates/brk_types/src/stored_f32.rs index b8b795845..9300b8449 100644 --- a/crates/brk_types/src/stored_f32.rs +++ b/crates/brk_types/src/stored_f32.rs @@ -7,6 +7,7 @@ use std::{ }; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -14,7 +15,7 @@ use crate::{Close, StoredU32}; use super::{Dollars, StoredF64}; -#[derive(Debug, Deref, Default, Clone, Copy, Serialize, Pco)] +#[derive(Debug, Deref, Default, Clone, Copy, Serialize, Pco, JsonSchema)] pub 
struct StoredF32(f32); impl StoredF32 { diff --git a/crates/brk_types/src/stored_f64.rs b/crates/brk_types/src/stored_f64.rs index 29ea38f87..d485f2208 100644 --- a/crates/brk_types/src/stored_f64.rs +++ b/crates/brk_types/src/stored_f64.rs @@ -6,12 +6,13 @@ use std::{ }; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::{Bitcoin, Dollars}; -#[derive(Debug, Deref, Default, Clone, Copy, Serialize, Pco)] +#[derive(Debug, Deref, Default, Clone, Copy, Serialize, Pco, JsonSchema)] pub struct StoredF64(f64); impl StoredF64 { diff --git a/crates/brk_types/src/stored_i16.rs b/crates/brk_types/src/stored_i16.rs index b583ea862..722a107d9 100644 --- a/crates/brk_types/src/stored_i16.rs +++ b/crates/brk_types/src/stored_i16.rs @@ -1,10 +1,13 @@ use std::ops::{Add, AddAssign, Div}; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; -#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive( + Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema, +)] pub struct StoredI16(i16); impl StoredI16 { diff --git a/crates/brk_types/src/stored_u16.rs b/crates/brk_types/src/stored_u16.rs index 359416021..cb2b4dd7c 100644 --- a/crates/brk_types/src/stored_u16.rs +++ b/crates/brk_types/src/stored_u16.rs @@ -1,6 +1,7 @@ use std::ops::{Add, AddAssign, Div}; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -10,7 +11,9 @@ use super::{ P2WSHAddressIndex, UnknownOutputIndex, }; -#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive( + Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema, +)] pub struct StoredU16(u16); impl StoredU16 { diff --git 
a/crates/brk_types/src/stored_u32.rs b/crates/brk_types/src/stored_u32.rs index 9f9993f40..41ddfee4f 100644 --- a/crates/brk_types/src/stored_u32.rs +++ b/crates/brk_types/src/stored_u32.rs @@ -1,6 +1,7 @@ use std::ops::{Add, AddAssign, Div, Mul}; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -10,7 +11,7 @@ use super::{ P2WSHAddressIndex, UnknownOutputIndex, }; -#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive(Debug, Deref, Clone, Default, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema)] pub struct StoredU32(u32); impl StoredU32 { diff --git a/crates/brk_types/src/stored_u64.rs b/crates/brk_types/src/stored_u64.rs index 2cf65aa32..7b0021eb1 100644 --- a/crates/brk_types/src/stored_u64.rs +++ b/crates/brk_types/src/stored_u64.rs @@ -1,6 +1,7 @@ use std::ops::{Add, AddAssign, Div}; use derive_deref::Deref; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; @@ -11,7 +12,7 @@ use super::{ UnknownOutputIndex, YearIndex, }; -#[derive(Debug, Default, Deref, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco)] +#[derive(Debug, Default, Deref, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Pco, JsonSchema)] pub struct StoredU64(u64); impl StoredU64 { diff --git a/crates/brk_types/src/treenode.rs b/crates/brk_types/src/treenode.rs index 0e2d2da2e..f57d913ca 100644 --- a/crates/brk_types/src/treenode.rs +++ b/crates/brk_types/src/treenode.rs @@ -1,19 +1,97 @@ -use std::{collections::BTreeMap, sync::LazyLock}; +use std::{ + collections::{BTreeMap, BTreeSet}, + sync::LazyLock, +}; use schemars::JsonSchema; use serde::Serialize; use super::Index; +/// Leaf node containing metric metadata #[derive(Debug, Clone, Serialize, PartialEq, Eq, JsonSchema)] +pub struct MetricLeaf { + /// The metric name/identifier + pub name: String, + /// The value type 
(e.g., "Sats", "StoredF64") + pub value_type: String, + /// Available indexes for this metric + pub indexes: BTreeSet, +} + +impl MetricLeaf { + pub fn new(name: String, value_type: String, indexes: BTreeSet) -> Self { + Self { + name, + value_type, + indexes, + } + } + + /// Merge another leaf's indexes into this one (union) + pub fn merge_indexes(&mut self, other: &MetricLeaf) { + self.indexes.extend(other.indexes.iter().copied()); + } +} + +/// MetricLeaf with JSON Schema for client generation +#[derive(Debug, Clone, Serialize)] +pub struct MetricLeafWithSchema { + /// The core metric metadata + #[serde(flatten)] + pub leaf: MetricLeaf, + /// JSON Schema for the value type + #[serde(skip)] + pub schema: serde_json::Value, +} + +impl MetricLeafWithSchema { + pub fn new(leaf: MetricLeaf, schema: serde_json::Value) -> Self { + Self { leaf, schema } + } + + /// The metric name/identifier + pub fn name(&self) -> &str { + &self.leaf.name + } + + /// The value type (e.g., "Sats", "StoredF64") + pub fn value_type(&self) -> &str { + &self.leaf.value_type + } + + /// Available indexes for this metric + pub fn indexes(&self) -> &BTreeSet { + &self.leaf.indexes + } + + /// Check if this leaf refers to the same metric as another + pub fn is_same_metric(&self, other: &MetricLeafWithSchema) -> bool { + self.leaf.name == other.leaf.name + } + + /// Merge another leaf's indexes into this one (union) + pub fn merge_indexes(&mut self, other: &MetricLeafWithSchema) { + self.leaf.merge_indexes(&other.leaf); + } +} + +impl PartialEq for MetricLeafWithSchema { + fn eq(&self, other: &Self) -> bool { + self.leaf == other.leaf + } +} + +impl Eq for MetricLeafWithSchema {} + +#[derive(Debug, Clone, Serialize, PartialEq, Eq)] #[serde(untagged)] /// Hierarchical tree node for organizing metrics into categories pub enum TreeNode { /// Branch node containing subcategories Branch(BTreeMap), - /// Leaf node containing the metric name - #[schemars(example = &"price_close", example = 
&"market_cap", example = &"realized_price")] - Leaf(String), + /// Leaf node containing metric metadata with schema + Leaf(MetricLeafWithSchema), } const BASE: &str = "base"; @@ -55,8 +133,8 @@ impl TreeNode { for node in tree.values() { match node { - Self::Leaf(value) => { - Self::merge_node(&mut merged, BASE, &Self::Leaf(value.clone()))?; + Self::Leaf(leaf) => { + Self::merge_node(&mut merged, BASE, &Self::Leaf(leaf.clone()))?; } Self::Branch(inner) => { for (key, inner_node) in inner { @@ -68,33 +146,36 @@ impl TreeNode { let result = Self::Branch(merged); - // Check if all leaves have the same value - if let Some(common_value) = result.all_leaves_same() { - Some(Self::Leaf(common_value)) + // Check if all leaves have the same name (can be collapsed) + if let Some(common_leaf) = result.all_leaves_same() { + Some(Self::Leaf(common_leaf)) } else { Some(result) } } - /// Checks if all leaves in the tree have the same value. - /// Returns Some(value) if all leaves are identical, None otherwise. - fn all_leaves_same(&self) -> Option { + /// Checks if all leaves in the tree have the same metric name. + /// Returns Some(merged_leaf) if all leaves have the same name, None otherwise. + /// When merging, indexes are unioned together. 
+ fn all_leaves_same(&self) -> Option { match self { - Self::Leaf(value) => Some(value.clone()), + Self::Leaf(leaf) => Some(leaf.clone()), Self::Branch(map) => { - let mut common_value: Option = None; + let mut common_leaf: Option = None; for node in map.values() { - let node_value = node.all_leaves_same()?; + let node_leaf = node.all_leaves_same()?; - match &common_value { - None => common_value = Some(node_value), - Some(existing) if existing != &node_value => return None, - _ => {} + match &mut common_leaf { + None => common_leaf = Some(node_leaf), + Some(existing) if existing.is_same_metric(&node_leaf) => { + existing.merge_indexes(&node_leaf); + } + Some(_) => return None, } } - common_value + common_leaf } } } @@ -111,39 +192,42 @@ impl TreeNode { target.insert(key.to_string(), node.clone()); Some(()) } - Some(existing) => match (&existing, node) { - // Same leaf values: ok - (Self::Leaf(a), Self::Leaf(b)) if a == b => Some(()), - // Different leaf values: conflict - (Self::Leaf(a), Self::Leaf(b)) => { - eprintln!("Conflict: Different leaf values for key '{key}'"); - eprintln!(" Existing: {a:?}"); - eprintln!(" New: {b:?}"); - None - } - (Self::Leaf(leaf), Self::Branch(branch)) => { - let mut new_branch = BTreeMap::new(); - new_branch.insert(BASE.to_string(), Self::Leaf(leaf.clone())); - - for (k, v) in branch { - Self::merge_node(&mut new_branch, k, v)?; + Some(existing) => { + match (&mut *existing, node) { + (Self::Leaf(a), Self::Leaf(b)) if a.is_same_metric(b) => { + a.merge_indexes(b); + Some(()) } - - *existing = Self::Branch(new_branch); - Some(()) - } - (Self::Branch(_), Self::Leaf(leaf)) => { - Self::merge_node(existing.as_mut_branch(), BASE, &Self::Leaf(leaf.clone()))?; - Some(()) - } - // Both branches: merge recursively - (Self::Branch(_), Self::Branch(new_inner)) => { - for (k, v) in new_inner { - Self::merge_node(existing.as_mut_branch(), k, v)?; + (Self::Leaf(a), Self::Leaf(b)) => { + eprintln!("Conflict: Different leaf values for key 
'{key}'"); + eprintln!(" Existing: {a:?}"); + eprintln!(" New: {b:?}"); + None + } + (Self::Leaf(leaf), Self::Branch(branch)) => { + let mut new_branch = BTreeMap::new(); + new_branch.insert(BASE.to_string(), Self::Leaf(leaf.clone())); + + for (k, v) in branch { + Self::merge_node(&mut new_branch, k, v)?; + } + + *existing = Self::Branch(new_branch); + Some(()) + } + (Self::Branch(existing_branch), Self::Leaf(leaf)) => { + Self::merge_node(existing_branch, BASE, &Self::Leaf(leaf.clone()))?; + Some(()) + } + // Both branches: merge recursively + (Self::Branch(existing_branch), Self::Branch(new_inner)) => { + for (k, v) in new_inner { + Self::merge_node(existing_branch, k, v)?; + } + Some(()) } - Some(()) } - }, + } } } diff --git a/crates/brk_types/src/txinindex.rs b/crates/brk_types/src/txinindex.rs index baa93c95b..1b320811a 100644 --- a/crates/brk_types/src/txinindex.rs +++ b/crates/brk_types/src/txinindex.rs @@ -1,13 +1,14 @@ use std::ops::{Add, AddAssign}; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::Vin; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, JsonSchema, )] pub struct TxInIndex(u64); diff --git a/crates/brk_types/src/txoutindex.rs b/crates/brk_types/src/txoutindex.rs index 3c1367785..589b32825 100644 --- a/crates/brk_types/src/txoutindex.rs +++ b/crates/brk_types/src/txoutindex.rs @@ -1,13 +1,14 @@ use std::ops::{Add, AddAssign}; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::Vout; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, 
JsonSchema, )] pub struct TxOutIndex(u64); diff --git a/crates/brk_types/src/unknownoutputindex.rs b/crates/brk_types/src/unknownoutputindex.rs index e596b6372..7617273da 100644 --- a/crates/brk_types/src/unknownoutputindex.rs +++ b/crates/brk_types/src/unknownoutputindex.rs @@ -1,13 +1,14 @@ use std::ops::Add; use derive_deref::{Deref, DerefMut}; +use schemars::JsonSchema; use serde::Serialize; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use crate::TypeIndex; #[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, + Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Pco, JsonSchema, )] pub struct UnknownOutputIndex(TypeIndex); diff --git a/crates/brk_types/src/weekindex.rs b/crates/brk_types/src/weekindex.rs index 4a8233cc2..c66133bcc 100644 --- a/crates/brk_types/src/weekindex.rs +++ b/crates/brk_types/src/weekindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::{Date, DateIndex}; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct WeekIndex(u16); diff --git a/crates/brk_types/src/yearindex.rs b/crates/brk_types/src/yearindex.rs index b587878d7..03d8d540c 100644 --- a/crates/brk_types/src/yearindex.rs +++ b/crates/brk_types/src/yearindex.rs @@ -3,13 +3,25 @@ use std::{ ops::{Add, AddAssign, Div}, }; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex}; use super::{Date, DateIndex, MonthIndex}; #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Serialize, Deserialize, Pco, + Debug, + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + 
Default, + Serialize, + Deserialize, + Pco, + JsonSchema, )] pub struct YearIndex(u16);