global: snapshot

This commit is contained in:
nym21
2025-12-20 17:02:00 +01:00
parent 4b910ceaa7
commit 8c2402cacb
42 changed files with 2754 additions and 2930 deletions

View File

@@ -1,41 +0,0 @@
mod javascript;
mod openapi;
mod python;
mod rust;
mod types;
pub use javascript::*;
pub use openapi::*;
pub use python::*;
pub use rust::*;
pub use types::*;
use brk_query::Vecs;
use std::io;
use std::path::Path;
/// Generate all client libraries from the query vecs and OpenAPI JSON
pub fn generate_clients(vecs: &Vecs, openapi_json: &str, output_dir: &Path) -> io::Result<()> {
let metadata = ClientMetadata::from_vecs(vecs);
// Parse OpenAPI spec from JSON
let spec = parse_openapi_json(openapi_json)?;
let endpoints = extract_endpoints(&spec);
// Generate Rust client
let rust_path = output_dir.join("rust");
std::fs::create_dir_all(&rust_path)?;
generate_rust_client(&metadata, &endpoints, &rust_path)?;
// Generate JavaScript client
let js_path = output_dir.join("javascript");
std::fs::create_dir_all(&js_path)?;
generate_javascript_client(&metadata, &endpoints, &js_path)?;
// Generate Python client
let python_path = output_dir.join("python");
std::fs::create_dir_all(&python_path)?;
generate_python_client(&metadata, &endpoints, &python_path)?;
Ok(())
}

View File

@@ -1,427 +0,0 @@
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
use std::fs;
use std::io;
use std::path::Path;
use brk_types::{Index, TreeNode};
use super::{ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, get_node_fields, to_pascal_case, to_snake_case};
/// Generate the Python client (`client.py`) from catalog metadata and the
/// extracted OpenAPI endpoints, writing it into `output_dir`.
pub fn generate_python_client(
    metadata: &ClientMetadata,
    endpoints: &[Endpoint],
    output_dir: &Path,
) -> io::Result<()> {
    let mut source = String::new();

    // File header plus imports for the generated module.
    source.push_str(
        "# Auto-generated BRK Python client\n\
         # Do not edit manually\n\n\
         from __future__ import annotations\n\
         from typing import TypeVar, Generic, Any, Optional, List\n\
         from dataclasses import dataclass\n\
         import httpx\n\n\
         T = TypeVar('T')\n\n",
    );

    // Emit sections in dependency order: base client, generic node,
    // accessor classes, pattern classes, tree classes, then main client.
    generate_base_client(&mut source);
    generate_metric_node(&mut source);
    generate_index_accessors(&mut source, &metadata.index_set_patterns);
    generate_structural_patterns(&mut source, &metadata.structural_patterns, metadata);
    generate_tree_classes(&mut source, &metadata.catalog, metadata);
    generate_main_client(&mut source, endpoints);

    fs::write(output_dir.join("client.py"), source)?;
    Ok(())
}
/// Generate the base Python scaffolding: the `BrkError` exception class and
/// the `BrkClientBase` httpx wrapper (GET request + context-manager support).
///
/// NOTE: `{{` / `}}` in the Rust format string escape to single braces, so
/// the emitted Python contains real f-string interpolations such as
/// `f"{self.base_url}{path}"` that resolve at *client* runtime.
fn generate_base_client(output: &mut String) {
    writeln!(
        output,
        r#"class BrkError(Exception):
    """Custom error class for BRK client errors."""
    def __init__(self, message: str, status: Optional[int] = None):
        super().__init__(message)
        self.status = status
class BrkClientBase:
    """Base HTTP client for making requests."""
    def __init__(self, base_url: str, timeout: float = 30.0):
        self.base_url = base_url
        self.timeout = timeout
        self._client = httpx.Client(timeout=timeout)
    def get(self, path: str) -> Any:
        """Make a GET request."""
        try:
            response = self._client.get(f"{{self.base_url}}{{path}}")
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            raise BrkError(f"HTTP error: {{e.response.status_code}}", e.response.status_code)
        except httpx.RequestError as e:
            raise BrkError(str(e))
    def close(self):
        """Close the HTTP client."""
        self._client.close()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
"#
    )
    .unwrap();
}
/// Generate the generic `MetricNode[T]` Python class: one REST path bound to
/// the shared client, with `get()` / `get_range()` fetch helpers.
fn generate_metric_node(output: &mut String) {
    writeln!(
        output,
        r#"class MetricNode(Generic[T]):
    """A metric node that can fetch data for different indexes."""
    def __init__(self, client: BrkClientBase, path: str):
        self._client = client
        self._path = path
    def get(self) -> List[T]:
        """Fetch all data points for this metric."""
        return self._client.get(self._path)
    def get_range(self, from_date: str, to_date: str) -> List[T]:
        """Fetch data points within a date range."""
        return self._client.get(f"{{self._path}}?from={{from_date}}&to={{to_date}}")
"#
    )
    .unwrap();
}
/// Generate one Python accessor class per unique index set; each class
/// exposes a `by_<index>` MetricNode attribute for every index it covers.
fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "# Index accessor classes\n").unwrap();
    for accessor in patterns {
        writeln!(output, "class {}(Generic[T]):", accessor.name).unwrap();
        writeln!(
            output,
            "    \"\"\"Index accessor for metrics with {} indexes.\"\"\"",
            accessor.indexes.len()
        )
        .unwrap();
        writeln!(output, "    ").unwrap();
        writeln!(output, "    def __init__(self, client: BrkClientBase, base_path: str):").unwrap();
        for idx in &accessor.indexes {
            // One attribute per index, routed to `<base_path>/<index>`.
            writeln!(
                output,
                "        self.{}: MetricNode[T] = MetricNode(client, f'{{base_path}}/{}')",
                index_to_snake_case(idx),
                idx.serialize_long()
            )
            .unwrap();
        }
        writeln!(output).unwrap();
    }
}
/// Convert an Index to a snake_case attribute name
/// (e.g. `DateIndex` -> `by_date_index`).
fn index_to_snake_case(index: &Index) -> String {
    let mut field = String::from("by_");
    field.push_str(&to_snake_case(index.serialize_long()));
    field
}
/// Generate one Python class per reusable structural pattern.
///
/// Each pattern field becomes an attribute assigned in `__init__`; the
/// attribute is (in priority order) a nested pattern class, a shared index
/// accessor class, or a bare `MetricNode`.
fn generate_structural_patterns(output: &mut String, patterns: &[StructuralPattern], metadata: &ClientMetadata) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "# Reusable structural pattern classes\n").unwrap();
    for pattern in patterns {
        writeln!(output, "class {}:", pattern.name).unwrap();
        writeln!(output, "    \"\"\"Pattern struct for repeated tree structure.\"\"\"").unwrap();
        writeln!(output, "    ").unwrap();
        writeln!(output, "    def __init__(self, client: BrkClientBase, base_path: str):").unwrap();
        for field in &pattern.fields {
            let py_type = field_to_python_type(field, metadata);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern: instantiate the pattern class itself.
                writeln!(
                    output,
                    "        self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                // Leaf whose index set matches a shared accessor class.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    "        self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, accessor.name, field.name
                ).unwrap();
            } else {
                // Leaf with a unique index set: plain MetricNode.
                writeln!(
                    output,
                    "        self.{}: {} = MetricNode(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, field.name
                ).unwrap();
            }
        }
        writeln!(output).unwrap();
    }
}
/// Compute the Python type annotation for a pattern field: the pattern class
/// itself, an accessor class parameterised by the value type, or a bare
/// `MetricNode` parameterised by the value type.
fn field_to_python_type(field: &PatternField, metadata: &ClientMetadata) -> String {
    // Pattern type - use pattern name directly.
    if metadata.is_pattern_type(&field.rust_type) {
        return field.rust_type.clone();
    }
    let value_type = json_type_to_python(&field.json_type);
    match metadata.find_index_set_pattern(&field.indexes) {
        // Leaf with a reusable accessor pattern.
        Some(accessor) => format!("{}[{}]", accessor.name, value_type),
        // Leaf with unique index set - use MetricNode directly.
        None => format!("MetricNode[{}]", value_type),
    }
}
/// True when the field's index set maps onto a shared index accessor class.
fn field_uses_accessor(field: &PatternField, metadata: &ClientMetadata) -> bool {
    matches!(metadata.find_index_set_pattern(&field.indexes), Some(_))
}
/// Map a JSON Schema primitive type name to its Python equivalent;
/// anything unrecognised falls back to `Any`.
fn json_type_to_python(json_type: &str) -> &str {
    match json_type {
        "array" => "List",
        "boolean" => "bool",
        "integer" => "int",
        "number" => "float",
        "object" => "dict",
        "string" => "str",
        _ => "Any",
    }
}
/// Emit the catalog tree classes, starting the recursion at the root
/// `CatalogTree` class and tracking already-emitted class names.
fn generate_tree_classes(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
    writeln!(output, "# Catalog tree classes\n").unwrap();
    let lookup = metadata.pattern_lookup();
    let mut emitted = HashSet::new();
    generate_tree_class(output, "CatalogTree", catalog, &lookup, metadata, &mut emitted);
}
/// Recursively generate tree classes.
///
/// Emits a Python class for `node` unless its field signature maps to a
/// reusable pattern class emitted elsewhere, or the class name was already
/// emitted; then recurses into branch children that are not patterns.
fn generate_tree_class(
    output: &mut String,
    name: &str,
    node: &TreeNode,
    pattern_lookup: &std::collections::HashMap<Vec<PatternField>, String>,
    metadata: &ClientMetadata,
    generated: &mut HashSet<String>,
) {
    // Leaf nodes produce no class of their own.
    if let TreeNode::Branch(children) = node {
        // Build signature
        let fields = get_node_fields(children, pattern_lookup);
        // Skip if this matches a pattern (already generated)
        if pattern_lookup.contains_key(&fields) && pattern_lookup.get(&fields) != Some(&name.to_string()) {
            return;
        }
        if generated.contains(name) {
            return;
        }
        generated.insert(name.to_string());
        writeln!(output, "class {}:", name).unwrap();
        writeln!(output, "    \"\"\"Catalog tree node.\"\"\"").unwrap();
        writeln!(output, "    ").unwrap();
        writeln!(output, "    def __init__(self, client: BrkClientBase, base_path: str = ''):").unwrap();
        for field in &fields {
            let py_type = field_to_python_type(field, metadata);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern class.
                writeln!(
                    output,
                    "        self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                // Leaf with a shared index-accessor class.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    "        self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, accessor.name, field.name
                ).unwrap();
            } else {
                // Leaf with a unique index set.
                writeln!(
                    output,
                    "        self.{}: {} = MetricNode(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name), py_type, field.name
                ).unwrap();
            }
        }
        writeln!(output).unwrap();
        // Generate child classes
        for (child_name, child_node) in children {
            if let TreeNode::Branch(grandchildren) = child_node {
                let child_fields = get_node_fields(grandchildren, pattern_lookup);
                // Pattern children were already emitted as pattern classes.
                if !pattern_lookup.contains_key(&child_fields) {
                    let child_class_name = format!("{}_{}", name, to_pascal_case(child_name));
                    generate_tree_class(output, &child_class_name, child_node, pattern_lookup, metadata, generated);
                }
            }
        }
    }
}
/// Emit the `BrkClient` class: subclass of `BrkClientBase` that owns the
/// catalog tree plus one method per GET endpoint.
fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
    output.push_str("class BrkClient(BrkClientBase):\n");
    output.push_str("    \"\"\"Main BRK client with catalog tree and API methods.\"\"\"\n");
    output.push_str("    \n");
    output.push_str(
        "    def __init__(self, base_url: str = 'http://localhost:3000', timeout: float = 30.0):\n",
    );
    output.push_str("        super().__init__(base_url, timeout)\n");
    output.push_str("        self.tree = CatalogTree(self)\n");
    output.push('\n');
    // Append one method per GET endpoint.
    generate_api_methods(output, endpoints);
}
/// Generate one Python method on `BrkClient` per GET endpoint.
///
/// Required query parameters are always appended to the query string;
/// optional ones only when not `None`. Non-GET endpoints are skipped.
fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
    for endpoint in endpoints {
        if endpoint.method != "GET" {
            continue;
        }
        let method_name = endpoint_to_method_name(endpoint);
        // Fall back to `Any` when the spec declares no response type.
        let return_type = endpoint.response_type.as_deref().unwrap_or("Any");
        // Build method signature
        let params = build_method_params(endpoint);
        writeln!(output, "    def {}(self{}) -> {}:", method_name, params, return_type).unwrap();
        // Docstring
        if let Some(summary) = &endpoint.summary {
            writeln!(output, "        \"\"\"{}\"\"\"", summary).unwrap();
        }
        // Build path
        let path = build_path_template(&endpoint.path, &endpoint.path_params);
        if endpoint.query_params.is_empty() {
            writeln!(output, "        return self.get(f'{}')", path).unwrap();
        } else {
            // Collect query fragments, then join with '&' at runtime.
            writeln!(output, "        params = []").unwrap();
            for param in &endpoint.query_params {
                if param.required {
                    writeln!(output, "        params.append(f'{}={{{}}}')", param.name, param.name).unwrap();
                } else {
                    writeln!(output, "        if {} is not None: params.append(f'{}={{{}}}')", param.name, param.name, param.name).unwrap();
                }
            }
            writeln!(output, "        query = '&'.join(params)").unwrap();
            writeln!(output, "        return self.get(f'{}{{\"?\" + query if query else \"\"}}')", path).unwrap();
        }
        writeln!(output).unwrap();
    }
}
/// Derive the generated method name: the snake_cased operation id when the
/// spec provides one, otherwise `get_` + the path's non-parameter segments.
fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
    match &endpoint.operation_id {
        Some(op_id) => to_snake_case(op_id),
        None => {
            let joined = endpoint
                .path
                .split('/')
                .filter(|seg| !seg.is_empty() && !seg.starts_with('{'))
                .collect::<Vec<_>>()
                .join("_");
            format!("get_{}", joined)
        }
    }
}
fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
for param in &endpoint.path_params {
params.push(format!(", {}: str", param.name));
}
for param in &endpoint.query_params {
if param.required {
params.push(format!(", {}: str", param.name));
} else {
params.push(format!(", {}: Optional[str] = None", param.name));
}
}
params.join("")
}
/// Convert an OpenAPI path template (e.g. `/api/{height}`) into the body of
/// a Python f-string.
///
/// OpenAPI placeholders already use single-brace `{name}` syntax — exactly
/// what a Python f-string interpolates — and `build_method_params` names the
/// generated method parameters identically, so the template is usable
/// verbatim. The previous implementation substituted a double-braced
/// `{{name}}`, which a Python f-string renders as the *literal* text
/// `{name}` instead of interpolating the argument, so path parameters were
/// silently dropped from request URLs.
fn build_path_template(path: &str, _path_params: &[super::Parameter]) -> String {
    path.to_string()
}
/// Convert a JSON Schema fragment to a Python type hint.
///
/// Resolution order: explicit `type` (recursing into array `items`), then
/// `anyOf`/`oneOf` unions (collapsed to `Any`), then `$ref` (last path
/// segment is the type name); anything else falls back to `Any`.
pub fn schema_to_python_type(schema: &serde_json::Value) -> String {
    if let Some(ty) = schema.get("type").and_then(|v| v.as_str()) {
        return match ty {
            "null" => "None".to_string(),
            "boolean" => "bool".to_string(),
            "integer" => "int".to_string(),
            "number" => "float".to_string(),
            "string" => "str".to_string(),
            "array" => match schema.get("items") {
                Some(items) => format!("List[{}]", schema_to_python_type(items)),
                None => "List[Any]".to_string(),
            },
            "object" => "dict[str, Any]".to_string(),
            _ => "Any".to_string(),
        };
    }
    if schema.get("anyOf").is_some() || schema.get("oneOf").is_some() {
        return "Any".to_string();
    }
    match schema.get("$ref").and_then(|v| v.as_str()) {
        // `#/components/schemas/Foo` -> `Foo`.
        Some(reference) => reference.rsplit('/').next().unwrap_or("Any").to_string(),
        None => "Any".to_string(),
    }
}

View File

@@ -1,532 +0,0 @@
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
use std::fs;
use std::io;
use std::path::Path;
use brk_types::{Index, TreeNode};
use super::{ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, get_node_fields, to_pascal_case, to_snake_case};
/// Generate the Rust client (`client.rs`) from catalog metadata and the
/// extracted OpenAPI endpoints, writing it into `output_dir`.
pub fn generate_rust_client(
    metadata: &ClientMetadata,
    endpoints: &[Endpoint],
    output_dir: &Path,
) -> io::Result<()> {
    let mut source = String::new();

    // File header plus lints silenced in the generated code.
    source.push_str("// Auto-generated BRK Rust client\n");
    source.push_str("// Do not edit manually\n\n");
    source.push_str("#![allow(non_camel_case_types)]\n");
    source.push_str("#![allow(dead_code)]\n\n");

    // Emit sections in dependency order: imports, base client, generic
    // node, accessor structs, pattern structs, tree, then main client.
    generate_imports(&mut source);
    generate_base_client(&mut source);
    generate_metric_node(&mut source);
    generate_index_accessors(&mut source, &metadata.index_set_patterns);
    generate_pattern_structs(&mut source, &metadata.structural_patterns, metadata);
    generate_tree(&mut source, &metadata.catalog, metadata);
    generate_main_client(&mut source, endpoints);

    fs::write(output_dir.join("client.rs"), source)?;
    Ok(())
}
/// Emit the `use` statements the generated client needs (PhantomData for
/// MetricNode, serde deserialization, and the shared brk types).
fn generate_imports(output: &mut String) {
    output.push_str("use std::marker::PhantomData;\n");
    output.push_str("use serde::de::DeserializeOwned;\n");
    output.push_str("use brk_types::*;\n\n");
}
/// Generate the base Rust scaffolding: `BrkError`, the `Result` alias,
/// `BrkClientOptions`, and the reqwest-backed `BrkClientBase`.
///
/// NOTE: `{{` / `}}` escape to single braces, so the emitted Rust code has
/// ordinary braces; only `{{}}` inside emitted `format!` strings survive as
/// `{}` placeholders for the *generated* client to fill in.
fn generate_base_client(output: &mut String) {
    writeln!(
        output,
        r#"/// Error type for BRK client operations.
#[derive(Debug)]
pub struct BrkError {{
    pub message: String,
}}
impl std::fmt::Display for BrkError {{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {{
        write!(f, "{{}}", self.message)
    }}
}}
impl std::error::Error for BrkError {{}}
/// Result type for BRK client operations.
pub type Result<T> = std::result::Result<T, BrkError>;
/// Options for configuring the BRK client.
#[derive(Debug, Clone)]
pub struct BrkClientOptions {{
    pub base_url: String,
    pub timeout_ms: u64,
}}
impl Default for BrkClientOptions {{
    fn default() -> Self {{
        Self {{
            base_url: "http://localhost:3000".to_string(),
            timeout_ms: 30000,
        }}
    }}
}}
/// Base HTTP client for making requests.
#[derive(Debug, Clone)]
pub struct BrkClientBase {{
    base_url: String,
    client: reqwest::blocking::Client,
}}
impl BrkClientBase {{
    /// Create a new client with the given base URL.
    pub fn new(base_url: impl Into<String>) -> Result<Self> {{
        let base_url = base_url.into();
        let client = reqwest::blocking::Client::new();
        Ok(Self {{ base_url, client }})
    }}
    /// Create a new client with options.
    pub fn with_options(options: BrkClientOptions) -> Result<Self> {{
        let client = reqwest::blocking::Client::builder()
            .timeout(std::time::Duration::from_millis(options.timeout_ms))
            .build()
            .map_err(|e| BrkError {{ message: e.to_string() }})?;
        Ok(Self {{
            base_url: options.base_url,
            client,
        }})
    }}
    /// Make a GET request.
    pub fn get<T: DeserializeOwned>(&self, path: &str) -> Result<T> {{
        let url = format!("{{}}{{}}", self.base_url, path);
        self.client
            .get(&url)
            .send()
            .map_err(|e| BrkError {{ message: e.to_string() }})?
            .json()
            .map_err(|e| BrkError {{ message: e.to_string() }})
    }}
}}
"#
    )
    .unwrap();
}
/// Generate the generic `MetricNode<'a, T>` struct: a borrowed client plus a
/// REST path, with `get` / `get_range` fetch helpers.
fn generate_metric_node(output: &mut String) {
    writeln!(
        output,
        r#"/// A metric node that can fetch data for different indexes.
pub struct MetricNode<'a, T> {{
    client: &'a BrkClientBase,
    path: String,
    _marker: PhantomData<T>,
}}
impl<'a, T: DeserializeOwned> MetricNode<'a, T> {{
    pub fn new(client: &'a BrkClientBase, path: String) -> Self {{
        Self {{
            client,
            path,
            _marker: PhantomData,
        }}
    }}
    /// Fetch all data points for this metric.
    pub fn get(&self) -> Result<Vec<T>> {{
        self.client.get(&self.path)
    }}
    /// Fetch data points within a date range.
    pub fn get_range(&self, from: &str, to: &str) -> Result<Vec<T>> {{
        let path = format!("{{}}?from={{}}&to={{}}", self.path, from, to);
        self.client.get(&path)
    }}
}}
"#
    )
    .unwrap();
}
/// Generate index accessor structs for each unique set of indexes.
///
/// Each struct has one `by_<index>` MetricNode field per index, plus a
/// constructor routing every field to `<base_path>/<index>`.
fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "// Index accessor structs\n").unwrap();
    for pattern in patterns {
        writeln!(output, "/// Index accessor for metrics with {} indexes.", pattern.indexes.len()).unwrap();
        writeln!(output, "pub struct {}<'a, T> {{", pattern.name).unwrap();
        for index in &pattern.indexes {
            let field_name = index_to_field_name(index);
            writeln!(output, "    pub {}: MetricNode<'a, T>,", field_name).unwrap();
        }
        // PhantomData keeps the T parameter used even with zero indexes.
        writeln!(output, "    _marker: PhantomData<T>,").unwrap();
        writeln!(output, "}}\n").unwrap();
        // Generate impl block with constructor
        writeln!(output, "impl<'a, T: DeserializeOwned> {}<'a, T> {{", pattern.name).unwrap();
        writeln!(output, "    pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, "        Self {{").unwrap();
        for index in &pattern.indexes {
            let field_name = index_to_field_name(index);
            let path_segment = index.serialize_long();
            writeln!(
                output,
                "            {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                field_name, path_segment
            ).unwrap();
        }
        writeln!(output, "            _marker: PhantomData,").unwrap();
        writeln!(output, "        }}").unwrap();
        writeln!(output, "    }}").unwrap();
        writeln!(output, "}}\n").unwrap();
    }
}
/// Convert an Index to a snake_case field name
/// (e.g. `DateIndex` -> `by_date_index`).
fn index_to_field_name(index: &Index) -> String {
    let mut field = String::from("by_");
    field.push_str(&to_snake_case(index.serialize_long()));
    field
}
/// Generate pattern structs (those appearing 2+ times).
///
/// Each reusable structural pattern becomes a lifetime-parameterised struct
/// whose fields borrow the shared `BrkClientBase`, plus a `new` constructor
/// that wires every field to its REST sub-path.
fn generate_pattern_structs(output: &mut String, patterns: &[StructuralPattern], metadata: &ClientMetadata) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "// Reusable pattern structs\n").unwrap();
    for pattern in patterns {
        writeln!(output, "/// Pattern struct for repeated tree structure.").unwrap();
        writeln!(output, "pub struct {}<'a> {{", pattern.name).unwrap();
        for field in &pattern.fields {
            let field_name = to_snake_case(&field.name);
            let type_annotation = field_to_type_annotation(field, metadata);
            writeln!(output, "    pub {}: {},", field_name, type_annotation).unwrap();
        }
        writeln!(output, "}}\n").unwrap();
        // Generate impl block with constructor
        writeln!(output, "impl<'a> {}<'a> {{", pattern.name).unwrap();
        writeln!(output, "    pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, "        Self {{").unwrap();
        for field in &pattern.fields {
            let field_name = to_snake_case(&field.name);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern struct.
                writeln!(
                    output,
                    "            {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                // Leaf with a shared index-accessor struct.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    "            {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, accessor.name, field.name
                ).unwrap();
            } else {
                // Leaf with a unique index set.
                writeln!(
                    output,
                    "            {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                    field_name, field.name
                ).unwrap();
            }
        }
        writeln!(output, "        }}").unwrap();
        writeln!(output, "    }}").unwrap();
        writeln!(output, "}}\n").unwrap();
    }
}
/// Compute the full Rust type annotation for a pattern field: the pattern
/// struct itself, an accessor struct parameterised by the value type, or a
/// bare `MetricNode` parameterised by the value type.
fn field_to_type_annotation(field: &PatternField, metadata: &ClientMetadata) -> String {
    if metadata.is_pattern_type(&field.rust_type) {
        return format!("{}<'a>", field.rust_type);
    }
    match metadata.find_index_set_pattern(&field.indexes) {
        // Leaf with a reusable accessor pattern.
        Some(accessor) => format!("{}<'a, {}>", accessor.name, field.rust_type),
        // Leaf with unique index set - use MetricNode directly.
        None => format!("MetricNode<'a, {}>", field.rust_type),
    }
}
/// True when the field's index set maps onto a shared index accessor struct.
fn field_uses_accessor(field: &PatternField, metadata: &ClientMetadata) -> bool {
    matches!(metadata.find_index_set_pattern(&field.indexes), Some(_))
}
/// Emit the catalog tree structs, starting the recursion at the root
/// `CatalogTree` struct and tracking already-emitted struct names.
fn generate_tree(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
    writeln!(output, "// Catalog tree\n").unwrap();
    let lookup = metadata.pattern_lookup();
    let mut emitted = HashSet::new();
    generate_tree_node(output, "CatalogTree", catalog, &lookup, metadata, &mut emitted);
}
/// Recursively generate tree nodes.
///
/// Builds the field signature for `node`; when the signature matches a
/// reusable pattern the struct is skipped here (it is emitted by
/// `generate_pattern_structs`), otherwise the struct and its constructor are
/// written and branch children are visited recursively.
fn generate_tree_node(
    output: &mut String,
    name: &str,
    node: &TreeNode,
    pattern_lookup: &std::collections::HashMap<Vec<PatternField>, String>,
    metadata: &ClientMetadata,
    generated: &mut HashSet<String>,
) {
    // Leaf nodes produce no struct of their own.
    if let TreeNode::Branch(children) = node {
        // Build the signature for this node
        let mut fields: Vec<PatternField> = children
            .iter()
            .map(|(child_name, child_node)| {
                let (rust_type, json_type, indexes) = match child_node {
                    TreeNode::Leaf(leaf) => (
                        leaf.value_type().to_string(),
                        leaf.schema.get("type").and_then(|v| v.as_str()).unwrap_or("object").to_string(),
                        leaf.indexes().clone(),
                    ),
                    TreeNode::Branch(grandchildren) => {
                        // Get pattern name for this child
                        let child_fields = get_node_fields(grandchildren, pattern_lookup);
                        let pattern_name = pattern_lookup
                            .get(&child_fields)
                            .cloned()
                            .unwrap_or_else(|| format!("{}_{}", name, to_pascal_case(child_name)));
                        // Branches carry no index set of their own.
                        (pattern_name.clone(), pattern_name, std::collections::BTreeSet::new())
                    }
                };
                PatternField {
                    name: child_name.clone(),
                    rust_type,
                    json_type,
                    indexes,
                }
            })
            .collect();
        // Sort so the signature is order-independent and matches lookup keys.
        fields.sort_by(|a, b| a.name.cmp(&b.name));
        // Check if this matches a reusable pattern
        if let Some(pattern_name) = pattern_lookup.get(&fields) {
            // This node matches a pattern that will be generated separately
            // Don't generate it here, it's already in pattern_structs
            if pattern_name != name {
                return;
            }
        }
        // Generate this struct if not already generated
        if generated.contains(name) {
            return;
        }
        generated.insert(name.to_string());
        writeln!(output, "/// Catalog tree node.").unwrap();
        writeln!(output, "pub struct {}<'a> {{", name).unwrap();
        for field in &fields {
            let field_name = to_snake_case(&field.name);
            let type_annotation = field_to_type_annotation(field, metadata);
            writeln!(output, "    pub {}: {},", field_name, type_annotation).unwrap();
        }
        writeln!(output, "}}\n").unwrap();
        // Generate impl block
        writeln!(output, "impl<'a> {}<'a> {{", name).unwrap();
        writeln!(output, "    pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, "        Self {{").unwrap();
        for field in &fields {
            let field_name = to_snake_case(&field.name);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern struct.
                writeln!(
                    output,
                    "            {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                // Leaf with a shared index-accessor struct.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    "            {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, accessor.name, field.name
                ).unwrap();
            } else {
                // Leaf with a unique index set.
                writeln!(
                    output,
                    "            {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                    field_name, field.name
                ).unwrap();
            }
        }
        writeln!(output, "        }}").unwrap();
        writeln!(output, "    }}").unwrap();
        writeln!(output, "}}\n").unwrap();
        // Recursively generate child nodes that aren't patterns
        for (child_name, child_node) in children {
            if let TreeNode::Branch(grandchildren) = child_node {
                let child_fields = get_node_fields(grandchildren, pattern_lookup);
                if !pattern_lookup.contains_key(&child_fields) {
                    let child_struct_name = format!("{}_{}", name, to_pascal_case(child_name));
                    generate_tree_node(output, &child_struct_name, child_node, pattern_lookup, metadata, generated);
                }
            }
        }
    }
}
/// Generate the main client struct.
///
/// Opens the `BrkClient` struct and its impl block (constructors plus the
/// `tree()` accessor), appends the per-endpoint API methods, then closes the
/// impl block — so `generate_api_methods` must emit method bodies only.
fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
    writeln!(
        output,
        r#"/// Main BRK client with catalog tree and API methods.
pub struct BrkClient {{
    base: BrkClientBase,
}}
impl BrkClient {{
    /// Create a new client with the given base URL.
    pub fn new(base_url: impl Into<String>) -> Result<Self> {{
        Ok(Self {{
            base: BrkClientBase::new(base_url)?,
        }})
    }}
    /// Create a new client with options.
    pub fn with_options(options: BrkClientOptions) -> Result<Self> {{
        Ok(Self {{
            base: BrkClientBase::with_options(options)?,
        }})
    }}
    /// Get the catalog tree for navigating metrics.
    pub fn tree(&self) -> CatalogTree<'_> {{
        CatalogTree::new(&self.base, "")
    }}
"#
    )
    .unwrap();
    // Generate API methods
    generate_api_methods(output, endpoints);
    // Close the impl block opened in the raw string above.
    writeln!(output, "}}").unwrap();
}
/// Generate API methods from OpenAPI endpoints.
///
/// Emits one `pub fn` per GET endpoint inside the `BrkClient` impl block
/// opened by `generate_main_client`. Required query parameters are always
/// appended; optional ones only when `Some`. Non-GET endpoints are skipped.
fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
    for endpoint in endpoints {
        if endpoint.method != "GET" {
            continue;
        }
        let method_name = endpoint_to_method_name(endpoint);
        // Fall back to dynamic JSON when the spec declares no response type.
        let return_type = endpoint.response_type.as_deref().unwrap_or("serde_json::Value");
        // Build doc comment
        writeln!(output, "    /// {}", endpoint.summary.as_deref().unwrap_or(&method_name)).unwrap();
        // Build method signature
        let params = build_method_params(endpoint);
        writeln!(output, "    pub fn {}(&self{}) -> Result<{}> {{", method_name, params, return_type).unwrap();
        // Build path
        let path = build_path_template(&endpoint.path, &endpoint.path_params);
        if endpoint.query_params.is_empty() {
            writeln!(output, "        self.base.get(&format!(\"{}\"))", path).unwrap();
        } else {
            // Collect query fragments, then join with '&' at runtime.
            writeln!(output, "        let mut query = Vec::new();").unwrap();
            for param in &endpoint.query_params {
                if param.required {
                    writeln!(output, "        query.push(format!(\"{}={{}}\", {}));", param.name, param.name).unwrap();
                } else {
                    writeln!(output, "        if let Some(v) = {} {{ query.push(format!(\"{}={{}}\", v)); }}", param.name, param.name).unwrap();
                }
            }
            writeln!(output, "        let query_str = if query.is_empty() {{ String::new() }} else {{ format!(\"?{{}}\", query.join(\"&\")) }};").unwrap();
            writeln!(output, "        self.base.get(&format!(\"{}{{}}\", query_str))", path).unwrap();
        }
        writeln!(output, "    }}\n").unwrap();
    }
}
/// Derive the generated method name: the snake_cased operation id when the
/// spec provides one, otherwise `get_` + the path's non-parameter segments.
fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
    match &endpoint.operation_id {
        Some(op_id) => to_snake_case(op_id),
        None => {
            let joined = endpoint
                .path
                .split('/')
                .filter(|seg| !seg.is_empty() && !seg.starts_with('{'))
                .collect::<Vec<_>>()
                .join("_");
            format!("get_{}", joined)
        }
    }
}
fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
for param in &endpoint.path_params {
params.push(format!(", {}: &str", param.name));
}
for param in &endpoint.query_params {
if param.required {
params.push(format!(", {}: &str", param.name));
} else {
params.push(format!(", {}: Option<&str>", param.name));
}
}
params.join("")
}
/// Convert an OpenAPI path template into a Rust `format!` template.
///
/// OpenAPI placeholders (`{name}`) are already valid inline format
/// arguments for the generated `format!("…")` call, and
/// `build_method_params` introduces method parameters with the same names,
/// so the template is usable verbatim. The previous implementation replaced
/// each `{name}` with an identical `{name}` — a per-parameter allocation
/// and scan with no observable effect — so the loop has been removed.
fn build_path_template(path: &str, _path_params: &[super::Parameter]) -> String {
    path.to_string()
}

View File

@@ -5,13 +5,18 @@ use std::io;
use std::path::Path;
use brk_types::{Index, TreeNode};
use serde_json::Value;
use super::{ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, get_node_fields, to_camel_case, to_pascal_case};
use crate::{
ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, TypeSchemas,
get_node_fields, to_camel_case, to_pascal_case,
};
/// Generate JavaScript + JSDoc client from metadata and OpenAPI endpoints
pub fn generate_javascript_client(
metadata: &ClientMetadata,
endpoints: &[Endpoint],
schemas: &TypeSchemas,
output_dir: &Path,
) -> io::Result<()> {
let mut output = String::new();
@@ -20,6 +25,9 @@ pub fn generate_javascript_client(
writeln!(output, "// Auto-generated BRK JavaScript client").unwrap();
writeln!(output, "// Do not edit manually\n").unwrap();
// Generate type definitions from OpenAPI schemas
generate_type_definitions(&mut output, schemas);
// Generate the base client class
generate_base_client(&mut output);
@@ -40,6 +48,95 @@ pub fn generate_javascript_client(
Ok(())
}
/// Generate JSDoc type definitions from OpenAPI schemas.
///
/// Primitive aliases and other non-object schemas become one-line
/// `@typedef` comments; object schemas with `properties` become a
/// multi-line `@typedef {Object}` block with one `@property` per field,
/// marked optional (trailing `=`) unless listed in the schema's `required`
/// array.
fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
    if schemas.is_empty() {
        return;
    }
    writeln!(output, "// Type definitions\n").unwrap();
    for (name, schema) in schemas {
        let js_type = schema_to_js_type(schema);
        if is_primitive_alias(schema) {
            // Simple type alias: @typedef {number} Height
            writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap();
        } else if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) {
            // Hoisted: `required` belongs to the schema, not to individual
            // properties, so compute the name list once per schema instead
            // of re-walking the JSON array for every property.
            let required: Vec<&str> = schema
                .get("required")
                .and_then(|r| r.as_array())
                .map(|arr| arr.iter().filter_map(|v| v.as_str()).collect())
                .unwrap_or_default();
            // Object type with properties
            writeln!(output, "/**").unwrap();
            writeln!(output, " * @typedef {{Object}} {}", name).unwrap();
            for (prop_name, prop_schema) in props {
                let prop_type = schema_to_js_type(prop_schema);
                // JSDoc marks optional properties with a trailing `=`.
                let optional = if required.contains(&prop_name.as_str()) { "" } else { "=" };
                writeln!(
                    output,
                    " * @property {{{}{}}} {}",
                    prop_type, optional, prop_name
                )
                .unwrap();
            }
            writeln!(output, " */").unwrap();
        } else {
            // Other schemas - just typedef
            writeln!(output, "/** @typedef {{{}}} {} */", js_type, name).unwrap();
        }
    }
    writeln!(output).unwrap();
}
/// A schema is a "primitive alias" (e.g. `Height = number`) when it carries
/// none of the structural keys: `properties`, `items`, `anyOf`, `oneOf`.
fn is_primitive_alias(schema: &Value) -> bool {
    ["properties", "items", "anyOf", "oneOf"]
        .iter()
        .all(|key| schema.get(key).is_none())
}
/// Convert a JSON Schema fragment to its JSDoc type expression.
///
/// Resolution order: `$ref` (last path segment is the type name), then an
/// explicit `type` (recursing into array `items`), then `anyOf`/`oneOf`
/// unions; anything else falls back to `*`.
fn schema_to_js_type(schema: &Value) -> String {
    // `#/components/schemas/Foo` -> `Foo`.
    if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) {
        return ref_path.rsplit('/').next().unwrap_or("*").to_string();
    }
    if let Some(ty) = schema.get("type").and_then(|t| t.as_str()) {
        return match ty {
            "integer" | "number" => "number".to_string(),
            "boolean" => "boolean".to_string(),
            "string" => "string".to_string(),
            "null" => "null".to_string(),
            "array" => {
                let item = schema
                    .get("items")
                    .map(schema_to_js_type)
                    .unwrap_or_else(|| "*".to_string());
                format!("{}[]", item)
            }
            "object" => "Object".to_string(),
            _ => "*".to_string(),
        };
    }
    if let Some(variants) = schema
        .get("anyOf")
        .or_else(|| schema.get("oneOf"))
        .and_then(|v| v.as_array())
    {
        let union: Vec<String> = variants.iter().map(schema_to_js_type).collect();
        return format!("({})", union.join("|"));
    }
    "*".to_string()
}
/// Generate the base BrkClient class with HTTP functionality
fn generate_base_client(output: &mut String) {
writeln!(
@@ -186,18 +283,28 @@ fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) {
writeln!(output, " * @param {{string}} basePath").unwrap();
writeln!(output, " * @returns {{{}<T>}}", pattern.name).unwrap();
writeln!(output, " */").unwrap();
writeln!(output, "function create{}(client, basePath) {{", pattern.name).unwrap();
writeln!(
output,
"function create{}(client, basePath) {{",
pattern.name
)
.unwrap();
writeln!(output, " return {{").unwrap();
for (i, index) in pattern.indexes.iter().enumerate() {
let field_name = index_to_camel_case(index);
let path_segment = index.serialize_long();
let comma = if i < pattern.indexes.len() - 1 { "," } else { "" };
let comma = if i < pattern.indexes.len() - 1 {
","
} else {
""
};
writeln!(
output,
" {}: new MetricNode(client, `${{basePath}}/{}`){}",
field_name, path_segment, comma
).unwrap();
)
.unwrap();
}
writeln!(output, " }};").unwrap();
@@ -211,7 +318,11 @@ fn index_to_camel_case(index: &Index) -> String {
}
/// Generate structural pattern factory functions
fn generate_structural_patterns(output: &mut String, patterns: &[StructuralPattern], metadata: &ClientMetadata) {
fn generate_structural_patterns(
output: &mut String,
patterns: &[StructuralPattern],
metadata: &ClientMetadata,
) {
if patterns.is_empty() {
return;
}
@@ -224,7 +335,13 @@ fn generate_structural_patterns(output: &mut String, patterns: &[StructuralPatte
writeln!(output, " * @typedef {{Object}} {}", pattern.name).unwrap();
for field in &pattern.fields {
let js_type = field_to_js_type(field, metadata);
writeln!(output, " * @property {{{}}} {}", js_type, to_camel_case(&field.name)).unwrap();
writeln!(
output,
" * @property {{{}}} {}",
js_type,
to_camel_case(&field.name)
)
.unwrap();
}
writeln!(output, " */\n").unwrap();
@@ -235,30 +352,50 @@ fn generate_structural_patterns(output: &mut String, patterns: &[StructuralPatte
writeln!(output, " * @param {{string}} basePath").unwrap();
writeln!(output, " * @returns {{{}}}", pattern.name).unwrap();
writeln!(output, " */").unwrap();
writeln!(output, "function create{}(client, basePath) {{", pattern.name).unwrap();
writeln!(
output,
"function create{}(client, basePath) {{",
pattern.name
)
.unwrap();
writeln!(output, " return {{").unwrap();
for (i, field) in pattern.fields.iter().enumerate() {
let comma = if i < pattern.fields.len() - 1 { "," } else { "" };
let comma = if i < pattern.fields.len() - 1 {
","
} else {
""
};
if metadata.is_pattern_type(&field.rust_type) {
writeln!(
output,
" {}: create{}(client, `${{basePath}}/{}`){}",
to_camel_case(&field.name), field.rust_type, field.name, comma
).unwrap();
to_camel_case(&field.name),
field.rust_type,
field.name,
comma
)
.unwrap();
} else if field_uses_accessor(field, metadata) {
let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
writeln!(
output,
" {}: create{}(client, `${{basePath}}/{}`){}",
to_camel_case(&field.name), accessor.name, field.name, comma
).unwrap();
to_camel_case(&field.name),
accessor.name,
field.name,
comma
)
.unwrap();
} else {
writeln!(
output,
" {}: new MetricNode(client, `${{basePath}}/{}`){}",
to_camel_case(&field.name), field.name, comma
).unwrap();
to_camel_case(&field.name),
field.name,
comma
)
.unwrap();
}
}
@@ -273,13 +410,11 @@ fn field_to_js_type(field: &PatternField, metadata: &ClientMetadata) -> String {
// Pattern type - use pattern name directly
field.rust_type.clone()
} else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) {
// Leaf with a reusable accessor pattern
let js_type = json_type_to_js(&field.json_type);
format!("{}<{}>", accessor.name, js_type)
// Leaf with accessor - use rust_type as the generic (e.g., DateIndexAccessor<Height>)
format!("{}<{}>", accessor.name, field.rust_type)
} else {
// Leaf with unique index set - use MetricNode directly
let js_type = json_type_to_js(&field.json_type);
format!("MetricNode<{}>", js_type)
// Leaf - use rust_type as the generic (e.g., MetricNode<Height>)
format!("MetricNode<{}>", field.rust_type)
}
}
@@ -288,29 +423,20 @@ fn field_uses_accessor(field: &PatternField, metadata: &ClientMetadata) -> bool
metadata.find_index_set_pattern(&field.indexes).is_some()
}
/// Convert JSON Schema type to JSDoc type
fn json_type_to_js(json_type: &str) -> &str {
match json_type {
"integer" | "number" => "number",
"boolean" => "boolean",
"string" => "string",
"array" => "Array",
"object" => "Object",
_ => "*",
}
}
/// Generate tree typedefs
fn generate_tree_typedefs(
output: &mut String,
catalog: &TreeNode,
metadata: &ClientMetadata,
) {
fn generate_tree_typedefs(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
writeln!(output, "// Catalog tree typedefs\n").unwrap();
let pattern_lookup = metadata.pattern_lookup();
let mut generated = HashSet::new();
generate_tree_typedef(output, "CatalogTree", catalog, &pattern_lookup, metadata, &mut generated);
generate_tree_typedef(
output,
"CatalogTree",
catalog,
&pattern_lookup,
metadata,
&mut generated,
);
}
/// Recursively generate tree typedefs
@@ -327,7 +453,9 @@ fn generate_tree_typedef(
let fields = get_node_fields(children, pattern_lookup);
// Skip if this matches a pattern (already generated)
if pattern_lookup.contains_key(&fields) && pattern_lookup.get(&fields) != Some(&name.to_string()) {
if pattern_lookup.contains_key(&fields)
&& pattern_lookup.get(&fields) != Some(&name.to_string())
{
return;
}
@@ -341,7 +469,13 @@ fn generate_tree_typedef(
for field in &fields {
let js_type = field_to_js_type(field, metadata);
writeln!(output, " * @property {{{}}} {}", js_type, to_camel_case(&field.name)).unwrap();
writeln!(
output,
" * @property {{{}}} {}",
js_type,
to_camel_case(&field.name)
)
.unwrap();
}
writeln!(output, " */\n").unwrap();
@@ -352,7 +486,14 @@ fn generate_tree_typedef(
let child_fields = get_node_fields(grandchildren, pattern_lookup);
if !pattern_lookup.contains_key(&child_fields) {
let child_type_name = format!("{}_{}", name, to_pascal_case(child_name));
generate_tree_typedef(output, &child_type_name, child_node, pattern_lookup, metadata, generated);
generate_tree_typedef(
output,
&child_type_name,
child_node,
pattern_lookup,
metadata,
generated,
);
}
}
}
@@ -369,7 +510,11 @@ fn generate_main_client(
let pattern_lookup = metadata.pattern_lookup();
writeln!(output, "/**").unwrap();
writeln!(output, " * Main BRK client with catalog tree and API methods").unwrap();
writeln!(
output,
" * Main BRK client with catalog tree and API methods"
)
.unwrap();
writeln!(output, " * @extends BrkClientBase").unwrap();
writeln!(output, " */").unwrap();
writeln!(output, "class BrkClient extends BrkClientBase {{").unwrap();
@@ -400,7 +545,11 @@ fn generate_main_client(
writeln!(output, "}}\n").unwrap();
// Export
writeln!(output, "export {{ BrkClient, BrkClientBase, BrkError, MetricNode }};").unwrap();
writeln!(
output,
"export {{ BrkClient, BrkClientBase, BrkError, MetricNode }};"
)
.unwrap();
}
/// Generate tree initializer
@@ -432,13 +581,15 @@ fn generate_tree_initializer(
output,
"{}{}: create{}(this, '{}'){}",
indent_str, field_name, accessor.name, child_path, comma
).unwrap();
)
.unwrap();
} else {
writeln!(
output,
"{}{}: new MetricNode(this, '{}'){}",
indent_str, field_name, child_path, comma
).unwrap();
)
.unwrap();
}
}
TreeNode::Branch(grandchildren) => {
@@ -448,10 +599,18 @@ fn generate_tree_initializer(
output,
"{}{}: create{}(this, '{}'){}",
indent_str, field_name, pattern_name, child_path, comma
).unwrap();
)
.unwrap();
} else {
writeln!(output, "{}{}: {{", indent_str, field_name).unwrap();
generate_tree_initializer(output, child_node, &child_path, indent + 1, pattern_lookup, metadata);
generate_tree_initializer(
output,
child_node,
&child_path,
indent + 1,
pattern_lookup,
metadata,
);
writeln!(output, "{}}}{}", indent_str, comma).unwrap();
}
}
@@ -477,12 +636,22 @@ fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
for param in &endpoint.path_params {
let desc = param.description.as_deref().unwrap_or("");
writeln!(output, " * @param {{{}}} {} {}", param.param_type, param.name, desc).unwrap();
writeln!(
output,
" * @param {{{}}} {} {}",
param.param_type, param.name, desc
)
.unwrap();
}
for param in &endpoint.query_params {
let optional = if param.required { "" } else { "=" };
let desc = param.description.as_deref().unwrap_or("");
writeln!(output, " * @param {{{}{}}} [{}] {}", param.param_type, optional, param.name, desc).unwrap();
writeln!(
output,
" * @param {{{}{}}} [{}] {}",
param.param_type, optional, param.name, desc
)
.unwrap();
}
writeln!(output, " * @returns {{Promise<{}>}}", return_type).unwrap();
@@ -499,13 +668,28 @@ fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
writeln!(output, " const params = new URLSearchParams();").unwrap();
for param in &endpoint.query_params {
if param.required {
writeln!(output, " params.set('{}', String({}));", param.name, param.name).unwrap();
writeln!(
output,
" params.set('{}', String({}));",
param.name, param.name
)
.unwrap();
} else {
writeln!(output, " if ({} !== undefined) params.set('{}', String({}));", param.name, param.name, param.name).unwrap();
writeln!(
output,
" if ({} !== undefined) params.set('{}', String({}));",
param.name, param.name, param.name
)
.unwrap();
}
}
writeln!(output, " const query = params.toString();").unwrap();
writeln!(output, " return this.get(`{}${{query ? '?' + query : ''}}`);", path).unwrap();
writeln!(
output,
" return this.get(`{}${{query ? '?' + query : ''}}`);",
path
)
.unwrap();
}
writeln!(output, " }}\n").unwrap();
@@ -516,7 +700,11 @@ fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
if let Some(op_id) = &endpoint.operation_id {
return to_camel_case(op_id);
}
let parts: Vec<&str> = endpoint.path.split('/').filter(|s| !s.is_empty() && !s.starts_with('{')).collect();
let parts: Vec<&str> = endpoint
.path
.split('/')
.filter(|s| !s.is_empty() && !s.starts_with('{'))
.collect();
format!("get{}", to_pascal_case(&parts.join("_")))
}
@@ -540,4 +728,3 @@ fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String {
}
result
}

View File

@@ -7,7 +7,7 @@ use std::{
use brk_query::Query;
use brk_types::{Index, pools};
use super::VERSION;
use crate::VERSION;
const AUTO_GENERATED_DISCLAIMER: &str = "//
// File auto-generated, any modifications will be overwritten

View File

@@ -1,10 +1,46 @@
mod js;
mod generator;
// tree.rs is kept for reference but not compiled
// mod tree;
mod javascript;
mod openapi;
mod python;
mod rust;
mod types;
pub use javascript::*;
pub use js::*;
pub use generator::*;
pub use openapi::*;
pub use python::*;
pub use rust::*;
pub use types::*;
use brk_query::Vecs;
use std::io;
use std::path::Path;
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Generate all client libraries from the query vecs and OpenAPI JSON
pub fn generate_clients(vecs: &Vecs, openapi_json: &str, output_dir: &Path) -> io::Result<()> {
let metadata = ClientMetadata::from_vecs(vecs);
// Parse OpenAPI spec
let spec = parse_openapi_json(openapi_json)?;
let endpoints = extract_endpoints(&spec);
let schemas = extract_schemas(openapi_json);
// Generate Rust client (uses real brk_types, no schema conversion needed)
let rust_path = output_dir.join("rust");
std::fs::create_dir_all(&rust_path)?;
generate_rust_client(&metadata, &endpoints, &rust_path)?;
// Generate JavaScript client (needs schemas for type definitions)
let js_path = output_dir.join("javascript");
std::fs::create_dir_all(&js_path)?;
generate_javascript_client(&metadata, &endpoints, &schemas, &js_path)?;
// Generate Python client (needs schemas for type definitions)
let python_path = output_dir.join("python");
std::fs::create_dir_all(&python_path)?;
generate_python_client(&metadata, &endpoints, &schemas, &python_path)?;
Ok(())
}

View File

@@ -1,9 +1,13 @@
use std::collections::BTreeMap;
use std::io;
use oas3::spec::{ObjectOrReference, Operation, ParameterIn, PathItem, Schema, SchemaTypeSet};
use oas3::Spec;
use oas3::spec::{ObjectOrReference, Operation, ParameterIn, PathItem, Schema, SchemaTypeSet};
use serde_json::Value;
/// Type schema extracted from OpenAPI components
pub type TypeSchemas = BTreeMap<String, Value>;
/// Endpoint information extracted from OpenAPI spec
#[derive(Debug, Clone)]
pub struct Endpoint {
@@ -45,12 +49,31 @@ pub fn parse_openapi_json(json: &str) -> io::Result<Spec> {
// Clean up for oas3 compatibility
clean_for_oas3(&mut value);
let cleaned_json = serde_json::to_string(&value)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
let cleaned_json =
serde_json::to_string(&value).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
oas3::from_json(&cleaned_json).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
/// Extract type schemas from OpenAPI JSON
pub fn extract_schemas(json: &str) -> TypeSchemas {
let Ok(value) = serde_json::from_str::<Value>(json) else {
return BTreeMap::new();
};
value
.get("components")
.and_then(|c| c.get("schemas"))
.and_then(|s| s.as_object())
.map(|schemas| {
schemas
.iter()
.map(|(name, schema)| (name.clone(), schema.clone()))
.collect()
})
.unwrap_or_default()
}
/// Clean up OpenAPI spec for oas3 compatibility.
/// - Removes unsupported siblings from $ref objects (oas3 only supports summary and description)
/// - Converts boolean schemas to object schemas (oas3 doesn't handle `"schema": true`)
@@ -62,10 +85,10 @@ fn clean_for_oas3(value: &mut Value) {
map.retain(|k, _| k == "$ref" || k == "summary" || k == "description");
} else {
// Convert boolean schemas to empty object schemas
if let Some(schema) = map.get_mut("schema") {
if schema.is_boolean() {
*schema = Value::Object(serde_json::Map::new());
}
if let Some(schema) = map.get_mut("schema")
&& schema.is_boolean()
{
*schema = Value::Object(serde_json::Map::new());
}
for v in map.values_mut() {
clean_for_oas3(v);
@@ -130,7 +153,10 @@ fn extract_endpoint(path: &str, method: &str, operation: &Operation) -> Option<E
method: method.to_string(),
path: path.to_string(),
operation_id: operation.operation_id.clone(),
summary: operation.summary.clone().or_else(|| operation.description.clone()),
summary: operation
.summary
.clone()
.or_else(|| operation.description.clone()),
tags: operation.tags.clone(),
path_params,
query_params,

View File

@@ -1 +1,552 @@
// TODO ?
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
use std::fs;
use std::io;
use std::path::Path;
use brk_types::{Index, TreeNode};
use serde_json::Value;
use crate::{
ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, TypeSchemas,
get_node_fields, to_pascal_case, to_snake_case,
};
/// Generate Python client from metadata and OpenAPI endpoints.
///
/// Builds the entire client as one string and writes it to
/// `<output_dir>/client.py`, appending sections in order: header/imports,
/// OpenAPI type definitions, HTTP base class, `MetricNode`, index
/// accessors, structural patterns, catalog tree classes, and the main
/// `BrkClient`.
///
/// # Errors
/// Returns an `io::Error` if writing `client.py` fails. (`writeln!` into a
/// `String` cannot fail, hence the `.unwrap()`s.)
pub fn generate_python_client(
    metadata: &ClientMetadata,
    endpoints: &[Endpoint],
    schemas: &TypeSchemas,
    output_dir: &Path,
) -> io::Result<()> {
    let mut output = String::new();
    // Header
    writeln!(output, "# Auto-generated BRK Python client").unwrap();
    writeln!(output, "# Do not edit manually\n").unwrap();
    writeln!(output, "from __future__ import annotations").unwrap();
    writeln!(
        output,
        "from typing import TypeVar, Generic, Any, Optional, List, TypedDict"
    )
    .unwrap();
    writeln!(output, "import httpx\n").unwrap();
    // Type variable for generic MetricNode
    writeln!(output, "T = TypeVar('T')\n").unwrap();
    // Generate type definitions from OpenAPI schemas
    generate_type_definitions(&mut output, schemas);
    // Generate base client class
    generate_base_client(&mut output);
    // Generate MetricNode class
    generate_metric_node(&mut output);
    // Generate index accessor classes
    generate_index_accessors(&mut output, &metadata.index_set_patterns);
    // Generate structural pattern classes
    generate_structural_patterns(&mut output, &metadata.structural_patterns, metadata);
    // Generate tree classes
    generate_tree_classes(&mut output, &metadata.catalog, metadata);
    // Generate main client with tree and API methods
    generate_main_client(&mut output, endpoints);
    fs::write(output_dir.join("client.py"), output)?;
    Ok(())
}
/// Generate Python type definitions from OpenAPI schemas.
///
/// Object schemas (those with `properties`) become `TypedDict` classes;
/// everything else becomes a module-level alias via `schema_to_python_type`.
/// Emits nothing when there are no schemas.
///
/// NOTE(review): aliases are evaluated eagerly at import time, so an alias
/// referring to a TypedDict defined later in the map's (sorted) order would
/// raise NameError in the generated file — confirm against real specs.
fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
    if schemas.is_empty() {
        return;
    }
    writeln!(output, "# Type definitions\n").unwrap();
    for (name, schema) in schemas {
        if let Some(props) = schema.get("properties").and_then(|p| p.as_object()) {
            // Object type -> TypedDict
            writeln!(output, "class {}(TypedDict):", name).unwrap();
            if props.is_empty() {
                // Fix: a class with an empty body is a Python syntax error,
                // so a property-less object schema must still emit `pass`.
                writeln!(output, " pass").unwrap();
            }
            for (prop_name, prop_schema) in props {
                let prop_type = schema_to_python_type(prop_schema);
                writeln!(output, " {}: {}", prop_name, prop_type).unwrap();
            }
            writeln!(output).unwrap();
        } else {
            // Primitive type alias
            let py_type = schema_to_python_type(schema);
            writeln!(output, "{} = {}", name, py_type).unwrap();
        }
    }
    writeln!(output).unwrap();
}
/// Convert JSON Schema to Python type.
///
/// Resolution order matters: `$ref` wins, then an explicit `type`, then
/// `anyOf`/`oneOf` unions; anything else falls back to `Any`.
fn schema_to_python_type(schema: &Value) -> String {
    // Handle $ref: use the last path segment as the type name
    // (e.g. "#/components/schemas/Height" -> "Height").
    if let Some(ref_path) = schema.get("$ref").and_then(|r| r.as_str()) {
        return ref_path.rsplit('/').next().unwrap_or("Any").to_string();
    }
    // Handle type field
    if let Some(ty) = schema.get("type").and_then(|t| t.as_str()) {
        return match ty {
            "integer" => "int".to_string(),
            "number" => "float".to_string(),
            "boolean" => "bool".to_string(),
            "string" => "str".to_string(),
            "null" => "None".to_string(),
            "array" => {
                // Recurse into the item schema; arrays without `items` become List[Any].
                let item_type = schema
                    .get("items")
                    .map(schema_to_python_type)
                    .unwrap_or_else(|| "Any".to_string());
                format!("List[{}]", item_type)
            }
            "object" => "dict".to_string(),
            _ => "Any".to_string(),
        };
    }
    // Handle anyOf/oneOf as a PEP 604 union.
    // NOTE(review): `X | Y` in a runtime alias needs Python 3.10+ — confirm
    // the minimum supported Python version of the generated client.
    if let Some(variants) = schema
        .get("anyOf")
        .or_else(|| schema.get("oneOf"))
        .and_then(|v| v.as_array())
    {
        let types: Vec<String> = variants.iter().map(schema_to_python_type).collect();
        return types.join(" | ");
    }
    // Unrecognized schema shape.
    "Any".to_string()
}
/// Generate the base Python client scaffolding: `BrkError` plus
/// `BrkClientBase`, an `httpx`-backed blocking HTTP client usable as a
/// context manager (`__enter__`/`__exit__` close the underlying client).
///
/// The body is a single raw-string template; `{{`/`}}` are Rust format
/// escapes that render as literal `{`/`}` in the emitted Python f-strings.
///
/// NOTE(review): the Python indentation inside this template appears
/// collapsed in this snapshot — verify the emitted file is valid Python.
fn generate_base_client(output: &mut String) {
    writeln!(
        output,
        r#"class BrkError(Exception):
"""Custom error class for BRK client errors."""
def __init__(self, message: str, status: Optional[int] = None):
super().__init__(message)
self.status = status
class BrkClientBase:
"""Base HTTP client for making requests."""
def __init__(self, base_url: str, timeout: float = 30.0):
self.base_url = base_url
self.timeout = timeout
self._client = httpx.Client(timeout=timeout)
def get(self, path: str) -> Any:
"""Make a GET request."""
try:
response = self._client.get(f"{{self.base_url}}{{path}}")
response.raise_for_status()
return response.json()
except httpx.HTTPStatusError as e:
raise BrkError(f"HTTP error: {{e.response.status_code}}", e.response.status_code)
except httpx.RequestError as e:
raise BrkError(str(e))
def close(self):
"""Close the HTTP client."""
self._client.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
"#
    )
    .unwrap();
}
/// Generate the Python `MetricNode` class: a generic wrapper around one
/// metric path, with `get()` and `get_range(from_date, to_date)` (the
/// latter sends `?from=...&to=...`).
///
/// `{{`/`}}` are Rust format escapes rendering as literal braces in the
/// emitted Python f-strings.
///
/// NOTE(review): the Python indentation inside this template appears
/// collapsed in this snapshot — verify the emitted file is valid Python.
fn generate_metric_node(output: &mut String) {
    writeln!(
        output,
        r#"class MetricNode(Generic[T]):
"""A metric node that can fetch data for different indexes."""
def __init__(self, client: BrkClientBase, path: str):
self._client = client
self._path = path
def get(self) -> List[T]:
"""Fetch all data points for this metric."""
return self._client.get(self._path)
def get_range(self, from_date: str, to_date: str) -> List[T]:
"""Fetch data points within a date range."""
return self._client.get(f"{{self._path}}?from={{from_date}}&to={{to_date}}")
"#
    )
    .unwrap();
}
/// Generate index accessor classes.
///
/// For each index-set pattern, emits a `Generic[T]` Python class whose
/// `__init__` creates one `MetricNode` attribute per index
/// (`by_<snake_case_index>`), each pointing at `<base_path>/<index>`.
/// Emits nothing when there are no patterns.
fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "# Index accessor classes\n").unwrap();
    for pattern in patterns {
        writeln!(output, "class {}(Generic[T]):", pattern.name).unwrap();
        writeln!(
            output,
            " \"\"\"Index accessor for metrics with {} indexes.\"\"\"",
            pattern.indexes.len()
        )
        .unwrap();
        writeln!(output, " ").unwrap();
        writeln!(
            output,
            " def __init__(self, client: BrkClientBase, base_path: str):"
        )
        .unwrap();
        for index in &pattern.indexes {
            // Attribute name and URL segment come from the same Index value.
            let field_name = index_to_snake_case(index);
            let path_segment = index.serialize_long();
            writeln!(
                output,
                " self.{}: MetricNode[T] = MetricNode(client, f'{{base_path}}/{}')",
                field_name, path_segment
            )
            .unwrap();
        }
        writeln!(output).unwrap();
    }
}
/// Convert an Index to a snake_case field name (e.g., DateIndex -> by_date_index).
fn index_to_snake_case(index: &Index) -> String {
    let mut field = String::from("by_");
    field.push_str(&to_snake_case(index.serialize_long()));
    field
}
/// Generate structural pattern classes.
///
/// For each reusable structural pattern, emits a Python class whose
/// `__init__` wires one attribute per field. Each field is constructed as,
/// in priority order: a nested pattern class, a shared index accessor, or
/// a plain `MetricNode` — always rooted at `<base_path>/<field name>`.
fn generate_structural_patterns(
    output: &mut String,
    patterns: &[StructuralPattern],
    metadata: &ClientMetadata,
) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "# Reusable structural pattern classes\n").unwrap();
    for pattern in patterns {
        writeln!(output, "class {}:", pattern.name).unwrap();
        writeln!(
            output,
            " \"\"\"Pattern struct for repeated tree structure.\"\"\""
        )
        .unwrap();
        writeln!(output, " ").unwrap();
        writeln!(
            output,
            " def __init__(self, client: BrkClientBase, base_path: str):"
        )
        .unwrap();
        for field in &pattern.fields {
            let py_type = field_to_python_type(field, metadata);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern: instantiate its generated class.
                writeln!(
                    output,
                    " self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    field.rust_type,
                    field.name
                )
                .unwrap();
            } else if field_uses_accessor(field, metadata) {
                // Leaf whose index set matches a shared accessor class.
                // unwrap is safe: field_uses_accessor just found this pattern.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    " self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    accessor.name,
                    field.name
                )
                .unwrap();
            } else {
                // Leaf with a unique index set: plain MetricNode.
                writeln!(
                    output,
                    " self.{}: {} = MetricNode(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    field.name
                )
                .unwrap();
            }
        }
        writeln!(output).unwrap();
    }
}
/// Convert pattern field to Python type annotation.
///
/// A pattern-typed field uses its pattern class name; a leaf uses either a
/// shared index accessor or `MetricNode`, parameterized by the field's
/// Rust value type (e.g., `DateIndexAccessor[Height]`, `MetricNode[Height]`).
fn field_to_python_type(field: &PatternField, metadata: &ClientMetadata) -> String {
    if metadata.is_pattern_type(&field.rust_type) {
        return field.rust_type.clone();
    }
    match metadata.find_index_set_pattern(&field.indexes) {
        Some(accessor) => format!("{}[{}]", accessor.name, field.rust_type),
        None => format!("MetricNode[{}]", field.rust_type),
    }
}
/// Check if a field should use an index accessor, i.e. whether its index
/// set maps onto one of the shared accessor patterns.
fn field_uses_accessor(field: &PatternField, metadata: &ClientMetadata) -> bool {
    matches!(metadata.find_index_set_pattern(&field.indexes), Some(_))
}
/// Emit the catalog tree classes, starting recursion from the root
/// `CatalogTree` node.
fn generate_tree_classes(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
    writeln!(output, "# Catalog tree classes\n").unwrap();
    let mut seen = HashSet::new();
    let lookup = metadata.pattern_lookup();
    generate_tree_class(output, "CatalogTree", catalog, &lookup, metadata, &mut seen);
}
/// Recursively generate tree classes.
///
/// Emits one Python class per branch node, then recurses into branch
/// children that do not match a reusable pattern (those are already covered
/// by `generate_structural_patterns`). `generated` de-duplicates by class
/// name. Leaf nodes produce no class of their own.
fn generate_tree_class(
    output: &mut String,
    name: &str,
    node: &TreeNode,
    pattern_lookup: &std::collections::HashMap<Vec<PatternField>, String>,
    metadata: &ClientMetadata,
    generated: &mut HashSet<String>,
) {
    if let TreeNode::Branch(children) = node {
        // Build signature
        let fields = get_node_fields(children, pattern_lookup);
        // Skip if this matches a pattern (already generated) — unless the
        // pattern IS this class (same name), in which case emit it here.
        if pattern_lookup.contains_key(&fields)
            && pattern_lookup.get(&fields) != Some(&name.to_string())
        {
            return;
        }
        if generated.contains(name) {
            return;
        }
        generated.insert(name.to_string());
        writeln!(output, "class {}:", name).unwrap();
        writeln!(output, " \"\"\"Catalog tree node.\"\"\"").unwrap();
        writeln!(output, " ").unwrap();
        // base_path defaults to '' so the root can be built as ClassName(client).
        writeln!(
            output,
            " def __init__(self, client: BrkClientBase, base_path: str = ''):"
        )
        .unwrap();
        for field in &fields {
            let py_type = field_to_python_type(field, metadata);
            if metadata.is_pattern_type(&field.rust_type) {
                // Nested pattern: instantiate its generated class.
                writeln!(
                    output,
                    " self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    field.rust_type,
                    field.name
                )
                .unwrap();
            } else if field_uses_accessor(field, metadata) {
                // unwrap is safe: field_uses_accessor just found this pattern.
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    " self.{}: {} = {}(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    accessor.name,
                    field.name
                )
                .unwrap();
            } else {
                // NOTE(review): non-pattern branch children also land here as
                // MetricNode; confirm get_node_fields never yields such fields,
                // otherwise subtree classes are generated but never wired in.
                writeln!(
                    output,
                    " self.{}: {} = MetricNode(client, f'{{base_path}}/{}')",
                    to_snake_case(&field.name),
                    py_type,
                    field.name
                )
                .unwrap();
            }
        }
        writeln!(output).unwrap();
        // Generate child classes
        for (child_name, child_node) in children {
            if let TreeNode::Branch(grandchildren) = child_node {
                let child_fields = get_node_fields(grandchildren, pattern_lookup);
                if !pattern_lookup.contains_key(&child_fields) {
                    // Unique subtree: name it Parent_Child to avoid collisions.
                    let child_class_name = format!("{}_{}", name, to_pascal_case(child_name));
                    generate_tree_class(
                        output,
                        &child_class_name,
                        child_node,
                        pattern_lookup,
                        metadata,
                        generated,
                    );
                }
            }
        }
    }
}
/// Generate the main client class.
///
/// `BrkClient` extends `BrkClientBase`, builds the catalog tree at
/// construction (`self.tree = CatalogTree(self)`), and exposes one method
/// per GET endpoint via `generate_api_methods`.
fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
    writeln!(output, "class BrkClient(BrkClientBase):").unwrap();
    writeln!(
        output,
        " \"\"\"Main BRK client with catalog tree and API methods.\"\"\""
    )
    .unwrap();
    writeln!(output, " ").unwrap();
    writeln!(
        output,
        " def __init__(self, base_url: str = 'http://localhost:3000', timeout: float = 30.0):"
    )
    .unwrap();
    writeln!(output, " super().__init__(base_url, timeout)").unwrap();
    writeln!(output, " self.tree = CatalogTree(self)").unwrap();
    writeln!(output).unwrap();
    // Generate API methods
    generate_api_methods(output, endpoints);
}
/// Generate API methods from OpenAPI endpoints.
///
/// Only GET endpoints are emitted. Each method interpolates path params via
/// an f-string, appends required query params unconditionally and optional
/// ones only when not None, then delegates to `self.get`.
///
/// NOTE(review): query parameter values are interpolated raw, without URL
/// encoding — confirm callers never pass values containing `&`, `=`, etc.
fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
    for endpoint in endpoints {
        if endpoint.method != "GET" {
            continue;
        }
        let method_name = endpoint_to_method_name(endpoint);
        // Fall back to Any when the spec declares no response type.
        let return_type = endpoint.response_type.as_deref().unwrap_or("Any");
        // Build method signature
        let params = build_method_params(endpoint);
        writeln!(
            output,
            " def {}(self{}) -> {}:",
            method_name, params, return_type
        )
        .unwrap();
        // Docstring
        if let Some(summary) = &endpoint.summary {
            writeln!(output, " \"\"\"{}\"\"\"", summary).unwrap();
        }
        // Build path: {param} placeholders become f-string interpolations.
        let path = build_path_template(&endpoint.path, &endpoint.path_params);
        if endpoint.query_params.is_empty() {
            writeln!(output, " return self.get(f'{}')", path).unwrap();
        } else {
            writeln!(output, " params = []").unwrap();
            for param in &endpoint.query_params {
                if param.required {
                    writeln!(
                        output,
                        " params.append(f'{}={{{}}}')",
                        param.name, param.name
                    )
                    .unwrap();
                } else {
                    writeln!(
                        output,
                        " if {} is not None: params.append(f'{}={{{}}}')",
                        param.name, param.name, param.name
                    )
                    .unwrap();
                }
            }
            writeln!(output, " query = '&'.join(params)").unwrap();
            // "?" is only prepended when at least one param was appended.
            writeln!(
                output,
                " return self.get(f'{}{{\"?\" + query if query else \"\"}}')",
                path
            )
            .unwrap();
        }
        writeln!(output).unwrap();
    }
}
/// Derive a snake_case Python method name for an endpoint, preferring its
/// OpenAPI operationId and falling back to the non-parameter path segments.
fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
    match &endpoint.operation_id {
        Some(op_id) => to_snake_case(op_id),
        None => {
            // Drop empty and `{param}` segments, then join the rest.
            let segments: Vec<&str> = endpoint
                .path
                .split('/')
                .filter(|segment| !segment.is_empty() && !segment.starts_with('{'))
                .collect();
            format!("get_{}", segments.join("_"))
        }
    }
}
/// Render the Python parameter list (after `self`) for an endpoint: path
/// params first as required `str`, then query params — required as `str`,
/// optional as `Optional[str] = None`.
fn build_method_params(endpoint: &Endpoint) -> String {
    let path_parts = endpoint
        .path_params
        .iter()
        .map(|param| format!(", {}: str", param.name));
    let query_parts = endpoint.query_params.iter().map(|param| {
        if param.required {
            format!(", {}: str", param.name)
        } else {
            format!(", {}: Optional[str] = None", param.name)
        }
    });
    path_parts.chain(query_parts).collect()
}
/// Turn an OpenAPI path into a Python f-string body: each `{param}`
/// placeholder becomes `{{param}}` in the Rust source, i.e. a literal
/// `{param}` interpolation once emitted.
fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String {
    path_params.iter().fold(path.to_string(), |acc, param| {
        acc.replace(
            &format!("{{{}}}", param.name),
            &format!("{{{{{}}}}}", param.name),
        )
    })
}

View File

@@ -0,0 +1,532 @@
use std::collections::HashSet;
use std::fmt::Write as FmtWrite;
use std::fs;
use std::io;
use std::path::Path;
use brk_types::{Index, TreeNode};
use crate::{ClientMetadata, Endpoint, IndexSetPattern, PatternField, StructuralPattern, get_node_fields, to_pascal_case, to_snake_case};
/// Generate Rust client from metadata and OpenAPI endpoints.
///
/// Builds the whole client as one string and writes it to
/// `<output_dir>/client.rs`, appending sections in order: lint header,
/// imports, HTTP base client, `MetricNode`, index accessor structs,
/// reusable pattern structs, catalog tree, and the main client.
///
/// # Errors
/// Returns an `io::Error` if writing `client.rs` fails. (`writeln!` into a
/// `String` cannot fail, hence the `.unwrap()`s.)
pub fn generate_rust_client(
    metadata: &ClientMetadata,
    endpoints: &[Endpoint],
    output_dir: &Path,
) -> io::Result<()> {
    let mut output = String::new();
    // Header
    writeln!(output, "// Auto-generated BRK Rust client").unwrap();
    writeln!(output, "// Do not edit manually\n").unwrap();
    writeln!(output, "#![allow(non_camel_case_types)]").unwrap();
    writeln!(output, "#![allow(dead_code)]\n").unwrap();
    // Imports
    generate_imports(&mut output);
    // Generate base client
    generate_base_client(&mut output);
    // Generate MetricNode
    generate_metric_node(&mut output);
    // Generate index accessor structs (for each unique set of indexes)
    generate_index_accessors(&mut output, &metadata.index_set_patterns);
    // Generate pattern structs (reusable, appearing 2+ times)
    generate_pattern_structs(&mut output, &metadata.structural_patterns, metadata);
    // Generate tree - each node uses its pattern or is generated inline
    generate_tree(&mut output, &metadata.catalog, metadata);
    // Generate main client with API methods
    generate_main_client(&mut output, endpoints);
    fs::write(output_dir.join("client.rs"), output)?;
    Ok(())
}
/// Emit the `use` header of the generated Rust client.
///
/// `reqwest` needs no `use` here because the generated base client refers
/// to it by fully-qualified path (`reqwest::blocking::...`).
fn generate_imports(output: &mut String) {
    writeln!(
        output,
        r#"use std::marker::PhantomData;
use serde::de::DeserializeOwned;
use brk_types::*;
"#
    )
    .unwrap();
}
/// Generate the base Rust client scaffolding: `BrkError` (+ `Display`/
/// `Error` impls), a client-local `Result` alias, `BrkClientOptions`
/// (defaults: localhost:3000, 30s timeout), and `BrkClientBase` wrapping a
/// blocking `reqwest` client with a generic JSON `get`.
///
/// The body is a single raw-string template; `{{`/`}}` are Rust format
/// escapes that render as literal `{`/`}` in the emitted Rust source.
fn generate_base_client(output: &mut String) {
    writeln!(
        output,
        r#"/// Error type for BRK client operations.
#[derive(Debug)]
pub struct BrkError {{
pub message: String,
}}
impl std::fmt::Display for BrkError {{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {{
write!(f, "{{}}", self.message)
}}
}}
impl std::error::Error for BrkError {{}}
/// Result type for BRK client operations.
pub type Result<T> = std::result::Result<T, BrkError>;
/// Options for configuring the BRK client.
#[derive(Debug, Clone)]
pub struct BrkClientOptions {{
pub base_url: String,
pub timeout_ms: u64,
}}
impl Default for BrkClientOptions {{
fn default() -> Self {{
Self {{
base_url: "http://localhost:3000".to_string(),
timeout_ms: 30000,
}}
}}
}}
/// Base HTTP client for making requests.
#[derive(Debug, Clone)]
pub struct BrkClientBase {{
base_url: String,
client: reqwest::blocking::Client,
}}
impl BrkClientBase {{
/// Create a new client with the given base URL.
pub fn new(base_url: impl Into<String>) -> Result<Self> {{
let base_url = base_url.into();
let client = reqwest::blocking::Client::new();
Ok(Self {{ base_url, client }})
}}
/// Create a new client with options.
pub fn with_options(options: BrkClientOptions) -> Result<Self> {{
let client = reqwest::blocking::Client::builder()
.timeout(std::time::Duration::from_millis(options.timeout_ms))
.build()
.map_err(|e| BrkError {{ message: e.to_string() }})?;
Ok(Self {{
base_url: options.base_url,
client,
}})
}}
/// Make a GET request.
pub fn get<T: DeserializeOwned>(&self, path: &str) -> Result<T> {{
let url = format!("{{}}{{}}", self.base_url, path);
self.client
.get(&url)
.send()
.map_err(|e| BrkError {{ message: e.to_string() }})?
.json()
.map_err(|e| BrkError {{ message: e.to_string() }})
}}
}}
"#
    )
    .unwrap();
}
/// Generate the Rust `MetricNode` type: a borrowed handle (`&BrkClientBase`
/// + path + `PhantomData<T>`) with `get()` and `get_range(from, to)` (the
/// latter sends `?from=...&to=...`).
///
/// `{{`/`}}` are Rust format escapes rendering as literal braces in the
/// emitted Rust source.
fn generate_metric_node(output: &mut String) {
    writeln!(
        output,
        r#"/// A metric node that can fetch data for different indexes.
pub struct MetricNode<'a, T> {{
client: &'a BrkClientBase,
path: String,
_marker: PhantomData<T>,
}}
impl<'a, T: DeserializeOwned> MetricNode<'a, T> {{
pub fn new(client: &'a BrkClientBase, path: String) -> Self {{
Self {{
client,
path,
_marker: PhantomData,
}}
}}
/// Fetch all data points for this metric.
pub fn get(&self) -> Result<Vec<T>> {{
self.client.get(&self.path)
}}
/// Fetch data points within a date range.
pub fn get_range(&self, from: &str, to: &str) -> Result<Vec<T>> {{
let path = format!("{{}}?from={{}}&to={{}}", self.path, from, to);
self.client.get(&path)
}}
}}
"#
    )
    .unwrap();
}
/// Generate index accessor structs for each unique set of indexes.
///
/// For every `IndexSetPattern` this emits a struct with one `MetricNode`
/// field per index (named via `index_to_field_name`) plus a constructor that
/// appends each index's long serialized form to `base_path`.
fn generate_index_accessors(output: &mut String, patterns: &[IndexSetPattern]) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "// Index accessor structs\n").unwrap();
    for pattern in patterns {
        writeln!(output, "/// Index accessor for metrics with {} indexes.", pattern.indexes.len()).unwrap();
        writeln!(output, "pub struct {}<'a, T> {{", pattern.name).unwrap();
        for index in &pattern.indexes {
            let field_name = index_to_field_name(index);
            writeln!(output, " pub {}: MetricNode<'a, T>,", field_name).unwrap();
        }
        // PhantomData keeps the `T` parameter used even if all fields are elided.
        writeln!(output, " _marker: PhantomData<T>,").unwrap();
        writeln!(output, "}}\n").unwrap();
        // Generate impl block with constructor
        writeln!(output, "impl<'a, T: DeserializeOwned> {}<'a, T> {{", pattern.name).unwrap();
        writeln!(output, " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, " Self {{").unwrap();
        for index in &pattern.indexes {
            let field_name = index_to_field_name(index);
            let path_segment = index.serialize_long();
            // Emitted code interpolates `base_path` at runtime; `{{base_path}}`
            // is a literal `{base_path}` in the generated format! call.
            writeln!(
                output,
                " {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                field_name, path_segment
            ).unwrap();
        }
        writeln!(output, " _marker: PhantomData,").unwrap();
        writeln!(output, " }}").unwrap();
        writeln!(output, " }}").unwrap();
        writeln!(output, "}}\n").unwrap();
    }
}
/// Convert an Index to a snake_case field name (e.g., DateIndex -> by_date_index)
fn index_to_field_name(index: &Index) -> String {
    let suffix = to_snake_case(index.serialize_long());
    let mut name = String::with_capacity(3 + suffix.len());
    name.push_str("by_");
    name.push_str(&suffix);
    name
}
/// Generate pattern structs (those appearing 2+ times).
///
/// Each `StructuralPattern` becomes a struct whose fields are either nested
/// pattern structs, shared index accessors, or bare `MetricNode`s, plus a
/// constructor that extends `base_path` with each field's original tree name.
fn generate_pattern_structs(output: &mut String, patterns: &[StructuralPattern], metadata: &ClientMetadata) {
    if patterns.is_empty() {
        return;
    }
    writeln!(output, "// Reusable pattern structs\n").unwrap();
    for pattern in patterns {
        writeln!(output, "/// Pattern struct for repeated tree structure.").unwrap();
        writeln!(output, "pub struct {}<'a> {{", pattern.name).unwrap();
        for field in &pattern.fields {
            let field_name = to_snake_case(&field.name);
            let type_annotation = field_to_type_annotation(field, metadata);
            writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap();
        }
        writeln!(output, "}}\n").unwrap();
        // Generate impl block with constructor
        writeln!(output, "impl<'a> {}<'a> {{", pattern.name).unwrap();
        writeln!(output, " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, " Self {{").unwrap();
        for field in &pattern.fields {
            let field_name = to_snake_case(&field.name);
            // Three cases mirror field_to_type_annotation: nested pattern,
            // shared index accessor, or plain MetricNode leaf.
            if metadata.is_pattern_type(&field.rust_type) {
                writeln!(
                    output,
                    " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, accessor.name, field.name
                ).unwrap();
            } else {
                writeln!(
                    output,
                    " {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                    field_name, field.name
                ).unwrap();
            }
        }
        writeln!(output, " }}").unwrap();
        writeln!(output, " }}").unwrap();
        writeln!(output, "}}\n").unwrap();
    }
}
/// Convert a PatternField to the full type annotation
fn field_to_type_annotation(field: &PatternField, metadata: &ClientMetadata) -> String {
    // Branch fields reference their generated pattern struct directly.
    if metadata.is_pattern_type(&field.rust_type) {
        return format!("{}<'a>", field.rust_type);
    }
    // Leaf fields: shared index-set accessor when one exists, otherwise a
    // bare MetricNode parameterized by the value type.
    match metadata.find_index_set_pattern(&field.indexes) {
        Some(accessor) => format!("{}<'a, {}>", accessor.name, field.rust_type),
        None => format!("MetricNode<'a, {}>", field.rust_type),
    }
}
/// Check if a field should use an index accessor, i.e. whether its index set
/// matches one of the reusable `IndexSetPattern`s held by the metadata.
fn field_uses_accessor(field: &PatternField, metadata: &ClientMetadata) -> bool {
    metadata.find_index_set_pattern(&field.indexes).is_some()
}
/// Generate the catalog tree structure, rooted at a struct named `CatalogTree`.
fn generate_tree(
    output: &mut String,
    catalog: &TreeNode,
    metadata: &ClientMetadata,
) {
    // Precompute the structural-pattern lookup and a dedup set before emitting.
    let lookup = metadata.pattern_lookup();
    let mut emitted = HashSet::new();
    writeln!(output, "// Catalog tree\n").unwrap();
    generate_tree_node(output, "CatalogTree", catalog, &lookup, metadata, &mut emitted);
}
/// Recursively generate tree nodes.
///
/// For each branch, builds the node's field signature, skips nodes whose
/// signature matches a reusable pattern (those are emitted by
/// `generate_pattern_structs`), emits the struct + constructor, then recurses
/// into non-pattern children. `generated` prevents emitting a name twice.
fn generate_tree_node(
    output: &mut String,
    name: &str,
    node: &TreeNode,
    pattern_lookup: &std::collections::HashMap<Vec<PatternField>, String>,
    metadata: &ClientMetadata,
    generated: &mut HashSet<String>,
) {
    if let TreeNode::Branch(children) = node {
        // Build the signature for this node
        let mut fields: Vec<PatternField> = children
            .iter()
            .map(|(child_name, child_node)| {
                let (rust_type, json_type, indexes) = match child_node {
                    TreeNode::Leaf(leaf) => (
                        leaf.value_type().to_string(),
                        leaf.schema.get("type").and_then(|v| v.as_str()).unwrap_or("object").to_string(),
                        leaf.indexes().clone(),
                    ),
                    TreeNode::Branch(grandchildren) => {
                        // Get pattern name for this child; fall back to a
                        // parent-derived unique name when it is not a pattern.
                        let child_fields = get_node_fields(grandchildren, pattern_lookup);
                        let pattern_name = pattern_lookup
                            .get(&child_fields)
                            .cloned()
                            .unwrap_or_else(|| format!("{}_{}", name, to_pascal_case(child_name)));
                        (pattern_name.clone(), pattern_name, std::collections::BTreeSet::new())
                    }
                };
                PatternField {
                    name: child_name.clone(),
                    rust_type,
                    json_type,
                    indexes,
                }
            })
            .collect();
        // Sorted so the signature comparison below is order-independent.
        fields.sort_by(|a, b| a.name.cmp(&b.name));
        // Check if this matches a reusable pattern
        if let Some(pattern_name) = pattern_lookup.get(&fields) {
            // This node matches a pattern that will be generated separately
            // Don't generate it here, it's already in pattern_structs
            if pattern_name != name {
                return;
            }
        }
        // Generate this struct if not already generated
        if generated.contains(name) {
            return;
        }
        generated.insert(name.to_string());
        writeln!(output, "/// Catalog tree node.").unwrap();
        writeln!(output, "pub struct {}<'a> {{", name).unwrap();
        for field in &fields {
            let field_name = to_snake_case(&field.name);
            let type_annotation = field_to_type_annotation(field, metadata);
            writeln!(output, " pub {}: {},", field_name, type_annotation).unwrap();
        }
        writeln!(output, "}}\n").unwrap();
        // Generate impl block
        writeln!(output, "impl<'a> {}<'a> {{", name).unwrap();
        writeln!(output, " pub fn new(client: &'a BrkClientBase, base_path: &str) -> Self {{").unwrap();
        writeln!(output, " Self {{").unwrap();
        for field in &fields {
            let field_name = to_snake_case(&field.name);
            // Same three-way dispatch as generate_pattern_structs.
            if metadata.is_pattern_type(&field.rust_type) {
                writeln!(
                    output,
                    " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, field.rust_type, field.name
                ).unwrap();
            } else if field_uses_accessor(field, metadata) {
                let accessor = metadata.find_index_set_pattern(&field.indexes).unwrap();
                writeln!(
                    output,
                    " {}: {}::new(client, &format!(\"{{base_path}}/{}\")),",
                    field_name, accessor.name, field.name
                ).unwrap();
            } else {
                writeln!(
                    output,
                    " {}: MetricNode::new(client, format!(\"{{base_path}}/{}\")),",
                    field_name, field.name
                ).unwrap();
            }
        }
        writeln!(output, " }}").unwrap();
        writeln!(output, " }}").unwrap();
        writeln!(output, "}}\n").unwrap();
        // Recursively generate child nodes that aren't patterns
        for (child_name, child_node) in children {
            if let TreeNode::Branch(grandchildren) = child_node {
                let child_fields = get_node_fields(grandchildren, pattern_lookup);
                if !pattern_lookup.contains_key(&child_fields) {
                    let child_struct_name = format!("{}_{}", name, to_pascal_case(child_name));
                    generate_tree_node(output, &child_struct_name, child_node, pattern_lookup, metadata, generated);
                }
            }
        }
    }
}
/// Generate the main client struct.
///
/// Emits `BrkClient` (constructors + `tree()` accessor) as a template, then
/// appends the per-endpoint API methods; the closing `}` of the impl block is
/// written after `generate_api_methods` so the methods land inside it.
fn generate_main_client(output: &mut String, endpoints: &[Endpoint]) {
    writeln!(
        output,
        r#"/// Main BRK client with catalog tree and API methods.
pub struct BrkClient {{
base: BrkClientBase,
}}
impl BrkClient {{
/// Create a new client with the given base URL.
pub fn new(base_url: impl Into<String>) -> Result<Self> {{
Ok(Self {{
base: BrkClientBase::new(base_url)?,
}})
}}
/// Create a new client with options.
pub fn with_options(options: BrkClientOptions) -> Result<Self> {{
Ok(Self {{
base: BrkClientBase::with_options(options)?,
}})
}}
/// Get the catalog tree for navigating metrics.
pub fn tree(&self) -> CatalogTree<'_> {{
CatalogTree::new(&self.base, "")
}}
"#
    )
    .unwrap();
    // Generate API methods
    generate_api_methods(output, endpoints);
    // Close the `impl BrkClient` block opened in the template above.
    writeln!(output, "}}").unwrap();
}
/// Generate API methods from OpenAPI endpoints.
///
/// Only GET endpoints are emitted. Required query parameters are always
/// appended; optional ones are `Option<&str>` and appended when `Some`.
fn generate_api_methods(output: &mut String, endpoints: &[Endpoint]) {
    for endpoint in endpoints {
        if endpoint.method != "GET" {
            continue;
        }
        let method_name = endpoint_to_method_name(endpoint);
        let return_type = endpoint.response_type.as_deref().unwrap_or("serde_json::Value");
        // Build doc comment
        writeln!(output, " /// {}", endpoint.summary.as_deref().unwrap_or(&method_name)).unwrap();
        // Build method signature
        let params = build_method_params(endpoint);
        writeln!(output, " pub fn {}(&self{}) -> Result<{}> {{", method_name, params, return_type).unwrap();
        // Build path
        let path = build_path_template(&endpoint.path, &endpoint.path_params);
        if endpoint.query_params.is_empty() {
            writeln!(output, " self.base.get(&format!(\"{}\"))", path).unwrap();
        } else {
            // Accumulate `name=value` pairs, then join them into a `?a=b&c=d` suffix.
            writeln!(output, " let mut query = Vec::new();").unwrap();
            for param in &endpoint.query_params {
                if param.required {
                    writeln!(output, " query.push(format!(\"{}={{}}\", {}));", param.name, param.name).unwrap();
                } else {
                    writeln!(output, " if let Some(v) = {} {{ query.push(format!(\"{}={{}}\", v)); }}", param.name, param.name).unwrap();
                }
            }
            writeln!(output, " let query_str = if query.is_empty() {{ String::new() }} else {{ format!(\"?{{}}\", query.join(\"&\")) }};").unwrap();
            writeln!(output, " self.base.get(&format!(\"{}{{}}\", query_str))", path).unwrap();
        }
        writeln!(output, " }}\n").unwrap();
    }
}
/// Derive a Rust method name for an endpoint: the snake_cased operation id
/// when present, otherwise `get_` plus the non-parameter path segments.
fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
    match endpoint.operation_id.as_ref() {
        Some(op_id) => to_snake_case(op_id),
        None => {
            let joined = endpoint
                .path
                .split('/')
                .filter(|segment| !segment.is_empty() && !segment.starts_with('{'))
                .collect::<Vec<_>>()
                .join("_");
            format!("get_{}", joined)
        }
    }
}
fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
for param in &endpoint.path_params {
params.push(format!(", {}: &str", param.name));
}
for param in &endpoint.query_params {
if param.required {
params.push(format!(", {}: &str", param.name));
} else {
params.push(format!(", {}: Option<&str>", param.name));
}
}
params.join("")
}
/// Prepare an OpenAPI path for embedding inside a generated `format!` string.
///
/// NOTE(review): `placeholder` and `interpolation` are built from the same
/// template (`{{{}}}` around `param.name`), so each `replace` below is
/// currently an identity transform. It works because OpenAPI's `{param}`
/// syntax coincides with Rust's format interpolation of a local variable of
/// the same name — confirm this is intentional before simplifying.
fn build_path_template(path: &str, path_params: &[super::Parameter]) -> String {
    let mut result = path.to_string();
    for param in path_params {
        let placeholder = format!("{{{}}}", param.name);
        let interpolation = format!("{{{}}}", param.name);
        result = result.replace(&placeholder, &interpolation);
    }
    result
}

View File

@@ -1,917 +0,0 @@
use serde_json::{Map, Value};
use std::collections::{HashMap, HashSet};
use std::fs;
/// Canonical shape of a leaf object in the config tree: its sorted field
/// names plus a cached count. Hashable so it can key pattern-frequency maps.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Pattern {
    fields: Vec<String>, // sorted field names
    field_count: usize,  // number of (distinct) fields
}
/// Make a config key safe to use as a Python/TypeScript identifier.
///
/// Dashes become underscores, and a leading digit gets an underscore prefix
/// (Python identifiers can't start with a number).
///
/// Fixes two defects in the previous version: it panicked on an empty name
/// (`chars().next().unwrap()`), and the digit branch returned the raw name
/// with dashes intact (e.g. `"2-b"` -> `"_2-b"`, an invalid identifier).
fn sanitize_name(name: &str) -> String {
    // Normalize dashes first so both branches yield valid identifiers.
    let cleaned = name.replace('-', "_");
    match cleaned.chars().next() {
        Some(first) if first.is_numeric() => format!("_{}", cleaned),
        _ => cleaned,
    }
}
/// Capture the structural signature of an object: its sorted key names.
fn extract_pattern(obj: &Map<String, Value>) -> Pattern {
    let mut field_names: Vec<String> = obj.keys().cloned().collect();
    // Keys are unique, so unstable sort yields the same order as stable sort.
    field_names.sort_unstable();
    Pattern {
        field_count: field_names.len(),
        fields: field_names,
    }
}
/// Jaccard similarity of two patterns' field sets
/// (0.0 = disjoint, 1.0 = identical).
fn pattern_similarity(p1: &Pattern, p2: &Pattern) -> f64 {
    if p1.field_count == 0 || p2.field_count == 0 {
        return 0.0;
    }
    let left: HashSet<&String> = p1.fields.iter().collect();
    let right: HashSet<&String> = p2.fields.iter().collect();
    let shared = left.intersection(&right).count() as f64;
    let total = left.union(&right).count() as f64;
    shared / total
}
/// Group similar patterns together via greedy first-fit clustering: each
/// pattern joins the first cluster whose representative (its first member)
/// is at least 70% similar, or starts a new cluster.
///
/// Fix: the previous version iterated the `HashMap` directly, whose order is
/// randomized per process, so cluster membership — and therefore the
/// generated client code — differed from run to run. Sorting the entries by
/// field list first makes the output deterministic.
fn cluster_patterns(patterns: &HashMap<Pattern, Vec<String>>) -> Vec<Vec<(Pattern, Vec<String>)>> {
    let similarity_threshold = 0.7; // 70% field overlap (Jaccard)
    // Deterministic visit order regardless of HashMap hashing.
    let mut entries: Vec<(&Pattern, &Vec<String>)> = patterns.iter().collect();
    entries.sort_by(|a, b| a.0.fields.cmp(&b.0.fields));
    let mut clusters: Vec<Vec<(Pattern, Vec<String>)>> = Vec::new();
    for (pattern, paths) in entries {
        let target = clusters
            .iter_mut()
            .find(|cluster| pattern_similarity(pattern, &cluster[0].0) >= similarity_threshold);
        match target {
            Some(cluster) => cluster.push((pattern.clone(), paths.clone())),
            None => clusters.push(vec![(pattern.clone(), paths.clone())]),
        }
    }
    clusters
}
/// Merge similar patterns into a flexible pattern.
///
/// Returns the union of all fields (sorted) plus a map marking each field
/// "required" when it appears in more than 80% of the cluster's patterns.
fn merge_patterns_in_cluster(
    cluster: &[(Pattern, Vec<String>)],
) -> (Pattern, HashMap<String, bool>) {
    let mut all_fields: HashSet<String> = HashSet::new();
    let mut field_counts: HashMap<String, usize> = HashMap::new();
    let total_patterns = cluster.len();
    // Collect all fields and count occurrences
    for (pattern, _) in cluster {
        for field in &pattern.fields {
            all_fields.insert(field.clone());
            *field_counts.entry(field.clone()).or_insert(0) += 1;
        }
    }
    // Sort fields
    let mut sorted_fields: Vec<String> = all_fields.into_iter().collect();
    sorted_fields.sort();
    // Mark which fields are required (present in >80% of patterns)
    let mut required_fields: HashMap<String, bool> = HashMap::new();
    for field in &sorted_fields {
        let count = field_counts.get(field).unwrap_or(&0);
        required_fields.insert(field.clone(), *count as f64 / total_patterns as f64 > 0.8);
    }
    (
        Pattern {
            fields: sorted_fields,
            // field_counts.len() == number of distinct fields == sorted_fields.len()
            field_count: field_counts.len(),
        },
        required_fields,
    )
}
/// Walk the tree recording the shape of every "large" leaf object (all-string
/// values, more than 5 fields) keyed by its `Pattern`, with the dotted paths
/// where each shape occurs.
fn find_patterns(tree: &Value, patterns: &mut HashMap<Pattern, Vec<String>>, path: String) {
    if let Value::Object(map) = tree {
        // A leaf object maps every key directly to a metric-name string.
        let all_strings = map.values().all(Value::is_string);
        if all_strings && map.len() > 5 {
            patterns
                .entry(extract_pattern(map))
                .or_default()
                .push(path.clone());
        }
        // Recurse into children with dotted paths.
        for (key, value) in map {
            let child_path = if path.is_empty() {
                key.clone()
            } else {
                format!("{}.{}", path, key)
            };
            find_patterns(value, patterns, child_path);
        }
    }
}
/// Follow a sequence of object keys down the tree; `None` as soon as a
/// segment is missing or the current node is not an object.
fn traverse_to_path<'a>(tree: &'a Value, path: &[&str]) -> Option<&'a Value> {
    path.iter().try_fold(tree, |node, segment| match node {
        Value::Object(map) => map.get(*segment),
        _ => None,
    })
}
/// Emit one reusable Python pattern class whose `__init__` assigns
/// `"{path}/{prefix}_{metric}"` strings for each field, using the object at
/// `example_path` as the source of metric names.
///
/// NOTE(review): `required_fields` is accepted but never read in this body —
/// confirm whether optional-field handling was intended here.
fn generate_python_pattern_class(
    merged_pattern: &Pattern,
    required_fields: &HashMap<String, bool>,
    class_name: &str,
    example_path: &str,
    tree: &Value,
) -> String {
    let mut output = String::new();
    output.push_str(&format!("class {}Namespace:\n", class_name));
    output.push_str(&format!(
        " \"\"\"Pattern for {} (supports {} fields)\"\"\"\n",
        class_name, merged_pattern.field_count
    ));
    let slots: Vec<String> = merged_pattern
        .fields
        .iter()
        .map(|f| sanitize_name(f))
        .collect();
    output.push_str(&format!(
        " __slots__ = ({})\n\n",
        slots
            .iter()
            .map(|s| format!("'{}'", s))
            .collect::<Vec<_>>()
            .join(", ")
    ));
    output.push_str(" def __init__(self, path: str, prefix: str):\n");
    // example_path is dotted (as produced by find_patterns).
    let path_segments: Vec<&str> = example_path.split('.').collect();
    if let Some(obj) = traverse_to_path(tree, &path_segments) {
        if let Value::Object(map) = obj {
            for field in &merged_pattern.fields {
                let safe_field = sanitize_name(field);
                // Fields absent from the example object are silently skipped.
                if let Some(Value::String(metric_name)) = map.get(field) {
                    output.push_str(&format!(
                        " self.{} = f\"{{path}}/{{prefix}}_{}\"\n",
                        safe_field, metric_name
                    ));
                }
            }
        }
    }
    output.push_str("\n\n");
    output
}
/// Emit one Python namespace class for a branch object of the tree.
///
/// String leaves become path attributes; nested objects become either a
/// shared pattern class (when their shape matches one) or a dedicated nested
/// namespace class.
///
/// Fix: the non-pattern nested branch previously called `format!` with two
/// `{}` placeholders but only one argument (and left the computed
/// `nested_class` unused), which did not compile; `nested_class` is now
/// passed as the second argument.
///
/// NOTE(review): `tree` is accepted but not read here — kept for signature
/// parity with the other generators.
fn generate_python_namespace_class(
    name: &str,
    obj: &Map<String, Value>,
    tree: &Value,
    api_path: &str,
    pattern_classes: &HashMap<Pattern, String>,
) -> String {
    let mut output = String::new();
    // PascalCase the namespace name: "block_count" -> "BlockCountNamespace".
    let class_name = format!(
        "{}Namespace",
        name.split('_')
            .map(|s| {
                let mut c = s.chars();
                match c.next() {
                    None => String::new(),
                    Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
                }
            })
            .collect::<String>()
    );
    output.push_str(&format!("class {}:\n", class_name));
    output.push_str(&format!(" \"\"\"Namespace for {} metrics\"\"\"\n", name));
    let mut slots = vec![];
    let mut init_lines = vec![];
    for (key, value) in obj {
        let safe_key = sanitize_name(key);
        slots.push(safe_key.clone());
        match value {
            Value::String(metric_name) => {
                init_lines.push(format!(
                    " self.{} = f\"{}/{}\"",
                    safe_key, api_path, metric_name
                ));
            }
            Value::Object(nested_map) => {
                let pattern = extract_pattern(nested_map);
                if let Some(pattern_class) = pattern_classes.get(&pattern) {
                    init_lines.push(format!(
                        " self.{} = {}Namespace(\"{}\", \"{}\")",
                        safe_key, pattern_class, api_path, key
                    ));
                } else {
                    // Dedicated nested class: parent prefix + PascalCase(key).
                    let nested_class = format!(
                        "{}{}",
                        class_name.trim_end_matches("Namespace"),
                        key.split('_')
                            .map(|s| {
                                let mut c = s.chars();
                                match c.next() {
                                    None => String::new(),
                                    Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
                                }
                            })
                            .collect::<String>()
                    );
                    init_lines.push(format!(
                        " self.{} = {}Namespace()",
                        safe_key, nested_class
                    ));
                }
            }
            _ => {}
        }
    }
    output.push_str(&format!(
        " __slots__ = ({})\n\n",
        slots
            .iter()
            .map(|s| format!("'{}'", s))
            .collect::<Vec<_>>()
            .join(", ")
    ));
    output.push_str(" def __init__(self):\n");
    for line in init_lines {
        output.push_str(&format!("{}\n", line));
    }
    output.push_str("\n\n");
    output
}
/// Post-order walk of the tree: emit nested namespace classes first so they
/// exist before the parent class that references them, then emit this node's
/// own namespace class.
fn generate_python_namespaces_recursive(
    obj: &Map<String, Value>,
    tree: &Value,
    pattern_classes: &HashMap<Pattern, String>,
    path: &str,
    output: &mut String,
) {
    for (key, value) in obj {
        if let Value::Object(nested_map) = value {
            let new_path = if path.is_empty() {
                key.clone()
            } else {
                format!("{}/{}", path, key)
            };
            // All-string objects are metric leaves; only recurse into branches.
            let is_leaf = nested_map.values().all(|v| v.is_string());
            if !is_leaf {
                generate_python_namespaces_recursive(
                    nested_map,
                    tree,
                    pattern_classes,
                    &new_path,
                    output,
                );
            }
        }
    }
    // NOTE(review): `path` is already '/'-separated here, so this replace
    // looks like a no-op — confirm before removing.
    let api_path = path.replace(".", "/");
    let name = path.split('/').last().unwrap_or("Root");
    output.push_str(&generate_python_namespace_class(
        name,
        obj,
        tree,
        &api_path,
        pattern_classes,
    ));
}
/// Generate the full Python module: header docstring, shared pattern classes
/// (for shapes used 3+ times with 8+ fields), per-branch namespace classes,
/// and the top-level `BRKTree` entry point.
fn generate_python_client(tree: &Value) -> String {
    let mut output = String::new();
    output.push_str(
        r#""""
BRK API Tree - Auto-generated from config
Each attribute is a string representing the API path + metric name.
Use these paths with your own fetch implementation.
DO NOT EDIT - This file is generated by codegen
"""
"#,
    );
    output.push_str(
        "# ============================================================================\n",
    );
    output.push_str("# PATTERN CLASSES\n");
    output.push_str(
        "# ============================================================================\n\n",
    );
    let mut patterns: HashMap<Pattern, Vec<String>> = HashMap::new();
    find_patterns(tree, &mut patterns, String::new());
    let clusters = cluster_patterns(&patterns);
    let mut pattern_classes: HashMap<Pattern, String> = HashMap::new();
    let mut cluster_id = 0;
    for cluster in clusters.iter() {
        // Only shapes that are both frequent (3+ uses) and wide (8+ fields)
        // get a shared pattern class.
        let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum();
        if total_usage >= 3 && cluster[0].0.field_count >= 8 {
            let (merged_pattern, required_fields) = merge_patterns_in_cluster(cluster);
            // Class name hints at the dominant field family.
            let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) {
                format!("RatioPattern{}", cluster_id)
            } else if merged_pattern.fields.iter().any(|f| f.contains("count")) {
                format!("CountPattern{}", cluster_id)
            } else {
                format!("CommonPattern{}", cluster_id)
            };
            output.push_str(&generate_python_pattern_class(
                &merged_pattern,
                &required_fields,
                &class_name,
                &cluster[0].1[0],
                tree,
            ));
            for (pattern, _) in cluster {
                pattern_classes.insert(pattern.clone(), class_name.clone());
            }
            cluster_id += 1;
        }
    }
    output.push_str(
        "# ============================================================================\n",
    );
    output.push_str("# NAMESPACE CLASSES\n");
    output.push_str(
        "# ============================================================================\n\n",
    );
    if let Value::Object(root) = tree {
        generate_python_namespaces_recursive(root, tree, &pattern_classes, "", &mut output);
    }
    // NOTE(review): this __slots__ list is hardcoded while __init__ below is
    // generated from the actual root keys — the two can drift if the config's
    // top-level sections change. Consider generating the slots from
    // `root.keys()` as well.
    output.push_str(
        r#"
class BRKTree:
"""
BRK API Tree
Usage:
tree = BRKTree()
path = tree.computed.chain.block_count.base
# path is now "computed/chain/block_count"
# Use this path with your own HTTP client
"""
__slots__ = ("computed", "cointime", "constants", "fetched", "indexes", "market")
def __init__(self):
"#,
    );
    if let Value::Object(root) = tree {
        for key in root.keys() {
            output.push_str(&format!(
                " self.{} = {}Namespace()\n",
                sanitize_name(key),
                key.split('_')
                    .map(|s| {
                        let mut c = s.chars();
                        match c.next() {
                            None => String::new(),
                            Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
                        }
                    })
                    .collect::<String>()
            ));
        }
    }
    output
}
/// Convert a snake_case string to PascalCase by uppercasing the first
/// character of each underscore-separated word.
fn to_pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for word in s.split('_') {
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            out.extend(first.to_uppercase());
            out.push_str(chars.as_str());
        }
    }
    out
}
/// Emit one reusable TypeScript pattern class whose constructor assigns
/// `` `${path}/${prefix}_metric` `` template strings for each field, using
/// the object at `example_path` (dotted) as the source of metric names.
fn generate_typescript_pattern_class(
    merged_pattern: &Pattern,
    class_name: &str,
    example_path: &str,
    tree: &Value,
) -> String {
    let mut output = String::new();
    output.push_str(&format!("export class {}Namespace {{\n", class_name));
    for field in &merged_pattern.fields {
        let safe_field = sanitize_name(field);
        output.push_str(&format!(" readonly {}: string;\n", safe_field));
    }
    output.push_str("\n constructor(path: string, prefix: string) {\n");
    let path_segments: Vec<&str> = example_path.split('.').collect();
    if let Some(obj) = traverse_to_path(tree, &path_segments) {
        if let Value::Object(map) = obj {
            for field in &merged_pattern.fields {
                let safe_field = sanitize_name(field);
                // Fields missing from the example object are skipped.
                if let Some(Value::String(metric_name)) = map.get(field) {
                    output.push_str(&format!(
                        " this.{} = `${{path}}/${{prefix}}_{}`;\n",
                        safe_field, metric_name
                    ));
                }
            }
        }
    }
    output.push_str(" }\n}\n\n");
    output
}
/// Post-order walk emitting TypeScript namespace classes: children first,
/// then this node's class (field declarations followed by a constructor).
///
/// NOTE(review): for a non-pattern nested object, the parent references
/// `{class_name}{PascalCase(key)}Namespace`, but the recursive call names the
/// child's class from only the last path segment (`{PascalCase(key)}Namespace`)
/// — so at depth >= 2 the referenced and emitted names disagree. Verify
/// against real configs whether this branch is ever hit.
fn generate_typescript_namespaces_recursive(
    obj: &Map<String, Value>,
    tree: &Value,
    pattern_classes: &HashMap<Pattern, String>,
    path: &str,
    output: &mut String,
) {
    for (key, value) in obj {
        if let Value::Object(nested_map) = value {
            let new_path = if path.is_empty() {
                key.clone()
            } else {
                format!("{}/{}", path, key)
            };
            // All-string objects are metric leaves; only recurse into branches.
            let is_leaf = nested_map.values().all(|v| v.is_string());
            if !is_leaf {
                generate_typescript_namespaces_recursive(
                    nested_map,
                    tree,
                    pattern_classes,
                    &new_path,
                    output,
                );
            }
        }
    }
    let api_path = path.replace(".", "/");
    let name = path.split('/').last().unwrap_or("Root");
    let class_name = to_pascal_case(name);
    output.push_str(&format!("export class {}Namespace {{\n", class_name));
    // Field declarations.
    for (key, value) in obj {
        let safe_key = sanitize_name(key);
        match value {
            Value::String(_) => {
                output.push_str(&format!(" readonly {}: string;\n", safe_key));
            }
            Value::Object(nested_map) => {
                let pattern = extract_pattern(nested_map);
                if let Some(pattern_class) = pattern_classes.get(&pattern) {
                    output.push_str(&format!(
                        " readonly {}: {}Namespace;\n",
                        safe_key, pattern_class
                    ));
                } else {
                    let nested_class = format!("{}{}", class_name, to_pascal_case(key));
                    output.push_str(&format!(
                        " readonly {}: {}Namespace;\n",
                        safe_key, nested_class
                    ));
                }
            }
            _ => {}
        }
    }
    // Constructor body mirrors the declarations above.
    output.push_str("\n constructor() {\n");
    for (key, value) in obj {
        let safe_key = sanitize_name(key);
        match value {
            Value::String(metric_name) => {
                output.push_str(&format!(
                    " this.{} = '{}/{}';\n",
                    safe_key, api_path, metric_name
                ));
            }
            Value::Object(nested_map) => {
                let pattern = extract_pattern(nested_map);
                if let Some(pattern_class) = pattern_classes.get(&pattern) {
                    output.push_str(&format!(
                        " this.{} = new {}Namespace('{}', '{}');\n",
                        safe_key, pattern_class, api_path, key
                    ));
                } else {
                    let nested_class = format!("{}{}", class_name, to_pascal_case(key));
                    output.push_str(&format!(
                        " this.{} = new {}Namespace();\n",
                        safe_key, nested_class
                    ));
                }
            }
            _ => {}
        }
    }
    output.push_str(" }\n}\n\n");
}
/// Generate the full TypeScript module: header comment, shared pattern
/// classes (shapes used 3+ times with 8+ fields), namespace classes, and the
/// top-level `BRKTree` class.
fn generate_typescript_client(tree: &Value) -> String {
    let mut output = String::new();
    output.push_str(
        r#"/**
* BRK API Tree - Auto-generated from config
*
* Each property is a string representing the API path + metric name.
* Use these paths with your own fetch implementation.
*
* DO NOT EDIT - This file is generated by codegen
*/
"#,
    );
    let mut patterns: HashMap<Pattern, Vec<String>> = HashMap::new();
    find_patterns(tree, &mut patterns, String::new());
    let clusters = cluster_patterns(&patterns);
    let mut pattern_classes: HashMap<Pattern, String> = HashMap::new();
    let mut cluster_id = 0;
    for cluster in clusters.iter() {
        // Same frequency/width thresholds as the Python generator.
        let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum();
        if total_usage >= 3 && cluster[0].0.field_count >= 8 {
            let (merged_pattern, _) = merge_patterns_in_cluster(cluster);
            let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) {
                format!("RatioPattern{}", cluster_id)
            } else if merged_pattern.fields.iter().any(|f| f.contains("count")) {
                format!("CountPattern{}", cluster_id)
            } else {
                format!("CommonPattern{}", cluster_id)
            };
            output.push_str(&generate_typescript_pattern_class(
                &merged_pattern,
                &class_name,
                &cluster[0].1[0],
                tree,
            ));
            for (pattern, _) in cluster {
                pattern_classes.insert(pattern.clone(), class_name.clone());
            }
            cluster_id += 1;
        }
    }
    if let Value::Object(root) = tree {
        generate_typescript_namespaces_recursive(root, tree, &pattern_classes, "", &mut output);
    }
    // Top-level BRKTree: one field per root key, filled in the constructor.
    output.push_str(
        r#"
export class BRKTree {
"#,
    );
    if let Value::Object(root) = tree {
        for key in root.keys() {
            let class_name = to_pascal_case(key);
            output.push_str(&format!(
                " readonly {}: {}Namespace;\n",
                sanitize_name(key),
                class_name
            ));
        }
    }
    output.push_str("\n constructor() {\n");
    if let Value::Object(root) = tree {
        for key in root.keys() {
            let class_name = to_pascal_case(key);
            output.push_str(&format!(
                " this.{} = new {}Namespace();\n",
                sanitize_name(key),
                class_name
            ));
        }
    }
    output.push_str(" }\n}\n");
    output
}
/// Sanitize a JSON key for use as a Rust field identifier.
///
/// Despite the name, no camelCase splitting happens — config keys are already
/// snake_case — so this only normalizes dashes and raw-escapes (`r#…`) Rust
/// keywords.
///
/// Fix: the previous keyword list covered only 8 keywords, so keys such as
/// `fn`, `mod`, `use` or `struct` produced invalid generated Rust. The list
/// now covers the strict 2015/2018 keywords. (`self`, `super`, `crate` and
/// `Self` cannot be raw identifiers and are intentionally excluded.)
fn to_snake_case(s: &str) -> String {
    let sanitized = s.replace('-', "_");
    match sanitized.as_str() {
        "as" | "async" | "await" | "break" | "const" | "continue" | "dyn" | "else" | "enum"
        | "extern" | "false" | "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "match"
        | "mod" | "move" | "mut" | "pub" | "ref" | "return" | "static" | "struct" | "trait"
        | "true" | "type" | "unsafe" | "use" | "where" | "while" => {
            format!("r#{}", sanitized)
        }
        _ => sanitized,
    }
}
/// Emit one reusable Rust pattern struct plus a `new(path, prefix)`
/// constructor that builds each metric path as `"{path}/{prefix}_{metric}"`,
/// using the object at `example_path` (dotted) as the source of metric names.
///
/// Fix: the constructor's format template previously read
/// `"{{}}/{{}}_{}}"` — the trailing `}` is an unmatched brace, which is a
/// compile error in `format!`. The stray brace is removed so the emitted line
/// is `field: format!("{}/{}_metric", path, prefix),`.
fn generate_rust_pattern_struct(
    merged_pattern: &Pattern,
    struct_name: &str,
    example_path: &str,
    tree: &Value,
) -> String {
    let mut output = String::new();
    output.push_str(&format!("/// Pattern for {} metrics\n", struct_name));
    output.push_str("#[derive(Clone, Debug)]\n");
    output.push_str(&format!("pub struct {}Namespace {{\n", struct_name));
    for field in &merged_pattern.fields {
        let safe_field = to_snake_case(&sanitize_name(field));
        output.push_str(&format!(" pub {}: String,\n", safe_field));
    }
    output.push_str("}\n\n");
    output.push_str(&format!("impl {}Namespace {{\n", struct_name));
    output.push_str(" fn new(path: &str, prefix: &str) -> Self {\n");
    output.push_str(" Self {\n");
    let path_segments: Vec<&str> = example_path.split('.').collect();
    if let Some(obj) = traverse_to_path(tree, &path_segments) {
        if let Value::Object(map) = obj {
            for field in &merged_pattern.fields {
                let safe_field = to_snake_case(&sanitize_name(field));
                // Fields missing from the example object are skipped.
                if let Some(Value::String(metric_name)) = map.get(field) {
                    output.push_str(&format!(
                        " {}: format!(\"{{}}/{{}}_{}\", path, prefix),\n",
                        safe_field, metric_name
                    ));
                }
            }
        }
    }
    output.push_str(" }\n }\n}\n\n");
    output
}
/// Post-order walk emitting Rust namespace structs: children first, then this
/// node's struct and its no-arg `new()` constructor.
///
/// NOTE(review): for a non-pattern nested object the parent references
/// `{struct_name}{PascalCase(key)}Namespace`, but the recursive call derives
/// the child's struct name from only the last path segment
/// (`{PascalCase(key)}Namespace`) — at depth >= 2 the referenced and emitted
/// names disagree. Verify against real configs whether this branch is hit.
fn generate_rust_namespaces_recursive(
    obj: &Map<String, Value>,
    tree: &Value,
    pattern_classes: &HashMap<Pattern, String>,
    path: &str,
    output: &mut String,
) {
    for (key, value) in obj {
        if let Value::Object(nested_map) = value {
            let new_path = if path.is_empty() {
                key.clone()
            } else {
                format!("{}/{}", path, key)
            };
            // All-string objects are metric leaves; only recurse into branches.
            let is_leaf = nested_map.values().all(|v| v.is_string());
            if !is_leaf {
                generate_rust_namespaces_recursive(
                    nested_map,
                    tree,
                    pattern_classes,
                    &new_path,
                    output,
                );
            }
        }
    }
    let api_path = path.replace(".", "/");
    let name = path.split('/').last().unwrap_or("Root");
    let struct_name = to_pascal_case(name);
    output.push_str(&format!("/// Namespace for {} metrics\n", name));
    output.push_str("#[derive(Clone, Debug)]\n");
    output.push_str(&format!("pub struct {}Namespace {{\n", struct_name));
    // Field declarations.
    for (key, value) in obj {
        let safe_key = to_snake_case(&sanitize_name(key));
        match value {
            Value::String(_) => {
                output.push_str(&format!(" pub {}: String,\n", safe_key));
            }
            Value::Object(nested_map) => {
                let pattern = extract_pattern(nested_map);
                if let Some(pattern_class) = pattern_classes.get(&pattern) {
                    output.push_str(&format!(
                        " pub {}: {}Namespace,\n",
                        safe_key, pattern_class
                    ));
                } else {
                    let nested_struct = format!("{}{}", struct_name, to_pascal_case(key));
                    output.push_str(&format!(
                        " pub {}: {}Namespace,\n",
                        safe_key, nested_struct
                    ));
                }
            }
            _ => {}
        }
    }
    output.push_str("}\n\n");
    // Constructor body mirrors the declarations above.
    output.push_str(&format!("impl {}Namespace {{\n", struct_name));
    output.push_str(" fn new() -> Self {\n Self {\n");
    for (key, value) in obj {
        let safe_key = to_snake_case(&sanitize_name(key));
        match value {
            Value::String(metric_name) => {
                output.push_str(&format!(
                    " {}: \"{}/{}\".to_string(),\n",
                    safe_key, api_path, metric_name
                ));
            }
            Value::Object(nested_map) => {
                let pattern = extract_pattern(nested_map);
                if let Some(pattern_class) = pattern_classes.get(&pattern) {
                    output.push_str(&format!(
                        " {}: {}Namespace::new(\"{}\", \"{}\"),\n",
                        safe_key, pattern_class, api_path, key
                    ));
                } else {
                    let nested_struct = format!("{}{}", struct_name, to_pascal_case(key));
                    output.push_str(&format!(
                        " {}: {}Namespace::new(),\n",
                        safe_key, nested_struct
                    ));
                }
            }
            _ => {}
        }
    }
    output.push_str(" }\n }\n}\n\n");
}
/// Generate the full Rust module: crate-level doc comment, shared pattern
/// structs (shapes used 3+ times with 8+ fields), namespace structs, and the
/// top-level `BRKTree` struct with `new()` and `Default`.
fn generate_rust_client(tree: &Value) -> String {
    let mut output = String::new();
    output.push_str(
        r#"//! BRK API Tree - Auto-generated from config
//!
//! Each field is a String representing the API path + metric name.
//! Use these paths with your own HTTP client.
//!
//! DO NOT EDIT - This file is generated by codegen
"#,
    );
    let mut patterns: HashMap<Pattern, Vec<String>> = HashMap::new();
    find_patterns(tree, &mut patterns, String::new());
    let clusters = cluster_patterns(&patterns);
    let mut pattern_classes: HashMap<Pattern, String> = HashMap::new();
    let mut cluster_id = 0;
    for cluster in clusters.iter() {
        // Same frequency/width thresholds as the Python/TypeScript generators.
        let total_usage: usize = cluster.iter().map(|(_, paths)| paths.len()).sum();
        if total_usage >= 3 && cluster[0].0.field_count >= 8 {
            let (merged_pattern, _) = merge_patterns_in_cluster(cluster);
            let class_name = if merged_pattern.fields.iter().any(|f| f.contains("ratio")) {
                format!("RatioPattern{}", cluster_id)
            } else if merged_pattern.fields.iter().any(|f| f.contains("count")) {
                format!("CountPattern{}", cluster_id)
            } else {
                format!("CommonPattern{}", cluster_id)
            };
            output.push_str(&generate_rust_pattern_struct(
                &merged_pattern,
                &class_name,
                &cluster[0].1[0],
                tree,
            ));
            for (pattern, _) in cluster {
                pattern_classes.insert(pattern.clone(), class_name.clone());
            }
            cluster_id += 1;
        }
    }
    if let Value::Object(root) = tree {
        generate_rust_namespaces_recursive(root, tree, &pattern_classes, "", &mut output);
    }
    // Top-level BRKTree: one field per root key, filled in new().
    output.push_str("/// Main BRK API tree\n");
    output.push_str("#[derive(Clone, Debug)]\n");
    output.push_str("pub struct BRKTree {\n");
    if let Value::Object(root) = tree {
        for key in root.keys() {
            let struct_name = to_pascal_case(key);
            output.push_str(&format!(
                " pub {}: {}Namespace,\n",
                to_snake_case(key),
                struct_name
            ));
        }
    }
    output.push_str("}\n\nimpl BRKTree {\n pub fn new() -> Self {\n Self {\n");
    if let Value::Object(root) = tree {
        for key in root.keys() {
            let struct_name = to_pascal_case(key);
            output.push_str(&format!(
                " {}: {}Namespace::new(),\n",
                to_snake_case(key),
                struct_name
            ));
        }
    }
    output.push_str(" }\n }\n}\n\nimpl Default for BRKTree {\n fn default() -> Self {\n Self::new()\n }\n}\n");
    output
}
/// Entry point for the client-tree generator: loads the JSON config tree,
/// then emits the generated Python, TypeScript, and Rust client files into
/// the current working directory.
fn main() {
    // Load and parse the configuration tree up front; any failure here is fatal.
    let raw_config = fs::read_to_string("brk_config.json").expect("Failed to read config file");
    let tree: Value = serde_json::from_str(&raw_config).expect("Failed to parse JSON");

    // Python client tree.
    let generated_python = generate_python_client(&tree);
    fs::write("brk_tree_generated.py", generated_python).expect("Failed to write Python file");
    println!("✓ Generated brk_tree_generated.py");

    // TypeScript client tree.
    let generated_ts = generate_typescript_client(&tree);
    fs::write("brk_tree_generated.ts", generated_ts).expect("Failed to write TypeScript file");
    println!("✓ Generated brk_tree_generated.ts");

    // Rust client tree.
    let generated_rust = generate_rust_client(&tree);
    fs::write("brk_tree_generated.rs", generated_rust).expect("Failed to write Rust file");
    println!("✓ Generated brk_tree_generated.rs");
}

View File

@@ -70,7 +70,6 @@ impl PartialEq for PatternField {
impl Eq for PatternField {}
impl ClientMetadata {
/// Extract metadata from brk_query::Vecs
pub fn from_vecs(vecs: &Vecs) -> Self {
@@ -88,7 +87,9 @@ impl ClientMetadata {
/// Check if an index set matches a pattern
pub fn find_index_set_pattern(&self, indexes: &BTreeSet<Index>) -> Option<&IndexSetPattern> {
self.index_set_patterns.iter().find(|p| &p.indexes == indexes)
self.index_set_patterns
.iter()
.find(|p| &p.indexes == indexes)
}
/// Check if a type is a pattern (vs a primitive leaf type)
@@ -155,8 +156,12 @@ fn resolve_branch_patterns(
),
TreeNode::Branch(_) => {
// Branch: recursively get its pattern name
let pattern_name = resolve_branch_patterns(child_node, signature_to_pattern, signature_counts)
.unwrap_or_else(|| "Unknown".to_string());
let pattern_name = resolve_branch_patterns(
child_node,
signature_to_pattern,
signature_counts,
)
.unwrap_or_else(|| "Unknown".to_string());
(pattern_name.clone(), pattern_name, BTreeSet::new())
}
};
@@ -194,7 +199,12 @@ fn generate_pattern_name_from_fields(fields: &[PatternField]) -> String {
let raw_name = joined.join("_");
// Sanitize: ensure it starts with a letter (prepend "P_" if starts with digit)
let sanitized = if raw_name.chars().next().map(|c| c.is_ascii_digit()).unwrap_or(false) {
let sanitized = if raw_name
.chars()
.next()
.map(|c| c.is_ascii_digit())
.unwrap_or(false)
{
format!("P_{}", raw_name)
} else {
raw_name
@@ -228,11 +238,19 @@ pub fn get_node_fields(
),
TreeNode::Branch(grandchildren) => {
let child_fields = get_node_fields(grandchildren, pattern_lookup);
let pattern_name = pattern_lookup.get(&child_fields).cloned().unwrap_or_else(|| "Unknown".to_string());
let pattern_name = pattern_lookup
.get(&child_fields)
.cloned()
.unwrap_or_else(|| "Unknown".to_string());
(pattern_name.clone(), pattern_name, BTreeSet::new())
}
};
PatternField { name: name.clone(), rust_type, json_type, indexes }
PatternField {
name: name.clone(),
rust_type,
json_type,
indexes,
}
})
.collect();
fields.sort_by(|a, b| a.name.cmp(&b.name));
@@ -323,7 +341,7 @@ fn collect_indexes_from_tree(
index_sets.push(leaf.indexes().clone());
}
TreeNode::Branch(children) => {
for (_, child) in children {
for child in children.values() {
collect_indexes_from_tree(child, used_indexes, index_sets);
}
}
@@ -341,41 +359,3 @@ fn generate_index_set_name(indexes: &BTreeSet<Index>) -> String {
let names: Vec<&str> = indexes.iter().map(|i| i.serialize_long()).collect();
format!("{}Accessor", to_pascal_case(&names.join("_")))
}
/// Convert a serde_json::Value (JSON Schema) to a JSDoc type annotation.
///
/// Resolution order: an explicit `"type"` keyword wins, then `anyOf`/`oneOf`
/// unions, then a `$ref` (reduced to its final path segment). Anything else
/// falls back to the JSDoc wildcard `*`.
pub fn schema_to_jsdoc(schema: &serde_json::Value) -> String {
    // 1. Explicit "type" keyword.
    if let Some(ty) = schema.get("type").and_then(|v| v.as_str()) {
        return match ty {
            "null" => "null".to_string(),
            "boolean" => "boolean".to_string(),
            "integer" | "number" => "number".to_string(),
            "string" => "string".to_string(),
            "array" => match schema.get("items") {
                // Recurse into the item schema for typed arrays.
                Some(items) => format!("{}[]", schema_to_jsdoc(items)),
                None => "Array<*>".to_string(),
            },
            "object" => "Object".to_string(),
            _ => "*".to_string(),
        };
    }

    // 2. Union schemas: anyOf takes precedence over oneOf, rendered as "(a|b|c)".
    if let Some(union) = schema.get("anyOf").or_else(|| schema.get("oneOf")) {
        let joined = union
            .as_array()
            .map(|arr| {
                arr.iter()
                    .map(schema_to_jsdoc)
                    .collect::<Vec<_>>()
                    .join("|")
            })
            .unwrap_or_else(|| "*".to_string());
        return format!("({})", joined);
    }

    // 3. "$ref": keep only the last path segment as the type name.
    match schema.get("$ref").and_then(|v| v.as_str()) {
        Some(reference) => reference.rsplit('/').next().unwrap_or("*").to_string(),
        None => "*".to_string(),
    }
}

View File

@@ -482,7 +482,6 @@ where
self.first.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_average(&self) -> &EagerVec<PcoVec<I, T>> {
self.average.u()
}
@@ -495,27 +494,22 @@ where
self.max.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct90(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct90.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct75(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct75.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_median(&self) -> &EagerVec<PcoVec<I, T>> {
self.median.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct25(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct25.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct10(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct10.u()
}
@@ -528,7 +522,6 @@ where
self.last.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_cumulative(&self) -> &EagerVec<PcoVec<I, T>> {
self.cumulative.u()
}
@@ -701,7 +694,6 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn add_median(mut self) -> Self {
self.median = true;
self
@@ -717,25 +709,21 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn add_pct90(mut self) -> Self {
self.pct90 = true;
self
}
#[allow(unused)]
pub fn add_pct75(mut self) -> Self {
self.pct75 = true;
self
}
#[allow(unused)]
pub fn add_pct25(mut self) -> Self {
self.pct25 = true;
self
}
#[allow(unused)]
pub fn add_pct10(mut self) -> Self {
self.pct10 = true;
self
@@ -746,61 +734,51 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn rm_min(mut self) -> Self {
self.min = false;
self
}
#[allow(unused)]
pub fn rm_max(mut self) -> Self {
self.max = false;
self
}
#[allow(unused)]
pub fn rm_median(mut self) -> Self {
self.median = false;
self
}
#[allow(unused)]
pub fn rm_average(mut self) -> Self {
self.average = false;
self
}
#[allow(unused)]
pub fn rm_sum(mut self) -> Self {
self.sum = false;
self
}
#[allow(unused)]
pub fn rm_pct90(mut self) -> Self {
self.pct90 = false;
self
}
#[allow(unused)]
pub fn rm_pct75(mut self) -> Self {
self.pct75 = false;
self
}
#[allow(unused)]
pub fn rm_pct25(mut self) -> Self {
self.pct25 = false;
self
}
#[allow(unused)]
pub fn rm_pct10(mut self) -> Self {
self.pct10 = false;
self
}
#[allow(unused)]
pub fn rm_cumulative(mut self) -> Self {
self.cumulative = false;
self

View File

@@ -223,7 +223,6 @@ where
pub fn unwrap_first(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.first.u()
}
#[allow(unused)]
pub fn unwrap_average(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.average.u()
}
@@ -239,7 +238,6 @@ where
pub fn unwrap_last(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.last.u()
}
#[allow(unused)]
pub fn unwrap_cumulative(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.cumulative.u()
}
@@ -307,31 +305,26 @@ impl LazyVecBuilderOptions {
self
}
#[allow(unused)]
pub fn rm_min(mut self) -> Self {
self.min = false;
self
}
#[allow(unused)]
pub fn rm_max(mut self) -> Self {
self.max = false;
self
}
#[allow(unused)]
pub fn rm_average(mut self) -> Self {
self.average = false;
self
}
#[allow(unused)]
pub fn rm_sum(mut self) -> Self {
self.sum = false;
self
}
#[allow(unused)]
pub fn rm_cumulative(mut self) -> Self {
self.cumulative = false;
self

View File

@@ -129,7 +129,6 @@ where
})
}
// #[allow(unused)]
// pub fn compute_all<F>(
// &mut self,
// indexer: &Indexer,

View File

@@ -10,7 +10,9 @@ use brk_error::Result;
use brk_grouper::{
AmountFilter, ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount,
ByMaxAge, ByMinAge, BySpendableType, ByTerm, ByYear, Filter, Filtered, StateLevel, Term,
TimeFilter, UTXOGroups,
TimeFilter, UTXOGroups, DAYS_10Y, DAYS_12Y, DAYS_15Y, DAYS_1D, DAYS_1M, DAYS_1W, DAYS_1Y,
DAYS_2M, DAYS_2Y, DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y,
DAYS_7Y, DAYS_8Y,
};
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, HalvingEpoch, Height, OutputType, Sats, Version, Year};
@@ -111,68 +113,68 @@ impl UTXOCohorts {
},
max_age: ByMaxAge {
_1w: none(Filter::Time(TimeFilter::LowerThan(7)))?,
_1m: none(Filter::Time(TimeFilter::LowerThan(30)))?,
_2m: none(Filter::Time(TimeFilter::LowerThan(2 * 30)))?,
_3m: none(Filter::Time(TimeFilter::LowerThan(3 * 30)))?,
_4m: none(Filter::Time(TimeFilter::LowerThan(4 * 30)))?,
_5m: none(Filter::Time(TimeFilter::LowerThan(5 * 30)))?,
_6m: none(Filter::Time(TimeFilter::LowerThan(6 * 30)))?,
_1y: none(Filter::Time(TimeFilter::LowerThan(365)))?,
_2y: none(Filter::Time(TimeFilter::LowerThan(2 * 365)))?,
_3y: none(Filter::Time(TimeFilter::LowerThan(3 * 365)))?,
_4y: none(Filter::Time(TimeFilter::LowerThan(4 * 365)))?,
_5y: none(Filter::Time(TimeFilter::LowerThan(5 * 365)))?,
_6y: none(Filter::Time(TimeFilter::LowerThan(6 * 365)))?,
_7y: none(Filter::Time(TimeFilter::LowerThan(7 * 365)))?,
_8y: none(Filter::Time(TimeFilter::LowerThan(8 * 365)))?,
_10y: none(Filter::Time(TimeFilter::LowerThan(10 * 365)))?,
_12y: none(Filter::Time(TimeFilter::LowerThan(12 * 365)))?,
_15y: none(Filter::Time(TimeFilter::LowerThan(15 * 365)))?,
_1w: none(Filter::Time(TimeFilter::LowerThan(DAYS_1W)))?,
_1m: none(Filter::Time(TimeFilter::LowerThan(DAYS_1M)))?,
_2m: none(Filter::Time(TimeFilter::LowerThan(DAYS_2M)))?,
_3m: none(Filter::Time(TimeFilter::LowerThan(DAYS_3M)))?,
_4m: none(Filter::Time(TimeFilter::LowerThan(DAYS_4M)))?,
_5m: none(Filter::Time(TimeFilter::LowerThan(DAYS_5M)))?,
_6m: none(Filter::Time(TimeFilter::LowerThan(DAYS_6M)))?,
_1y: none(Filter::Time(TimeFilter::LowerThan(DAYS_1Y)))?,
_2y: none(Filter::Time(TimeFilter::LowerThan(DAYS_2Y)))?,
_3y: none(Filter::Time(TimeFilter::LowerThan(DAYS_3Y)))?,
_4y: none(Filter::Time(TimeFilter::LowerThan(DAYS_4Y)))?,
_5y: none(Filter::Time(TimeFilter::LowerThan(DAYS_5Y)))?,
_6y: none(Filter::Time(TimeFilter::LowerThan(DAYS_6Y)))?,
_7y: none(Filter::Time(TimeFilter::LowerThan(DAYS_7Y)))?,
_8y: none(Filter::Time(TimeFilter::LowerThan(DAYS_8Y)))?,
_10y: none(Filter::Time(TimeFilter::LowerThan(DAYS_10Y)))?,
_12y: none(Filter::Time(TimeFilter::LowerThan(DAYS_12Y)))?,
_15y: none(Filter::Time(TimeFilter::LowerThan(DAYS_15Y)))?,
},
min_age: ByMinAge {
_1d: none(Filter::Time(TimeFilter::GreaterOrEqual(1)))?,
_1w: none(Filter::Time(TimeFilter::GreaterOrEqual(7)))?,
_1m: none(Filter::Time(TimeFilter::GreaterOrEqual(30)))?,
_2m: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 30)))?,
_3m: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 30)))?,
_4m: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 30)))?,
_5m: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 30)))?,
_6m: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 30)))?,
_1y: none(Filter::Time(TimeFilter::GreaterOrEqual(365)))?,
_2y: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 365)))?,
_3y: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 365)))?,
_4y: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 365)))?,
_5y: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 365)))?,
_6y: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 365)))?,
_7y: none(Filter::Time(TimeFilter::GreaterOrEqual(7 * 365)))?,
_8y: none(Filter::Time(TimeFilter::GreaterOrEqual(8 * 365)))?,
_10y: none(Filter::Time(TimeFilter::GreaterOrEqual(10 * 365)))?,
_12y: none(Filter::Time(TimeFilter::GreaterOrEqual(12 * 365)))?,
_1d: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1D)))?,
_1w: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1W)))?,
_1m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1M)))?,
_2m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2M)))?,
_3m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3M)))?,
_4m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4M)))?,
_5m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5M)))?,
_6m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6M)))?,
_1y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1Y)))?,
_2y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2Y)))?,
_3y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3Y)))?,
_4y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4Y)))?,
_5y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5Y)))?,
_6y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6Y)))?,
_7y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_7Y)))?,
_8y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_8Y)))?,
_10y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_10Y)))?,
_12y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_12Y)))?,
},
age_range: ByAgeRange {
up_to_1d: full(Filter::Time(TimeFilter::Range(0..1)))?,
_1d_to_1w: full(Filter::Time(TimeFilter::Range(1..7)))?,
_1w_to_1m: full(Filter::Time(TimeFilter::Range(7..30)))?,
_1m_to_2m: full(Filter::Time(TimeFilter::Range(30..2 * 30)))?,
_2m_to_3m: full(Filter::Time(TimeFilter::Range(2 * 30..3 * 30)))?,
_3m_to_4m: full(Filter::Time(TimeFilter::Range(3 * 30..4 * 30)))?,
_4m_to_5m: full(Filter::Time(TimeFilter::Range(4 * 30..5 * 30)))?,
_5m_to_6m: full(Filter::Time(TimeFilter::Range(5 * 30..6 * 30)))?,
_6m_to_1y: full(Filter::Time(TimeFilter::Range(6 * 30..365)))?,
_1y_to_2y: full(Filter::Time(TimeFilter::Range(365..2 * 365)))?,
_2y_to_3y: full(Filter::Time(TimeFilter::Range(2 * 365..3 * 365)))?,
_3y_to_4y: full(Filter::Time(TimeFilter::Range(3 * 365..4 * 365)))?,
_4y_to_5y: full(Filter::Time(TimeFilter::Range(4 * 365..5 * 365)))?,
_5y_to_6y: full(Filter::Time(TimeFilter::Range(5 * 365..6 * 365)))?,
_6y_to_7y: full(Filter::Time(TimeFilter::Range(6 * 365..7 * 365)))?,
_7y_to_8y: full(Filter::Time(TimeFilter::Range(7 * 365..8 * 365)))?,
_8y_to_10y: full(Filter::Time(TimeFilter::Range(8 * 365..10 * 365)))?,
_10y_to_12y: full(Filter::Time(TimeFilter::Range(10 * 365..12 * 365)))?,
_12y_to_15y: full(Filter::Time(TimeFilter::Range(12 * 365..15 * 365)))?,
from_15y: full(Filter::Time(TimeFilter::GreaterOrEqual(15 * 365)))?,
up_to_1d: full(Filter::Time(TimeFilter::Range(0..DAYS_1D)))?,
_1d_to_1w: full(Filter::Time(TimeFilter::Range(DAYS_1D..DAYS_1W)))?,
_1w_to_1m: full(Filter::Time(TimeFilter::Range(DAYS_1W..DAYS_1M)))?,
_1m_to_2m: full(Filter::Time(TimeFilter::Range(DAYS_1M..DAYS_2M)))?,
_2m_to_3m: full(Filter::Time(TimeFilter::Range(DAYS_2M..DAYS_3M)))?,
_3m_to_4m: full(Filter::Time(TimeFilter::Range(DAYS_3M..DAYS_4M)))?,
_4m_to_5m: full(Filter::Time(TimeFilter::Range(DAYS_4M..DAYS_5M)))?,
_5m_to_6m: full(Filter::Time(TimeFilter::Range(DAYS_5M..DAYS_6M)))?,
_6m_to_1y: full(Filter::Time(TimeFilter::Range(DAYS_6M..DAYS_1Y)))?,
_1y_to_2y: full(Filter::Time(TimeFilter::Range(DAYS_1Y..DAYS_2Y)))?,
_2y_to_3y: full(Filter::Time(TimeFilter::Range(DAYS_2Y..DAYS_3Y)))?,
_3y_to_4y: full(Filter::Time(TimeFilter::Range(DAYS_3Y..DAYS_4Y)))?,
_4y_to_5y: full(Filter::Time(TimeFilter::Range(DAYS_4Y..DAYS_5Y)))?,
_5y_to_6y: full(Filter::Time(TimeFilter::Range(DAYS_5Y..DAYS_6Y)))?,
_6y_to_7y: full(Filter::Time(TimeFilter::Range(DAYS_6Y..DAYS_7Y)))?,
_7y_to_8y: full(Filter::Time(TimeFilter::Range(DAYS_7Y..DAYS_8Y)))?,
_8y_to_10y: full(Filter::Time(TimeFilter::Range(DAYS_8Y..DAYS_10Y)))?,
_10y_to_12y: full(Filter::Time(TimeFilter::Range(DAYS_10Y..DAYS_12Y)))?,
_12y_to_15y: full(Filter::Time(TimeFilter::Range(DAYS_12Y..DAYS_15Y)))?,
from_15y: full(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_15Y)))?,
},
amount_range: ByAmountRange {

View File

@@ -1,6 +1,5 @@
//! Processing received outputs (new UTXOs).
use brk_grouper::{Filter, Filtered};
use brk_types::{Dollars, Height, Timestamp};
use crate::stateful::states::Transacted;
@@ -37,16 +36,14 @@ impl UTXOCohorts {
});
// Update output type cohorts
self.type_.iter_mut().for_each(|vecs| {
let output_type = match vecs.filter() {
Filter::Type(output_type) => *output_type,
_ => unreachable!(),
};
vecs.state
.as_mut()
.unwrap()
.receive(received.by_type.get(output_type), price)
});
self.type_
.iter_typed_mut()
.for_each(|(output_type, vecs)| {
vecs.state
.as_mut()
.unwrap()
.receive(received.by_type.get(output_type), price)
});
// Update amount range cohorts
received

View File

@@ -1,7 +1,6 @@
//! Processing spent inputs (UTXOs being spent).
use brk_grouper::{Filter, Filtered, TimeFilter};
use brk_types::{CheckedSub, HalvingEpoch, Height, Year};
use brk_types::{CheckedSub, Height};
use rustc_hash::FxHashMap;
use vecdb::VecIndex;
@@ -26,15 +25,6 @@ impl UTXOCohorts {
return;
}
// Time-based cohorts: age_range + epoch + year
let mut time_cohorts: Vec<_> = self
.0
.age_range
.iter_mut()
.chain(self.0.epoch.iter_mut())
.chain(self.0.year.iter_mut())
.collect();
let last_block = chain_state.last().unwrap();
let last_timestamp = last_block.timestamp;
let current_price = last_block.price;
@@ -55,27 +45,45 @@ impl UTXOCohorts {
.unwrap()
.is_more_than_hour();
// Update time-based cohorts
time_cohorts
.iter_mut()
.filter(|v| match v.filter() {
Filter::Time(TimeFilter::GreaterOrEqual(from)) => *from <= days_old,
Filter::Time(TimeFilter::LowerThan(to)) => *to > days_old,
Filter::Time(TimeFilter::Range(range)) => range.contains(&days_old),
Filter::Epoch(e) => *e == HalvingEpoch::from(height),
Filter::Year(y) => *y == Year::from(block_state.timestamp),
_ => unreachable!(),
})
.for_each(|vecs| {
vecs.state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
});
// Update age range cohort (direct index lookup)
self.0
.age_range
.get_mut_by_days_old(days_old)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update epoch cohort (direct lookup by height)
self.0.epoch.mut_vec_from_height(height).state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update year cohort (direct lookup by timestamp)
self.0
.year
.mut_vec_from_timestamp(block_state.timestamp)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update output type cohorts
sent.by_type

View File

@@ -15,6 +15,7 @@ use brk_grouper::ByAddressType;
use brk_indexer::Indexer;
use brk_types::{DateIndex, Height, OutputType, Sats, TypeIndex};
use log::info;
use rayon::prelude::*;
use vecdb::{Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex};
use crate::{
@@ -420,7 +421,8 @@ pub fn process_blocks(
});
// Main thread: Update UTXO cohorts
vecs.utxo_cohorts.receive(transacted, height, timestamp, block_price);
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
vecs.utxo_cohorts.send(height_to_sent, chain_state);
});
@@ -542,14 +544,14 @@ fn push_cohort_states(
dateindex: Option<DateIndex>,
date_price: Option<Option<brk_types::Dollars>>,
) -> Result<()> {
utxo_cohorts.iter_separate_mut().try_for_each(|v| {
// utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {
// utxo_cohorts.iter_separate_mut().try_for_each(|v| {
utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price)
})?;
address_cohorts.iter_separate_mut().try_for_each(|v| {
// address_cohorts.par_iter_separate_mut().try_for_each(|v| {
// address_cohorts.iter_separate_mut().try_for_each(|v| {
address_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price)
})?;

View File

@@ -151,6 +151,9 @@ impl CohortMetrics {
date_price: Option<Option<Dollars>>,
state: &mut CohortState,
) -> Result<()> {
// Apply pending updates before reading
state.apply_pending();
if let (Some(unrealized), Some(price_paid), Some(height_price)) = (
self.unrealized.as_mut(),
self.price_paid.as_mut(),
@@ -248,6 +251,14 @@ impl CohortMetrics {
realized.compute_rest_part1(indexes, price, starting_indexes, exit)?;
}
if let Some(unrealized) = self.unrealized.as_mut() {
unrealized.compute_rest_part1(price, starting_indexes, exit)?;
}
if let Some(price_paid) = self.price_paid.as_mut() {
price_paid.compute_rest_part1(indexes, starting_indexes, exit)?;
}
Ok(())
}
@@ -277,6 +288,18 @@ impl CohortMetrics {
exit,
)?;
if let Some(realized) = self.realized.as_mut() {
realized.compute_rest_part2(
indexes,
price,
starting_indexes,
height_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
exit,
)?;
}
if let Some(relative) = self.relative.as_mut() {
relative.compute_rest_part2(
indexes,

View File

@@ -154,4 +154,28 @@ impl PricePaidMetrics {
)?;
Ok(())
}
/// First phase of computed metrics (indexes from height).
///
/// Fills the index-level min/max price-paid vectors by calling `compute_rest`
/// with the corresponding height-level vector as the source.
///
/// # Errors
/// Propagates any error returned by the underlying `compute_rest` calls.
pub fn compute_rest_part1(
    &mut self,
    indexes: &crate::indexes::Vecs,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // Derive index-level min price paid from the height-level source vec.
    self.indexes_to_min_price_paid.compute_rest(
        indexes,
        starting_indexes,
        exit,
        Some(&self.height_to_min_price_paid),
    )?;
    // Same derivation for the max price paid.
    self.indexes_to_max_price_paid.compute_rest(
        indexes,
        starting_indexes,
        exit,
        Some(&self.height_to_max_price_paid),
    )?;
    Ok(())
}
}

View File

@@ -4,8 +4,8 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use vecdb::{AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec};
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use vecdb::{AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec};
use crate::{
Indexes,
@@ -565,6 +565,50 @@ impl RealizedMetrics {
Some(&self.height_to_realized_loss),
)?;
// neg_realized_loss = realized_loss * -1
self.indexes_to_neg_realized_loss
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&self.height_to_realized_loss,
|(i, v, ..)| (i, v * -1_i64),
exit,
)?;
Ok(())
})?;
// net_realized_pnl = profit - loss
self.indexes_to_net_realized_pnl
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
Ok(())
})?;
// realized_value = profit + loss
self.indexes_to_realized_value
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
Ok(())
})?;
// total_realized_pnl at height level = profit + loss
self.height_to_total_realized_pnl.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
self.indexes_to_value_created.compute_rest(
indexes,
starting_indexes,
@@ -579,6 +623,265 @@ impl RealizedMetrics {
Some(&self.height_to_value_destroyed),
)?;
// Optional: adjusted value
if let Some(adjusted_value_created) = self.indexes_to_adjusted_value_created.as_mut() {
adjusted_value_created.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_created.as_ref(),
)?;
}
if let Some(adjusted_value_destroyed) = self.indexes_to_adjusted_value_destroyed.as_mut() {
adjusted_value_destroyed.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_destroyed.as_ref(),
)?;
}
Ok(())
}
/// Second phase of computed metrics (realized price from realized cap / supply).
///
/// Derives downstream vectors from the vectors filled earlier: realized price
/// (realized cap / supply), realized-cap 30d delta, total realized PnL, SOPR
/// plus 7d/30d EMAs, sell-side risk ratio plus EMAs, and several
/// relative-to-realized-cap / relative-to-market-cap ratios. Optional inputs
/// (`price`, `height_to_market_cap`, `dateindex_to_market_cap`) gate the
/// corresponding optional outputs.
///
/// # Errors
/// Propagates any error from the underlying `compute_*` vector operations.
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2(
    &mut self,
    indexes: &indexes::Vecs,
    price: Option<&price::Vecs>,
    starting_indexes: &Indexes,
    height_to_supply: &impl IterableVec<Height, Bitcoin>,
    height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
    dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
    exit: &Exit,
) -> Result<()> {
    // realized_price = realized_cap / supply
    self.indexes_to_realized_price
        .compute_all(indexes, starting_indexes, exit, |vec| {
            vec.compute_divide(
                starting_indexes.height,
                &self.height_to_realized_cap,
                height_to_supply,
                exit,
            )?;
            Ok(())
        })?;
    // Extra realized-price metrics only make sense when a price source exists.
    if let Some(price) = price {
        self.indexes_to_realized_price_extra.compute_rest(
            price,
            starting_indexes,
            exit,
            Some(self.indexes_to_realized_price.dateindex.unwrap_last()),
        )?;
    }
    // realized_cap_30d_delta
    self.indexes_to_realized_cap_30d_delta
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_change(
                starting_indexes.dateindex,
                self.indexes_to_realized_cap.dateindex.unwrap_last(),
                30,
                exit,
            )?;
            Ok(())
        })?;
    // total_realized_pnl at dateindex level
    self.indexes_to_total_realized_pnl
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_add(
                starting_indexes.dateindex,
                self.indexes_to_realized_profit.dateindex.unwrap_sum(),
                self.indexes_to_realized_loss.dateindex.unwrap_sum(),
                exit,
            )?;
            Ok(())
        })?;
    // SOPR = value_created / value_destroyed
    self.dateindex_to_sopr.compute_divide(
        starting_indexes.dateindex,
        self.indexes_to_value_created.dateindex.unwrap_sum(),
        self.indexes_to_value_destroyed.dateindex.unwrap_sum(),
        exit,
    )?;
    self.dateindex_to_sopr_7d_ema.compute_ema(
        starting_indexes.dateindex,
        &self.dateindex_to_sopr,
        7,
        exit,
    )?;
    self.dateindex_to_sopr_30d_ema.compute_ema(
        starting_indexes.dateindex,
        &self.dateindex_to_sopr,
        30,
        exit,
    )?;
    // Optional: adjusted SOPR (requires all three adjusted vecs to be present).
    if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = (
        self.dateindex_to_adjusted_sopr.as_mut(),
        self.indexes_to_adjusted_value_created.as_ref(),
        self.indexes_to_adjusted_value_destroyed.as_ref(),
    ) {
        adjusted_sopr.compute_divide(
            starting_indexes.dateindex,
            adj_created.dateindex.unwrap_sum(),
            adj_destroyed.dateindex.unwrap_sum(),
            exit,
        )?;
        if let Some(ema_7d) = self.dateindex_to_adjusted_sopr_7d_ema.as_mut() {
            ema_7d.compute_ema(
                starting_indexes.dateindex,
                self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
                7,
                exit,
            )?;
        }
        if let Some(ema_30d) = self.dateindex_to_adjusted_sopr_30d_ema.as_mut() {
            ema_30d.compute_ema(
                starting_indexes.dateindex,
                self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
                30,
                exit,
            )?;
        }
    }
    // sell_side_risk_ratio = realized_value / realized_cap
    self.dateindex_to_sell_side_risk_ratio.compute_percentage(
        starting_indexes.dateindex,
        self.indexes_to_realized_value.dateindex.unwrap_sum(),
        self.indexes_to_realized_cap.dateindex.unwrap_last(),
        exit,
    )?;
    self.dateindex_to_sell_side_risk_ratio_7d_ema.compute_ema(
        starting_indexes.dateindex,
        &self.dateindex_to_sell_side_risk_ratio,
        7,
        exit,
    )?;
    self.dateindex_to_sell_side_risk_ratio_30d_ema.compute_ema(
        starting_indexes.dateindex,
        &self.dateindex_to_sell_side_risk_ratio,
        30,
        exit,
    )?;
    // Ratios relative to realized cap
    self.indexes_to_realized_profit_rel_to_realized_cap
        .compute_all(indexes, starting_indexes, exit, |vec| {
            vec.compute_percentage(
                starting_indexes.height,
                &self.height_to_realized_profit,
                &self.height_to_realized_cap,
                exit,
            )?;
            Ok(())
        })?;
    self.indexes_to_realized_loss_rel_to_realized_cap
        .compute_all(indexes, starting_indexes, exit, |vec| {
            vec.compute_percentage(
                starting_indexes.height,
                &self.height_to_realized_loss,
                &self.height_to_realized_cap,
                exit,
            )?;
            Ok(())
        })?;
    self.indexes_to_net_realized_pnl_rel_to_realized_cap
        .compute_all(indexes, starting_indexes, exit, |vec| {
            vec.compute_percentage(
                starting_indexes.height,
                self.indexes_to_net_realized_pnl.height.u(),
                &self.height_to_realized_cap,
                exit,
            )?;
            Ok(())
        })?;
    // Net realized PnL cumulative 30d delta
    self.indexes_to_net_realized_pnl_cumulative_30d_delta
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_change(
                starting_indexes.dateindex,
                self.indexes_to_net_realized_pnl
                    .dateindex
                    .unwrap_cumulative(),
                30,
                exit,
            )?;
            Ok(())
        })?;
    // Relative to realized cap
    self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_percentage(
                starting_indexes.dateindex,
                self.indexes_to_net_realized_pnl_cumulative_30d_delta
                    .dateindex
                    .u(),
                self.indexes_to_realized_cap.dateindex.unwrap_last(),
                exit,
            )?;
            Ok(())
        })?;
    // Relative to market cap (only when a dateindex market-cap source exists).
    if let Some(dateindex_to_market_cap) = dateindex_to_market_cap {
        self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap
            .compute_all(starting_indexes, exit, |vec| {
                vec.compute_percentage(
                    starting_indexes.dateindex,
                    self.indexes_to_net_realized_pnl_cumulative_30d_delta
                        .dateindex
                        .u(),
                    dateindex_to_market_cap,
                    exit,
                )?;
                Ok(())
            })?;
    }
    // Optional: realized_cap_rel_to_own_market_cap
    if let (Some(rel_vec), Some(height_to_market_cap)) = (
        self.indexes_to_realized_cap_rel_to_own_market_cap.as_mut(),
        height_to_market_cap,
    ) {
        rel_vec.compute_all(indexes, starting_indexes, exit, |vec| {
            vec.compute_percentage(
                starting_indexes.height,
                &self.height_to_realized_cap,
                height_to_market_cap,
                exit,
            )?;
            Ok(())
        })?;
    }
    // Optional: realized_profit_to_loss_ratio
    if let Some(ratio) = self.dateindex_to_realized_profit_to_loss_ratio.as_mut() {
        ratio.compute_divide(
            starting_indexes.dateindex,
            self.indexes_to_realized_profit.dateindex.unwrap_sum(),
            self.indexes_to_realized_loss.dateindex.unwrap_sum(),
            exit,
        )?;
    }
    Ok(())
}
}

View File

@@ -452,58 +452,67 @@ impl RelativeMetrics {
// === Supply in Profit/Loss Relative to Own Supply ===
if let Some(unrealized) = unrealized {
self.height_to_supply_in_profit_rel_to_own_supply.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_profit_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_loss_rel_to_own_supply.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_loss_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_profit_rel_to_own_supply
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_profit_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_loss_rel_to_own_supply
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_loss_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.indexes_to_supply_in_profit_rel_to_own_supply.compute_all(
starting_indexes,
exit,
|v| {
if let Some(dateindex_vec) = unrealized.indexes_to_supply_in_profit.bitcoin.dateindex.as_ref()
&& let Some(supply_dateindex) = supply.indexes_to_supply.bitcoin.dateindex.as_ref() {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
self.indexes_to_supply_in_profit_rel_to_own_supply
.compute_all(starting_indexes, exit, |v| {
if let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_profit
.bitcoin
.dateindex
.as_ref()
&& let Some(supply_dateindex) =
supply.indexes_to_supply.bitcoin.dateindex.as_ref()
{
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
Ok(())
},
)?;
})?;
self.indexes_to_supply_in_loss_rel_to_own_supply.compute_all(
starting_indexes,
exit,
|v| {
if let Some(dateindex_vec) = unrealized.indexes_to_supply_in_loss.bitcoin.dateindex.as_ref()
&& let Some(supply_dateindex) = supply.indexes_to_supply.bitcoin.dateindex.as_ref() {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
self.indexes_to_supply_in_loss_rel_to_own_supply
.compute_all(starting_indexes, exit, |v| {
if let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_loss
.bitcoin
.dateindex
.as_ref()
&& let Some(supply_dateindex) =
supply.indexes_to_supply.bitcoin.dateindex.as_ref()
{
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
Ok(())
},
)?;
})?;
}
// === Supply in Profit/Loss Relative to Circulating Supply ===
if let (Some(unrealized), Some(v)) = (
unrealized,
self.height_to_supply_in_profit_rel_to_circulating_supply.as_mut(),
self.height_to_supply_in_profit_rel_to_circulating_supply
.as_mut(),
) {
v.compute_percentage(
starting_indexes.height,
@@ -514,7 +523,8 @@ impl RelativeMetrics {
}
if let (Some(unrealized), Some(v)) = (
unrealized,
self.height_to_supply_in_loss_rel_to_circulating_supply.as_mut(),
self.height_to_supply_in_loss_rel_to_circulating_supply
.as_mut(),
) {
v.compute_percentage(
starting_indexes.height,
@@ -526,71 +536,398 @@ impl RelativeMetrics {
// === Unrealized vs Market Cap ===
if let (Some(unrealized), Some(height_to_mc)) = (unrealized, height_to_market_cap) {
self.height_to_unrealized_profit_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
height_to_mc,
exit,
)?;
self.height_to_unrealized_loss_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_neg_unrealized_loss_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_net_unrealized_pnl_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
height_to_mc,
exit,
)?;
self.height_to_unrealized_profit_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
height_to_mc,
exit,
)?;
self.height_to_unrealized_loss_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_neg_unrealized_loss_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_net_unrealized_pnl_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
height_to_mc,
exit,
)?;
}
if let Some(dateindex_to_mc) = dateindex_to_market_cap
&& let Some(unrealized) = unrealized {
self.indexes_to_unrealized_profit_rel_to_market_cap.compute_all(
starting_indexes,
exit,
|v| {
&& let Some(unrealized) = unrealized
{
self.indexes_to_unrealized_profit_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
self.indexes_to_unrealized_loss_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
// indexes_to_neg_unrealized_loss_rel_to_market_cap
if let Some(dateindex_to_mc) = dateindex_to_market_cap
&& let Some(unrealized) = unrealized
{
if let Some(dateindex_vec) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
self.indexes_to_neg_unrealized_loss_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
},
})?;
}
if let Some(dateindex_vec) = unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
self.indexes_to_net_unrealized_pnl_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
}
// === Supply in Profit/Loss Relative to Circulating Supply (indexes) ===
if let Some(v) = self
.indexes_to_supply_in_profit_rel_to_circulating_supply
.as_mut()
&& let Some(unrealized) = unrealized
&& let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_profit
.bitcoin
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_supply,
exit,
)?;
self.indexes_to_unrealized_loss_rel_to_market_cap.compute_all(
starting_indexes,
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_supply_in_loss_rel_to_circulating_supply
.as_mut()
&& let Some(unrealized) = unrealized
&& let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_loss
.bitcoin
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_supply,
exit,
)?;
Ok(())
})?;
}
// === Unrealized vs Own Market Cap ===
// own_market_cap = supply_value.dollars
if let Some(unrealized) = unrealized {
if let Some(v) = self
.height_to_unrealized_profit_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
supply_dollars,
exit,
|v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
dateindex_to_mc,
exit,
)?;
Ok(())
},
)?;
}
// TODO: Remaining relative metrics to implement:
// - indexes_to_supply_in_profit/loss_rel_to_circulating_supply
// - height_to_unrealized_*_rel_to_own_market_cap
// - height_to_unrealized_*_rel_to_own_total_unrealized_pnl
// - indexes_to_unrealized_*_rel_to_own_market_cap
// - indexes_to_unrealized_*_rel_to_own_total_unrealized_pnl
// See stateful/common/compute.rs for patterns.
// indexes versions
if let Some(v) = self
.indexes_to_unrealized_profit_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
&& let Some(net_pnl_dateindex) =
unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
net_pnl_dateindex,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
// === Unrealized vs Own Total Unrealized PnL ===
if let Some(v) = self
.height_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
// indexes versions for own total unrealized pnl
if let Some(v) = self
.indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(net_pnl_dateindex) =
unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
net_pnl_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
}
let _ = dateindex_to_supply;
Ok(())
}
}

View File

@@ -304,4 +304,112 @@ impl UnrealizedMetrics {
)?;
Ok(())
}
/// First phase of computed metrics.
///
/// Derives downstream series from the already-populated base vecs:
/// dollar-valued supply, per-index roll-ups from the dateindex sources,
/// and the total / negated / net unrealized PnL series.
///
/// Parameters:
/// - `price`: optional price vecs, forwarded to `compute_rest` for the
///   supply-value computations (bitcoin + dollars).
/// - `starting_indexes`: height/dateindex positions to (re)compute from.
/// - `exit`: exit handle threaded into every compute call
///   (semantics defined by `Exit`; presumably cooperative shutdown — confirm).
pub fn compute_rest_part1(
    &mut self,
    price: Option<&crate::price::Vecs>,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // Compute supply value (bitcoin + dollars) from sats
    self.height_to_supply_in_profit_value.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.height_to_supply_in_profit),
    )?;
    self.height_to_supply_in_loss_value.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.height_to_supply_in_loss),
    )?;
    // Compute indexes from dateindex sources
    self.indexes_to_supply_in_profit.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.dateindex_to_supply_in_profit),
    )?;
    self.indexes_to_supply_in_loss.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.dateindex_to_supply_in_loss),
    )?;
    // Unrealized roll-ups take no price input (already dollar-denominated).
    self.indexes_to_unrealized_profit.compute_rest(
        starting_indexes,
        exit,
        Some(&self.dateindex_to_unrealized_profit),
    )?;
    self.indexes_to_unrealized_loss.compute_rest(
        starting_indexes,
        exit,
        Some(&self.dateindex_to_unrealized_loss),
    )?;
    // total_unrealized_pnl = profit + loss
    self.height_to_total_unrealized_pnl.compute_add(
        starting_indexes.height,
        &self.height_to_unrealized_profit,
        &self.height_to_unrealized_loss,
        exit,
    )?;
    self.indexes_to_total_unrealized_pnl
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_add(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    // neg_unrealized_loss = loss * -1
    self.height_to_neg_unrealized_loss.compute_transform(
        starting_indexes.height,
        &self.height_to_unrealized_loss,
        |(h, v, ..)| (h, v * -1_i64),
        exit,
    )?;
    self.indexes_to_neg_unrealized_loss
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_transform(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_loss,
                |(h, v, ..)| (h, v * -1_i64),
                exit,
            )?;
            Ok(())
        })?;
    // net_unrealized_pnl = profit - loss
    self.height_to_net_unrealized_pnl.compute_subtract(
        starting_indexes.height,
        &self.height_to_unrealized_profit,
        &self.height_to_unrealized_loss,
        exit,
    )?;
    self.indexes_to_net_unrealized_pnl
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_subtract(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    Ok(())
}
}

View File

@@ -3,10 +3,16 @@
//! Accumulates address data across blocks within a flush interval.
//! Data is flushed to disk at checkpoints.
use brk_types::{OutputType, TypeIndex};
use brk_grouper::ByAddressType;
use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, TypeIndex};
use vecdb::GenericStoredVec;
use super::super::address::AddressTypeToTypeIndexMap;
use super::{AddressLookup, EmptyAddressDataWithSource, LoadedAddressDataWithSource, TxIndexVec};
use super::super::address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs};
use super::super::compute::VecsReaders;
use super::{
AddressLookup, EmptyAddressDataWithSource, LoadedAddressDataWithSource, TxIndexVec,
WithAddressDataSource,
};
/// Cache for address data within a flush interval.
pub struct AddressCache {
@@ -75,3 +81,49 @@ impl AddressCache {
)
}
}
/// Load address data from storage or create new.
///
/// Returns None if address is already in cache (loaded or empty).
#[allow(clippy::too_many_arguments)]
pub fn load_uncached_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Addresses whose typeindex is at or past this block's first index for
    // the type are brand new: nothing is stored for them yet.
    let first_seen = *first_addressindexes.get(address_type).unwrap();
    if typeindex >= first_seen {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // The caller should reuse the cached entry instead of reloading it.
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Resolve the stored index for this address, then fetch the record
    // from whichever backing vec (loaded or empty) it lives in.
    let stored_index =
        any_address_indexes.get(address_type, typeindex, vr.address_reader(address_type));
    let resolved = match stored_index.to_enum() {
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, &vr.anyaddressindex_to_anyaddressdata.loaded);
            WithAddressDataSource::FromLoaded(loaded_index, data)
        }
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, &vr.anyaddressindex_to_anyaddressdata.empty);
            WithAddressDataSource::FromEmpty(empty_index, data.into())
        }
    };
    Some(resolved)
}

View File

@@ -5,10 +5,7 @@
//! - Address data for address cohort tracking (optional)
use brk_grouper::ByAddressType;
use brk_types::{
AnyAddressDataIndexEnum, Height, LoadedAddressData, OutPoint, OutputType, Sats, TxInIndex,
TxIndex, TxOutIndex, TypeIndex,
};
use brk_types::{Height, OutPoint, OutputType, Sats, TxInIndex, TxIndex, TxOutIndex, TypeIndex};
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use vecdb::{BytesVec, GenericStoredVec};
@@ -18,12 +15,10 @@ use crate::stateful::address::{
};
use crate::stateful::compute::VecsReaders;
use crate::stateful::states::Transacted;
use super::AddressCache;
use crate::stateful::{IndexerReaders, process::RangeMap};
use super::super::address::HeightToAddressTypeToVec;
use super::{LoadedAddressDataWithSource, TxIndexVec, WithAddressDataSource};
use super::{load_uncached_address_data, AddressCache, LoadedAddressDataWithSource, TxIndexVec};
/// Result of processing inputs for a block.
pub struct InputsResult {
@@ -102,7 +97,7 @@ pub fn process_inputs(
txoutindex_to_typeindex.read_unwrap(txoutindex, &ir.txoutindex_to_typeindex);
// Look up address data
let addr_data_opt = get_address_data(
let addr_data_opt = load_uncached_address_data(
input_type,
typeindex,
first_addressindexes,
@@ -176,48 +171,3 @@ pub fn process_inputs(
}
}
/// Look up address data from storage or determine if new.
///
/// Returns None if address is already in loaded or empty cache.
///
/// Resolution order:
/// 1. `typeindex >= first` for this address type => brand-new address;
///    return a default `LoadedAddressData` tagged `New`.
/// 2. Already present in `cache` => `None` (caller uses the cached entry).
/// 3. Otherwise resolve the stored index and fetch the loaded/empty record.
///
/// # Panics
/// Panics if `first_addressindexes` has no entry for `address_type`
/// (the `unwrap` below).
#[allow(clippy::too_many_arguments)]
fn get_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Check if this is a new address (typeindex >= first for this height)
    let first = *first_addressindexes.get(address_type).unwrap();
    if first <= typeindex {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // Skip if already in cache
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Read from storage
    // NOTE(review): `get_pushed_or_read_unwrap` presumably panics if the
    // record is missing — confirm against its definition.
    let reader = vr.address_reader(address_type);
    let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader);
    Some(match anyaddressindex.to_enum() {
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.loaded;
            let loaded_data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, reader);
            WithAddressDataSource::FromLoaded(loaded_index, loaded_data)
        }
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.empty;
            let empty_data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, reader);
            // Empty records are converted into the loaded representation.
            WithAddressDataSource::FromEmpty(empty_index, empty_data.into())
        }
    })
}

View File

@@ -5,8 +5,7 @@
//! - Address data for address cohort tracking (optional)
use brk_grouper::ByAddressType;
use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, Sats, TxIndex, TypeIndex};
use vecdb::GenericStoredVec;
use brk_types::{OutputType, Sats, TxIndex, TypeIndex};
use crate::stateful::address::{
AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs,
@@ -15,7 +14,7 @@ use crate::stateful::compute::VecsReaders;
use crate::stateful::states::Transacted;
use super::super::address::AddressTypeToVec;
use super::{AddressCache, LoadedAddressDataWithSource, TxIndexVec, WithAddressDataSource};
use super::{load_uncached_address_data, AddressCache, LoadedAddressDataWithSource, TxIndexVec};
/// Result of processing outputs for a block.
pub struct OutputsResult {
@@ -79,7 +78,7 @@ pub fn process_outputs(
.unwrap()
.push((typeindex, value));
let addr_data_opt = get_address_data(
let addr_data_opt = load_uncached_address_data(
output_type,
typeindex,
first_addressindexes,
@@ -108,49 +107,3 @@ pub fn process_outputs(
txindex_vecs,
}
}
/// Look up address data from storage or determine if new.
///
/// Returns None if address is already in loaded or empty cache.
///
/// Resolution order:
/// 1. `typeindex >= first` for this address type => brand-new address;
///    return a default `LoadedAddressData` tagged `New`.
/// 2. Already present in `cache` => `None` (caller uses the cached entry).
/// 3. Otherwise resolve the stored index and fetch the loaded/empty record.
///
/// # Panics
/// Panics if `first_addressindexes` has no entry for `address_type`
/// (the `unwrap` below).
#[allow(clippy::too_many_arguments)]
fn get_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Check if this is a new address (typeindex >= first for this height)
    let first = *first_addressindexes.get(address_type).unwrap();
    if first <= typeindex {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // Skip if already in cache
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Read from storage
    // NOTE(review): `get_pushed_or_read_unwrap` presumably panics if the
    // record is missing — confirm against its definition.
    let reader = vr.address_reader(address_type);
    let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader);
    Some(match anyaddressindex.to_enum() {
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.loaded;
            let loaded_data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, reader);
            WithAddressDataSource::FromLoaded(loaded_index, loaded_data)
        }
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.empty;
            let empty_data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, reader);
            // Empty records are converted into the loaded representation.
            WithAddressDataSource::FromEmpty(empty_index, empty_data.into())
        }
    })
}
}

View File

@@ -1,6 +1,6 @@
//! Process received outputs for address cohorts.
use brk_grouper::{AmountBucket, ByAddressType};
use brk_grouper::{amounts_in_different_buckets, ByAddressType};
use brk_types::{Dollars, Sats, TypeIndex};
use rustc_hash::FxHashMap;
@@ -60,7 +60,7 @@ pub fn process_received(
let prev_balance = addr_data.balance();
let new_balance = prev_balance + total_value;
if AmountBucket::from(prev_balance) != AmountBucket::from(new_balance) {
if amounts_in_different_buckets(prev_balance, new_balance) {
// Crossing cohort boundary - subtract from old, add to new
let cohort_state = cohorts
.amount_range

View File

@@ -6,7 +6,7 @@
//! - Age metrics (blocks_old, days_old) are tracked for sent UTXOs
use brk_error::Result;
use brk_grouper::{ByAddressType, Filtered};
use brk_grouper::{amounts_in_different_buckets, ByAddressType};
use brk_types::{CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex};
use vecdb::{VecIndex, unlikely};
@@ -57,11 +57,9 @@ pub fn process_sent(
let will_be_empty = addr_data.has_1_utxos();
// Check if crossing cohort boundary
let prev_cohort = cohorts.amount_range.get(prev_balance);
let new_cohort = cohorts.amount_range.get(new_balance);
let filters_differ = prev_cohort.filter() != new_cohort.filter();
let crossing_boundary = amounts_in_different_buckets(prev_balance, new_balance);
if will_be_empty || filters_differ {
if will_be_empty || crossing_boundary {
// Subtract from old cohort
let cohort_state = cohorts
.amount_range
@@ -78,7 +76,7 @@ pub fn process_sent(
"process_sent: cohort underflow detected!\n\
Block context: prev_height={:?}, output_type={:?}, type_index={:?}\n\
prev_balance={}, new_balance={}, value={}\n\
will_be_empty={}, filters_differ={}\n\
will_be_empty={}, crossing_boundary={}\n\
Address: {:?}",
prev_height,
output_type,
@@ -87,7 +85,7 @@ pub fn process_sent(
new_balance,
value,
will_be_empty,
filters_differ,
crossing_boundary,
addr_data
);
}

View File

@@ -7,7 +7,7 @@ use std::path::Path;
use brk_error::Result;
use brk_types::{Dollars, Height, Sats};
use crate::{grouped::PERCENTILES_LEN, utils::OptionExt};
use crate::grouped::PERCENTILES_LEN;
use super::{CachedUnrealizedState, PriceToAmount, RealizedState, SupplyState, UnrealizedState};
@@ -72,14 +72,21 @@ impl CohortState {
Ok(())
}
/// Apply pending price_to_amount updates. Must be called before reads.
///
/// No-op when this cohort tracks no price distribution
/// (`price_to_amount` is `None`).
pub fn apply_pending(&mut self) {
    if let Some(p) = self.price_to_amount.as_mut() {
        // Delegates to the distribution's own apply_pending; presumably
        // flushes queued mutations so reads see a consistent view — confirm
        // against PriceToAmount.
        p.apply_pending();
    }
}
/// Get first (lowest) price entry in distribution.
pub fn price_to_amount_first_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.price_to_amount.u().first_key_value()
self.price_to_amount.as_ref()?.first_key_value()
}
/// Get last (highest) price entry in distribution.
pub fn price_to_amount_last_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.price_to_amount.u().last_key_value()
self.price_to_amount.as_ref()?.last_key_value()
}
/// Reset per-block values before processing next block.
@@ -319,7 +326,6 @@ impl CohortState {
}
/// Compute prices at percentile thresholds.
/// Uses O(19 * log n) Fenwick tree queries instead of O(n) iteration.
pub fn compute_percentile_prices(&self) -> [Dollars; PERCENTILES_LEN] {
match self.price_to_amount.as_ref() {
Some(p) if !p.is_empty() => p.compute_percentiles(),
@@ -344,6 +350,11 @@ impl CohortState {
}
};
// Date unrealized: compute from scratch (only at date boundaries, ~144x less frequent)
let date_state = date_price.map(|date_price| {
CachedUnrealizedState::compute_full_standalone(date_price, price_to_amount)
});
// Height unrealized: use incremental cache (O(k) where k = flip range)
let height_state = if let Some(cache) = self.cached_unrealized.as_mut() {
cache.get_at_price(height_price, price_to_amount).clone()
@@ -354,11 +365,6 @@ impl CohortState {
state
};
// Date unrealized: compute from scratch (only at date boundaries, ~144x less frequent)
let date_state = date_price.map(|date_price| {
CachedUnrealizedState::compute_full_standalone(date_price, price_to_amount)
});
(height_state, date_state)
}
@@ -371,19 +377,19 @@ impl CohortState {
}
/// Get first (lowest) price in distribution.
pub fn min_price(&self) -> Option<&Dollars> {
pub fn min_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.first_key_value()
.map(|(k, _)| k)
.map(|(&k, _)| k)
}
/// Get last (highest) price in distribution.
pub fn max_price(&self) -> Option<&Dollars> {
pub fn max_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.last_key_value()
.map(|(k, _)| k)
.map(|(&k, _)| k)
}
/// Get iterator over price_to_amount for merged percentile computation.

View File

@@ -1,135 +0,0 @@
//! Fenwick Tree (Binary Indexed Tree) for O(log n) prefix sums.
//!
//! Used for efficient percentile computation over price distributions.
/// Fenwick tree (binary indexed tree) for O(log n) prefix sum queries and updates.
///
/// Internally 1-indexed: `tree[i]` stores the sum of a power-of-two-sized
/// range ending at 1-indexed position `i`; `tree[0]` is unused. All public
/// indices are 0-based.
///
/// Supports:
/// - `add(idx, delta)`: O(log n) - add delta to position idx
/// - `sub(idx, delta)`: O(log n) - subtract delta from position idx
/// - `prefix_sum(idx)`: O(log n) - sum of elements 0..=idx
/// - `lower_bound(target)`: O(log n) - find smallest idx where prefix_sum >= target
#[derive(Clone, Debug)]
pub struct FenwickTree {
    /// 1-indexed backing array of length `len + 1`; slot 0 is unused.
    tree: Vec<u64>,
    /// Number of logical positions (valid 0-based indices are 0..len).
    len: usize,
}

impl FenwickTree {
    /// Create a new Fenwick tree with `len` positions, all zero.
    pub fn new(len: usize) -> Self {
        Self {
            tree: vec![0; len + 1], // 1-indexed; slot 0 unused
            len,
        }
    }

    /// Add delta to position idx. O(log n).
    /// An out-of-range `idx` (>= len) is a silent no-op.
    pub fn add(&mut self, idx: usize, delta: u64) {
        let mut i = idx + 1; // Convert to 1-indexed
        while i <= self.len {
            self.tree[i] += delta;
            i += i & i.wrapping_neg(); // Add LSB
        }
    }

    /// Subtract delta from position idx. O(log n).
    /// An out-of-range `idx` (>= len) is a silent no-op.
    pub fn sub(&mut self, idx: usize, delta: u64) {
        let mut i = idx + 1;
        while i <= self.len {
            self.tree[i] -= delta;
            i += i & i.wrapping_neg();
        }
    }

    /// Get prefix sum of elements 0..=idx. O(log n).
    ///
    /// # Panics
    /// Panics if `idx >= self.len` (index out of bounds).
    #[allow(unused)]
    pub fn prefix_sum(&self, idx: usize) -> u64 {
        let mut sum = 0u64;
        let mut i = idx + 1; // Convert to 1-indexed
        while i > 0 {
            sum += self.tree[i];
            i -= i & i.wrapping_neg(); // Remove LSB
        }
        sum
    }

    /// Find smallest index where prefix_sum >= target. O(log n).
    /// Returns None for an empty tree, or if no such index exists
    /// (target > total sum).
    pub fn lower_bound(&self, target: u64) -> Option<usize> {
        // Empty tree: there is no valid position to return. This guard also
        // prevents the bit-width computation below from underflowing
        // (len == 0 gives leading_zeros() == usize::BITS).
        if self.len == 0 {
            return None;
        }
        if target == 0 {
            return Some(0);
        }
        let mut sum = 0u64;
        let mut pos = 0usize;
        // Highest power of two <= len: starting step of the binary descent.
        let mut bit = 1usize << (usize::BITS - 1 - self.len.leading_zeros());
        while bit > 0 {
            let next_pos = pos + bit;
            if next_pos <= self.len && sum + self.tree[next_pos] < target {
                sum += self.tree[next_pos];
                pos = next_pos;
            }
            bit >>= 1;
        }
        // `pos` is the largest 1-indexed position with prefix_sum < target,
        // so `pos + 1` (1-indexed) — which equals `pos` when converted back
        // to 0-indexed — is the answer, if it is in range.
        if pos < self.len {
            Some(pos) // Convert back to 0-indexed
        } else {
            None
        }
    }

    /// Get total sum of all elements. O(log n).
    /// Returns 0 for an empty tree.
    #[allow(unused)]
    pub fn total(&self) -> u64 {
        // Guard: prefix_sum(0) on an empty tree would read tree[1], which
        // does not exist (the backing vec only holds the unused slot 0).
        if self.len == 0 {
            return 0;
        }
        self.prefix_sum(self.len - 1)
    }

    /// Reset all values to zero. O(n).
    #[allow(unused)]
    pub fn clear(&mut self) {
        self.tree.fill(0);
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Point updates at scattered positions, then checks that prefix sums
    /// and the running total reflect every update.
    #[test]
    fn test_basic_operations() {
        let mut ft = FenwickTree::new(10);

        ft.add(0, 5);
        ft.add(2, 3);
        ft.add(5, 7);

        assert_eq!(ft.prefix_sum(0), 5);
        assert_eq!(ft.prefix_sum(1), 5);
        assert_eq!(ft.prefix_sum(2), 8);
        assert_eq!(ft.prefix_sum(5), 15);
        assert_eq!(ft.total(), 15);
    }

    /// `lower_bound` must return the first index whose prefix sum reaches
    /// the target (boundary values included), and None once the target
    /// exceeds the total sum (60 here).
    #[test]
    fn test_lower_bound() {
        let mut ft = FenwickTree::new(10);

        ft.add(0, 10);
        ft.add(2, 20);
        ft.add(5, 30);

        assert_eq!(ft.lower_bound(5), Some(0));
        assert_eq!(ft.lower_bound(10), Some(0));
        assert_eq!(ft.lower_bound(11), Some(2));
        assert_eq!(ft.lower_bound(30), Some(2));
        assert_eq!(ft.lower_bound(31), Some(5));
        assert_eq!(ft.lower_bound(60), Some(5));
        assert_eq!(ft.lower_bound(61), None);
    }
}

View File

@@ -1,8 +1,6 @@
mod address_cohort;
mod block;
mod cohort;
mod fenwick;
mod price_buckets;
mod price_to_amount;
mod realized;
mod supply;
@@ -13,7 +11,6 @@ mod utxo_cohort;
pub use address_cohort::*;
pub use block::*;
pub use cohort::*;
pub use price_buckets::*;
pub use price_to_amount::*;
pub use realized::*;
pub use supply::*;

View File

@@ -1,253 +0,0 @@
//! Logarithmic price buckets with Fenwick tree for O(log n) percentile queries.
//!
//! Uses logarithmic buckets to maintain constant relative precision across all price levels.
//! Bucket i represents prices in range [MIN_PRICE * BASE^i, MIN_PRICE * BASE^(i+1)).
use brk_types::{Dollars, Sats};
use super::fenwick::FenwickTree;
use crate::grouped::{PERCENTILES, PERCENTILES_LEN};
/// Minimum price tracked (sub-cent for early Bitcoin days).
const MIN_PRICE: f64 = 0.001;
/// Maximum price tracked ($100M for future-proofing).
#[allow(unused)]
const MAX_PRICE: f64 = 100_000_000.0;
/// Base for logarithmic buckets (0.1% precision).
const BASE: f64 = 1.001;
/// Pre-computed ln(BASE) for efficiency.
/// Must stay consistent with BASE if BASE ever changes.
const LN_BASE: f64 = 0.0009995003; // ln(1.001)
/// Pre-computed ln(MIN_PRICE) for efficiency.
/// Must stay consistent with MIN_PRICE if MIN_PRICE ever changes.
const LN_MIN_PRICE: f64 = -6.907755279; // ln(0.001)
/// Number of buckets needed: ceil(ln(MAX/MIN) / ln(BASE)).
/// ln(100_000_000 / 0.001) / ln(1.001) ≈ 25,341
const NUM_BUCKETS: usize = 25_400; // Rounded up for safety
/// Logarithmic price buckets with O(log n) percentile queries.
///
/// Invariant: `fenwick` mirrors `buckets` bucket-for-bucket, and `total`
/// equals the sum over `buckets`; `increment`/`decrement`/`clear` update
/// all three together.
#[derive(Clone, Debug)]
pub struct PriceBuckets {
    /// Fenwick tree for O(log n) prefix sums.
    fenwick: FenwickTree,
    /// Direct bucket access for iteration (needed for unrealized computation).
    buckets: Vec<Sats>,
    /// Total supply tracked.
    total: Sats,
}
// Default is an empty distribution, identical to `PriceBuckets::new()`.
impl Default for PriceBuckets {
    fn default() -> Self {
        Self::new()
    }
}
impl PriceBuckets {
/// Create new empty price buckets.
pub fn new() -> Self {
Self {
fenwick: FenwickTree::new(NUM_BUCKETS),
buckets: vec![Sats::ZERO; NUM_BUCKETS],
total: Sats::ZERO,
}
}
/// Convert price to bucket index. O(1).
#[inline]
pub fn price_to_bucket(price: Dollars) -> usize {
let price_f64 = f64::from(price);
if price_f64 <= MIN_PRICE {
return 0;
}
let bucket = ((price_f64.ln() - LN_MIN_PRICE) / LN_BASE) as usize;
bucket.min(NUM_BUCKETS - 1)
}
/// Convert bucket index to representative price (bucket midpoint). O(1).
#[inline]
pub fn bucket_to_price(bucket: usize) -> Dollars {
// Use geometric mean of bucket range for better accuracy
let low = MIN_PRICE * BASE.powi(bucket as i32);
let high = low * BASE;
Dollars::from((low * high).sqrt())
}
/// Add amount at given price. O(log n).
pub fn increment(&mut self, price: Dollars, amount: Sats) {
if amount == Sats::ZERO {
return;
}
let bucket = Self::price_to_bucket(price);
self.fenwick.add(bucket, u64::from(amount));
self.buckets[bucket] += amount;
self.total += amount;
}
/// Remove amount at given price. O(log n).
pub fn decrement(&mut self, price: Dollars, amount: Sats) {
if amount == Sats::ZERO {
return;
}
let bucket = Self::price_to_bucket(price);
self.fenwick.sub(bucket, u64::from(amount));
self.buckets[bucket] -= amount;
self.total -= amount;
}
/// Check if empty.
#[allow(unused)]
pub fn is_empty(&self) -> bool {
self.total == Sats::ZERO
}
/// Get total supply.
#[allow(unused)]
pub fn total(&self) -> Sats {
self.total
}
/// Compute all percentile prices. O(19 * log n) ≈ O(323 ops).
pub fn compute_percentiles(&self) -> [Dollars; PERCENTILES_LEN] {
let mut result = [Dollars::NAN; PERCENTILES_LEN];
if self.total == Sats::ZERO {
return result;
}
let total = u64::from(self.total);
for (i, &percentile) in PERCENTILES.iter().enumerate() {
let target = total * u64::from(percentile) / 100;
if let Some(bucket) = self.fenwick.lower_bound(target) {
result[i] = Self::bucket_to_price(bucket);
}
}
result
}
/// Get amount in a specific bucket.
#[allow(unused)]
pub fn get_bucket(&self, bucket: usize) -> Sats {
self.buckets.get(bucket).copied().unwrap_or(Sats::ZERO)
}
/// Iterate over non-empty buckets in a price range.
/// Used for unrealized computation flip range.
#[allow(unused)]
pub fn iter_range(
    &self,
    from_price: Dollars,
    to_price: Dollars,
) -> impl Iterator<Item = (Dollars, Sats)> + '_ {
    let a = Self::price_to_bucket(from_price);
    let b = Self::price_to_bucket(to_price);
    // Normalize so the range is ascending regardless of argument order.
    (a.min(b)..=a.max(b)).filter_map(move |idx| {
        let amount = self.buckets[idx];
        (amount > Sats::ZERO).then(|| (Self::bucket_to_price(idx), amount))
    })
}
/// Iterate over all non-empty buckets (for full unrealized computation).
#[allow(unused)]
pub fn iter(&self) -> impl Iterator<Item = (Dollars, Sats)> + '_ {
    self.buckets
        .iter()
        .enumerate()
        .filter(|&(_, &amount)| amount > Sats::ZERO)
        .map(|(idx, &amount)| (Self::bucket_to_price(idx), amount))
}
/// Price of the lowest non-empty bucket, or `None` when empty.
#[allow(unused)]
pub fn min_price(&self) -> Option<Dollars> {
    self.buckets
        .iter()
        .enumerate()
        .find(|&(_, &amount)| amount > Sats::ZERO)
        .map(|(idx, _)| Self::bucket_to_price(idx))
}
/// Price of the highest non-empty bucket, or `None` when empty.
#[allow(unused)]
pub fn max_price(&self) -> Option<Dollars> {
    self.buckets
        .iter()
        .enumerate()
        .rev()
        .find(|&(_, &amount)| amount > Sats::ZERO)
        .map(|(idx, _)| Self::bucket_to_price(idx))
}
/// Reset the structure to its empty state.
#[allow(unused)]
pub fn clear(&mut self) {
    // The three views of the data are independent; reset each in turn.
    self.total = Sats::ZERO;
    self.buckets.fill(Sats::ZERO);
    self.fenwick.clear();
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_bucket_conversion() {
        // A price mapped to its bucket and back must land within the
        // bucket precision (~0.1%) of the original.
        for &price in &[0.01, 1.0, 100.0, 10000.0, 50000.0, 100000.0] {
            let bucket = PriceBuckets::price_to_bucket(Dollars::from(price));
            let recovered = f64::from(PriceBuckets::bucket_to_price(bucket));
            let ratio = recovered / price;
            assert!(
                (0.999..=1.001).contains(&ratio),
                "price={price}, recovered={recovered}, ratio={ratio}"
            );
        }
    }

    #[test]
    fn test_percentiles() {
        let mut buckets = PriceBuckets::new();
        // 600 sats total: 100 @ $10, 200 @ $20, 300 @ $30.
        for &(price, sats) in &[(10.0, 100u64), (20.0, 200u64), (30.0, 300u64)] {
            buckets.increment(Dollars::from(price), Sats::from(sats));
        }
        // PERCENTILES[9] is the 50th percentile (median); the 300-sat
        // midpoint of a 600-sat total falls in the $20-$30 region.
        let median = f64::from(buckets.compute_percentiles()[9]);
        assert!(
            (15.0..=35.0).contains(&median),
            "median={median} should be around $20-$30"
        );
    }
}

View File

@@ -8,20 +8,24 @@ use brk_error::{Error, Result};
use brk_types::{Dollars, Height, Sats};
use derive_deref::{Deref, DerefMut};
use pco::standalone::{simple_decompress, simpler_compress};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, unlikely};
use vecdb::Bytes;
use crate::{grouped::PERCENTILES_LEN, utils::OptionExt};
use crate::{
grouped::{PERCENTILES, PERCENTILES_LEN},
utils::OptionExt,
};
use super::{PriceBuckets, SupplyState};
use super::SupplyState;
#[derive(Clone, Debug)]
pub struct PriceToAmount {
pathbuf: PathBuf,
state: Option<State>,
/// Logarithmic buckets for O(log n) percentile queries.
/// Rebuilt on load, not persisted.
buckets: Option<PriceBuckets>,
/// Pending deltas: (total_increment, total_decrement) per price.
/// Flushed to BTreeMap before reads and at end of block.
pending: FxHashMap<Dollars, (Sats, Sats)>,
}
const STATE_AT_: &str = "state_at_";
@@ -32,7 +36,7 @@ impl PriceToAmount {
Self {
pathbuf: path.join(format!("{name}_price_to_amount")),
state: None,
buckets: None,
pending: FxHashMap::default(),
}
}
@@ -41,20 +45,20 @@ impl PriceToAmount {
let (&height, path) = files.range(..=height).next_back().ok_or(Error::NotFound(
"No price state found at or before height".into(),
))?;
let state = State::deserialize(&fs::read(path)?)?;
// Rebuild buckets from loaded state
let mut buckets = PriceBuckets::new();
for (&price, &amount) in state.iter() {
buckets.increment(price, amount);
}
self.state = Some(state);
self.buckets = Some(buckets);
self.state = Some(State::deserialize(&fs::read(path)?)?);
self.pending.clear();
Ok(height)
}
fn assert_pending_empty(&self) {
assert!(
self.pending.is_empty(),
"PriceToAmount: pending not empty, call apply_pending first"
);
}
pub fn iter(&self) -> impl Iterator<Item = (&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().iter()
}
@@ -63,84 +67,92 @@ impl PriceToAmount {
&self,
range: R,
) -> impl Iterator<Item = (&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().range(range)
}
pub fn is_empty(&self) -> bool {
self.state.u().is_empty()
self.pending.is_empty() && self.state.u().is_empty()
}
pub fn first_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().first_key_value()
}
pub fn last_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().last_key_value()
}
/// Accumulate increment in pending batch. O(1).
pub fn increment(&mut self, price: Dollars, supply_state: &SupplyState) {
*self.state.um().entry(price).or_default() += supply_state.value;
if let Some(buckets) = self.buckets.as_mut() {
buckets.increment(price, supply_state.value);
}
self.pending.entry(price).or_default().0 += supply_state.value;
}
/// Accumulate decrement in pending batch. O(1).
pub fn decrement(&mut self, price: Dollars, supply_state: &SupplyState) {
if let Some(amount) = self.state.um().get_mut(&price) {
if unlikely(*amount < supply_state.value) {
let amount = *amount;
self.pending.entry(price).or_default().1 += supply_state.value;
}
/// Apply pending deltas to BTreeMap. O(k log n) where k = unique prices in pending.
/// Must be called before any read operations.
pub fn apply_pending(&mut self) {
for (price, (inc, dec)) in self.pending.drain() {
let entry = self.state.um().entry(price).or_default();
*entry += inc;
if *entry < dec {
panic!(
"PriceToAmount::decrement underflow!\n\
"PriceToAmount::apply_pending underflow!\n\
Path: {:?}\n\
Price: {}\n\
Bucket amount: {}\n\
Trying to decrement by: {}\n\
Supply state: utxo_count={}, value={}\n\
All buckets: {:?}",
self.pathbuf,
price,
amount,
supply_state.value,
supply_state.utxo_count,
supply_state.value,
self.state.u().iter().collect::<Vec<_>>()
Current + increments: {}\n\
Trying to decrement by: {}",
self.pathbuf, price, entry, dec
);
}
*amount -= supply_state.value;
if *amount == Sats::ZERO {
*entry -= dec;
if *entry == Sats::ZERO {
self.state.um().remove(&price);
}
if let Some(buckets) = self.buckets.as_mut() {
buckets.decrement(price, supply_state.value);
}
} else {
panic!(
"PriceToAmount::decrement price not found!\n\
Path: {:?}\n\
Price: {}\n\
Supply state: utxo_count={}, value={}\n\
All buckets: {:?}",
self.pathbuf,
price,
supply_state.utxo_count,
supply_state.value,
self.state.u().iter().collect::<Vec<_>>()
);
}
}
pub fn init(&mut self) {
self.state.replace(State::default());
self.buckets.replace(PriceBuckets::new());
self.pending.clear();
}
/// Compute percentile prices using O(log n) Fenwick tree queries.
/// Compute percentile prices by iterating the BTreeMap directly.
/// O(n) where n = number of unique prices.
pub fn compute_percentiles(&self) -> [Dollars; PERCENTILES_LEN] {
if let Some(buckets) = self.buckets.as_ref() {
buckets.compute_percentiles()
} else {
[Dollars::NAN; PERCENTILES_LEN]
self.assert_pending_empty();
let state = match self.state.as_ref() {
Some(s) if !s.is_empty() => s,
_ => return [Dollars::NAN; PERCENTILES_LEN],
};
let total: u64 = state.values().map(|&s| u64::from(s)).sum();
if total == 0 {
return [Dollars::NAN; PERCENTILES_LEN];
}
let mut result = [Dollars::NAN; PERCENTILES_LEN];
let mut cumsum = 0u64;
let mut idx = 0;
for (&price, &amount) in state.iter() {
cumsum += u64::from(amount);
while idx < PERCENTILES_LEN
&& cumsum >= total * u64::from(PERCENTILES[idx]) / 100
{
result[idx] = price;
idx += 1;
}
}
result
}
pub fn clean(&mut self) -> Result<()> {
@@ -170,6 +182,8 @@ impl PriceToAmount {
}
pub fn flush(&mut self, height: Height) -> Result<()> {
self.apply_pending();
let files = self.read_dir(Some(height))?;
for (_, path) in files

View File

@@ -39,14 +39,17 @@ impl AddAssign<&SupplyState> for SupplyState {
impl SubAssign<&SupplyState> for SupplyState {
fn sub_assign(&mut self, rhs: &Self) {
self.utxo_count = self.utxo_count.checked_sub(rhs.utxo_count).unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort utxo_count {} < address utxo_count {}. \
self.utxo_count = self
.utxo_count
.checked_sub(rhs.utxo_count)
.unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort utxo_count {} < address utxo_count {}. \
This indicates a desync between cohort state and address data. \
Try deleting the compute cache and restarting fresh.",
self.utxo_count, rhs.utxo_count
)
});
self.utxo_count, rhs.utxo_count
)
});
self.value = self.value.checked_sub(rhs.value).unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort value {} < address value {}. \

View File

@@ -3,28 +3,32 @@ use rayon::iter::{IntoParallelIterator, ParallelIterator};
use super::{Filter, TimeFilter};
// Age boundary constants in days
pub const DAYS_1D: usize = 1;
pub const DAYS_1W: usize = 7;
pub const DAYS_1M: usize = 30;
pub const DAYS_2M: usize = 2 * 30;
pub const DAYS_3M: usize = 3 * 30;
pub const DAYS_4M: usize = 4 * 30;
pub const DAYS_5M: usize = 5 * 30;
pub const DAYS_6M: usize = 6 * 30;
pub const DAYS_1Y: usize = 365;
pub const DAYS_2Y: usize = 2 * 365;
pub const DAYS_3Y: usize = 3 * 365;
pub const DAYS_4Y: usize = 4 * 365;
pub const DAYS_5Y: usize = 5 * 365;
pub const DAYS_6Y: usize = 6 * 365;
pub const DAYS_7Y: usize = 7 * 365;
pub const DAYS_8Y: usize = 8 * 365;
pub const DAYS_10Y: usize = 10 * 365;
pub const DAYS_12Y: usize = 12 * 365;
pub const DAYS_15Y: usize = 15 * 365;
/// Age boundaries in days. Defines the cohort ranges:
/// [0, B[0]), [B[0], B[1]), [B[1], B[2]), ..., [B[n-1], ∞)
pub const AGE_BOUNDARIES: [usize; 19] = [
1, // up_to_1d | _1d_to_1w
7, // _1d_to_1w | _1w_to_1m
30, // _1w_to_1m | _1m_to_2m
2 * 30, // _1m_to_2m | _2m_to_3m
3 * 30, // _2m_to_3m | _3m_to_4m
4 * 30, // _3m_to_4m | _4m_to_5m
5 * 30, // _4m_to_5m | _5m_to_6m
6 * 30, // _5m_to_6m | _6m_to_1y
365, // _6m_to_1y | _1y_to_2y
2 * 365, // _1y_to_2y | _2y_to_3y
3 * 365, // _2y_to_3y | _3y_to_4y
4 * 365, // _3y_to_4y | _4y_to_5y
5 * 365, // _4y_to_5y | _5y_to_6y
6 * 365, // _5y_to_6y | _6y_to_7y
7 * 365, // _6y_to_7y | _7y_to_8y
8 * 365, // _7y_to_8y | _8y_to_10y
10 * 365, // _8y_to_10y | _10y_to_12y
12 * 365, // _10y_to_12y | _12y_to_15y
15 * 365, // _12y_to_15y | from_15y
DAYS_1D, DAYS_1W, DAYS_1M, DAYS_2M, DAYS_3M, DAYS_4M, DAYS_5M, DAYS_6M, DAYS_1Y, DAYS_2Y,
DAYS_3Y, DAYS_4Y, DAYS_5Y, DAYS_6Y, DAYS_7Y, DAYS_8Y, DAYS_10Y, DAYS_12Y, DAYS_15Y,
];
#[derive(Default, Clone, Traversable)]
@@ -52,31 +56,58 @@ pub struct ByAgeRange<T> {
}
impl<T> ByAgeRange<T> {
/// Get mutable reference by days old. O(1).
#[inline]
pub fn get_mut_by_days_old(&mut self, days_old: usize) -> &mut T {
match days_old {
0..DAYS_1D => &mut self.up_to_1d,
DAYS_1D..DAYS_1W => &mut self._1d_to_1w,
DAYS_1W..DAYS_1M => &mut self._1w_to_1m,
DAYS_1M..DAYS_2M => &mut self._1m_to_2m,
DAYS_2M..DAYS_3M => &mut self._2m_to_3m,
DAYS_3M..DAYS_4M => &mut self._3m_to_4m,
DAYS_4M..DAYS_5M => &mut self._4m_to_5m,
DAYS_5M..DAYS_6M => &mut self._5m_to_6m,
DAYS_6M..DAYS_1Y => &mut self._6m_to_1y,
DAYS_1Y..DAYS_2Y => &mut self._1y_to_2y,
DAYS_2Y..DAYS_3Y => &mut self._2y_to_3y,
DAYS_3Y..DAYS_4Y => &mut self._3y_to_4y,
DAYS_4Y..DAYS_5Y => &mut self._4y_to_5y,
DAYS_5Y..DAYS_6Y => &mut self._5y_to_6y,
DAYS_6Y..DAYS_7Y => &mut self._6y_to_7y,
DAYS_7Y..DAYS_8Y => &mut self._7y_to_8y,
DAYS_8Y..DAYS_10Y => &mut self._8y_to_10y,
DAYS_10Y..DAYS_12Y => &mut self._10y_to_12y,
DAYS_12Y..DAYS_15Y => &mut self._12y_to_15y,
_ => &mut self.from_15y,
}
}
pub fn new<F>(mut create: F) -> Self
where
F: FnMut(Filter) -> T,
{
Self {
up_to_1d: create(Filter::Time(TimeFilter::Range(0..AGE_BOUNDARIES[0]))),
_1d_to_1w: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[0]..AGE_BOUNDARIES[1]))),
_1w_to_1m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[1]..AGE_BOUNDARIES[2]))),
_1m_to_2m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[2]..AGE_BOUNDARIES[3]))),
_2m_to_3m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[3]..AGE_BOUNDARIES[4]))),
_3m_to_4m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[4]..AGE_BOUNDARIES[5]))),
_4m_to_5m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[5]..AGE_BOUNDARIES[6]))),
_5m_to_6m: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[6]..AGE_BOUNDARIES[7]))),
_6m_to_1y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[7]..AGE_BOUNDARIES[8]))),
_1y_to_2y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[8]..AGE_BOUNDARIES[9]))),
_2y_to_3y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[9]..AGE_BOUNDARIES[10]))),
_3y_to_4y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[10]..AGE_BOUNDARIES[11]))),
_4y_to_5y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[11]..AGE_BOUNDARIES[12]))),
_5y_to_6y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[12]..AGE_BOUNDARIES[13]))),
_6y_to_7y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[13]..AGE_BOUNDARIES[14]))),
_7y_to_8y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[14]..AGE_BOUNDARIES[15]))),
_8y_to_10y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[15]..AGE_BOUNDARIES[16]))),
_10y_to_12y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[16]..AGE_BOUNDARIES[17]))),
_12y_to_15y: create(Filter::Time(TimeFilter::Range(AGE_BOUNDARIES[17]..AGE_BOUNDARIES[18]))),
from_15y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[18]))),
up_to_1d: create(Filter::Time(TimeFilter::Range(0..DAYS_1D))),
_1d_to_1w: create(Filter::Time(TimeFilter::Range(DAYS_1D..DAYS_1W))),
_1w_to_1m: create(Filter::Time(TimeFilter::Range(DAYS_1W..DAYS_1M))),
_1m_to_2m: create(Filter::Time(TimeFilter::Range(DAYS_1M..DAYS_2M))),
_2m_to_3m: create(Filter::Time(TimeFilter::Range(DAYS_2M..DAYS_3M))),
_3m_to_4m: create(Filter::Time(TimeFilter::Range(DAYS_3M..DAYS_4M))),
_4m_to_5m: create(Filter::Time(TimeFilter::Range(DAYS_4M..DAYS_5M))),
_5m_to_6m: create(Filter::Time(TimeFilter::Range(DAYS_5M..DAYS_6M))),
_6m_to_1y: create(Filter::Time(TimeFilter::Range(DAYS_6M..DAYS_1Y))),
_1y_to_2y: create(Filter::Time(TimeFilter::Range(DAYS_1Y..DAYS_2Y))),
_2y_to_3y: create(Filter::Time(TimeFilter::Range(DAYS_2Y..DAYS_3Y))),
_3y_to_4y: create(Filter::Time(TimeFilter::Range(DAYS_3Y..DAYS_4Y))),
_4y_to_5y: create(Filter::Time(TimeFilter::Range(DAYS_4Y..DAYS_5Y))),
_5y_to_6y: create(Filter::Time(TimeFilter::Range(DAYS_5Y..DAYS_6Y))),
_6y_to_7y: create(Filter::Time(TimeFilter::Range(DAYS_6Y..DAYS_7Y))),
_7y_to_8y: create(Filter::Time(TimeFilter::Range(DAYS_7Y..DAYS_8Y))),
_8y_to_10y: create(Filter::Time(TimeFilter::Range(DAYS_8Y..DAYS_10Y))),
_10y_to_12y: create(Filter::Time(TimeFilter::Range(DAYS_10Y..DAYS_12Y))),
_12y_to_15y: create(Filter::Time(TimeFilter::Range(DAYS_12Y..DAYS_15Y))),
from_15y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_15Y))),
}
}

View File

@@ -8,7 +8,7 @@ use super::{AmountFilter, Filter};
/// Bucket index for amount ranges. Use for cheap comparisons.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AmountBucket(u8);
struct AmountBucket(u8);
impl From<Sats> for AmountBucket {
#[inline(always)]
@@ -33,6 +33,12 @@ impl From<Sats> for AmountBucket {
}
}
/// Check if two amounts are in different buckets. O(1).
#[inline(always)]
pub fn amounts_in_different_buckets(a: Sats, b: Sats) -> bool {
AmountBucket::from(a) != AmountBucket::from(b)
}
#[derive(Debug, Default, Clone, Traversable)]
pub struct ByAmountRange<T> {
pub _0sats: T,

View File

@@ -1,7 +1,11 @@
use super::{Filter, TimeFilter, AGE_BOUNDARIES};
use brk_traversable::Traversable;
use rayon::prelude::*;
use super::{
Filter, TimeFilter, DAYS_10Y, DAYS_12Y, DAYS_15Y, DAYS_1M, DAYS_1W, DAYS_1Y, DAYS_2M, DAYS_2Y,
DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y, DAYS_7Y, DAYS_8Y,
};
#[derive(Default, Clone, Traversable)]
pub struct ByMaxAge<T> {
pub _1w: T,
@@ -30,24 +34,24 @@ impl<T> ByMaxAge<T> {
F: FnMut(Filter) -> T,
{
Self {
_1w: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[1]))),
_1m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[2]))),
_2m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[3]))),
_3m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[4]))),
_4m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[5]))),
_5m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[6]))),
_6m: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[7]))),
_1y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[8]))),
_2y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[9]))),
_3y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[10]))),
_4y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[11]))),
_5y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[12]))),
_6y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[13]))),
_7y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[14]))),
_8y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[15]))),
_10y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[16]))),
_12y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[17]))),
_15y: create(Filter::Time(TimeFilter::LowerThan(AGE_BOUNDARIES[18]))),
_1w: create(Filter::Time(TimeFilter::LowerThan(DAYS_1W))),
_1m: create(Filter::Time(TimeFilter::LowerThan(DAYS_1M))),
_2m: create(Filter::Time(TimeFilter::LowerThan(DAYS_2M))),
_3m: create(Filter::Time(TimeFilter::LowerThan(DAYS_3M))),
_4m: create(Filter::Time(TimeFilter::LowerThan(DAYS_4M))),
_5m: create(Filter::Time(TimeFilter::LowerThan(DAYS_5M))),
_6m: create(Filter::Time(TimeFilter::LowerThan(DAYS_6M))),
_1y: create(Filter::Time(TimeFilter::LowerThan(DAYS_1Y))),
_2y: create(Filter::Time(TimeFilter::LowerThan(DAYS_2Y))),
_3y: create(Filter::Time(TimeFilter::LowerThan(DAYS_3Y))),
_4y: create(Filter::Time(TimeFilter::LowerThan(DAYS_4Y))),
_5y: create(Filter::Time(TimeFilter::LowerThan(DAYS_5Y))),
_6y: create(Filter::Time(TimeFilter::LowerThan(DAYS_6Y))),
_7y: create(Filter::Time(TimeFilter::LowerThan(DAYS_7Y))),
_8y: create(Filter::Time(TimeFilter::LowerThan(DAYS_8Y))),
_10y: create(Filter::Time(TimeFilter::LowerThan(DAYS_10Y))),
_12y: create(Filter::Time(TimeFilter::LowerThan(DAYS_12Y))),
_15y: create(Filter::Time(TimeFilter::LowerThan(DAYS_15Y))),
}
}

View File

@@ -1,7 +1,10 @@
use brk_traversable::Traversable;
use rayon::prelude::*;
use super::{Filter, TimeFilter, AGE_BOUNDARIES};
use super::{
Filter, TimeFilter, DAYS_10Y, DAYS_12Y, DAYS_1D, DAYS_1M, DAYS_1W, DAYS_1Y, DAYS_2M, DAYS_2Y,
DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y, DAYS_7Y, DAYS_8Y,
};
#[derive(Default, Clone, Traversable)]
pub struct ByMinAge<T> {
@@ -31,24 +34,24 @@ impl<T> ByMinAge<T> {
F: FnMut(Filter) -> T,
{
Self {
_1d: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[0]))),
_1w: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[1]))),
_1m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[2]))),
_2m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[3]))),
_3m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[4]))),
_4m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[5]))),
_5m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[6]))),
_6m: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[7]))),
_1y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[8]))),
_2y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[9]))),
_3y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[10]))),
_4y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[11]))),
_5y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[12]))),
_6y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[13]))),
_7y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[14]))),
_8y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[15]))),
_10y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[16]))),
_12y: create(Filter::Time(TimeFilter::GreaterOrEqual(AGE_BOUNDARIES[17]))),
_1d: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1D))),
_1w: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1W))),
_1m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1M))),
_2m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2M))),
_3m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3M))),
_4m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4M))),
_5m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5M))),
_6m: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6M))),
_1y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1Y))),
_2y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2Y))),
_3y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3Y))),
_4y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4Y))),
_5y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5Y))),
_6y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6Y))),
_7y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_7Y))),
_8y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_8Y))),
_10y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_10Y))),
_12y: create(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_12Y))),
}
}

View File

@@ -128,6 +128,23 @@ impl<T> BySpendableType<T> {
]
.into_iter()
}
pub fn iter_typed_mut(&mut self) -> impl Iterator<Item = (OutputType, &mut T)> {
[
(OutputType::P2PK65, &mut self.p2pk65),
(OutputType::P2PK33, &mut self.p2pk33),
(OutputType::P2PKH, &mut self.p2pkh),
(OutputType::P2MS, &mut self.p2ms),
(OutputType::P2SH, &mut self.p2sh),
(OutputType::P2WPKH, &mut self.p2wpkh),
(OutputType::P2WSH, &mut self.p2wsh),
(OutputType::P2TR, &mut self.p2tr),
(OutputType::P2A, &mut self.p2a),
(OutputType::Unknown, &mut self.unknown),
(OutputType::Empty, &mut self.empty),
]
.into_iter()
}
}
impl<T> Add for BySpendableType<T>

View File

@@ -1,3 +1,5 @@
use crate::DAYS_5M;
/// Classification for short-term vs long-term holders.
/// The threshold is 150 days (approximately 5 months).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -9,7 +11,7 @@ pub enum Term {
}
impl Term {
pub const THRESHOLD_DAYS: usize = 150;
pub const THRESHOLD_DAYS: usize = DAYS_5M;
pub fn to_name(&self) -> &'static str {
match self {

View File

@@ -12,7 +12,7 @@ use log::info;
use rayon::prelude::*;
use vecdb::{AnyVec, TypedVecIterator, VecIndex, VecIterator};
use crate::{constants::DUPLICATE_TXID_PREFIXES, Indexes};
use crate::{Indexes, constants::DUPLICATE_TXID_PREFIXES};
use super::Vecs;
@@ -168,7 +168,7 @@ impl Stores {
self.addresstype_to_addressindex_and_unspentoutpoint
.par_values_mut()
.map(|s| s as &mut dyn AnyStore),
) // Changed from par_iter_mut()
)
.try_for_each(|store| store.commit(height))?;
info!("Commits done in {:?}", i.elapsed());

View File

@@ -1,6 +1,7 @@
use std::{
cmp::Ordering,
f64,
hash::{Hash, Hasher},
iter::Sum,
ops::{Add, AddAssign, Div, Mul},
};
@@ -18,6 +19,12 @@ use super::{Bitcoin, Cents, Close, High, Sats, StoredF32, StoredF64};
#[derive(Debug, Default, Clone, Copy, Deref, Serialize, Deserialize, Pco, JsonSchema)]
pub struct Dollars(f64);
impl Hash for Dollars {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // f64 does not implement Hash; hash the raw IEEE-754 bit pattern
        // instead, which keeps NaN values hashable.
        // NOTE(review): bitwise hashing means +0.0 and -0.0 hash
        // differently — confirm this matches the equality semantics used
        // wherever Dollars serves as a map key.
        self.0.to_bits().hash(state);
    }
}
impl Dollars {
pub const ZERO: Self = Self(0.0);
pub const NAN: Self = Self(f64::NAN);