computer: snapshot

This commit is contained in:
nym21
2025-12-28 03:19:34 +01:00
parent 9ba77dac0f
commit 5d6325ae30
43 changed files with 1948 additions and 695 deletions

7
Cargo.lock generated
View File

@@ -4267,8 +4267,6 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48974be79d0854eb9ae7a0931882bc84052bc9c0be10b39a324aab0be975e2d9"
dependencies = [
"libc",
"log",
@@ -5461,8 +5459,6 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9bb3a189dc833e0e1eeb4816230f4478412fbddaaeffec56cbfd3d7f778a25d"
dependencies = [
"ctrlc",
"log",
@@ -5470,6 +5466,7 @@ dependencies = [
"parking_lot",
"pco",
"rawdb",
"schemars",
"serde",
"serde_json",
"thiserror 2.0.17",
@@ -5481,8 +5478,6 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf3feda7d28c275661d640ad454830ad3ed6ccdaa297d3ec24faa06741142e3"
dependencies = [
"quote",
"syn 2.0.111",

View File

@@ -81,8 +81,8 @@ serde_derive = "1.0.228"
serde_json = { version = "1.0.148", features = ["float_roundtrip"] }
smallvec = "1.15.1"
tokio = { version = "1.48.0", features = ["rt-multi-thread"] }
vecdb = { version = "0.5.0", features = ["derive", "serde_json", "pco"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { version = "0.5.0", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] }
[workspace.metadata.release]

View File

@@ -570,13 +570,13 @@ fn field_to_js_type_with_generic_value(
if metadata.is_pattern_type(&field.rust_type) {
if metadata.is_pattern_generic(&field.rust_type) {
if let Some(vt) = generic_value_type {
return format!("{}<{}>", field.rust_type, vt);
} else if is_generic {
return format!("{}<T>", field.rust_type);
} else {
return format!("{}<unknown>", field.rust_type);
}
// Use type_param from field, then generic_value_type, then T if parent is generic
let type_param = field
.type_param
.as_deref()
.or(generic_value_type)
.unwrap_or(if is_generic { "T" } else { "unknown" });
return format!("{}<{}>", field.rust_type, type_param);
}
field.rust_type.clone()
} else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) {
@@ -763,12 +763,10 @@ fn generate_tree_initializer(
let arg = if is_parameterizable {
get_pattern_instance_base(child_node, child_name)
} else if accumulated_name.is_empty() {
format!("/{}", child_name)
} else {
if accumulated_name.is_empty() {
format!("/{}", child_name)
} else {
format!("{}/{}", accumulated_name, child_name)
}
format!("{}/{}", accumulated_name, child_name)
};
writeln!(
@@ -798,16 +796,16 @@ fn generate_tree_initializer(
}
fn infer_child_accumulated_name(node: &TreeNode, parent_acc: &str, field_name: &str) -> String {
if let Some(leaf_name) = get_first_leaf_name(node) {
if let Some(pos) = leaf_name.find(field_name) {
if pos == 0 {
if let Some(leaf_name) = get_first_leaf_name(node)
&& let Some(pos) = leaf_name.find(field_name)
{
if pos == 0 {
return field_name.to_string();
} else if leaf_name.chars().nth(pos - 1) == Some('_') {
if parent_acc.is_empty() {
return field_name.to_string();
} else if leaf_name.chars().nth(pos - 1) == Some('_') {
if parent_acc.is_empty() {
return field_name.to_string();
}
return format!("{}_{}", parent_acc, field_name);
}
return format!("{}_{}", parent_acc, field_name);
}
}

View File

@@ -1,3 +1,5 @@
#![allow(clippy::type_complexity)]
use std::{collections::btree_map::Entry, fs::create_dir_all, io, path::PathBuf};
use brk_query::Vecs;

View File

@@ -6,7 +6,7 @@ use serde_json::Value;
use crate::{
ClientMetadata, Endpoint, FieldNamePosition, IndexSetPattern, PatternField, StructuralPattern,
TypeSchemas, extract_inner_type, get_fields_with_child_info, get_node_fields,
get_pattern_instance_base, is_enum_schema, to_pascal_case, to_snake_case,
get_pattern_instance_base, to_pascal_case, to_snake_case,
};
/// Generate Python client from metadata and OpenAPI endpoints.
@@ -65,9 +65,9 @@ fn generate_type_definitions(output: &mut String, schemas: &TypeSchemas) {
writeln!(output, " {}: {}", safe_name, prop_type).unwrap();
}
writeln!(output).unwrap();
} else if is_enum_schema(schema) {
let py_type = schema_to_python_type_ctx(schema, Some(&name));
writeln!(output, "{} = {}", name, py_type).unwrap();
// } else if is_enum_schema(schema) {
// let py_type = schema_to_python_type_ctx(schema, Some(&name));
// writeln!(output, "{} = {}", name, py_type).unwrap();
} else {
let py_type = schema_to_python_type_ctx(schema, Some(&name));
writeln!(output, "{} = {}", name, py_type).unwrap();
@@ -615,11 +615,14 @@ fn field_to_python_type_with_generic_value(
};
if metadata.is_pattern_type(&field.rust_type) {
// Check if this pattern is generic and we have a value type
if metadata.is_pattern_generic(&field.rust_type)
&& let Some(vt) = generic_value_type
{
return format!("{}[{}]", field.rust_type, vt);
if metadata.is_pattern_generic(&field.rust_type) {
// Use type_param from field, then generic_value_type, then T if parent is generic
let type_param = field
.type_param
.as_deref()
.or(generic_value_type)
.unwrap_or(if is_generic { "T" } else { "Any" });
return format!("{}[{}]", field.rust_type, type_param);
}
field.rust_type.clone()
} else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) {
@@ -631,7 +634,6 @@ fn field_to_python_type_with_generic_value(
}
}
/// Generate tree classes
fn generate_tree_classes(output: &mut String, catalog: &TreeNode, metadata: &ClientMetadata) {
writeln!(output, "# Catalog tree classes\n").unwrap();
@@ -668,7 +670,8 @@ fn generate_tree_class(
.collect();
// Skip if this matches a pattern (already generated)
if pattern_lookup.contains_key(&fields) && pattern_lookup.get(&fields) != Some(&name.to_string())
if pattern_lookup.contains_key(&fields)
&& pattern_lookup.get(&fields) != Some(&name.to_string())
{
return;
}
@@ -695,12 +698,8 @@ fn generate_tree_class(
.as_ref()
.and_then(|cf| metadata.get_type_param(cf))
.map(String::as_str);
let py_type = field_to_python_type_with_generic_value(
field,
metadata,
false,
generic_value_type,
);
let py_type =
field_to_python_type_with_generic_value(field, metadata, false, generic_value_type);
let field_name_py = to_snake_case(&field.name);
if metadata.is_pattern_type(&field.rust_type) {

View File

@@ -407,10 +407,14 @@ fn field_to_type_annotation_with_generic(
};
if metadata.is_pattern_type(&field.rust_type) {
if metadata.is_pattern_generic(&field.rust_type)
&& let Some(vt) = generic_value_type
{
return format!("{}<{}>", field.rust_type, vt);
if metadata.is_pattern_generic(&field.rust_type) {
// Use type_param from field, then generic_value_type, then T if parent is generic
let type_param = field
.type_param
.as_deref()
.or(generic_value_type)
.unwrap_or(if is_generic { "T" } else { "_" });
return format!("{}<{}>", field.rust_type, type_param);
}
field.rust_type.clone()
} else if let Some(accessor) = metadata.find_index_set_pattern(&field.indexes) {

View File

@@ -156,6 +156,8 @@ pub struct PatternField {
pub json_type: String,
/// For leaves: the set of supported indexes. Empty for branches.
pub indexes: BTreeSet<Index>,
/// For branches referencing generic patterns: the concrete type parameter
pub type_param: Option<String>,
}
impl PatternField {
@@ -175,6 +177,7 @@ impl std::hash::Hash for PatternField {
self.name.hash(state);
self.rust_type.hash(state);
self.json_type.hash(state);
// Note: child_fields not included in hash for pattern matching purposes
}
}
@@ -183,6 +186,7 @@ impl PartialEq for PatternField {
self.name == other.name
&& self.rust_type == other.rust_type
&& self.json_type == other.json_type
// Note: child_fields not included in equality for pattern matching purposes
}
}

View File

@@ -21,8 +21,9 @@ pub fn detect_structural_patterns(
let mut signature_counts: HashMap<Vec<PatternField>, usize> = HashMap::new();
let mut normalized_to_name: HashMap<Vec<PatternField>, String> = HashMap::new();
let mut name_counts: HashMap<String, usize> = HashMap::new();
let mut signature_to_child_fields: HashMap<Vec<PatternField>, Vec<Vec<PatternField>>> =
HashMap::new();
// Process tree bottom-up to resolve all branch types
resolve_branch_patterns(
tree,
"root",
@@ -30,30 +31,44 @@ pub fn detect_structural_patterns(
&mut signature_counts,
&mut normalized_to_name,
&mut name_counts,
&mut signature_to_child_fields,
);
// Identify generic patterns (also extracts type params)
let (generic_patterns, generic_mappings, type_mappings) =
detect_generic_patterns(&signature_to_pattern);
// Build non-generic patterns: signatures appearing 2+ times that weren't merged into generics
let mut patterns: Vec<StructuralPattern> = signature_to_pattern
.iter()
.filter(|(sig, _)| {
signature_counts.get(*sig).copied().unwrap_or(0) >= 2
&& !generic_mappings.contains_key(*sig)
})
.map(|(fields, name)| StructuralPattern {
name: name.clone(),
fields: fields.clone(),
field_positions: HashMap::new(),
is_generic: false,
.map(|(fields, name)| {
let child_fields_list = signature_to_child_fields.get(fields);
let fields_with_type_params = fields
.iter()
.enumerate()
.map(|(i, f)| {
let type_param = child_fields_list
.and_then(|list| list.get(i))
.and_then(|cf| type_mappings.get(cf).cloned());
PatternField {
type_param,
..f.clone()
}
})
.collect();
StructuralPattern {
name: name.clone(),
fields: fields_with_type_params,
field_positions: HashMap::new(),
is_generic: false,
}
})
.collect();
patterns.extend(generic_patterns);
// Build lookup for field position analysis
let mut pattern_lookup: HashMap<Vec<PatternField>, String> = HashMap::new();
for (sig, name) in &signature_to_pattern {
if signature_counts.get(sig).copied().unwrap_or(0) >= 2 {
@@ -64,7 +79,6 @@ pub fn detect_structural_patterns(
let concrete_to_pattern = pattern_lookup.clone();
// Second pass: analyze field positions
analyze_pattern_field_positions(tree, &mut patterns, &pattern_lookup);
patterns.sort_by(|a, b| b.fields.len().cmp(&a.fields.len()));
@@ -147,6 +161,7 @@ fn normalize_fields_for_generic(fields: &[PatternField]) -> Option<(Vec<PatternF
rust_type: "T".to_string(),
json_type: "T".to_string(),
indexes: f.indexes.clone(),
type_param: None,
}
}
})
@@ -156,6 +171,7 @@ fn normalize_fields_for_generic(fields: &[PatternField]) -> Option<(Vec<PatternF
}
/// Recursively resolve branch patterns bottom-up.
/// Returns (pattern_name, fields) for parent's child_fields tracking.
fn resolve_branch_patterns(
node: &TreeNode,
field_name: &str,
@@ -163,30 +179,40 @@ fn resolve_branch_patterns(
signature_counts: &mut HashMap<Vec<PatternField>, usize>,
normalized_to_name: &mut HashMap<Vec<PatternField>, String>,
name_counts: &mut HashMap<String, usize>,
) -> Option<String> {
signature_to_child_fields: &mut HashMap<Vec<PatternField>, Vec<Vec<PatternField>>>,
) -> Option<(String, Vec<PatternField>)> {
let TreeNode::Branch(children) = node else {
return None;
};
let mut fields: Vec<PatternField> = Vec::new();
let mut child_fields_vec: Vec<Vec<PatternField>> = Vec::new();
for (child_name, child_node) in children {
let (rust_type, json_type, indexes) = match child_node {
let (rust_type, json_type, indexes, child_fields) = match child_node {
TreeNode::Leaf(leaf) => (
leaf.value_type().to_string(),
schema_to_json_type(&leaf.schema),
leaf.indexes().clone(),
Vec::new(),
),
TreeNode::Branch(_) => {
let pattern_name = resolve_branch_patterns(
let (pattern_name, child_pattern_fields) = resolve_branch_patterns(
child_node,
child_name,
signature_to_pattern,
signature_counts,
normalized_to_name,
name_counts,
signature_to_child_fields,
)
.unwrap_or_else(|| ("Unknown".to_string(), Vec::new()));
(
pattern_name.clone(),
pattern_name,
BTreeSet::new(),
child_pattern_fields,
)
.unwrap_or_else(|| "Unknown".to_string());
(pattern_name.clone(), pattern_name, BTreeSet::new())
}
};
fields.push(PatternField {
@@ -194,12 +220,19 @@ fn resolve_branch_patterns(
rust_type,
json_type,
indexes,
type_param: None,
});
child_fields_vec.push(child_fields);
}
fields.sort_by(|a, b| a.name.cmp(&b.name));
*signature_counts.entry(fields.clone()).or_insert(0) += 1;
// Store child fields for type param resolution later
signature_to_child_fields
.entry(fields.clone())
.or_insert(child_fields_vec);
let pattern_name = if let Some(existing) = signature_to_pattern.get(&fields) {
existing.clone()
} else {
@@ -208,11 +241,11 @@ fn resolve_branch_patterns(
.entry(normalized)
.or_insert_with(|| generate_pattern_name(field_name, name_counts))
.clone();
signature_to_pattern.insert(fields, name.clone());
signature_to_pattern.insert(fields.clone(), name.clone());
name
};
Some(pattern_name)
Some((pattern_name, fields))
}
/// Normalize fields for naming (same structure = same name).
@@ -228,6 +261,7 @@ fn normalize_fields_for_naming(fields: &[PatternField]) -> Vec<PatternField> {
rust_type: "_".to_string(),
json_type: "_".to_string(),
indexes: f.indexes.clone(),
type_param: None,
}
}
})

View File

@@ -50,6 +50,7 @@ pub fn get_node_fields(
rust_type,
json_type,
indexes,
type_param: None,
}
})
.collect();
@@ -94,6 +95,7 @@ pub fn get_fields_with_child_info(
rust_type,
json_type,
indexes,
type_param: None,
},
child_fields,
)

View File

@@ -5,7 +5,7 @@ use brk_error::Result;
use brk_fetcher::Fetcher;
use brk_indexer::Indexer;
use mimalloc::MiMalloc;
use vecdb::Exit;
use vecdb::{AnyStoredVec, Exit};
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;
@@ -34,8 +34,7 @@ fn run() -> Result<()> {
let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?;
// let _a = dbg!(computer.chain.txinindex_to_value.region().meta());
// let _b = dbg!(indexer.vecs.txout.txoutindex_to_txoutdata.region().meta());
let _a = dbg!(computer.chain.txindex_to_fee.region().meta());
Ok(())
}

View File

@@ -0,0 +1,205 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex};
use super::{ComputedVecValue, EagerVecsBuilder, LazyVecsBuilder};
const VERSION: Version = Version::ZERO;
/// Lazy transform version of `EagerVecsBuilder`.
/// Each group is a `LazyVecFrom1` that transforms from the corresponding stored group.
/// S1T is the source type, T is the output type (can be the same for transforms like negation).
#[derive(Clone, Traversable)]
pub struct LazyTransformBuilder<I, T, S1T = T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
{
// Each aggregate group is optional: a constructor only fills it when the
// corresponding source group exists (see `from_eager` / `from_lazy`).
pub first: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub average: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub sum: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub max: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
// Percentile groups: populated by `from_eager`, always `None` from
// `from_lazy` because `LazyVecsBuilder` carries no percentile sources.
pub pct90: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct75: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub median: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct25: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct10: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub min: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub last: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub cumulative: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a stored `EagerVecsBuilder`.
/// F is the transform type (e.g., `Negate`, `Halve`).
///
/// Naming: every aggregate vec is named `{name}_{group}` (with `average`
/// shortened to `avg`), except `last`, which reuses the bare `name`.
/// NOTE(review): the unsuffixed `last` matches `from_lazy` below and looks
/// intentional (`last` acting as the canonical per-period value) — confirm.
///
/// Each output group is `Some` only when the source group is `Some`.
pub fn from_eager<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &EagerVecsBuilder<I, S1T>,
) -> Self {
let v = version + VERSION;
// Helper building the `{name}_{group}` vec name.
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source.first.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("first"),
v,
s.boxed_clone(),
))
}),
average: source.average.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("avg"),
v,
s.boxed_clone(),
))
}),
sum: source.sum.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("sum"),
v,
s.boxed_clone(),
))
}),
max: source.max.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("max"),
v,
s.boxed_clone(),
))
}),
pct90: source.pct90.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct90"),
v,
s.boxed_clone(),
))
}),
pct75: source.pct75.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct75"),
v,
s.boxed_clone(),
))
}),
median: source.median.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("median"),
v,
s.boxed_clone(),
))
}),
pct25: source.pct25.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct25"),
v,
s.boxed_clone(),
))
}),
pct10: source.pct10.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct10"),
v,
s.boxed_clone(),
))
}),
min: source.min.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("min"),
v,
s.boxed_clone(),
))
}),
// `last` deliberately uses the bare `name` (no `_last` suffix).
last: source
.last
.as_ref()
.map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
cumulative: source.cumulative.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("cumulative"),
v,
s.boxed_clone(),
))
}),
}
}
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a `LazyVecsBuilder`.
/// Note: LazyVecsBuilder doesn't have percentiles, so those will be None.
///
/// Naming follows `from_eager`: `{name}_{group}` for every aggregate
/// (with `average` shortened to `avg`), and the bare `name` for `last`.
pub fn from_lazy<F: UnaryTransform<S1T, T>, S1I: VecIndex, S2T: ComputedVecValue>(
name: &str,
version: Version,
source: &LazyVecsBuilder<I, S1T, S1I, S2T>,
) -> Self {
let v = version + VERSION;
// Use same suffix pattern as EagerVecsBuilder
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source.first.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("first"),
v,
s.boxed_clone(),
))
}),
average: source.average.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("avg"),
v,
s.boxed_clone(),
))
}),
sum: source.sum.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("sum"),
v,
s.boxed_clone(),
))
}),
max: source.max.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("max"),
v,
s.boxed_clone(),
))
}),
// No percentile sources exist on `LazyVecsBuilder`.
pct90: None,
pct75: None,
median: None,
pct25: None,
pct10: None,
min: source.min.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("min"),
v,
s.boxed_clone(),
))
}),
// `last` deliberately uses the bare `name` (no `_last` suffix).
last: source
.last
.as_ref()
.map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
cumulative: source.cumulative.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("cumulative"),
v,
s.boxed_clone(),
))
}),
}
}
}

View File

@@ -0,0 +1,236 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec, LazyVecFrom2, VecIndex};
use super::{ComputedVecValue, EagerVecsBuilder, LazyVecsBuilder};
const VERSION: Version = Version::ZERO;
/// Lazy binary transform builder.
/// Each group is a `LazyVecFrom2` that transforms from two corresponding stored groups.
#[derive(Clone, Traversable)]
#[allow(clippy::type_complexity)]
pub struct LazyTransform2Builder<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
// Each group is `Some` only when BOTH sources expose the group
// (constructors pair them via `Option::zip`). Unlike the unary
// `LazyTransformBuilder`, there are no percentile groups here.
pub first: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub average: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub sum: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub max: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub min: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub last: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub cumulative: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
}
impl<I, T, S1T, S2T> LazyTransform2Builder<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Create a lazy binary transform from two stored `EagerVecsBuilder`.
///
/// Each output group is `Some` only when both sources provide that group
/// (`Option::zip`). Naming: `{name}_{group}` (with `average` shortened to
/// `avg`), except `last`, which reuses the bare `name` — consistent with
/// `LazyTransformBuilder`; NOTE(review): confirm this is intentional.
pub fn from_eager<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &EagerVecsBuilder<I, S1T>,
source2: &EagerVecsBuilder<I, S2T>,
) -> Self {
let v = version + VERSION;
// Helper building the `{name}_{group}` vec name.
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source1
.first
.as_ref()
.zip(source2.first.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("first"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
average: source1
.average
.as_ref()
.zip(source2.average.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("avg"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
sum: source1
.sum
.as_ref()
.zip(source2.sum.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("sum"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
max: source1
.max
.as_ref()
.zip(source2.max.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("max"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
min: source1
.min
.as_ref()
.zip(source2.min.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("min"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
// `last` deliberately uses the bare `name` (no `_last` suffix).
last: source1
.last
.as_ref()
.zip(source2.last.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
name,
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
cumulative: source1
.cumulative
.as_ref()
.zip(source2.cumulative.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("cumulative"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
}
}
/// Create a lazy binary transform from two `LazyVecsBuilder`.
///
/// Mirrors `from_eager` (same group pairing and naming), but sources are
/// themselves lazy builders with independent upstream index/value types.
pub fn from_lazy<
F: BinaryTransform<S1T, S2T, T>,
S1I: VecIndex,
S1E: ComputedVecValue,
S2I: VecIndex,
S2E: ComputedVecValue,
>(
name: &str,
version: Version,
source1: &LazyVecsBuilder<I, S1T, S1I, S1E>,
source2: &LazyVecsBuilder<I, S2T, S2I, S2E>,
) -> Self {
let v = version + VERSION;
// Helper building the `{name}_{group}` vec name.
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source1
.first
.as_ref()
.zip(source2.first.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("first"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
average: source1
.average
.as_ref()
.zip(source2.average.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("avg"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
sum: source1
.sum
.as_ref()
.zip(source2.sum.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("sum"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
max: source1
.max
.as_ref()
.zip(source2.max.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("max"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
min: source1
.min
.as_ref()
.zip(source2.min.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("min"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
// `last` deliberately uses the bare `name` (no `_last` suffix).
last: source1
.last
.as_ref()
.zip(source2.last.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
name,
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
cumulative: source1
.cumulative
.as_ref()
.zip(source2.cumulative.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("cumulative"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
}
}
}

View File

@@ -1,5 +1,4 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,

View File

@@ -0,0 +1,133 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, BinaryTransform, IterableCloneableVec, LazyVecFrom2};
use super::{ComputedVecValue, ComputedVecsFromDateIndex, LazyTransform2Builder};
const VERSION: Version = Version::ZERO;
/// Lazy binary transform from two `ComputedVecsFromDateIndex` sources.
#[derive(Clone)]
pub struct LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
// Present only when both sources have a `dateindex` vec (see `from_computed`).
pub dateindex: Option<LazyVecFrom2<DateIndex, T, DateIndex, S1T, DateIndex, S2T>>,
// Coarser time granularities, each derived lazily from the two sources.
pub weekindex: LazyTransform2Builder<WeekIndex, T, S1T, S2T>,
pub monthindex: LazyTransform2Builder<MonthIndex, T, S1T, S2T>,
pub quarterindex: LazyTransform2Builder<QuarterIndex, T, S1T, S2T>,
pub semesterindex: LazyTransform2Builder<SemesterIndex, T, S1T, S2T>,
pub yearindex: LazyTransform2Builder<YearIndex, T, S1T, S2T>,
pub decadeindex: LazyTransform2Builder<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Create from two `ComputedVecsFromDateIndex` sources.
///
/// `dateindex` is built only when both sources carry one; every coarser
/// index delegates to `LazyTransform2Builder::from_lazy` with the same
/// `name` and combined version.
pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedVecsFromDateIndex<S1T>,
source2: &ComputedVecsFromDateIndex<S2T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: source1
.dateindex
.as_ref()
.zip(source2.dateindex.as_ref())
.map(|(s1, s2)| {
LazyVecFrom2::transformed::<F>(name, v, s1.boxed_clone(), s2.boxed_clone())
}),
weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.weekindex,
&source2.weekindex,
),
monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.monthindex,
&source2.monthindex,
),
quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.quarterindex,
&source2.quarterindex,
),
semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.semesterindex,
&source2.semesterindex,
),
yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.yearindex,
&source2.yearindex,
),
decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
name,
v,
&source1.decadeindex,
&source2.decadeindex,
),
}
}
}
impl<T, S1T, S2T> Traversable for LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Build the catalog tree: one child per time index (skipping `dateindex`
    /// when absent), then merge sibling branches.
    fn to_tree_node(&self) -> brk_traversable::TreeNode {
        let mut children = Vec::new();
        if let Some(v) = self.dateindex.as_ref() {
            children.push(("dateindex".to_string(), v.to_tree_node()));
        }
        children.push(("weekindex".to_string(), self.weekindex.to_tree_node()));
        children.push(("monthindex".to_string(), self.monthindex.to_tree_node()));
        children.push(("quarterindex".to_string(), self.quarterindex.to_tree_node()));
        children.push(("semesterindex".to_string(), self.semesterindex.to_tree_node()));
        children.push(("yearindex".to_string(), self.yearindex.to_tree_node()));
        children.push(("decadeindex".to_string(), self.decadeindex.to_tree_node()));
        brk_traversable::TreeNode::Branch(children.into_iter().collect())
            .merge_branches()
            .unwrap()
    }
    /// Yield every exportable vec, `dateindex` first (when present),
    /// then each coarser index in ascending granularity order.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let head: Box<dyn Iterator<Item = &dyn AnyExportableVec>> = match self.dateindex.as_ref() {
            Some(v) => Box::new(v.iter_any_exportable()),
            None => Box::new(std::iter::empty()),
        };
        head.chain(self.weekindex.iter_any_exportable())
            .chain(self.monthindex.iter_any_exportable())
            .chain(self.quarterindex.iter_any_exportable())
            .chain(self.semesterindex.iter_any_exportable())
            .chain(self.yearindex.iter_any_exportable())
            .chain(self.decadeindex.iter_any_exportable())
    }
}

View File

@@ -0,0 +1,97 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use super::{ComputedVecValue, ComputedVecsFromDateIndex, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy version of `ComputedVecsFromDateIndex` where all vecs are lazy transforms.
#[derive(Clone)]
pub struct LazyVecsFromDateIndex<T, S1T = T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
{
// Present only when a `dateindex` source vec was supplied to `from_computed`.
pub dateindex: Option<LazyVecFrom1<DateIndex, T, DateIndex, S1T>>,
// Coarser time granularities, each a lazy transform of the source builder.
pub weekindex: LazyTransformBuilder<WeekIndex, T, S1T>,
pub monthindex: LazyTransformBuilder<MonthIndex, T, S1T>,
pub quarterindex: LazyTransformBuilder<QuarterIndex, T, S1T>,
pub semesterindex: LazyTransformBuilder<SemesterIndex, T, S1T>,
pub yearindex: LazyTransformBuilder<YearIndex, T, S1T>,
pub decadeindex: LazyTransformBuilder<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyVecsFromDateIndex<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a stored `ComputedVecsFromDateIndex`.
/// F is the transform type (e.g., `Negate`, `Halve`).
///
/// `dateindex` is built only when `dateindex_source` is provided; every
/// coarser index delegates to `LazyTransformBuilder::from_lazy` with the
/// same `name` and combined version.
pub fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex_source: Option<IterableBoxedVec<DateIndex, S1T>>,
source: &ComputedVecsFromDateIndex<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: dateindex_source.map(|s| LazyVecFrom1::transformed::<F>(name, v, s)),
weekindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.weekindex),
monthindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.monthindex),
quarterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.quarterindex),
semesterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.semesterindex),
yearindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.yearindex),
decadeindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.decadeindex),
}
}
}
impl<T, S1T> Traversable for LazyVecsFromDateIndex<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Build the catalog tree: one child per time index (skipping `dateindex`
    /// when absent), then merge sibling branches.
    fn to_tree_node(&self) -> brk_traversable::TreeNode {
        let mut children = Vec::new();
        if let Some(v) = self.dateindex.as_ref() {
            children.push(("dateindex".to_string(), v.to_tree_node()));
        }
        children.push(("weekindex".to_string(), self.weekindex.to_tree_node()));
        children.push(("monthindex".to_string(), self.monthindex.to_tree_node()));
        children.push(("quarterindex".to_string(), self.quarterindex.to_tree_node()));
        children.push(("semesterindex".to_string(), self.semesterindex.to_tree_node()));
        children.push(("yearindex".to_string(), self.yearindex.to_tree_node()));
        children.push(("decadeindex".to_string(), self.decadeindex.to_tree_node()));
        brk_traversable::TreeNode::Branch(children.into_iter().collect())
            .merge_branches()
            .unwrap()
    }
    /// Yield every exportable vec, `dateindex` first (when present),
    /// then each coarser index in ascending granularity order.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let head: Box<dyn Iterator<Item = &dyn AnyExportableVec>> = match self.dateindex.as_ref() {
            Some(v) => Box::new(v.iter_any_exportable()),
            None => Box::new(std::iter::empty()),
        };
        head.chain(self.weekindex.iter_any_exportable())
            .chain(self.monthindex.iter_any_exportable())
            .chain(self.quarterindex.iter_any_exportable())
            .chain(self.semesterindex.iter_any_exportable())
            .chain(self.yearindex.iter_any_exportable())
            .chain(self.decadeindex.iter_any_exportable())
    }
}

View File

@@ -0,0 +1,105 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use super::{ComputedVecValue, ComputedVecsFromHeight, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy version of `ComputedVecsFromHeight` where all vecs are lazy transforms.
/// Each index uses `LazyTransformBuilder` sourced from its corresponding stored groups.
#[derive(Clone)]
pub struct LazyVecsFromHeight<T, S1T = T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
{
// Base per-block vec; always present (unlike the optional `dateindex`
// in `LazyVecsFromDateIndex`).
pub height: LazyVecFrom1<Height, T, Height, S1T>,
// Coarser groupings; `dateindex` and `difficultyepoch` come from eager
// source builders, the rest from lazy ones (see `from_computed`).
pub dateindex: LazyTransformBuilder<DateIndex, T, S1T>,
pub weekindex: LazyTransformBuilder<WeekIndex, T, S1T>,
pub difficultyepoch: LazyTransformBuilder<DifficultyEpoch, T, S1T>,
pub monthindex: LazyTransformBuilder<MonthIndex, T, S1T>,
pub quarterindex: LazyTransformBuilder<QuarterIndex, T, S1T>,
pub semesterindex: LazyTransformBuilder<SemesterIndex, T, S1T>,
pub yearindex: LazyTransformBuilder<YearIndex, T, S1T>,
pub decadeindex: LazyTransformBuilder<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyVecsFromHeight<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy transform from a stored `ComputedVecsFromHeight`.
    /// F is the transform type (e.g., `Negate`, `Halve`).
    ///
    /// `height_source` feeds the `height` vec; every other index is wired to
    /// the matching group in `source`. `dateindex` and `difficultyepoch` come
    /// from eager stored groups (`from_eager`); the remaining calendar indexes
    /// are wrapped from lazy groups (`from_lazy`).
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedVecsFromHeight<S1T>,
    ) -> Self {
        // Fold this module's VERSION constant into the caller-supplied version.
        let v = version + VERSION;
        Self {
            height: LazyVecFrom1::transformed::<F>(name, v, height_source),
            dateindex: LazyTransformBuilder::from_eager::<F>(name, v, &source.dateindex),
            weekindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.weekindex),
            difficultyepoch: LazyTransformBuilder::from_eager::<F>(name, v, &source.difficultyepoch),
            monthindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.monthindex),
            quarterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.quarterindex),
            semesterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.semesterindex),
            yearindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.yearindex),
            decadeindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.decadeindex),
        }
    }
}
impl<T, S1T> Traversable for LazyVecsFromHeight<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Builds a branch node keyed by index name, one child per lazy vec.
    fn to_tree_node(&self) -> brk_traversable::TreeNode {
        // Every field here is always present, so there is no need to wrap
        // each entry in `Some` and `flatten` (unlike variants with optional
        // fields): collect the pairs directly.
        brk_traversable::TreeNode::Branch(
            [
                ("height".to_string(), self.height.to_tree_node()),
                ("dateindex".to_string(), self.dateindex.to_tree_node()),
                ("weekindex".to_string(), self.weekindex.to_tree_node()),
                (
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                ),
                ("monthindex".to_string(), self.monthindex.to_tree_node()),
                ("quarterindex".to_string(), self.quarterindex.to_tree_node()),
                (
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                ),
                ("yearindex".to_string(), self.yearindex.to_tree_node()),
                ("decadeindex".to_string(), self.decadeindex.to_tree_node()),
            ]
            .into_iter()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }

    /// Iterates every exportable vec across all indexes, in field order.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        // Chain the concrete iterators directly instead of re-boxing the
        // accumulator at every step — same visitation order, zero heap
        // allocations.
        self.height
            .iter_any_exportable()
            .chain(self.dateindex.iter_any_exportable())
            .chain(self.weekindex.iter_any_exportable())
            .chain(self.difficultyepoch.iter_any_exportable())
            .chain(self.monthindex.iter_any_exportable())
            .chain(self.quarterindex.iter_any_exportable())
            .chain(self.semesterindex.iter_any_exportable())
            .chain(self.yearindex.iter_any_exportable())
            .chain(self.decadeindex.iter_any_exportable())
    }
}

View File

@@ -0,0 +1,70 @@
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use super::{ComputedVecValue, ComputedVecsFromHeightStrict, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy version of `ComputedVecsFromHeightStrict` where all vecs are lazy transforms.
///
/// The strict variant only carries the height and difficulty-epoch indexes.
#[derive(Clone)]
pub struct LazyVecsFromHeightStrict<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy transform over the raw height-indexed source vec.
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Lazy transform sourced from the stored difficulty-epoch group.
    pub difficultyepoch: LazyTransformBuilder<DifficultyEpoch, T, S1T>,
}
impl<T, S1T> LazyVecsFromHeightStrict<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy transform from a stored `ComputedVecsFromHeightStrict`.
    /// F is the transform type (e.g., `Negate`, `Halve`).
    ///
    /// `height_source` feeds the `height` vec; `difficultyepoch` is wired to
    /// the eager stored group of `source`.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedVecsFromHeightStrict<S1T>,
    ) -> Self {
        // Fold this module's VERSION constant into the caller-supplied version.
        let v = version + VERSION;
        Self {
            height: LazyVecFrom1::transformed::<F>(name, v, height_source),
            difficultyepoch: LazyTransformBuilder::from_eager::<F>(name, v, &source.difficultyepoch),
        }
    }
}
impl<T, S1T> Traversable for LazyVecsFromHeightStrict<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Builds a branch node keyed by index name, one child per lazy vec.
    fn to_tree_node(&self) -> brk_traversable::TreeNode {
        // Both fields are always present, so the entries can be collected
        // directly — no `Some`/`flatten` needed.
        brk_traversable::TreeNode::Branch(
            [
                ("height".to_string(), self.height.to_tree_node()),
                (
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                ),
            ]
            .into_iter()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }

    /// Iterates every exportable vec across both indexes, in field order.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        // A single `chain` of the two concrete iterators — same order as the
        // boxed version, without the heap allocations.
        self.height
            .iter_any_exportable()
            .chain(self.difficultyepoch.iter_any_exportable())
    }
}

View File

@@ -0,0 +1,113 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
TxIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use super::{ComputedVecValue, ComputedVecsFromTxindex, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy version of `ComputedVecsFromTxindex` where all vecs are lazy transforms.
#[derive(Clone)]
pub struct LazyVecsFromTxindex<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy transform over the raw txindex source vec; `None` when no
    /// txindex-level source was supplied to `from_computed`.
    pub txindex: Option<LazyVecFrom1<TxIndex, T, TxIndex, S1T>>,
    // One lazy transform per aggregate index, each wired to the matching
    // group of the stored source (see `from_computed`).
    pub height: LazyTransformBuilder<Height, T, S1T>,
    pub dateindex: LazyTransformBuilder<DateIndex, T, S1T>,
    pub weekindex: LazyTransformBuilder<WeekIndex, T, S1T>,
    pub difficultyepoch: LazyTransformBuilder<DifficultyEpoch, T, S1T>,
    pub monthindex: LazyTransformBuilder<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformBuilder<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformBuilder<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformBuilder<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformBuilder<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyVecsFromTxindex<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy transform from a stored `ComputedVecsFromTxindex`.
    /// F is the transform type (e.g., `Negate`, `Halve`).
    ///
    /// When `txindex_source` is `None`, the resulting `txindex` vec is also
    /// `None`. `height`, `dateindex` and `difficultyepoch` are wired from
    /// eager stored groups; the remaining calendar indexes from lazy groups.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        txindex_source: Option<IterableBoxedVec<TxIndex, S1T>>,
        source: &ComputedVecsFromTxindex<S1T>,
    ) -> Self {
        // Fold this module's VERSION constant into the caller-supplied version.
        let v = version + VERSION;
        Self {
            txindex: txindex_source.map(|s| LazyVecFrom1::transformed::<F>(name, v, s)),
            height: LazyTransformBuilder::from_eager::<F>(name, v, &source.height),
            dateindex: LazyTransformBuilder::from_eager::<F>(name, v, &source.dateindex),
            weekindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.weekindex),
            difficultyepoch: LazyTransformBuilder::from_eager::<F>(name, v, &source.difficultyepoch),
            monthindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.monthindex),
            quarterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.quarterindex),
            semesterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.semesterindex),
            yearindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.yearindex),
            decadeindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.decadeindex),
        }
    }
}
impl<T, S1T> Traversable for LazyVecsFromTxindex<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Builds a branch node keyed by index name; the `txindex` entry is
    /// emitted only when that optional vec is present.
    fn to_tree_node(&self) -> brk_traversable::TreeNode {
        brk_traversable::TreeNode::Branch(
            // The first entry is genuinely optional, so the `Option`/`flatten`
            // pattern is warranted here.
            [
                self.txindex
                    .as_ref()
                    .map(|v| ("txindex".to_string(), v.to_tree_node())),
                Some(("height".to_string(), self.height.to_tree_node())),
                Some(("dateindex".to_string(), self.dateindex.to_tree_node())),
                Some((
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                )),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }

    /// Iterates every exportable vec across all indexes, `txindex` first
    /// (when present), then the aggregate indexes in field order.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        // `Option::iter` + `flat_map` handles the optional txindex vec, and
        // the rest are chained directly — same visitation order as the boxed
        // version, without any heap allocations.
        self.txindex
            .iter()
            .flat_map(|v| v.iter_any_exportable())
            .chain(self.height.iter_any_exportable())
            .chain(self.dateindex.iter_any_exportable())
            .chain(self.difficultyepoch.iter_any_exportable())
            .chain(self.weekindex.iter_any_exportable())
            .chain(self.monthindex.iter_any_exportable())
            .chain(self.quarterindex.iter_any_exportable())
            .chain(self.semesterindex.iter_any_exportable())
            .chain(self.yearindex.iter_any_exportable())
            .chain(self.decadeindex.iter_any_exportable())
    }
}

View File

@@ -1,14 +1,22 @@
mod builder_eager;
mod builder_lazy;
mod builder_transform;
mod builder_transform2;
mod computed;
mod from_dateindex;
mod from_height;
mod from_height_strict;
mod from_txindex;
mod computed_from_dateindex;
mod computed_from_height;
mod computed_from_height_strict;
mod computed_from_txindex;
mod lazy2_from_dateindex;
mod lazy_from_dateindex;
mod lazy_from_height;
// mod lazy_from_height_strict;
// mod lazy_from_txindex;
mod price_percentiles;
mod ratio_from_dateindex;
mod sd_from_dateindex;
mod source;
mod transforms;
mod value_from_dateindex;
mod value_from_height;
mod value_from_txindex;
@@ -16,15 +24,23 @@ mod value_height;
pub use builder_eager::*;
pub use builder_lazy::*;
pub use builder_transform::*;
pub use builder_transform2::*;
use computed::*;
pub use from_dateindex::*;
pub use from_height::*;
pub use from_height_strict::*;
pub use from_txindex::*;
pub use computed_from_dateindex::*;
pub use computed_from_height::*;
pub use computed_from_height_strict::*;
pub use computed_from_txindex::*;
pub use lazy_from_dateindex::*;
pub use lazy_from_height::*;
pub use lazy2_from_dateindex::*;
// pub use lazy_from_height_strict::*;
// pub use lazy_from_txindex::*;
pub use price_percentiles::*;
pub use ratio_from_dateindex::*;
pub use sd_from_dateindex::*;
pub use source::*;
pub use transforms::*;
pub use value_from_dateindex::*;
pub use value_from_height::*;
pub use value_from_txindex::*;

View File

@@ -0,0 +1,78 @@
use brk_types::{Bitcoin, Dollars, Sats, StoredF32, StoredF64};
use vecdb::{BinaryTransform, UnaryTransform};
/// (Dollars, Dollars) -> Dollars addition
/// Used for computing total = profit + loss
pub struct DollarsPlus;

impl BinaryTransform<Dollars, Dollars, Dollars> for DollarsPlus {
    // Trivial element-wise op; always inlined so lazy vec iteration pays no call cost.
    #[inline(always)]
    fn apply(lhs: Dollars, rhs: Dollars) -> Dollars {
        lhs + rhs
    }
}
/// (Dollars, Dollars) -> Dollars subtraction
/// Used for computing net = profit - loss
pub struct DollarsMinus;

impl BinaryTransform<Dollars, Dollars, Dollars> for DollarsMinus {
    // Trivial element-wise op; always inlined so lazy vec iteration pays no call cost.
    #[inline(always)]
    fn apply(lhs: Dollars, rhs: Dollars) -> Dollars {
        lhs - rhs
    }
}
/// (Dollars, Dollars) -> StoredF32 ratio
/// Used for computing percentage ratios like profit/total, loss/total, etc.
pub struct Ratio32;

impl BinaryTransform<Dollars, Dollars, StoredF32> for Ratio32 {
    #[inline(always)]
    fn apply(numerator: Dollars, denominator: Dollars) -> StoredF32 {
        // NOTE(review): behavior for a zero denominator is delegated to
        // `Dollars`' Div impl — confirm it yields the expected value (e.g. NaN/inf).
        StoredF32::from(numerator / denominator)
    }
}
/// (Dollars, Dollars) -> -StoredF32 (negated ratio)
/// Computes -(a/b) directly to avoid lazy-from-lazy chains.
pub struct NegRatio32;

impl BinaryTransform<Dollars, Dollars, StoredF32> for NegRatio32 {
    #[inline(always)]
    fn apply(numerator: Dollars, denominator: Dollars) -> StoredF32 {
        // Same quotient as `Ratio32`, negated in one step.
        -StoredF32::from(numerator / denominator)
    }
}
// === Unary Transforms ===

/// Sats -> Bitcoin (divide by 1e8)
pub struct SatsToBitcoin;

impl UnaryTransform<Sats, Bitcoin> for SatsToBitcoin {
    // Delegates entirely to the `From<Sats> for Bitcoin` conversion.
    #[inline(always)]
    fn apply(sats: Sats) -> Bitcoin {
        Bitcoin::from(sats)
    }
}
/// Sats -> StoredF64 via Bitcoin (for coinblocks/coindays)
pub struct SatsToStoredF64;

impl UnaryTransform<Sats, StoredF64> for SatsToStoredF64 {
    // Two-step conversion: Sats -> Bitcoin, then Bitcoin -> StoredF64.
    #[inline(always)]
    fn apply(sats: Sats) -> StoredF64 {
        StoredF64::from(Bitcoin::from(sats))
    }
}
/// Sats -> Sats/2 (for supply_half)
pub struct HalveSats;

impl UnaryTransform<Sats, Sats> for HalveSats {
    // NOTE(review): if `Sats` division is integer-backed, odd values round
    // down here — confirm that truncation is acceptable for supply_half.
    #[inline(always)]
    fn apply(sats: Sats) -> Sats {
        sats / 2
    }
}

View File

@@ -1,20 +1,20 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{CollectableVec, Database, EagerVec, Exit, ImportableVec, PcoVec};
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, LazyVecFrom1, PcoVec};
use crate::{
Indexes,
grouped::Source,
grouped::{SatsToBitcoin, Source},
price,
traits::{ComputeFromBitcoin, ComputeFromSats},
traits::ComputeFromBitcoin,
utils::OptionExt,
};
#[derive(Clone, Traversable)]
pub struct ComputedHeightValueVecs {
pub sats: Option<EagerVec<PcoVec<Height, Sats>>>,
pub bitcoin: EagerVec<PcoVec<Height, Bitcoin>>,
pub bitcoin: LazyVecFrom1<Height, Bitcoin, Height, Sats>,
pub dollars: Option<EagerVec<PcoVec<Height, Dollars>>>,
}
@@ -28,15 +28,29 @@ impl ComputedHeightValueVecs {
version: Version,
compute_dollars: bool,
) -> Result<Self> {
Ok(Self {
sats: source.is_compute().then(|| {
EagerVec::forced_import(db, name, version + VERSION + Version::ZERO).unwrap()
}),
bitcoin: EagerVec::forced_import(
db,
let sats = source
.is_compute()
.then(|| EagerVec::forced_import(db, name, version + VERSION + Version::ZERO).unwrap());
let bitcoin = match &source {
Source::Compute => LazyVecFrom1::transformed::<SatsToBitcoin>(
&format!("{name}_btc"),
version + VERSION + Version::ZERO,
)?,
sats.as_ref().unwrap().boxed_clone(),
),
Source::Vec(boxed) => LazyVecFrom1::transformed::<SatsToBitcoin>(
&format!("{name}_btc"),
version + VERSION + Version::ZERO,
boxed.clone(),
),
Source::None => {
panic!("Source::None not supported for lazy bitcoin - use Source::Vec instead")
}
};
Ok(Self {
sats,
bitcoin,
dollars: compute_dollars.then(|| {
EagerVec::forced_import(
db,
@@ -60,8 +74,7 @@ impl ComputedHeightValueVecs {
{
compute(self.sats.um())?;
let height: Option<&PcoVec<Height, Sats>> = None;
self.compute_rest(price, starting_indexes, exit, height)?;
self.compute_rest(price, starting_indexes, exit)?;
Ok(())
}
@@ -71,27 +84,12 @@ impl ComputedHeightValueVecs {
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
exit: &Exit,
height: Option<&impl CollectableVec<Height, Sats>>,
) -> Result<()> {
if let Some(height) = height {
self.bitcoin
.compute_from_sats(starting_indexes.height, height, exit)?;
} else {
self.bitcoin.compute_from_sats(
starting_indexes.height,
self.sats.u(),
exit,
)?;
}
let height_to_bitcoin = &self.bitcoin;
let height_to_price_close = &price.u().chainindexes_to_price_close.height;
if let Some(dollars) = self.dollars.as_mut() {
dollars.compute_from_bitcoin(
starting_indexes.height,
height_to_bitcoin,
height_to_price_close,
&self.bitcoin,
&price.u().chainindexes_to_price_close.height,
exit,
)?;
}

View File

@@ -90,7 +90,10 @@ impl Computer {
Ok((indexes, fetched, blks, txins, txouts))
})?;
info!("Imported indexes/fetched/blks/txins/txouts in {:?}", i.elapsed());
info!(
"Imported indexes/fetched/blks/txins/txouts in {:?}",
i.elapsed()
);
let i = Instant::now();
let (price, constants, market) = thread::scope(|s| -> Result<_> {
@@ -195,11 +198,11 @@ impl Computer {
continue;
}
if let Some(name) = entry.file_name().to_str() {
if !EXPECTED_DBS.contains(&name) {
info!("Removing obsolete database folder: {}", name);
fs::remove_dir_all(entry.path())?;
}
if let Some(name) = entry.file_name().to_str()
&& !EXPECTED_DBS.contains(&name)
{
info!("Removing obsolete database folder: {}", name);
fs::remove_dir_all(entry.path())?;
}
}
@@ -262,7 +265,8 @@ impl Computer {
let txouts = scope.spawn(|| -> Result<()> {
info!("Computing txouts...");
let i = Instant::now();
self.txouts.compute(indexer, &self.txins, &starting_indexes, exit)?;
self.txouts
.compute(indexer, &self.txins, &starting_indexes, exit)?;
info!("Computed txouts in {:?}", i.elapsed());
Ok(())
});

View File

@@ -51,7 +51,6 @@ impl Vecs {
vecs::Vecs::forced_import(
&db,
pool.slug,
pools,
version + Version::ZERO,
indexes,
price,

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, PoolSlug, Pools, Sats, StoredF32, StoredU16, StoredU32};
use brk_types::{Height, PoolSlug, Sats, StoredF32, StoredU16, StoredU32};
use vecdb::{Database, Exit, GenericStoredVec, IterableVec, VecIndex, Version};
use crate::{
@@ -37,7 +37,6 @@ impl Vecs {
pub fn forced_import(
db: &Database,
slug: PoolSlug,
_pools: &Pools,
parent_version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,

View File

@@ -17,7 +17,10 @@ use crate::{
stateful::states::AddressCohortState,
};
use super::{super::metrics::{CohortMetrics, ImportConfig}, traits::{CohortVecs, DynCohortVecs}};
use super::{
super::metrics::{CohortMetrics, ImportConfig},
traits::{CohortVecs, DynCohortVecs},
};
const VERSION: Version = Version::ZERO;
@@ -288,8 +291,6 @@ impl CohortVecs for AddressCohortVecs {
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
self.metrics.compute_rest_part2(
@@ -300,8 +301,6 @@ impl CohortVecs for AddressCohortVecs {
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)?;
Ok(())

View File

@@ -183,7 +183,7 @@ impl AddressCohorts {
/// Second phase of post-processing: compute relative metrics.
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2<S, D, HM, DM, HR, DR>(
pub fn compute_rest_part2<S, D, HM, DM>(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
@@ -192,8 +192,6 @@ impl AddressCohorts {
dateindex_to_supply: &D,
height_to_market_cap: Option<&HM>,
dateindex_to_market_cap: Option<&DM>,
height_to_realized_cap: Option<&HR>,
dateindex_to_realized_cap: Option<&DR>,
exit: &Exit,
) -> Result<()>
where
@@ -201,8 +199,6 @@ impl AddressCohorts {
D: IterableVec<DateIndex, Bitcoin> + Sync,
HM: IterableVec<Height, Dollars> + Sync,
DM: IterableVec<DateIndex, Dollars> + Sync,
HR: IterableVec<Height, Dollars> + Sync,
DR: IterableVec<DateIndex, Dollars> + Sync,
{
self.0.par_iter_mut().try_for_each(|v| {
v.compute_rest_part2(
@@ -213,8 +209,6 @@ impl AddressCohorts {
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)
})

View File

@@ -64,8 +64,6 @@ pub trait CohortVecs: DynCohortVecs {
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()>;
}

View File

@@ -236,8 +236,6 @@ impl CohortVecs for UTXOCohortVecs {
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
self.metrics.compute_rest_part2(
@@ -248,8 +246,6 @@ impl CohortVecs for UTXOCohortVecs {
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)
}

View File

@@ -335,7 +335,7 @@ impl UTXOCohorts {
/// Second phase of post-processing: compute relative metrics.
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2<S, D, HM, DM, HR, DR>(
pub fn compute_rest_part2<S, D, HM, DM>(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
@@ -344,8 +344,6 @@ impl UTXOCohorts {
dateindex_to_supply: &D,
height_to_market_cap: Option<&HM>,
dateindex_to_market_cap: Option<&DM>,
height_to_realized_cap: Option<&HR>,
dateindex_to_realized_cap: Option<&DR>,
exit: &Exit,
) -> Result<()>
where
@@ -353,8 +351,6 @@ impl UTXOCohorts {
D: IterableVec<DateIndex, Bitcoin> + Sync,
HM: IterableVec<Height, Dollars> + Sync,
DM: IterableVec<DateIndex, Dollars> + Sync,
HR: IterableVec<Height, Dollars> + Sync,
DR: IterableVec<DateIndex, Dollars> + Sync,
{
self.par_iter_mut().try_for_each(|v| {
v.compute_rest_part2(
@@ -365,8 +361,6 @@ impl UTXOCohorts {
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)
})

View File

@@ -49,7 +49,7 @@ pub fn compute_rest_part1(
///
/// Computes supply ratios, market cap ratios, etc. using total references.
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2<S, D, HM, DM, HR, DR>(
pub fn compute_rest_part2<S, D, HM, DM>(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
indexes: &indexes::Vecs,
@@ -59,8 +59,6 @@ pub fn compute_rest_part2<S, D, HM, DM, HR, DR>(
dateindex_to_supply: &D,
height_to_market_cap: Option<&HM>,
dateindex_to_market_cap: Option<&DM>,
height_to_realized_cap: Option<&HR>,
dateindex_to_realized_cap: Option<&DR>,
exit: &Exit,
) -> Result<()>
where
@@ -68,8 +66,6 @@ where
D: IterableVec<DateIndex, Bitcoin> + Sync,
HM: IterableVec<Height, Dollars> + Sync,
DM: IterableVec<DateIndex, Dollars> + Sync,
HR: IterableVec<Height, Dollars> + Sync,
DR: IterableVec<DateIndex, Dollars> + Sync,
{
info!("Computing rest part 2...");
@@ -81,8 +77,6 @@ where
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)?;
@@ -94,8 +88,6 @@ where
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)?;

View File

@@ -53,6 +53,10 @@ impl CohortMetrics {
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let compute_dollars = cfg.compute_dollars();
let unrealized = compute_dollars
.then(|| UnrealizedMetrics::forced_import(cfg))
.transpose()?;
Ok(Self {
filter: cfg.filter.clone(),
supply: SupplyMetrics::forced_import(cfg)?,
@@ -60,15 +64,14 @@ impl CohortMetrics {
realized: compute_dollars
.then(|| RealizedMetrics::forced_import(cfg))
.transpose()?,
unrealized: compute_dollars
.then(|| UnrealizedMetrics::forced_import(cfg))
.transpose()?,
price_paid: compute_dollars
.then(|| PricePaidMetrics::forced_import(cfg))
.transpose()?,
relative: compute_dollars
.then(|| RelativeMetrics::forced_import(cfg))
relative: unrealized
.as_ref()
.map(|u| RelativeMetrics::forced_import(cfg, u))
.transpose()?,
unrealized,
})
}
@@ -261,7 +264,7 @@ impl CohortMetrics {
.compute_rest_part1(indexes, price, starting_indexes, exit)?;
if let Some(realized) = self.realized.as_mut() {
realized.compute_rest_part1(indexes, price, starting_indexes, exit)?;
realized.compute_rest_part1(indexes, starting_indexes, exit)?;
}
if let Some(unrealized) = self.unrealized.as_mut() {
@@ -286,21 +289,8 @@ impl CohortMetrics {
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
self.supply.compute_rest_part2(
indexes,
price,
starting_indexes,
height_to_supply,
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
exit,
)?;
if let Some(realized) = self.realized.as_mut() {
realized.compute_rest_part2(
indexes,
@@ -321,11 +311,8 @@ impl CohortMetrics {
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
&self.supply,
self.unrealized.as_ref(),
self.realized.as_ref(),
exit,
)?;
}

View File

@@ -2,13 +2,16 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec};
use vecdb::{
AnyStoredVec, EagerVec, Exit, GenericStoredVec, Ident, ImportableVec, IterableCloneableVec,
IterableVec, Negate, PcoVec,
};
use crate::{
Indexes,
grouped::{
ComputedRatioVecsFromDateIndex, ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source,
VecBuilderOptions,
ComputedRatioVecsFromDateIndex, ComputedVecsFromDateIndex, ComputedVecsFromHeight,
LazyVecsFromHeight, Source, VecBuilderOptions,
},
indexes, price,
stateful::states::RealizedState,
@@ -33,7 +36,7 @@ pub struct RealizedMetrics {
pub indexes_to_realized_profit: ComputedVecsFromHeight<Dollars>,
pub height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_loss: ComputedVecsFromHeight<Dollars>,
pub indexes_to_neg_realized_loss: ComputedVecsFromHeight<Dollars>,
pub indexes_to_neg_realized_loss: LazyVecsFromHeight<Dollars>,
pub indexes_to_net_realized_pnl: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_value: ComputedVecsFromHeight<Dollars>,
@@ -43,8 +46,7 @@ pub struct RealizedMetrics {
pub indexes_to_net_realized_pnl_rel_to_realized_cap: ComputedVecsFromHeight<StoredF32>,
// === Total Realized PnL ===
pub height_to_total_realized_pnl: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_total_realized_pnl: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_total_realized_pnl: LazyVecsFromHeight<Dollars>,
pub dateindex_to_realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
// === Value Created/Destroyed ===
@@ -92,6 +94,47 @@ impl RealizedMetrics {
let sum = VecBuilderOptions::default().add_sum();
let sum_cum = VecBuilderOptions::default().add_sum().add_cumulative();
let height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_loss"), cfg.version + v0)?;
let indexes_to_realized_loss = ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("realized_loss"),
Source::None,
cfg.version + v0,
cfg.indexes,
sum_cum,
)?;
let indexes_to_neg_realized_loss = LazyVecsFromHeight::from_computed::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
height_to_realized_loss.boxed_clone(),
&indexes_to_realized_loss,
);
// realized_value is the source for total_realized_pnl (they're identical)
let indexes_to_realized_value = ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("realized_value"),
Source::Compute,
cfg.version + v0,
cfg.indexes,
sum,
)?;
// total_realized_pnl is a lazy alias to realized_value
let indexes_to_total_realized_pnl = LazyVecsFromHeight::from_computed::<Ident>(
&cfg.name("total_realized_pnl"),
cfg.version + v1,
indexes_to_realized_value
.height
.as_ref()
.unwrap()
.boxed_clone(),
&indexes_to_realized_value,
);
Ok(Self {
// === Realized Cap ===
height_to_realized_cap: EagerVec::forced_import(
@@ -158,27 +201,9 @@ impl RealizedMetrics {
cfg.indexes,
sum_cum,
)?,
height_to_realized_loss: EagerVec::forced_import(
cfg.db,
&cfg.name("realized_loss"),
cfg.version + v0,
)?,
indexes_to_realized_loss: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("realized_loss"),
Source::None,
cfg.version + v0,
cfg.indexes,
sum_cum,
)?,
indexes_to_neg_realized_loss: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("neg_realized_loss"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
sum_cum,
)?,
height_to_realized_loss,
indexes_to_realized_loss,
indexes_to_neg_realized_loss,
indexes_to_net_realized_pnl: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
@@ -187,14 +212,7 @@ impl RealizedMetrics {
cfg.indexes,
sum_cum,
)?,
indexes_to_realized_value: ComputedVecsFromHeight::forced_import(
cfg.db,
&cfg.name("realized_value"),
Source::Compute,
cfg.version + v0,
cfg.indexes,
sum,
)?,
indexes_to_realized_value,
// === Realized vs Realized Cap Ratios ===
indexes_to_realized_profit_rel_to_realized_cap: ComputedVecsFromHeight::forced_import(
@@ -223,19 +241,7 @@ impl RealizedMetrics {
)?,
// === Total Realized PnL ===
height_to_total_realized_pnl: EagerVec::forced_import(
cfg.db,
&cfg.name("total_realized_pnl"),
cfg.version + v0,
)?,
indexes_to_total_realized_pnl: ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("total_realized_pnl"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
sum,
)?,
indexes_to_total_realized_pnl,
dateindex_to_realized_profit_to_loss_ratio: extended
.then(|| {
EagerVec::forced_import(
@@ -555,7 +561,6 @@ impl RealizedMetrics {
pub fn compute_rest_part1(
&mut self,
indexes: &indexes::Vecs,
_price: Option<&price::Vecs>,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
@@ -580,18 +585,6 @@ impl RealizedMetrics {
Some(&self.height_to_realized_loss),
)?;
// neg_realized_loss = realized_loss * -1
self.indexes_to_neg_realized_loss
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&self.height_to_realized_loss,
|(i, v, ..)| (i, v * -1_i64),
exit,
)?;
Ok(())
})?;
// net_realized_pnl = profit - loss
self.indexes_to_net_realized_pnl
.compute_all(indexes, starting_indexes, exit, |vec| {
@@ -605,6 +598,8 @@ impl RealizedMetrics {
})?;
// realized_value = profit + loss
// Note: total_realized_pnl is a lazy alias to realized_value since both
// compute profit + loss with sum aggregation, making them identical.
self.indexes_to_realized_value
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_add(
@@ -616,14 +611,6 @@ impl RealizedMetrics {
Ok(())
})?;
// total_realized_pnl at height level = profit + loss
self.height_to_total_realized_pnl.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
self.indexes_to_value_created.compute_rest(
indexes,
starting_indexes,
@@ -705,18 +692,6 @@ impl RealizedMetrics {
Ok(())
})?;
// total_realized_pnl at dateindex level
self.indexes_to_total_realized_pnl
.compute_all(starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.dateindex,
self.indexes_to_realized_profit.dateindex.unwrap_sum(),
self.indexes_to_realized_loss.dateindex.unwrap_sum(),
exit,
)?;
Ok(())
})?;
// SOPR = value_created / value_destroyed
self.dateindex_to_sopr.compute_divide(
starting_indexes.dateindex,

View File

@@ -1,15 +1,21 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use vecdb::{EagerVec, Exit, ImportableVec, IterableVec, PcoVec};
use vecdb::{
EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, LazyVecFrom1, LazyVecFrom2,
Negate, PcoVec,
};
use crate::{
Indexes,
grouped::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
grouped::{
ComputedVecsFromDateIndex, ComputedVecsFromHeight, LazyVecsFrom2FromDateIndex,
LazyVecsFromDateIndex, NegRatio32, Ratio32, Source, VecBuilderOptions,
},
indexes,
};
use super::{ImportConfig, RealizedMetrics, SupplyMetrics};
use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics};
/// Relative metrics comparing cohort values to global values.
#[derive(Clone, Traversable)]
@@ -36,11 +42,12 @@ pub struct RelativeMetrics {
// === Unrealized vs Market Cap ===
pub height_to_unrealized_profit_rel_to_market_cap: EagerVec<PcoVec<Height, StoredF32>>,
pub height_to_unrealized_loss_rel_to_market_cap: EagerVec<PcoVec<Height, StoredF32>>,
pub height_to_neg_unrealized_loss_rel_to_market_cap: EagerVec<PcoVec<Height, StoredF32>>,
pub height_to_neg_unrealized_loss_rel_to_market_cap:
LazyVecFrom1<Height, StoredF32, Height, StoredF32>,
pub height_to_net_unrealized_pnl_rel_to_market_cap: EagerVec<PcoVec<Height, StoredF32>>,
pub indexes_to_unrealized_profit_rel_to_market_cap: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_unrealized_loss_rel_to_market_cap: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_neg_unrealized_loss_rel_to_market_cap: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_neg_unrealized_loss_rel_to_market_cap: LazyVecsFromDateIndex<StoredF32>,
pub indexes_to_net_unrealized_pnl_rel_to_market_cap: ComputedVecsFromDateIndex<StoredF32>,
// === Unrealized vs Own Market Cap (optional) ===
@@ -49,7 +56,7 @@ pub struct RelativeMetrics {
pub height_to_unrealized_loss_rel_to_own_market_cap:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
pub height_to_neg_unrealized_loss_rel_to_own_market_cap:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
Option<LazyVecFrom1<Height, StoredF32, Height, StoredF32>>,
pub height_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
pub indexes_to_unrealized_profit_rel_to_own_market_cap:
@@ -57,32 +64,32 @@ pub struct RelativeMetrics {
pub indexes_to_unrealized_loss_rel_to_own_market_cap:
Option<ComputedVecsFromDateIndex<StoredF32>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_market_cap:
Option<ComputedVecsFromDateIndex<StoredF32>>,
Option<LazyVecsFromDateIndex<StoredF32>>,
pub indexes_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<ComputedVecsFromDateIndex<StoredF32>>,
// === Unrealized vs Own Total Unrealized PnL (optional) ===
// === Unrealized vs Own Total Unrealized PnL (optional, lazy from unrealized sources) ===
pub height_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<EagerVec<PcoVec<Height, StoredF32>>>,
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<ComputedVecsFromDateIndex<StoredF32>>,
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<ComputedVecsFromDateIndex<StoredF32>>,
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<ComputedVecsFromDateIndex<StoredF32>>,
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
pub indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<ComputedVecsFromDateIndex<StoredF32>>,
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
}
impl RelativeMetrics {
/// Import relative metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
pub fn forced_import(cfg: &ImportConfig, unrealized: &UnrealizedMetrics) -> Result<Self> {
let v0 = Version::ZERO;
let v1 = Version::ONE;
let v2 = Version::new(2);
@@ -90,6 +97,165 @@ impl RelativeMetrics {
let compute_rel_to_all = cfg.compute_rel_to_all();
let last = VecBuilderOptions::default().add_last();
// Create sources for lazy neg vecs
let height_to_unrealized_loss_rel_to_market_cap: EagerVec<PcoVec<Height, StoredF32>> =
EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version + v0,
)?;
let height_to_neg_unrealized_loss_rel_to_market_cap = LazyVecFrom1::transformed::<Negate>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v0,
height_to_unrealized_loss_rel_to_market_cap.boxed_clone(),
);
let indexes_to_unrealized_loss_rel_to_market_cap =
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_market_cap"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)?;
let indexes_to_neg_unrealized_loss_rel_to_market_cap =
LazyVecsFromDateIndex::from_computed::<Negate>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v1,
indexes_to_unrealized_loss_rel_to_market_cap
.dateindex
.as_ref()
.map(|v| v.boxed_clone()),
&indexes_to_unrealized_loss_rel_to_market_cap,
);
// Optional: own market cap vecs
let height_to_unrealized_loss_rel_to_own_market_cap: Option<
EagerVec<PcoVec<Height, StoredF32>>,
> = (extended && compute_rel_to_all)
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
)
})
.transpose()?;
let height_to_neg_unrealized_loss_rel_to_own_market_cap =
height_to_unrealized_loss_rel_to_own_market_cap
.as_ref()
.map(|source| {
LazyVecFrom1::transformed::<Negate>(
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
source.boxed_clone(),
)
});
let indexes_to_unrealized_loss_rel_to_own_market_cap: Option<
ComputedVecsFromDateIndex<StoredF32>,
> = (extended && compute_rel_to_all)
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
Source::Compute,
cfg.version + v2,
cfg.indexes,
last,
)
})
.transpose()?;
let indexes_to_neg_unrealized_loss_rel_to_own_market_cap =
indexes_to_unrealized_loss_rel_to_own_market_cap
.as_ref()
.map(|source| {
LazyVecsFromDateIndex::from_computed::<Negate>(
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
source.dateindex.as_ref().map(|v| v.boxed_clone()),
source,
)
});
// Optional: own total unrealized pnl vecs (lazy from unrealized sources)
let height_to_unrealized_profit_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
unrealized.height_to_unrealized_profit.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
)
});
let height_to_unrealized_loss_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
unrealized.height_to_unrealized_loss.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
)
});
let height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecFrom2::transformed::<NegRatio32>(
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
unrealized.height_to_unrealized_loss.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
)
});
let height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.height_to_net_unrealized_pnl.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
)
});
let indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_unrealized_profit,
&unrealized.indexes_to_total_unrealized_pnl,
)
});
let indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
});
let indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<NegRatio32>(
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
});
let indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl = extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_net_unrealized_pnl,
&unrealized.indexes_to_total_unrealized_pnl,
)
});
Ok(Self {
// === Supply Relative to Circulating Supply ===
indexes_to_supply_rel_to_circulating_supply: compute_rel_to_all
@@ -184,16 +350,8 @@ impl RelativeMetrics {
&cfg.name("unrealized_profit_rel_to_market_cap"),
cfg.version + v0,
)?,
height_to_unrealized_loss_rel_to_market_cap: EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version + v0,
)?,
height_to_neg_unrealized_loss_rel_to_market_cap: EagerVec::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v0,
)?,
height_to_unrealized_loss_rel_to_market_cap,
height_to_neg_unrealized_loss_rel_to_market_cap,
height_to_net_unrealized_pnl_rel_to_market_cap: EagerVec::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl_rel_to_market_cap"),
@@ -208,23 +366,8 @@ impl RelativeMetrics {
cfg.indexes,
last,
)?,
indexes_to_unrealized_loss_rel_to_market_cap: ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_market_cap"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)?,
indexes_to_neg_unrealized_loss_rel_to_market_cap:
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)?,
indexes_to_unrealized_loss_rel_to_market_cap,
indexes_to_neg_unrealized_loss_rel_to_market_cap,
indexes_to_net_unrealized_pnl_rel_to_market_cap:
ComputedVecsFromDateIndex::forced_import(
cfg.db,
@@ -245,24 +388,8 @@ impl RelativeMetrics {
)
})
.transpose()?,
height_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
)
})
.transpose()?,
height_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
)
})
.transpose()?,
height_to_unrealized_loss_rel_to_own_market_cap,
height_to_neg_unrealized_loss_rel_to_own_market_cap,
height_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
EagerVec::forced_import(
@@ -284,30 +411,8 @@ impl RelativeMetrics {
)
})
.transpose()?,
indexes_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
Source::Compute,
cfg.version + v2,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
Source::Compute,
cfg.version + v2,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_unrealized_loss_rel_to_own_market_cap,
indexes_to_neg_unrealized_loss_rel_to_own_market_cap,
indexes_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
ComputedVecsFromDateIndex::forced_import(
@@ -322,90 +427,14 @@ impl RelativeMetrics {
.transpose()?,
// === Unrealized vs Own Total Unrealized PnL (optional) ===
height_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
)
})
.transpose()?,
height_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
)
})
.transpose()?,
height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v0,
)
})
.transpose()?,
height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended
.then(|| {
EagerVec::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
)
})
.transpose()?,
indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended
.then(|| {
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)
})
.transpose()?,
height_to_unrealized_profit_rel_to_own_total_unrealized_pnl,
height_to_unrealized_loss_rel_to_own_total_unrealized_pnl,
height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl,
height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl,
indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl,
indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl,
indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl,
indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl,
})
}
@@ -414,7 +443,7 @@ impl RelativeMetrics {
/// This computes percentage ratios comparing cohort metrics to global metrics:
/// - Supply relative to circulating supply
/// - Supply in profit/loss relative to own supply and circulating supply
/// - Unrealized profit/loss relative to market cap, own market cap, total unrealized
/// - Unrealized profit/loss relative to market cap, total unrealized
///
/// See `stateful/common/compute.rs` lines 800-1200 for the full original implementation.
#[allow(clippy::too_many_arguments)]
@@ -426,11 +455,8 @@ impl RelativeMetrics {
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
_height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
_dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
supply: &SupplyMetrics,
unrealized: Option<&super::UnrealizedMetrics>,
_realized: Option<&RealizedMetrics>,
exit: &Exit,
) -> Result<()> {
// === Supply Relative to Circulating Supply ===
@@ -546,13 +572,6 @@ impl RelativeMetrics {
height_to_mc,
exit,
)?;
self.height_to_neg_unrealized_loss_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_net_unrealized_pnl_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
@@ -587,37 +606,20 @@ impl RelativeMetrics {
})?;
}
// indexes_to_neg_unrealized_loss_rel_to_market_cap
if let Some(dateindex_to_mc) = dateindex_to_market_cap
&& let Some(unrealized) = unrealized
&& let Some(dateindex_vec) = unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
if let Some(dateindex_vec) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
self.indexes_to_neg_unrealized_loss_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
if let Some(dateindex_vec) = unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
self.indexes_to_net_unrealized_pnl_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
self.indexes_to_net_unrealized_pnl_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
// === Supply in Profit/Loss Relative to Circulating Supply (indexes) ===
@@ -690,18 +692,6 @@ impl RelativeMetrics {
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
@@ -754,27 +744,6 @@ impl RelativeMetrics {
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
@@ -796,132 +765,6 @@ impl RelativeMetrics {
Ok(())
})?;
}
// === Unrealized vs Own Total Unrealized PnL ===
if let Some(v) = self
.height_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
// indexes versions for own total unrealized pnl
if let Some(v) = self
.indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(net_pnl_dateindex) =
unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
net_pnl_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
}
Ok(())

View File

@@ -1,10 +1,10 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, Sats, StoredU64, Version};
use brk_types::{Height, Sats, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec,
TypedVecIterator,
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
PcoVec, TypedVecIterator,
};
use crate::{
@@ -52,20 +52,20 @@ impl SupplyMetrics {
let compute_dollars = cfg.compute_dollars();
let last = VecBuilderOptions::default().add_last();
Ok(Self {
height_to_supply: EagerVec::forced_import(
cfg.db,
&cfg.name("supply"),
cfg.version + v0,
)?,
let height_to_supply: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("supply"), cfg.version + v0)?;
height_to_supply_value: ComputedHeightValueVecs::forced_import(
cfg.db,
&cfg.name("supply"),
Source::None,
cfg.version + v0,
compute_dollars,
)?,
let height_to_supply_value = ComputedHeightValueVecs::forced_import(
cfg.db,
&cfg.name("supply"),
Source::Vec(height_to_supply.boxed_clone()),
cfg.version + v0,
compute_dollars,
)?;
Ok(Self {
height_to_supply,
height_to_supply_value,
indexes_to_supply: ComputedValueVecsFromDateIndex::forced_import(
cfg.db,
@@ -183,12 +183,8 @@ impl SupplyMetrics {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.height_to_supply_value.compute_rest(
price,
starting_indexes,
exit,
Some(&self.height_to_supply),
)?;
self.height_to_supply_value
.compute_rest(price, starting_indexes, exit)?;
self.indexes_to_supply
.compute_all(price, starting_indexes, exit, |v| {
@@ -242,29 +238,4 @@ impl SupplyMetrics {
Ok(())
}
/// Second phase of computed metrics (ratios, relative values).
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
_starting_indexes: &Indexes,
height_to_supply: &impl IterableVec<Height, Bitcoin>,
_dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
_exit: &Exit,
) -> Result<()> {
let _ = (
indexes,
price,
height_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
);
// Supply relative metrics computed here if needed
Ok(())
}
}

View File

@@ -0,0 +1,404 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
LazyVecFrom1, LazyVecFrom2, Negate, PcoVec,
};
use crate::{
Indexes,
grouped::{
ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromDateIndex,
DollarsMinus, DollarsPlus, LazyVecsFromDateIndex, Source, VecBuilderOptions,
},
stateful::states::UnrealizedState,
};
use super::ImportConfig;
/// Unrealized profit/loss metrics.
///
/// Per cohort, tracks the amount of supply currently in profit / in loss and
/// the corresponding unrealized profit/loss in dollars, indexed both by block
/// height and by date. The negated, net and total fields are lazy views
/// derived from the eager profit/loss source vecs rather than stored data.
#[derive(Clone, Traversable)]
pub struct UnrealizedMetrics {
    // === Supply in Profit/Loss ===
    // Eagerly stored sats amounts, plus date-level aggregates.
    pub height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>>,
    pub indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex,
    pub height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>>,
    pub indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex,
    pub dateindex_to_supply_in_profit: EagerVec<PcoVec<DateIndex, Sats>>,
    pub dateindex_to_supply_in_loss: EagerVec<PcoVec<DateIndex, Sats>>,
    // Value views over the height-level supply vecs (dollar view is gated by
    // `compute_dollars` at import time).
    pub height_to_supply_in_profit_value: ComputedHeightValueVecs,
    pub height_to_supply_in_loss_value: ComputedHeightValueVecs,

    // === Unrealized Profit/Loss ===
    pub height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_unrealized_profit: ComputedVecsFromDateIndex<Dollars>,
    pub height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_unrealized_loss: ComputedVecsFromDateIndex<Dollars>,
    pub dateindex_to_unrealized_profit: EagerVec<PcoVec<DateIndex, Dollars>>,
    pub dateindex_to_unrealized_loss: EagerVec<PcoVec<DateIndex, Dollars>>,

    // === Negated and Net ===
    // neg = loss negated, lazily derived from the loss vecs (see `Negate`).
    pub height_to_neg_unrealized_loss: LazyVecFrom1<Height, Dollars, Height, Dollars>,
    pub indexes_to_neg_unrealized_loss: LazyVecsFromDateIndex<Dollars>,
    // net = profit - loss (height is lazy, indexes computed)
    pub height_to_net_unrealized_pnl:
        LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
    pub indexes_to_net_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
    // total = profit + loss (height is lazy, indexes computed)
    pub height_to_total_unrealized_pnl:
        LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
    pub indexes_to_total_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
}
impl UnrealizedMetrics {
/// Import unrealized metrics from database.
///
/// Builds every vector owned by `UnrealizedMetrics`: the persisted
/// `EagerVec`s plus the lazy views derived from them (negated loss and
/// net/total PnL). The string passed to `cfg.name(..)` together with the
/// version forms the on-disk identity of each vec, so these literals must
/// stay in sync with the corresponding field names.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
    let v0 = Version::ZERO;
    let compute_dollars = cfg.compute_dollars();
    // Aggregation option shared by all dateindex-derived vecs.
    let last = VecBuilderOptions::default().add_last();

    // Persisted dateindex-level source vecs; cloned below as sources for
    // the index-level aggregates.
    let dateindex_to_supply_in_profit =
        EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version + v0)?;
    let dateindex_to_supply_in_loss =
        EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version + v0)?;
    let dateindex_to_unrealized_profit =
        EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version + v0)?;
    let dateindex_to_unrealized_loss =
        EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version + v0)?;

    // Loss vecs first, so the lazy negated variants can clone them as sources.
    let height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>> =
        EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version + v0)?;
    let height_to_neg_unrealized_loss = LazyVecFrom1::transformed::<Negate>(
        &cfg.name("neg_unrealized_loss"),
        cfg.version + v0,
        height_to_unrealized_loss.boxed_clone(),
    );
    let indexes_to_unrealized_loss = ComputedVecsFromDateIndex::forced_import(
        cfg.db,
        &cfg.name("unrealized_loss"),
        Source::Vec(dateindex_to_unrealized_loss.boxed_clone()),
        cfg.version + v0,
        cfg.indexes,
        last,
    )?;
    let indexes_to_neg_unrealized_loss = LazyVecsFromDateIndex::from_computed::<Negate>(
        &cfg.name("neg_unrealized_loss"),
        cfg.version + v0,
        Some(dateindex_to_unrealized_loss.boxed_clone()),
        &indexes_to_unrealized_loss,
    );

    // Extract profit sources for lazy net/total vecs
    let height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>> =
        EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version + v0)?;
    let indexes_to_unrealized_profit = ComputedVecsFromDateIndex::forced_import(
        cfg.db,
        &cfg.name("unrealized_profit"),
        Source::Vec(dateindex_to_unrealized_profit.boxed_clone()),
        cfg.version + v0,
        cfg.indexes,
        last,
    )?;

    // Create lazy height vecs from profit/loss sources:
    // net = profit - loss, total = profit + loss.
    let height_to_net_unrealized_pnl = LazyVecFrom2::transformed::<DollarsMinus>(
        &cfg.name("net_unrealized_pnl"),
        cfg.version + v0,
        height_to_unrealized_profit.boxed_clone(),
        height_to_unrealized_loss.boxed_clone(),
    );
    let height_to_total_unrealized_pnl = LazyVecFrom2::transformed::<DollarsPlus>(
        &cfg.name("total_unrealized_pnl"),
        cfg.version + v0,
        height_to_unrealized_profit.boxed_clone(),
        height_to_unrealized_loss.boxed_clone(),
    );

    // indexes_to_net/total remain computed (needed by relative.rs)
    let indexes_to_net_unrealized_pnl = ComputedVecsFromDateIndex::forced_import(
        cfg.db,
        &cfg.name("net_unrealized_pnl"),
        Source::Compute,
        cfg.version + v0,
        cfg.indexes,
        last,
    )?;
    let indexes_to_total_unrealized_pnl = ComputedVecsFromDateIndex::forced_import(
        cfg.db,
        &cfg.name("total_unrealized_pnl"),
        Source::Compute,
        cfg.version + v0,
        cfg.indexes,
        last,
    )?;

    // Height-level supply vecs plus their value views (dollar view gated by
    // `compute_dollars`).
    let height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>> =
        EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version + v0)?;
    let height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>> =
        EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version + v0)?;
    let height_to_supply_in_profit_value = ComputedHeightValueVecs::forced_import(
        cfg.db,
        &cfg.name("supply_in_profit"),
        Source::Vec(height_to_supply_in_profit.boxed_clone()),
        cfg.version + v0,
        compute_dollars,
    )?;
    let height_to_supply_in_loss_value = ComputedHeightValueVecs::forced_import(
        cfg.db,
        &cfg.name("supply_in_loss"),
        Source::Vec(height_to_supply_in_loss.boxed_clone()),
        cfg.version + v0,
        compute_dollars,
    )?;

    Ok(Self {
        // === Supply in Profit/Loss ===
        height_to_supply_in_profit,
        indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex::forced_import(
            cfg.db,
            &cfg.name("supply_in_profit"),
            Source::Vec(dateindex_to_supply_in_profit.boxed_clone()),
            cfg.version + v0,
            last,
            compute_dollars,
            cfg.indexes,
        )?,
        height_to_supply_in_loss,
        indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex::forced_import(
            cfg.db,
            &cfg.name("supply_in_loss"),
            Source::Vec(dateindex_to_supply_in_loss.boxed_clone()),
            cfg.version + v0,
            last,
            compute_dollars,
            cfg.indexes,
        )?,
        dateindex_to_supply_in_profit,
        dateindex_to_supply_in_loss,
        height_to_supply_in_profit_value,
        height_to_supply_in_loss_value,
        // === Unrealized Profit/Loss ===
        height_to_unrealized_profit,
        indexes_to_unrealized_profit,
        height_to_unrealized_loss,
        indexes_to_unrealized_loss,
        dateindex_to_unrealized_profit,
        dateindex_to_unrealized_loss,
        height_to_neg_unrealized_loss,
        indexes_to_neg_unrealized_loss,
        height_to_net_unrealized_pnl,
        indexes_to_net_unrealized_pnl,
        height_to_total_unrealized_pnl,
        indexes_to_total_unrealized_pnl,
    })
}
/// Push unrealized state values onto the height- and (optionally)
/// dateindex-indexed vectors.
///
/// Height-level values are pushed unconditionally; the dateindex-level
/// values are pushed only when both `dateindex` and `date_state` are
/// provided.
pub fn truncate_push(
    &mut self,
    height: Height,
    dateindex: Option<DateIndex>,
    height_state: &UnrealizedState,
    date_state: Option<&UnrealizedState>,
) -> Result<()> {
    let hs = height_state;
    self.height_to_supply_in_profit
        .truncate_push(height, hs.supply_in_profit)?;
    self.height_to_supply_in_loss
        .truncate_push(height, hs.supply_in_loss)?;
    self.height_to_unrealized_profit
        .truncate_push(height, hs.unrealized_profit)?;
    self.height_to_unrealized_loss
        .truncate_push(height, hs.unrealized_loss)?;

    // Date-level push only when both the index and its state are present.
    if let Some((di, ds)) = dateindex.zip(date_state) {
        self.dateindex_to_supply_in_profit
            .truncate_push(di, ds.supply_in_profit)?;
        self.dateindex_to_supply_in_loss
            .truncate_push(di, ds.supply_in_loss)?;
        self.dateindex_to_unrealized_profit
            .truncate_push(di, ds.unrealized_profit)?;
        self.dateindex_to_unrealized_loss
            .truncate_push(di, ds.unrealized_loss)?;
    }
    Ok(())
}
/// Write all stored vectors to disk.
///
/// Flushes both the height-indexed and the dateindex-indexed vectors for
/// supply in profit/loss and unrealized profit/loss. Writes are sequential;
/// the first failing write aborts and returns its error, leaving the
/// remaining vectors unwritten.
pub fn write(&mut self) -> Result<()> {
    // Height-indexed vectors.
    self.height_to_supply_in_profit.write()?;
    self.height_to_supply_in_loss.write()?;
    self.height_to_unrealized_profit.write()?;
    self.height_to_unrealized_loss.write()?;
    // Date-indexed vectors.
    self.dateindex_to_supply_in_profit.write()?;
    self.dateindex_to_supply_in_loss.write()?;
    self.dateindex_to_unrealized_profit.write()?;
    self.dateindex_to_unrealized_loss.write()?;
    Ok(())
}
/// Returns a parallel iterator over all stored vecs, for parallel writing.
///
/// Yields the four height-indexed and four dateindex-indexed vectors as
/// `&mut dyn AnyStoredVec` trait objects.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
    // The explicit element type drives the unsized coercion on each push.
    let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::with_capacity(8);
    vecs.push(&mut self.height_to_supply_in_profit);
    vecs.push(&mut self.height_to_supply_in_loss);
    vecs.push(&mut self.height_to_unrealized_profit);
    vecs.push(&mut self.height_to_unrealized_loss);
    vecs.push(&mut self.dateindex_to_supply_in_profit);
    vecs.push(&mut self.dateindex_to_supply_in_loss);
    vecs.push(&mut self.dateindex_to_unrealized_profit);
    vecs.push(&mut self.dateindex_to_unrealized_loss);
    vecs.into_par_iter()
}
/// Compute aggregate values from separate cohorts.
///
/// For each of the eight stored vectors, sums the corresponding vector of
/// every cohort in `others` into `self`. Height-indexed vectors start at
/// `starting_indexes.height`, dateindex-indexed ones at
/// `starting_indexes.dateindex`. `exit` is forwarded to every
/// `compute_sum_of_others` call for cooperative cancellation.
pub fn compute_from_stateful(
    &mut self,
    starting_indexes: &Indexes,
    others: &[&Self],
    exit: &Exit,
) -> Result<()> {
    // The same sum-of-others call repeated per field; a local macro keeps
    // the field name single-sourced instead of duplicating the 9-line call.
    macro_rules! sum_of_others {
        ($field:ident, $start:expr) => {
            self.$field.compute_sum_of_others(
                $start,
                &others.iter().map(|v| &v.$field).collect::<Vec<_>>(),
                exit,
            )?
        };
    }

    sum_of_others!(height_to_supply_in_profit, starting_indexes.height);
    sum_of_others!(height_to_supply_in_loss, starting_indexes.height);
    sum_of_others!(height_to_unrealized_profit, starting_indexes.height);
    sum_of_others!(height_to_unrealized_loss, starting_indexes.height);
    sum_of_others!(dateindex_to_supply_in_profit, starting_indexes.dateindex);
    sum_of_others!(dateindex_to_supply_in_loss, starting_indexes.dateindex);
    sum_of_others!(dateindex_to_unrealized_profit, starting_indexes.dateindex);
    sum_of_others!(dateindex_to_unrealized_loss, starting_indexes.dateindex);

    Ok(())
}
/// First phase of computed metrics.
///
/// Runs three stages, in order:
/// 1. Derives the dollar value of the height-indexed supply-in-profit/loss
///    vectors (`price` is forwarded as-is to `compute_rest`).
/// 2. Fills the multi-index vectors from their dateindex-indexed sources.
/// 3. Computes total (profit + loss) and net (profit - loss) unrealized PnL
///    at the dateindex level. The height-level net/total variants are lazy
///    and need no computation here.
pub fn compute_rest_part1(
    &mut self,
    price: Option<&crate::price::Vecs>,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // Compute supply value from sats
    self.height_to_supply_in_profit_value
        .compute_rest(price, starting_indexes, exit)?;
    self.height_to_supply_in_loss_value
        .compute_rest(price, starting_indexes, exit)?;
    // Compute indexes from dateindex sources.
    // NOTE(review): supply vectors take `price` here while the unrealized
    // ones below do not — presumably only supply needs a dollar conversion;
    // confirm against the two `compute_rest` signatures.
    self.indexes_to_supply_in_profit.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.dateindex_to_supply_in_profit),
    )?;
    self.indexes_to_supply_in_loss.compute_rest(
        price,
        starting_indexes,
        exit,
        Some(&self.dateindex_to_supply_in_loss),
    )?;
    self.indexes_to_unrealized_profit.compute_rest(
        starting_indexes,
        exit,
        Some(&self.dateindex_to_unrealized_profit),
    )?;
    self.indexes_to_unrealized_loss.compute_rest(
        starting_indexes,
        exit,
        Some(&self.dateindex_to_unrealized_loss),
    )?;
    // height_to_net/total are lazy, but indexes still need compute
    // total_unrealized_pnl = profit + loss
    self.indexes_to_total_unrealized_pnl
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_add(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    // net_unrealized_pnl = profit - loss
    self.indexes_to_net_unrealized_pnl
        .compute_all(starting_indexes, exit, |vec| {
            vec.compute_subtract(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    Ok(())
}
}

View File

@@ -462,27 +462,9 @@ impl Vecs {
.as_ref()
.map(|v| v.dateindex.u().clone());
let height_to_realized_cap = self
.utxo_cohorts
.all
.metrics
.realized
.as_ref()
.map(|r| r.height_to_realized_cap.clone());
let dateindex_to_realized_cap = self
.utxo_cohorts
.all
.metrics
.realized
.as_ref()
.map(|r| r.indexes_to_realized_cap.dateindex.unwrap_last().clone());
let dateindex_to_supply_ref = dateindex_to_supply.u();
let height_to_market_cap_ref = height_to_market_cap.as_ref();
let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref();
let height_to_realized_cap_ref = height_to_realized_cap.as_ref();
let dateindex_to_realized_cap_ref = dateindex_to_realized_cap.as_ref();
aggregates::compute_rest_part2(
&mut self.utxo_cohorts,
@@ -494,8 +476,6 @@ impl Vecs {
dateindex_to_supply_ref,
height_to_market_cap_ref,
dateindex_to_market_cap_ref,
height_to_realized_cap_ref,
dateindex_to_realized_cap_ref,
exit,
)?;

View File

@@ -109,12 +109,15 @@ impl Filter {
}
/// Whether to compute extended metrics (realized cap ratios, profit/loss ratios, percentiles)
/// For UTXO context: false for Type and Amount filters
/// For UTXO context: false for Type, Amount, Year, and Epoch filters
/// For Address context: always false
pub fn is_extended(&self, context: CohortContext) -> bool {
match context {
CohortContext::Address => false,
CohortContext::Utxo => !matches!(self, Filter::Type(_) | Filter::Amount(_)),
CohortContext::Utxo => !matches!(
self,
Filter::Type(_) | Filter::Amount(_) | Filter::Year(_) | Filter::Epoch(_)
),
}
}

View File

@@ -1,4 +1,4 @@
use std::ops::{Add, Div, Mul};
use std::ops::{Add, Div, Mul, Sub};
use schemars::JsonSchema;
use serde::Serialize;
@@ -102,6 +102,13 @@ impl Add for Cents {
}
}
impl Sub for Cents {
    type Output = Self;

    /// Component-wise subtraction of the wrapped values.
    fn sub(self, rhs: Self) -> Self::Output {
        let (Self(lhs), Self(rhs)) = (self, rhs);
        Self(lhs - rhs)
    }
}
impl Div<Cents> for Cents {
type Output = Self;
fn div(self, rhs: Self) -> Self::Output {

View File

@@ -3,7 +3,7 @@ use std::{
f64,
hash::{Hash, Hasher},
iter::Sum,
ops::{Add, AddAssign, Div, Mul},
ops::{Add, AddAssign, Div, Mul, Neg, Sub},
};
use derive_deref::Deref;
@@ -127,6 +127,13 @@ impl Add for Dollars {
}
}
impl Sub for Dollars {
    type Output = Self;

    /// Performs the subtraction in the `Cents` domain and converts back.
    fn sub(self, rhs: Self) -> Self::Output {
        let (minuend, subtrahend) = (Cents::from(self), Cents::from(rhs));
        Self::from(minuend - subtrahend)
    }
}
impl Div<Dollars> for Dollars {
type Output = StoredF64;
fn div(self, rhs: Dollars) -> Self::Output {
@@ -359,6 +366,13 @@ impl CheckedSub<usize> for Dollars {
}
}
impl Neg for Dollars {
    type Output = Self;

    /// Unary negation of the wrapped value (sign bit flip; preserves NaN/zero sign semantics).
    fn neg(self) -> Self::Output {
        let Self(inner) = self;
        Self(-inner)
    }
}
impl PartialEq for Dollars {
fn eq(&self, other: &Self) -> bool {
match (self.0.is_nan(), other.0.is_nan()) {

View File

@@ -3,7 +3,7 @@ use std::{
cmp::Ordering,
f32,
iter::Sum,
ops::{Add, AddAssign, Div, Mul, Sub},
ops::{Add, AddAssign, Div, Mul, Neg, Sub},
};
use derive_deref::Deref;
@@ -177,6 +177,13 @@ impl Sub<StoredF32> for StoredF32 {
}
}
impl Neg for StoredF32 {
    type Output = Self;

    /// Unary negation of the wrapped value (sign bit flip; preserves NaN/zero sign semantics).
    fn neg(self) -> Self::Output {
        let Self(inner) = self;
        Self(-inner)
    }
}
impl PartialEq for StoredF32 {
fn eq(&self, other: &Self) -> bool {
match (self.0.is_nan(), other.0.is_nan()) {