global: snapshot

This commit is contained in:
nym21
2026-01-31 17:39:48 +01:00
parent 8dd350264a
commit ff5bb770d7
116 changed files with 13312 additions and 9530 deletions

15
Cargo.lock generated
View File

@@ -388,6 +388,7 @@ dependencies = [
"brk_cohort",
"brk_query",
"brk_types",
"indexmap",
"oas3",
"serde",
"serde_json",
@@ -658,6 +659,7 @@ version = "0.1.2"
dependencies = [
"brk_traversable_derive",
"brk_types",
"indexmap",
"schemars",
"serde",
"serde_json",
@@ -681,6 +683,7 @@ dependencies = [
"brk_error",
"byteview",
"derive_more",
"indexmap",
"itoa",
"jiff",
"rapidhash",
@@ -2432,9 +2435,9 @@ dependencies = [
[[package]]
name = "rawdb"
version = "0.6.3"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bf9c16af7a93d15280ceb0d502657f9ec524cce61c946a1c98390740270820d"
checksum = "32158f67cfcd5359af3294b26cc4acbd8e412106ab1d6e470038b5284df362e7"
dependencies = [
"libc",
"log",
@@ -3252,9 +3255,9 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
[[package]]
name = "vecdb"
version = "0.6.3"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66b361b0614c0d367441dcbc7c28a885b7b088a8d379e23ad845d81324f2c5f6"
checksum = "05c9f596e212ac69076b58735d340dd944f83531b5a83061020d4a922b73016d"
dependencies = [
"ctrlc",
"log",
@@ -3273,9 +3276,9 @@ dependencies = [
[[package]]
name = "vecdb_derive"
version = "0.6.3"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "962dfcd7fc848c27f6ee051cf1a4dfd55d72a37d443fb525268eff41da9cacf8"
checksum = "63c4ecf88e970a6275bad540fc68e022ed86987d817ef6711a7c57889aa2dfdf"
dependencies = [
"quote",
"syn",

View File

@@ -66,13 +66,14 @@ byteview = "0.10.0"
color-eyre = "0.6.5"
derive_more = { version = "2.1.1", features = ["deref", "deref_mut"] }
fjall = "3.0.1"
indexmap = { version = "2.13.0", features = ["serde"] }
jiff = { version = "0.2.18", features = ["perf-inline", "tz-system"], default-features = false }
minreq = { version = "2.14.1", features = ["https", "json-using-serde"] }
owo-colors = "4.2.3"
parking_lot = "0.12.5"
rayon = "1.11.0"
rustc-hash = "2.1.1"
schemars = "1.2.0"
schemars = { version = "1.2.0", features = ["indexmap2"] }
serde = "1.0.228"
serde_bytes = "0.11.19"
serde_derive = "1.0.228"
@@ -82,7 +83,7 @@ tokio = { version = "1.49.0", features = ["rt-multi-thread"] }
tracing = { version = "0.1", default-features = false, features = ["std"] }
tower-http = { version = "0.6.8", features = ["catch-panic", "compression-br", "compression-gzip", "compression-zstd", "cors", "normalize-path", "timeout", "trace"] }
tower-layer = "0.3"
vecdb = { version = "0.6.3", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { version = "0.6.4", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
[workspace.metadata.release]

View File

@@ -11,6 +11,7 @@ repository.workspace = true
brk_cohort = { workspace = true }
brk_query = { workspace = true }
brk_types = { workspace = true }
indexmap = { workspace = true }
oas3 = "0.20"
serde = { workspace = true }
serde_json = { workspace = true }

View File

@@ -50,7 +50,7 @@ pub fn detect_structural_patterns(
BTreeMap<String, PatternBaseResult>,
) {
let mut ctx = PatternContext::new();
resolve_branch_patterns(tree, "root", &mut ctx);
resolve_branch_patterns(tree, &mut ctx);
let (generic_patterns, generic_mappings, type_mappings) =
detect_generic_patterns(&ctx.signature_to_pattern);
@@ -249,17 +249,19 @@ fn replace_inner_type(type_str: &str, replacement: &str) -> String {
/// Recursively resolve branch patterns bottom-up.
fn resolve_branch_patterns(
node: &TreeNode,
field_name: &str,
ctx: &mut PatternContext,
) -> Option<(String, Vec<PatternField>)> {
let TreeNode::Branch(children) = node else {
return None;
};
// Convert to sorted BTreeMap for consistent pattern detection
let sorted_children: BTreeMap<_, _> = children.iter().collect();
let mut fields: Vec<PatternField> = Vec::new();
let mut child_fields_vec: Vec<Vec<PatternField>> = Vec::new();
for (child_name, child_node) in children {
for (child_name, child_node) in sorted_children {
let (rust_type, json_type, indexes, child_fields) = match child_node {
TreeNode::Leaf(leaf) => (
leaf.kind().to_string(),
@@ -268,9 +270,8 @@ fn resolve_branch_patterns(
Vec::new(),
),
TreeNode::Branch(_) => {
let (pattern_name, child_pattern_fields) =
resolve_branch_patterns(child_node, child_name, ctx)
.unwrap_or_else(|| ("Unknown".to_string(), Vec::new()));
let (pattern_name, child_pattern_fields) = resolve_branch_patterns(child_node, ctx)
.unwrap_or_else(|| ("Unknown".to_string(), Vec::new()));
(
pattern_name.clone(),
pattern_name,
@@ -289,7 +290,7 @@ fn resolve_branch_patterns(
child_fields_vec.push(child_fields);
}
fields.sort_by(|a, b| a.name.cmp(&b.name));
// Fields are already sorted since we iterated over BTreeMap
*ctx.signature_counts.entry(fields.clone()).or_insert(0) += 1;
ctx.signature_to_child_fields
@@ -300,10 +301,17 @@ fn resolve_branch_patterns(
existing.clone()
} else {
let normalized = normalize_fields_for_naming(&fields);
// Generate stable name from first word of each field (deduped, sorted)
let first_words: BTreeSet<String> = fields
.iter()
.filter_map(|f| f.name.split('_').next())
.map(to_pascal_case)
.collect();
let combined: String = first_words.into_iter().collect();
let name = ctx
.normalized_to_name
.entry(normalized)
.or_insert_with(|| generate_pattern_name(field_name, &mut ctx.name_counts))
.or_insert_with(|| generate_pattern_name(&combined, &mut ctx.name_counts))
.clone();
ctx.signature_to_pattern
.insert(fields.clone(), name.clone());

View File

@@ -6,6 +6,7 @@
use std::collections::{BTreeMap, BTreeSet};
use brk_types::{Index, TreeNode, extract_json_type};
use indexmap::IndexMap;
use crate::{IndexSetPattern, PatternField, child_type_name};
@@ -26,8 +27,9 @@ fn get_shortest_leaf_name(node: &TreeNode) -> Option<String> {
}
/// Get the field signature for a branch node's children.
/// Fields are sorted alphabetically for consistent pattern matching.
pub fn get_node_fields(
children: &BTreeMap<String, TreeNode>,
children: &IndexMap<String, TreeNode>,
pattern_lookup: &BTreeMap<Vec<PatternField>, String>,
) -> Vec<PatternField> {
let mut fields: Vec<PatternField> = children
@@ -57,6 +59,7 @@ pub fn get_node_fields(
}
})
.collect();
// Sort for consistent pattern matching (display order preserved in IndexMap)
fields.sort_by(|a, b| a.name.cmp(&b.name));
fields
}
@@ -298,7 +301,7 @@ pub fn infer_accumulated_name(parent_acc: &str, field_name: &str, descendant_lea
/// Get fields with child field information for generic pattern lookup.
pub fn get_fields_with_child_info(
children: &BTreeMap<String, TreeNode>,
children: &IndexMap<String, TreeNode>,
parent_name: &str,
pattern_lookup: &BTreeMap<Vec<PatternField>, String>,
) -> Vec<(PatternField, Option<Vec<PatternField>>)> {
@@ -344,7 +347,6 @@ pub fn get_fields_with_child_info(
mod tests {
use super::*;
use brk_types::{MetricLeaf, MetricLeafWithSchema, TreeNode};
use std::collections::BTreeMap;
fn make_leaf(name: &str) -> TreeNode {
let leaf = MetricLeaf {
@@ -356,7 +358,7 @@ mod tests {
}
fn make_branch(children: Vec<(&str, TreeNode)>) -> TreeNode {
let map: BTreeMap<String, TreeNode> = children
let map: IndexMap<String, TreeNode> = children
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect();

View File

@@ -34,7 +34,7 @@ fn main() -> Result<()> {
.fetch()?;
for ohlc in ohlcs.data {
let avg = (*ohlc.open + *ohlc.close) / 2;
let avg = (u64::from(*ohlc.open) + u64::from(*ohlc.close)) / 2;
let avg = Dollars::from(avg);
writeln!(writer, "{avg}").map_err(|e| brk_client::BrkError {
message: e.to_string(),

File diff suppressed because it is too large Load Diff

View File

@@ -102,4 +102,14 @@ impl Filter {
),
}
}
/// Whether to compute relative metrics (invested capital %, NUPL ratios, etc.)
/// Returns false for edge-case output types (Empty, P2MS, Unknown) which have
/// too little volume for meaningful ratio/percentage analysis.
pub fn compute_relative(&self) -> bool {
!matches!(
self,
Filter::Type(OutputType::Empty | OutputType::P2MS | OutputType::Unknown)
)
}
}

View File

@@ -29,7 +29,7 @@ impl Vecs {
let supply = SupplyVecs::forced_import(&db, v1, indexes, price)?;
let value = ValueVecs::forced_import(&db, v1, indexes)?;
let cap = CapVecs::forced_import(&db, v1, indexes)?;
let pricing = PricingVecs::forced_import(&db, version, indexes, price)?;
let pricing = PricingVecs::forced_import(&db, version, indexes)?;
let adjusted = AdjustedVecs::forced_import(&db, version, indexes)?;
let reserve_risk = ReserveRiskVecs::forced_import(&db, v1, indexes, compute_dollars)?;

View File

@@ -6,7 +6,6 @@ use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromDateRatio, PriceFromHeight},
price,
};
impl Vecs {
@@ -14,7 +13,6 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let vaulted_price = PriceFromHeight::forced_import(db, "vaulted_price", version, indexes)?;
let vaulted_price_ratio = ComputedFromDateRatio::forced_import(
@@ -24,7 +22,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let active_price = PriceFromHeight::forced_import(db, "active_price", version, indexes)?;
@@ -35,7 +32,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let true_market_mean =
@@ -47,7 +43,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let cointime_price =
@@ -59,7 +54,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
Ok(Self {

View File

@@ -134,18 +134,6 @@ impl AddressTypeToAddrCountVecs {
.into_par_iter()
}
pub fn write_height(&mut self) -> Result<()> {
self.p2pk65.height.write()?;
self.p2pk33.height.write()?;
self.p2pkh.height.write()?;
self.p2sh.height.write()?;
self.p2wpkh.height.write()?;
self.p2wsh.height.write()?;
self.p2tr.height.write()?;
self.p2a.height.write()?;
Ok(())
}
pub fn truncate_push_height(
&mut self,
height: Height,

View File

@@ -1,6 +1,5 @@
use brk_cohort::ByAddressType;
use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, TypeIndex};
use vecdb::GenericStoredVec;
use crate::distribution::{
address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs},

View File

@@ -1,5 +1,5 @@
use brk_cohort::{AmountBucket, ByAddressType};
use brk_types::{Dollars, Sats, TypeIndex};
use brk_types::{CentsUnsigned, Sats, TypeIndex};
use rustc_hash::FxHashMap;
use crate::distribution::{
@@ -14,7 +14,7 @@ pub fn process_received(
received_data: AddressTypeToVec<(TypeIndex, Sats)>,
cohorts: &mut AddressCohorts,
lookup: &mut AddressLookup<'_>,
price: Option<Dollars>,
price: Option<CentsUnsigned>,
addr_count: &mut ByAddressType<u64>,
empty_addr_count: &mut ByAddressType<u64>,
activity_counts: &mut AddressTypeToActivityCounts,
@@ -118,7 +118,7 @@ pub fn process_received(
.state
.as_mut()
.unwrap()
.receive_outputs(addr_data, total_value, price, output_count);
.receive_outputs(addr_data, total_value, price.unwrap(), output_count);
}
}
}

View File

@@ -1,12 +1,13 @@
use brk_cohort::{AmountBucket, ByAddressType};
use brk_error::Result;
use brk_types::{Age, CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex};
use brk_types::{Age, CentsUnsigned, CheckedSub, Height, Sats, Timestamp, TypeIndex};
use rustc_hash::FxHashSet;
use vecdb::{unlikely, VecIndex};
use crate::distribution::{
address::{AddressTypeToActivityCounts, HeightToAddressTypeToVec},
cohorts::AddressCohorts,
compute::PriceRangeMax,
};
use super::super::cache::AddressLookup;
@@ -21,17 +22,21 @@ use super::super::cache::AddressLookup;
///
/// Note: Takes separate price/timestamp slices instead of chain_state to allow
/// parallel execution with UTXO cohort processing (which mutates chain_state).
///
/// `price_range_max` is used to compute the peak price during each UTXO's holding period
/// for accurate ATH regret calculation.
#[allow(clippy::too_many_arguments)]
pub fn process_sent(
sent_data: HeightToAddressTypeToVec<(TypeIndex, Sats)>,
cohorts: &mut AddressCohorts,
lookup: &mut AddressLookup<'_>,
current_price: Option<Dollars>,
current_price: Option<CentsUnsigned>,
price_range_max: Option<&PriceRangeMax>,
addr_count: &mut ByAddressType<u64>,
empty_addr_count: &mut ByAddressType<u64>,
activity_counts: &mut AddressTypeToActivityCounts,
received_addresses: &ByAddressType<FxHashSet<TypeIndex>>,
height_to_price: Option<&[Dollars]>,
height_to_price: Option<&[CentsUnsigned]>,
height_to_timestamp: &[Timestamp],
current_height: Height,
current_timestamp: Timestamp,
@@ -39,12 +44,17 @@ pub fn process_sent(
// Track unique senders per address type (simple set, no extra data needed)
let mut seen_senders: ByAddressType<FxHashSet<TypeIndex>> = ByAddressType::default();
for (prev_height, by_type) in sent_data.into_iter() {
let prev_price = height_to_price.map(|v| v[prev_height.to_usize()]);
let prev_timestamp = height_to_timestamp[prev_height.to_usize()];
let blocks_old = current_height.to_usize() - prev_height.to_usize();
for (receive_height, by_type) in sent_data.into_iter() {
let prev_price = height_to_price.map(|v| v[receive_height.to_usize()]);
let prev_timestamp = height_to_timestamp[receive_height.to_usize()];
let blocks_old = current_height.to_usize() - receive_height.to_usize();
let age = Age::new(current_timestamp, prev_timestamp, blocks_old);
// Compute peak price during holding period for ATH regret
// This is the max HIGH price between receive and send heights
let peak_price: Option<CentsUnsigned> =
price_range_max.map(|t| t.max_between(receive_height, current_height));
for (output_type, vec) in by_type.unwrap().into_iter() {
// Cache mutable refs for this address type
let type_addr_count = addr_count.get_mut(output_type).unwrap();
@@ -91,11 +101,11 @@ pub fn process_sent(
) {
panic!(
"process_sent: cohort underflow detected!\n\
Block context: prev_height={:?}, output_type={:?}, type_index={:?}\n\
Block context: receive_height={:?}, output_type={:?}, type_index={:?}\n\
prev_balance={}, new_balance={}, value={}\n\
will_be_empty={}, crossing_boundary={}\n\
Address: {:?}",
prev_height,
receive_height,
output_type,
type_index,
prev_balance,
@@ -141,7 +151,7 @@ pub fn process_sent(
.state
.as_mut()
.unwrap()
.send(addr_data, value, current_price, prev_price, age)?;
.send(addr_data, value, current_price.unwrap(), prev_price.unwrap(), peak_price.unwrap(), age)?;
}
}
}

View File

@@ -191,11 +191,11 @@ impl AddressCohorts {
});
}
/// Reset price_to_amount for all separate cohorts (called during fresh start).
pub fn reset_separate_price_to_amount(&mut self) -> Result<()> {
/// Reset cost_basis_data for all separate cohorts (called during fresh start).
pub fn reset_separate_cost_basis_data(&mut self) -> Result<()> {
self.par_iter_separate_mut().try_for_each(|v| {
if let Some(state) = v.state.as_mut() {
state.reset_price_to_amount_if_needed()?;
state.reset_cost_basis_data_if_needed()?;
}
Ok(())
})

View File

@@ -3,7 +3,7 @@ use std::path::Path;
use brk_cohort::{CohortContext, Filter, Filtered};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, StoredU64, Version};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec};
@@ -145,7 +145,7 @@ impl DynCohortVecs for AddressCohortVecs {
// State files are saved AT height H, so to resume at H+1 we need to import at H
// Decrement first, then increment result to match expected starting_height
if let Some(mut prev_height) = starting_height.decremented() {
// Import price_to_amount state file (may adjust prev_height to actual file found)
// Import cost_basis_data state file (may adjust prev_height to actual file found)
prev_height = state.inner.import_at_or_before(prev_height)?;
// Restore supply state from height-indexed vectors
@@ -164,15 +164,8 @@ impl DynCohortVecs for AddressCohortVecs {
.read_once(prev_height)?;
state.addr_count = *self.addr_count.height.read_once(prev_height)?;
// Restore realized cap if present
if let Some(realized_metrics) = self.metrics.realized.as_mut()
&& let Some(realized_state) = state.inner.realized.as_mut()
{
realized_state.cap = realized_metrics
.realized_cap
.height
.read_once(prev_height)?;
}
// Restore realized cap from persisted exact values
state.inner.restore_realized_cap();
let result = prev_height.incremented();
self.starting_height = Some(result);
@@ -216,9 +209,9 @@ impl DynCohortVecs for AddressCohortVecs {
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Option<Dollars>,
height_price: Option<CentsUnsigned>,
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
date_price: Option<Option<CentsUnsigned>>,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics.compute_then_truncate_push_unrealized_states(

View File

@@ -1,5 +1,5 @@
use brk_error::Result;
use brk_types::{DateIndex, Dollars, Height, Version};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version};
use vecdb::{Exit, IterableVec};
use crate::{ComputeIndexes, indexes, price};
@@ -30,9 +30,9 @@ pub trait DynCohortVecs: Send + Sync {
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Option<Dollars>,
height_price: Option<CentsUnsigned>,
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
date_price: Option<Option<CentsUnsigned>>,
) -> Result<()>;
/// First phase of post-processing computations.

View File

@@ -1,4 +1,4 @@
use std::path::Path;
use std::{cmp::Reverse, collections::BinaryHeap, path::Path};
use brk_cohort::{
ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount, ByMaxAge, ByMinAge,
@@ -6,16 +6,16 @@ use brk_cohort::{
};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, Version};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Sats, StoredF32, Version};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, IterableVec};
use vecdb::{AnyStoredVec, Database, Exit, GenericStoredVec, IterableVec};
use crate::{
ComputeIndexes,
distribution::DynCohortVecs,
indexes,
internal::{PERCENTILES, PERCENTILES_LEN},
internal::{PERCENTILES, PERCENTILES_LEN, compute_spot_percentile_rank},
price,
};
@@ -288,13 +288,12 @@ impl UTXOCohorts {
}
/// Get minimum dateindex from all aggregate cohorts' dateindex-indexed vectors.
/// This checks cost_basis percentiles which are only on aggregate cohorts.
/// This checks cost_basis metrics which are only on aggregate cohorts.
pub fn min_aggregate_stateful_dateindex_len(&self) -> usize {
self.0
.iter_aggregate()
.filter_map(|v| v.metrics.cost_basis.as_ref())
.filter_map(|cb| cb.percentiles.as_ref())
.map(|cbp| cbp.min_stateful_dateindex_len())
.map(|cb| cb.min_stateful_dateindex_len())
.min()
.unwrap_or(usize::MAX)
}
@@ -314,35 +313,47 @@ impl UTXOCohorts {
});
}
/// Reset price_to_amount for all separate cohorts (called during fresh start).
pub fn reset_separate_price_to_amount(&mut self) -> Result<()> {
/// Reset cost_basis_data for all separate cohorts (called during fresh start).
pub fn reset_separate_cost_basis_data(&mut self) -> Result<()> {
self.par_iter_separate_mut().try_for_each(|v| {
if let Some(state) = v.state.as_mut() {
state.reset_price_to_amount_if_needed()?;
state.reset_cost_basis_data_if_needed()?;
}
Ok(())
})
}
/// Compute and push percentiles for aggregate cohorts (all, sth, lth).
/// Computes on-demand by merging age_range cohorts' price_to_amount data.
/// This avoids maintaining redundant aggregate price_to_amount maps.
pub fn truncate_push_aggregate_percentiles(&mut self, dateindex: DateIndex) -> Result<()> {
use std::cmp::Reverse;
use std::collections::BinaryHeap;
// Collect (filter, supply, price_to_amount as Vec) from age_range cohorts
/// Computes on-demand by merging age_range cohorts' cost_basis_data.
/// This avoids maintaining redundant aggregate cost_basis_data maps.
/// Computes both sat-weighted (percentiles) and USD-weighted (invested_capital) percentiles.
pub fn truncate_push_aggregate_percentiles(
&mut self,
dateindex: DateIndex,
spot: Dollars,
) -> Result<()> {
// Collect (filter, entries, total_sats, total_usd) from age_range cohorts.
// Keep data in CentsUnsigned to avoid float conversions until output.
// Compute totals during collection to avoid a second pass.
let age_range_data: Vec<_> = self
.0
.age_range
.iter()
.filter_map(|sub| {
let state = sub.state.as_ref()?;
let entries: Vec<(Dollars, Sats)> = state
.price_to_amount_iter()?
.map(|(p, &a)| (p, a))
let mut total_sats: u64 = 0;
let mut total_usd: u128 = 0;
let entries: Vec<(CentsUnsigned, Sats)> = state
.cost_basis_data_iter()?
.map(|(price, &sats)| {
let sats_u64 = u64::from(sats);
let price_u128 = price.as_u128();
total_sats += sats_u64;
total_usd += price_u128 * sats_u64 as u128;
(price, sats)
})
.collect();
Some((sub.filter().clone(), state.supply.value, entries))
Some((sub.filter().clone(), entries, total_sats, total_usd))
})
.collect();
@@ -350,72 +361,109 @@ impl UTXOCohorts {
for aggregate in self.0.iter_aggregate_mut() {
let filter = aggregate.filter().clone();
// Get cost_basis percentiles storage, skip if not configured
let Some(percentiles) = aggregate
.metrics
.cost_basis
.as_mut()
.and_then(|cb| cb.percentiles.as_mut())
else {
// Get cost_basis, skip if not configured
let Some(cost_basis) = aggregate.metrics.cost_basis.as_mut() else {
continue;
};
// Collect relevant cohort data for this aggregate
// Collect relevant cohort data for this aggregate and sum totals
let mut total_sats: u64 = 0;
let mut total_usd: u128 = 0;
let relevant: Vec<_> = age_range_data
.iter()
.filter(|(sub_filter, _, _)| filter.includes(sub_filter))
.filter(|(sub_filter, _, _, _)| filter.includes(sub_filter))
.map(|(_, entries, cohort_sats, cohort_usd)| {
total_sats += cohort_sats;
total_usd += cohort_usd;
entries
})
.collect();
// Calculate total supply
let total_supply: u64 = relevant.iter().map(|(_, s, _)| u64::from(*s)).sum();
if total_supply == 0 {
percentiles.truncate_push(dateindex, &[Dollars::NAN; PERCENTILES_LEN])?;
if total_sats == 0 {
let nan_prices = [Dollars::NAN; PERCENTILES_LEN];
if let Some(percentiles) = cost_basis.percentiles.as_mut() {
percentiles.truncate_push(dateindex, &nan_prices)?;
}
if let Some(invested_capital) = cost_basis.invested_capital.as_mut() {
invested_capital.truncate_push(dateindex, &nan_prices)?;
}
if let Some(spot_pct) = cost_basis.spot_cost_basis_percentile.as_mut() {
spot_pct
.dateindex
.truncate_push(dateindex, StoredF32::NAN)?;
}
if let Some(spot_pct) = cost_basis.spot_invested_capital_percentile.as_mut() {
spot_pct
.dateindex
.truncate_push(dateindex, StoredF32::NAN)?;
}
continue;
}
// K-way merge using min-heap: O(n log k) where k = number of cohorts
// Each heap entry: (price, cohort_idx, entry_idx); the amount is read from relevant[cohort_idx][entry_idx]
let mut heap: BinaryHeap<Reverse<(Dollars, usize, usize)>> = BinaryHeap::new();
let mut heap: BinaryHeap<Reverse<(CentsUnsigned, usize, usize)>> = BinaryHeap::new();
// Initialize heap with first entry from each cohort
for (cohort_idx, (_, _, entries)) in relevant.iter().enumerate() {
for (cohort_idx, entries) in relevant.iter().enumerate() {
if !entries.is_empty() {
heap.push(Reverse((entries[0].0, cohort_idx, 0)));
}
}
let targets = PERCENTILES.map(|p| total_supply * u64::from(p) / 100);
let mut result = [Dollars::NAN; PERCENTILES_LEN];
let mut accumulated = 0u64;
let mut pct_idx = 0;
let mut current_price: Option<Dollars> = None;
let mut amount_at_price = 0u64;
// Compute both sat-weighted and USD-weighted percentiles in one pass
let sat_targets = PERCENTILES.map(|p| total_sats * u64::from(p) / 100);
let usd_targets = PERCENTILES.map(|p| total_usd * u128::from(p) / 100);
let mut sat_result = [Dollars::NAN; PERCENTILES_LEN];
let mut usd_result = [Dollars::NAN; PERCENTILES_LEN];
let mut cumsum_sats: u64 = 0;
let mut cumsum_usd: u128 = 0;
let mut sat_idx = 0;
let mut usd_idx = 0;
let mut current_price: Option<CentsUnsigned> = None;
let mut sats_at_price: u64 = 0;
let mut usd_at_price: u128 = 0;
while let Some(Reverse((price, cohort_idx, entry_idx))) = heap.pop() {
let (_, _, entries) = relevant[cohort_idx];
let entries = relevant[cohort_idx];
let (_, amount) = entries[entry_idx];
let amount_u64 = u64::from(amount);
let price_u128 = price.as_u128();
// If price changed, finalize previous price
if let Some(current_price) = current_price
&& current_price != price
if let Some(prev_price) = current_price
&& prev_price != price
{
accumulated += amount_at_price;
cumsum_sats += sats_at_price;
cumsum_usd += usd_at_price;
while pct_idx < PERCENTILES_LEN && accumulated >= targets[pct_idx] {
result[pct_idx] = current_price;
pct_idx += 1;
// Only convert to dollars if we still need percentiles
if sat_idx < PERCENTILES_LEN || usd_idx < PERCENTILES_LEN {
let prev_dollars = prev_price.to_dollars();
while sat_idx < PERCENTILES_LEN && cumsum_sats >= sat_targets[sat_idx] {
sat_result[sat_idx] = prev_dollars;
sat_idx += 1;
}
while usd_idx < PERCENTILES_LEN && cumsum_usd >= usd_targets[usd_idx] {
usd_result[usd_idx] = prev_dollars;
usd_idx += 1;
}
// Early exit if all percentiles found
if sat_idx >= PERCENTILES_LEN && usd_idx >= PERCENTILES_LEN {
break;
}
}
if pct_idx >= PERCENTILES_LEN {
break;
}
amount_at_price = 0;
sats_at_price = 0;
usd_at_price = 0;
}
current_price = Some(price);
amount_at_price += u64::from(amount);
sats_at_price += amount_u64;
usd_at_price += price_u128 * amount_u64 as u128;
// Push next entry from this cohort
let next_idx = entry_idx + 1;
@@ -424,16 +472,41 @@ impl UTXOCohorts {
}
}
// Finalize last price
if let Some(price) = current_price {
accumulated += amount_at_price;
while pct_idx < PERCENTILES_LEN && accumulated >= targets[pct_idx] {
result[pct_idx] = price;
pct_idx += 1;
// Finalize last price (skip if we already found all percentiles via early exit)
if (sat_idx < PERCENTILES_LEN || usd_idx < PERCENTILES_LEN)
&& let Some(price) = current_price
{
cumsum_sats += sats_at_price;
cumsum_usd += usd_at_price;
let price_dollars = price.to_dollars();
while sat_idx < PERCENTILES_LEN && cumsum_sats >= sat_targets[sat_idx] {
sat_result[sat_idx] = price_dollars;
sat_idx += 1;
}
while usd_idx < PERCENTILES_LEN && cumsum_usd >= usd_targets[usd_idx] {
usd_result[usd_idx] = price_dollars;
usd_idx += 1;
}
}
percentiles.truncate_push(dateindex, &result)?;
// Push both sat-weighted and USD-weighted results
if let Some(percentiles) = cost_basis.percentiles.as_mut() {
percentiles.truncate_push(dateindex, &sat_result)?;
}
if let Some(invested_capital) = cost_basis.invested_capital.as_mut() {
invested_capital.truncate_push(dateindex, &usd_result)?;
}
// Compute and push spot percentile ranks
if let Some(spot_pct) = cost_basis.spot_cost_basis_percentile.as_mut() {
let rank = compute_spot_percentile_rank(&sat_result, spot);
spot_pct.dateindex.truncate_push(dateindex, rank)?;
}
if let Some(spot_pct) = cost_basis.spot_invested_capital_percentile.as_mut() {
let rank = compute_spot_percentile_rank(&usd_result, spot);
spot_pct.dateindex.truncate_push(dateindex, rank)?;
}
}
Ok(())

View File

@@ -1,4 +1,4 @@
use brk_types::{Dollars, Height, Timestamp};
use brk_types::{CentsUnsigned, Height, Timestamp};
use crate::distribution::state::Transacted;
@@ -18,7 +18,7 @@ impl UTXOCohorts {
received: Transacted,
height: Height,
timestamp: Timestamp,
price: Option<Dollars>,
price: Option<CentsUnsigned>,
) {
let supply_state = received.spendable_supply;
@@ -30,7 +30,7 @@ impl UTXOCohorts {
]
.into_iter()
.for_each(|v| {
v.state.as_mut().unwrap().receive(&supply_state, price);
v.state.as_mut().unwrap().receive_utxo(&supply_state, price);
});
// Update output type cohorts
@@ -40,7 +40,7 @@ impl UTXOCohorts {
vecs.state
.as_mut()
.unwrap()
.receive(received.by_type.get(output_type), price)
.receive_utxo(received.by_type.get(output_type), price)
});
// Update amount range cohorts
@@ -53,7 +53,7 @@ impl UTXOCohorts {
.state
.as_mut()
.unwrap()
.receive(supply_state, price);
.receive_utxo(supply_state, price);
});
}
}

View File

@@ -1,9 +1,12 @@
use brk_types::{Age, Height};
use brk_types::{Age, CentsUnsigned, Height};
use rustc_hash::FxHashMap;
use vecdb::VecIndex;
use crate::{
distribution::state::{BlockState, Transacted},
distribution::{
compute::PriceRangeMax,
state::{BlockState, Transacted},
},
utils::OptionExt,
};
@@ -14,10 +17,14 @@ impl UTXOCohorts {
///
/// Each input references a UTXO created at some previous height.
/// We need to update the cohort states based on when that UTXO was created.
///
/// `price_range_max` is used to compute the peak price during each UTXO's holding period
/// for accurate ATH regret calculation.
pub fn send(
&mut self,
height_to_sent: FxHashMap<Height, Transacted>,
chain_state: &mut [BlockState],
price_range_max: Option<&PriceRangeMax>,
) {
if chain_state.is_empty() {
return;
@@ -27,31 +34,44 @@ impl UTXOCohorts {
let last_timestamp = last_block.timestamp;
let current_price = last_block.price;
let chain_len = chain_state.len();
let send_height = Height::from(chain_len - 1);
for (height, sent) in height_to_sent {
for (receive_height, sent) in height_to_sent {
// Update chain_state to reflect spent supply
chain_state[height.to_usize()].supply -= &sent.spendable_supply;
chain_state[receive_height.to_usize()].supply -= &sent.spendable_supply;
let block_state = &chain_state[height.to_usize()];
let block_state = &chain_state[receive_height.to_usize()];
let prev_price = block_state.price;
let blocks_old = chain_len - 1 - height.to_usize();
let blocks_old = chain_len - 1 - receive_height.to_usize();
let age = Age::new(last_timestamp, block_state.timestamp, blocks_old);
// Compute peak price during holding period for ATH regret
// This is the max HIGH price between receive and send heights
let peak_price: Option<CentsUnsigned> =
price_range_max.map(|t| t.max_between(receive_height, send_height));
// Update age range cohort (direct index lookup)
self.0.age_range.get_mut(age).state.um().send(
self.0.age_range.get_mut(age).state.um().send_utxo(
&sent.spendable_supply,
current_price,
prev_price,
peak_price,
age,
);
// Update epoch cohort (direct lookup by height)
self.0.epoch.mut_vec_from_height(height).state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
age,
);
self.0
.epoch
.mut_vec_from_height(receive_height)
.state
.um()
.send_utxo(
&sent.spendable_supply,
current_price,
prev_price,
peak_price,
age,
);
// Update year cohort (direct lookup by timestamp)
self.0
@@ -59,7 +79,13 @@ impl UTXOCohorts {
.mut_vec_from_timestamp(block_state.timestamp)
.state
.um()
.send(&sent.spendable_supply, current_price, prev_price, age);
.send_utxo(
&sent.spendable_supply,
current_price,
prev_price,
peak_price,
age,
);
// Update output type cohorts
sent.by_type
@@ -71,7 +97,7 @@ impl UTXOCohorts {
.get_mut(output_type)
.state
.um()
.send(supply_state, current_price, prev_price, age)
.send_utxo(supply_state, current_price, prev_price, peak_price, age)
});
// Update amount range cohorts
@@ -83,7 +109,7 @@ impl UTXOCohorts {
.get_mut(group)
.state
.um()
.send(supply_state, current_price, prev_price, age);
.send_utxo(supply_state, current_price, prev_price, peak_price, age);
});
}
}

View File

@@ -3,7 +3,7 @@ use std::path::Path;
use brk_cohort::{CohortContext, Filter, Filtered, StateLevel};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Version};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, IterableVec};
@@ -142,7 +142,7 @@ impl DynCohortVecs for UTXOCohortVecs {
// State files are saved AT height H, so to resume at H+1 we need to import at H
// Decrement first, then increment result to match expected starting_height
if let Some(mut prev_height) = starting_height.decremented() {
// Import price_to_amount state file (may adjust prev_height to actual file found)
// Import cost_basis_data state file (may adjust prev_height to actual file found)
prev_height = state.import_at_or_before(prev_height)?;
// Restore supply state from height-indexed vectors
@@ -160,15 +160,8 @@ impl DynCohortVecs for UTXOCohortVecs {
.height
.read_once(prev_height)?;
// Restore realized cap if present
if let Some(realized_metrics) = self.metrics.realized.as_mut()
&& let Some(realized_state) = state.realized.as_mut()
{
realized_state.cap = realized_metrics
.realized_cap
.height
.read_once(prev_height)?;
}
// Restore realized cap from persisted exact values
state.restore_realized_cap();
let result = prev_height.incremented();
self.state_starting_height = Some(result);
@@ -204,9 +197,9 @@ impl DynCohortVecs for UTXOCohortVecs {
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Option<Dollars>,
height_price: Option<CentsUnsigned>,
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
date_price: Option<Option<CentsUnsigned>>,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics.compute_then_truncate_push_unrealized_states(

View File

@@ -3,7 +3,7 @@ use std::thread;
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{DateIndex, Height, OutputType, Sats, TxIndex, TypeIndex};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, OutputType, Sats, TxIndex, TypeIndex};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use tracing::info;
@@ -75,9 +75,9 @@ pub fn process_blocks(
let txindex_to_output_count = &indexes.txindex.output_count;
let txindex_to_input_count = &indexes.txindex.input_count;
// From price (optional):
let height_to_price = price.map(|p| &p.usd.split.close.height);
let dateindex_to_price = price.map(|p| &p.usd.split.close.dateindex);
// From price (optional) - use cents for computation:
let height_to_price = price.map(|p| &p.cents.split.height.close);
let dateindex_to_price = price.map(|p| &p.cents.split.dateindex.close);
// Access pre-computed vectors from context for thread-safe access
let height_to_price_vec = &ctx.height_to_price;
@@ -329,6 +329,7 @@ pub fn process_blocks(
&mut vecs.address_cohorts,
&mut lookup,
block_price,
ctx.price_range_max.as_ref(),
&mut addr_counts,
&mut empty_addr_counts,
&mut activity_counts,
@@ -344,7 +345,8 @@ pub fn process_blocks(
// Main thread: Update UTXO cohorts
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
vecs.utxo_cohorts.send(height_to_sent, chain_state);
vecs.utxo_cohorts
.send(height_to_sent, chain_state, ctx.price_range_max.as_ref());
});
// Push to height-indexed vectors
@@ -382,8 +384,12 @@ pub fn process_blocks(
// Compute and push percentiles for aggregate cohorts (all, sth, lth)
if let Some(dateindex) = dateindex_opt {
let spot = date_price
.flatten()
.map(|c| c.to_dollars())
.unwrap_or(Dollars::NAN);
vecs.utxo_cohorts
.truncate_push_aggregate_percentiles(dateindex)?;
.truncate_push_aggregate_percentiles(dateindex, spot)?;
}
// Periodic checkpoint flush
@@ -456,9 +462,9 @@ fn push_cohort_states(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
height: Height,
height_price: Option<brk_types::Dollars>,
height_price: Option<CentsUnsigned>,
dateindex: Option<DateIndex>,
date_price: Option<Option<brk_types::Dollars>>,
date_price: Option<Option<CentsUnsigned>>,
) -> Result<()> {
// utxo_cohorts.iter_separate_mut().try_for_each(|v| {
utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {

View File

@@ -1,8 +1,99 @@
use brk_types::{Dollars, Height, Timestamp};
use std::time::Instant;
use brk_types::{CentsUnsigned, Height, Timestamp};
use tracing::debug;
use vecdb::VecIndex;
use crate::{blocks, price};
/// Sparse table for O(1) range maximum queries on prices.
/// Uses O(n log n) space (~140MB for 880k blocks).
pub struct PriceRangeMax {
/// Flattened table: table[k * n + i] = max of 2^k elements starting at index i
/// Using flat layout for better cache locality.
table: Vec<CentsUnsigned>,
/// Number of elements
n: usize,
}
impl PriceRangeMax {
/// Build sparse table from high prices. O(n log n) time and space.
pub fn build(prices: &[CentsUnsigned]) -> Self {
let start = Instant::now();
let n = prices.len();
if n == 0 {
return Self {
table: vec![],
n: 0,
};
}
// levels = floor(log2(n)) + 1
let levels = (usize::BITS - n.leading_zeros()) as usize;
// Allocate flat table: levels * n elements
let mut table = vec![CentsUnsigned::ZERO; levels * n];
// Base case: level 0 = original prices
table[..n].copy_from_slice(prices);
// Build each level from the previous
// table[k][i] = max(table[k-1][i], table[k-1][i + 2^(k-1)])
for k in 1..levels {
let prev_offset = (k - 1) * n;
let curr_offset = k * n;
let half = 1 << (k - 1);
let end = n.saturating_sub(1 << k) + 1;
// Use split_at_mut to avoid bounds checks in the loop
let (prev_level, rest) = table.split_at_mut(curr_offset);
let prev = &prev_level[prev_offset..prev_offset + n];
let curr = &mut rest[..n];
for i in 0..end {
curr[i] = prev[i].max(prev[i + half]);
}
}
let elapsed = start.elapsed();
debug!(
"PriceRangeMax built: {} heights, {} levels, {:.2}MB, {:.2}ms",
n,
levels,
(levels * n * std::mem::size_of::<CentsUnsigned>()) as f64 / 1_000_000.0,
elapsed.as_secs_f64() * 1000.0
);
Self { table, n }
}
/// Query maximum value in range [l, r] (inclusive). O(1) time.
#[inline]
pub fn range_max(&self, l: usize, r: usize) -> CentsUnsigned {
debug_assert!(l <= r && r < self.n);
let len = r - l + 1;
// k = floor(log2(len))
let k = (usize::BITS - len.leading_zeros() - 1) as usize;
let half = 1 << k;
// max of [l, l + 2^k) and [r - 2^k + 1, r + 1)
let offset = k * self.n;
unsafe {
let a = *self.table.get_unchecked(offset + l);
let b = *self.table.get_unchecked(offset + r + 1 - half);
a.max(b)
}
}
/// Query maximum value in height range. O(1) time.
#[inline]
pub fn max_between(&self, from: Height, to: Height) -> CentsUnsigned {
self.range_max(from.to_usize(), to.to_usize())
}
}
/// Context shared across block processing.
pub struct ComputeContext {
/// Starting height for this computation run
@@ -15,7 +106,11 @@ pub struct ComputeContext {
pub height_to_timestamp: Vec<Timestamp>,
/// Pre-computed height -> price mapping (if available)
pub height_to_price: Option<Vec<Dollars>>,
pub height_to_price: Option<Vec<CentsUnsigned>>,
/// Sparse table for O(1) range max queries on high prices.
/// Used for computing max price during UTXO holding periods (ATH regret).
pub price_range_max: Option<PriceRangeMax>,
}
impl ComputeContext {
@@ -29,20 +124,28 @@ impl ComputeContext {
let height_to_timestamp: Vec<Timestamp> =
blocks.time.timestamp_monotonic.into_iter().collect();
let height_to_price: Option<Vec<Dollars>> = price
.map(|p| &p.usd.split.close.height)
.map(|v| v.into_iter().map(|d| *d).collect());
let height_to_price: Option<Vec<CentsUnsigned>> = price
.map(|p| &p.cents.split.height.close)
.map(|v| v.into_iter().map(|c| *c).collect());
// Build sparse table for O(1) range max queries on HIGH prices
// Used for computing peak price during UTXO holding periods (ATH regret)
let price_range_max = price
.map(|p| &p.cents.split.height.high)
.map(|v| v.into_iter().map(|c| *c).collect::<Vec<_>>())
.map(|prices| PriceRangeMax::build(&prices));
Self {
starting_height,
last_height,
height_to_timestamp,
height_to_price,
price_range_max,
}
}
/// Get price at height (None if no price data or height out of range).
pub fn price_at(&self, height: Height) -> Option<Dollars> {
pub fn price_at(&self, height: Height) -> Option<CentsUnsigned> {
self.height_to_price
.as_ref()?
.get(height.to_usize())

View File

@@ -6,7 +6,7 @@ mod recover;
mod write;
pub use block_loop::process_blocks;
pub use context::ComputeContext;
pub use context::{ComputeContext, PriceRangeMax};
pub use readers::{
TxInIterators, TxOutData, TxOutIterators, VecsReaders, build_txinindex_to_txindex,
build_txoutindex_to_txindex,

View File

@@ -108,9 +108,9 @@ pub fn reset_state(
utxo_cohorts.reset_separate_state_heights();
address_cohorts.reset_separate_state_heights();
// Reset price_to_amount for all cohorts
utxo_cohorts.reset_separate_price_to_amount()?;
address_cohorts.reset_separate_price_to_amount()?;
// Reset cost_basis_data for all cohorts
utxo_cohorts.reset_separate_cost_basis_data()?;
address_cohorts.reset_separate_cost_basis_data()?;
Ok(RecoveredState {
starting_height: Height::ZERO,

View File

@@ -96,14 +96,6 @@ impl ActivityMetrics {
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.sent.sats.height.write()?;
self.satblocks_destroyed.write()?;
self.satdays_destroyed.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![

View File

@@ -41,6 +41,11 @@ impl<'a> ImportConfig<'a> {
self.filter.compute_adjusted(self.context)
}
/// Whether to compute relative metrics (invested capital %, NUPL ratios, etc.).
pub fn compute_relative(&self) -> bool {
self.filter.compute_relative()
}
/// Get full metric name with filter prefix.
pub fn name(&self, suffix: &str) -> String {
if self.full_name.is_empty() {

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Version};
use brk_types::{DateIndex, Dollars, Height, StoredF32, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec};
@@ -8,7 +8,10 @@ use crate::{
ComputeIndexes,
distribution::state::CohortState,
indexes,
internal::{CostBasisPercentiles, PriceFromHeight},
internal::{
ComputedFromDateLast, PERCENTILES_LEN, PercentilesVecs, PriceFromHeight,
compute_spot_percentile_rank,
},
};
use super::ImportConfig;
@@ -22,8 +25,17 @@ pub struct CostBasisMetrics {
/// Maximum cost basis for any UTXO at this height
pub max: PriceFromHeight,
/// Cost basis distribution percentiles (median, quartiles, etc.)
pub percentiles: Option<CostBasisPercentiles>,
/// Cost basis percentiles (sat-weighted)
pub percentiles: Option<PercentilesVecs>,
/// Invested capital percentiles (USD-weighted)
pub invested_capital: Option<PercentilesVecs>,
/// What percentile of cost basis is below spot (sat-weighted)
pub spot_cost_basis_percentile: Option<ComputedFromDateLast<StoredF32>>,
/// What percentile of invested capital is below spot (USD-weighted)
pub spot_invested_capital_percentile: Option<ComputedFromDateLast<StoredF32>>,
}
impl CostBasisMetrics {
@@ -46,15 +58,46 @@ impl CostBasisMetrics {
)?,
percentiles: extended
.then(|| {
CostBasisPercentiles::forced_import(
PercentilesVecs::forced_import(
cfg.db,
&cfg.name(""),
&cfg.name("cost_basis"),
cfg.version,
cfg.indexes,
true,
)
})
.transpose()?,
invested_capital: extended
.then(|| {
PercentilesVecs::forced_import(
cfg.db,
&cfg.name("invested_capital"),
cfg.version,
cfg.indexes,
true,
)
})
.transpose()?,
spot_cost_basis_percentile: extended
.then(|| {
ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("spot_cost_basis_percentile"),
cfg.version,
cfg.indexes,
)
})
.transpose()?,
spot_invested_capital_percentile: extended
.then(|| {
ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("spot_invested_capital_percentile"),
cfg.version,
cfg.indexes,
)
})
.transpose()?,
})
}
@@ -69,6 +112,24 @@ impl CostBasisMetrics {
.as_ref()
.map(|p| p.min_stateful_dateindex_len())
.unwrap_or(usize::MAX)
.min(
self.invested_capital
.as_ref()
.map(|p| p.min_stateful_dateindex_len())
.unwrap_or(usize::MAX),
)
.min(
self.spot_cost_basis_percentile
.as_ref()
.map(|v| v.dateindex.len())
.unwrap_or(usize::MAX),
)
.min(
self.spot_invested_capital_percentile
.as_ref()
.map(|v| v.dateindex.len())
.unwrap_or(usize::MAX),
)
}
/// Push min/max cost basis from state.
@@ -76,15 +137,15 @@ impl CostBasisMetrics {
self.min.height.truncate_push(
height,
state
.price_to_amount_first_key_value()
.map(|(dollars, _)| dollars)
.cost_basis_data_first_key_value()
.map(|(cents, _)| cents.into())
.unwrap_or(Dollars::NAN),
)?;
self.max.height.truncate_push(
height,
state
.price_to_amount_last_key_value()
.map(|(dollars, _)| dollars)
.cost_basis_data_last_key_value()
.map(|(cents, _)| cents.into())
.unwrap_or(Dollars::NAN),
)?;
Ok(())
@@ -96,21 +157,38 @@ impl CostBasisMetrics {
&mut self,
dateindex: DateIndex,
state: &CohortState,
spot: Dollars,
) -> Result<()> {
if let Some(percentiles) = self.percentiles.as_mut() {
let percentile_prices = state.compute_percentile_prices();
percentiles.truncate_push(dateindex, &percentile_prices)?;
}
Ok(())
}
let computed = state.compute_percentiles();
// Push sat-weighted percentiles and spot rank
let sat_prices = computed
.as_ref()
.map(|p| p.sat_weighted.map(|c| c.to_dollars()))
.unwrap_or([Dollars::NAN; PERCENTILES_LEN]);
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.min.height.write()?;
self.max.height.write()?;
if let Some(percentiles) = self.percentiles.as_mut() {
percentiles.write()?;
percentiles.truncate_push(dateindex, &sat_prices)?;
}
if let Some(spot_pct) = self.spot_cost_basis_percentile.as_mut() {
let rank = compute_spot_percentile_rank(&sat_prices, spot);
spot_pct.dateindex.truncate_push(dateindex, rank)?;
}
// Push USD-weighted percentiles and spot rank
let usd_prices = computed
.as_ref()
.map(|p| p.usd_weighted.map(|c| c.to_dollars()))
.unwrap_or([Dollars::NAN; PERCENTILES_LEN]);
if let Some(invested_capital) = self.invested_capital.as_mut() {
invested_capital.truncate_push(dateindex, &usd_prices)?;
}
if let Some(spot_pct) = self.spot_invested_capital_percentile.as_mut() {
let rank = compute_spot_percentile_rank(&usd_prices, spot);
spot_pct.dateindex.truncate_push(dateindex, rank)?;
}
Ok(())
}
@@ -126,6 +204,21 @@ impl CostBasisMetrics {
.map(|v| &mut v.dateindex as &mut dyn AnyStoredVec),
);
}
if let Some(invested_capital) = self.invested_capital.as_mut() {
vecs.extend(
invested_capital
.vecs
.iter_mut()
.flatten()
.map(|v| &mut v.dateindex as &mut dyn AnyStoredVec),
);
}
if let Some(v) = self.spot_cost_basis_percentile.as_mut() {
vecs.push(&mut v.dateindex);
}
if let Some(v) = self.spot_invested_capital_percentile.as_mut() {
vecs.push(&mut v.dateindex);
}
vecs.into_par_iter()
}
@@ -134,6 +227,15 @@ impl CostBasisMetrics {
if let Some(percentiles) = self.percentiles.as_mut() {
percentiles.validate_computed_version_or_reset(base_version)?;
}
if let Some(invested_capital) = self.invested_capital.as_mut() {
invested_capital.validate_computed_version_or_reset(base_version)?;
}
if let Some(v) = self.spot_cost_basis_percentile.as_mut() {
v.dateindex.validate_computed_version_or_reset(base_version)?;
}
if let Some(v) = self.spot_invested_capital_percentile.as_mut() {
v.dateindex.validate_computed_version_or_reset(base_version)?;
}
Ok(())
}

View File

@@ -19,7 +19,7 @@ pub use unrealized::*;
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Version};
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, IterableVec};
@@ -69,9 +69,20 @@ impl CohortMetrics {
.then(|| UnrealizedMetrics::forced_import(cfg))
.transpose()?;
let relative = unrealized
.as_ref()
.map(|u| RelativeMetrics::forced_import(cfg, u, &supply, all_supply))
let realized = compute_dollars
.then(|| RealizedMetrics::forced_import(cfg))
.transpose()?;
let relative = (cfg.compute_relative() && unrealized.is_some())
.then(|| {
RelativeMetrics::forced_import(
cfg,
unrealized.as_ref().unwrap(),
&supply,
all_supply,
realized.as_ref(),
)
})
.transpose()?;
Ok(Self {
@@ -79,9 +90,7 @@ impl CohortMetrics {
supply,
outputs,
activity: ActivityMetrics::forced_import(cfg)?,
realized: compute_dollars
.then(|| RealizedMetrics::forced_import(cfg))
.transpose()?,
realized,
cost_basis: compute_dollars
.then(|| CostBasisMetrics::forced_import(cfg))
.transpose()?,
@@ -146,27 +155,6 @@ impl CohortMetrics {
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.supply.write()?;
self.outputs.write()?;
self.activity.write()?;
if let Some(realized) = self.realized.as_mut() {
realized.write()?;
}
if let Some(unrealized) = self.unrealized.as_mut() {
unrealized.write()?;
}
if let Some(cost_basis) = self.cost_basis.as_mut() {
cost_basis.write()?;
}
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new();
@@ -211,9 +199,9 @@ impl CohortMetrics {
pub fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Option<Dollars>,
height_price: Option<CentsUnsigned>,
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
date_price: Option<Option<CentsUnsigned>>,
state: &mut CohortState,
) -> Result<()> {
// Apply pending updates before reading
@@ -238,7 +226,11 @@ impl CohortMetrics {
// Only compute expensive percentiles at date boundaries (~144x reduction)
if let Some(dateindex) = dateindex {
cost_basis.truncate_push_percentiles(dateindex, state)?;
let spot = date_price
.unwrap()
.map(|c| c.to_dollars())
.unwrap_or(Dollars::NAN);
cost_basis.truncate_push_percentiles(dateindex, state, spot)?;
}
}
@@ -323,7 +315,7 @@ impl CohortMetrics {
}
if let Some(unrealized) = self.unrealized.as_mut() {
unrealized.compute_rest_part1(price, starting_indexes, exit)?;
unrealized.compute_rest(indexes, price, starting_indexes, exit)?;
}
if let Some(cost_basis) = self.cost_basis.as_mut() {

View File

@@ -40,12 +40,6 @@ impl OutputsMetrics {
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.utxo_count.height.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![&mut self.utxo_count.height as &mut dyn AnyStoredVec].into_par_iter()

View File

@@ -1,10 +1,13 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use brk_types::{
Bitcoin, CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height, StoredF32,
StoredF64, Version,
};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, Ident, ImportableVec,
IterableCloneableVec, IterableVec, Negate, PcoVec,
AnyStoredVec, AnyVec, BytesVec, EagerVec, Exit, GenericStoredVec, Ident, ImportableVec,
IterableCloneableVec, IterableVec, Negate, PcoVec, TypedVecIterator,
};
use crate::{
@@ -12,10 +15,11 @@ use crate::{
distribution::state::RealizedState,
indexes,
internal::{
ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromHeightSumCum, ComputedFromDateLast,
ComputedFromDateRatio, DollarsMinus, LazyBinaryFromHeightSum, LazyBinaryFromHeightSumCum,
LazyFromHeightSum, LazyFromHeightSumCum, LazyFromDateLast, PercentageDollarsF32,
PriceFromHeight, StoredF32Identity,
CentsUnsignedToDollars, ComputedFromDateLast, ComputedFromDateRatio,
ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromHeightSumCum, DollarsMinus,
DollarsPlus, LazyBinaryFromHeightSum, LazyBinaryFromHeightSumCum, LazyFromDateLast,
LazyFromHeightLast, LazyFromHeightSum, LazyFromHeightSumCum, LazyPriceFromCents,
PercentageDollarsF32, PriceFromHeight, StoredF32Identity,
},
price,
};
@@ -26,12 +30,24 @@ use super::ImportConfig;
#[derive(Clone, Traversable)]
pub struct RealizedMetrics {
// === Realized Cap ===
pub realized_cap: ComputedFromHeightLast<Dollars>,
pub realized_cap_cents: ComputedFromHeightLast<CentsUnsigned>,
pub realized_cap: LazyFromHeightLast<Dollars, CentsUnsigned>,
pub realized_price: PriceFromHeight,
pub realized_price_extra: ComputedFromDateRatio,
pub realized_cap_rel_to_own_market_cap: Option<ComputedFromHeightLast<StoredF32>>,
pub realized_cap_30d_delta: ComputedFromDateLast<Dollars>,
// === Investor Price (dollar-weighted average acquisition price) ===
pub investor_price_cents: ComputedFromHeightLast<CentsUnsigned>,
pub investor_price: LazyPriceFromCents,
pub investor_price_extra: ComputedFromDateRatio,
// === Raw values for aggregation (needed to compute investor_price for aggregated cohorts) ===
/// Raw Σ(price × sats) for realized cap aggregation
pub cap_raw: BytesVec<Height, CentsSats>,
/// Raw Σ(price² × sats) for investor_price aggregation
pub investor_cap_raw: BytesVec<Height, CentsSquaredSats>,
// === MVRV (Market Value to Realized Value) ===
// Proxy for realized_price_extra.ratio (close / realized_price = market_cap / realized_cap)
pub mvrv: LazyFromDateLast<StoredF32>,
@@ -44,17 +60,29 @@ pub struct RealizedMetrics {
pub realized_value: ComputedFromHeightSum<Dollars>,
// === Realized vs Realized Cap Ratios (lazy) ===
pub realized_profit_rel_to_realized_cap: LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
pub realized_profit_rel_to_realized_cap:
LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
pub realized_loss_rel_to_realized_cap: LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
pub net_realized_pnl_rel_to_realized_cap: LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
pub net_realized_pnl_rel_to_realized_cap:
LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
// === Total Realized PnL ===
pub total_realized_pnl: LazyFromHeightSum<Dollars>,
pub realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
// === Value Created/Destroyed ===
pub value_created: ComputedFromHeightSum<Dollars>,
pub value_destroyed: ComputedFromHeightSum<Dollars>,
// === Value Created/Destroyed Splits (stored) ===
pub profit_value_created: ComputedFromHeightSum<Dollars>,
pub profit_value_destroyed: ComputedFromHeightSum<Dollars>,
pub loss_value_created: ComputedFromHeightSum<Dollars>,
pub loss_value_destroyed: ComputedFromHeightSum<Dollars>,
// === Value Created/Destroyed Totals (lazy: profit + loss) ===
pub value_created: LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>,
pub value_destroyed: LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>,
// === Capitulation/Profit Flow (lazy aliases) ===
pub capitulation_flow: LazyFromHeightSum<Dollars>,
pub profit_flow: LazyFromHeightSum<Dollars>,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub adjusted_value_created: Option<LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>>,
@@ -77,24 +105,37 @@ pub struct RealizedMetrics {
pub net_realized_pnl_cumulative_30d_delta: ComputedFromDateLast<Dollars>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedFromDateLast<StoredF32>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedFromDateLast<StoredF32>,
// === ATH Regret ===
/// Realized ATH regret: Σ((ath - sell_price) × sats)
/// "How much more could have been made by selling at ATH instead"
pub ath_regret: ComputedFromHeightSumCum<Dollars>,
}
impl RealizedMetrics {
/// Import realized metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let v2 = Version::new(2);
let v3 = Version::new(3);
let extended = cfg.extended();
let compute_adjusted = cfg.compute_adjusted();
// Import combined types using forced_import which handles height + derived
let realized_cap = ComputedFromHeightLast::forced_import(
let realized_cap_cents = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("realized_cap"),
&cfg.name("realized_cap_cents"),
cfg.version,
cfg.indexes,
)?;
let realized_cap = LazyFromHeightLast::from_computed::<CentsUnsignedToDollars>(
&cfg.name("realized_cap"),
cfg.version,
realized_cap_cents.height.boxed_clone(),
&realized_cap_cents,
);
let realized_profit = ComputedFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("realized_profit"),
@@ -141,7 +182,7 @@ impl RealizedMetrics {
// Construct lazy ratio vecs
let realized_profit_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_last::<PercentageDollarsF32>(
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
realized_profit.height.boxed_clone(),
@@ -151,7 +192,7 @@ impl RealizedMetrics {
);
let realized_loss_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_last::<PercentageDollarsF32>(
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
realized_loss.height.boxed_clone(),
@@ -161,7 +202,7 @@ impl RealizedMetrics {
);
let net_realized_pnl_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_last::<PercentageDollarsF32>(
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
net_realized_pnl.height.boxed_clone(),
@@ -177,25 +218,104 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let value_created = ComputedFromHeightSum::forced_import(
// Investor price (dollar-weighted average acquisition price)
let investor_price_cents = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("value_created"),
&cfg.name("investor_price_cents"),
cfg.version,
cfg.indexes,
)?;
let value_destroyed = ComputedFromHeightSum::forced_import(
let investor_price = LazyPriceFromCents::from_computed(
&cfg.name("investor_price"),
cfg.version,
&investor_price_cents,
);
let investor_price_extra = ComputedFromDateRatio::forced_import_from_lazy(
cfg.db,
&cfg.name("value_destroyed"),
&cfg.name("investor_price"),
&investor_price.dollars,
cfg.version,
cfg.indexes,
extended,
)?;
// Raw values for aggregation
let cap_raw = BytesVec::forced_import(cfg.db, &cfg.name("cap_raw"), cfg.version)?;
let investor_cap_raw =
BytesVec::forced_import(cfg.db, &cfg.name("investor_cap_raw"), cfg.version)?;
// Import the 4 splits (stored)
let profit_value_created = ComputedFromHeightSum::forced_import(
cfg.db,
&cfg.name("profit_value_created"),
cfg.version,
cfg.indexes,
)?;
let profit_value_destroyed = ComputedFromHeightSum::forced_import(
cfg.db,
&cfg.name("profit_value_destroyed"),
cfg.version,
cfg.indexes,
)?;
let loss_value_created = ComputedFromHeightSum::forced_import(
cfg.db,
&cfg.name("loss_value_created"),
cfg.version,
cfg.indexes,
)?;
let loss_value_destroyed = ComputedFromHeightSum::forced_import(
cfg.db,
&cfg.name("loss_value_destroyed"),
cfg.version,
cfg.indexes,
)?;
// Create lazy totals (profit + loss)
let value_created = LazyBinaryFromHeightSum::from_computed::<DollarsPlus>(
&cfg.name("value_created"),
cfg.version,
&profit_value_created,
&loss_value_created,
);
let value_destroyed = LazyBinaryFromHeightSum::from_computed::<DollarsPlus>(
&cfg.name("value_destroyed"),
cfg.version,
&profit_value_destroyed,
&loss_value_destroyed,
);
// Create lazy aliases
let capitulation_flow = LazyFromHeightSum::from_computed::<Ident>(
&cfg.name("capitulation_flow"),
cfg.version,
loss_value_destroyed.height.boxed_clone(),
&loss_value_destroyed,
);
let profit_flow = LazyFromHeightSum::from_computed::<Ident>(
&cfg.name("profit_flow"),
cfg.version,
profit_value_destroyed.height.boxed_clone(),
&profit_value_destroyed,
);
// Create lazy adjusted vecs if compute_adjusted and up_to_1h is available
let adjusted_value_created =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyBinaryFromHeightSum::from_computed::<DollarsMinus>(
LazyBinaryFromHeightSum::from_binary::<
DollarsMinus,
Dollars,
Dollars,
Dollars,
Dollars,
>(
&cfg.name("adjusted_value_created"),
cfg.version,
&value_created,
@@ -205,7 +325,13 @@ impl RealizedMetrics {
let adjusted_value_destroyed =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyBinaryFromHeightSum::from_computed::<DollarsMinus>(
LazyBinaryFromHeightSum::from_binary::<
DollarsMinus,
Dollars,
Dollars,
Dollars,
Dollars,
>(
&cfg.name("adjusted_value_destroyed"),
cfg.version,
&value_destroyed,
@@ -221,7 +347,6 @@ impl RealizedMetrics {
cfg.version + v1,
cfg.indexes,
extended,
cfg.price,
)?;
// MVRV is a lazy proxy for realized_price_extra.ratio
@@ -234,6 +359,7 @@ impl RealizedMetrics {
Ok(Self {
// === Realized Cap ===
realized_cap_cents,
realized_cap,
realized_price,
realized_price_extra,
@@ -254,6 +380,13 @@ impl RealizedMetrics {
cfg.indexes,
)?,
// === Investor Price ===
investor_price_cents,
investor_price,
investor_price_extra,
cap_raw,
investor_cap_raw,
// === MVRV ===
mvrv,
@@ -281,17 +414,31 @@ impl RealizedMetrics {
})
.transpose()?,
// === Value Created/Destroyed ===
// === Value Created/Destroyed Splits (stored) ===
profit_value_created,
profit_value_destroyed,
loss_value_created,
loss_value_destroyed,
// === Value Created/Destroyed Totals (lazy: profit + loss) ===
value_created,
value_destroyed,
// === Capitulation/Profit Flow (lazy aliases) ===
capitulation_flow,
profit_flow,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
adjusted_value_created,
adjusted_value_destroyed,
// === SOPR ===
sopr: EagerVec::forced_import(cfg.db, &cfg.name("sopr"), cfg.version + v1)?,
sopr_7d_ema: EagerVec::forced_import(cfg.db, &cfg.name("sopr_7d_ema"), cfg.version + v1)?,
sopr_7d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sopr_7d_ema"),
cfg.version + v1,
)?,
sopr_30d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sopr_30d_ema"),
@@ -359,6 +506,15 @@ impl RealizedMetrics {
cfg.version + v3,
cfg.indexes,
)?,
// === ATH Regret ===
// v2: Changed to use max HIGH price during holding period instead of global ATH at send time
ath_regret: ComputedFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("realized_ath_regret"),
cfg.version + v2,
cfg.indexes,
)?,
})
}
@@ -369,47 +525,73 @@ impl RealizedMetrics {
.len()
.min(self.realized_profit.height.len())
.min(self.realized_loss.height.len())
.min(self.value_created.height.len())
.min(self.value_destroyed.height.len())
.min(self.investor_price_cents.height.len())
.min(self.cap_raw.len())
.min(self.investor_cap_raw.len())
.min(self.profit_value_created.height.len())
.min(self.profit_value_destroyed.height.len())
.min(self.loss_value_created.height.len())
.min(self.loss_value_destroyed.height.len())
.min(self.ath_regret.height.len())
}
/// Push realized state values to height-indexed vectors.
/// State values are CentsUnsigned (deterministic), converted to Dollars for storage.
pub fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> {
self.realized_cap.height.truncate_push(height, state.cap)?;
self.realized_cap_cents
.height
.truncate_push(height, state.cap())?;
self.realized_profit
.height
.truncate_push(height, state.profit)?;
.truncate_push(height, state.profit().to_dollars())?;
self.realized_loss
.height
.truncate_push(height, state.loss)?;
self.value_created
.truncate_push(height, state.loss().to_dollars())?;
self.investor_price_cents
.height
.truncate_push(height, state.value_created)?;
self.value_destroyed
.truncate_push(height, state.investor_price())?;
// Push raw values for aggregation
self.cap_raw.truncate_push(height, state.cap_raw())?;
self.investor_cap_raw
.truncate_push(height, state.investor_cap_raw())?;
// Push the 4 splits (totals are derived lazily)
self.profit_value_created
.height
.truncate_push(height, state.value_destroyed)?;
.truncate_push(height, state.profit_value_created().to_dollars())?;
self.profit_value_destroyed
.height
.truncate_push(height, state.profit_value_destroyed().to_dollars())?;
self.loss_value_created
.height
.truncate_push(height, state.loss_value_created().to_dollars())?;
self.loss_value_destroyed
.height
.truncate_push(height, state.loss_value_destroyed().to_dollars())?;
// ATH regret
self.ath_regret
.height
.truncate_push(height, state.ath_regret().to_dollars())?;
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.realized_cap.height.write()?;
self.realized_profit.height.write()?;
self.realized_loss.height.write()?;
self.value_created.height.write()?;
self.value_destroyed.height.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
[
&mut self.realized_cap.height as &mut dyn AnyStoredVec,
vec![
&mut self.realized_cap_cents.height as &mut dyn AnyStoredVec,
&mut self.realized_profit.height,
&mut self.realized_loss.height,
&mut self.value_created.height,
&mut self.value_destroyed.height,
&mut self.investor_price_cents.height,
// Raw values for aggregation
&mut self.cap_raw as &mut dyn AnyStoredVec,
&mut self.investor_cap_raw as &mut dyn AnyStoredVec,
// The 4 splits (totals are derived lazily)
&mut self.profit_value_created.height,
&mut self.profit_value_destroyed.height,
&mut self.loss_value_created.height,
&mut self.loss_value_destroyed.height,
// ATH regret
&mut self.ath_regret.height,
]
.into_par_iter()
}
@@ -427,11 +609,11 @@ impl RealizedMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.realized_cap.height.compute_sum_of_others(
self.realized_cap_cents.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.realized_cap.height)
.map(|v| &v.realized_cap_cents.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -451,19 +633,103 @@ impl RealizedMetrics {
.collect::<Vec<_>>(),
exit,
)?;
self.value_created.height.compute_sum_of_others(
// Aggregate raw values for investor_price computation
// (BytesVec doesn't have compute_sum_of_others, so we manually iterate)
// Validate version for investor_price_cents (same pattern as compute_sum_of_others)
let investor_price_dep_version = others
.iter()
.map(|o| o.investor_price_cents.height.version())
.fold(vecdb::Version::ZERO, |acc, v| acc + v);
self.investor_price_cents
.height
.validate_computed_version_or_reset(investor_price_dep_version)?;
let mut iters: Vec<_> = others
.iter()
.filter_map(|o| Some((o.cap_raw.iter().ok()?, o.investor_cap_raw.iter().ok()?)))
.collect();
// Start from where the target vecs left off (handles fresh/reset vecs)
let start = self
.cap_raw
.len()
.min(self.investor_cap_raw.len())
.min(self.investor_price_cents.height.len());
// End at the minimum length across all source vecs
let end = others.iter().map(|o| o.cap_raw.len()).min().unwrap_or(0);
for i in start..end {
let height = Height::from(i);
let mut sum_cap = CentsSats::ZERO;
let mut sum_investor_cap = CentsSquaredSats::ZERO;
for (cap_iter, investor_cap_iter) in &mut iters {
sum_cap += cap_iter.get_unwrap(height);
sum_investor_cap += investor_cap_iter.get_unwrap(height);
}
self.cap_raw.truncate_push(height, sum_cap)?;
self.investor_cap_raw
.truncate_push(height, sum_investor_cap)?;
// Compute investor_price from aggregated raw values
let investor_price = if sum_cap.inner() == 0 {
CentsUnsigned::ZERO
} else {
CentsUnsigned::new((sum_investor_cap / sum_cap.inner()) as u64)
};
self.investor_price_cents
.height
.truncate_push(height, investor_price)?;
}
// Write to persist computed_version (same pattern as compute_sum_of_others)
{
let _lock = exit.lock();
self.investor_price_cents.height.write()?;
}
// Aggregate the 4 splits (totals are derived lazily)
self.profit_value_created.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.value_created.height)
.map(|v| &v.profit_value_created.height)
.collect::<Vec<_>>(),
exit,
)?;
self.value_destroyed.height.compute_sum_of_others(
self.profit_value_destroyed.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.value_destroyed.height)
.map(|v| &v.profit_value_destroyed.height)
.collect::<Vec<_>>(),
exit,
)?;
self.loss_value_created.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.loss_value_created.height)
.collect::<Vec<_>>(),
exit,
)?;
self.loss_value_destroyed.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.loss_value_destroyed.height)
.collect::<Vec<_>>(),
exit,
)?;
// ATH regret
self.ath_regret.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.ath_regret.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -478,9 +744,14 @@ impl RealizedMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.realized_cap.compute_rest(indexes, starting_indexes, exit)?;
self.realized_profit.compute_rest(indexes, starting_indexes, exit)?;
self.realized_loss.compute_rest(indexes, starting_indexes, exit)?;
self.realized_cap_cents
.compute_rest(indexes, starting_indexes, exit)?;
self.realized_profit
.compute_rest(indexes, starting_indexes, exit)?;
self.realized_loss
.compute_rest(indexes, starting_indexes, exit)?;
self.investor_price_cents
.compute_rest(indexes, starting_indexes, exit)?;
// net_realized_pnl = profit - loss
self.net_realized_pnl
@@ -508,8 +779,19 @@ impl RealizedMetrics {
Ok(())
})?;
self.value_created.compute_rest(indexes, starting_indexes, exit)?;
self.value_destroyed.compute_rest(indexes, starting_indexes, exit)?;
// Compute derived aggregations for the 4 splits
// (value_created, value_destroyed, capitulation_flow, profit_flow are derived lazily)
self.profit_value_created
.compute_rest(indexes, starting_indexes, exit)?;
self.profit_value_destroyed
.compute_rest(indexes, starting_indexes, exit)?;
self.loss_value_created
.compute_rest(indexes, starting_indexes, exit)?;
self.loss_value_destroyed
.compute_rest(indexes, starting_indexes, exit)?;
// ATH regret
self.ath_regret
.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
}
@@ -545,6 +827,13 @@ impl RealizedMetrics {
exit,
Some(&self.realized_price.dateindex.0),
)?;
self.investor_price_extra.compute_rest(
price,
starting_indexes,
exit,
Some(&self.investor_price.dateindex.0),
)?;
}
// realized_cap_30d_delta
@@ -613,8 +902,12 @@ impl RealizedMetrics {
exit,
)?;
self.sell_side_risk_ratio_7d_ema
.compute_ema(starting_indexes.dateindex, &self.sell_side_risk_ratio, 7, exit)?;
self.sell_side_risk_ratio_7d_ema.compute_ema(
starting_indexes.dateindex,
&self.sell_side_risk_ratio,
7,
exit,
)?;
self.sell_side_risk_ratio_30d_ema.compute_ema(
starting_indexes.dateindex,

View File

@@ -8,7 +8,7 @@ use crate::internal::{
PercentageDollarsF32, PercentageSatsF64, Ratio32,
};
use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics};
use super::{ImportConfig, RealizedMetrics, SupplyMetrics, UnrealizedMetrics};
/// Relative metrics comparing cohort values to global values.
/// All `rel_to_` vecs are lazy - computed on-demand from their sources.
@@ -58,6 +58,12 @@ pub struct RelativeMetrics {
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
// === Invested Capital in Profit/Loss as % of Realized Cap ===
pub invested_capital_in_profit_pct:
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
pub invested_capital_in_loss_pct:
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
}
impl RelativeMetrics {
@@ -65,11 +71,13 @@ impl RelativeMetrics {
///
/// All `rel_to_` metrics are lazy - computed on-demand from their sources.
/// `all_supply` provides global sources for `*_rel_to_market_cap` and `*_rel_to_circulating_supply`.
/// `realized` provides realized_cap for invested capital percentage metrics.
pub fn forced_import(
cfg: &ImportConfig,
unrealized: &UnrealizedMetrics,
supply: &SupplyMetrics,
all_supply: Option<&SupplyMetrics>,
realized: Option<&RealizedMetrics>,
) -> Result<Self> {
let v1 = Version::ONE;
let v2 = Version::new(2);
@@ -350,6 +358,30 @@ impl RelativeMetrics {
&unrealized.total_unrealized_pnl,
)
}),
// === Invested Capital in Profit/Loss as % of Realized Cap ===
invested_capital_in_profit_pct: realized.map(|r| {
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_block_last::<
PercentageDollarsF32,
_,
>(
&cfg.name("invested_capital_in_profit_pct"),
cfg.version,
&unrealized.invested_capital_in_profit,
&r.realized_cap,
)
}),
invested_capital_in_loss_pct: realized.map(|r| {
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_block_last::<
PercentageDollarsF32,
_,
>(
&cfg.name("invested_capital_in_loss_pct"),
cfg.version,
&unrealized.invested_capital_in_loss,
&r.realized_cap,
)
}),
})
}
}

View File

@@ -58,12 +58,6 @@ impl SupplyMetrics {
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.total.sats.height.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![&mut self.total.sats.height as &mut dyn AnyStoredVec].into_par_iter()

View File

@@ -1,16 +1,21 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height};
use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height, Sats};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, Negate};
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Exit, GenericStoredVec, ImportableVec, Negate,
TypedVecIterator, Version,
};
use crate::{
ComputeIndexes,
distribution::state::UnrealizedState,
indexes,
internal::{
ComputedFromHeightAndDateLast, DollarsMinus, DollarsPlus, LazyBinaryFromHeightLast, LazyFromHeightLast,
ValueFromHeightAndDateLast,
ComputedFromHeightAndDateLast, ComputedFromHeightLast, DollarsMinus, DollarsPlus,
LazyBinaryFromHeightLast, LazyFromHeightLast, ValueFromHeightAndDateLast,
},
price,
};
use super::ImportConfig;
@@ -26,12 +31,39 @@ pub struct UnrealizedMetrics {
pub unrealized_profit: ComputedFromHeightAndDateLast<Dollars>,
pub unrealized_loss: ComputedFromHeightAndDateLast<Dollars>,
// === Invested Capital in Profit/Loss ===
pub invested_capital_in_profit: ComputedFromHeightAndDateLast<Dollars>,
pub invested_capital_in_loss: ComputedFromHeightAndDateLast<Dollars>,
// === Raw values for precise aggregation (used to compute pain/greed indices) ===
/// Σ(price × sats) for UTXOs in profit (raw u128, no indexes)
pub invested_capital_in_profit_raw: BytesVec<Height, CentsSats>,
/// Σ(price × sats) for UTXOs in loss (raw u128, no indexes)
pub invested_capital_in_loss_raw: BytesVec<Height, CentsSats>,
/// Σ(price² × sats) for UTXOs in profit (raw u128, no indexes)
pub investor_cap_in_profit_raw: BytesVec<Height, CentsSquaredSats>,
/// Σ(price² × sats) for UTXOs in loss (raw u128, no indexes)
pub investor_cap_in_loss_raw: BytesVec<Height, CentsSquaredSats>,
// === Pain/Greed Indices (computed in compute_rest from raw values + spot price) ===
/// investor_price_of_losers - spot (average distance underwater, weighted by $)
pub pain_index: ComputedFromHeightLast<Dollars>,
/// spot - investor_price_of_winners (average distance in profit, weighted by $)
pub greed_index: ComputedFromHeightLast<Dollars>,
/// greed_index - pain_index (positive = greedy market, negative = painful market)
pub net_sentiment: ComputedFromHeightLast<Dollars>,
// === Negated ===
pub neg_unrealized_loss: LazyFromHeightLast<Dollars>,
// === Net and Total ===
pub net_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
pub total_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
// === ATH Regret ===
/// Unrealized ATH regret: (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
/// "How much more I'd have if I sold at ATH instead of now" (refined formula accounting for cost basis)
pub ath_regret: ComputedFromHeightLast<Dollars>,
}
impl UnrealizedMetrics {
@@ -71,6 +103,56 @@ impl UnrealizedMetrics {
cfg.indexes,
)?;
// === Invested Capital in Profit/Loss ===
let invested_capital_in_profit = ComputedFromHeightAndDateLast::forced_import(
cfg.db,
&cfg.name("invested_capital_in_profit"),
cfg.version,
cfg.indexes,
)?;
let invested_capital_in_loss = ComputedFromHeightAndDateLast::forced_import(
cfg.db,
&cfg.name("invested_capital_in_loss"),
cfg.version,
cfg.indexes,
)?;
// === Raw values for precise aggregation ===
let invested_capital_in_profit_raw = BytesVec::forced_import(
cfg.db,
&cfg.name("invested_capital_in_profit_raw"),
cfg.version,
)?;
let invested_capital_in_loss_raw = BytesVec::forced_import(
cfg.db,
&cfg.name("invested_capital_in_loss_raw"),
cfg.version,
)?;
let investor_cap_in_profit_raw =
BytesVec::forced_import(cfg.db, &cfg.name("investor_cap_in_profit_raw"), cfg.version)?;
let investor_cap_in_loss_raw =
BytesVec::forced_import(cfg.db, &cfg.name("investor_cap_in_loss_raw"), cfg.version)?;
// === Pain/Greed Indices ===
let pain_index = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("pain_index"),
cfg.version,
cfg.indexes,
)?;
let greed_index = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("greed_index"),
cfg.version,
cfg.indexes,
)?;
let net_sentiment = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("net_sentiment"),
cfg.version,
cfg.indexes,
)?;
// === Negated ===
let neg_unrealized_loss = LazyFromHeightLast::from_computed_height_date::<Negate>(
&cfg.name("neg_unrealized_loss"),
@@ -79,27 +161,50 @@ impl UnrealizedMetrics {
);
// === Net and Total ===
let net_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_height_date_last::<DollarsMinus>(
&cfg.name("net_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
let total_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_height_date_last::<DollarsPlus>(
&cfg.name("total_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
let net_unrealized_pnl =
LazyBinaryFromHeightLast::from_computed_height_date_last::<DollarsMinus>(
&cfg.name("net_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
let total_unrealized_pnl =
LazyBinaryFromHeightLast::from_computed_height_date_last::<DollarsPlus>(
&cfg.name("total_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
// === ATH Regret ===
// v2: Changed to use HIGH prices consistently for ATH instead of mixing HIGH/CLOSE
// v3: Changed to ComputedFromHeightLast to derive dateindex from height (avoids precision loss)
let v3 = Version::new(3);
let ath_regret = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_ath_regret"),
cfg.version + v3,
cfg.indexes,
)?;
Ok(Self {
supply_in_profit,
supply_in_loss,
unrealized_profit,
unrealized_loss,
invested_capital_in_profit,
invested_capital_in_loss,
invested_capital_in_profit_raw,
invested_capital_in_loss_raw,
investor_cap_in_profit_raw,
investor_cap_in_loss_raw,
pain_index,
greed_index,
net_sentiment,
neg_unrealized_loss,
net_unrealized_pnl,
total_unrealized_pnl,
ath_regret,
})
}
@@ -111,6 +216,12 @@ impl UnrealizedMetrics {
.min(self.supply_in_loss.height.len())
.min(self.unrealized_profit.height.len())
.min(self.unrealized_loss.height.len())
.min(self.invested_capital_in_profit.height.len())
.min(self.invested_capital_in_loss.height.len())
.min(self.invested_capital_in_profit_raw.len())
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len())
}
/// Get minimum length across dateindex-indexed vectors written in block loop.
@@ -122,6 +233,8 @@ impl UnrealizedMetrics {
.min(self.supply_in_loss.indexes.sats_dateindex.len())
.min(self.unrealized_profit.dateindex.len())
.min(self.unrealized_loss.dateindex.len())
.min(self.invested_capital_in_profit.dateindex.len())
.min(self.invested_capital_in_loss.dateindex.len())
}
/// Push unrealized state values to height-indexed vectors.
@@ -140,10 +253,34 @@ impl UnrealizedMetrics {
.truncate_push(height, height_state.supply_in_loss)?;
self.unrealized_profit
.height
.truncate_push(height, height_state.unrealized_profit)?;
.truncate_push(height, height_state.unrealized_profit.to_dollars())?;
self.unrealized_loss
.height
.truncate_push(height, height_state.unrealized_loss)?;
.truncate_push(height, height_state.unrealized_loss.to_dollars())?;
self.invested_capital_in_profit
.height
.truncate_push(height, height_state.invested_capital_in_profit.to_dollars())?;
self.invested_capital_in_loss
.height
.truncate_push(height, height_state.invested_capital_in_loss.to_dollars())?;
// Raw values for aggregation
self.invested_capital_in_profit_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_profit_raw),
)?;
self.invested_capital_in_loss_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_loss_raw),
)?;
self.investor_cap_in_profit_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_profit_raw),
)?;
self.investor_cap_in_loss_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_loss_raw),
)?;
if let (Some(dateindex), Some(date_state)) = (dateindex, date_state) {
self.supply_in_profit
@@ -156,28 +293,22 @@ impl UnrealizedMetrics {
.truncate_push(dateindex, date_state.supply_in_loss)?;
self.unrealized_profit
.dateindex
.truncate_push(dateindex, date_state.unrealized_profit)?;
.truncate_push(dateindex, date_state.unrealized_profit.to_dollars())?;
self.unrealized_loss
.dateindex
.truncate_push(dateindex, date_state.unrealized_loss)?;
.truncate_push(dateindex, date_state.unrealized_loss.to_dollars())?;
self.invested_capital_in_profit.dateindex.truncate_push(
dateindex,
date_state.invested_capital_in_profit.to_dollars(),
)?;
self.invested_capital_in_loss
.dateindex
.truncate_push(dateindex, date_state.invested_capital_in_loss.to_dollars())?;
}
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.supply_in_profit.height.write()?;
self.supply_in_loss.height.write()?;
self.unrealized_profit.height.write()?;
self.unrealized_loss.height.write()?;
self.supply_in_profit.indexes.sats_dateindex.write()?;
self.supply_in_loss.indexes.sats_dateindex.write()?;
self.unrealized_profit.dateindex.write()?;
self.unrealized_loss.dateindex.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
@@ -185,10 +316,18 @@ impl UnrealizedMetrics {
&mut self.supply_in_loss.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.height as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit_raw as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss_raw as &mut dyn AnyStoredVec,
&mut self.investor_cap_in_profit_raw as &mut dyn AnyStoredVec,
&mut self.investor_cap_in_loss_raw as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss.rest.dateindex as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
@@ -232,6 +371,78 @@ impl UnrealizedMetrics {
.collect::<Vec<_>>(),
exit,
)?;
self.invested_capital_in_profit
.height
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.invested_capital_in_profit.height)
.collect::<Vec<_>>(),
exit,
)?;
self.invested_capital_in_loss.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.invested_capital_in_loss.height)
.collect::<Vec<_>>(),
exit,
)?;
// Raw values for aggregation - manually sum since BytesVec doesn't have compute_sum_of_others
// Create iterators for each source vec
let mut iters: Vec<_> = others
.iter()
.filter_map(|o| {
Some((
o.invested_capital_in_profit_raw.iter().ok()?,
o.invested_capital_in_loss_raw.iter().ok()?,
o.investor_cap_in_profit_raw.iter().ok()?,
o.investor_cap_in_loss_raw.iter().ok()?,
))
})
.collect();
// Start from where the target vecs left off (handles fresh/reset vecs)
let start = self
.invested_capital_in_profit_raw
.len()
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len());
// End at the minimum length across all source vecs
let end = others
.iter()
.map(|o| o.invested_capital_in_profit_raw.len())
.min()
.unwrap_or(0);
for i in start..end {
let height = Height::from(i);
let mut sum_invested_profit = CentsSats::ZERO;
let mut sum_invested_loss = CentsSats::ZERO;
let mut sum_investor_profit = CentsSquaredSats::ZERO;
let mut sum_investor_loss = CentsSquaredSats::ZERO;
for (ip_iter, il_iter, cap_p_iter, cap_l_iter) in &mut iters {
sum_invested_profit += ip_iter.get_unwrap(height);
sum_invested_loss += il_iter.get_unwrap(height);
sum_investor_profit += cap_p_iter.get_unwrap(height);
sum_investor_loss += cap_l_iter.get_unwrap(height);
}
self.invested_capital_in_profit_raw
.truncate_push(height, sum_invested_profit)?;
self.invested_capital_in_loss_raw
.truncate_push(height, sum_invested_loss)?;
self.investor_cap_in_profit_raw
.truncate_push(height, sum_investor_profit)?;
self.investor_cap_in_loss_raw
.truncate_push(height, sum_investor_loss)?;
}
self.supply_in_profit
.indexes
.sats_dateindex
@@ -270,13 +481,34 @@ impl UnrealizedMetrics {
.collect::<Vec<_>>(),
exit,
)?;
self.invested_capital_in_profit
.dateindex
.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.invested_capital_in_profit.dateindex)
.collect::<Vec<_>>(),
exit,
)?;
self.invested_capital_in_loss
.dateindex
.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.invested_capital_in_loss.dateindex)
.collect::<Vec<_>>(),
exit,
)?;
Ok(())
}
/// First phase of computed metrics.
pub fn compute_rest_part1(
/// Compute derived metrics from stored values + price.
pub fn compute_rest(
&mut self,
price: Option<&crate::price::Vecs>,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -286,6 +518,122 @@ impl UnrealizedMetrics {
self.supply_in_loss
.compute_dollars_from_price(price, starting_indexes, exit)?;
// Compute pain/greed/net from raw values + spot price
let Some(price) = price else {
return Ok(());
};
// Pain index: investor_price_of_losers - spot
self.pain_index
.compute_all(indexes, starting_indexes, exit, |vec| {
Ok(vec.compute_transform3(
starting_indexes.height,
&self.investor_cap_in_loss_raw,
&self.invested_capital_in_loss_raw,
&price.cents.split.height.close,
|(h, investor_cap, invested_cap, spot, ..)| {
if invested_cap.inner() == 0 {
return (h, Dollars::ZERO);
}
let investor_price_losers = investor_cap.inner() / invested_cap.inner();
let spot_u128 = (*spot).as_u128();
(
h,
CentsUnsigned::new((investor_price_losers - spot_u128) as u64)
.to_dollars(),
)
},
exit,
)?)
})?;
// Greed index: spot - investor_price_of_winners
self.greed_index
.compute_all(indexes, starting_indexes, exit, |vec| {
Ok(vec.compute_transform3(
starting_indexes.height,
&self.investor_cap_in_profit_raw,
&self.invested_capital_in_profit_raw,
&price.cents.split.height.close,
|(h, investor_cap, invested_cap, spot, ..)| {
if invested_cap.inner() == 0 {
return (h, Dollars::ZERO);
}
let investor_price_winners = investor_cap.inner() / invested_cap.inner();
let spot_u128 = (*spot).as_u128();
(
h,
CentsUnsigned::new((spot_u128 - investor_price_winners) as u64)
.to_dollars(),
)
},
exit,
)?)
})?;
// Net sentiment: greed - pain
self.net_sentiment
.compute_all(indexes, starting_indexes, exit, |vec| {
Ok(vec.compute_subtract(
starting_indexes.height,
&self.greed_index.height,
&self.pain_index.height,
exit,
)?)
})?;
// ATH regret: (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
// This is the refined formula that accounts for cost basis:
// - For UTXOs in profit: regret = ATH - spot (they could have sold at ATH instead of now)
// - For UTXOs in loss: regret = ATH - cost_basis (they could have sold at ATH instead of holding)
// ath = running max of high prices
// Height computation
{
// Pre-compute ATH as running max of high prices
let height_ath: Vec<CentsUnsigned> = {
let mut ath = CentsUnsigned::ZERO;
price
.cents
.split
.height
.high
.into_iter()
.map(|high| {
if *high > ath {
ath = *high;
}
ath
})
.collect()
};
self.ath_regret.height.compute_transform4(
starting_indexes.height,
&price.cents.split.height.close,
&self.supply_in_profit.height,
&self.supply_in_loss.height,
&self.invested_capital_in_loss_raw,
|(h, spot, supply_profit, supply_loss, invested_loss_raw, ..)| {
let ath = height_ath[usize::from(h)];
// (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
let ath_u128 = ath.as_u128();
let spot_u128 = spot.as_u128();
let profit_regret = (ath_u128 - spot_u128) * supply_profit.as_u128();
// invested_loss_raw is CentsSats (already in cents*sats scale)
let loss_regret = ath_u128 * supply_loss.as_u128() - invested_loss_raw.inner();
let regret_raw = profit_regret + loss_regret;
let regret_cents = CentsUnsigned::new((regret_raw / Sats::ONE_BTC_U128) as u64);
(h, regret_cents.to_dollars())
},
exit,
)?;
}
// DateIndex computation: derive from height values using last-value aggregation
self.ath_regret
.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
}
}

View File

@@ -1,6 +1,6 @@
use std::ops::{Add, AddAssign, SubAssign};
use brk_types::{Dollars, SupplyState, Timestamp};
use brk_types::{CentsUnsigned, SupplyState, Timestamp};
use serde::Serialize;
#[derive(Debug, Clone, Serialize)]
@@ -8,7 +8,7 @@ pub struct BlockState {
#[serde(flatten)]
pub supply: SupplyState,
#[serde(skip)]
pub price: Option<Dollars>,
pub price: Option<CentsUnsigned>,
#[serde(skip)]
pub timestamp: Timestamp,
}

View File

@@ -1,7 +1,7 @@
use std::path::Path;
use brk_error::Result;
use brk_types::{Age, Dollars, Height, LoadedAddressData, Sats, SupplyState};
use brk_types::{Age, CentsUnsigned, Height, LoadedAddressData, Sats, SupplyState};
use vecdb::unlikely;
use super::{super::cost_basis::RealizedState, base::CohortState};
@@ -28,12 +28,12 @@ impl AddressCohortState {
self.inner.satblocks_destroyed = Sats::ZERO;
self.inner.satdays_destroyed = Sats::ZERO;
if let Some(realized) = self.inner.realized.as_mut() {
*realized = RealizedState::NAN;
*realized = RealizedState::default();
}
}
pub fn reset_price_to_amount_if_needed(&mut self) -> Result<()> {
self.inner.reset_price_to_amount_if_needed()
pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> {
self.inner.reset_cost_basis_data_if_needed()
}
pub fn reset_single_iteration_values(&mut self) {
@@ -44,35 +44,23 @@ impl AddressCohortState {
&mut self,
addressdata: &mut LoadedAddressData,
value: Sats,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
current_price: CentsUnsigned,
prev_price: CentsUnsigned,
ath: CentsUnsigned,
age: Age,
) -> Result<()> {
let compute_price = current_price.is_some();
let prev = addressdata.cost_basis_snapshot();
addressdata.send(value, Some(prev_price))?;
let current = addressdata.cost_basis_snapshot();
let prev_realized_price = compute_price.then(|| addressdata.realized_price());
let prev_supply_state = SupplyState {
utxo_count: addressdata.utxo_count() as u64,
value: addressdata.balance(),
};
addressdata.send(value, prev_price)?;
let supply_state = SupplyState {
utxo_count: addressdata.utxo_count() as u64,
value: addressdata.balance(),
};
self.inner.send_(
&SupplyState {
utxo_count: 1,
value,
},
self.inner.send_address(
&SupplyState { utxo_count: 1, value },
current_price,
prev_price,
ath,
age,
compute_price.then(|| (addressdata.realized_price(), &supply_state)),
prev_realized_price.map(|prev_price| (prev_price, &prev_supply_state)),
&current,
&prev,
);
Ok(())
@@ -82,7 +70,7 @@ impl AddressCohortState {
&mut self,
address_data: &mut LoadedAddressData,
value: Sats,
price: Option<Dollars>,
price: CentsUnsigned,
) {
self.receive_outputs(address_data, value, price, 1);
}
@@ -91,50 +79,31 @@ impl AddressCohortState {
&mut self,
address_data: &mut LoadedAddressData,
value: Sats,
price: Option<Dollars>,
price: CentsUnsigned,
output_count: u32,
) {
let compute_price = price.is_some();
let prev = address_data.cost_basis_snapshot();
address_data.receive_outputs(value, Some(price), output_count);
let current = address_data.cost_basis_snapshot();
let prev_realized_price = compute_price.then(|| address_data.realized_price());
let prev_supply_state = SupplyState {
utxo_count: address_data.utxo_count() as u64,
value: address_data.balance(),
};
address_data.receive_outputs(value, price, output_count);
let supply_state = SupplyState {
utxo_count: address_data.utxo_count() as u64,
value: address_data.balance(),
};
self.inner.receive_(
&SupplyState {
utxo_count: output_count as u64,
value,
},
self.inner.receive_address(
&SupplyState { utxo_count: output_count as u64, value },
price,
compute_price.then(|| (address_data.realized_price(), &supply_state)),
prev_realized_price.map(|prev_price| (prev_price, &prev_supply_state)),
&current,
&prev,
);
}
pub fn add(&mut self, addressdata: &LoadedAddressData) {
self.addr_count += 1;
self.inner.increment_(
&addressdata.into(),
addressdata.realized_cap,
addressdata.realized_price(),
);
self.inner.increment_snapshot(&addressdata.cost_basis_snapshot());
}
pub fn subtract(&mut self, addressdata: &LoadedAddressData) {
let addr_supply: SupplyState = addressdata.into();
let realized_price = addressdata.realized_price();
let snapshot = addressdata.cost_basis_snapshot();
// Check for potential underflow before it happens
if unlikely(self.inner.supply.utxo_count < addr_supply.utxo_count) {
if unlikely(self.inner.supply.utxo_count < snapshot.supply_state.utxo_count) {
panic!(
"AddressCohortState::subtract underflow!\n\
Cohort state: addr_count={}, supply={}\n\
@@ -142,10 +111,10 @@ impl AddressCohortState {
Address supply: {}\n\
Realized price: {}\n\
This means the address is not properly tracked in this cohort.",
self.addr_count, self.inner.supply, addressdata, addr_supply, realized_price
self.addr_count, self.inner.supply, addressdata, snapshot.supply_state, snapshot.realized_price
);
}
if unlikely(self.inner.supply.value < addr_supply.value) {
if unlikely(self.inner.supply.value < snapshot.supply_state.value) {
panic!(
"AddressCohortState::subtract value underflow!\n\
Cohort state: addr_count={}, supply={}\n\
@@ -153,7 +122,7 @@ impl AddressCohortState {
Address supply: {}\n\
Realized price: {}\n\
This means the address is not properly tracked in this cohort.",
self.addr_count, self.inner.supply, addressdata, addr_supply, realized_price
self.addr_count, self.inner.supply, addressdata, snapshot.supply_state, snapshot.realized_price
);
}
@@ -162,12 +131,11 @@ impl AddressCohortState {
"AddressCohortState::subtract addr_count underflow! addr_count=0\n\
Address being subtracted: {}\n\
Realized price: {}",
addressdata, realized_price
addressdata, snapshot.realized_price
)
});
self.inner
.decrement_(&addr_supply, addressdata.realized_cap, realized_price);
self.inner.decrement_snapshot(&snapshot);
}
pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -1,93 +1,78 @@
use std::path::Path;
use brk_error::Result;
use brk_types::{Age, Dollars, Height, Sats, SupplyState};
use crate::internal::PERCENTILES_LEN;
use brk_types::{Age, CentsSats, CentsUnsigned, CostBasisSnapshot, Height, Sats, SupplyState};
use super::super::cost_basis::{
CachedUnrealizedState, PriceToAmount, RealizedState, UnrealizedState,
CachedUnrealizedState, Percentiles, CostBasisData, RealizedState, UnrealizedState,
};
/// State tracked for each cohort during computation.
#[derive(Clone)]
pub struct CohortState {
/// Current supply in this cohort
pub supply: SupplyState,
/// Realized cap and profit/loss (requires price data)
pub realized: Option<RealizedState>,
/// Amount sent in current block
pub sent: Sats,
/// Satoshi-blocks destroyed (supply * blocks_old when spent)
pub satblocks_destroyed: Sats,
/// Satoshi-days destroyed (supply * days_old when spent)
pub satdays_destroyed: Sats,
/// Price distribution for percentile calculations (requires price data)
price_to_amount: Option<PriceToAmount>,
/// Cached unrealized state for O(k) incremental updates.
cost_basis_data: Option<CostBasisData>,
cached_unrealized: Option<CachedUnrealizedState>,
}
impl CohortState {
/// Create new cohort state.
pub fn new(path: &Path, name: &str, compute_dollars: bool) -> Self {
Self {
supply: SupplyState::default(),
realized: compute_dollars.then_some(RealizedState::NAN),
realized: compute_dollars.then_some(RealizedState::default()),
sent: Sats::ZERO,
satblocks_destroyed: Sats::ZERO,
satdays_destroyed: Sats::ZERO,
price_to_amount: compute_dollars.then_some(PriceToAmount::create(path, name)),
cost_basis_data: compute_dollars.then_some(CostBasisData::create(path, name)),
cached_unrealized: None,
}
}
/// Import state from checkpoint.
pub fn import_at_or_before(&mut self, height: Height) -> Result<Height> {
// Invalidate cache when importing new data
self.cached_unrealized = None;
match self.price_to_amount.as_mut() {
match self.cost_basis_data.as_mut() {
Some(p) => p.import_at_or_before(height),
None => Ok(height),
}
}
/// Reset price_to_amount if needed (for starting fresh).
pub fn reset_price_to_amount_if_needed(&mut self) -> Result<()> {
if let Some(p) = self.price_to_amount.as_mut() {
/// Restore realized cap from cost_basis_data after import.
/// Uses the exact persisted values instead of recomputing from the map.
pub fn restore_realized_cap(&mut self) {
if let Some(cost_basis_data) = self.cost_basis_data.as_ref()
&& let Some(realized) = self.realized.as_mut()
{
realized.set_cap_raw(cost_basis_data.cap_raw());
realized.set_investor_cap_raw(cost_basis_data.investor_cap_raw());
}
}
pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> {
if let Some(p) = self.cost_basis_data.as_mut() {
p.clean()?;
p.init();
}
// Invalidate cache when data is reset
self.cached_unrealized = None;
Ok(())
}
/// Apply pending price_to_amount updates. Must be called before reads.
pub fn apply_pending(&mut self) {
if let Some(p) = self.price_to_amount.as_mut() {
if let Some(p) = self.cost_basis_data.as_mut() {
p.apply_pending();
}
}
/// Get first (lowest) price entry in distribution.
pub fn price_to_amount_first_key_value(&self) -> Option<(Dollars, &Sats)> {
self.price_to_amount.as_ref()?.first_key_value()
pub fn cost_basis_data_first_key_value(&self) -> Option<(CentsUnsigned, &Sats)> {
self.cost_basis_data.as_ref()?.first_key_value().map(|(k, v)| (k.into(), v))
}
/// Get last (highest) price entry in distribution.
pub fn price_to_amount_last_key_value(&self) -> Option<(Dollars, &Sats)> {
self.price_to_amount.as_ref()?.last_key_value()
pub fn cost_basis_data_last_key_value(&self) -> Option<(CentsUnsigned, &Sats)> {
self.cost_basis_data.as_ref()?.last_key_value().map(|(k, v)| (k.into(), v))
}
/// Reset per-block values before processing next block.
pub fn reset_single_iteration_values(&mut self) {
self.sent = Sats::ZERO;
self.satdays_destroyed = Sats::ZERO;
@@ -97,177 +82,137 @@ impl CohortState {
}
}
/// Add supply to this cohort (e.g., when UTXO ages into cohort).
pub fn increment(&mut self, supply: &SupplyState, price: Option<Dollars>) {
pub fn increment(&mut self, supply: &SupplyState, price: Option<CentsUnsigned>) {
match price {
Some(p) => self.increment_snapshot(&CostBasisSnapshot::from_utxo(p, supply)),
None => self.supply += supply,
}
}
pub fn increment_snapshot(&mut self, s: &CostBasisSnapshot) {
self.supply += &s.supply_state;
if s.supply_state.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
realized.increment_snapshot(s.price_sats, s.investor_cap);
self.cost_basis_data.as_mut().unwrap().increment(
s.realized_price,
s.supply_state.value,
s.price_sats,
s.investor_cap,
);
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_receive(s.realized_price, s.supply_state.value);
}
}
}
pub fn decrement(&mut self, supply: &SupplyState, price: Option<CentsUnsigned>) {
match price {
Some(p) => self.decrement_snapshot(&CostBasisSnapshot::from_utxo(p, supply)),
None => self.supply -= supply,
}
}
pub fn decrement_snapshot(&mut self, s: &CostBasisSnapshot) {
self.supply -= &s.supply_state;
if s.supply_state.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
realized.decrement_snapshot(s.price_sats, s.investor_cap);
self.cost_basis_data.as_mut().unwrap().decrement(
s.realized_price,
s.supply_state.value,
s.price_sats,
s.investor_cap,
);
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(s.realized_price, s.supply_state.value);
}
}
}
pub fn receive_utxo(&mut self, supply: &SupplyState, price: Option<CentsUnsigned>) {
self.supply += supply;
if supply.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
let price = price.unwrap();
realized.increment(supply, price);
self.price_to_amount
.as_mut()
.unwrap()
.increment(price, supply);
let sats = supply.value;
// Compute once using typed values
let price_sats = CentsSats::from_price_sats(price, sats);
let investor_cap = price_sats.to_investor_cap(price);
realized.receive(price, sats);
self.cost_basis_data.as_mut().unwrap().increment(
price,
sats,
price_sats,
investor_cap,
);
// Update cache for added supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_receive(price, supply.value);
cache.on_receive(price, sats);
}
}
}
/// Add supply with pre-computed realized cap (for address cohorts).
pub fn increment_(
pub fn receive_address(
&mut self,
supply: &SupplyState,
realized_cap: Dollars,
realized_price: Dollars,
price: CentsUnsigned,
current: &CostBasisSnapshot,
prev: &CostBasisSnapshot,
) {
self.supply += supply;
if supply.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
realized.increment_(realized_cap);
self.price_to_amount
.as_mut()
.unwrap()
.increment(realized_price, supply);
realized.receive(price, supply.value);
// Update cache for added supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_receive(realized_price, supply.value);
}
}
}
if current.supply_state.value.is_not_zero() {
self.cost_basis_data.as_mut().unwrap().increment(
current.realized_price,
current.supply_state.value,
current.price_sats,
current.investor_cap,
);
/// Remove supply from this cohort (e.g., when UTXO ages out of cohort).
pub fn decrement(&mut self, supply: &SupplyState, price: Option<Dollars>) {
self.supply -= supply;
if supply.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
let price = price.unwrap();
realized.decrement(supply, price);
self.price_to_amount
.as_mut()
.unwrap()
.decrement(price, supply);
// Update cache for removed supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(price, supply.value);
}
}
}
/// Remove supply with pre-computed realized cap (for address cohorts).
pub fn decrement_(
&mut self,
supply: &SupplyState,
realized_cap: Dollars,
realized_price: Dollars,
) {
self.supply -= supply;
if supply.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
realized.decrement_(realized_cap);
self.price_to_amount
.as_mut()
.unwrap()
.decrement(realized_price, supply);
// Update cache for removed supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(realized_price, supply.value);
}
}
}
/// Process received output (new UTXO in cohort).
pub fn receive(&mut self, supply: &SupplyState, price: Option<Dollars>) {
self.receive_(supply, price, price.map(|price| (price, supply)), None);
}
/// Process received output with custom price_to_amount updates (for address cohorts).
pub fn receive_(
&mut self,
supply: &SupplyState,
price: Option<Dollars>,
price_to_amount_increment: Option<(Dollars, &SupplyState)>,
price_to_amount_decrement: Option<(Dollars, &SupplyState)>,
) {
self.supply += supply;
if supply.value > Sats::ZERO
&& let Some(realized) = self.realized.as_mut()
{
let price = price.unwrap();
realized.receive(supply, price);
if let Some((price, supply)) = price_to_amount_increment
&& supply.value.is_not_zero()
{
self.price_to_amount
.as_mut()
.unwrap()
.increment(price, supply);
// Update cache for added supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_receive(price, supply.value);
cache.on_receive(current.realized_price, current.supply_state.value);
}
}
if let Some((price, supply)) = price_to_amount_decrement
&& supply.value.is_not_zero()
{
self.price_to_amount
.as_mut()
.unwrap()
.decrement(price, supply);
if prev.supply_state.value.is_not_zero() {
self.cost_basis_data.as_mut().unwrap().decrement(
prev.realized_price,
prev.supply_state.value,
prev.price_sats,
prev.investor_cap,
);
// Update cache for removed supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(price, supply.value);
cache.on_send(prev.realized_price, prev.supply_state.value);
}
}
}
}
/// Process spent input (UTXO leaving cohort).
pub fn send(
pub fn send_utxo(
&mut self,
supply: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
current_price: Option<CentsUnsigned>,
prev_price: Option<CentsUnsigned>,
ath: Option<CentsUnsigned>,
age: Age,
) {
self.send_(
supply,
current_price,
prev_price,
age,
None,
prev_price.map(|prev_price| (prev_price, supply)),
);
}
/// Process spent input with custom price_to_amount updates (for address cohorts).
#[allow(clippy::too_many_arguments)]
pub fn send_(
&mut self,
supply: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
age: Age,
price_to_amount_increment: Option<(Dollars, &SupplyState)>,
price_to_amount_decrement: Option<(Dollars, &SupplyState)>,
) {
if supply.utxo_count == 0 {
return;
@@ -281,77 +226,118 @@ impl CohortState {
self.satdays_destroyed += age.satdays_destroyed(supply.value);
if let Some(realized) = self.realized.as_mut() {
let current_price = current_price.unwrap();
let prev_price = prev_price.unwrap();
realized.send(supply, current_price, prev_price);
let cp = current_price.unwrap();
let pp = prev_price.unwrap();
let ath_price = ath.unwrap();
let sats = supply.value;
if let Some((price, supply)) = price_to_amount_increment
&& supply.value.is_not_zero()
{
self.price_to_amount
.as_mut()
.unwrap()
.increment(price, supply);
// Compute ONCE using typed values
let current_ps = CentsSats::from_price_sats(cp, sats);
let prev_ps = CentsSats::from_price_sats(pp, sats);
let ath_ps = CentsSats::from_price_sats(ath_price, sats);
let prev_investor_cap = prev_ps.to_investor_cap(pp);
realized.send(current_ps, prev_ps, ath_ps, prev_investor_cap);
self.cost_basis_data.as_mut().unwrap().decrement(
pp,
sats,
prev_ps,
prev_investor_cap,
);
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(pp, sats);
}
}
}
}
#[allow(clippy::too_many_arguments)]
pub fn send_address(
&mut self,
supply: &SupplyState,
current_price: CentsUnsigned,
prev_price: CentsUnsigned,
ath: CentsUnsigned,
age: Age,
current: &CostBasisSnapshot,
prev: &CostBasisSnapshot,
) {
if supply.utxo_count == 0 {
return;
}
self.supply -= supply;
if supply.value > Sats::ZERO {
self.sent += supply.value;
self.satblocks_destroyed += age.satblocks_destroyed(supply.value);
self.satdays_destroyed += age.satdays_destroyed(supply.value);
if let Some(realized) = self.realized.as_mut() {
let sats = supply.value;
// Compute once for realized.send using typed values
let current_ps = CentsSats::from_price_sats(current_price, sats);
let prev_ps = CentsSats::from_price_sats(prev_price, sats);
let ath_ps = CentsSats::from_price_sats(ath, sats);
let prev_investor_cap = prev_ps.to_investor_cap(prev_price);
realized.send(current_ps, prev_ps, ath_ps, prev_investor_cap);
if current.supply_state.value.is_not_zero() {
self.cost_basis_data.as_mut().unwrap().increment(
current.realized_price,
current.supply_state.value,
current.price_sats,
current.investor_cap,
);
// Update cache for added supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_receive(price, supply.value);
cache.on_receive(current.realized_price, current.supply_state.value);
}
}
if let Some((price, supply)) = price_to_amount_decrement
&& supply.value.is_not_zero()
{
self.price_to_amount
.as_mut()
.unwrap()
.decrement(price, supply);
if prev.supply_state.value.is_not_zero() {
self.cost_basis_data.as_mut().unwrap().decrement(
prev.realized_price,
prev.supply_state.value,
prev.price_sats,
prev.investor_cap,
);
// Update cache for removed supply
if let Some(cache) = self.cached_unrealized.as_mut() {
cache.on_send(price, supply.value);
cache.on_send(prev.realized_price, prev.supply_state.value);
}
}
}
}
}
/// Compute prices at percentile thresholds.
pub fn compute_percentile_prices(&self) -> [Dollars; PERCENTILES_LEN] {
match self.price_to_amount.as_ref() {
Some(p) if !p.is_empty() => p.compute_percentiles(),
_ => [Dollars::NAN; PERCENTILES_LEN],
}
pub fn compute_percentiles(&self) -> Option<Percentiles> {
self.cost_basis_data.as_ref()?.compute_percentiles()
}
/// Compute unrealized profit/loss at current price.
/// Uses O(k) incremental updates for height_price where k = flip range size.
pub fn compute_unrealized_states(
&mut self,
height_price: Dollars,
date_price: Option<Dollars>,
height_price: CentsUnsigned,
date_price: Option<CentsUnsigned>,
) -> (UnrealizedState, Option<UnrealizedState>) {
let price_to_amount = match self.price_to_amount.as_ref() {
let cost_basis_data = match self.cost_basis_data.as_ref() {
Some(p) if !p.is_empty() => p,
_ => {
return (
UnrealizedState::NAN,
date_price.map(|_| UnrealizedState::NAN),
);
}
_ => return (UnrealizedState::ZERO, date_price.map(|_| UnrealizedState::ZERO)),
};
// Date unrealized: compute from scratch (only at date boundaries, ~144x less frequent)
let date_state = date_price.map(|date_price| {
CachedUnrealizedState::compute_full_standalone(date_price, price_to_amount)
CachedUnrealizedState::compute_full_standalone(date_price.into(), cost_basis_data)
});
// Height unrealized: use incremental cache (O(k) where k = flip range)
let height_state = if let Some(cache) = self.cached_unrealized.as_mut() {
cache.get_at_price(height_price, price_to_amount).clone()
cache.get_at_price(height_price, cost_basis_data)
} else {
let cache = CachedUnrealizedState::compute_fresh(height_price, price_to_amount);
let state = cache.state.clone();
let cache = CachedUnrealizedState::compute_fresh(height_price, cost_basis_data);
let state = cache.current_state();
self.cached_unrealized = Some(cache);
state
};
@@ -359,33 +345,24 @@ impl CohortState {
(height_state, date_state)
}
/// Flush state to disk at checkpoint.
pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> {
if let Some(p) = self.price_to_amount.as_mut() {
if let Some(p) = self.cost_basis_data.as_mut() {
p.write(height, cleanup)?;
}
Ok(())
}
/// Get first (lowest) price in distribution.
pub fn min_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.first_key_value()
.map(|(k, _)| k)
pub fn min_price(&self) -> Option<CentsUnsigned> {
self.cost_basis_data.as_ref()?.first_key_value().map(|(k, _)| k.into())
}
/// Get last (highest) price in distribution.
pub fn max_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.last_key_value()
.map(|(k, _)| k)
pub fn max_price(&self) -> Option<CentsUnsigned> {
self.cost_basis_data.as_ref()?.last_key_value().map(|(k, _)| k.into())
}
/// Get iterator over price_to_amount for merged percentile computation.
/// Returns None if price data is not tracked for this cohort.
pub fn price_to_amount_iter(&self) -> Option<impl Iterator<Item = (Dollars, &Sats)>> {
self.price_to_amount.as_ref().map(|p| p.iter())
pub fn cost_basis_data_iter(
&self,
) -> Option<impl Iterator<Item = (CentsUnsigned, &Sats)>> {
self.cost_basis_data.as_ref().map(|p| p.iter().map(|(k, v)| (k.into(), v)))
}
}

View File

@@ -14,8 +14,8 @@ impl UTXOCohortState {
Self(CohortState::new(path, name, compute_dollars))
}
pub fn reset_price_to_amount_if_needed(&mut self) -> Result<()> {
self.0.reset_price_to_amount_if_needed()
pub fn reset_cost_basis_data_if_needed(&mut self) -> Result<()> {
self.0.reset_cost_basis_data_if_needed()
}
/// Reset state for fresh start.
@@ -25,7 +25,7 @@ impl UTXOCohortState {
self.0.satblocks_destroyed = Sats::ZERO;
self.0.satdays_destroyed = Sats::ZERO;
if let Some(realized) = self.0.realized.as_mut() {
*realized = RealizedState::NAN;
*realized = RealizedState::default();
}
}
}

View File

@@ -0,0 +1,323 @@
use std::{
collections::BTreeMap,
fs,
ops::Bound,
path::{Path, PathBuf},
};
use brk_error::{Error, Result};
use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, CentsUnsignedCompact, Height, Sats};
use pco::{
ChunkConfig,
standalone::{simple_compress, simple_decompress},
};
use rustc_hash::FxHashMap;
use vecdb::Bytes;
use crate::utils::OptionExt;
use super::Percentiles;
#[derive(Clone, Debug, Default)]
struct PendingRaw {
cap_inc: CentsSats,
cap_dec: CentsSats,
investor_cap_inc: CentsSquaredSats,
investor_cap_dec: CentsSquaredSats,
}
#[derive(Clone, Debug)]
pub struct CostBasisData {
pathbuf: PathBuf,
state: Option<State>,
pending: FxHashMap<CentsUnsignedCompact, (Sats, Sats)>,
pending_raw: PendingRaw,
}
const STATE_TO_KEEP: usize = 10;
impl CostBasisData {
pub fn create(path: &Path, name: &str) -> Self {
Self {
pathbuf: path.join(format!("{name}_cost_basis")),
state: None,
pending: FxHashMap::default(),
pending_raw: PendingRaw::default(),
}
}
pub fn import_at_or_before(&mut self, height: Height) -> Result<Height> {
let files = self.read_dir(None)?;
let (&height, path) = files.range(..=height).next_back().ok_or(Error::NotFound(
"No cost basis state found at or before height".into(),
))?;
self.state = Some(State::deserialize(&fs::read(path)?)?);
self.pending.clear();
self.pending_raw = PendingRaw::default();
Ok(height)
}
fn assert_pending_empty(&self) {
assert!(
self.pending.is_empty() && self.pending_raw_is_zero(),
"CostBasisData: pending not empty, call apply_pending first"
);
}
fn pending_raw_is_zero(&self) -> bool {
self.pending_raw.cap_inc == CentsSats::ZERO
&& self.pending_raw.cap_dec == CentsSats::ZERO
&& self.pending_raw.investor_cap_inc == CentsSquaredSats::ZERO
&& self.pending_raw.investor_cap_dec == CentsSquaredSats::ZERO
}
pub fn iter(&self) -> impl Iterator<Item = (CentsUnsignedCompact, &Sats)> {
self.assert_pending_empty();
self.state.u().map.iter().map(|(&k, v)| (k, v))
}
pub fn range(
&self,
bounds: (Bound<CentsUnsignedCompact>, Bound<CentsUnsignedCompact>),
) -> impl Iterator<Item = (CentsUnsignedCompact, &Sats)> {
self.assert_pending_empty();
self.state.u().map.range(bounds).map(|(&k, v)| (k, v))
}
pub fn is_empty(&self) -> bool {
self.pending.is_empty() && self.state.u().map.is_empty()
}
pub fn first_key_value(&self) -> Option<(CentsUnsignedCompact, &Sats)> {
self.assert_pending_empty();
self.state.u().map.first_key_value().map(|(&k, v)| (k, v))
}
pub fn last_key_value(&self) -> Option<(CentsUnsignedCompact, &Sats)> {
self.assert_pending_empty();
self.state.u().map.last_key_value().map(|(&k, v)| (k, v))
}
/// Get the exact cap_raw value (not recomputed from map).
pub fn cap_raw(&self) -> CentsSats {
self.assert_pending_empty();
self.state.u().cap_raw
}
/// Get the exact investor_cap_raw value (not recomputed from map).
pub fn investor_cap_raw(&self) -> CentsSquaredSats {
self.assert_pending_empty();
self.state.u().investor_cap_raw
}
/// Increment with pre-computed typed values
pub fn increment(
&mut self,
price: CentsUnsigned,
sats: Sats,
price_sats: CentsSats,
investor_cap: CentsSquaredSats,
) {
self.pending.entry(price.into()).or_default().0 += sats;
self.pending_raw.cap_inc += price_sats;
if investor_cap != CentsSquaredSats::ZERO {
self.pending_raw.investor_cap_inc += investor_cap;
}
}
/// Decrement with pre-computed typed values
pub fn decrement(
&mut self,
price: CentsUnsigned,
sats: Sats,
price_sats: CentsSats,
investor_cap: CentsSquaredSats,
) {
self.pending.entry(price.into()).or_default().1 += sats;
self.pending_raw.cap_dec += price_sats;
if investor_cap != CentsSquaredSats::ZERO {
self.pending_raw.investor_cap_dec += investor_cap;
}
}
pub fn apply_pending(&mut self) {
for (cents, (inc, dec)) in self.pending.drain() {
let entry = self.state.um().map.entry(cents).or_default();
*entry += inc;
if *entry < dec {
panic!(
"CostBasisData::apply_pending underflow!\n\
Path: {:?}\n\
Price: {}\n\
Current + increments: {}\n\
Trying to decrement by: {}",
self.pathbuf,
cents.to_dollars(),
entry,
dec
);
}
*entry -= dec;
if *entry == Sats::ZERO {
self.state.um().map.remove(&cents);
}
}
// Apply raw values
let state = self.state.um();
state.cap_raw += self.pending_raw.cap_inc;
// Check for underflow before subtracting
if state.cap_raw.inner() < self.pending_raw.cap_dec.inner() {
panic!(
"CostBasisData::apply_pending cap_raw underflow!\n\
Path: {:?}\n\
Current cap_raw (after increments): {}\n\
Trying to decrement by: {}",
self.pathbuf, state.cap_raw, self.pending_raw.cap_dec
);
}
state.cap_raw -= self.pending_raw.cap_dec;
// Only process investor_cap if there are non-zero values
let has_investor_cap = self.pending_raw.investor_cap_inc != CentsSquaredSats::ZERO
|| self.pending_raw.investor_cap_dec != CentsSquaredSats::ZERO;
if has_investor_cap {
state.investor_cap_raw += self.pending_raw.investor_cap_inc;
if state.investor_cap_raw.inner() < self.pending_raw.investor_cap_dec.inner() {
panic!(
"CostBasisData::apply_pending investor_cap_raw underflow!\n\
Path: {:?}\n\
Current investor_cap_raw (after increments): {}\n\
Trying to decrement by: {}",
self.pathbuf, state.investor_cap_raw, self.pending_raw.investor_cap_dec
);
}
state.investor_cap_raw -= self.pending_raw.investor_cap_dec;
}
self.pending_raw = PendingRaw::default();
}
pub fn init(&mut self) {
self.state.replace(State::default());
self.pending.clear();
self.pending_raw = PendingRaw::default();
}
pub fn compute_percentiles(&self) -> Option<Percentiles> {
self.assert_pending_empty();
Percentiles::compute(self.iter().map(|(k, &v)| (k, v)))
}
pub fn clean(&mut self) -> Result<()> {
let _ = fs::remove_dir_all(&self.pathbuf);
fs::create_dir_all(&self.pathbuf)?;
Ok(())
}
fn read_dir(&self, keep_only_before: Option<Height>) -> Result<BTreeMap<Height, PathBuf>> {
Ok(fs::read_dir(&self.pathbuf)?
.filter_map(|entry| {
let path = entry.ok()?.path();
let name = path.file_name()?.to_str()?;
if let Ok(h) = name.parse::<u32>().map(Height::from) {
if keep_only_before.is_none_or(|height| h < height) {
Some((h, path))
} else {
let _ = fs::remove_file(path);
None
}
} else {
None
}
})
.collect::<BTreeMap<Height, PathBuf>>())
}
pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> {
self.apply_pending();
if cleanup {
let files = self.read_dir(Some(height))?;
for (_, path) in files
.iter()
.take(files.len().saturating_sub(STATE_TO_KEEP - 1))
{
fs::remove_file(path)?;
}
}
fs::write(self.path_state(height), self.state.u().serialize()?)?;
Ok(())
}
fn path_state(&self, height: Height) -> PathBuf {
self.pathbuf.join(u32::from(height).to_string())
}
}
#[derive(Clone, Default, Debug)]
struct State {
map: BTreeMap<CentsUnsignedCompact, Sats>,
/// Exact realized cap: Σ(price × sats)
cap_raw: CentsSats,
/// Exact investor cap: Σ(price² × sats)
investor_cap_raw: CentsSquaredSats,
}
impl State {
fn serialize(&self) -> vecdb::Result<Vec<u8>> {
let keys: Vec<u32> = self.map.keys().map(|k| k.inner()).collect();
let values: Vec<u64> = self.map.values().map(|v| u64::from(*v)).collect();
let config = ChunkConfig::default();
let compressed_keys = simple_compress(&keys, &config)?;
let compressed_values = simple_compress(&values, &config)?;
let mut buffer = Vec::new();
buffer.extend(keys.len().to_bytes());
buffer.extend(compressed_keys.len().to_bytes());
buffer.extend(compressed_values.len().to_bytes());
buffer.extend(compressed_keys);
buffer.extend(compressed_values);
buffer.extend(self.cap_raw.to_bytes());
buffer.extend(self.investor_cap_raw.to_bytes());
Ok(buffer)
}
fn deserialize(data: &[u8]) -> vecdb::Result<Self> {
let entry_count = usize::from_bytes(&data[0..8])?;
let keys_len = usize::from_bytes(&data[8..16])?;
let values_len = usize::from_bytes(&data[16..24])?;
let keys_start = 24;
let values_start = keys_start + keys_len;
let raw_start = values_start + values_len;
let keys: Vec<u32> = simple_decompress(&data[keys_start..values_start])?;
let values: Vec<u64> = simple_decompress(&data[values_start..raw_start])?;
let map: BTreeMap<CentsUnsignedCompact, Sats> = keys
.into_iter()
.zip(values)
.map(|(k, v)| (CentsUnsignedCompact::new(k), Sats::from(v)))
.collect();
assert_eq!(map.len(), entry_count);
let cap_raw = CentsSats::from_bytes(&data[raw_start..raw_start + 16])?;
let investor_cap_raw = CentsSquaredSats::from_bytes(&data[raw_start + 16..raw_start + 32])?;
Ok(Self {
map,
cap_raw,
investor_cap_raw,
})
}
}

View File

@@ -1,7 +1,9 @@
mod price_to_amount;
mod cost_basis_data;
mod percentiles;
mod realized;
mod unrealized;
pub use price_to_amount::*;
pub use cost_basis_data::*;
pub use percentiles::*;
pub use realized::*;
pub use unrealized::*;

View File

@@ -0,0 +1,66 @@
use brk_types::{CentsUnsigned, CentsUnsignedCompact, Sats};
use crate::internal::{PERCENTILES, PERCENTILES_LEN};
#[derive(Clone, Copy, Debug)]
pub struct Percentiles {
/// Sat-weighted: percentiles by coin count
pub sat_weighted: [CentsUnsigned; PERCENTILES_LEN],
/// USD-weighted: percentiles by invested capital (sats × price)
pub usd_weighted: [CentsUnsigned; PERCENTILES_LEN],
}
impl Percentiles {
/// Compute both sat-weighted and USD-weighted percentiles in a single pass.
/// Takes an iterator over (price, sats) pairs, assumed sorted by price ascending.
pub fn compute(iter: impl Iterator<Item = (CentsUnsignedCompact, Sats)>) -> Option<Self> {
// Collect to allow two passes: one for totals, one for percentiles
let entries: Vec<_> = iter.collect();
if entries.is_empty() {
return None;
}
// Compute totals
let mut total_sats: u64 = 0;
let mut total_usd: u128 = 0;
for &(cents, sats) in &entries {
total_sats += u64::from(sats);
total_usd += cents.as_u128() * sats.as_u128();
}
if total_sats == 0 {
return None;
}
let mut sat_weighted = [CentsUnsigned::ZERO; PERCENTILES_LEN];
let mut usd_weighted = [CentsUnsigned::ZERO; PERCENTILES_LEN];
let mut cumsum_sats: u64 = 0;
let mut cumsum_usd: u128 = 0;
let mut sat_idx = 0;
let mut usd_idx = 0;
for (cents, sats) in entries {
cumsum_sats += u64::from(sats);
cumsum_usd += cents.as_u128() * sats.as_u128();
while sat_idx < PERCENTILES_LEN
&& cumsum_sats >= total_sats * u64::from(PERCENTILES[sat_idx]) / 100
{
sat_weighted[sat_idx] = cents.into();
sat_idx += 1;
}
while usd_idx < PERCENTILES_LEN
&& cumsum_usd >= total_usd * u128::from(PERCENTILES[usd_idx]) / 100
{
usd_weighted[usd_idx] = cents.into();
usd_idx += 1;
}
}
Some(Self {
sat_weighted,
usd_weighted,
})
}
}

View File

@@ -1,272 +0,0 @@
use std::{
collections::BTreeMap,
fs,
ops::Bound,
path::{Path, PathBuf},
};
use brk_error::{Error, Result};
use brk_types::{CentsCompact, Dollars, Height, Sats, SupplyState};
use derive_more::{Deref, DerefMut};
use pco::{standalone::{simple_compress, simple_decompress}, ChunkConfig};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use vecdb::Bytes;
use crate::{
internal::{PERCENTILES, PERCENTILES_LEN},
utils::OptionExt,
};
#[derive(Clone, Debug)]
pub struct PriceToAmount {
pathbuf: PathBuf,
state: Option<State>,
/// Pending deltas: (total_increment, total_decrement) per price.
/// Flushed to BTreeMap before reads and at end of block.
pending: FxHashMap<CentsCompact, (Sats, Sats)>,
}
const STATE_AT_: &str = "state_at_";
const STATE_TO_KEEP: usize = 10;
impl PriceToAmount {
pub fn create(path: &Path, name: &str) -> Self {
Self {
pathbuf: path.join(format!("{name}_price_to_amount")),
state: None,
pending: FxHashMap::default(),
}
}
pub fn import_at_or_before(&mut self, height: Height) -> Result<Height> {
let files = self.read_dir(None)?;
let (&height, path) = files.range(..=height).next_back().ok_or(Error::NotFound(
"No price state found at or before height".into(),
))?;
self.state = Some(State::deserialize(&fs::read(path)?)?);
self.pending.clear();
Ok(height)
}
fn assert_pending_empty(&self) {
assert!(
self.pending.is_empty(),
"PriceToAmount: pending not empty, call apply_pending first"
);
}
pub fn iter(&self) -> impl Iterator<Item = (Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().iter().map(|(k, v)| (k.to_dollars(), v))
}
/// Iterate over entries in a price range with explicit bounds.
pub fn range(
&self,
bounds: (Bound<Dollars>, Bound<Dollars>),
) -> impl Iterator<Item = (Dollars, &Sats)> {
self.assert_pending_empty();
let start = match bounds.0 {
Bound::Included(d) => Bound::Included(CentsCompact::from(d)),
Bound::Excluded(d) => Bound::Excluded(CentsCompact::from(d)),
Bound::Unbounded => Bound::Unbounded,
};
let end = match bounds.1 {
Bound::Included(d) => Bound::Included(CentsCompact::from(d)),
Bound::Excluded(d) => Bound::Excluded(CentsCompact::from(d)),
Bound::Unbounded => Bound::Unbounded,
};
self.state
.u()
.range((start, end))
.map(|(k, v)| (k.to_dollars(), v))
}
pub fn is_empty(&self) -> bool {
self.pending.is_empty() && self.state.u().is_empty()
}
pub fn first_key_value(&self) -> Option<(Dollars, &Sats)> {
self.assert_pending_empty();
self.state
.u()
.first_key_value()
.map(|(k, v)| (k.to_dollars(), v))
}
pub fn last_key_value(&self) -> Option<(Dollars, &Sats)> {
self.assert_pending_empty();
self.state
.u()
.last_key_value()
.map(|(k, v)| (k.to_dollars(), v))
}
/// Accumulate increment in pending batch. O(1).
pub fn increment(&mut self, price: Dollars, supply_state: &SupplyState) {
self.pending.entry(CentsCompact::from(price)).or_default().0 += supply_state.value;
}
/// Accumulate decrement in pending batch. O(1).
pub fn decrement(&mut self, price: Dollars, supply_state: &SupplyState) {
self.pending.entry(CentsCompact::from(price)).or_default().1 += supply_state.value;
}
/// Apply pending deltas to BTreeMap. O(k log n) where k = unique prices in pending.
/// Must be called before any read operations.
pub fn apply_pending(&mut self) {
for (cents, (inc, dec)) in self.pending.drain() {
let entry = self.state.um().entry(cents).or_default();
*entry += inc;
if *entry < dec {
panic!(
"PriceToAmount::apply_pending underflow!\n\
Path: {:?}\n\
Price: {}\n\
Current + increments: {}\n\
Trying to decrement by: {}",
self.pathbuf,
cents.to_dollars(),
entry,
dec
);
}
*entry -= dec;
if *entry == Sats::ZERO {
self.state.um().remove(&cents);
}
}
}
pub fn init(&mut self) {
self.state.replace(State::default());
self.pending.clear();
}
/// Compute percentile prices by iterating the BTreeMap directly.
/// O(n) where n = number of unique prices.
pub fn compute_percentiles(&self) -> [Dollars; PERCENTILES_LEN] {
self.assert_pending_empty();
let state = match self.state.as_ref() {
Some(s) if !s.is_empty() => s,
_ => return [Dollars::NAN; PERCENTILES_LEN],
};
let total: u64 = state.values().map(|&s| u64::from(s)).sum();
if total == 0 {
return [Dollars::NAN; PERCENTILES_LEN];
}
let mut result = [Dollars::NAN; PERCENTILES_LEN];
let mut cumsum = 0u64;
let mut idx = 0;
for (&cents, &amount) in state.iter() {
cumsum += u64::from(amount);
while idx < PERCENTILES_LEN && cumsum >= total * u64::from(PERCENTILES[idx]) / 100 {
result[idx] = cents.to_dollars();
idx += 1;
}
}
result
}
pub fn clean(&mut self) -> Result<()> {
let _ = fs::remove_dir_all(&self.pathbuf);
fs::create_dir_all(&self.pathbuf)?;
Ok(())
}
fn read_dir(&self, keep_only_before: Option<Height>) -> Result<BTreeMap<Height, PathBuf>> {
Ok(fs::read_dir(&self.pathbuf)?
.filter_map(|entry| {
let path = entry.ok()?.path();
let name = path.file_name()?.to_str()?;
let height_str = name.strip_prefix(STATE_AT_).unwrap_or(name);
if let Ok(h) = height_str.parse::<u32>().map(Height::from) {
if keep_only_before.is_none_or(|height| h < height) {
Some((h, path))
} else {
let _ = fs::remove_file(path);
None
}
} else {
None
}
})
.collect::<BTreeMap<Height, PathBuf>>())
}
/// Flush state to disk, optionally cleaning up old state files.
pub fn write(&mut self, height: Height, cleanup: bool) -> Result<()> {
self.apply_pending();
if cleanup {
let files = self.read_dir(Some(height))?;
for (_, path) in files
.iter()
.take(files.len().saturating_sub(STATE_TO_KEEP - 1))
{
fs::remove_file(path)?;
}
}
fs::write(self.path_state(height), self.state.u().serialize()?)?;
Ok(())
}
/// Path of the state file for `height` inside this instance's directory.
fn path_state(&self, height: Height) -> PathBuf {
Self::path_state_(&self.pathbuf, height)
}
/// Build the state-file path for `height` under `path`.
///
/// The file name is the bare decimal height (no prefix, no extension),
/// which `read_dir` accepts via its `unwrap_or(name)` fallback.
fn path_state_(path: &Path, height: Height) -> PathBuf {
    let file_name = u32::from(height).to_string();
    path.join(file_name)
}
}
/// Price-bucketed holdings: maps a compact price in cents to the total sats
/// held at that price. BTreeMap keeps entries price-ascending, which the
/// percentile scan relies on.
#[derive(Clone, Default, Debug, Deref, DerefMut, Serialize, Deserialize)]
struct State(BTreeMap<CentsCompact, Sats>);
impl State {
/// Serialize to a compact binary buffer.
///
/// Layout (offsets established by `deserialize` below):
///   [0..8)   entry count (via `to_bytes` — assumed 8 bytes wide, matching
///            the fixed slices read back in `deserialize`)
///   [8..16)  length of the compressed-keys section
///   [16..16+keys_len)  compressed keys (i32, from CentsCompact)
///   [16+keys_len..)    compressed values (u64, from Sats)
/// The values section needs no stored length: it is the remainder.
fn serialize(&self) -> vecdb::Result<Vec<u8>> {
// Keys and values are split into parallel arrays so each can be
// compressed with the codec best suited to its numeric range.
let keys: Vec<i32> = self.keys().map(|k| i32::from(*k)).collect();
let values: Vec<u64> = self.values().map(|v| u64::from(*v)).collect();
let config = ChunkConfig::default();
let compressed_keys = simple_compress(&keys, &config)?;
let compressed_values = simple_compress(&values, &config)?;
let mut buffer = Vec::new();
// Order matters: header fields first, then keys, then values.
buffer.extend(keys.len().to_bytes());
buffer.extend(compressed_keys.len().to_bytes());
buffer.extend(compressed_keys);
buffer.extend(compressed_values);
Ok(buffer)
}
/// Reconstruct a `State` from the layout produced by `serialize`.
///
/// NOTE(review): slicing panics (rather than returning Err) if `data` is
/// shorter than 16 bytes or truncated — acceptable only if inputs are
/// always files this code wrote; confirm no untrusted data reaches here.
fn deserialize(data: &[u8]) -> vecdb::Result<Self> {
let entry_count = usize::from_bytes(&data[0..8])?;
let keys_len = usize::from_bytes(&data[8..16])?;
let keys: Vec<i32> = simple_decompress(&data[16..16 + keys_len])?;
// Values occupy everything after the keys section.
let values: Vec<u64> = simple_decompress(&data[16 + keys_len..])?;
let map: BTreeMap<CentsCompact, Sats> = keys
.into_iter()
.zip(values)
.map(|(k, v)| (CentsCompact::from(k), Sats::from(v)))
.collect();
// entry_count is stored purely as an integrity check on the round-trip;
// a mismatch means corruption, so panicking via assert is deliberate.
assert_eq!(map.len(), entry_count);
Ok(Self(map))
}
}

View File

@@ -1,88 +1,222 @@
use std::cmp::Ordering;
use brk_types::{CheckedSub, Dollars, SupplyState};
use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, Sats};
/// Realized state using u128 for raw cent*sat values internally.
/// This avoids overflow and defers division to output time for efficiency.
#[derive(Debug, Default, Clone)]
pub struct RealizedState {
pub cap: Dollars,
pub profit: Dollars,
pub loss: Dollars,
pub value_created: Dollars,
pub value_destroyed: Dollars,
/// Raw realized cap: Σ(price × sats)
cap_raw: u128,
/// Raw investor cap: Σ(price² × sats)
/// investor_price = investor_cap_raw / cap_raw (gives cents directly)
investor_cap_raw: CentsSquaredSats,
/// Raw realized profit (cents * sats)
profit_raw: u128,
/// Raw realized loss (cents * sats)
loss_raw: u128,
/// sell_price × sats for profit cases
profit_value_created_raw: u128,
/// cost_basis × sats for profit cases
profit_value_destroyed_raw: u128,
/// sell_price × sats for loss cases
loss_value_created_raw: u128,
/// cost_basis × sats for loss cases (= capitulation_flow)
loss_value_destroyed_raw: u128,
/// Raw realized ATH regret: Σ((ath - sell_price) × sats)
ath_regret_raw: u128,
}
impl RealizedState {
pub const NAN: Self = Self {
cap: Dollars::NAN,
profit: Dollars::NAN,
loss: Dollars::NAN,
value_created: Dollars::NAN,
value_destroyed: Dollars::NAN,
};
/// Get realized cap as CentsUnsigned (divides by ONE_BTC).
#[inline]
pub fn cap(&self) -> CentsUnsigned {
CentsUnsigned::new((self.cap_raw / Sats::ONE_BTC_U128) as u64)
}
/// Set cap_raw directly from persisted value.
#[inline]
pub fn set_cap_raw(&mut self, cap_raw: CentsSats) {
self.cap_raw = cap_raw.inner();
}
/// Set investor_cap_raw directly from persisted value.
#[inline]
pub fn set_investor_cap_raw(&mut self, investor_cap_raw: CentsSquaredSats) {
self.investor_cap_raw = investor_cap_raw;
}
/// Get investor price as CentsUnsigned.
/// investor_price = Σ(price² × sats) / Σ(price × sats)
/// This is the dollar-weighted average acquisition price.
#[inline]
pub fn investor_price(&self) -> CentsUnsigned {
if self.cap_raw == 0 {
return CentsUnsigned::ZERO;
}
CentsUnsigned::new((self.investor_cap_raw / self.cap_raw) as u64)
}
/// Get raw realized cap for aggregation.
#[inline]
pub fn cap_raw(&self) -> CentsSats {
CentsSats::new(self.cap_raw)
}
/// Get raw investor cap for aggregation.
#[inline]
pub fn investor_cap_raw(&self) -> CentsSquaredSats {
self.investor_cap_raw
}
/// Get realized profit as CentsUnsigned.
#[inline]
pub fn profit(&self) -> CentsUnsigned {
CentsUnsigned::new((self.profit_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get realized loss as CentsUnsigned.
#[inline]
pub fn loss(&self) -> CentsUnsigned {
CentsUnsigned::new((self.loss_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get value created as CentsUnsigned (derived from profit + loss splits).
#[inline]
pub fn value_created(&self) -> CentsUnsigned {
let raw = self.profit_value_created_raw + self.loss_value_created_raw;
CentsUnsigned::new((raw / Sats::ONE_BTC_U128) as u64)
}
/// Get value destroyed as CentsUnsigned (derived from profit + loss splits).
#[inline]
pub fn value_destroyed(&self) -> CentsUnsigned {
let raw = self.profit_value_destroyed_raw + self.loss_value_destroyed_raw;
CentsUnsigned::new((raw / Sats::ONE_BTC_U128) as u64)
}
/// Get profit value created as CentsUnsigned (sell_price × sats for profit cases).
#[inline]
pub fn profit_value_created(&self) -> CentsUnsigned {
CentsUnsigned::new((self.profit_value_created_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get profit value destroyed as CentsUnsigned (cost_basis × sats for profit cases).
/// This is also known as profit_flow.
#[inline]
pub fn profit_value_destroyed(&self) -> CentsUnsigned {
CentsUnsigned::new((self.profit_value_destroyed_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get loss value created as CentsUnsigned (sell_price × sats for loss cases).
#[inline]
pub fn loss_value_created(&self) -> CentsUnsigned {
CentsUnsigned::new((self.loss_value_created_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get loss value destroyed as CentsUnsigned (cost_basis × sats for loss cases).
/// This is also known as capitulation_flow.
#[inline]
pub fn loss_value_destroyed(&self) -> CentsUnsigned {
CentsUnsigned::new((self.loss_value_destroyed_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get capitulation flow as CentsUnsigned.
/// This is the invested capital (cost_basis × sats) sold at a loss.
/// Alias for loss_value_destroyed.
#[inline]
pub fn capitulation_flow(&self) -> CentsUnsigned {
self.loss_value_destroyed()
}
/// Get profit flow as CentsUnsigned.
/// This is the invested capital (cost_basis × sats) sold at a profit.
/// Alias for profit_value_destroyed.
#[inline]
pub fn profit_flow(&self) -> CentsUnsigned {
self.profit_value_destroyed()
}
/// Get realized ATH regret as CentsUnsigned.
/// This is Σ((ath - sell_price) × sats) - how much more could have been made
/// by selling at ATH instead of when actually sold.
#[inline]
pub fn ath_regret(&self) -> CentsUnsigned {
CentsUnsigned::new((self.ath_regret_raw / Sats::ONE_BTC_U128) as u64)
}
pub fn reset_single_iteration_values(&mut self) {
if self.cap != Dollars::NAN {
self.profit = Dollars::ZERO;
self.loss = Dollars::ZERO;
self.value_created = Dollars::ZERO;
self.value_destroyed = Dollars::ZERO;
}
self.profit_raw = 0;
self.loss_raw = 0;
self.profit_value_created_raw = 0;
self.profit_value_destroyed_raw = 0;
self.loss_value_created_raw = 0;
self.loss_value_destroyed_raw = 0;
self.ath_regret_raw = 0;
}
pub fn increment(&mut self, supply_state: &SupplyState, price: Dollars) {
if supply_state.value.is_zero() {
/// Increment using pre-computed values (for UTXO path)
#[inline]
pub fn increment(&mut self, price: CentsUnsigned, sats: Sats) {
if sats.is_zero() {
return;
}
self.increment_(price * supply_state.value)
let price_sats = CentsSats::from_price_sats(price, sats);
self.cap_raw += price_sats.as_u128();
self.investor_cap_raw += price_sats.to_investor_cap(price);
}
pub fn increment_(&mut self, realized_cap: Dollars) {
if self.cap == Dollars::NAN {
self.cap = Dollars::ZERO;
self.profit = Dollars::ZERO;
self.loss = Dollars::ZERO;
self.value_created = Dollars::ZERO;
self.value_destroyed = Dollars::ZERO;
}
self.cap += realized_cap;
/// Increment using pre-computed snapshot values (for address path)
#[inline]
pub fn increment_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) {
self.cap_raw += price_sats.as_u128();
self.investor_cap_raw += investor_cap;
}
pub fn decrement(&mut self, supply_state: &SupplyState, price: Dollars) {
self.decrement_(price * supply_state.value);
/// Decrement using pre-computed snapshot values (for address path)
#[inline]
pub fn decrement_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) {
self.cap_raw -= price_sats.as_u128();
self.investor_cap_raw -= investor_cap;
}
pub fn decrement_(&mut self, realized_cap: Dollars) {
self.cap = self.cap.checked_sub(realized_cap).unwrap();
}
pub fn receive(&mut self, supply_state: &SupplyState, current_price: Dollars) {
self.increment(supply_state, current_price);
#[inline]
pub fn receive(&mut self, price: CentsUnsigned, sats: Sats) {
self.increment(price, sats);
}
/// Send with pre-computed typed values. Inlines decrement to avoid recomputation.
#[inline]
pub fn send(
&mut self,
supply_state: &SupplyState,
current_price: Dollars,
prev_price: Dollars,
current_ps: CentsSats,
prev_ps: CentsSats,
ath_ps: CentsSats,
prev_investor_cap: CentsSquaredSats,
) {
let current_value = current_price * supply_state.value;
let prev_value = prev_price * supply_state.value;
self.value_created += current_value;
self.value_destroyed += prev_value;
match current_price.cmp(&prev_price) {
match current_ps.cmp(&prev_ps) {
Ordering::Greater => {
self.profit += current_value.checked_sub(prev_value).unwrap();
self.profit_raw += (current_ps - prev_ps).as_u128();
self.profit_value_created_raw += current_ps.as_u128();
self.profit_value_destroyed_raw += prev_ps.as_u128();
}
Ordering::Less => {
self.loss += prev_value.checked_sub(current_value).unwrap();
self.loss_raw += (prev_ps - current_ps).as_u128();
self.loss_value_created_raw += current_ps.as_u128();
self.loss_value_destroyed_raw += prev_ps.as_u128();
}
Ordering::Equal => {
// Break-even: count as profit side (arbitrary but consistent)
self.profit_value_created_raw += current_ps.as_u128();
self.profit_value_destroyed_raw += prev_ps.as_u128();
}
Ordering::Equal => {}
}
self.decrement(supply_state, prev_price);
// Track ATH regret: (ath - sell_price) × sats
self.ath_regret_raw += (ath_ps - current_ps).as_u128();
// Inline decrement to avoid recomputation
self.cap_raw -= prev_ps.as_u128();
self.investor_cap_raw -= prev_investor_cap;
}
}

View File

@@ -1,253 +1,328 @@
use std::ops::Bound;
use brk_types::{CentsUnsigned, Dollars, Sats};
use vecdb::CheckedSub;
use brk_types::{CentsUnsigned, CentsUnsignedCompact, Sats};
use super::price_to_amount::PriceToAmount;
use super::cost_basis_data::CostBasisData;
#[derive(Debug, Default, Clone)]
pub struct UnrealizedState {
pub supply_in_profit: Sats,
pub supply_in_loss: Sats,
pub unrealized_profit: Dollars,
pub unrealized_loss: Dollars,
/// Invested capital in profit: Σ(sats × price) where price <= spot
pub invested_capital_in_profit: Dollars,
/// Invested capital in loss: Σ(sats × price) where price > spot
pub invested_capital_in_loss: Dollars,
pub unrealized_profit: CentsUnsigned,
pub unrealized_loss: CentsUnsigned,
pub invested_capital_in_profit: CentsUnsigned,
pub invested_capital_in_loss: CentsUnsigned,
/// Raw Σ(price² × sats) for UTXOs in profit. Used for aggregation.
pub investor_cap_in_profit_raw: u128,
/// Raw Σ(price² × sats) for UTXOs in loss. Used for aggregation.
pub investor_cap_in_loss_raw: u128,
/// Raw Σ(price × sats) for UTXOs in profit. Used for aggregation.
pub invested_capital_in_profit_raw: u128,
/// Raw Σ(price × sats) for UTXOs in loss. Used for aggregation.
pub invested_capital_in_loss_raw: u128,
}
impl UnrealizedState {
pub const NAN: Self = Self {
supply_in_profit: Sats::ZERO,
supply_in_loss: Sats::ZERO,
unrealized_profit: Dollars::NAN,
unrealized_loss: Dollars::NAN,
invested_capital_in_profit: Dollars::NAN,
invested_capital_in_loss: Dollars::NAN,
};
pub const ZERO: Self = Self {
supply_in_profit: Sats::ZERO,
supply_in_loss: Sats::ZERO,
unrealized_profit: Dollars::ZERO,
unrealized_loss: Dollars::ZERO,
invested_capital_in_profit: Dollars::ZERO,
invested_capital_in_loss: Dollars::ZERO,
unrealized_profit: CentsUnsigned::ZERO,
unrealized_loss: CentsUnsigned::ZERO,
invested_capital_in_profit: CentsUnsigned::ZERO,
invested_capital_in_loss: CentsUnsigned::ZERO,
investor_cap_in_profit_raw: 0,
investor_cap_in_loss_raw: 0,
invested_capital_in_profit_raw: 0,
invested_capital_in_loss_raw: 0,
};
/// Compute pain_index from raw values.
/// pain_index = investor_price_of_losers - spot
#[inline]
pub fn pain_index(&self, spot: CentsUnsigned) -> CentsUnsigned {
if self.invested_capital_in_loss_raw == 0 {
return CentsUnsigned::ZERO;
}
let investor_price_losers =
self.investor_cap_in_loss_raw / self.invested_capital_in_loss_raw;
CentsUnsigned::new((investor_price_losers - spot.as_u128()) as u64)
}
/// Compute greed_index from raw values.
/// greed_index = spot - investor_price_of_winners
#[inline]
pub fn greed_index(&self, spot: CentsUnsigned) -> CentsUnsigned {
if self.invested_capital_in_profit_raw == 0 {
return CentsUnsigned::ZERO;
}
let investor_price_winners =
self.investor_cap_in_profit_raw / self.invested_capital_in_profit_raw;
CentsUnsigned::new((spot.as_u128() - investor_price_winners) as u64)
}
}
/// Internal cache state using u128 for raw cent*sat values.
/// This avoids rounding errors from premature division by ONE_BTC.
/// Division happens only when converting to UnrealizedState output.
#[derive(Debug, Default, Clone)]
struct CachedStateRaw {
supply_in_profit: Sats,
supply_in_loss: Sats,
/// Raw value: sum of (price_cents * sats) for UTXOs in profit
unrealized_profit: u128,
/// Raw value: sum of (price_cents * sats) for UTXOs in loss
unrealized_loss: u128,
/// Raw value: sum of (price_cents * sats) for UTXOs in profit
invested_capital_in_profit: u128,
/// Raw value: sum of (price_cents * sats) for UTXOs in loss
invested_capital_in_loss: u128,
/// Raw value: sum of (price_cents² * sats) for UTXOs in profit
investor_cap_in_profit: u128,
/// Raw value: sum of (price_cents² * sats) for UTXOs in loss
investor_cap_in_loss: u128,
}
impl CachedStateRaw {
/// Convert raw values to final output by dividing by ONE_BTC.
fn to_output(&self) -> UnrealizedState {
UnrealizedState {
supply_in_profit: self.supply_in_profit,
supply_in_loss: self.supply_in_loss,
unrealized_profit: CentsUnsigned::new(
(self.unrealized_profit / Sats::ONE_BTC_U128) as u64,
),
unrealized_loss: CentsUnsigned::new(
(self.unrealized_loss / Sats::ONE_BTC_U128) as u64,
),
invested_capital_in_profit: CentsUnsigned::new(
(self.invested_capital_in_profit / Sats::ONE_BTC_U128) as u64,
),
invested_capital_in_loss: CentsUnsigned::new(
(self.invested_capital_in_loss / Sats::ONE_BTC_U128) as u64,
),
investor_cap_in_profit_raw: self.investor_cap_in_profit,
investor_cap_in_loss_raw: self.investor_cap_in_loss,
invested_capital_in_profit_raw: self.invested_capital_in_profit,
invested_capital_in_loss_raw: self.invested_capital_in_loss,
}
}
}
/// Cached unrealized state for O(k) incremental updates.
/// k = number of entries in price flip range (typically tiny).
#[derive(Debug, Clone)]
pub struct CachedUnrealizedState {
pub state: UnrealizedState,
at_price: Dollars,
state: CachedStateRaw,
at_price: CentsUnsignedCompact,
}
impl CachedUnrealizedState {
/// Create new cache by computing from scratch. O(n).
pub fn compute_fresh(price: Dollars, price_to_amount: &PriceToAmount) -> Self {
let state = Self::compute_full_standalone(price, price_to_amount);
Self {
state,
at_price: price,
}
pub fn compute_fresh(price: CentsUnsigned, cost_basis_data: &CostBasisData) -> Self {
let price: CentsUnsignedCompact = price.into();
let state = Self::compute_raw(price, cost_basis_data);
Self { state, at_price: price }
}
/// Get the current cached state as output (without price update).
pub fn current_state(&self) -> UnrealizedState {
self.state.to_output()
}
/// Get unrealized state at new_price. O(k) where k = flip range size.
pub fn get_at_price(
&mut self,
new_price: Dollars,
price_to_amount: &PriceToAmount,
) -> &UnrealizedState {
new_price: CentsUnsigned,
cost_basis_data: &CostBasisData,
) -> UnrealizedState {
let new_price: CentsUnsignedCompact = new_price.into();
if new_price != self.at_price {
self.update_for_price_change(new_price, price_to_amount);
self.update_for_price_change(new_price, cost_basis_data);
}
&self.state
self.state.to_output()
}
/// Update cached state when a receive happens.
/// Determines profit/loss classification relative to cached price.
pub fn on_receive(&mut self, purchase_price: Dollars, sats: Sats) {
let invested_capital = purchase_price * sats;
if purchase_price <= self.at_price {
pub fn on_receive(&mut self, price: CentsUnsigned, sats: Sats) {
let price: CentsUnsignedCompact = price.into();
let sats_u128 = sats.as_u128();
let price_u128 = price.as_u128();
let invested_capital = price_u128 * sats_u128;
let investor_cap = price_u128 * invested_capital;
if price <= self.at_price {
self.state.supply_in_profit += sats;
self.state.invested_capital_in_profit += invested_capital;
if purchase_price < self.at_price {
let diff = self.at_price.checked_sub(purchase_price).unwrap();
self.state.unrealized_profit += diff * sats;
self.state.investor_cap_in_profit += investor_cap;
if price < self.at_price {
let diff = (self.at_price - price).as_u128();
self.state.unrealized_profit += diff * sats_u128;
}
} else {
self.state.supply_in_loss += sats;
self.state.invested_capital_in_loss += invested_capital;
let diff = purchase_price.checked_sub(self.at_price).unwrap();
self.state.unrealized_loss += diff * sats;
self.state.investor_cap_in_loss += investor_cap;
let diff = (price - self.at_price).as_u128();
self.state.unrealized_loss += diff * sats_u128;
}
}
/// Update cached state when a send happens from historical price.
pub fn on_send(&mut self, historical_price: Dollars, sats: Sats) {
let invested_capital = historical_price * sats;
if historical_price <= self.at_price {
// Was in profit
pub fn on_send(&mut self, price: CentsUnsigned, sats: Sats) {
let price: CentsUnsignedCompact = price.into();
let sats_u128 = sats.as_u128();
let price_u128 = price.as_u128();
let invested_capital = price_u128 * sats_u128;
let investor_cap = price_u128 * invested_capital;
if price <= self.at_price {
self.state.supply_in_profit -= sats;
self.state.invested_capital_in_profit = self
.state
.invested_capital_in_profit
.checked_sub(invested_capital)
.unwrap();
if historical_price < self.at_price {
let diff = self.at_price.checked_sub(historical_price).unwrap();
let profit_removed = diff * sats;
self.state.unrealized_profit = self
.state
.unrealized_profit
.checked_sub(profit_removed)
.unwrap_or(Dollars::ZERO);
self.state.invested_capital_in_profit -= invested_capital;
self.state.investor_cap_in_profit -= investor_cap;
if price < self.at_price {
let diff = (self.at_price - price).as_u128();
self.state.unrealized_profit -= diff * sats_u128;
}
} else {
// Was in loss
self.state.supply_in_loss -= sats;
self.state.invested_capital_in_loss = self
.state
.invested_capital_in_loss
.checked_sub(invested_capital)
.unwrap();
let diff = historical_price.checked_sub(self.at_price).unwrap();
let loss_removed = diff * sats;
self.state.unrealized_loss = self
.state
.unrealized_loss
.checked_sub(loss_removed)
.unwrap_or(Dollars::ZERO);
self.state.invested_capital_in_loss -= invested_capital;
self.state.investor_cap_in_loss -= investor_cap;
let diff = (price - self.at_price).as_u128();
self.state.unrealized_loss -= diff * sats_u128;
}
}
/// Incremental update for price change. O(k) where k = entries in flip range.
fn update_for_price_change(&mut self, new_price: Dollars, price_to_amount: &PriceToAmount) {
fn update_for_price_change(
&mut self,
new_price: CentsUnsignedCompact,
cost_basis_data: &CostBasisData,
) {
let old_price = self.at_price;
let delta_f64 = f64::from(new_price) - f64::from(old_price);
// Update profit/loss for entries that DON'T flip
// Profit changes by delta * supply_in_profit
// Loss changes by -delta * supply_in_loss
if delta_f64 > 0.0 {
// Price went up: profits increase, losses decrease
self.state.unrealized_profit += Dollars::from(delta_f64) * self.state.supply_in_profit;
let loss_decrease = Dollars::from(delta_f64) * self.state.supply_in_loss;
self.state.unrealized_loss = self
.state
.unrealized_loss
.checked_sub(loss_decrease)
.unwrap_or(Dollars::ZERO);
} else if delta_f64 < 0.0 {
// Price went down: profits decrease, losses increase
let profit_decrease = Dollars::from(-delta_f64) * self.state.supply_in_profit;
self.state.unrealized_profit = self
.state
.unrealized_profit
.checked_sub(profit_decrease)
.unwrap_or(Dollars::ZERO);
self.state.unrealized_loss += Dollars::from(-delta_f64) * self.state.supply_in_loss;
}
// Handle flipped entries (only iterate the small range between prices)
if new_price > old_price {
// Price went up: entries where old < price <= new flip from loss to profit
let delta = (new_price - old_price).as_u128();
// Save original supply for delta calculation (before crossing UTXOs move)
let original_supply_in_profit = self.state.supply_in_profit.as_u128();
// First, process UTXOs crossing from loss to profit
// Range (old_price, new_price] means: old_price < price <= new_price
for (price, &sats) in
price_to_amount.range((Bound::Excluded(old_price), Bound::Included(new_price)))
cost_basis_data.range((Bound::Excluded(old_price), Bound::Included(new_price)))
{
// Move from loss to profit
let sats_u128 = sats.as_u128();
let price_u128 = price.as_u128();
let invested_capital = price_u128 * sats_u128;
let investor_cap = price_u128 * invested_capital;
// Move between buckets
self.state.supply_in_loss -= sats;
self.state.supply_in_profit += sats;
self.state.invested_capital_in_loss -= invested_capital;
self.state.invested_capital_in_profit += invested_capital;
self.state.investor_cap_in_loss -= investor_cap;
self.state.investor_cap_in_profit += investor_cap;
// Undo the loss adjustment applied above for this entry
// We decreased loss by delta * sats, but this entry should be removed entirely
// Original loss: (price - old_price) * sats
// After global adjustment: original - delta * sats (negative, wrong)
// Correct: 0 (removed from loss)
// Correction: add back delta * sats, then add original loss
let delta_adj = Dollars::from(delta_f64) * sats;
self.state.unrealized_loss += delta_adj;
if price > old_price {
let original_loss = price.checked_sub(old_price).unwrap() * sats;
self.state.unrealized_loss += original_loss;
}
// Remove their original contribution to unrealized_loss
// (price > old_price is always true due to Bound::Excluded)
let original_loss = (price - old_price).as_u128();
self.state.unrealized_loss -= original_loss * sats_u128;
// Undo the profit adjustment applied above for this entry
// We increased profit by delta * sats, but this entry was not in profit before
// Correct profit: (new_price - price) * sats
// Correction: subtract delta * sats, add correct profit
let profit_adj = Dollars::from(delta_f64) * sats;
self.state.unrealized_profit = self
.state
.unrealized_profit
.checked_sub(profit_adj)
.unwrap_or(Dollars::ZERO);
if new_price > price {
let correct_profit = new_price.checked_sub(price).unwrap() * sats;
self.state.unrealized_profit += correct_profit;
// Add their new contribution to unrealized_profit (if not at boundary)
if price < new_price {
let new_profit = (new_price - price).as_u128();
self.state.unrealized_profit += new_profit * sats_u128;
}
}
// Apply delta to non-crossing UTXOs only
// Non-crossing profit UTXOs: their profit increases by delta
self.state.unrealized_profit += delta * original_supply_in_profit;
// Non-crossing loss UTXOs: their loss decreases by delta
let non_crossing_loss_sats =
self.state.supply_in_loss.as_u128(); // Already excludes crossing
self.state.unrealized_loss -= delta * non_crossing_loss_sats;
} else if new_price < old_price {
// Price went down: entries where new < price <= old flip from profit to loss
let delta = (old_price - new_price).as_u128();
// Save original supply for delta calculation (before crossing UTXOs move)
let original_supply_in_loss = self.state.supply_in_loss.as_u128();
// First, process UTXOs crossing from profit to loss
// Range (new_price, old_price] means: new_price < price <= old_price
for (price, &sats) in
price_to_amount.range((Bound::Excluded(new_price), Bound::Included(old_price)))
cost_basis_data.range((Bound::Excluded(new_price), Bound::Included(old_price)))
{
// Move from profit to loss
let sats_u128 = sats.as_u128();
let price_u128 = price.as_u128();
let invested_capital = price_u128 * sats_u128;
let investor_cap = price_u128 * invested_capital;
// Move between buckets
self.state.supply_in_profit -= sats;
self.state.supply_in_loss += sats;
self.state.invested_capital_in_profit -= invested_capital;
self.state.invested_capital_in_loss += invested_capital;
self.state.investor_cap_in_profit -= investor_cap;
self.state.investor_cap_in_loss += investor_cap;
// Undo the profit adjustment applied above for this entry
let delta_adj = Dollars::from(-delta_f64) * sats;
self.state.unrealized_profit += delta_adj;
if old_price > price {
let original_profit = old_price.checked_sub(price).unwrap() * sats;
self.state.unrealized_profit += original_profit;
// Remove their original contribution to unrealized_profit (if not at boundary)
if price < old_price {
let original_profit = (old_price - price).as_u128();
self.state.unrealized_profit -= original_profit * sats_u128;
}
// Undo the loss adjustment applied above for this entry
let loss_adj = Dollars::from(-delta_f64) * sats;
self.state.unrealized_loss = self
.state
.unrealized_loss
.checked_sub(loss_adj)
.unwrap_or(Dollars::ZERO);
if price > new_price {
let correct_loss = price.checked_sub(new_price).unwrap() * sats;
self.state.unrealized_loss += correct_loss;
}
// Add their new contribution to unrealized_loss
// (price > new_price is always true due to Bound::Excluded)
let new_loss = (price - new_price).as_u128();
self.state.unrealized_loss += new_loss * sats_u128;
}
// Apply delta to non-crossing UTXOs only
// Non-crossing loss UTXOs: their loss increases by delta
self.state.unrealized_loss += delta * original_supply_in_loss;
// Non-crossing profit UTXOs: their profit decreases by delta
let non_crossing_profit_sats =
self.state.supply_in_profit.as_u128(); // Already excludes crossing
self.state.unrealized_profit -= delta * non_crossing_profit_sats;
}
self.at_price = new_price;
}
/// Full computation from scratch (no cache). O(n).
pub fn compute_full_standalone(
current_price: Dollars,
price_to_amount: &PriceToAmount,
) -> UnrealizedState {
let mut state = UnrealizedState::ZERO;
/// Compute raw cached state from cost_basis_data.
fn compute_raw(
current_price: CentsUnsignedCompact,
cost_basis_data: &CostBasisData,
) -> CachedStateRaw {
let mut state = CachedStateRaw::default();
for (price, &sats) in cost_basis_data.iter() {
let sats_u128 = sats.as_u128();
let price_u128 = price.as_u128();
let invested_capital = price_u128 * sats_u128;
let investor_cap = price_u128 * invested_capital;
for (price, &sats) in price_to_amount.iter() {
let invested_capital = price * sats;
if price <= current_price {
state.supply_in_profit += sats;
state.invested_capital_in_profit += invested_capital;
state.investor_cap_in_profit += investor_cap;
if price < current_price {
let diff = current_price.checked_sub(price).unwrap();
state.unrealized_profit += diff * sats;
let diff = (current_price - price).as_u128();
state.unrealized_profit += diff * sats_u128;
}
} else {
state.supply_in_loss += sats;
state.invested_capital_in_loss += invested_capital;
let diff = price.checked_sub(current_price).unwrap();
state.unrealized_loss += diff * sats;
state.investor_cap_in_loss += investor_cap;
let diff = (price - current_price).as_u128();
state.unrealized_loss += diff * sats_u128;
}
}
state
}
/// Compute final UnrealizedState directly (not cached).
/// Used for date_state which doesn't use the cache.
pub fn compute_full_standalone(
current_price: CentsUnsignedCompact,
cost_basis_data: &CostBasisData,
) -> UnrealizedState {
Self::compute_raw(current_price, cost_basis_data).to_output()
}
}

View File

@@ -30,7 +30,7 @@ use super::{
compute::aggregates,
};
const VERSION: Version = Version::new(21);
const VERSION: Version = Version::new(22);
/// Main struct holding all computed vectors and state for stateful computation.
#[derive(Clone, Traversable)]
@@ -257,7 +257,7 @@ impl Vecs {
} else {
// Recover chain_state from stored values
let height_to_timestamp = &blocks.time.timestamp_monotonic;
let height_to_price = price.map(|p| &p.usd.split.close.height);
let height_to_price = price.map(|p| &p.cents.split.height.close);
let mut height_to_timestamp_iter = height_to_timestamp.into_iter();
let mut height_to_price_iter = height_to_price.map(|v| v.into_iter());
@@ -266,9 +266,10 @@ impl Vecs {
let chain_state = (0..recovered_height.to_usize())
.map(|h| {
let h = Height::from(h);
let price = height_to_price_iter.as_mut().map(|v| *v.get_unwrap(h));
BlockState {
supply: chain_state_iter.get_unwrap(h),
price: height_to_price_iter.as_mut().map(|v| *v.get_unwrap(h)),
price,
timestamp: height_to_timestamp_iter.get_unwrap(h),
}
})

View File

@@ -10,7 +10,7 @@ use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromDateLast, ComputedVecValue,
LazyBinaryComputedFromHeightLast, LazyBinaryComputedFromHeightSum, LazyBinaryTransformLast,
LazyDateDerivedLast, LazyDateDerivedSumCum, NumericValue,
LazyDateDerivedLast, LazyDateDerivedSumCum, LazyFromDateLast, LazyFromHeightLast, NumericValue,
};
const VERSION: Version = Version::ZERO;
@@ -223,6 +223,45 @@ where
}
}
pub fn from_lazy_height_and_dateindex_last<F, S1SourceT>(
name: &str,
version: Version,
source1: &LazyFromHeightLast<S1T, S1SourceT>,
source2: &ComputedFromDateLast<S2T>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1SourceT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyBinaryTransformLast::from_vecs::<F>(
name,
v,
source1.$p.boxed_clone(),
source2.$p.boxed_clone(),
)
};
}
Self {
dateindex: LazyVecFrom2::transformed::<F>(
name,
v,
source1.dateindex.boxed_clone(),
source2.dateindex.boxed_clone(),
),
weekindex: period!(weekindex),
monthindex: period!(monthindex),
quarterindex: period!(quarterindex),
semesterindex: period!(semesterindex),
yearindex: period!(yearindex),
decadeindex: period!(decadeindex),
}
}
pub fn from_dateindex_and_height_last<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
@@ -753,4 +792,44 @@ where
decadeindex: period!(decadeindex),
}
}
/// Create from a ComputedFromDateLast and a LazyFromDateLast.
pub fn from_computed_and_lazy_last<F, S2SourceT>(
name: &str,
version: Version,
source1: &ComputedFromDateLast<S1T>,
source2: &LazyFromDateLast<S2T, S2SourceT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S2SourceT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyBinaryTransformLast::from_vecs::<F>(
name,
v,
source1.rest.$p.boxed_clone(),
source2.$p.boxed_clone(),
)
};
}
Self {
dateindex: LazyVecFrom2::transformed::<F>(
name,
v,
source1.dateindex.boxed_clone(),
source2.dateindex.boxed_clone(),
),
weekindex: period!(weekindex),
monthindex: period!(monthindex),
quarterindex: period!(quarterindex),
semesterindex: period!(semesterindex),
yearindex: period!(yearindex),
decadeindex: period!(decadeindex),
}
}
}

View File

@@ -58,4 +58,37 @@ where
decadeindex: period!(decadeindex),
}
}
/// Create from two LazyBinaryFromDateSum sources.
pub fn from_binary<F, S1aT, S1bT, S2aT, S2bT>(
name: &str,
version: Version,
source1: &LazyBinaryFromDateSum<S1T, S1aT, S1bT>,
source2: &LazyBinaryFromDateSum<S2T, S2aT, S2bT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1aT: ComputedVecValue + JsonSchema,
S1bT: ComputedVecValue + JsonSchema,
S2aT: ComputedVecValue + JsonSchema,
S2bT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyBinaryTransformSum::from_boxed::<F>(name, v, source1.$p.boxed_clone(), source2.$p.boxed_clone())
};
}
Self {
dateindex: period!(dateindex),
weekindex: period!(weekindex),
monthindex: period!(monthindex),
quarterindex: period!(quarterindex),
semesterindex: period!(semesterindex),
yearindex: period!(yearindex),
decadeindex: period!(decadeindex),
}
}
}

View File

@@ -10,7 +10,7 @@ use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast,
ComputedHeightDerivedSumCum, ComputedVecValue, LazyBinaryTransformSumCum, LazyDateDerivedFull,
LazyDateDerivedSumCum, NumericValue, SumCum,
LazyDateDerivedSumCum, LazyFromHeightLast, NumericValue, SumCum,
};
const VERSION: Version = Version::ZERO;
@@ -278,4 +278,47 @@ where
decadeindex: period!(decadeindex),
}
}
// --- Methods accepting SumCum + LazyLast sources ---
/// Create from a sum/cumulative source and a lazy "last" source.
///
/// For every period, combines `source1`'s sum and cumulative vecs with
/// `source2`'s last-value vec via `F`.
pub fn from_computed_lazy_last<F, S2ST>(
name: &str,
version: Version,
source1: &ComputedFromHeightSumCum<S1T>,
source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1T: PartialOrd,
S2T: NumericValue,
S2ST: ComputedVecValue + JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
// Coarser periods: source1 nests period vecs under `rest` (each with
// separate `sum` and `cumulative` fields); source2's date-period vecs
// live under `rest.dates`.
macro_rules! period {
($p:ident) => {
LazyBinaryTransformSumCum::from_sources_last_sum_raw::<F>(
name, v,
source1.rest.$p.sum.boxed_clone(),
source1.rest.$p.cumulative.boxed_clone(),
source2.rest.dates.$p.boxed_clone(),
)
};
}
Self {
// Day granularity: the dateindex sum/cumulative are exposed through
// accessor methods rather than fields, hence the different shape.
dateindex: LazyBinaryTransformSumCum::from_sources_last_sum_raw::<F>(
name, v,
source1.dateindex.boxed_sum(),
source1.dateindex.boxed_cumulative(),
source2.rest.dates.dateindex.boxed_clone(),
),
weekindex: period!(weekindex),
monthindex: period!(monthindex),
quarterindex: period!(quarterindex),
semesterindex: period!(semesterindex),
yearindex: period!(yearindex),
decadeindex: period!(decadeindex),
}
}
}

View File

@@ -10,7 +10,7 @@ use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec, LazyVecFrom1};
use super::{ComputedFromDateLast, LazyBinaryFromDateLast};
use crate::internal::{ComputedFromHeightLast, ComputedVecValue, DollarsToSatsFract, LazyTransformLast, NumericValue};
use crate::internal::{ComputedFromHeightLast, ComputedVecValue, DollarsToSatsFract, LazyFromHeightLast, LazyTransformLast, NumericValue};
/// Lazy binary price with both USD and sats representations.
///
@@ -71,6 +71,23 @@ where
Self::from_dollars(name, version, dollars)
}
/// Create from lazy height-based price and dateindex-based ratio sources.
///
/// Builds the USD vec by combining the two sources via `F`, then delegates
/// to `Self::from_dollars` to assemble the full price wrapper around it.
pub fn from_lazy_height_and_dateindex_last<F, S1SourceT>(
name: &str,
version: Version,
source1: &LazyFromHeightLast<S1T, S1SourceT>,
source2: &ComputedFromDateLast<S2T>,
) -> Self
where
F: BinaryTransform<S1T, S2T, Dollars>,
S1SourceT: ComputedVecValue + JsonSchema,
{
let dollars = LazyBinaryFromDateLast::from_lazy_height_and_dateindex_last::<F, S1SourceT>(
name, version, source1, source2,
);
Self::from_dollars(name, version, dollars)
}
/// Create from two computed dateindex sources.
pub fn from_computed_both_last<F: BinaryTransform<S1T, S2T, Dollars>>(
name: &str,

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_traversable::{Traversable, TreeNode};
use brk_types::{DateIndex, Dollars, Version};
use brk_types::{DateIndex, Dollars, StoredF32, Version};
use rayon::prelude::*;
use vecdb::{
AnyExportableVec, AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec,
@@ -15,28 +15,77 @@ pub const PERCENTILES: [u8; 19] = [
];
pub const PERCENTILES_LEN: usize = PERCENTILES.len();
/// Compute spot percentile rank by interpolating within percentile bands.
/// Returns a value between 0 and 100 indicating where spot sits in the distribution.
///
/// `percentile_prices` must be ordered to match `PERCENTILES` (lowest band
/// first, highest last). Returns `StoredF32::NAN` when spot or the lowest
/// percentile is NaN, or when no band can be matched (e.g. a NaN interior
/// percentile prevents the band search from succeeding).
pub fn compute_spot_percentile_rank(percentile_prices: &[Dollars; PERCENTILES_LEN], spot: Dollars) -> StoredF32 {
// NaN inputs mean the distribution is not (yet) populated.
if spot.is_nan() || percentile_prices[0].is_nan() {
return StoredF32::NAN;
}
let spot_f64 = f64::from(spot);
// Below lowest percentile (p5) - extrapolate towards 0
let p5 = f64::from(percentile_prices[0]);
if spot_f64 <= p5 {
// Degenerate distribution: lowest band at zero -> rank 0.
if p5 == 0.0 {
return StoredF32::from(0.0);
}
// Linear extrapolation: rank = 5 * (spot / p5)
return StoredF32::from((5.0 * spot_f64 / p5).max(0.0));
}
// Above highest percentile (p95) - extrapolate towards 100
let p95 = f64::from(percentile_prices[PERCENTILES_LEN - 1]);
let p90 = f64::from(percentile_prices[PERCENTILES_LEN - 2]);
if spot_f64 >= p95 {
// Flat top band: no slope to extrapolate with, clamp to 100.
if p95 == p90 {
return StoredF32::from(100.0);
}
// Linear extrapolation using p90-p95 slope
let slope = 5.0 / (p95 - p90);
return StoredF32::from((95.0 + (spot_f64 - p95) * slope).min(100.0));
}
// Find the band containing spot and interpolate
for i in 0..PERCENTILES_LEN - 1 {
let lower = f64::from(percentile_prices[i]);
let upper = f64::from(percentile_prices[i + 1]);
if spot_f64 >= lower && spot_f64 <= upper {
let lower_pct = f64::from(PERCENTILES[i]);
let upper_pct = f64::from(PERCENTILES[i + 1]);
// Zero-width band: both edges equal, return the lower edge's rank.
if upper == lower {
return StoredF32::from(lower_pct);
}
// Linear interpolation
let ratio = (spot_f64 - lower) / (upper - lower);
return StoredF32::from(lower_pct + ratio * (upper_pct - lower_pct));
}
}
// Unreachable for a well-formed, monotonically ordered percentile array;
// reached only if the array is unsorted or contains interior NaNs.
StoredF32::NAN
}
#[derive(Clone)]
pub struct CostBasisPercentiles {
pub struct PercentilesVecs {
pub vecs: [Option<Price>; PERCENTILES_LEN],
}
const VERSION: Version = Version::ZERO;
impl CostBasisPercentiles {
impl PercentilesVecs {
pub fn forced_import(
db: &Database,
name: &str,
prefix: &str,
version: Version,
indexes: &indexes::Vecs,
compute: bool,
) -> Result<Self> {
let vecs = PERCENTILES.map(|p| {
compute.then(|| {
let metric_name = if name.is_empty() {
format!("cost_basis_pct{p:02}")
} else {
format!("{name}_cost_basis_pct{p:02}")
};
let metric_name = format!("{prefix}_pct{p:02}");
Price::forced_import(db, &metric_name, version + VERSION, indexes).unwrap()
})
});
@@ -88,7 +137,7 @@ impl CostBasisPercentiles {
}
}
impl CostBasisPercentiles {
impl PercentilesVecs {
pub fn write(&mut self) -> Result<()> {
for vec in self.vecs.iter_mut().flatten() {
vec.dateindex.write()?;
@@ -115,7 +164,7 @@ impl CostBasisPercentiles {
}
}
impl Traversable for CostBasisPercentiles {
impl Traversable for PercentilesVecs {
fn to_tree_node(&self) -> TreeNode {
TreeNode::Branch(
PERCENTILES

View File

@@ -17,7 +17,8 @@ use crate::{
};
use super::{ComputedFromDateLast, Price};
use crate::internal::ComputedFromHeightLast;
use crate::internal::{ComputedFromHeightLast, ComputedVecValue, LazyFromHeightLast};
use schemars::JsonSchema;
#[derive(Clone, Traversable)]
pub struct ComputedFromDateRatio {
@@ -56,7 +57,6 @@ impl ComputedFromDateRatio {
version: Version,
indexes: &indexes::Vecs,
extended: bool,
price_vecs: Option<&price::Vecs>,
) -> Result<Self> {
let v = version + VERSION;
@@ -81,7 +81,8 @@ impl ComputedFromDateRatio {
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
price_vecs,
metric_price,
price.as_ref().map(|p| &p.dollars),
)
.unwrap()
};
@@ -142,6 +143,82 @@ impl ComputedFromDateRatio {
})
}
/// Import ratio vecs for a lazily derived (height-based) price metric.
///
/// Mirrors `forced_import`, but the price side is a `LazyFromHeightLast`
/// instead of an eagerly computed metric. Extended vecs (SMAs, standard
/// deviations, percentiles and their USD projections) are only created
/// when `extended` is true. `price` is left as `None`.
pub fn forced_import_from_lazy<S1T: ComputedVecValue + JsonSchema>(
db: &Database,
name: &str,
metric_price: &LazyFromHeightLast<Dollars, S1T>,
version: Version,
indexes: &indexes::Vecs,
extended: bool,
) -> Result<Self> {
let v = version + VERSION;
// Import a date-last vec named "{name}_{suffix}". The `.unwrap()` matches
// the file's existing "forced import" convention: failure here is fatal.
macro_rules! import {
($suffix:expr) => {
ComputedFromDateLast::forced_import(db, &format!("{name}_{}", $suffix), v, indexes)
.unwrap()
};
}
// Import a standard-deviation group over `$days` (usize::MAX = full history),
// wiring the lazy metric price in so USD bands can be derived.
macro_rules! import_sd {
($suffix:expr, $days:expr) => {
ComputedFromDateStdDev::forced_import_from_lazy(
db,
&format!("{name}_{}", $suffix),
$days,
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
Some(metric_price),
)
.unwrap()
};
}
let ratio_pct99 = extended.then(|| import!("ratio_pct99"));
let ratio_pct98 = extended.then(|| import!("ratio_pct98"));
let ratio_pct95 = extended.then(|| import!("ratio_pct95"));
let ratio_pct5 = extended.then(|| import!("ratio_pct5"));
let ratio_pct2 = extended.then(|| import!("ratio_pct2"));
let ratio_pct1 = extended.then(|| import!("ratio_pct1"));
// Project a percentile ratio back into USD: usd = metric_price * ratio.
macro_rules! lazy_usd {
($ratio:expr, $suffix:expr) => {
$ratio.as_ref().map(|r| {
LazyBinaryPrice::from_lazy_height_and_dateindex_last::<PriceTimesRatio, S1T>(
&format!("{name}_{}", $suffix),
v,
metric_price,
r,
)
})
};
}
Ok(Self {
ratio: import!("ratio"),
ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")),
ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")),
ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)),
ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)),
ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)),
ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)),
ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"),
ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"),
ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"),
ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"),
ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"),
ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"),
// No eagerly computed price counterpart exists for a lazy metric.
price: None,
ratio_pct99,
ratio_pct98,
ratio_pct95,
ratio_pct5,
ratio_pct2,
ratio_pct1,
})
}
pub fn compute_all<F>(
&mut self,
price: &price::Vecs,

View File

@@ -2,15 +2,19 @@ use std::mem;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Close, Date, DateIndex, Dollars, StoredF32, Version};
use brk_types::{Date, DateIndex, Dollars, StoredF32, Version};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, CollectableVec, Database, EagerVec, Exit, GenericStoredVec, IterableVec,
PcoVec, VecIndex,
};
use crate::{ComputeIndexes, indexes, price};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ClosePriceTimesRatio, ComputedFromDateLast, LazyBinaryPrice};
use crate::internal::{
ComputedFromDateLast, ComputedFromHeightLast, ComputedVecValue, LazyBinaryPrice,
LazyFromHeightLast, PriceTimesRatio,
};
#[derive(Clone, Traversable)]
pub struct ComputedFromDateStdDev {
@@ -35,19 +39,19 @@ pub struct ComputedFromDateStdDev {
pub m2_5sd: Option<ComputedFromDateLast<StoredF32>>,
pub m3sd: Option<ComputedFromDateLast<StoredF32>>,
pub _0sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p0_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p1sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p1_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p2sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p2_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub p3sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m0_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m1sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m1_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m2sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m2_5sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub m3sd_usd: Option<LazyBinaryPrice<Close<Dollars>, StoredF32>>,
pub _0sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p0_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p1sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p1_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p2sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p2_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub p3sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m0_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m1sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m1_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m2sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m2_5sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
pub m3sd_usd: Option<LazyBinaryPrice<Dollars, StoredF32>>,
}
#[derive(Debug, Default)]
@@ -103,9 +107,10 @@ impl ComputedFromDateStdDev {
parent_version: Version,
indexes: &indexes::Vecs,
options: StandardDeviationVecsOptions,
price_vecs: Option<&price::Vecs>,
metric_price: Option<&ComputedFromHeightLast<Dollars>>,
date_price: Option<&ComputedFromDateLast<Dollars>>,
) -> Result<Self> {
let version = parent_version + Version::ONE;
let version = parent_version + Version::TWO;
macro_rules! import {
($suffix:expr) => {
@@ -133,20 +138,33 @@ impl ComputedFromDateStdDev {
let m2_5sd = options.bands().then(|| import!("m2_5sd"));
let m3sd = options.bands().then(|| import!("m3sd"));
// Create USD bands using the metric price (the denominator of the ratio).
// This converts ratio bands back to USD: usd_band = metric_price * ratio_band
macro_rules! lazy_usd {
($band:expr, $suffix:expr) => {
price_vecs
.map(|p| &p.usd.split.close)
.zip($band.as_ref())
.filter(|_| options.price_bands())
.map(|(p, b)| {
LazyBinaryPrice::from_computed_both_last::<ClosePriceTimesRatio>(
if !options.price_bands() {
None
} else if let Some(mp) = metric_price {
$band.as_ref().map(|b| {
LazyBinaryPrice::from_height_and_dateindex_last::<PriceTimesRatio>(
&format!("{name}_{}", $suffix),
version,
p,
mp,
b,
)
})
} else if let Some(dp) = date_price {
$band.as_ref().map(|b| {
LazyBinaryPrice::from_computed_both_last::<PriceTimesRatio>(
&format!("{name}_{}", $suffix),
version,
dp,
b,
)
})
} else {
None
}
};
}
@@ -395,4 +413,91 @@ impl ComputedFromDateStdDev {
) -> impl Iterator<Item = &mut EagerVec<PcoVec<DateIndex, StoredF32>>> {
self.mut_stateful_computed().map(|c| &mut c.dateindex)
}
#[allow(clippy::too_many_arguments)]
/// Import a standard-deviation vec group whose USD bands are derived from a
/// lazy height-based metric price.
///
/// Mirrors `forced_import`, but takes a `LazyFromHeightLast` price source.
/// USD band vecs are created only when `options.price_bands()` holds AND a
/// `metric_price` is supplied; band vecs themselves require `options.bands()`.
pub fn forced_import_from_lazy<S1T: ComputedVecValue + JsonSchema>(
db: &Database,
name: &str,
days: usize,
parent_version: Version,
indexes: &indexes::Vecs,
options: StandardDeviationVecsOptions,
metric_price: Option<&LazyFromHeightLast<Dollars, S1T>>,
) -> Result<Self> {
// Version::TWO matches the bump in `forced_import` (see the diff hunk
// above changing ONE -> TWO) so both paths stay in sync.
let version = parent_version + Version::TWO;
// Import a date-last vec named "{name}_{suffix}"; failure is fatal by
// the file's forced-import convention.
macro_rules! import {
($suffix:expr) => {
ComputedFromDateLast::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)
.unwrap()
};
}
let sma_vec = Some(import!("sma"));
let p0_5sd = options.bands().then(|| import!("p0_5sd"));
let p1sd = options.bands().then(|| import!("p1sd"));
let p1_5sd = options.bands().then(|| import!("p1_5sd"));
let p2sd = options.bands().then(|| import!("p2sd"));
let p2_5sd = options.bands().then(|| import!("p2_5sd"));
let p3sd = options.bands().then(|| import!("p3sd"));
let m0_5sd = options.bands().then(|| import!("m0_5sd"));
let m1sd = options.bands().then(|| import!("m1sd"));
let m1_5sd = options.bands().then(|| import!("m1_5sd"));
let m2sd = options.bands().then(|| import!("m2sd"));
let m2_5sd = options.bands().then(|| import!("m2_5sd"));
let m3sd = options.bands().then(|| import!("m3sd"));
// Convert a ratio band back to USD (usd_band = metric_price * ratio_band),
// but only when a price source exists, the band exists, and price bands
// are enabled — `zip` + `filter` collapses all three conditions.
macro_rules! lazy_usd {
($band:expr, $suffix:expr) => {
metric_price
.zip($band.as_ref())
.filter(|_| options.price_bands())
.map(|(mp, b)| {
LazyBinaryPrice::from_lazy_height_and_dateindex_last::<PriceTimesRatio, S1T>(
&format!("{name}_{}", $suffix),
version,
mp,
b,
)
})
};
}
Ok(Self {
days,
sd: import!("sd"),
zscore: options.zscore().then(|| import!("zscore")),
_0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"),
p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"),
p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"),
p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"),
p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"),
p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"),
p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"),
m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"),
m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"),
m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"),
m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"),
m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"),
m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"),
sma: sma_vec,
p0_5sd,
p1sd,
p1_5sd,
p2sd,
p2_5sd,
p3sd,
m0_5sd,
m1sd,
m1_5sd,
m2sd,
m2_5sd,
m3sd,
})
}
}

View File

@@ -9,7 +9,7 @@ use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedFromHeightAndDateLast, ComputedVecValue,
LazyBinaryComputedFromHeightLast, LazyBinaryFromDateLast, LazyBinaryHeightDerivedLast,
LazyBinaryTransformLast, LazyDateDerivedLast, NumericValue,
LazyBinaryTransformLast, LazyDateDerivedLast, LazyFromHeightLast, NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
@@ -369,4 +369,31 @@ where
},
}
}
/// Create from a ComputedFromHeightAndDateLast and a LazyFromHeightLast.
///
/// The height vec pairs the two height sources directly through `F`;
/// all derived views (dates, difficulty epochs) are delegated to the
/// nested `LazyBinaryHeightDerivedLast` constructor.
pub fn from_computed_height_date_and_lazy_block_last<F, S2SourceT>(
name: &str,
version: Version,
source1: &ComputedFromHeightAndDateLast<S1T>,
source2: &LazyFromHeightLast<S2T, S2SourceT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1T: PartialOrd,
S2SourceT: ComputedVecValue + JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(
name,
v,
source1.height.boxed_clone(),
source2.height.boxed_clone(),
),
rest: LazyBinaryHeightDerivedLast::from_computed_height_date_and_lazy_block_last::<F, _>(
name, v, source1, source2,
),
}
}
}

View File

@@ -68,4 +68,36 @@ where
rest: LazyBinaryHeightDerivedSum::from_derived::<F>(name, v, &source1.rest, &source2.rest),
}
}
/// Create from two LazyBinaryFromHeightSum sources.
///
/// The raw height vecs are combined directly via `F`; the derived views
/// are delegated to the nested `LazyBinaryHeightDerivedSum` constructor.
pub fn from_binary<F, S1aT, S1bT, S2aT, S2bT>(
    name: &str,
    version: Version,
    source1: &LazyBinaryFromHeightSum<S1T, S1aT, S1bT>,
    source2: &LazyBinaryFromHeightSum<S2T, S2aT, S2bT>,
) -> Self
where
    F: BinaryTransform<S1T, S2T, T>,
    S1aT: ComputedVecValue + JsonSchema,
    S1bT: ComputedVecValue + JsonSchema,
    S2aT: ComputedVecValue + JsonSchema,
    S2bT: ComputedVecValue + JsonSchema,
{
    // Layer this module's VERSION on top of the caller's version.
    let v = version + VERSION;
    let height = LazyVecFrom2::transformed::<F>(
        name,
        v,
        source1.height.boxed_clone(),
        source2.height.boxed_clone(),
    );
    let rest = LazyBinaryHeightDerivedSum::from_binary::<F, _, _, _, _>(
        name,
        v,
        &source1.rest,
        &source2.rest,
    );
    Self { height, rest }
}
}

View File

@@ -8,7 +8,7 @@ use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, ComputedHeightDerivedSumCum,
ComputedVecValue, LazyBinaryHeightDerivedSumCum, NumericValue,
ComputedVecValue, LazyBinaryHeightDerivedSumCum, LazyFromHeightLast, NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
@@ -207,4 +207,33 @@ where
rest: LazyBinaryHeightDerivedSumCum::from_computed_derived_last::<F>(name, v, source1, source2),
}
}
// --- Methods accepting SumCum + LazyLast sources ---
/// Create from a sum/cumulative source and a lazy "last" source.
///
/// The raw per-height combination is supplied explicitly via
/// `height_source1`/`height_source2` (callers may pass vecs that differ
/// from the sum/cumulative fields of `source1`); the cumulative vec pairs
/// `source1`'s running total with `source2`'s last value.
pub fn from_computed_lazy_last<F, S2ST>(
name: &str,
version: Version,
height_source1: IterableBoxedVec<Height, S1T>,
height_source2: IterableBoxedVec<Height, S2T>,
source1: &ComputedFromHeightSumCum<S1T>,
source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1T: PartialOrd,
S2T: NumericValue,
S2ST: ComputedVecValue + JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
// NOTE(review): cumulative pairs source1's running total with source2's
// per-height last value — presumably intentional for last-style ratios;
// confirm against callers.
height_cumulative: LazyVecFrom2::transformed::<F>(
&format!("{name}_cumulative"),
v,
source1.height_cumulative.boxed_clone(),
source2.height.boxed_clone(),
),
rest: LazyBinaryHeightDerivedSumCum::from_computed_lazy_last::<F, S2ST>(name, v, source1, source2),
}
}
}

View File

@@ -0,0 +1,47 @@
//! Lazy price wrapper for height-based metrics with both USD and sats representations.
//! Derives both from a cents base metric.
use brk_traversable::Traversable;
use brk_types::{CentsUnsigned, Dollars, SatsFract, Version};
use derive_more::{Deref, DerefMut};
use vecdb::IterableCloneableVec;
use super::{ComputedFromHeightLast, LazyFromHeightLast};
use crate::internal::{CentsUnsignedToDollars, CentsUnsignedToSatsFract};
/// Lazy price metric (height-based) with both USD and sats representations.
/// Both are lazily derived from a cents base metric.
///
/// Derefs to the dollars metric, so existing code works unchanged.
/// Access `.sats` for the sats exchange rate version.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyPriceFromCents {
// Primary representation; `Deref`/`DerefMut` target.
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub dollars: LazyFromHeightLast<Dollars, CentsUnsigned>,
// Exchange-rate view derived from the same cents base.
pub sats: LazyFromHeightLast<SatsFract, CentsUnsigned>,
}
impl LazyPriceFromCents {
/// Build both representations from a computed cents metric.
///
/// `dollars` uses `CentsUnsignedToDollars`, `sats` uses
/// `CentsUnsignedToSatsFract`; the sats vec is named "{name}_sats".
pub fn from_computed(
name: &str,
version: Version,
cents: &ComputedFromHeightLast<CentsUnsigned>,
) -> Self {
let dollars = LazyFromHeightLast::from_computed::<CentsUnsignedToDollars>(
name,
version,
cents.height.boxed_clone(),
cents,
);
let sats = LazyFromHeightLast::from_computed::<CentsUnsignedToSatsFract>(
&format!("{name}_sats"),
version,
cents.height.boxed_clone(),
cents,
);
Self { dollars, sats }
}
}

View File

@@ -16,6 +16,7 @@ mod lazy_binary_computed_sum_cum;
mod lazy_computed_full;
mod lazy_computed_sum_cum;
mod lazy_last;
mod lazy_price_from_cents;
mod lazy_sum;
mod price;
mod unary_last;
@@ -51,6 +52,7 @@ pub use lazy_binary_computed_sum_cum::*;
pub use lazy_computed_full::*;
pub use lazy_computed_sum_cum::*;
pub use lazy_last::*;
pub use lazy_price_from_cents::*;
pub use lazy_sum::*;
pub use price::*;
pub use unary_last::*;

View File

@@ -8,7 +8,7 @@ use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedFromHeightAndDateLast, ComputedVecValue,
LazyBinaryFromDateLast, LazyBinaryTransformLast, NumericValue,
LazyBinaryFromDateLast, LazyBinaryTransformLast, LazyFromHeightLast, NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
@@ -141,4 +141,34 @@ where
),
}
}
/// Create from a ComputedFromHeightAndDateLast and a LazyFromHeightLast.
///
/// Date-period views come from pairing `source1.rest` with the lazy
/// source's nested date vecs; the difficulty-epoch view pairs the two
/// epoch vecs directly through `F`.
pub fn from_computed_height_date_and_lazy_block_last<F, S2SourceT>(
name: &str,
version: Version,
source1: &ComputedFromHeightAndDateLast<S1T>,
source2: &LazyFromHeightLast<S2T, S2SourceT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1T: PartialOrd,
S2SourceT: ComputedVecValue + JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
Self {
dates: LazyBinaryFromDateLast::from_computed_and_lazy_last::<F, _>(
name,
v,
&source1.rest,
&source2.rest.dates,
),
difficultyepoch: LazyBinaryTransformLast::from_vecs::<F>(
name,
v,
source1.difficultyepoch.boxed_clone(),
source2.rest.difficultyepoch.boxed_clone(),
),
}
}
}

View File

@@ -48,4 +48,36 @@ where
),
}
}
/// Create from two LazyBinaryHeightDerivedSum sources.
///
/// Delegates date-period views to `LazyBinaryFromDateSum::from_binary`
/// and pairs the two difficulty-epoch vecs directly through `F`.
pub fn from_binary<F, S1aT, S1bT, S2aT, S2bT>(
name: &str,
version: Version,
source1: &LazyBinaryHeightDerivedSum<S1T, S1aT, S1bT>,
source2: &LazyBinaryHeightDerivedSum<S2T, S2aT, S2bT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1aT: ComputedVecValue + JsonSchema,
S1bT: ComputedVecValue + JsonSchema,
S2aT: ComputedVecValue + JsonSchema,
S2bT: ComputedVecValue + JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
Self {
dates: LazyBinaryFromDateSum::from_binary::<F, _, _, _, _>(
name,
v,
&source1.dates,
&source2.dates,
),
difficultyepoch: LazyBinaryTransformSum::from_boxed::<F>(
name,
v,
source1.difficultyepoch.boxed_clone(),
source2.difficultyepoch.boxed_clone(),
),
}
}
}

View File

@@ -9,7 +9,7 @@ use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, ComputedHeightDerivedSumCum,
ComputedVecValue, LazyBinaryFromDateSumCum, LazyBinaryTransformSumCum, LazyFull, LazyDateDerivedFull,
LazyDateDerivedSumCum, LazySumCum, NumericValue, SumCum,
LazyDateDerivedSumCum, LazyFromHeightLast, LazySumCum, NumericValue, SumCum,
};
const VERSION: Version = Version::ZERO;
@@ -221,4 +221,32 @@ where
),
}
}
// --- Methods accepting SumCum + LazyLast sources ---
/// Create from a sum/cumulative source and a lazy "last" source.
///
/// Date-period views delegate to `LazyBinaryFromDateSumCum`; the
/// difficulty-epoch view combines source1's sum and cumulative vecs with
/// source2's per-epoch last vec via `F`.
pub fn from_computed_lazy_last<F, S2ST>(
name: &str,
version: Version,
source1: &ComputedFromHeightSumCum<S1T>,
source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1T: PartialOrd,
S2T: NumericValue,
S2ST: ComputedVecValue + schemars::JsonSchema,
{
// Stack this module's VERSION on top of the caller-supplied version.
let v = version + VERSION;
Self {
dates: LazyBinaryFromDateSumCum::from_computed_lazy_last::<F, S2ST>(name, v, source1, source2),
difficultyepoch: LazyBinaryTransformSumCum::from_sources_last_sum_raw::<F>(
name,
v,
source1.difficultyepoch.sum.boxed_clone(),
source1.difficultyepoch.cumulative.boxed_clone(),
source2.rest.difficultyepoch.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,12 @@
use brk_types::{CentsUnsigned, Dollars};
use vecdb::UnaryTransform;
/// CentsUnsigned -> Dollars (convert cents to dollars for display)
pub struct CentsUnsignedToDollars;
impl UnaryTransform<CentsUnsigned, Dollars> for CentsUnsignedToDollars {
#[inline(always)]
fn apply(cents: CentsUnsigned) -> Dollars {
cents.into()
}
}

View File

@@ -0,0 +1,19 @@
use brk_types::{CentsUnsigned, SatsFract};
use vecdb::UnaryTransform;
/// CentsUnsigned -> SatsFract (exchange rate: sats per dollar at this price level)
/// Formula: sats = 100_000_000 / dollars = 100_000_000 / (cents / 100) = 10_000_000_000 / cents
pub struct CentsUnsignedToSatsFract;
impl UnaryTransform<CentsUnsigned, SatsFract> for CentsUnsignedToSatsFract {
#[inline(always)]
fn apply(cents: CentsUnsigned) -> SatsFract {
let cents_f64 = cents.inner() as f64;
// A zero price would divide by zero; surface it as NaN instead.
if cents_f64 == 0.0 {
SatsFract::NAN
} else {
// sats = 1 BTC * 100 / cents = 10_000_000_000 / cents
SatsFract::new(SatsFract::SATS_PER_BTC * 100.0 / cents_f64)
}
}
}

View File

@@ -1,13 +0,0 @@
use brk_types::{Close, Dollars, StoredF32};
use vecdb::BinaryTransform;
/// Close<Dollars> * StoredF32 -> Dollars (price × ratio)
/// Same as PriceTimesRatio but accepts Close<Dollars> price source.
pub struct ClosePriceTimesRatio;
impl BinaryTransform<Close<Dollars>, StoredF32, Dollars> for ClosePriceTimesRatio {
#[inline(always)]
fn apply(price: Close<Dollars>, ratio: StoredF32) -> Dollars {
*price * ratio
}
}

View File

@@ -1,4 +1,5 @@
mod close_price_times_ratio;
mod cents_unsigned_to_dollars;
mod cents_unsigned_to_sats_fract;
mod close_price_times_sats;
mod difference_f32;
mod dollar_halve;
@@ -40,7 +41,8 @@ mod volatility_sqrt365;
mod volatility_sqrt7;
mod weight_to_fullness;
pub use close_price_times_ratio::*;
pub use cents_unsigned_to_dollars::*;
pub use cents_unsigned_to_sats_fract::*;
pub use close_price_times_sats::*;
pub use difference_f32::*;
pub use dollar_halve::*;

View File

@@ -33,7 +33,7 @@ impl Vecs {
let returns = ReturnsVecs::forced_import(&db, version, indexes, price, &lookback)?;
let volatility = VolatilityVecs::forced_import(version, &returns);
let range = RangeVecs::forced_import(&db, version, indexes)?;
let moving_average = MovingAverageVecs::forced_import(&db, version, indexes, Some(price))?;
let moving_average = MovingAverageVecs::forced_import(&db, version, indexes)?;
let dca = DcaVecs::forced_import(&db, version, indexes, price, &lookback)?;
let indicators = IndicatorsVecs::forced_import(
&db,

View File

@@ -6,7 +6,6 @@ use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromDateRatio, DollarsTimesTenths, LazyPrice},
price,
};
impl Vecs {
@@ -14,7 +13,6 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let price_1w_sma = ComputedFromDateRatio::forced_import(
db,
@@ -23,7 +21,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_8d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -32,7 +29,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_13d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -41,7 +37,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_21d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -50,7 +45,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_1m_sma = ComputedFromDateRatio::forced_import(
db,
@@ -59,7 +53,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_34d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -68,7 +61,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_55d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -77,7 +69,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_89d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -86,7 +77,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_111d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -95,7 +85,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_144d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -104,7 +93,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_200d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -113,7 +101,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_350d_sma = ComputedFromDateRatio::forced_import(
db,
@@ -122,7 +109,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_1y_sma = ComputedFromDateRatio::forced_import(
db,
@@ -131,7 +117,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_2y_sma = ComputedFromDateRatio::forced_import(
db,
@@ -140,7 +125,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_200w_sma = ComputedFromDateRatio::forced_import(
db,
@@ -149,7 +133,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_4y_sma = ComputedFromDateRatio::forced_import(
db,
@@ -158,7 +141,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_1w_ema = ComputedFromDateRatio::forced_import(
@@ -168,7 +150,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_8d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -177,7 +158,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_12d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -186,7 +166,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_13d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -195,7 +174,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_21d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -204,7 +182,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_26d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -213,7 +190,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_1m_ema = ComputedFromDateRatio::forced_import(
db,
@@ -222,7 +198,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_34d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -231,7 +206,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_55d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -240,7 +214,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_89d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -249,7 +222,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_144d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -258,7 +230,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_200d_ema = ComputedFromDateRatio::forced_import(
db,
@@ -267,7 +238,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_1y_ema = ComputedFromDateRatio::forced_import(
db,
@@ -276,7 +246,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_2y_ema = ComputedFromDateRatio::forced_import(
db,
@@ -285,7 +254,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_200w_ema = ComputedFromDateRatio::forced_import(
db,
@@ -294,7 +262,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_4y_ema = ComputedFromDateRatio::forced_import(
db,
@@ -303,7 +270,6 @@ impl Vecs {
version,
indexes,
true,
price,
)?;
let price_200d_sma_source = price_200d_sma.price.as_ref().unwrap();

View File

@@ -49,6 +49,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
let _1d_returns_1m_sd = ComputedFromDateStdDev::forced_import(
db,
@@ -58,6 +59,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
let _1d_returns_1y_sd = ComputedFromDateStdDev::forced_import(
db,
@@ -67,6 +69,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
let downside_returns = EagerVec::forced_import(db, "downside_returns", version)?;
@@ -78,6 +81,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
let downside_1m_sd = ComputedFromDateStdDev::forced_import(
db,
@@ -87,6 +91,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
let downside_1y_sd = ComputedFromDateStdDev::forced_import(
db,
@@ -96,6 +101,7 @@ impl Vecs {
indexes,
StandardDeviationVecsOptions::default(),
None,
None,
)?;
Ok(Self {

View File

@@ -1,5 +1,5 @@
use brk_error::Result;
use brk_types::{DateIndex, Height, OHLCCents, Version};
use brk_types::{DateIndex, Height, OHLCCentsUnsigned, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1};
use super::Vecs;
@@ -7,7 +7,7 @@ use crate::internal::{ComputedHeightAndDateBytes, LazyHeightAndDateOHLC, LazyOHL
impl Vecs {
pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
let ohlc: ComputedHeightAndDateBytes<OHLCCents> =
let ohlc: ComputedHeightAndDateBytes<OHLCCentsUnsigned> =
ComputedHeightAndDateBytes::forced_import(db, "ohlc_cents", version)?;
let components = LazyHeightAndDateOHLC {
@@ -16,25 +16,25 @@ impl Vecs {
"price_open_cents",
version,
ohlc.height.boxed_clone(),
|h: Height, iter| iter.get(h).map(|o: OHLCCents| o.open),
|h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.open),
),
high: LazyVecFrom1::init(
"price_high_cents",
version,
ohlc.height.boxed_clone(),
|h: Height, iter| iter.get(h).map(|o: OHLCCents| o.high),
|h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.high),
),
low: LazyVecFrom1::init(
"price_low_cents",
version,
ohlc.height.boxed_clone(),
|h: Height, iter| iter.get(h).map(|o: OHLCCents| o.low),
|h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.low),
),
close: LazyVecFrom1::init(
"price_close_cents",
version,
ohlc.height.boxed_clone(),
|h: Height, iter| iter.get(h).map(|o: OHLCCents| o.close),
|h: Height, iter| iter.get(h).map(|o: OHLCCentsUnsigned| o.close),
),
},
dateindex: LazyOHLC {
@@ -42,25 +42,25 @@ impl Vecs {
"price_open_cents",
version,
ohlc.dateindex.boxed_clone(),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.open),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.open),
),
high: LazyVecFrom1::init(
"price_high_cents",
version,
ohlc.dateindex.boxed_clone(),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.high),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.high),
),
low: LazyVecFrom1::init(
"price_low_cents",
version,
ohlc.dateindex.boxed_clone(),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.low),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.low),
),
close: LazyVecFrom1::init(
"price_close_cents",
version,
ohlc.dateindex.boxed_clone(),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCents| o.close),
|di: DateIndex, iter| iter.get(di).map(|o: OHLCCentsUnsigned| o.close),
),
},
};

View File

@@ -1,10 +1,10 @@
use brk_traversable::Traversable;
use brk_types::{Cents, OHLCCents};
use brk_types::{CentsUnsigned, OHLCCentsUnsigned};
use crate::internal::{ComputedHeightAndDateBytes, LazyHeightAndDateOHLC};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub split: LazyHeightAndDateOHLC<Cents, OHLCCents>,
pub ohlc: ComputedHeightAndDateBytes<OHLCCents>,
pub split: LazyHeightAndDateOHLC<CentsUnsigned, OHLCCentsUnsigned>,
pub ohlc: ComputedHeightAndDateBytes<OHLCCentsUnsigned>,
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{DateIndex, Height, OHLCCents};
use brk_types::{DateIndex, Height, OHLCCentsUnsigned};
use vecdb::{
AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex,
};
@@ -61,7 +61,7 @@ impl Vecs {
let index = starting_indexes
.dateindex
.min(DateIndex::from(self.cents.ohlc.dateindex.len()));
let mut prev = Some(index.decremented().map_or(OHLCCents::default(), |prev_i| {
let mut prev = Some(index.decremented().map_or(OHLCCentsUnsigned::default(), |prev_i| {
self.cents.ohlc.dateindex.iter().unwrap().get_unwrap(prev_i)
}));
indexes

View File

@@ -6,7 +6,7 @@ use std::{
};
use brk_error::{Error, Result};
use brk_types::{Date, Height, OHLCCents, Timestamp};
use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp};
use serde_json::Value;
use tracing::info;
@@ -18,9 +18,9 @@ use crate::{
#[derive(Clone)]
pub struct Binance {
path: Option<PathBuf>,
_1mn: Option<BTreeMap<Timestamp, OHLCCents>>,
_1d: Option<BTreeMap<Date, OHLCCents>>,
har: Option<BTreeMap<Timestamp, OHLCCents>>,
_1mn: Option<BTreeMap<Timestamp, OHLCCentsUnsigned>>,
_1d: Option<BTreeMap<Date, OHLCCentsUnsigned>>,
har: Option<BTreeMap<Timestamp, OHLCCentsUnsigned>>,
}
impl Binance {
@@ -37,7 +37,7 @@ impl Binance {
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Result<OHLCCents> {
) -> Result<OHLCCentsUnsigned> {
// Try live API data first
if self._1mn.is_none()
|| self._1mn.as_ref().unwrap().last_key_value().unwrap().0 <= &timestamp
@@ -69,7 +69,7 @@ impl Binance {
)
}
pub fn fetch_1mn() -> Result<BTreeMap<Timestamp, OHLCCents>> {
pub fn fetch_1mn() -> Result<BTreeMap<Timestamp, OHLCCentsUnsigned>> {
default_retry(|_| {
let url = Self::url("interval=1m&limit=1000");
info!("Fetching {url} ...");
@@ -79,7 +79,7 @@ impl Binance {
})
}
pub fn get_from_1d(&mut self, date: &Date) -> Result<OHLCCents> {
pub fn get_from_1d(&mut self, date: &Date) -> Result<OHLCCentsUnsigned> {
if self._1d.is_none() || self._1d.as_ref().unwrap().last_key_value().unwrap().0 <= date {
self._1d.replace(Self::fetch_1d()?);
}
@@ -92,7 +92,7 @@ impl Binance {
.ok_or(Error::NotFound("Couldn't find date".into()))
}
pub fn fetch_1d() -> Result<BTreeMap<Date, OHLCCents>> {
pub fn fetch_1d() -> Result<BTreeMap<Date, OHLCCentsUnsigned>> {
default_retry(|_| {
let url = Self::url("interval=1d");
info!("Fetching {url} ...");
@@ -102,7 +102,7 @@ impl Binance {
})
}
fn read_har(&self) -> Result<BTreeMap<Timestamp, OHLCCents>> {
fn read_har(&self) -> Result<BTreeMap<Timestamp, OHLCCentsUnsigned>> {
if self.path.is_none() {
return Err(Error::NotFound("HAR path not configured".into()));
}
@@ -179,7 +179,7 @@ impl Binance {
})
}
fn parse_ohlc_array(json: &Value) -> Result<BTreeMap<Timestamp, OHLCCents>> {
fn parse_ohlc_array(json: &Value) -> Result<BTreeMap<Timestamp, OHLCCentsUnsigned>> {
let result = json
.as_array()
.ok_or(Error::Parse("Expected JSON array".into()))?
@@ -193,7 +193,7 @@ impl Binance {
Ok(result)
}
fn parse_date_ohlc_array(json: &Value) -> Result<BTreeMap<Date, OHLCCents>> {
fn parse_date_ohlc_array(json: &Value) -> Result<BTreeMap<Date, OHLCCentsUnsigned>> {
Self::parse_ohlc_array(json).map(|map| {
map.into_iter()
.map(|(ts, ohlc)| (date_from_timestamp(ts), ohlc))
@@ -218,7 +218,7 @@ impl PriceSource for Binance {
"Binance"
}
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCents>> {
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_1d(&date))
}
@@ -226,11 +226,11 @@ impl PriceSource for Binance {
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Option<Result<OHLCCents>> {
) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_1mn(timestamp, previous_timestamp))
}
fn get_height(&mut self, _height: Height) -> Option<Result<OHLCCents>> {
fn get_height(&mut self, _height: Height) -> Option<Result<OHLCCentsUnsigned>> {
None // Binance doesn't support height-based queries
}

View File

@@ -2,8 +2,8 @@ use std::collections::BTreeMap;
use brk_error::{Error, Result};
use brk_types::{
Cents, CheckedSub, Close, Date, DateIndex, Dollars, Height, High, Low, OHLCCents, Open,
Timestamp,
CentsUnsigned, CheckedSub, Close, Date, DateIndex, Dollars, Height, High, Low,
OHLCCentsUnsigned, Open, Timestamp,
};
use serde_json::Value;
use tracing::info;
@@ -13,15 +13,15 @@ use crate::{PriceSource, check_response, default_retry};
#[derive(Default, Clone)]
#[allow(clippy::upper_case_acronyms)]
pub struct BRK {
height_to_ohlc: BTreeMap<Height, Vec<OHLCCents>>,
dateindex_to_ohlc: BTreeMap<DateIndex, Vec<OHLCCents>>,
height_to_ohlc: BTreeMap<Height, Vec<OHLCCentsUnsigned>>,
dateindex_to_ohlc: BTreeMap<DateIndex, Vec<OHLCCentsUnsigned>>,
}
const API_URL: &str = "https://bitview.space/api/vecs";
const CHUNK_SIZE: usize = 10_000;
impl BRK {
pub fn get_from_height(&mut self, height: Height) -> Result<OHLCCents> {
pub fn get_from_height(&mut self, height: Height) -> Result<OHLCCentsUnsigned> {
let key = height.checked_sub(height % CHUNK_SIZE).unwrap();
#[allow(clippy::map_entry)]
@@ -40,7 +40,7 @@ impl BRK {
.ok_or(Error::NotFound("Couldn't find height in BRK".into()))
}
fn fetch_height_prices(height: Height) -> Result<Vec<OHLCCents>> {
fn fetch_height_prices(height: Height) -> Result<Vec<OHLCCentsUnsigned>> {
default_retry(|_| {
let url = format!(
"{API_URL}/height-to-price-ohlc?from={}&to={}",
@@ -60,7 +60,7 @@ impl BRK {
})
}
pub fn get_from_date(&mut self, date: Date) -> Result<OHLCCents> {
pub fn get_from_date(&mut self, date: Date) -> Result<OHLCCentsUnsigned> {
let dateindex = DateIndex::try_from(date)?;
let key = dateindex.checked_sub(dateindex % CHUNK_SIZE).unwrap();
@@ -81,7 +81,7 @@ impl BRK {
.ok_or(Error::NotFound("Couldn't find date in BRK".into()))
}
fn fetch_date_prices(dateindex: DateIndex) -> Result<Vec<OHLCCents>> {
fn fetch_date_prices(dateindex: DateIndex) -> Result<Vec<OHLCCentsUnsigned>> {
default_retry(|_| {
let url = format!(
"{API_URL}/dateindex-to-price-ohlc?from={}&to={}",
@@ -101,13 +101,13 @@ impl BRK {
})
}
fn value_to_ohlc(value: &Value) -> Result<OHLCCents> {
fn value_to_ohlc(value: &Value) -> Result<OHLCCentsUnsigned> {
let ohlc = value
.as_array()
.ok_or(Error::Parse("Expected OHLC array".into()))?;
let get_value = |index: usize| -> Result<_> {
Ok(Cents::from(Dollars::from(
Ok(CentsUnsigned::from(Dollars::from(
ohlc.get(index)
.ok_or(Error::Parse("Missing OHLC value at index".into()))?
.as_f64()
@@ -115,7 +115,7 @@ impl BRK {
)))
};
Ok(OHLCCents::from((
Ok(OHLCCentsUnsigned::from((
Open::new(get_value(0)?),
High::new(get_value(1)?),
Low::new(get_value(2)?),
@@ -134,7 +134,7 @@ impl PriceSource for BRK {
"BRK"
}
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCents>> {
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_date(date))
}
@@ -142,11 +142,11 @@ impl PriceSource for BRK {
&mut self,
_timestamp: Timestamp,
_previous_timestamp: Option<Timestamp>,
) -> Option<Result<OHLCCents>> {
) -> Option<Result<OHLCCentsUnsigned>> {
None // BRK doesn't support timestamp-based queries
}
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCents>> {
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_height(height))
}

View File

@@ -1,7 +1,7 @@
use std::collections::BTreeMap;
use brk_error::{Error, Result};
use brk_types::{Date, Height, OHLCCents, Timestamp};
use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp};
use serde_json::Value;
use tracing::info;
@@ -12,8 +12,8 @@ use crate::{
#[derive(Default, Clone)]
pub struct Kraken {
_1mn: Option<BTreeMap<Timestamp, OHLCCents>>,
_1d: Option<BTreeMap<Date, OHLCCents>>,
_1mn: Option<BTreeMap<Timestamp, OHLCCentsUnsigned>>,
_1d: Option<BTreeMap<Date, OHLCCentsUnsigned>>,
}
impl Kraken {
@@ -21,7 +21,7 @@ impl Kraken {
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Result<OHLCCents> {
) -> Result<OHLCCentsUnsigned> {
if self._1mn.is_none()
|| self._1mn.as_ref().unwrap().last_key_value().unwrap().0 <= &timestamp
{
@@ -35,7 +35,7 @@ impl Kraken {
)
}
pub fn fetch_1mn() -> Result<BTreeMap<Timestamp, OHLCCents>> {
pub fn fetch_1mn() -> Result<BTreeMap<Timestamp, OHLCCentsUnsigned>> {
default_retry(|_| {
let url = Self::url(1);
info!("Fetching {url} ...");
@@ -45,7 +45,7 @@ impl Kraken {
})
}
fn get_from_1d(&mut self, date: &Date) -> Result<OHLCCents> {
fn get_from_1d(&mut self, date: &Date) -> Result<OHLCCentsUnsigned> {
if self._1d.is_none() || self._1d.as_ref().unwrap().last_key_value().unwrap().0 <= date {
self._1d.replace(Self::fetch_1d()?);
}
@@ -57,7 +57,7 @@ impl Kraken {
.ok_or(Error::NotFound("Couldn't find date".into()))
}
pub fn fetch_1d() -> Result<BTreeMap<Date, OHLCCents>> {
pub fn fetch_1d() -> Result<BTreeMap<Date, OHLCCentsUnsigned>> {
default_retry(|_| {
let url = Self::url(1440);
info!("Fetching {url} ...");
@@ -68,7 +68,7 @@ impl Kraken {
}
/// Parse Kraken's nested JSON response: { result: { XXBTZUSD: [...] } }
fn parse_ohlc_response(json: &Value) -> Result<BTreeMap<Timestamp, OHLCCents>> {
fn parse_ohlc_response(json: &Value) -> Result<BTreeMap<Timestamp, OHLCCentsUnsigned>> {
let result = json
.get("result")
.and_then(|r| r.get("XXBTZUSD"))
@@ -84,7 +84,7 @@ impl Kraken {
Ok(result)
}
fn parse_date_ohlc_response(json: &Value) -> Result<BTreeMap<Date, OHLCCents>> {
fn parse_date_ohlc_response(json: &Value) -> Result<BTreeMap<Date, OHLCCentsUnsigned>> {
Self::parse_ohlc_response(json).map(|map| {
map.into_iter()
.map(|(ts, ohlc)| (date_from_timestamp(ts), ohlc))
@@ -109,7 +109,7 @@ impl PriceSource for Kraken {
"Kraken"
}
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCents>> {
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_1d(&date))
}
@@ -117,11 +117,11 @@ impl PriceSource for Kraken {
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Option<Result<OHLCCents>> {
) -> Option<Result<OHLCCentsUnsigned>> {
Some(self.get_from_1mn(timestamp, previous_timestamp))
}
fn get_height(&mut self, _height: Height) -> Option<Result<OHLCCents>> {
fn get_height(&mut self, _height: Height) -> Option<Result<OHLCCentsUnsigned>> {
None // Kraken doesn't support height-based queries
}

View File

@@ -3,7 +3,7 @@
use std::{path::Path, thread::sleep, time::Duration};
use brk_error::{Error, Result};
use brk_types::{Date, Height, OHLCCents, Timestamp};
use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp};
use tracing::info;
mod binance;
@@ -66,9 +66,9 @@ impl Fetcher {
}
/// Try fetching from each source in order, return first success
fn try_sources<F>(&mut self, mut fetch: F) -> Option<Result<OHLCCents>>
fn try_sources<F>(&mut self, mut fetch: F) -> Option<Result<OHLCCentsUnsigned>>
where
F: FnMut(&mut dyn PriceSource) -> Option<Result<OHLCCents>>,
F: FnMut(&mut dyn PriceSource) -> Option<Result<OHLCCentsUnsigned>>,
{
if let Some(Ok(ohlc)) = fetch(&mut self.binance) {
return Some(Ok(ohlc));
@@ -82,7 +82,7 @@ impl Fetcher {
None
}
pub fn get_date(&mut self, date: Date) -> Result<OHLCCents> {
pub fn get_date(&mut self, date: Date) -> Result<OHLCCentsUnsigned> {
self.fetch_with_retry(
|source| source.get_date(date),
|| format!("Failed to fetch price for date {date}"),
@@ -94,7 +94,7 @@ impl Fetcher {
height: Height,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Result<OHLCCents> {
) -> Result<OHLCCentsUnsigned> {
let timestamp = timestamp.floor_seconds();
let previous_timestamp = previous_timestamp.map(|t| t.floor_seconds());
@@ -133,9 +133,9 @@ How to fix this:
}
/// Try each source in order, with retries on total failure
fn fetch_with_retry<F, E>(&mut self, mut fetch: F, error_message: E) -> Result<OHLCCents>
fn fetch_with_retry<F, E>(&mut self, mut fetch: F, error_message: E) -> Result<OHLCCentsUnsigned>
where
F: FnMut(&mut dyn PriceSource) -> Option<Result<OHLCCents>>,
F: FnMut(&mut dyn PriceSource) -> Option<Result<OHLCCentsUnsigned>>,
E: Fn() -> String,
{
for retry in 0..=MAX_RETRIES {

View File

@@ -1,11 +1,11 @@
use std::collections::BTreeMap;
use brk_error::{Error, Result};
use brk_types::{Cents, Close, Date, Dollars, High, Low, OHLCCents, Open, Timestamp};
use brk_types::{CentsUnsigned, Close, Date, Dollars, High, Low, OHLCCentsUnsigned, Open, Timestamp};
/// Parse OHLC value from a JSON array element at given index
pub fn parse_cents(array: &[serde_json::Value], index: usize) -> Cents {
Cents::from(Dollars::from(
pub fn parse_cents(array: &[serde_json::Value], index: usize) -> CentsUnsigned {
CentsUnsigned::from(Dollars::from(
array
.get(index)
.and_then(|v| v.as_str())
@@ -14,9 +14,9 @@ pub fn parse_cents(array: &[serde_json::Value], index: usize) -> Cents {
))
}
/// Build OHLCCents from array indices 1-4 (open, high, low, close)
pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCents {
OHLCCents::from((
/// Build OHLCCentsUnsigned from array indices 1-4 (open, high, low, close)
pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCentsUnsigned {
OHLCCentsUnsigned::from((
Open::new(parse_cents(array, 1)),
High::new(parse_cents(array, 2)),
Low::new(parse_cents(array, 3)),
@@ -27,13 +27,13 @@ pub fn ohlc_from_array(array: &[serde_json::Value]) -> OHLCCents {
/// Compute OHLC for a block from a time series of minute data.
/// Aggregates all candles between previous_timestamp and timestamp.
pub fn compute_ohlc_from_range(
tree: &BTreeMap<Timestamp, OHLCCents>,
tree: &BTreeMap<Timestamp, OHLCCentsUnsigned>,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
source_name: &str,
) -> Result<OHLCCents> {
) -> Result<OHLCCentsUnsigned> {
let previous_ohlc = previous_timestamp
.map_or(Some(OHLCCents::default()), |t| tree.get(&t).cloned());
.map_or(Some(OHLCCentsUnsigned::default()), |t| tree.get(&t).cloned());
let last_ohlc = tree.get(&timestamp);
@@ -44,7 +44,7 @@ pub fn compute_ohlc_from_range(
}
let previous_ohlc = previous_ohlc.unwrap();
let mut result = OHLCCents::from(previous_ohlc.close);
let mut result = OHLCCentsUnsigned::from(previous_ohlc.close);
let start = previous_timestamp.unwrap_or(Timestamp::new(0));
let end = timestamp;

View File

@@ -1,7 +1,7 @@
use std::time::{Duration, Instant};
use brk_error::{Error, Result};
use brk_types::{Date, Height, OHLCCents, Timestamp};
use brk_types::{Date, Height, OHLCCentsUnsigned, Timestamp};
use tracing::info;
/// Default cooldown period for unhealthy sources (5 minutes)
@@ -12,17 +12,17 @@ pub trait PriceSource {
fn name(&self) -> &'static str;
/// Fetch daily OHLC for a date. Returns None if this source doesn't support date queries.
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCents>>;
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCentsUnsigned>>;
/// Fetch minute OHLC for a timestamp range. Returns None if unsupported.
fn get_1mn(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Option<Result<OHLCCents>>;
) -> Option<Result<OHLCCentsUnsigned>>;
/// Fetch OHLC by block height. Returns None if unsupported.
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCents>>;
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCentsUnsigned>>;
/// Check if the source is reachable
fn ping(&self) -> Result<()>;
@@ -115,7 +115,7 @@ impl<T: PriceSource> PriceSource for TrackedSource<T> {
self.source.name()
}
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCents>> {
fn get_date(&mut self, date: Date) -> Option<Result<OHLCCentsUnsigned>> {
self.try_fetch(|s| s.get_date(date))
}
@@ -123,11 +123,11 @@ impl<T: PriceSource> PriceSource for TrackedSource<T> {
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> Option<Result<OHLCCents>> {
) -> Option<Result<OHLCCentsUnsigned>> {
self.try_fetch(|s| s.get_1mn(timestamp, previous_timestamp))
}
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCents>> {
fn get_height(&mut self, height: Height) -> Option<Result<OHLCCentsUnsigned>> {
self.try_fetch(|s| s.get_height(height))
}

View File

@@ -21,19 +21,54 @@ impl ApiJson {
/// Removes redundant fields while preserving essential API information.
///
/// Transformations applied (in order):
/// 1. Remove error responses (304, 400, 404, 500)
/// 2. Compact responses to "returns": "Type"
/// 3. Remove per-endpoint tags and style
/// 4. Simplify parameter schema to type, remove param descriptions
/// 5. Remove summary
/// 6. Remove examples, replace $ref with type
/// 7. Flatten single-item allOf
/// 8. Flatten anyOf to type array
/// 9. Remove format
/// 10. Remove property descriptions
/// 11. Simplify properties to direct types
/// 1. Remove deprecated endpoints
/// 2. Remove contact/license from info
/// 3. Remove *Param schemas
/// 3. Remove error responses (304, 400, 404, 500)
/// 4. Compact responses to "returns": "Type"
/// 5. Remove per-endpoint tags and style
/// 6. Simplify parameter schema to type, remove param descriptions
/// 7. Remove summary and operationId
/// 8. Remove examples, replace $ref with type
/// 9. Flatten single-item allOf
/// 10. Flatten anyOf to type array
/// 11. Remove format
/// 12. Remove property descriptions
/// 13. Simplify properties to direct types
/// 14. Remove min/max constraints
/// 15. Trim descriptions to first paragraph, strip mempool.space links
/// 16. Remove required arrays from schemas
/// 17. Remove redundant "type": "object" when properties exist
/// 18. Flatten single-element type arrays
/// 19. Replace large enums (>20 values) with string type
fn compact_json(json: &str) -> String {
let mut spec: Value = serde_json::from_str(json).expect("Invalid OpenAPI JSON");
// Step 1: Remove deprecated endpoints from paths
if let Some(Value::Object(paths)) = spec.get_mut("paths") {
paths.retain(|_, v| {
if let Value::Object(path_obj) = v
&& let Some(Value::Object(get_obj)) = path_obj.get("get")
{
return get_obj.get("deprecated") != Some(&Value::Bool(true));
}
true
});
}
// Step 2: Remove contact/license from info
if let Some(Value::Object(info)) = spec.get_mut("info") {
info.remove("contact");
info.remove("license");
}
// Step 3: Remove *Param schemas from components
if let Some(Value::Object(components)) = spec.get_mut("components")
&& let Some(Value::Object(schemas)) = components.get_mut("schemas")
{
schemas.retain(|name, _| !name.ends_with("Param"));
}
compact_value(&mut spec);
serde_json::to_string(&spec).unwrap()
}
@@ -70,8 +105,9 @@ fn compact_value(value: &mut Value) {
}
}
// Step 5: Remove summary
// Step 7: Remove summary and operationId
obj.remove("summary");
obj.remove("operationId");
// Step 6: Remove examples, replace $ref with type
obj.remove("example");
@@ -113,10 +149,45 @@ fn compact_value(value: &mut Value) {
}
}
// Step 9: Remove format
// Step 11: Remove format
obj.remove("format");
// Step 10 & 11: Simplify properties (remove descriptions, simplify to direct types)
// Step 14: Remove min/max constraints
obj.remove("minimum");
obj.remove("maximum");
// Step 16: Remove required arrays from schemas (but keep boolean required on params)
if let Some(Value::Array(_)) = obj.get("required") {
obj.remove("required");
}
// Step 17: Flatten single-element type arrays: ["object"] -> "object"
if let Some(Value::Array(arr)) = obj.get("type").cloned()
&& arr.len() == 1
{
obj.insert("type".to_string(), arr.into_iter().next().unwrap());
}
// Step 18: Remove "type": "object" when properties exist (it's redundant)
if obj.contains_key("properties")
&& obj.get("type") == Some(&Value::String("object".to_string()))
{
obj.remove("type");
}
// Step 19: Replace large enums (>20 values) with just string type
if let Some(Value::Array(enum_values)) = obj.get("enum")
&& enum_values.len() > 20
{
obj.remove("enum");
}
// Step 15: Strip mempool.space links and keep only first paragraph of descriptions
if let Some(Value::String(desc)) = obj.get_mut("description") {
*desc = trim_description(desc);
}
// Step 12 & 13: Simplify properties (remove descriptions, simplify to direct types)
if let Some(Value::Object(props)) = obj.get_mut("properties") {
simplify_properties(props);
}
@@ -135,6 +206,23 @@ fn compact_value(value: &mut Value) {
}
}
/// Trim description to first paragraph and strip mempool.space endpoint links.
fn trim_description(desc: &str) -> String {
// First, strip mempool.space docs links (endpoint pattern with asterisks)
let desc = if let Some(idx) = desc.find("*[Mempool.space docs]") {
desc[..idx].trim()
} else {
desc
};
// Keep only the first paragraph (up to \n\n)
if let Some(idx) = desc.find("\n\n") {
desc[..idx].trim().to_string()
} else {
desc.trim().to_string()
}
}
fn extract_return_type(responses: &Map<String, Value>) -> Option<String> {
let resp_200 = responses.get("200")?;
let content = resp_200.get("content")?;
@@ -226,11 +314,27 @@ fn simplify_properties(props: &mut Map<String, Value>) {
}
fn simplify_property_value(obj: &mut Map<String, Value>) -> Value {
// Remove validation constraints
for key in &["default", "minItems", "maxItems", "uniqueItems"] {
// Remove validation constraints, format, and examples
for key in &[
"default",
"minItems",
"maxItems",
"uniqueItems",
"minimum",
"maximum",
"format",
"examples",
"example",
"description",
] {
obj.remove(*key);
}
// Remove "items": true (means any type, not useful)
if obj.get("items") == Some(&Value::Bool(true)) {
obj.remove("items");
}
// Handle $ref - convert to type (runs before recursion would)
if let Some(Value::String(ref_path)) = obj.remove("$ref") {
let type_name = ref_path.split('/').next_back().unwrap_or("any");

View File

@@ -17,6 +17,7 @@ zstd = ["vecdb/zstd"]
[dependencies]
brk_types = { workspace = true }
brk_traversable_derive = { workspace = true, optional = true }
indexmap = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

View File

@@ -1,5 +1,6 @@
use std::{collections::BTreeMap, fmt::Display};
pub use indexmap::IndexMap;
pub use brk_types::{Index, MetricLeaf, MetricLeafWithSchema, TreeNode};
#[cfg(feature = "derive")]
@@ -197,7 +198,7 @@ impl<T: Traversable> Traversable for Option<T> {
fn to_tree_node(&self) -> TreeNode {
match self {
Some(inner) => inner.to_tree_node(),
None => TreeNode::Branch(BTreeMap::new()),
None => TreeNode::Branch(IndexMap::new()),
}
}
@@ -233,7 +234,7 @@ impl<K: Display, V: Traversable> Traversable for BTreeMap<K, V> {
/// (e.g., Unpriced variants where dollar fields are not needed)
impl Traversable for () {
fn to_tree_node(&self) -> TreeNode {
TreeNode::Branch(BTreeMap::new())
TreeNode::Branch(IndexMap::new())
}
fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {

View File

@@ -94,7 +94,7 @@ pub fn derive_traversable(input: TokenStream) -> TokenStream {
return TokenStream::from(quote! {
impl #impl_generics Traversable for #name #ty_generics {
fn to_tree_node(&self) -> brk_traversable::TreeNode {
brk_traversable::TreeNode::Branch(std::collections::BTreeMap::new())
brk_traversable::TreeNode::Branch(brk_traversable::IndexMap::new())
}
fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn vecdb::AnyExportableVec> {
@@ -362,8 +362,8 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T
// No flatten fields - use merge_entry for each to handle duplicates
(
quote! {
let mut collected: std::collections::BTreeMap<String, brk_traversable::TreeNode> =
std::collections::BTreeMap::new();
let mut collected: brk_traversable::IndexMap<String, brk_traversable::TreeNode> =
brk_traversable::IndexMap::new();
for entry in [#(#normal_entries,)*].into_iter().flatten() {
brk_traversable::TreeNode::merge_node(&mut collected, entry.0, entry.1)
.expect("Conflicting values for same key");
@@ -375,8 +375,8 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T
// Only flatten fields - explicit type annotation needed
(
quote! {
let mut collected: std::collections::BTreeMap<String, brk_traversable::TreeNode> =
std::collections::BTreeMap::new();
let mut collected: brk_traversable::IndexMap<String, brk_traversable::TreeNode> =
brk_traversable::IndexMap::new();
},
quote! { #(#flatten_entries)* },
)
@@ -384,8 +384,8 @@ fn generate_field_traversals(infos: &[FieldInfo], merge: bool) -> proc_macro2::T
// Both normal and flatten fields - use merge_entry for normal fields
(
quote! {
let mut collected: std::collections::BTreeMap<String, brk_traversable::TreeNode> =
std::collections::BTreeMap::new();
let mut collected: brk_traversable::IndexMap<String, brk_traversable::TreeNode> =
brk_traversable::IndexMap::new();
for entry in [#(#normal_entries,)*].into_iter().flatten() {
brk_traversable::TreeNode::merge_node(&mut collected, entry.0, entry.1)
.expect("Conflicting values for same key");

View File

@@ -12,6 +12,7 @@ bitcoin = { workspace = true }
brk_error = { workspace = true, features = ["bitcoin", "jiff", "serde_json", "vecdb"] }
byteview = { workspace = true }
derive_more = { workspace = true }
indexmap = { workspace = true }
itoa = "1.0.17"
jiff = { workspace = true }
rapidhash = "4.2.1"

View File

@@ -0,0 +1,138 @@
use std::ops::{Add, AddAssign, Div, Sub, SubAssign};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, Formattable};
use super::{CentsSquaredSats, CentsUnsigned, Sats};
/// Cents × Sats (u128) - price in cents multiplied by amount in sats.
/// Uses u128 because large amounts at any price can overflow u64.
#[derive(
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema,
)]
pub struct CentsSats(u128);
impl CentsSats {
pub const ZERO: Self = Self(0);
#[inline(always)]
pub const fn new(value: u128) -> Self {
Self(value)
}
/// Compute from price and sats using widening multiplication
#[inline(always)]
pub fn from_price_sats(price: CentsUnsigned, sats: Sats) -> Self {
Self(price.inner() as u128 * sats.as_u128())
}
#[inline(always)]
pub const fn inner(self) -> u128 {
self.0
}
#[inline(always)]
pub const fn as_u128(self) -> u128 {
self.0
}
/// Convert to CentsUnsigned by dividing by ONE_BTC.
#[inline(always)]
pub fn to_cents(self) -> CentsUnsigned {
CentsUnsigned::new((self.0 / Sats::ONE_BTC_U128) as u64)
}
/// Get the realized price (cents per BTC) given the sats amount.
#[inline(always)]
pub fn realized_price(self, sats: Sats) -> CentsUnsigned {
if sats.is_zero() {
return CentsUnsigned::ZERO;
}
let result = self.0 / sats.as_u128();
CentsUnsigned::new(result.min(u32::MAX as u128) as u64)
}
/// Compute investor cap (price² × sats) = price × (price × sats)
#[inline(always)]
pub fn to_investor_cap(self, price: CentsUnsigned) -> CentsSquaredSats {
CentsSquaredSats::new(price.inner() as u128 * self.0)
}
}
impl Add for CentsSats {
type Output = Self;
#[inline(always)]
fn add(self, rhs: Self) -> Self {
Self(self.0 + rhs.0)
}
}
impl AddAssign for CentsSats {
#[inline(always)]
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
}
}
impl Sub for CentsSats {
type Output = Self;
#[inline(always)]
fn sub(self, rhs: Self) -> Self {
Self(self.0 - rhs.0)
}
}
impl SubAssign for CentsSats {
#[inline(always)]
fn sub_assign(&mut self, rhs: Self) {
self.0 -= rhs.0;
}
}
impl From<u128> for CentsSats {
#[inline(always)]
fn from(value: u128) -> Self {
Self(value)
}
}
impl From<CentsSats> for u128 {
#[inline(always)]
fn from(value: CentsSats) -> Self {
value.0
}
}
impl Div<usize> for CentsSats {
type Output = Self;
#[inline(always)]
fn div(self, rhs: usize) -> Self {
Self(self.0 / rhs as u128)
}
}
impl Formattable for CentsSats {
#[inline(always)]
fn may_need_escaping() -> bool {
false
}
}
impl Bytes for CentsSats {
type Array = [u8; 16];
fn to_bytes(&self) -> Self::Array {
self.0.to_le_bytes()
}
fn from_bytes(bytes: &[u8]) -> vecdb::Result<Self> {
Ok(Self(u128::from_bytes(bytes)?))
}
}
impl std::fmt::Display for CentsSats {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}

View File

@@ -1,91 +0,0 @@
use serde::{Deserialize, Serialize};
use super::Dollars;
/// Compact signed cents (i32) - memory-efficient for map keys.
/// Supports prices from -$21,474,836.47 to $21,474,836.47 (i32 range / 100).
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct CentsSignedCompact(i32);
impl CentsSignedCompact {
pub const ZERO: Self = Self(0);
#[inline]
pub const fn new(value: i32) -> Self {
Self(value)
}
#[inline]
pub const fn inner(self) -> i32 {
self.0
}
#[inline]
pub fn is_negative(self) -> bool {
self.0 < 0
}
#[inline]
pub fn to_dollars(self) -> Dollars {
Dollars::from(self.0 as f64 / 100.0)
}
#[inline]
pub fn checked_sub(self, rhs: Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self)
}
}
impl From<Dollars> for CentsSignedCompact {
#[inline]
fn from(value: Dollars) -> Self {
let f = f64::from(value);
if f.is_nan() {
Self::ZERO
} else {
let cents = (f * 100.0).round();
debug_assert!(
cents >= i32::MIN as f64 && cents <= i32::MAX as f64,
"Price ${} exceeds CentsSignedCompact range (~$21.5M)",
f
);
Self(cents as i32)
}
}
}
impl From<CentsSignedCompact> for Dollars {
#[inline]
fn from(value: CentsSignedCompact) -> Self {
value.to_dollars()
}
}
impl From<i32> for CentsSignedCompact {
#[inline]
fn from(value: i32) -> Self {
Self(value)
}
}
impl From<CentsSignedCompact> for i32 {
#[inline]
fn from(value: CentsSignedCompact) -> Self {
value.0
}
}
impl From<CentsSignedCompact> for f64 {
#[inline]
fn from(value: CentsSignedCompact) -> Self {
value.0 as f64
}
}
impl std::fmt::Display for CentsSignedCompact {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut buf = itoa::Buffer::new();
let str = buf.format(self.0);
f.write_str(str)
}
}

View File

@@ -0,0 +1,133 @@
use std::ops::{Add, AddAssign, Div, Sub, SubAssign};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, Formattable};
/// Raw cents squared (u128) - stores cents² × sats without division.
/// Used for precise accumulation of investor cap values: Σ(price² × sats).
/// investor_price = investor_cap_raw / realized_cap_raw
/// Arithmetic below uses unchecked `+`/`-` (panics on overflow in debug
/// builds, wraps in release), matching the other cents newtypes.
#[derive(
    Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema,
)]
pub struct CentsSquaredSats(u128);
impl CentsSquaredSats {
    pub const ZERO: Self = Self(0);

    /// Wraps a raw cents²×sats accumulator value.
    #[inline(always)]
    pub const fn new(value: u128) -> Self {
        Self(value)
    }

    /// Returns the raw accumulator value.
    #[inline(always)]
    pub const fn inner(self) -> u128 {
        self.0
    }
}
impl Div<u128> for CentsSquaredSats {
    type Output = u128;

    /// Integer division of the raw accumulator; panics if `rhs` is zero.
    #[inline(always)]
    fn div(self, rhs: u128) -> u128 {
        self.inner() / rhs
    }
}
impl AddAssign<u128> for CentsSquaredSats {
#[inline(always)]
fn add_assign(&mut self, rhs: u128) {
self.0 += rhs;
}
}
impl SubAssign<u128> for CentsSquaredSats {
#[inline(always)]
fn sub_assign(&mut self, rhs: u128) {
self.0 -= rhs;
}
}
impl Add for CentsSquaredSats {
    type Output = Self;

    /// Sums two accumulators.
    #[inline(always)]
    fn add(self, rhs: Self) -> Self {
        Self(self.inner() + rhs.inner())
    }
}
impl AddAssign for CentsSquaredSats {
#[inline(always)]
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
}
}
impl Sub for CentsSquaredSats {
    type Output = Self;

    /// Difference of two accumulators.
    #[inline(always)]
    fn sub(self, rhs: Self) -> Self {
        Self(self.inner() - rhs.inner())
    }
}
impl SubAssign for CentsSquaredSats {
#[inline(always)]
fn sub_assign(&mut self, rhs: Self) {
self.0 -= rhs.0;
}
}
impl From<u128> for CentsSquaredSats {
#[inline(always)]
fn from(value: u128) -> Self {
Self(value)
}
}
impl From<CentsSquaredSats> for u128 {
    /// Extracts the raw accumulator value.
    #[inline(always)]
    fn from(value: CentsSquaredSats) -> Self {
        value.inner()
    }
}
impl From<usize> for CentsSquaredSats {
#[inline(always)]
fn from(value: usize) -> Self {
Self(value as u128)
}
}
impl Div<usize> for CentsSquaredSats {
    type Output = Self;

    /// Divides the accumulator by a count; panics if `rhs` is zero.
    #[inline(always)]
    fn div(self, rhs: usize) -> Self {
        Self(self.inner() / rhs as u128)
    }
}
impl Formattable for CentsSquaredSats {
    /// Values render as bare decimal digits (see `Display`), so no
    /// escaping is ever required.
    #[inline(always)]
    fn may_need_escaping() -> bool {
        false
    }
}
impl Bytes for CentsSquaredSats {
    type Array = [u8; 16];

    /// Little-endian encoding of the raw u128.
    fn to_bytes(&self) -> Self::Array {
        self.0.to_le_bytes()
    }

    /// Decodes via vecdb's `Bytes` impl for `u128` (the inverse of `to_bytes`).
    fn from_bytes(bytes: &[u8]) -> vecdb::Result<Self> {
        u128::from_bytes(bytes).map(Self)
    }
}
impl std::fmt::Display for CentsSquaredSats {
    /// Writes the raw accumulator as a decimal integer.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.inner())
    }
}

View File

@@ -4,7 +4,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Formattable, Pco};
use super::Dollars;
use super::{CentsSats, Dollars, Sats};
/// Unsigned cents (u64) - for values that should never be negative.
/// Used for invested capital, realized cap, etc.
@@ -33,11 +33,16 @@ impl CentsUnsigned {
Self(value)
}
#[inline]
#[inline(always)]
pub const fn inner(self) -> u64 {
self.0
}
#[inline(always)]
pub const fn as_u128(self) -> u128 {
self.0 as u128
}
#[inline]
pub fn checked_sub(self, rhs: Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self)
@@ -84,6 +89,13 @@ impl From<u64> for CentsUnsigned {
}
}
impl From<usize> for CentsUnsigned {
#[inline]
fn from(value: usize) -> Self {
Self(value as u64)
}
}
impl From<CentsUnsigned> for u64 {
#[inline]
fn from(value: CentsUnsigned) -> Self {
@@ -113,6 +125,17 @@ impl From<CentsUnsigned> for f64 {
}
}
impl From<f64> for CentsUnsigned {
#[inline]
fn from(value: f64) -> Self {
if value.is_nan() || value < 0.0 {
Self::ZERO
} else {
Self(value as u64)
}
}
}
impl Add for CentsUnsigned {
type Output = Self;
#[inline]
@@ -167,6 +190,14 @@ impl Mul<usize> for CentsUnsigned {
}
}
impl Mul<Sats> for CentsUnsigned {
type Output = CentsSats;
#[inline]
fn mul(self, sats: Sats) -> CentsSats {
CentsSats::new(self.as_u128() * sats.as_u128())
}
}
impl Div<CentsUnsigned> for CentsUnsigned {
type Output = Self;
#[inline]

View File

@@ -1,6 +1,8 @@
use std::ops::Sub;
use serde::{Deserialize, Serialize};
use super::Dollars;
use super::{CentsUnsigned, Dollars};
/// Compact unsigned cents (u32) - memory-efficient for map keys.
/// Supports values from $0.00 to $42,949,672.95 (u32::MAX / 100).
@@ -21,6 +23,11 @@ impl CentsUnsignedCompact {
self.0
}
#[inline(always)]
pub const fn as_u128(self) -> u128 {
self.0 as u128
}
#[inline]
pub fn to_dollars(self) -> Dollars {
Dollars::from(self.0 as f64 / 100.0)
@@ -83,6 +90,34 @@ impl From<CentsUnsignedCompact> for f64 {
}
}
impl From<CentsUnsigned> for CentsUnsignedCompact {
#[inline]
fn from(value: CentsUnsigned) -> Self {
let v = value.inner();
debug_assert!(
v <= u32::MAX as u64,
"CentsUnsigned {} exceeds CentsUnsignedCompact max",
v
);
Self(v as u32)
}
}
impl From<CentsUnsignedCompact> for CentsUnsigned {
#[inline]
fn from(value: CentsUnsignedCompact) -> Self {
CentsUnsigned::new(value.0 as u64)
}
}
impl Sub for CentsUnsignedCompact {
type Output = Self;
#[inline(always)]
fn sub(self, rhs: Self) -> Self {
Self(self.0 - rhs.0)
}
}
impl std::fmt::Display for CentsUnsignedCompact {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut buf = itoa::Buffer::new();

View File

@@ -13,7 +13,7 @@ use vecdb::{CheckedSub, Formattable, Pco};
use crate::{Low, Open};
use super::{Bitcoin, Cents, Close, High, Sats, StoredF32, StoredF64};
use super::{Bitcoin, CentsSigned, Close, High, Sats, StoredF32, StoredF64};
/// US Dollar amount as floating point
#[derive(Debug, Default, Clone, Copy, Deref, Serialize, Deserialize, Pco, JsonSchema)]
@@ -38,7 +38,7 @@ impl Dollars {
}
pub fn round_to(self, digits: i32) -> Self {
Self::from(Cents::from(self).round_to(digits))
Self::from(CentsSigned::from(self).round_to(digits))
}
pub fn is_negative(&self) -> bool {
@@ -68,13 +68,6 @@ impl From<f64> for Dollars {
}
}
impl From<Cents> for Dollars {
#[inline]
fn from(value: Cents) -> Self {
Self(f64::from(value) / 100.0)
}
}
impl From<Dollars> for f32 {
#[inline]
fn from(value: Dollars) -> Self {
@@ -127,14 +120,14 @@ impl From<usize> for Dollars {
impl Add for Dollars {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self::from(Cents::from(self) + Cents::from(rhs))
Self::from(CentsSigned::from(self) + CentsSigned::from(rhs))
}
}
impl Sub for Dollars {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self::from(Cents::from(self) - Cents::from(rhs))
Self::from(CentsSigned::from(self) - CentsSigned::from(rhs))
}
}
@@ -177,7 +170,7 @@ impl Div<usize> for Dollars {
if self.is_nan() || rhs == 0 {
Dollars::NAN
} else {
Self::from(Cents::from(self) / rhs)
Self::from(CentsSigned::from(self) / rhs)
}
}
}
@@ -195,7 +188,7 @@ impl Div<f64> for Dollars {
if self.is_nan() || rhs == 0.0 {
Dollars::NAN
} else {
Dollars::from(Cents::from(Self::from(self.0 / rhs)))
Dollars::from(CentsSigned::from(Self::from(self.0 / rhs)))
}
}
}
@@ -214,28 +207,28 @@ impl Div<Bitcoin> for Dollars {
impl Mul<Dollars> for Dollars {
type Output = Self;
fn mul(self, rhs: Dollars) -> Self::Output {
Self::from(Cents::from(self) * Cents::from(rhs))
Self::from(CentsSigned::from(self) * CentsSigned::from(rhs))
}
}
impl Mul<Close<Dollars>> for Dollars {
type Output = Self;
fn mul(self, rhs: Close<Dollars>) -> Self::Output {
Self::from(Cents::from(self) * Cents::from(*rhs))
Self::from(CentsSigned::from(self) * CentsSigned::from(*rhs))
}
}
impl Mul<Dollars> for Close<Dollars> {
type Output = Dollars;
fn mul(self, rhs: Dollars) -> Self::Output {
Dollars::from(Cents::from(*self) * Cents::from(rhs))
Dollars::from(CentsSigned::from(*self) * CentsSigned::from(rhs))
}
}
impl Mul<usize> for Close<Dollars> {
type Output = Dollars;
fn mul(self, rhs: usize) -> Self::Output {
Dollars::from(Cents::from(*self) * rhs)
Dollars::from(CentsSigned::from(*self) * rhs)
}
}
@@ -277,8 +270,8 @@ impl Mul<Sats> for Dollars {
if self.is_nan() {
self
} else {
Self::from(Cents::from(
u128::from(rhs) * u128::from(Cents::from(self)) / Sats::ONE_BTC_U128,
Self::from(CentsSigned::from(
u128::from(rhs) * u128::from(CentsSigned::from(self)) / Sats::ONE_BTC_U128,
))
}
}
@@ -301,7 +294,7 @@ impl Mul<StoredF64> for Dollars {
impl Mul<i64> for Dollars {
type Output = Self;
fn mul(self, rhs: i64) -> Self::Output {
Self::from(Cents::from(self) * rhs)
Self::from(CentsSigned::from(self) * rhs)
}
}
@@ -311,15 +304,22 @@ impl Mul<usize> for Dollars {
if self.is_nan() {
self
} else {
Self::from(Cents::from(self) * rhs)
Self::from(CentsSigned::from(self) * rhs)
}
}
}
impl From<u64> for Dollars {
#[inline]
fn from(value: u64) -> Self {
Self::from(CentsSigned::from(value))
}
}
impl From<u128> for Dollars {
#[inline]
fn from(value: u128) -> Self {
Self::from(Cents::from(value))
Self::from(CentsSigned::from(value))
}
}
@@ -340,13 +340,13 @@ impl From<Close<Dollars>> for u128 {
impl From<Dollars> for u128 {
#[inline]
fn from(value: Dollars) -> Self {
u128::from(Cents::from(value))
u128::from(CentsSigned::from(value))
}
}
impl AddAssign for Dollars {
fn add_assign(&mut self, rhs: Self) {
*self = Dollars::from(Cents::from(*self) + Cents::from(rhs));
*self = Dollars::from(CentsSigned::from(*self) + CentsSigned::from(rhs));
}
}
@@ -355,8 +355,8 @@ impl CheckedSub for Dollars {
if self.is_nan() {
Some(self)
} else {
Cents::from(self)
.checked_sub(Cents::from(rhs))
CentsSigned::from(self)
.checked_sub(CentsSigned::from(rhs))
.map(Dollars::from)
}
}
@@ -365,7 +365,9 @@ impl CheckedSub for Dollars {
impl CheckedSub<usize> for Dollars {
fn checked_sub(self, rhs: usize) -> Option<Self> {
Some(Dollars::from(
Cents::from(self).checked_sub(Cents::from(rhs)).unwrap(),
CentsSigned::from(self)
.checked_sub(CentsSigned::from(rhs))
.unwrap(),
))
}
}

View File

@@ -4,7 +4,6 @@ pub use vecdb::{CheckedSub, Exit, PrintableIndex, Version};
mod address;
mod addressbytes;
mod age;
mod addresschainstats;
mod addresshash;
mod addressindexoutpoint;
@@ -14,6 +13,7 @@ mod addressparam;
mod addressstats;
mod addresstxidsparam;
mod addressvalidation;
mod age;
mod anyaddressindex;
mod bitcoin;
mod blkmetadata;
@@ -35,12 +35,11 @@ mod blockstatus;
mod blocktimestamp;
mod blockweightentry;
mod bytes;
mod cents;
mod centscompact;
mod cents_signed;
mod cents_signed_compact;
mod cents_unsigned;
mod cents_unsigned_compact;
mod cents_sats;
mod cents_squared_sats;
mod datarange;
mod datarangeformat;
mod date;
@@ -48,10 +47,10 @@ mod dateindex;
mod decadeindex;
mod deser;
mod difficultyadjustment;
mod diskusage;
mod difficultyadjustmententry;
mod difficultyentry;
mod difficultyepoch;
mod diskusage;
mod dollars;
mod emptyaddressdata;
mod emptyaddressindex;
@@ -81,22 +80,21 @@ mod mempoolinfo;
mod metric;
mod metriccount;
mod metricdata;
mod metricoutput;
mod metricparam;
mod metrics;
mod metricoutput;
mod metricselection;
mod metricselectionlegacy;
mod metricspaginated;
mod metricwithindex;
mod monthindex;
mod ohlc;
mod oracle_bins;
mod opreturnindex;
mod option_ext;
mod oracle_bins;
mod outpoint;
mod output;
mod outputtype;
mod pairoutputindex;
mod p2aaddressindex;
mod p2abytes;
mod p2msoutputindex;
@@ -116,16 +114,17 @@ mod p2wshaddressindex;
mod p2wshbytes;
mod pagination;
mod paginationindex;
mod pairoutputindex;
mod percentile;
mod pool;
mod pooldetail;
mod port;
mod poolinfo;
mod pools;
mod poolslug;
mod poolslugparam;
mod poolssummary;
mod poolstats;
mod port;
mod quarterindex;
mod rawlocktime;
mod recommendedfees;
@@ -145,9 +144,9 @@ mod stored_u64;
mod stored_u8;
mod supply_state;
mod syncstatus;
mod term;
mod timeperiod;
mod timeperiodparam;
mod term;
mod timestamp;
mod timestampparam;
mod treenode;
@@ -180,7 +179,6 @@ mod yearindex;
pub use address::*;
pub use addressbytes::*;
pub use age::*;
pub use addresschainstats::*;
pub use addresshash::*;
pub use addressindexoutpoint::*;
@@ -190,6 +188,7 @@ pub use addressparam::*;
pub use addressstats::*;
pub use addresstxidsparam::*;
pub use addressvalidation::*;
pub use age::*;
pub use anyaddressindex::*;
pub use bitcoin::*;
pub use blkmetadata::*;
@@ -211,12 +210,11 @@ pub use blockstatus::*;
pub use blocktimestamp::*;
pub use blockweightentry::*;
pub use bytes::*;
pub use cents::*;
pub use centscompact::*;
pub use cents_signed::*;
pub use cents_signed_compact::*;
pub use cents_unsigned::*;
pub use cents_unsigned_compact::*;
pub use cents_sats::*;
pub use cents_squared_sats::*;
pub use datarange::*;
pub use datarangeformat::*;
pub use date::*;
@@ -224,10 +222,10 @@ pub use dateindex::*;
pub use decadeindex::*;
pub use deser::*;
pub use difficultyadjustment::*;
pub use diskusage::*;
pub use difficultyadjustmententry::*;
pub use difficultyentry::*;
pub use difficultyepoch::*;
pub use diskusage::*;
pub use dollars::*;
pub use emptyaddressdata::*;
pub use emptyaddressindex::*;
@@ -257,22 +255,21 @@ pub use mempoolinfo::*;
pub use metric::*;
pub use metriccount::*;
pub use metricdata::*;
pub use metricoutput::*;
pub use metricparam::*;
pub use metrics::*;
pub use metricoutput::*;
pub use metricselection::*;
pub use metricselectionlegacy::*;
pub use metricspaginated::*;
pub use metricwithindex::*;
pub use monthindex::*;
pub use ohlc::*;
pub use oracle_bins::*;
pub use opreturnindex::*;
pub use option_ext::*;
pub use oracle_bins::*;
pub use outpoint::*;
pub use output::*;
pub use outputtype::*;
pub use pairoutputindex::*;
pub use p2aaddressindex::*;
pub use p2abytes::*;
pub use p2msoutputindex::*;
@@ -292,16 +289,17 @@ pub use p2wshaddressindex::*;
pub use p2wshbytes::*;
pub use pagination::*;
pub use paginationindex::*;
pub use pairoutputindex::*;
pub use percentile::*;
pub use pool::*;
pub use pooldetail::*;
pub use poolinfo::*;
pub use port::*;
pub use pools::*;
pub use poolslug::*;
pub use poolslugparam::*;
pub use poolssummary::*;
pub use poolstats::*;
pub use port::*;
pub use quarterindex::*;
pub use rawlocktime::*;
pub use recommendedfees::*;
@@ -312,8 +310,8 @@ pub use semesterindex::*;
pub use stored_bool::*;
pub use stored_f32::*;
pub use stored_f64::*;
pub use stored_i16::*;
pub use stored_i8::*;
pub use stored_i16::*;
pub use stored_string::*;
pub use stored_u8::*;
pub use stored_u16::*;

View File

@@ -1,9 +1,35 @@
use brk_error::{Error, Result};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, CheckedSub, Formattable};
use vecdb::{Bytes, Formattable};
use crate::{Bitcoin, Dollars, EmptyAddressData, Sats};
use crate::{CentsSats, CentsSquaredSats, CentsUnsigned, EmptyAddressData, Sats, SupplyState};
/// Snapshot of cost basis related state.
/// Uses CentsSats for single-UTXO price×value products and
/// CentsSquaredSats (u128) for the investor cap accumulator.
#[derive(Clone, Debug)]
pub struct CostBasisSnapshot {
    /// Price basis of the tracked supply, in unsigned cents.
    pub realized_price: CentsUnsigned,
    /// UTXO count and total sat value covered by this snapshot.
    pub supply_state: SupplyState,
    /// price × sats (fits u64 for individual UTXOs)
    pub price_sats: CentsSats,
    /// price² × sats (needs u128)
    pub investor_cap: CentsSquaredSats,
}
impl CostBasisSnapshot {
    /// Builds a snapshot for a single UTXO: the basis is the UTXO's own
    /// acquisition price, and both caps derive from price × value.
    #[inline]
    pub fn from_utxo(price: CentsUnsigned, supply: &SupplyState) -> Self {
        let price_sats = CentsSats::from_price_sats(price, supply.value);
        let investor_cap = price_sats.to_investor_cap(price);
        Self {
            realized_price: price,
            supply_state: supply.clone(),
            price_sats,
            investor_cap,
        }
    }
}
/// Data for a loaded (non-empty) address with current balance
#[derive(Debug, Default, Clone, Serialize, Deserialize, JsonSchema)]
@@ -21,8 +47,10 @@ pub struct LoadedAddressData {
pub received: Sats,
/// Satoshis sent by this address
pub sent: Sats,
/// The realized capitalization of this address
pub realized_cap: Dollars,
/// The realized capitalization: Σ(price × sats)
pub realized_cap_raw: CentsSats,
/// The investor capitalization: Σ(price² × sats)
pub investor_cap_raw: CentsSquaredSats,
}
impl LoadedAddressData {
@@ -30,21 +58,22 @@ impl LoadedAddressData {
(u64::from(self.received) - u64::from(self.sent)).into()
}
/// Max realized price for CentsCompact (i32::MAX / 100)
const MAX_REALIZED_PRICE: f64 = 21_000_000.0;
pub fn realized_price(&self) -> CentsUnsigned {
self.realized_cap_raw.realized_price(self.balance())
}
pub fn realized_price(&self) -> Dollars {
let p = (self.realized_cap / Bitcoin::from(self.balance())).round_to(4);
if p.is_negative() {
dbg!((
self.realized_cap,
self.balance(),
Bitcoin::from(self.balance()),
p
));
panic!("");
pub fn cost_basis_snapshot(&self) -> CostBasisSnapshot {
let realized_price = self.realized_price();
CostBasisSnapshot {
realized_price,
supply_state: SupplyState {
utxo_count: self.utxo_count() as u64,
value: self.balance(),
},
// Use exact value to avoid rounding errors from realized_price × balance
price_sats: CentsSats::new(self.realized_cap_raw.inner()),
investor_cap: self.investor_cap_raw,
}
p.min(Dollars::from(Self::MAX_REALIZED_PRICE))
}
#[inline]
@@ -75,44 +104,35 @@ impl LoadedAddressData {
self.funded_txo_count == self.spent_txo_count
}
pub fn receive(&mut self, amount: Sats, price: Option<Dollars>) {
pub fn receive(&mut self, amount: Sats, price: Option<CentsUnsigned>) {
self.receive_outputs(amount, price, 1);
}
pub fn receive_outputs(&mut self, amount: Sats, price: Option<Dollars>, output_count: u32) {
pub fn receive_outputs(
&mut self,
amount: Sats,
price: Option<CentsUnsigned>,
output_count: u32,
) {
self.received += amount;
self.funded_txo_count += output_count;
if let Some(price) = price {
let added = price * amount;
self.realized_cap += added;
if added.is_negative() || self.realized_cap.is_negative() {
dbg!((self.realized_cap, price, amount, added));
panic!();
}
let ps = CentsSats::from_price_sats(price, amount);
self.realized_cap_raw += ps;
self.investor_cap_raw += ps.to_investor_cap(price);
}
}
pub fn send(&mut self, amount: Sats, previous_price: Option<Dollars>) -> Result<()> {
pub fn send(&mut self, amount: Sats, previous_price: Option<CentsUnsigned>) -> Result<()> {
if self.balance() < amount {
return Err(Error::Internal("Previous amount smaller than sent amount"));
}
self.sent += amount;
self.spent_txo_count += 1;
if let Some(previous_price) = previous_price {
let subtracted = previous_price * amount;
let realized_cap = self.realized_cap.checked_sub(subtracted).unwrap();
if self.realized_cap.is_negative() || realized_cap.is_negative() {
dbg!((
self,
realized_cap,
previous_price,
amount,
previous_price * amount,
subtracted
));
panic!();
}
self.realized_cap = realized_cap;
if let Some(price) = previous_price {
let ps = CentsSats::from_price_sats(price, amount);
self.realized_cap_raw -= ps;
self.investor_cap_raw -= ps.to_investor_cap(price);
}
Ok(())
}
@@ -135,7 +155,8 @@ impl From<&EmptyAddressData> for LoadedAddressData {
padding: 0,
received: value.transfered,
sent: value.transfered,
realized_cap: Dollars::ZERO,
realized_cap_raw: CentsSats::ZERO,
investor_cap_raw: CentsSquaredSats::ZERO,
}
}
}
@@ -144,13 +165,14 @@ impl std::fmt::Display for LoadedAddressData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"tx_count: {}, funded_txo_count: {}, spent_txo_count: {}, received: {}, sent: {}, realized_cap: {}",
"tx_count: {}, funded_txo_count: {}, spent_txo_count: {}, received: {}, sent: {}, realized_cap_raw: {}, investor_cap_raw: {}",
self.tx_count,
self.funded_txo_count,
self.spent_txo_count,
self.received,
self.sent,
self.realized_cap,
self.realized_cap_raw,
self.investor_cap_raw,
)
}
}
@@ -173,7 +195,8 @@ impl Bytes for LoadedAddressData {
arr[12..16].copy_from_slice(self.padding.to_bytes().as_ref());
arr[16..24].copy_from_slice(self.received.to_bytes().as_ref());
arr[24..32].copy_from_slice(self.sent.to_bytes().as_ref());
arr[32..40].copy_from_slice(self.realized_cap.to_bytes().as_ref());
arr[32..48].copy_from_slice(self.realized_cap_raw.to_bytes().as_ref());
arr[48..64].copy_from_slice(self.investor_cap_raw.to_bytes().as_ref());
arr
}
@@ -185,7 +208,8 @@ impl Bytes for LoadedAddressData {
padding: u32::from_bytes(&bytes[12..16])?,
received: Sats::from_bytes(&bytes[16..24])?,
sent: Sats::from_bytes(&bytes[24..32])?,
realized_cap: Dollars::from_bytes(&bytes[32..40])?,
realized_cap_raw: CentsSats::from_bytes(&bytes[32..48])?,
investor_cap_raw: CentsSquaredSats::from_bytes(&bytes[48..64])?,
})
}
}

View File

@@ -15,21 +15,21 @@ use vecdb::{Bytes, Formattable, Pco, TransparentPco};
use crate::StoredF64;
use super::{Cents, Dollars, Sats};
use super::{CentsUnsigned, Dollars, Sats};
/// OHLC (Open, High, Low, Close) data in cents
#[derive(Debug, Default, Clone, JsonSchema)]
#[repr(C)]
pub struct OHLCCents {
pub open: Open<Cents>,
pub high: High<Cents>,
pub low: Low<Cents>,
pub close: Close<Cents>,
pub struct OHLCCentsUnsigned {
pub open: Open<CentsUnsigned>,
pub high: High<CentsUnsigned>,
pub low: Low<CentsUnsigned>,
pub close: Close<CentsUnsigned>,
}
impl From<(Open<Cents>, High<Cents>, Low<Cents>, Close<Cents>)> for OHLCCents {
impl From<(Open<CentsUnsigned>, High<CentsUnsigned>, Low<CentsUnsigned>, Close<CentsUnsigned>)> for OHLCCentsUnsigned {
#[inline]
fn from(value: (Open<Cents>, High<Cents>, Low<Cents>, Close<Cents>)) -> Self {
fn from(value: (Open<CentsUnsigned>, High<CentsUnsigned>, Low<CentsUnsigned>, Close<CentsUnsigned>)) -> Self {
Self {
open: value.0,
high: value.1,
@@ -39,9 +39,9 @@ impl From<(Open<Cents>, High<Cents>, Low<Cents>, Close<Cents>)> for OHLCCents {
}
}
impl From<Close<Cents>> for OHLCCents {
impl From<Close<CentsUnsigned>> for OHLCCentsUnsigned {
#[inline]
fn from(value: Close<Cents>) -> Self {
fn from(value: Close<CentsUnsigned>) -> Self {
Self {
open: Open::from(value),
high: High::from(value),
@@ -51,7 +51,7 @@ impl From<Close<Cents>> for OHLCCents {
}
}
impl Serialize for OHLCCents {
impl Serialize for OHLCCentsUnsigned {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
@@ -113,11 +113,11 @@ macro_rules! impl_ohlc_deserialize {
};
}
impl_ohlc_deserialize!(OHLCCents, Cents);
impl_ohlc_deserialize!(OHLCCentsUnsigned, CentsUnsigned);
impl_ohlc_deserialize!(OHLCDollars, Dollars);
impl_ohlc_deserialize!(OHLCSats, Sats);
impl Display for OHLCCents {
impl Display for OHLCCentsUnsigned {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
@@ -127,14 +127,14 @@ impl Display for OHLCCents {
}
}
impl Formattable for OHLCCents {
impl Formattable for OHLCCentsUnsigned {
#[inline(always)]
fn may_need_escaping() -> bool {
true
}
}
impl Bytes for OHLCCents {
impl Bytes for OHLCCentsUnsigned {
type Array = [u8; size_of::<Self>()];
fn to_bytes(&self) -> Self::Array {
@@ -148,15 +148,15 @@ impl Bytes for OHLCCents {
fn from_bytes(bytes: &[u8]) -> vecdb::Result<Self> {
Ok(Self {
open: Open::<Cents>::from_bytes(&bytes[0..8])?,
high: High::<Cents>::from_bytes(&bytes[8..16])?,
low: Low::<Cents>::from_bytes(&bytes[16..24])?,
close: Close::<Cents>::from_bytes(&bytes[24..32])?,
open: Open::<CentsUnsigned>::from_bytes(&bytes[0..8])?,
high: High::<CentsUnsigned>::from_bytes(&bytes[8..16])?,
low: Low::<CentsUnsigned>::from_bytes(&bytes[16..24])?,
close: Close::<CentsUnsigned>::from_bytes(&bytes[24..32])?,
})
}
}
impl Add for OHLCCents {
impl Add for OHLCCentsUnsigned {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self {
@@ -223,16 +223,16 @@ impl From<Close<Dollars>> for OHLCDollars {
}
}
impl From<OHLCCents> for OHLCDollars {
impl From<OHLCCentsUnsigned> for OHLCDollars {
#[inline]
fn from(value: OHLCCents) -> Self {
fn from(value: OHLCCentsUnsigned) -> Self {
Self::from(&value)
}
}
impl From<&OHLCCents> for OHLCDollars {
impl From<&OHLCCentsUnsigned> for OHLCDollars {
#[inline]
fn from(value: &OHLCCents) -> Self {
fn from(value: &OHLCCentsUnsigned) -> Self {
Self {
open: value.open.into(),
high: value.high.into(),
@@ -451,9 +451,9 @@ where
}
}
impl From<Open<Cents>> for Open<Dollars> {
impl From<Open<CentsUnsigned>> for Open<Dollars> {
#[inline]
fn from(value: Open<Cents>) -> Self {
fn from(value: Open<CentsUnsigned>) -> Self {
Self(Dollars::from(*value))
}
}
@@ -582,9 +582,9 @@ where
}
}
impl From<High<Cents>> for High<Dollars> {
impl From<High<CentsUnsigned>> for High<Dollars> {
#[inline]
fn from(value: High<Cents>) -> Self {
fn from(value: High<CentsUnsigned>) -> Self {
Self(Dollars::from(*value))
}
}
@@ -713,9 +713,9 @@ where
}
}
impl From<Low<Cents>> for Low<Dollars> {
impl From<Low<CentsUnsigned>> for Low<Dollars> {
#[inline]
fn from(value: Low<Cents>) -> Self {
fn from(value: Low<CentsUnsigned>) -> Self {
Self(Dollars::from(*value))
}
}
@@ -861,9 +861,9 @@ where
// #[inline]
// fn from(value: Close<A>) -> Self {
// Self(B::from(*value))
impl From<Close<Cents>> for Close<Dollars> {
impl From<Close<CentsUnsigned>> for Close<Dollars> {
#[inline]
fn from(value: Close<Cents>) -> Self {
fn from(value: Close<CentsUnsigned>) -> Self {
Self(Dollars::from(*value))
}
}

View File

@@ -11,7 +11,7 @@ use vecdb::{CheckedSub, Formattable, Pco, SaturatingAdd};
use crate::StoredF64;
use super::{Bitcoin, Cents, Dollars, Height};
use super::{Bitcoin, CentsUnsigned, Dollars, Height};
/// Satoshis
#[derive(
@@ -52,6 +52,7 @@ impl Sats {
pub const MAX: Self = Self(u64::MAX);
pub const COINBASE: Self = Self(u64::MAX);
pub const FIFTY_BTC: Self = Self(50_00_000_000);
pub const ONE_BTC_U64: u64 = 1_00_000_000;
pub const ONE_BTC_U128: u128 = 1_00_000_000;
pub fn new(sats: u64) -> Self {
@@ -66,6 +67,11 @@ impl Sats {
*self != Self::ZERO
}
#[inline(always)]
pub const fn as_u128(self) -> u128 {
self.0 as u128
}
pub fn is_max(&self) -> bool {
*self == Self::MAX
}
@@ -210,7 +216,7 @@ impl Sum for Sats {
impl Div<Dollars> for Sats {
type Output = Self;
fn div(self, rhs: Dollars) -> Self::Output {
let raw_cents = u64::from(Cents::from(rhs));
let raw_cents = u64::from(CentsUnsigned::from(rhs));
if raw_cents != 0 {
Self(self.0 * 100 / raw_cents)
} else {

View File

@@ -1,5 +1,6 @@
use std::collections::{BTreeMap, BTreeSet};
use std::collections::BTreeSet;
use indexmap::IndexMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -130,7 +131,7 @@ impl Eq for MetricLeafWithSchema {}
#[serde(untagged)]
pub enum TreeNode {
/// Branch node containing subcategories
Branch(BTreeMap<String, TreeNode>),
Branch(IndexMap<String, TreeNode>),
/// Leaf node containing metric metadata with schema
Leaf(MetricLeafWithSchema),
}
@@ -146,7 +147,7 @@ impl TreeNode {
}
}
pub fn as_mut_branch(&mut self) -> &mut BTreeMap<String, TreeNode> {
pub fn as_mut_branch(&mut self) -> &mut IndexMap<String, TreeNode> {
match self {
Self::Branch(b) => b,
_ => panic!(),
@@ -156,7 +157,7 @@ impl TreeNode {
/// Wraps a node in a Branch with the given key.
/// Used by #[traversable(wrap = "...")] to produce Branch { key: inner }.
pub fn wrap(key: &str, inner: Self) -> Self {
let mut map = BTreeMap::new();
let mut map = IndexMap::new();
map.insert(key.to_string(), inner);
Self::Branch(map)
}
@@ -171,7 +172,7 @@ impl TreeNode {
return Some(self);
};
let mut merged: BTreeMap<String, TreeNode> = BTreeMap::new();
let mut merged: IndexMap<String, TreeNode> = IndexMap::new();
for (key, node) in tree {
match node {
@@ -194,7 +195,7 @@ impl TreeNode {
/// If all entries in the map are leaves with the same metric name,
/// collapse them into a single leaf with merged indexes.
fn try_collapse_same_name_leaves(map: BTreeMap<String, TreeNode>) -> Self {
fn try_collapse_same_name_leaves(map: IndexMap<String, TreeNode>) -> Self {
if map.is_empty() {
return Self::Branch(map);
}
@@ -238,7 +239,7 @@ impl TreeNode {
/// Merges a node into the target map at the given key (consuming version).
/// Returns None if there's a conflict.
pub fn merge_node(
target: &mut BTreeMap<String, TreeNode>,
target: &mut IndexMap<String, TreeNode>,
key: String,
node: TreeNode,
) -> Option<()> {
@@ -261,7 +262,7 @@ impl TreeNode {
}
(existing @ Self::Leaf(_), Self::Branch(branch)) => {
let Self::Leaf(leaf) =
std::mem::replace(existing, Self::Branch(BTreeMap::new()))
std::mem::replace(existing, Self::Branch(IndexMap::new()))
else {
unreachable!()
};

View File

@@ -3,6 +3,7 @@
[![MIT Licensed](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/bitcoinresearchkit/brk/blob/main/docs/LICENSE.md)
[![Crates.io](https://img.shields.io/crates/v/brk.svg)](https://crates.io/crates/brk)
[![docs.rs](https://img.shields.io/docsrs/brk)](https://docs.rs/brk)
[![Supported by OpenSats](https://img.shields.io/badge/supported%20by-opensats-ff7b00)](https://opensats.org/)
Open-source on-chain analytics for Bitcoin. Combines functionality of [Glassnode](https://glassnode.com) (on-chain metrics), [mempool.space](https://mempool.space) (block explorer), and [electrs](https://github.com/romanz/electrs) (address index) into a single self-hostable package. [Bitview](https://bitview.space) is an analytics interface built entirely on BRK.
@@ -26,7 +27,7 @@ Browse metrics and charts visually. Use it free at [Bitview](https://bitview.spa
Query thousands of metrics and blockchain data in JSON or CSV. Freely accessible at [Bitview](https://bitview.space/api).
[Documentation](https://bitview.space/api) · [JavaScript](https://www.npmjs.com/package/brk-client) · [Python](https://pypi.org/project/brk-client) · [Rust](https://crates.io/crates/brk_client)
[Documentation](https://bitview.space/api) · [JavaScript](https://www.npmjs.com/package/brk-client) · [Python](https://pypi.org/project/brk-client) · [Rust](https://crates.io/crates/brk_client) · [LLM](https://bitview.space/api.json)
### Self-host
@@ -45,8 +46,8 @@ Build custom applications in Rust. Use the full stack or individual components (
- [Changelog](./CHANGELOG.md)
- [Support](./SUPPORT.md)
- [Contributing](https://github.com/bitcoinresearchkit/brk/issues)
[Discord](https://discord.gg/WACpShCB7M) · [Nostr](https://primal.net/p/nprofile1qqsfw5dacngjlahye34krvgz7u0yghhjgk7gxzl5ptm9v6n2y3sn03sqxu2e6)
- [Discord](https://discord.gg/WACpShCB7M)
- [Nostr](https://primal.net/p/nprofile1qqsfw5dacngjlahye34krvgz7u0yghhjgk7gxzl5ptm9v6n2y3sn03sqxu2e6)
Development supported by [OpenSats](https://opensats.org/).

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -33,7 +33,6 @@ import { style } from "../utils/elements.js";
/**
* @template T
* @typedef {Object} Series
* @property {string} key
* @property {string} id
* @property {number} paneIndex
* @property {PersistedValue<boolean>} active
@@ -448,16 +447,12 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
};
const serieses = {
/** @type {Map<string, PersistedValue<boolean>>} */
activeStates: new Map(),
/** @type {Map<string, Set<AnySeries>>} */
byKey: new Map(),
/** @type {Set<AnySeries>} */
all: new Set(),
refreshAll() {
serieses.byKey.forEach((set) => {
set.forEach((s) => {
if (s.active.value) s.fetch?.();
});
serieses.all.forEach((s) => {
if (s.active.value) s.fetch?.();
});
},
@@ -503,17 +498,12 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
const key = customKey ?? stringToId(name);
const id = `${unit.id}-${key}`;
// Reuse existing state if same name (links legends across panes, regardless of unit)
const existingActive = serieses.activeStates.get(key);
const active =
existingActive ??
createPersistedValue({
defaultValue: defaultActive ?? true,
storageKey: key,
urlKey: key,
...serdeBool,
});
if (!existingActive) serieses.activeStates.set(key, active);
const active = createPersistedValue({
defaultValue: defaultActive ?? true,
storageKey: `${chartId}-p${paneIndex}-${key}`,
urlKey: `${paneIndex === 0 ? "t" : "b"}-${key}`,
...serdeBool,
});
setOrder(-order);
@@ -533,18 +523,9 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
setActive(value) {
const wasActive = active.value;
active.set(value);
const linkedSeries = serieses.byKey.get(key);
linkedSeries?.forEach((s) => {
value ? s.show() : s.hide();
});
document.querySelectorAll(`[data-series="${key}"]`).forEach((el) => {
if (el instanceof HTMLInputElement && el.type === "checkbox") {
el.checked = value;
}
});
// Fetch data for ALL linked series, not just this one
value ? show() : hide();
if (value && !wasActive) {
linkedSeries?.forEach((s) => s.fetch?.());
_fetch?.();
}
panes.updateVisibility();
},
@@ -555,7 +536,6 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
tame,
hasData: () => hasData,
fetch: () => _fetch?.(),
key,
id,
paneIndex,
url: null,
@@ -563,18 +543,12 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
update,
remove() {
onRemove();
serieses.byKey.get(key)?.delete(series);
serieses.all.delete(series);
panes.seriesByHome.get(paneIndex)?.delete(series);
},
};
// Register series for cross-pane linking (by name only)
let keySet = serieses.byKey.get(key);
if (!keySet) {
keySet = new Set();
serieses.byKey.set(key, keySet);
}
keySet.add(series);
serieses.all.add(series);
/** @param {ChartableIndex} idx */
function setupIndexEffect(idx) {
@@ -1312,35 +1286,28 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
deserialize: (s) => /** @type {"lin" | "log"} */ (s),
});
/** @param {"lin" | "log"} value */
const applyScale = (value) => {
panes.whenReady(paneIndex, () => {
try {
ichart
.panes()
.at(paneIndex)
?.priceScale("right")
.applyOptions({
mode: value === "lin" ? 0 : 1,
});
} catch {}
});
/** @param {IPaneApi<Time>} pane @param {"lin" | "log"} value */
const applyScale = (pane, value) => {
try {
pane.priceScale("right").applyOptions({
mode: value === "lin" ? 0 : 1,
});
} catch {}
};
applyScale(persisted.value);
fieldsets.addIfNeeded({
id,
paneIndex,
position: "sw",
createChild() {
createChild(pane) {
applyScale(pane, persisted.value);
return createRadios({
choices: /** @type {const} */ (["lin", "log"]),
id: stringToId(`${id} ${paneIndex}`),
initialValue: persisted.value,
onChange(value) {
persisted.set(value);
applyScale(value);
applyScale(pane, value);
},
});
},
@@ -1472,12 +1439,7 @@ export function createChart({ parent, id: chartId, brk, fitContent }) {
// Remove old series AFTER adding new ones to prevent pane collapse
oldSeries.forEach((s) => s.remove());
// Ensure other pane's series are in their correct pane before applying scale
// (they may have been collapsed when this pane was empty)
const otherPaneIndex = paneIndex === 0 ? 1 : 0;
panes.moveTo(otherPaneIndex, otherPaneIndex);
// Apply scale after series are created and panes are properly separated
// Store scale config - it will be applied when createForPane runs after updateVisibility
applyScaleForUnit(paneIndex, unit);
},

View File

@@ -50,19 +50,17 @@ export function createLegend() {
}
legends[order] = div;
const { input, label } = createLabeledInput({
const { label } = createLabeledInput({
inputId: stringToId(`legend-${series.id}`),
inputName: stringToId(`selected-${series.id}`),
inputValue: "value",
title: "Click to toggle",
inputChecked: series.active.value,
onClick: (event) => {
event.preventDefault();
onClick: () => {
series.setActive(!series.active.value);
},
type: "checkbox",
});
input.dataset.series = series.key;
const spanMain = window.document.createElement("span");
spanMain.classList.add("main");
@@ -93,7 +91,7 @@ export function createLegend() {
anchor.href = series.url;
anchor.target = "_blank";
anchor.rel = "noopener noreferrer";
anchor.title = "Click to view data";
anchor.title = "Open the metric data in a new tab";
div.append(anchor);
}
},

View File

@@ -45,14 +45,13 @@ export function createChainSection(ctx) {
const {
colors,
brk,
fromSizePattern,
fromFullnessPattern,
fromDollarsPattern,
fromFeeRatePattern,
fromSumStatsPattern,
fromBaseStatsPattern,
fromFullStatsPattern,
fromStatsPattern,
fromCoinbasePattern,
fromValuePattern,
fromBlockCountWithUnit,
fromIntervalPattern,
fromCountPattern,
fromSupplyPattern,
} = ctx;
const {
@@ -132,19 +131,19 @@ export function createChainSection(ctx) {
{
name: "New",
title: `${titlePrefix}New Address Count`,
bottom: fromDollarsPattern(distribution.newAddrCount[key], Unit.count),
bottom: fromFullStatsPattern(distribution.newAddrCount[key], Unit.count),
},
{
name: "Growth Rate",
title: `${titlePrefix}Address Growth Rate`,
bottom: fromFullnessPattern(distribution.growthRate[key], Unit.ratio),
bottom: fromBaseStatsPattern(distribution.growthRate[key], Unit.ratio),
},
{
name: "Activity",
tree: activityTypes.map((a) => ({
name: a.name,
title: `${titlePrefix}${a.name} Address Count`,
bottom: fromFullnessPattern(
bottom: fromBaseStatsPattern(
distribution.addressActivity[key][a.key],
Unit.count,
),
@@ -297,7 +296,7 @@ export function createChainSection(ctx) {
name: "Count",
title: "Block Count",
bottom: [
...fromBlockCountWithUnit(blocks.count.blockCount, Unit.count),
...fromCountPattern(blocks.count.blockCount, Unit.count),
line({
metric: blocks.count.blockCountTarget,
name: "Target",
@@ -339,7 +338,7 @@ export function createChainSection(ctx) {
name: "Interval",
title: "Block Interval",
bottom: [
...fromIntervalPattern(blocks.interval, Unit.secs),
...fromBaseStatsPattern(blocks.interval, Unit.secs, "", { avgActive: false }),
priceLine({ ctx, unit: Unit.secs, name: "Target", number: 600 }),
],
},
@@ -347,7 +346,7 @@ export function createChainSection(ctx) {
name: "Size",
title: "Block Size",
bottom: [
...fromSizePattern(blocks.size, Unit.bytes),
...fromSumStatsPattern(blocks.size, Unit.bytes),
line({
metric: blocks.totalSize,
name: "Total",
@@ -355,8 +354,8 @@ export function createChainSection(ctx) {
unit: Unit.bytes,
defaultActive: false,
}),
...fromFullnessPattern(blocks.vbytes, Unit.vb),
...fromFullnessPattern(blocks.weight, Unit.wu),
...fromBaseStatsPattern(blocks.vbytes, Unit.vb),
...fromBaseStatsPattern(blocks.weight, Unit.wu),
line({
metric: blocks.weight.sum,
name: "Sum",
@@ -376,7 +375,7 @@ export function createChainSection(ctx) {
{
name: "Fullness",
title: "Block Fullness",
bottom: fromFullnessPattern(blocks.fullness, Unit.percentage),
bottom: fromBaseStatsPattern(blocks.fullness, Unit.percentage),
},
],
},
@@ -388,7 +387,7 @@ export function createChainSection(ctx) {
{
name: "Count",
title: "Transaction Count",
bottom: fromDollarsPattern(transactions.count.txCount, Unit.count),
bottom: fromFullStatsPattern(transactions.count.txCount, Unit.count),
},
{
name: "Speed",
@@ -426,34 +425,34 @@ export function createChainSection(ctx) {
name: "Size",
title: "Transaction Size",
bottom: [
...fromFeeRatePattern(transactions.size.weight, Unit.wu),
...fromFeeRatePattern(transactions.size.vsize, Unit.vb),
...fromStatsPattern(transactions.size.weight, Unit.wu),
...fromStatsPattern(transactions.size.vsize, Unit.vb),
],
},
{
name: "Fee Rate",
title: "Fee Rate",
bottom: fromFeeRatePattern(transactions.fees.feeRate, Unit.feeRate),
bottom: fromStatsPattern(transactions.fees.feeRate, Unit.feeRate),
},
{
name: "Versions",
title: "Transaction Versions",
bottom: [
...fromBlockCountWithUnit(
...fromCountPattern(
transactions.versions.v1,
Unit.count,
"v1",
colors.orange,
colors.red,
),
...fromBlockCountWithUnit(
...fromCountPattern(
transactions.versions.v2,
Unit.count,
"v2",
colors.cyan,
colors.blue,
),
...fromBlockCountWithUnit(
...fromCountPattern(
transactions.versions.v3,
Unit.count,
"v3",
@@ -489,12 +488,12 @@ export function createChainSection(ctx) {
{
name: "Input Count",
title: "Input Count",
bottom: [...fromSizePattern(inputs.count, Unit.count)],
bottom: [...fromSumStatsPattern(inputs.count, Unit.count)],
},
{
name: "Output Count",
title: "Output Count",
bottom: [...fromSizePattern(outputs.count.totalCount, Unit.count)],
bottom: [...fromSumStatsPattern(outputs.count.totalCount, Unit.count)],
},
{
name: "Inputs/sec",
@@ -546,12 +545,12 @@ export function createChainSection(ctx) {
{
name: "P2PKH",
title: "P2PKH Output Count",
bottom: fromDollarsPattern(scripts.count.p2pkh, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2pkh, Unit.count),
},
{
name: "P2PK33",
title: "P2PK33 Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.p2pk33,
Unit.count,
),
@@ -559,7 +558,7 @@ export function createChainSection(ctx) {
{
name: "P2PK65",
title: "P2PK65 Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.p2pk65,
Unit.count,
),
@@ -573,12 +572,12 @@ export function createChainSection(ctx) {
{
name: "P2SH",
title: "P2SH Output Count",
bottom: fromDollarsPattern(scripts.count.p2sh, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2sh, Unit.count),
},
{
name: "P2MS",
title: "P2MS Output Count",
bottom: fromDollarsPattern(scripts.count.p2ms, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2ms, Unit.count),
},
],
},
@@ -589,7 +588,7 @@ export function createChainSection(ctx) {
{
name: "All SegWit",
title: "SegWit Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.segwit,
Unit.count,
),
@@ -597,7 +596,7 @@ export function createChainSection(ctx) {
{
name: "P2WPKH",
title: "P2WPKH Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.p2wpkh,
Unit.count,
),
@@ -605,7 +604,7 @@ export function createChainSection(ctx) {
{
name: "P2WSH",
title: "P2WSH Output Count",
bottom: fromDollarsPattern(scripts.count.p2wsh, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2wsh, Unit.count),
},
],
},
@@ -616,12 +615,12 @@ export function createChainSection(ctx) {
{
name: "P2TR",
title: "P2TR Output Count",
bottom: fromDollarsPattern(scripts.count.p2tr, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2tr, Unit.count),
},
{
name: "P2A",
title: "P2A Output Count",
bottom: fromDollarsPattern(scripts.count.p2a, Unit.count),
bottom: fromFullStatsPattern(scripts.count.p2a, Unit.count),
},
],
},
@@ -632,7 +631,7 @@ export function createChainSection(ctx) {
{
name: "OP_RETURN",
title: "OP_RETURN Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.opreturn,
Unit.count,
),
@@ -640,7 +639,7 @@ export function createChainSection(ctx) {
{
name: "Empty",
title: "Empty Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.emptyoutput,
Unit.count,
),
@@ -648,7 +647,7 @@ export function createChainSection(ctx) {
{
name: "Unknown",
title: "Unknown Output Count",
bottom: fromDollarsPattern(
bottom: fromFullStatsPattern(
scripts.count.unknownoutput,
Unit.count,
),
@@ -793,9 +792,9 @@ export function createChainSection(ctx) {
name: "Fee",
title: "Transaction Fees",
bottom: [
...fromSizePattern(transactions.fees.fee.bitcoin, Unit.btc),
...fromSizePattern(transactions.fees.fee.sats, Unit.sats),
...fromSizePattern(transactions.fees.fee.dollars, Unit.usd),
...fromSumStatsPattern(transactions.fees.fee.bitcoin, Unit.btc),
...fromSumStatsPattern(transactions.fees.fee.sats, Unit.sats),
...fromSumStatsPattern(transactions.fees.fee.dollars, Unit.usd),
line({
metric: blocks.rewards.feeDominance,
name: "Dominance",

View File

@@ -1,37 +1,6 @@
import { Unit } from "../utils/units.js";
import { line, price } from "./series.js";
import {
satsBtcUsd,
createRatioChart,
createZScoresFolder,
formatCohortTitle,
} from "./shared.js";
/**
* Create price with ratio options for cointime prices
* @param {PartialContext} ctx
* @param {Object} args
* @param {string} args.title
* @param {string} args.legend
* @param {AnyPricePattern} args.pricePattern
* @param {ActivePriceRatioPattern} args.ratio
* @param {Color} args.color
* @returns {PartialOptionsTree}
*/
function createCointimePriceWithRatioOptions(
ctx,
{ title, legend, pricePattern, ratio, color },
) {
return [
{
name: "Price",
title,
top: [price({ metric: pricePattern, name: legend, color })],
},
createRatioChart(ctx, { title: formatCohortTitle(title), pricePattern, ratio, color }),
createZScoresFolder(ctx, { title, legend, pricePattern, ratio, color }),
];
}
import { satsBtcUsd, createPriceRatioCharts } from "./shared.js";
/**
* Create Cointime section
@@ -134,12 +103,12 @@ export function createCointimeSection(ctx) {
},
...cointimePrices.map(({ pricePattern, ratio, name, color, title }) => ({
name,
tree: createCointimePriceWithRatioOptions(ctx, {
tree: createPriceRatioCharts(ctx, {
context: title,
legend: name,
pricePattern,
ratio,
legend: name,
color,
title,
}),
})),
],

View File

@@ -1,13 +1,12 @@
import {
fromSizePattern,
fromFullnessPattern,
fromDollarsPattern,
fromFeeRatePattern,
fromSumStatsPattern,
fromBaseStatsPattern,
fromFullStatsPattern,
fromStatsPattern,
fromCoinbasePattern,
fromValuePattern,
fromBitcoinPatternWithUnit,
fromBlockCountWithUnit,
fromIntervalPattern,
fromCountPattern,
fromSupplyPattern,
} from "./series.js";
import { colors } from "../chart/colors.js";
@@ -39,15 +38,14 @@ export function createContext({ brk }) {
return {
colors,
brk,
fromSizePattern: bind(fromSizePattern),
fromFullnessPattern: bind(fromFullnessPattern),
fromDollarsPattern: bind(fromDollarsPattern),
fromFeeRatePattern: bind(fromFeeRatePattern),
fromSumStatsPattern: bind(fromSumStatsPattern),
fromBaseStatsPattern: bind(fromBaseStatsPattern),
fromFullStatsPattern: bind(fromFullStatsPattern),
fromStatsPattern: bind(fromStatsPattern),
fromCoinbasePattern: bind(fromCoinbasePattern),
fromValuePattern: bind(fromValuePattern),
fromBitcoinPatternWithUnit: bind(fromBitcoinPatternWithUnit),
fromBlockCountWithUnit: bind(fromBlockCountWithUnit),
fromIntervalPattern: bind(fromIntervalPattern),
fromCountPattern: bind(fromCountPattern),
fromSupplyPattern,
};
}

Some files were not shown because too many files have changed in this diff Show More