global: snapshot

This commit is contained in:
nym21
2025-12-20 17:02:00 +01:00
parent 4b910ceaa7
commit 8c2402cacb
42 changed files with 2754 additions and 2930 deletions

View File

@@ -482,7 +482,6 @@ where
self.first.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_average(&self) -> &EagerVec<PcoVec<I, T>> {
self.average.u()
}
@@ -495,27 +494,22 @@ where
self.max.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct90(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct90.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct75(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct75.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_median(&self) -> &EagerVec<PcoVec<I, T>> {
self.median.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct25(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct25.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_pct10(&self) -> &EagerVec<PcoVec<I, T>> {
self.pct10.u()
}
@@ -528,7 +522,6 @@ where
self.last.u()
}
#[inline]
#[allow(unused)]
pub fn unwrap_cumulative(&self) -> &EagerVec<PcoVec<I, T>> {
self.cumulative.u()
}
@@ -701,7 +694,6 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn add_median(mut self) -> Self {
self.median = true;
self
@@ -717,25 +709,21 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn add_pct90(mut self) -> Self {
self.pct90 = true;
self
}
#[allow(unused)]
pub fn add_pct75(mut self) -> Self {
self.pct75 = true;
self
}
#[allow(unused)]
pub fn add_pct25(mut self) -> Self {
self.pct25 = true;
self
}
#[allow(unused)]
pub fn add_pct10(mut self) -> Self {
self.pct10 = true;
self
@@ -746,61 +734,51 @@ impl VecBuilderOptions {
self
}
#[allow(unused)]
pub fn rm_min(mut self) -> Self {
self.min = false;
self
}
#[allow(unused)]
pub fn rm_max(mut self) -> Self {
self.max = false;
self
}
#[allow(unused)]
pub fn rm_median(mut self) -> Self {
self.median = false;
self
}
#[allow(unused)]
pub fn rm_average(mut self) -> Self {
self.average = false;
self
}
#[allow(unused)]
pub fn rm_sum(mut self) -> Self {
self.sum = false;
self
}
#[allow(unused)]
pub fn rm_pct90(mut self) -> Self {
self.pct90 = false;
self
}
#[allow(unused)]
pub fn rm_pct75(mut self) -> Self {
self.pct75 = false;
self
}
#[allow(unused)]
pub fn rm_pct25(mut self) -> Self {
self.pct25 = false;
self
}
#[allow(unused)]
pub fn rm_pct10(mut self) -> Self {
self.pct10 = false;
self
}
#[allow(unused)]
pub fn rm_cumulative(mut self) -> Self {
self.cumulative = false;
self

View File

@@ -223,7 +223,6 @@ where
pub fn unwrap_first(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.first.u()
}
#[allow(unused)]
pub fn unwrap_average(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.average.u()
}
@@ -239,7 +238,6 @@ where
pub fn unwrap_last(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.last.u()
}
#[allow(unused)]
pub fn unwrap_cumulative(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.cumulative.u()
}
@@ -307,31 +305,26 @@ impl LazyVecBuilderOptions {
self
}
#[allow(unused)]
pub fn rm_min(mut self) -> Self {
self.min = false;
self
}
#[allow(unused)]
pub fn rm_max(mut self) -> Self {
self.max = false;
self
}
#[allow(unused)]
pub fn rm_average(mut self) -> Self {
self.average = false;
self
}
#[allow(unused)]
pub fn rm_sum(mut self) -> Self {
self.sum = false;
self
}
#[allow(unused)]
pub fn rm_cumulative(mut self) -> Self {
self.cumulative = false;
self

View File

@@ -129,7 +129,6 @@ where
})
}
// #[allow(unused)]
// pub fn compute_all<F>(
// &mut self,
// indexer: &Indexer,

View File

@@ -10,7 +10,9 @@ use brk_error::Result;
use brk_grouper::{
AmountFilter, ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount,
ByMaxAge, ByMinAge, BySpendableType, ByTerm, ByYear, Filter, Filtered, StateLevel, Term,
TimeFilter, UTXOGroups,
TimeFilter, UTXOGroups, DAYS_10Y, DAYS_12Y, DAYS_15Y, DAYS_1D, DAYS_1M, DAYS_1W, DAYS_1Y,
DAYS_2M, DAYS_2Y, DAYS_3M, DAYS_3Y, DAYS_4M, DAYS_4Y, DAYS_5M, DAYS_5Y, DAYS_6M, DAYS_6Y,
DAYS_7Y, DAYS_8Y,
};
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, HalvingEpoch, Height, OutputType, Sats, Version, Year};
@@ -111,68 +113,68 @@ impl UTXOCohorts {
},
max_age: ByMaxAge {
_1w: none(Filter::Time(TimeFilter::LowerThan(7)))?,
_1m: none(Filter::Time(TimeFilter::LowerThan(30)))?,
_2m: none(Filter::Time(TimeFilter::LowerThan(2 * 30)))?,
_3m: none(Filter::Time(TimeFilter::LowerThan(3 * 30)))?,
_4m: none(Filter::Time(TimeFilter::LowerThan(4 * 30)))?,
_5m: none(Filter::Time(TimeFilter::LowerThan(5 * 30)))?,
_6m: none(Filter::Time(TimeFilter::LowerThan(6 * 30)))?,
_1y: none(Filter::Time(TimeFilter::LowerThan(365)))?,
_2y: none(Filter::Time(TimeFilter::LowerThan(2 * 365)))?,
_3y: none(Filter::Time(TimeFilter::LowerThan(3 * 365)))?,
_4y: none(Filter::Time(TimeFilter::LowerThan(4 * 365)))?,
_5y: none(Filter::Time(TimeFilter::LowerThan(5 * 365)))?,
_6y: none(Filter::Time(TimeFilter::LowerThan(6 * 365)))?,
_7y: none(Filter::Time(TimeFilter::LowerThan(7 * 365)))?,
_8y: none(Filter::Time(TimeFilter::LowerThan(8 * 365)))?,
_10y: none(Filter::Time(TimeFilter::LowerThan(10 * 365)))?,
_12y: none(Filter::Time(TimeFilter::LowerThan(12 * 365)))?,
_15y: none(Filter::Time(TimeFilter::LowerThan(15 * 365)))?,
_1w: none(Filter::Time(TimeFilter::LowerThan(DAYS_1W)))?,
_1m: none(Filter::Time(TimeFilter::LowerThan(DAYS_1M)))?,
_2m: none(Filter::Time(TimeFilter::LowerThan(DAYS_2M)))?,
_3m: none(Filter::Time(TimeFilter::LowerThan(DAYS_3M)))?,
_4m: none(Filter::Time(TimeFilter::LowerThan(DAYS_4M)))?,
_5m: none(Filter::Time(TimeFilter::LowerThan(DAYS_5M)))?,
_6m: none(Filter::Time(TimeFilter::LowerThan(DAYS_6M)))?,
_1y: none(Filter::Time(TimeFilter::LowerThan(DAYS_1Y)))?,
_2y: none(Filter::Time(TimeFilter::LowerThan(DAYS_2Y)))?,
_3y: none(Filter::Time(TimeFilter::LowerThan(DAYS_3Y)))?,
_4y: none(Filter::Time(TimeFilter::LowerThan(DAYS_4Y)))?,
_5y: none(Filter::Time(TimeFilter::LowerThan(DAYS_5Y)))?,
_6y: none(Filter::Time(TimeFilter::LowerThan(DAYS_6Y)))?,
_7y: none(Filter::Time(TimeFilter::LowerThan(DAYS_7Y)))?,
_8y: none(Filter::Time(TimeFilter::LowerThan(DAYS_8Y)))?,
_10y: none(Filter::Time(TimeFilter::LowerThan(DAYS_10Y)))?,
_12y: none(Filter::Time(TimeFilter::LowerThan(DAYS_12Y)))?,
_15y: none(Filter::Time(TimeFilter::LowerThan(DAYS_15Y)))?,
},
min_age: ByMinAge {
_1d: none(Filter::Time(TimeFilter::GreaterOrEqual(1)))?,
_1w: none(Filter::Time(TimeFilter::GreaterOrEqual(7)))?,
_1m: none(Filter::Time(TimeFilter::GreaterOrEqual(30)))?,
_2m: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 30)))?,
_3m: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 30)))?,
_4m: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 30)))?,
_5m: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 30)))?,
_6m: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 30)))?,
_1y: none(Filter::Time(TimeFilter::GreaterOrEqual(365)))?,
_2y: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 365)))?,
_3y: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 365)))?,
_4y: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 365)))?,
_5y: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 365)))?,
_6y: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 365)))?,
_7y: none(Filter::Time(TimeFilter::GreaterOrEqual(7 * 365)))?,
_8y: none(Filter::Time(TimeFilter::GreaterOrEqual(8 * 365)))?,
_10y: none(Filter::Time(TimeFilter::GreaterOrEqual(10 * 365)))?,
_12y: none(Filter::Time(TimeFilter::GreaterOrEqual(12 * 365)))?,
_1d: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1D)))?,
_1w: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1W)))?,
_1m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1M)))?,
_2m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2M)))?,
_3m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3M)))?,
_4m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4M)))?,
_5m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5M)))?,
_6m: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6M)))?,
_1y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_1Y)))?,
_2y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_2Y)))?,
_3y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_3Y)))?,
_4y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_4Y)))?,
_5y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_5Y)))?,
_6y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_6Y)))?,
_7y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_7Y)))?,
_8y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_8Y)))?,
_10y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_10Y)))?,
_12y: none(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_12Y)))?,
},
age_range: ByAgeRange {
up_to_1d: full(Filter::Time(TimeFilter::Range(0..1)))?,
_1d_to_1w: full(Filter::Time(TimeFilter::Range(1..7)))?,
_1w_to_1m: full(Filter::Time(TimeFilter::Range(7..30)))?,
_1m_to_2m: full(Filter::Time(TimeFilter::Range(30..2 * 30)))?,
_2m_to_3m: full(Filter::Time(TimeFilter::Range(2 * 30..3 * 30)))?,
_3m_to_4m: full(Filter::Time(TimeFilter::Range(3 * 30..4 * 30)))?,
_4m_to_5m: full(Filter::Time(TimeFilter::Range(4 * 30..5 * 30)))?,
_5m_to_6m: full(Filter::Time(TimeFilter::Range(5 * 30..6 * 30)))?,
_6m_to_1y: full(Filter::Time(TimeFilter::Range(6 * 30..365)))?,
_1y_to_2y: full(Filter::Time(TimeFilter::Range(365..2 * 365)))?,
_2y_to_3y: full(Filter::Time(TimeFilter::Range(2 * 365..3 * 365)))?,
_3y_to_4y: full(Filter::Time(TimeFilter::Range(3 * 365..4 * 365)))?,
_4y_to_5y: full(Filter::Time(TimeFilter::Range(4 * 365..5 * 365)))?,
_5y_to_6y: full(Filter::Time(TimeFilter::Range(5 * 365..6 * 365)))?,
_6y_to_7y: full(Filter::Time(TimeFilter::Range(6 * 365..7 * 365)))?,
_7y_to_8y: full(Filter::Time(TimeFilter::Range(7 * 365..8 * 365)))?,
_8y_to_10y: full(Filter::Time(TimeFilter::Range(8 * 365..10 * 365)))?,
_10y_to_12y: full(Filter::Time(TimeFilter::Range(10 * 365..12 * 365)))?,
_12y_to_15y: full(Filter::Time(TimeFilter::Range(12 * 365..15 * 365)))?,
from_15y: full(Filter::Time(TimeFilter::GreaterOrEqual(15 * 365)))?,
up_to_1d: full(Filter::Time(TimeFilter::Range(0..DAYS_1D)))?,
_1d_to_1w: full(Filter::Time(TimeFilter::Range(DAYS_1D..DAYS_1W)))?,
_1w_to_1m: full(Filter::Time(TimeFilter::Range(DAYS_1W..DAYS_1M)))?,
_1m_to_2m: full(Filter::Time(TimeFilter::Range(DAYS_1M..DAYS_2M)))?,
_2m_to_3m: full(Filter::Time(TimeFilter::Range(DAYS_2M..DAYS_3M)))?,
_3m_to_4m: full(Filter::Time(TimeFilter::Range(DAYS_3M..DAYS_4M)))?,
_4m_to_5m: full(Filter::Time(TimeFilter::Range(DAYS_4M..DAYS_5M)))?,
_5m_to_6m: full(Filter::Time(TimeFilter::Range(DAYS_5M..DAYS_6M)))?,
_6m_to_1y: full(Filter::Time(TimeFilter::Range(DAYS_6M..DAYS_1Y)))?,
_1y_to_2y: full(Filter::Time(TimeFilter::Range(DAYS_1Y..DAYS_2Y)))?,
_2y_to_3y: full(Filter::Time(TimeFilter::Range(DAYS_2Y..DAYS_3Y)))?,
_3y_to_4y: full(Filter::Time(TimeFilter::Range(DAYS_3Y..DAYS_4Y)))?,
_4y_to_5y: full(Filter::Time(TimeFilter::Range(DAYS_4Y..DAYS_5Y)))?,
_5y_to_6y: full(Filter::Time(TimeFilter::Range(DAYS_5Y..DAYS_6Y)))?,
_6y_to_7y: full(Filter::Time(TimeFilter::Range(DAYS_6Y..DAYS_7Y)))?,
_7y_to_8y: full(Filter::Time(TimeFilter::Range(DAYS_7Y..DAYS_8Y)))?,
_8y_to_10y: full(Filter::Time(TimeFilter::Range(DAYS_8Y..DAYS_10Y)))?,
_10y_to_12y: full(Filter::Time(TimeFilter::Range(DAYS_10Y..DAYS_12Y)))?,
_12y_to_15y: full(Filter::Time(TimeFilter::Range(DAYS_12Y..DAYS_15Y)))?,
from_15y: full(Filter::Time(TimeFilter::GreaterOrEqual(DAYS_15Y)))?,
},
amount_range: ByAmountRange {

View File

@@ -1,6 +1,5 @@
//! Processing received outputs (new UTXOs).
use brk_grouper::{Filter, Filtered};
use brk_types::{Dollars, Height, Timestamp};
use crate::stateful::states::Transacted;
@@ -37,16 +36,14 @@ impl UTXOCohorts {
});
// Update output type cohorts
self.type_.iter_mut().for_each(|vecs| {
let output_type = match vecs.filter() {
Filter::Type(output_type) => *output_type,
_ => unreachable!(),
};
vecs.state
.as_mut()
.unwrap()
.receive(received.by_type.get(output_type), price)
});
self.type_
.iter_typed_mut()
.for_each(|(output_type, vecs)| {
vecs.state
.as_mut()
.unwrap()
.receive(received.by_type.get(output_type), price)
});
// Update amount range cohorts
received

View File

@@ -1,7 +1,6 @@
//! Processing spent inputs (UTXOs being spent).
use brk_grouper::{Filter, Filtered, TimeFilter};
use brk_types::{CheckedSub, HalvingEpoch, Height, Year};
use brk_types::{CheckedSub, Height};
use rustc_hash::FxHashMap;
use vecdb::VecIndex;
@@ -26,15 +25,6 @@ impl UTXOCohorts {
return;
}
// Time-based cohorts: age_range + epoch + year
let mut time_cohorts: Vec<_> = self
.0
.age_range
.iter_mut()
.chain(self.0.epoch.iter_mut())
.chain(self.0.year.iter_mut())
.collect();
let last_block = chain_state.last().unwrap();
let last_timestamp = last_block.timestamp;
let current_price = last_block.price;
@@ -55,27 +45,45 @@ impl UTXOCohorts {
.unwrap()
.is_more_than_hour();
// Update time-based cohorts
time_cohorts
.iter_mut()
.filter(|v| match v.filter() {
Filter::Time(TimeFilter::GreaterOrEqual(from)) => *from <= days_old,
Filter::Time(TimeFilter::LowerThan(to)) => *to > days_old,
Filter::Time(TimeFilter::Range(range)) => range.contains(&days_old),
Filter::Epoch(e) => *e == HalvingEpoch::from(height),
Filter::Year(y) => *y == Year::from(block_state.timestamp),
_ => unreachable!(),
})
.for_each(|vecs| {
vecs.state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
});
// Update age range cohort (direct index lookup)
self.0
.age_range
.get_mut_by_days_old(days_old)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update epoch cohort (direct lookup by height)
self.0.epoch.mut_vec_from_height(height).state.um().send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update year cohort (direct lookup by timestamp)
self.0
.year
.mut_vec_from_timestamp(block_state.timestamp)
.state
.um()
.send(
&sent.spendable_supply,
current_price,
prev_price,
blocks_old,
days_old_float,
older_than_hour,
);
// Update output type cohorts
sent.by_type

View File

@@ -15,6 +15,7 @@ use brk_grouper::ByAddressType;
use brk_indexer::Indexer;
use brk_types::{DateIndex, Height, OutputType, Sats, TypeIndex};
use log::info;
use rayon::prelude::*;
use vecdb::{Exit, GenericStoredVec, IterableVec, TypedVecIterator, VecIndex};
use crate::{
@@ -420,7 +421,8 @@ pub fn process_blocks(
});
// Main thread: Update UTXO cohorts
vecs.utxo_cohorts.receive(transacted, height, timestamp, block_price);
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
vecs.utxo_cohorts.send(height_to_sent, chain_state);
});
@@ -542,14 +544,14 @@ fn push_cohort_states(
dateindex: Option<DateIndex>,
date_price: Option<Option<brk_types::Dollars>>,
) -> Result<()> {
utxo_cohorts.iter_separate_mut().try_for_each(|v| {
// utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {
// utxo_cohorts.iter_separate_mut().try_for_each(|v| {
utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price)
})?;
address_cohorts.iter_separate_mut().try_for_each(|v| {
// address_cohorts.par_iter_separate_mut().try_for_each(|v| {
// address_cohorts.iter_separate_mut().try_for_each(|v| {
address_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price, dateindex, date_price)
})?;

View File

@@ -151,6 +151,9 @@ impl CohortMetrics {
date_price: Option<Option<Dollars>>,
state: &mut CohortState,
) -> Result<()> {
// Apply pending updates before reading
state.apply_pending();
if let (Some(unrealized), Some(price_paid), Some(height_price)) = (
self.unrealized.as_mut(),
self.price_paid.as_mut(),
@@ -248,6 +251,14 @@ impl CohortMetrics {
realized.compute_rest_part1(indexes, price, starting_indexes, exit)?;
}
if let Some(unrealized) = self.unrealized.as_mut() {
unrealized.compute_rest_part1(price, starting_indexes, exit)?;
}
if let Some(price_paid) = self.price_paid.as_mut() {
price_paid.compute_rest_part1(indexes, starting_indexes, exit)?;
}
Ok(())
}
@@ -277,6 +288,18 @@ impl CohortMetrics {
exit,
)?;
if let Some(realized) = self.realized.as_mut() {
realized.compute_rest_part2(
indexes,
price,
starting_indexes,
height_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
exit,
)?;
}
if let Some(relative) = self.relative.as_mut() {
relative.compute_rest_part2(
indexes,

View File

@@ -154,4 +154,28 @@ impl PricePaidMetrics {
)?;
Ok(())
}
/// First phase of computed metrics (indexes from height).
pub fn compute_rest_part1(
&mut self,
indexes: &crate::indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_min_price_paid.compute_rest(
indexes,
starting_indexes,
exit,
Some(&self.height_to_min_price_paid),
)?;
self.indexes_to_max_price_paid.compute_rest(
indexes,
starting_indexes,
exit,
Some(&self.height_to_max_price_paid),
)?;
Ok(())
}
}

View File

@@ -4,8 +4,8 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use vecdb::{AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec};
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredF32, StoredF64, Version};
use vecdb::{AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec};
use crate::{
Indexes,
@@ -565,6 +565,50 @@ impl RealizedMetrics {
Some(&self.height_to_realized_loss),
)?;
// neg_realized_loss = realized_loss * -1
self.indexes_to_neg_realized_loss
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&self.height_to_realized_loss,
|(i, v, ..)| (i, v * -1_i64),
exit,
)?;
Ok(())
})?;
// net_realized_pnl = profit - loss
self.indexes_to_net_realized_pnl
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
Ok(())
})?;
// realized_value = profit + loss
self.indexes_to_realized_value
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
Ok(())
})?;
// total_realized_pnl at height level = profit + loss
self.height_to_total_realized_pnl.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
exit,
)?;
self.indexes_to_value_created.compute_rest(
indexes,
starting_indexes,
@@ -579,6 +623,265 @@ impl RealizedMetrics {
Some(&self.height_to_value_destroyed),
)?;
// Optional: adjusted value
if let Some(adjusted_value_created) = self.indexes_to_adjusted_value_created.as_mut() {
adjusted_value_created.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_created.as_ref(),
)?;
}
if let Some(adjusted_value_destroyed) = self.indexes_to_adjusted_value_destroyed.as_mut() {
adjusted_value_destroyed.compute_rest(
indexes,
starting_indexes,
exit,
self.height_to_adjusted_value_destroyed.as_ref(),
)?;
}
Ok(())
}
/// Second phase of computed metrics (realized price from realized cap / supply).
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
height_to_supply: &impl IterableVec<Height, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
// realized_price = realized_cap / supply
self.indexes_to_realized_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
&self.height_to_realized_cap,
height_to_supply,
exit,
)?;
Ok(())
})?;
if let Some(price) = price {
self.indexes_to_realized_price_extra.compute_rest(
price,
starting_indexes,
exit,
Some(self.indexes_to_realized_price.dateindex.unwrap_last()),
)?;
}
// realized_cap_30d_delta
self.indexes_to_realized_cap_30d_delta
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
self.indexes_to_realized_cap.dateindex.unwrap_last(),
30,
exit,
)?;
Ok(())
})?;
// total_realized_pnl at dateindex level
self.indexes_to_total_realized_pnl
.compute_all(starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.dateindex,
self.indexes_to_realized_profit.dateindex.unwrap_sum(),
self.indexes_to_realized_loss.dateindex.unwrap_sum(),
exit,
)?;
Ok(())
})?;
// SOPR = value_created / value_destroyed
self.dateindex_to_sopr.compute_divide(
starting_indexes.dateindex,
self.indexes_to_value_created.dateindex.unwrap_sum(),
self.indexes_to_value_destroyed.dateindex.unwrap_sum(),
exit,
)?;
self.dateindex_to_sopr_7d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sopr,
7,
exit,
)?;
self.dateindex_to_sopr_30d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sopr,
30,
exit,
)?;
// Optional: adjusted SOPR
if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = (
self.dateindex_to_adjusted_sopr.as_mut(),
self.indexes_to_adjusted_value_created.as_ref(),
self.indexes_to_adjusted_value_destroyed.as_ref(),
) {
adjusted_sopr.compute_divide(
starting_indexes.dateindex,
adj_created.dateindex.unwrap_sum(),
adj_destroyed.dateindex.unwrap_sum(),
exit,
)?;
if let Some(ema_7d) = self.dateindex_to_adjusted_sopr_7d_ema.as_mut() {
ema_7d.compute_ema(
starting_indexes.dateindex,
self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
7,
exit,
)?;
}
if let Some(ema_30d) = self.dateindex_to_adjusted_sopr_30d_ema.as_mut() {
ema_30d.compute_ema(
starting_indexes.dateindex,
self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
30,
exit,
)?;
}
}
// sell_side_risk_ratio = realized_value / realized_cap
self.dateindex_to_sell_side_risk_ratio.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_realized_value.dateindex.unwrap_sum(),
self.indexes_to_realized_cap.dateindex.unwrap_last(),
exit,
)?;
self.dateindex_to_sell_side_risk_ratio_7d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sell_side_risk_ratio,
7,
exit,
)?;
self.dateindex_to_sell_side_risk_ratio_30d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sell_side_risk_ratio,
30,
exit,
)?;
// Ratios relative to realized cap
self.indexes_to_realized_profit_rel_to_realized_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_cap,
exit,
)?;
Ok(())
})?;
self.indexes_to_realized_loss_rel_to_realized_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.height,
&self.height_to_realized_loss,
&self.height_to_realized_cap,
exit,
)?;
Ok(())
})?;
self.indexes_to_net_realized_pnl_rel_to_realized_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.height,
self.indexes_to_net_realized_pnl.height.u(),
&self.height_to_realized_cap,
exit,
)?;
Ok(())
})?;
// Net realized PnL cumulative 30d delta
self.indexes_to_net_realized_pnl_cumulative_30d_delta
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl
.dateindex
.unwrap_cumulative(),
30,
exit,
)?;
Ok(())
})?;
// Relative to realized cap
self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex
.u(),
self.indexes_to_realized_cap.dateindex.unwrap_last(),
exit,
)?;
Ok(())
})?;
// Relative to market cap
if let Some(dateindex_to_market_cap) = dateindex_to_market_cap {
self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex
.u(),
dateindex_to_market_cap,
exit,
)?;
Ok(())
})?;
}
// Optional: realized_cap_rel_to_own_market_cap
if let (Some(rel_vec), Some(height_to_market_cap)) = (
self.indexes_to_realized_cap_rel_to_own_market_cap.as_mut(),
height_to_market_cap,
) {
rel_vec.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.height,
&self.height_to_realized_cap,
height_to_market_cap,
exit,
)?;
Ok(())
})?;
}
// Optional: realized_profit_to_loss_ratio
if let Some(ratio) = self.dateindex_to_realized_profit_to_loss_ratio.as_mut() {
ratio.compute_divide(
starting_indexes.dateindex,
self.indexes_to_realized_profit.dateindex.unwrap_sum(),
self.indexes_to_realized_loss.dateindex.unwrap_sum(),
exit,
)?;
}
Ok(())
}
}

View File

@@ -452,58 +452,67 @@ impl RelativeMetrics {
// === Supply in Profit/Loss Relative to Own Supply ===
if let Some(unrealized) = unrealized {
self.height_to_supply_in_profit_rel_to_own_supply.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_profit_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_loss_rel_to_own_supply.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_loss_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_profit_rel_to_own_supply
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_profit_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.height_to_supply_in_loss_rel_to_own_supply
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_supply_in_loss_value.bitcoin,
&supply.height_to_supply_value.bitcoin,
exit,
)?;
self.indexes_to_supply_in_profit_rel_to_own_supply.compute_all(
starting_indexes,
exit,
|v| {
if let Some(dateindex_vec) = unrealized.indexes_to_supply_in_profit.bitcoin.dateindex.as_ref()
&& let Some(supply_dateindex) = supply.indexes_to_supply.bitcoin.dateindex.as_ref() {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
self.indexes_to_supply_in_profit_rel_to_own_supply
.compute_all(starting_indexes, exit, |v| {
if let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_profit
.bitcoin
.dateindex
.as_ref()
&& let Some(supply_dateindex) =
supply.indexes_to_supply.bitcoin.dateindex.as_ref()
{
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
Ok(())
},
)?;
})?;
self.indexes_to_supply_in_loss_rel_to_own_supply.compute_all(
starting_indexes,
exit,
|v| {
if let Some(dateindex_vec) = unrealized.indexes_to_supply_in_loss.bitcoin.dateindex.as_ref()
&& let Some(supply_dateindex) = supply.indexes_to_supply.bitcoin.dateindex.as_ref() {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
self.indexes_to_supply_in_loss_rel_to_own_supply
.compute_all(starting_indexes, exit, |v| {
if let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_loss
.bitcoin
.dateindex
.as_ref()
&& let Some(supply_dateindex) =
supply.indexes_to_supply.bitcoin.dateindex.as_ref()
{
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
supply_dateindex,
exit,
)?;
}
Ok(())
},
)?;
})?;
}
// === Supply in Profit/Loss Relative to Circulating Supply ===
if let (Some(unrealized), Some(v)) = (
unrealized,
self.height_to_supply_in_profit_rel_to_circulating_supply.as_mut(),
self.height_to_supply_in_profit_rel_to_circulating_supply
.as_mut(),
) {
v.compute_percentage(
starting_indexes.height,
@@ -514,7 +523,8 @@ impl RelativeMetrics {
}
if let (Some(unrealized), Some(v)) = (
unrealized,
self.height_to_supply_in_loss_rel_to_circulating_supply.as_mut(),
self.height_to_supply_in_loss_rel_to_circulating_supply
.as_mut(),
) {
v.compute_percentage(
starting_indexes.height,
@@ -526,71 +536,398 @@ impl RelativeMetrics {
// === Unrealized vs Market Cap ===
if let (Some(unrealized), Some(height_to_mc)) = (unrealized, height_to_market_cap) {
self.height_to_unrealized_profit_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
height_to_mc,
exit,
)?;
self.height_to_unrealized_loss_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_neg_unrealized_loss_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_net_unrealized_pnl_rel_to_market_cap.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
height_to_mc,
exit,
)?;
self.height_to_unrealized_profit_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
height_to_mc,
exit,
)?;
self.height_to_unrealized_loss_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_neg_unrealized_loss_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
height_to_mc,
exit,
)?;
self.height_to_net_unrealized_pnl_rel_to_market_cap
.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
height_to_mc,
exit,
)?;
}
if let Some(dateindex_to_mc) = dateindex_to_market_cap
&& let Some(unrealized) = unrealized {
self.indexes_to_unrealized_profit_rel_to_market_cap.compute_all(
starting_indexes,
exit,
|v| {
&& let Some(unrealized) = unrealized
{
self.indexes_to_unrealized_profit_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
self.indexes_to_unrealized_loss_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
// indexes_to_neg_unrealized_loss_rel_to_market_cap
if let Some(dateindex_to_mc) = dateindex_to_market_cap
&& let Some(unrealized) = unrealized
{
if let Some(dateindex_vec) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
self.indexes_to_neg_unrealized_loss_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
},
})?;
}
if let Some(dateindex_vec) = unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
self.indexes_to_net_unrealized_pnl_rel_to_market_cap
.compute_all(starting_indexes, exit, |v| {
v.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_mc,
exit,
)?;
Ok(())
})?;
}
}
// === Supply in Profit/Loss Relative to Circulating Supply (indexes) ===
if let Some(v) = self
.indexes_to_supply_in_profit_rel_to_circulating_supply
.as_mut()
&& let Some(unrealized) = unrealized
&& let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_profit
.bitcoin
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_supply,
exit,
)?;
self.indexes_to_unrealized_loss_rel_to_market_cap.compute_all(
starting_indexes,
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_supply_in_loss_rel_to_circulating_supply
.as_mut()
&& let Some(unrealized) = unrealized
&& let Some(dateindex_vec) = unrealized
.indexes_to_supply_in_loss
.bitcoin
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
dateindex_vec,
dateindex_to_supply,
exit,
)?;
Ok(())
})?;
}
// === Unrealized vs Own Market Cap ===
// own_market_cap = supply_value.dollars
if let Some(unrealized) = unrealized {
if let Some(v) = self
.height_to_unrealized_profit_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
supply_dollars,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars) = supply.height_to_supply_value.dollars.as_ref()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
supply_dollars,
exit,
|v| {
v.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
dateindex_to_mc,
exit,
)?;
Ok(())
},
)?;
}
// TODO: Remaining relative metrics to implement:
// - indexes_to_supply_in_profit/loss_rel_to_circulating_supply
// - height_to_unrealized_*_rel_to_own_market_cap
// - height_to_unrealized_*_rel_to_own_total_unrealized_pnl
// - indexes_to_unrealized_*_rel_to_own_market_cap
// - indexes_to_unrealized_*_rel_to_own_total_unrealized_pnl
// See stateful/common/compute.rs for patterns.
// indexes versions
if let Some(v) = self
.indexes_to_unrealized_profit_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_market_cap
.as_mut()
&& let Some(supply_dollars_dateindex) = supply
.indexes_to_supply
.dollars
.as_ref()
.and_then(|d| d.dateindex.as_ref())
&& let Some(net_pnl_dateindex) =
unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
net_pnl_dateindex,
supply_dollars_dateindex,
exit,
)?;
Ok(())
})?;
}
// === Unrealized vs Own Total Unrealized PnL ===
if let Some(v) = self
.height_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_profit,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_neg_unrealized_loss,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
if let Some(v) = self
.height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
{
v.compute_percentage(
starting_indexes.height,
&unrealized.height_to_net_unrealized_pnl,
&unrealized.height_to_total_unrealized_pnl,
exit,
)?;
}
// indexes versions for own total unrealized pnl
if let Some(v) = self
.indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_profit,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&unrealized.dateindex_to_unrealized_loss,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(neg_loss_dateindex) =
unrealized.indexes_to_neg_unrealized_loss.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
neg_loss_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
if let Some(v) = self
.indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.as_mut()
&& let Some(total_pnl_dateindex) = unrealized
.indexes_to_total_unrealized_pnl
.dateindex
.as_ref()
&& let Some(net_pnl_dateindex) =
unrealized.indexes_to_net_unrealized_pnl.dateindex.as_ref()
{
v.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
net_pnl_dateindex,
total_pnl_dateindex,
exit,
)?;
Ok(())
})?;
}
}
let _ = dateindex_to_supply;
Ok(())
}
}

View File

@@ -304,4 +304,112 @@ impl UnrealizedMetrics {
)?;
Ok(())
}
/// First phase of computed metrics.
///
/// Derives dollar/bitcoin value variants and per-index rollups from the
/// base height/date vectors, then the combined series:
/// total = profit + loss, neg_loss = loss * -1, net = profit - loss.
pub fn compute_rest_part1(
    &mut self,
    price: Option<&crate::price::Vecs>,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // Supply value (bitcoin + dollars) derived from the raw sat counts.
    self.height_to_supply_in_profit_value
        .compute_rest(price, starting_indexes, exit, Some(&self.height_to_supply_in_profit))?;
    self.height_to_supply_in_loss_value
        .compute_rest(price, starting_indexes, exit, Some(&self.height_to_supply_in_loss))?;
    // Index rollups computed from their dateindex sources.
    self.indexes_to_supply_in_profit
        .compute_rest(price, starting_indexes, exit, Some(&self.dateindex_to_supply_in_profit))?;
    self.indexes_to_supply_in_loss
        .compute_rest(price, starting_indexes, exit, Some(&self.dateindex_to_supply_in_loss))?;
    self.indexes_to_unrealized_profit
        .compute_rest(starting_indexes, exit, Some(&self.dateindex_to_unrealized_profit))?;
    self.indexes_to_unrealized_loss
        .compute_rest(starting_indexes, exit, Some(&self.dateindex_to_unrealized_loss))?;
    // total_unrealized_pnl = profit + loss
    self.height_to_total_unrealized_pnl.compute_add(
        starting_indexes.height,
        &self.height_to_unrealized_profit,
        &self.height_to_unrealized_loss,
        exit,
    )?;
    self.indexes_to_total_unrealized_pnl
        .compute_all(starting_indexes, exit, |out| {
            out.compute_add(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    // neg_unrealized_loss = loss * -1
    self.height_to_neg_unrealized_loss.compute_transform(
        starting_indexes.height,
        &self.height_to_unrealized_loss,
        |(i, value, ..)| (i, value * -1_i64),
        exit,
    )?;
    self.indexes_to_neg_unrealized_loss
        .compute_all(starting_indexes, exit, |out| {
            out.compute_transform(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_loss,
                |(i, value, ..)| (i, value * -1_i64),
                exit,
            )?;
            Ok(())
        })?;
    // net_unrealized_pnl = profit - loss
    self.height_to_net_unrealized_pnl.compute_subtract(
        starting_indexes.height,
        &self.height_to_unrealized_profit,
        &self.height_to_unrealized_loss,
        exit,
    )?;
    self.indexes_to_net_unrealized_pnl
        .compute_all(starting_indexes, exit, |out| {
            out.compute_subtract(
                starting_indexes.dateindex,
                &self.dateindex_to_unrealized_profit,
                &self.dateindex_to_unrealized_loss,
                exit,
            )?;
            Ok(())
        })?;
    Ok(())
}
}

View File

@@ -3,10 +3,16 @@
//! Accumulates address data across blocks within a flush interval.
//! Data is flushed to disk at checkpoints.
use brk_types::{OutputType, TypeIndex};
use brk_grouper::ByAddressType;
use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, TypeIndex};
use vecdb::GenericStoredVec;
use super::super::address::AddressTypeToTypeIndexMap;
use super::{AddressLookup, EmptyAddressDataWithSource, LoadedAddressDataWithSource, TxIndexVec};
use super::super::address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs};
use super::super::compute::VecsReaders;
use super::{
AddressLookup, EmptyAddressDataWithSource, LoadedAddressDataWithSource, TxIndexVec,
WithAddressDataSource,
};
/// Cache for address data within a flush interval.
pub struct AddressCache {
@@ -75,3 +81,49 @@ impl AddressCache {
)
}
}
/// Load address data from storage or create new.
///
/// Returns None if address is already in cache (loaded or empty).
#[allow(clippy::too_many_arguments)]
pub fn load_uncached_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Addresses first seen at this height (typeindex at or past the height's
    // first index for this type) have no stored state yet: hand back defaults.
    let first_at_height = *first_addressindexes.get(address_type).unwrap();
    if typeindex >= first_at_height {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // Cached entries (loaded or empty) need no storage round-trip.
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Otherwise resolve the on-disk index, then fetch the payload from
    // whichever table (empty vs loaded) that index points into.
    let index_reader = vr.address_reader(address_type);
    let anyaddressindex = any_address_indexes.get(address_type, typeindex, index_reader);
    let loaded = match anyaddressindex.to_enum() {
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let data_reader = &vr.anyaddressindex_to_anyaddressdata.empty;
            let empty_data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, data_reader);
            WithAddressDataSource::FromEmpty(empty_index, empty_data.into())
        }
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let data_reader = &vr.anyaddressindex_to_anyaddressdata.loaded;
            let loaded_data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, data_reader);
            WithAddressDataSource::FromLoaded(loaded_index, loaded_data)
        }
    };
    Some(loaded)
}

View File

@@ -5,10 +5,7 @@
//! - Address data for address cohort tracking (optional)
use brk_grouper::ByAddressType;
use brk_types::{
AnyAddressDataIndexEnum, Height, LoadedAddressData, OutPoint, OutputType, Sats, TxInIndex,
TxIndex, TxOutIndex, TypeIndex,
};
use brk_types::{Height, OutPoint, OutputType, Sats, TxInIndex, TxIndex, TxOutIndex, TypeIndex};
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use vecdb::{BytesVec, GenericStoredVec};
@@ -18,12 +15,10 @@ use crate::stateful::address::{
};
use crate::stateful::compute::VecsReaders;
use crate::stateful::states::Transacted;
use super::AddressCache;
use crate::stateful::{IndexerReaders, process::RangeMap};
use super::super::address::HeightToAddressTypeToVec;
use super::{LoadedAddressDataWithSource, TxIndexVec, WithAddressDataSource};
use super::{load_uncached_address_data, AddressCache, LoadedAddressDataWithSource, TxIndexVec};
/// Result of processing inputs for a block.
pub struct InputsResult {
@@ -102,7 +97,7 @@ pub fn process_inputs(
txoutindex_to_typeindex.read_unwrap(txoutindex, &ir.txoutindex_to_typeindex);
// Look up address data
let addr_data_opt = get_address_data(
let addr_data_opt = load_uncached_address_data(
input_type,
typeindex,
first_addressindexes,
@@ -176,48 +171,3 @@ pub fn process_inputs(
}
}
/// Look up address data from storage or determine if new.
///
/// Returns None if address is already in loaded or empty cache.
// NOTE(review): an identical copy of this helper exists in the
// output-processing module — consider extracting a single shared version.
#[allow(clippy::too_many_arguments)]
fn get_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Check if this is a new address (typeindex >= first for this height):
    // such addresses have no stored state yet, so return defaults.
    let first = *first_addressindexes.get(address_type).unwrap();
    if first <= typeindex {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // Skip if already in cache (caller uses the cached copy instead)
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Read from storage: resolve the per-type index, then fetch the payload
    // from whichever table (loaded vs empty) that index points into.
    let reader = vr.address_reader(address_type);
    let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader);
    Some(match anyaddressindex.to_enum() {
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.loaded;
            let loaded_data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, reader);
            WithAddressDataSource::FromLoaded(loaded_index, loaded_data)
        }
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.empty;
            let empty_data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, reader);
            // Empty records are promoted to loaded form via `.into()`.
            WithAddressDataSource::FromEmpty(empty_index, empty_data.into())
        }
    })
}

View File

@@ -5,8 +5,7 @@
//! - Address data for address cohort tracking (optional)
use brk_grouper::ByAddressType;
use brk_types::{AnyAddressDataIndexEnum, LoadedAddressData, OutputType, Sats, TxIndex, TypeIndex};
use vecdb::GenericStoredVec;
use brk_types::{OutputType, Sats, TxIndex, TypeIndex};
use crate::stateful::address::{
AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs,
@@ -15,7 +14,7 @@ use crate::stateful::compute::VecsReaders;
use crate::stateful::states::Transacted;
use super::super::address::AddressTypeToVec;
use super::{AddressCache, LoadedAddressDataWithSource, TxIndexVec, WithAddressDataSource};
use super::{load_uncached_address_data, AddressCache, LoadedAddressDataWithSource, TxIndexVec};
/// Result of processing outputs for a block.
pub struct OutputsResult {
@@ -79,7 +78,7 @@ pub fn process_outputs(
.unwrap()
.push((typeindex, value));
let addr_data_opt = get_address_data(
let addr_data_opt = load_uncached_address_data(
output_type,
typeindex,
first_addressindexes,
@@ -108,49 +107,3 @@ pub fn process_outputs(
txindex_vecs,
}
}
/// Look up address data from storage or determine if new.
///
/// Returns None if address is already in loaded or empty cache.
// NOTE(review): an identical copy of this helper exists in the
// input-processing module — consider extracting a single shared version.
#[allow(clippy::too_many_arguments)]
fn get_address_data(
    address_type: OutputType,
    typeindex: TypeIndex,
    first_addressindexes: &ByAddressType<TypeIndex>,
    cache: &AddressCache,
    vr: &VecsReaders,
    any_address_indexes: &AnyAddressIndexesVecs,
    addresses_data: &AddressesDataVecs,
) -> Option<LoadedAddressDataWithSource> {
    // Check if this is a new address (typeindex >= first for this height):
    // such addresses have no stored state yet, so return defaults.
    let first = *first_addressindexes.get(address_type).unwrap();
    if first <= typeindex {
        return Some(WithAddressDataSource::New(LoadedAddressData::default()));
    }
    // Skip if already in cache (caller uses the cached copy instead)
    if cache.contains(address_type, typeindex) {
        return None;
    }
    // Read from storage: resolve the per-type index, then fetch the payload
    // from whichever table (loaded vs empty) that index points into.
    let reader = vr.address_reader(address_type);
    let anyaddressindex = any_address_indexes.get(address_type, typeindex, reader);
    Some(match anyaddressindex.to_enum() {
        AnyAddressDataIndexEnum::Loaded(loaded_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.loaded;
            let loaded_data = addresses_data
                .loaded
                .get_pushed_or_read_unwrap(loaded_index, reader);
            WithAddressDataSource::FromLoaded(loaded_index, loaded_data)
        }
        AnyAddressDataIndexEnum::Empty(empty_index) => {
            let reader = &vr.anyaddressindex_to_anyaddressdata.empty;
            let empty_data = addresses_data
                .empty
                .get_pushed_or_read_unwrap(empty_index, reader);
            // Empty records are promoted to loaded form via `.into()`.
            WithAddressDataSource::FromEmpty(empty_index, empty_data.into())
        }
    })
}

View File

@@ -1,6 +1,6 @@
//! Process received outputs for address cohorts.
use brk_grouper::{AmountBucket, ByAddressType};
use brk_grouper::{amounts_in_different_buckets, ByAddressType};
use brk_types::{Dollars, Sats, TypeIndex};
use rustc_hash::FxHashMap;
@@ -60,7 +60,7 @@ pub fn process_received(
let prev_balance = addr_data.balance();
let new_balance = prev_balance + total_value;
if AmountBucket::from(prev_balance) != AmountBucket::from(new_balance) {
if amounts_in_different_buckets(prev_balance, new_balance) {
// Crossing cohort boundary - subtract from old, add to new
let cohort_state = cohorts
.amount_range

View File

@@ -6,7 +6,7 @@
//! - Age metrics (blocks_old, days_old) are tracked for sent UTXOs
use brk_error::Result;
use brk_grouper::{ByAddressType, Filtered};
use brk_grouper::{amounts_in_different_buckets, ByAddressType};
use brk_types::{CheckedSub, Dollars, Height, Sats, Timestamp, TypeIndex};
use vecdb::{VecIndex, unlikely};
@@ -57,11 +57,9 @@ pub fn process_sent(
let will_be_empty = addr_data.has_1_utxos();
// Check if crossing cohort boundary
let prev_cohort = cohorts.amount_range.get(prev_balance);
let new_cohort = cohorts.amount_range.get(new_balance);
let filters_differ = prev_cohort.filter() != new_cohort.filter();
let crossing_boundary = amounts_in_different_buckets(prev_balance, new_balance);
if will_be_empty || filters_differ {
if will_be_empty || crossing_boundary {
// Subtract from old cohort
let cohort_state = cohorts
.amount_range
@@ -78,7 +76,7 @@ pub fn process_sent(
"process_sent: cohort underflow detected!\n\
Block context: prev_height={:?}, output_type={:?}, type_index={:?}\n\
prev_balance={}, new_balance={}, value={}\n\
will_be_empty={}, filters_differ={}\n\
will_be_empty={}, crossing_boundary={}\n\
Address: {:?}",
prev_height,
output_type,
@@ -87,7 +85,7 @@ pub fn process_sent(
new_balance,
value,
will_be_empty,
filters_differ,
crossing_boundary,
addr_data
);
}

View File

@@ -7,7 +7,7 @@ use std::path::Path;
use brk_error::Result;
use brk_types::{Dollars, Height, Sats};
use crate::{grouped::PERCENTILES_LEN, utils::OptionExt};
use crate::grouped::PERCENTILES_LEN;
use super::{CachedUnrealizedState, PriceToAmount, RealizedState, SupplyState, UnrealizedState};
@@ -72,14 +72,21 @@ impl CohortState {
Ok(())
}
/// Apply pending price_to_amount updates. Must be called before reads.
pub fn apply_pending(&mut self) {
    // Forward to the distribution map when one exists; cohorts without a
    // price_to_amount have nothing to flush.
    if let Some(dist) = self.price_to_amount.as_mut() {
        dist.apply_pending();
    }
}
/// Get first (lowest) price entry in distribution.
pub fn price_to_amount_first_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.price_to_amount.u().first_key_value()
self.price_to_amount.as_ref()?.first_key_value()
}
/// Get last (highest) price entry in distribution.
pub fn price_to_amount_last_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.price_to_amount.u().last_key_value()
self.price_to_amount.as_ref()?.last_key_value()
}
/// Reset per-block values before processing next block.
@@ -319,7 +326,6 @@ impl CohortState {
}
/// Compute prices at percentile thresholds.
/// Uses O(19 * log n) Fenwick tree queries instead of O(n) iteration.
pub fn compute_percentile_prices(&self) -> [Dollars; PERCENTILES_LEN] {
match self.price_to_amount.as_ref() {
Some(p) if !p.is_empty() => p.compute_percentiles(),
@@ -344,6 +350,11 @@ impl CohortState {
}
};
// Date unrealized: compute from scratch (only at date boundaries, ~144x less frequent)
let date_state = date_price.map(|date_price| {
CachedUnrealizedState::compute_full_standalone(date_price, price_to_amount)
});
// Height unrealized: use incremental cache (O(k) where k = flip range)
let height_state = if let Some(cache) = self.cached_unrealized.as_mut() {
cache.get_at_price(height_price, price_to_amount).clone()
@@ -354,11 +365,6 @@ impl CohortState {
state
};
// Date unrealized: compute from scratch (only at date boundaries, ~144x less frequent)
let date_state = date_price.map(|date_price| {
CachedUnrealizedState::compute_full_standalone(date_price, price_to_amount)
});
(height_state, date_state)
}
@@ -371,19 +377,19 @@ impl CohortState {
}
/// Get first (lowest) price in distribution.
pub fn min_price(&self) -> Option<&Dollars> {
pub fn min_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.first_key_value()
.map(|(k, _)| k)
.map(|(&k, _)| k)
}
/// Get last (highest) price in distribution.
pub fn max_price(&self) -> Option<&Dollars> {
pub fn max_price(&self) -> Option<Dollars> {
self.price_to_amount
.as_ref()?
.last_key_value()
.map(|(k, _)| k)
.map(|(&k, _)| k)
}
/// Get iterator over price_to_amount for merged percentile computation.

View File

@@ -1,135 +0,0 @@
//! Fenwick Tree (Binary Indexed Tree) for O(log n) prefix sums.
//!
//! Used for efficient percentile computation over price distributions.
/// Fenwick tree for O(log n) prefix sum queries and updates.
///
/// Supports:
/// - `add(idx, delta)`: O(log n) - add delta to position idx
/// - `prefix_sum(idx)`: O(log n) - sum of elements 0..=idx
/// - `lower_bound(target)`: O(log n) - find smallest idx where prefix_sum >= target
#[derive(Clone, Debug)]
pub struct FenwickTree {
    /// 1-indexed backing array (`tree[0]` is unused padding).
    tree: Vec<u64>,
    /// Logical number of elements (valid 0-indexed positions are `0..len`).
    len: usize,
}
impl FenwickTree {
    /// Create a new Fenwick tree holding `len` zero-initialized elements.
    pub fn new(len: usize) -> Self {
        Self {
            tree: vec![0; len + 1], // 1-indexed
            len,
        }
    }
    /// Add delta to position idx. O(log n).
    ///
    /// An out-of-range `idx` (>= len) is a silent no-op: the update loop's
    /// `i <= len` condition fails immediately.
    pub fn add(&mut self, idx: usize, delta: u64) {
        let mut i = idx + 1; // Convert to 1-indexed
        while i <= self.len {
            self.tree[i] += delta;
            i += i & i.wrapping_neg(); // Add LSB
        }
    }
    /// Subtract delta from position idx. O(log n).
    ///
    /// Caller must ensure at least `delta` has been added at `idx`;
    /// otherwise the u64 counters underflow (panic in debug builds).
    pub fn sub(&mut self, idx: usize, delta: u64) {
        let mut i = idx + 1;
        while i <= self.len {
            self.tree[i] -= delta;
            i += i & i.wrapping_neg();
        }
    }
    /// Get prefix sum of elements 0..=idx. O(log n).
    ///
    /// # Panics
    /// Panics if `idx >= len` (indexes past the backing array).
    #[allow(unused)]
    pub fn prefix_sum(&self, idx: usize) -> u64 {
        let mut sum = 0u64;
        let mut i = idx + 1; // Convert to 1-indexed
        while i > 0 {
            sum += self.tree[i];
            i -= i & i.wrapping_neg(); // Remove LSB
        }
        sum
    }
    /// Find smallest index where prefix_sum >= target. O(log n).
    /// Returns None if no such index exists (target > total sum).
    /// An empty tree always returns None.
    pub fn lower_bound(&self, target: u64) -> Option<usize> {
        // Guard the empty tree first: there is no valid index to return, and
        // the highest-bit computation below would underflow on len == 0
        // (usize::BITS - 1 - 64).
        if self.len == 0 {
            return None;
        }
        if target == 0 {
            return Some(0);
        }
        let mut sum = 0u64;
        let mut pos = 0usize;
        // Find highest bit position <= len for the binary-lifting descent.
        let mut bit = 1usize << (usize::BITS - 1 - self.len.leading_zeros());
        while bit > 0 {
            let next_pos = pos + bit;
            if next_pos <= self.len && sum + self.tree[next_pos] < target {
                sum += self.tree[next_pos];
                pos = next_pos;
            }
            bit >>= 1;
        }
        // pos is the largest 1-indexed position whose prefix sum is < target,
        // so the answer is pos + 1 in 1-indexed terms — i.e. pos 0-indexed.
        if pos < self.len {
            Some(pos)
        } else {
            None
        }
    }
    /// Get total sum of all elements. O(log n). Returns 0 for an empty tree.
    #[allow(unused)]
    pub fn total(&self) -> u64 {
        if self.len == 0 {
            // prefix_sum(0) would index past the (length 1) backing array.
            return 0;
        }
        self.prefix_sum(self.len - 1)
    }
    /// Reset all values to zero. O(n).
    #[allow(unused)]
    pub fn clear(&mut self) {
        self.tree.fill(0);
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Point updates followed by prefix-sum queries over a small tree.
    #[test]
    fn test_basic_operations() {
        let mut tree = FenwickTree::new(10);
        for &(idx, delta) in &[(0usize, 5u64), (2, 3), (5, 7)] {
            tree.add(idx, delta);
        }
        for &(idx, expected) in &[(0usize, 5u64), (1, 5), (2, 8), (5, 15)] {
            assert_eq!(tree.prefix_sum(idx), expected);
        }
        assert_eq!(tree.total(), 15);
    }

    /// `lower_bound` returns the first index whose prefix sum reaches target.
    #[test]
    fn test_lower_bound() {
        let mut tree = FenwickTree::new(10);
        tree.add(0, 10);
        tree.add(2, 20);
        tree.add(5, 30);
        let cases = [
            (5u64, Some(0usize)),
            (10, Some(0)),
            (11, Some(2)),
            (30, Some(2)),
            (31, Some(5)),
            (60, Some(5)),
            (61, None),
        ];
        for &(target, expected) in &cases {
            assert_eq!(tree.lower_bound(target), expected);
        }
    }
}

View File

@@ -1,8 +1,6 @@
mod address_cohort;
mod block;
mod cohort;
mod fenwick;
mod price_buckets;
mod price_to_amount;
mod realized;
mod supply;
@@ -13,7 +11,6 @@ mod utxo_cohort;
pub use address_cohort::*;
pub use block::*;
pub use cohort::*;
pub use price_buckets::*;
pub use price_to_amount::*;
pub use realized::*;
pub use supply::*;

View File

@@ -1,253 +0,0 @@
//! Logarithmic price buckets with Fenwick tree for O(log n) percentile queries.
//!
//! Uses logarithmic buckets to maintain constant relative precision across all price levels.
//! Bucket i represents prices in range [MIN_PRICE * BASE^i, MIN_PRICE * BASE^(i+1)).
use brk_types::{Dollars, Sats};
use super::fenwick::FenwickTree;
use crate::grouped::{PERCENTILES, PERCENTILES_LEN};
/// Minimum price tracked (sub-cent for early Bitcoin days).
const MIN_PRICE: f64 = 0.001;
/// Maximum price tracked ($100M for future-proofing).
/// Only used to size NUM_BUCKETS; bucket lookups clamp instead of checking it.
#[allow(unused)]
const MAX_PRICE: f64 = 100_000_000.0;
/// Base for logarithmic buckets (0.1% precision: each bucket spans a
/// 1.001x price range, so relative precision is constant at every level).
const BASE: f64 = 1.001;
/// Pre-computed ln(BASE) for efficiency.
const LN_BASE: f64 = 0.0009995003; // ln(1.001)
/// Pre-computed ln(MIN_PRICE) for efficiency.
const LN_MIN_PRICE: f64 = -6.907755279; // ln(0.001)
/// Number of buckets needed: ceil(ln(MAX/MIN) / ln(BASE)).
/// ln(100_000_000 / 0.001) / ln(1.001) ≈ 25,341
const NUM_BUCKETS: usize = 25_400; // Rounded up for safety
/// Logarithmic price buckets with O(log n) percentile queries.
///
/// The Fenwick tree and the raw `buckets` array are kept in lockstep by
/// `increment`/`decrement`, so prefix-sum queries (percentiles) and direct
/// iteration always observe the same distribution.
#[derive(Clone, Debug)]
pub struct PriceBuckets {
    /// Fenwick tree for O(log n) prefix sums.
    fenwick: FenwickTree,
    /// Direct bucket access for iteration (needed for unrealized computation).
    buckets: Vec<Sats>,
    /// Total supply tracked.
    total: Sats,
}
impl Default for PriceBuckets {
    /// Equivalent to [`PriceBuckets::new`]: an empty bucket set.
    fn default() -> Self {
        Self::new()
    }
}
impl PriceBuckets {
    /// Create new empty price buckets.
    pub fn new() -> Self {
        Self {
            fenwick: FenwickTree::new(NUM_BUCKETS),
            buckets: vec![Sats::ZERO; NUM_BUCKETS],
            total: Sats::ZERO,
        }
    }
    /// Convert price to bucket index. O(1).
    ///
    /// Prices at or below `MIN_PRICE` map to bucket 0 and out-of-range
    /// results are clamped to the last bucket. A NaN price also lands in
    /// bucket 0 (Rust float-to-int `as` casts send NaN to 0).
    #[inline]
    pub fn price_to_bucket(price: Dollars) -> usize {
        let price_f64 = f64::from(price);
        if price_f64 <= MIN_PRICE {
            return 0;
        }
        let bucket = ((price_f64.ln() - LN_MIN_PRICE) / LN_BASE) as usize;
        bucket.min(NUM_BUCKETS - 1)
    }
    /// Convert bucket index to representative price (bucket midpoint). O(1).
    #[inline]
    pub fn bucket_to_price(bucket: usize) -> Dollars {
        // Use geometric mean of bucket range for better accuracy
        let low = MIN_PRICE * BASE.powi(bucket as i32);
        let high = low * BASE;
        Dollars::from((low * high).sqrt())
    }
    /// Add amount at given price. O(log n).
    ///
    /// Updates the Fenwick tree, the raw bucket array, and `total` together
    /// so the three views stay consistent.
    pub fn increment(&mut self, price: Dollars, amount: Sats) {
        if amount == Sats::ZERO {
            return;
        }
        let bucket = Self::price_to_bucket(price);
        self.fenwick.add(bucket, u64::from(amount));
        self.buckets[bucket] += amount;
        self.total += amount;
    }
    /// Remove amount at given price. O(log n).
    ///
    /// NOTE(review): no underflow guard here — removing more than the bucket
    /// holds relies on `Sats`/u64 subtraction semantics; confirm callers only
    /// decrement amounts they previously incremented at the same price.
    pub fn decrement(&mut self, price: Dollars, amount: Sats) {
        if amount == Sats::ZERO {
            return;
        }
        let bucket = Self::price_to_bucket(price);
        self.fenwick.sub(bucket, u64::from(amount));
        self.buckets[bucket] -= amount;
        self.total -= amount;
    }
    /// Check if empty.
    #[allow(unused)]
    pub fn is_empty(&self) -> bool {
        self.total == Sats::ZERO
    }
    /// Get total supply.
    #[allow(unused)]
    pub fn total(&self) -> Sats {
        self.total
    }
    /// Compute all percentile prices. O(19 * log n) ≈ O(323 ops).
    ///
    /// Empty distributions return all-NaN. Note the target uses integer
    /// division, so tiny totals can truncate a low percentile's target to 0,
    /// which resolves to bucket 0 via `lower_bound(0)`.
    pub fn compute_percentiles(&self) -> [Dollars; PERCENTILES_LEN] {
        let mut result = [Dollars::NAN; PERCENTILES_LEN];
        if self.total == Sats::ZERO {
            return result;
        }
        let total = u64::from(self.total);
        for (i, &percentile) in PERCENTILES.iter().enumerate() {
            let target = total * u64::from(percentile) / 100;
            if let Some(bucket) = self.fenwick.lower_bound(target) {
                result[i] = Self::bucket_to_price(bucket);
            }
        }
        result
    }
    /// Get amount in a specific bucket.
    /// Out-of-range bucket indexes return zero rather than panicking.
    #[allow(unused)]
    pub fn get_bucket(&self, bucket: usize) -> Sats {
        self.buckets.get(bucket).copied().unwrap_or(Sats::ZERO)
    }
    /// Iterate over non-empty buckets in a price range.
    /// Used for unrealized computation flip range.
    ///
    /// The two prices may be given in either order; the range is normalized
    /// and both endpoint buckets are included.
    #[allow(unused)]
    pub fn iter_range(
        &self,
        from_price: Dollars,
        to_price: Dollars,
    ) -> impl Iterator<Item = (Dollars, Sats)> + '_ {
        let from_bucket = Self::price_to_bucket(from_price);
        let to_bucket = Self::price_to_bucket(to_price);
        let (start, end) = if from_bucket <= to_bucket {
            (from_bucket, to_bucket)
        } else {
            (to_bucket, from_bucket)
        };
        (start..=end).filter_map(move |bucket| {
            let amount = self.buckets[bucket];
            if amount > Sats::ZERO {
                Some((Self::bucket_to_price(bucket), amount))
            } else {
                None
            }
        })
    }
    /// Iterate over all non-empty buckets (for full unrealized computation).
    /// Yields (representative price, amount) in ascending price order.
    #[allow(unused)]
    pub fn iter(&self) -> impl Iterator<Item = (Dollars, Sats)> + '_ {
        self.buckets
            .iter()
            .enumerate()
            .filter_map(|(bucket, &amount)| {
                if amount > Sats::ZERO {
                    Some((Self::bucket_to_price(bucket), amount))
                } else {
                    None
                }
            })
    }
    /// Get the lowest price bucket with non-zero amount.
    #[allow(unused)]
    pub fn min_price(&self) -> Option<Dollars> {
        self.buckets
            .iter()
            .position(|&s| s > Sats::ZERO)
            .map(Self::bucket_to_price)
    }
    /// Get the highest price bucket with non-zero amount.
    #[allow(unused)]
    pub fn max_price(&self) -> Option<Dollars> {
        self.buckets
            .iter()
            .rposition(|&s| s > Sats::ZERO)
            .map(Self::bucket_to_price)
    }
    /// Clear all data.
    #[allow(unused)]
    pub fn clear(&mut self) {
        self.fenwick.clear();
        self.buckets.fill(Sats::ZERO);
        self.total = Sats::ZERO;
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-tripping price -> bucket -> price stays within bucket precision.
    #[test]
    fn test_bucket_conversion() {
        for &price in &[0.01, 1.0, 100.0, 10000.0, 50000.0, 100000.0] {
            let bucket = PriceBuckets::price_to_bucket(Dollars::from(price));
            let recovered = f64::from(PriceBuckets::bucket_to_price(bucket));
            let ratio = recovered / price;
            // Should be within 0.1% (our bucket precision)
            assert!(
                (0.999..=1.001).contains(&ratio),
                "price={}, recovered={}, ratio={}",
                price,
                recovered,
                ratio
            );
        }
    }

    /// The median of a known distribution lands near its true value.
    #[test]
    fn test_percentiles() {
        let mut buckets = PriceBuckets::new();
        // 100 sats at $10, 200 sats at $20, 300 sats at $30 — 600 total, so
        // the 50th percentile (300 sats in) falls around $20-$30.
        for &(price, sats) in &[(10.0, 100u64), (20.0, 200), (30.0, 300)] {
            buckets.increment(Dollars::from(price), Sats::from(sats));
        }
        // PERCENTILES[9] = 50, i.e. the median slot.
        let median_f64 = f64::from(buckets.compute_percentiles()[9]);
        assert!(
            (15.0..=35.0).contains(&median_f64),
            "median={} should be around $20-$30",
            median_f64
        );
    }
}

View File

@@ -8,20 +8,24 @@ use brk_error::{Error, Result};
use brk_types::{Dollars, Height, Sats};
use derive_deref::{Deref, DerefMut};
use pco::standalone::{simple_decompress, simpler_compress};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, unlikely};
use vecdb::Bytes;
use crate::{grouped::PERCENTILES_LEN, utils::OptionExt};
use crate::{
grouped::{PERCENTILES, PERCENTILES_LEN},
utils::OptionExt,
};
use super::{PriceBuckets, SupplyState};
use super::SupplyState;
#[derive(Clone, Debug)]
pub struct PriceToAmount {
pathbuf: PathBuf,
state: Option<State>,
/// Logarithmic buckets for O(log n) percentile queries.
/// Rebuilt on load, not persisted.
buckets: Option<PriceBuckets>,
/// Pending deltas: (total_increment, total_decrement) per price.
/// Flushed to BTreeMap before reads and at end of block.
pending: FxHashMap<Dollars, (Sats, Sats)>,
}
const STATE_AT_: &str = "state_at_";
@@ -32,7 +36,7 @@ impl PriceToAmount {
Self {
pathbuf: path.join(format!("{name}_price_to_amount")),
state: None,
buckets: None,
pending: FxHashMap::default(),
}
}
@@ -41,20 +45,20 @@ impl PriceToAmount {
let (&height, path) = files.range(..=height).next_back().ok_or(Error::NotFound(
"No price state found at or before height".into(),
))?;
let state = State::deserialize(&fs::read(path)?)?;
// Rebuild buckets from loaded state
let mut buckets = PriceBuckets::new();
for (&price, &amount) in state.iter() {
buckets.increment(price, amount);
}
self.state = Some(state);
self.buckets = Some(buckets);
self.state = Some(State::deserialize(&fs::read(path)?)?);
self.pending.clear();
Ok(height)
}
fn assert_pending_empty(&self) {
assert!(
self.pending.is_empty(),
"PriceToAmount: pending not empty, call apply_pending first"
);
}
pub fn iter(&self) -> impl Iterator<Item = (&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().iter()
}
@@ -63,84 +67,92 @@ impl PriceToAmount {
&self,
range: R,
) -> impl Iterator<Item = (&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().range(range)
}
pub fn is_empty(&self) -> bool {
self.state.u().is_empty()
self.pending.is_empty() && self.state.u().is_empty()
}
pub fn first_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().first_key_value()
}
pub fn last_key_value(&self) -> Option<(&Dollars, &Sats)> {
self.assert_pending_empty();
self.state.u().last_key_value()
}
/// Accumulate increment in pending batch. O(1).
pub fn increment(&mut self, price: Dollars, supply_state: &SupplyState) {
*self.state.um().entry(price).or_default() += supply_state.value;
if let Some(buckets) = self.buckets.as_mut() {
buckets.increment(price, supply_state.value);
}
self.pending.entry(price).or_default().0 += supply_state.value;
}
/// Accumulate decrement in pending batch. O(1).
pub fn decrement(&mut self, price: Dollars, supply_state: &SupplyState) {
if let Some(amount) = self.state.um().get_mut(&price) {
if unlikely(*amount < supply_state.value) {
let amount = *amount;
self.pending.entry(price).or_default().1 += supply_state.value;
}
/// Apply pending deltas to BTreeMap. O(k log n) where k = unique prices in pending.
/// Must be called before any read operations.
pub fn apply_pending(&mut self) {
for (price, (inc, dec)) in self.pending.drain() {
let entry = self.state.um().entry(price).or_default();
*entry += inc;
if *entry < dec {
panic!(
"PriceToAmount::decrement underflow!\n\
"PriceToAmount::apply_pending underflow!\n\
Path: {:?}\n\
Price: {}\n\
Bucket amount: {}\n\
Trying to decrement by: {}\n\
Supply state: utxo_count={}, value={}\n\
All buckets: {:?}",
self.pathbuf,
price,
amount,
supply_state.value,
supply_state.utxo_count,
supply_state.value,
self.state.u().iter().collect::<Vec<_>>()
Current + increments: {}\n\
Trying to decrement by: {}",
self.pathbuf, price, entry, dec
);
}
*amount -= supply_state.value;
if *amount == Sats::ZERO {
*entry -= dec;
if *entry == Sats::ZERO {
self.state.um().remove(&price);
}
if let Some(buckets) = self.buckets.as_mut() {
buckets.decrement(price, supply_state.value);
}
} else {
panic!(
"PriceToAmount::decrement price not found!\n\
Path: {:?}\n\
Price: {}\n\
Supply state: utxo_count={}, value={}\n\
All buckets: {:?}",
self.pathbuf,
price,
supply_state.utxo_count,
supply_state.value,
self.state.u().iter().collect::<Vec<_>>()
);
}
}
pub fn init(&mut self) {
self.state.replace(State::default());
self.buckets.replace(PriceBuckets::new());
self.pending.clear();
}
/// Compute percentile prices using O(log n) Fenwick tree queries.
/// Compute percentile prices by iterating the BTreeMap directly.
/// O(n) where n = number of unique prices.
pub fn compute_percentiles(&self) -> [Dollars; PERCENTILES_LEN] {
if let Some(buckets) = self.buckets.as_ref() {
buckets.compute_percentiles()
} else {
[Dollars::NAN; PERCENTILES_LEN]
self.assert_pending_empty();
let state = match self.state.as_ref() {
Some(s) if !s.is_empty() => s,
_ => return [Dollars::NAN; PERCENTILES_LEN],
};
let total: u64 = state.values().map(|&s| u64::from(s)).sum();
if total == 0 {
return [Dollars::NAN; PERCENTILES_LEN];
}
let mut result = [Dollars::NAN; PERCENTILES_LEN];
let mut cumsum = 0u64;
let mut idx = 0;
for (&price, &amount) in state.iter() {
cumsum += u64::from(amount);
while idx < PERCENTILES_LEN
&& cumsum >= total * u64::from(PERCENTILES[idx]) / 100
{
result[idx] = price;
idx += 1;
}
}
result
}
pub fn clean(&mut self) -> Result<()> {
@@ -170,6 +182,8 @@ impl PriceToAmount {
}
pub fn flush(&mut self, height: Height) -> Result<()> {
self.apply_pending();
let files = self.read_dir(Some(height))?;
for (_, path) in files

View File

@@ -39,14 +39,17 @@ impl AddAssign<&SupplyState> for SupplyState {
impl SubAssign<&SupplyState> for SupplyState {
fn sub_assign(&mut self, rhs: &Self) {
self.utxo_count = self.utxo_count.checked_sub(rhs.utxo_count).unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort utxo_count {} < address utxo_count {}. \
self.utxo_count = self
.utxo_count
.checked_sub(rhs.utxo_count)
.unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort utxo_count {} < address utxo_count {}. \
This indicates a desync between cohort state and address data. \
Try deleting the compute cache and restarting fresh.",
self.utxo_count, rhs.utxo_count
)
});
self.utxo_count, rhs.utxo_count
)
});
self.value = self.value.checked_sub(rhs.value).unwrap_or_else(|| {
panic!(
"SupplyState underflow: cohort value {} < address value {}. \