global: snapshot

This commit is contained in:
nym21
2026-02-01 22:38:01 +01:00
parent f03bbd9a92
commit f7d7c5704a
47 changed files with 2924 additions and 837 deletions

View File

@@ -24,7 +24,7 @@ use super::super::cache::AddressLookup;
/// parallel execution with UTXO cohort processing (which mutates chain_state).
///
/// `price_range_max` is used to compute the peak price during each UTXO's holding period
/// for accurate ATH regret calculation.
/// for accurate peak regret calculation.
#[allow(clippy::too_many_arguments)]
pub fn process_sent(
sent_data: HeightToAddressTypeToVec<(TypeIndex, Sats)>,
@@ -50,7 +50,7 @@ pub fn process_sent(
let blocks_old = current_height.to_usize() - receive_height.to_usize();
let age = Age::new(current_timestamp, prev_timestamp, blocks_old);
// Compute peak price during holding period for ATH regret
// Compute peak price during holding period for peak regret
// This is the max HIGH price between receive and send heights
let peak_price: Option<CentsUnsigned> =
price_range_max.map(|t| t.max_between(receive_height, current_height));

View File

@@ -1,19 +1,21 @@
use std::{cmp::Reverse, collections::BinaryHeap, path::Path};
use brk_cohort::{
ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount, ByMaxAge, ByMinAge,
BySpendableType, ByTerm, ByYear, Filter, Filtered, StateLevel, UTXOGroups,
AGE_BOUNDARIES, ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount,
ByMaxAge, ByMinAge, BySpendableType, ByTerm, ByYear, Filter, Filtered, StateLevel, UTXOGroups,
};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, Sats, StoredF32, Version};
use brk_types::{
CentsUnsigned, DateIndex, Dollars, Height, ONE_HOUR_IN_SEC, Sats, StoredF32, Timestamp, Version,
};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, GenericStoredVec, IterableVec};
use vecdb::{AnyStoredVec, Database, Exit, GenericStoredVec, IterableVec, VecIndex};
use crate::{
ComputeIndexes,
distribution::DynCohortVecs,
distribution::{DynCohortVecs, compute::PriceRangeMax, state::BlockState},
indexes,
internal::{PERCENTILES, PERCENTILES_LEN, compute_spot_percentile_rank},
price,
@@ -357,13 +359,13 @@ impl UTXOCohorts {
})
.collect();
// Compute percentiles for each aggregate filter
for aggregate in self.0.iter_aggregate_mut() {
// Compute percentiles for each aggregate filter in parallel
self.0.par_iter_aggregate_mut().try_for_each(|aggregate| {
let filter = aggregate.filter().clone();
// Get cost_basis, skip if not configured
let Some(cost_basis) = aggregate.metrics.cost_basis.as_mut() else {
continue;
return Ok(());
};
// Collect relevant cohort data for this aggregate and sum totals
@@ -397,7 +399,7 @@ impl UTXOCohorts {
.dateindex
.truncate_push(dateindex, StoredF32::NAN)?;
}
continue;
return Ok(());
}
// K-way merge using min-heap: O(n log k) where k = number of cohorts
@@ -507,9 +509,9 @@ impl UTXOCohorts {
let rank = compute_spot_percentile_rank(&usd_result, spot);
spot_pct.dateindex.truncate_push(dateindex, rank)?;
}
}
Ok(())
Ok(())
})
}
/// Validate computed versions for all cohorts (separate and aggregate).
@@ -525,4 +527,112 @@ impl UTXOCohorts {
Ok(())
}
/// Compute and push peak regret for all age_range cohorts.
///
/// Uses split points (binary searches over `chain_state` timestamps) to
/// assign each block to exactly one of the 21 age-range cohorts, then
/// computes regret for all cohorts in parallel and pushes results
/// sequentially. Called once per day when `dateindex` changes.
///
/// Regret per block = (peak − max(spot, cost_basis)) × supply, where peak
/// is the max HIGH price over the holding period [receive, current].
pub fn compute_and_push_peak_regret(
    &mut self,
    chain_state: &[BlockState],
    current_height: Height,
    current_timestamp: Timestamp,
    spot: CentsUnsigned,
    price_range_max: &PriceRangeMax,
    dateindex: DateIndex,
) -> Result<()> {
    // First block with known price data; earlier blocks carry no price.
    const FIRST_PRICE_HEIGHT: usize = 68_195;
    let start_height = FIRST_PRICE_HEIGHT;
    let end_height = current_height.to_usize() + 1;

    // Early return: push zeros if no price data yet
    if end_height <= start_height {
        for cohort in self.0.age_range.iter_mut() {
            if let Some(unrealized) = cohort.metrics.unrealized.as_mut()
                && let Some(peak_regret) = unrealized.peak_regret.as_mut()
            {
                peak_regret
                    .dateindex
                    .truncate_push(dateindex, Dollars::ZERO)?;
            }
        }
        return Ok(());
    }

    let spot_u128 = spot.as_u128();
    let current_ts = *current_timestamp;

    // Compute split points: splits[k] = first index where age < AGE_BOUNDARIES[k].
    // NOTE(review): assumes chain_state.len() >= end_height — confirm at call site.
    let splits: [usize; 20] = std::array::from_fn(|k| {
        let boundary_seconds = (AGE_BOUNDARIES[k] as u32) * ONE_HOUR_IN_SEC;
        let threshold_ts = current_ts.saturating_sub(boundary_seconds);
        chain_state[..end_height].partition_point(|b| *b.timestamp <= threshold_ts)
    });

    // Build half-open [start, end) height ranges for all 21 cohorts:
    // index 0 = youngest (age below the smallest boundary), 20 = oldest.
    let ranges: [(usize, usize); 21] = std::array::from_fn(|i| {
        if i == 0 {
            (splits[0], end_height)
        } else if i < 20 {
            (splits[i], splits[i - 1])
        } else {
            (start_height, splits[19])
        }
    });

    // Compute regret for all cohorts in parallel
    let regrets: [Dollars; 21] = ranges
        .into_par_iter()
        .map(|(range_start, range_end)| {
            // Clamp below to the first priced height; empty ranges are zero.
            let effective_start = range_start.max(start_height);
            if effective_start >= range_end {
                return Dollars::ZERO;
            }
            let mut regret: u128 = 0;
            for h in effective_start..range_end {
                let block = &chain_state[h];
                let supply = block.supply.value;
                if supply.is_zero() {
                    continue;
                }
                // Blocks with no recorded cost basis contribute nothing.
                let Some(cost_basis) = block.price else {
                    continue;
                };
                let receive_height = Height::from(h);
                // Peak = max HIGH price over the holding period.
                let peak = price_range_max.max_between(receive_height, current_height);
                // Reference price is max(spot, cost_basis) — equivalent to the
                // branchy "if spot >= cost use spot else cost" formulation.
                let reference = spot_u128.max(cost_basis.as_u128());
                // saturating_sub guards against underflow panic in the edge case
                // where the reference price exceeds the holding-period HIGH
                // (e.g. a cost basis above that block's recorded high).
                regret += peak.as_u128().saturating_sub(reference) * supply.as_u128();
            }
            // Scale sats-weighted cents down to whole-coin cents, then dollars.
            CentsUnsigned::new((regret / Sats::ONE_BTC_U128) as u64).to_dollars()
        })
        .collect::<Vec<_>>()
        .try_into()
        .unwrap();

    // Push results to cohorts
    for (cohort, regret) in self.0.age_range.iter_mut().zip(regrets) {
        if let Some(unrealized) = cohort.metrics.unrealized.as_mut()
            && let Some(peak_regret) = unrealized.peak_regret.as_mut()
        {
            peak_regret.dateindex.truncate_push(dateindex, regret)?;
        }
    }
    Ok(())
}
}

View File

@@ -19,7 +19,7 @@ impl UTXOCohorts {
/// We need to update the cohort states based on when that UTXO was created.
///
/// `price_range_max` is used to compute the peak price during each UTXO's holding period
/// for accurate ATH regret calculation.
/// for accurate peak regret calculation.
pub fn send(
&mut self,
height_to_sent: FxHashMap<Height, Transacted>,
@@ -45,7 +45,7 @@ impl UTXOCohorts {
let blocks_old = chain_len - 1 - receive_height.to_usize();
let age = Age::new(last_timestamp, block_state.timestamp, blocks_old);
// Compute peak price during holding period for ATH regret
// Compute peak price during holding period for peak regret
// This is the max HIGH price between receive and send heights
let peak_price: Option<CentsUnsigned> =
price_range_max.map(|t| t.max_between(receive_height, send_height));

View File

@@ -11,10 +11,10 @@ impl UTXOCohorts {
/// UTXOs age with each block. When they cross hour boundaries,
/// they move between age-based cohorts (e.g., from "0-1h" to "1h-1d").
///
/// Complexity: O(k * (log n + m)) where:
/// Complexity: O(k * log n) where:
/// - k = 20 boundaries to check
/// - n = total blocks in chain_state
/// - m = blocks crossing each boundary (typically 0-2 per boundary per block)
/// - Linear scan for end_idx is faster than binary search since typically 0-2 blocks cross each boundary
pub fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) {
if chain_state.is_empty() {
return;
@@ -49,9 +49,12 @@ impl UTXOCohorts {
continue;
}
// Binary search to find blocks in the timestamp range (lower, upper]
// Binary search to find start, then linear scan for end (typically 0-2 blocks)
let start_idx = chain_state.partition_point(|b| *b.timestamp <= lower_timestamp);
let end_idx = chain_state.partition_point(|b| *b.timestamp <= upper_timestamp);
let end_idx = chain_state[start_idx..]
.iter()
.position(|b| *b.timestamp > upper_timestamp)
.map_or(chain_state.len(), |pos| start_idx + pos);
// Move supply from younger cohort to older cohort
for block_state in &chain_state[start_idx..end_idx] {

View File

@@ -6,7 +6,7 @@ use brk_indexer::Indexer;
use brk_types::{CentsUnsigned, DateIndex, Dollars, Height, OutputType, Sats, TxIndex, TypeIndex};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use tracing::info;
use tracing::{debug, info};
use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex};
use crate::{
@@ -51,7 +51,9 @@ pub fn process_blocks(
exit: &Exit,
) -> Result<()> {
// Create computation context with pre-computed vectors for thread-safe access
debug!("creating ComputeContext");
let ctx = ComputeContext::new(starting_height, last_height, blocks, price);
debug!("ComputeContext created");
if ctx.starting_height > ctx.last_height {
return Ok(());
@@ -99,9 +101,12 @@ pub fn process_blocks(
let mut height_to_price_iter = height_to_price.map(|v| v.into_iter());
let mut dateindex_to_price_iter = dateindex_to_price.map(|v| v.into_iter());
debug!("creating VecsReaders");
let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data);
debug!("VecsReaders created");
// Build txindex -> height lookup map for efficient prev_height computation
debug!("building txindex_to_height RangeMap");
let mut txindex_to_height: RangeMap<TxIndex, Height> = {
let mut map = RangeMap::with_capacity(last_height.to_usize() + 1);
for first_txindex in indexer.vecs.transactions.first_txindex.into_iter() {
@@ -109,6 +114,7 @@ pub fn process_blocks(
}
map
};
debug!("txindex_to_height RangeMap built");
// Create reusable iterators for sequential txout/txin reads (16KB buffered)
let mut txout_iters = TxOutIterators::new(indexer);
@@ -125,6 +131,7 @@ pub fn process_blocks(
let mut first_p2wsh_iter = indexer.vecs.addresses.first_p2wshaddressindex.into_iter();
// Track running totals - recover from previous height if resuming
debug!("recovering addr_counts from height {}", starting_height);
let (mut addr_counts, mut empty_addr_counts) = if starting_height > Height::ZERO {
let addr_counts =
AddressTypeToAddressCount::from((&vecs.addr_count.by_addresstype, starting_height));
@@ -139,11 +146,14 @@ pub fn process_blocks(
AddressTypeToAddressCount::default(),
)
};
debug!("addr_counts recovered");
// Track activity counts - reset each block
let mut activity_counts = AddressTypeToActivityCounts::default();
debug!("creating AddressCache");
let mut cache = AddressCache::new();
debug!("AddressCache created, entering main loop");
// Main block iteration
for height in starting_height.to_usize()..=last_height.to_usize() {
@@ -390,6 +400,21 @@ pub fn process_blocks(
.unwrap_or(Dollars::NAN);
vecs.utxo_cohorts
.truncate_push_aggregate_percentiles(dateindex, spot)?;
// Compute unrealized peak regret by age range (once per day)
// Aggregate cohorts (all, term, etc.) get values via compute_from_stateful
if let Some(spot_cents) = block_price
&& let Some(price_range_max) = ctx.price_range_max.as_ref()
{
vecs.utxo_cohorts.compute_and_push_peak_regret(
chain_state,
height,
timestamp,
spot_cents,
price_range_max,
dateindex,
)?;
}
}
// Periodic checkpoint flush

View File

@@ -109,7 +109,7 @@ pub struct ComputeContext {
pub height_to_price: Option<Vec<CentsUnsigned>>,
/// Sparse table for O(1) range max queries on high prices.
/// Used for computing max price during UTXO holding periods (ATH regret).
/// Used for computing max price during UTXO holding periods (peak regret).
pub price_range_max: Option<PriceRangeMax>,
}
@@ -129,7 +129,7 @@ impl ComputeContext {
.map(|v| v.into_iter().map(|c| *c).collect());
// Build sparse table for O(1) range max queries on HIGH prices
// Used for computing peak price during UTXO holding periods (ATH regret)
// Used for computing peak price during UTXO holding periods (peak regret)
let price_range_max = price
.map(|p| &p.cents.split.height.high)
.map(|v| v.into_iter().map(|c| *c).collect::<Vec<_>>())

View File

@@ -71,20 +71,24 @@ pub fn recover_state(
}
// Import UTXO cohort states - all must succeed
debug!("importing UTXO cohort states at height {}", consistent_height);
if !utxo_cohorts.import_separate_states(consistent_height) {
warn!("UTXO cohort state import failed at height {}", consistent_height);
return Ok(RecoveredState {
starting_height: Height::ZERO,
});
}
debug!("UTXO cohort states imported");
// Import address cohort states - all must succeed
debug!("importing address cohort states at height {}", consistent_height);
if !address_cohorts.import_separate_states(consistent_height) {
warn!("Address cohort state import failed at height {}", consistent_height);
return Ok(RecoveredState {
starting_height: Height::ZERO,
});
}
debug!("address cohort states imported");
Ok(RecoveredState {
starting_height: consistent_height,

View File

@@ -66,9 +66,9 @@ pub fn write(
let stamp = Stamp::from(height);
// Prepare chain_state before parallel write
vecs.chain_state.truncate_if_needed(Height::ZERO)?;
vecs.supply_state.truncate_if_needed(Height::ZERO)?;
for block_state in chain_state {
vecs.chain_state.push(block_state.supply.clone());
vecs.supply_state.push(block_state.supply.clone());
}
vecs.any_address_indexes
@@ -78,7 +78,7 @@ pub fn write(
.chain(vecs.empty_addr_count.par_iter_height_mut())
.chain(vecs.address_activity.par_iter_height_mut())
.chain(rayon::iter::once(
&mut vecs.chain_state as &mut dyn AnyStoredVec,
&mut vecs.supply_state as &mut dyn AnyStoredVec,
))
.chain(vecs.utxo_cohorts.par_iter_vecs_mut())
.chain(vecs.address_cohorts.par_iter_vecs_mut())

View File

@@ -6,7 +6,7 @@ use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVe
use crate::{
ComputeIndexes, indexes,
internal::{ComputedFromHeightSumCum, LazyComputedValueFromHeightSumCum},
internal::{ComputedFromHeightSumCum, LazyComputedValueFromHeightSumCum, ValueFromDateLast},
};
use super::ImportConfig;
@@ -17,6 +17,9 @@ pub struct ActivityMetrics {
/// Total satoshis sent at each height + derived indexes
pub sent: LazyComputedValueFromHeightSumCum,
/// 14-day EMA of sent supply (sats, btc, usd)
pub sent_14d_ema: ValueFromDateLast,
/// Satoshi-blocks destroyed (supply * blocks_old when spent)
pub satblocks_destroyed: EagerVec<PcoVec<Height, Sats>>,
@@ -42,6 +45,14 @@ impl ActivityMetrics {
cfg.price,
)?,
sent_14d_ema: ValueFromDateLast::forced_import(
cfg.db,
&cfg.name("sent_14d_ema"),
cfg.version,
cfg.compute_dollars(),
cfg.indexes,
)?,
satblocks_destroyed: EagerVec::forced_import(
cfg.db,
&cfg.name("satblocks_destroyed"),
@@ -155,6 +166,15 @@ impl ActivityMetrics {
) -> Result<()> {
self.sent.compute_rest(indexes, starting_indexes, exit)?;
// 14-day EMA of sent (sats and dollars)
self.sent_14d_ema.compute_ema(
starting_indexes.dateindex,
&self.sent.sats.dateindex.sum.0,
self.sent.dollars.as_ref().map(|d| &d.dateindex.sum.0),
14,
exit,
)?;
self.coinblocks_destroyed
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(

View File

@@ -1,4 +1,4 @@
use brk_cohort::{CohortContext, Filter};
use brk_cohort::{CohortContext, Filter, TimeFilter};
use brk_types::Version;
use vecdb::Database;
@@ -56,4 +56,24 @@ impl<'a> ImportConfig<'a> {
format!("{}_{suffix}", self.full_name)
}
}
/// Whether this cohort needs the peak_regret metric.
///
/// Only UTXO cohorts qualify, and only those carrying an age-based filter
/// (all, term, time). age_range cohorts compute the metric directly;
/// the other qualifying filters aggregate it from age_range.
pub fn compute_peak_regret(&self) -> bool {
    if !matches!(self.context, CohortContext::Utxo) {
        return false;
    }
    matches!(self.filter, Filter::All | Filter::Term(_) | Filter::Time(_))
}
/// Whether this is an age_range cohort (UTXO context with Time::Range filter).
/// These cohorts have peak_regret computed directly from chain_state.
pub fn is_age_range(&self) -> bool {
    let is_utxo_context = matches!(self.context, CohortContext::Utxo);
    let has_time_range_filter = matches!(self.filter, Filter::Time(TimeFilter::Range(_)));
    is_utxo_context && has_time_range_filter
}
}

View File

@@ -17,9 +17,10 @@ use crate::{
internal::{
CentsUnsignedToDollars, ComputedFromDateLast, ComputedFromDateRatio,
ComputedFromHeightLast, ComputedFromHeightSum, ComputedFromHeightSumCum, DollarsMinus,
DollarsPlus, LazyBinaryFromHeightSum, LazyBinaryFromHeightSumCum, LazyFromDateLast,
LazyFromHeightLast, LazyFromHeightSum, LazyFromHeightSumCum, LazyPriceFromCents,
PercentageDollarsF32, PriceFromHeight, StoredF32Identity,
DollarsPlus, LazyBinaryFromHeightSum, LazyBinaryFromHeightSumCum,
LazyComputedValueFromHeightSumCum, LazyFromDateLast, LazyFromHeightLast, LazyFromHeightSum,
LazyFromHeightSumCum, LazyPriceFromCents, PercentageDollarsF32, PriceFromHeight,
StoredF32Identity, ValueFromDateLast,
},
price,
};
@@ -54,9 +55,12 @@ pub struct RealizedMetrics {
// === Realized Profit/Loss ===
pub realized_profit: ComputedFromHeightSumCum<Dollars>,
pub realized_profit_7d_ema: ComputedFromDateLast<Dollars>,
pub realized_loss: ComputedFromHeightSumCum<Dollars>,
pub realized_loss_7d_ema: ComputedFromDateLast<Dollars>,
pub neg_realized_loss: LazyFromHeightSumCum<Dollars>,
pub net_realized_pnl: ComputedFromHeightSumCum<Dollars>,
pub net_realized_pnl_7d_ema: ComputedFromDateLast<Dollars>,
pub realized_value: ComputedFromHeightSum<Dollars>,
// === Realized vs Realized Cap Ratios (lazy) ===
@@ -106,10 +110,23 @@ pub struct RealizedMetrics {
pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedFromDateLast<StoredF32>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedFromDateLast<StoredF32>,
// === ATH Regret ===
/// Realized ATH regret: Σ((ath - sell_price) × sats)
/// "How much more could have been made by selling at ATH instead"
pub ath_regret: ComputedFromHeightSumCum<Dollars>,
// === Peak Regret ===
/// Realized peak regret: Σ((peak - sell_price) × sats)
/// where peak = max price during holding period.
/// "How much more could have been made by selling at peak instead"
pub peak_regret: ComputedFromHeightSumCum<Dollars>,
/// Peak regret as % of realized cap
pub peak_regret_rel_to_realized_cap: LazyBinaryFromHeightSum<StoredF32, Dollars, Dollars>,
// === Sent in Profit/Loss ===
/// Sats sent in profit (sats/btc/usd)
pub sent_in_profit: LazyComputedValueFromHeightSumCum,
/// 14-day EMA of sent in profit (sats, btc, usd)
pub sent_in_profit_14d_ema: ValueFromDateLast,
/// Sats sent in loss (sats/btc/usd)
pub sent_in_loss: LazyComputedValueFromHeightSumCum,
/// 14-day EMA of sent in loss (sats, btc, usd)
pub sent_in_loss_14d_ema: ValueFromDateLast,
}
impl RealizedMetrics {
@@ -143,6 +160,13 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let realized_profit_7d_ema = ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("realized_profit_7d_ema"),
cfg.version,
cfg.indexes,
)?;
let realized_loss = ComputedFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("realized_loss"),
@@ -150,6 +174,13 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let realized_loss_7d_ema = ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("realized_loss_7d_ema"),
cfg.version,
cfg.indexes,
)?;
let neg_realized_loss = LazyFromHeightSumCum::from_computed::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
@@ -164,6 +195,20 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let net_realized_pnl_7d_ema = ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_7d_ema"),
cfg.version,
cfg.indexes,
)?;
let peak_regret = ComputedFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("realized_peak_regret"),
cfg.version + v2,
cfg.indexes,
)?;
// realized_value is the source for total_realized_pnl (they're identical)
let realized_value = ComputedFromHeightSum::forced_import(
cfg.db,
@@ -360,7 +405,7 @@ impl RealizedMetrics {
Ok(Self {
// === Realized Cap ===
realized_cap_cents,
realized_cap,
realized_cap: realized_cap.clone(),
realized_price,
realized_price_extra,
realized_cap_rel_to_own_market_cap: extended
@@ -392,9 +437,12 @@ impl RealizedMetrics {
// === Realized Profit/Loss ===
realized_profit,
realized_profit_7d_ema,
realized_loss,
realized_loss_7d_ema,
neg_realized_loss,
net_realized_pnl,
net_realized_pnl_7d_ema,
realized_value,
// === Realized vs Realized Cap Ratios (lazy) ===
@@ -508,11 +556,46 @@ impl RealizedMetrics {
)?,
// === ATH Regret ===
// v2: Changed to use max HIGH price during holding period instead of global ATH at send time
ath_regret: ComputedFromHeightSumCum::forced_import(
peak_regret: peak_regret.clone(),
peak_regret_rel_to_realized_cap: LazyBinaryFromHeightSum::from_sumcum_lazy_last::<
PercentageDollarsF32,
_,
>(
&cfg.name("peak_regret_rel_to_realized_cap"),
cfg.version + v1,
peak_regret.height.boxed_clone(),
realized_cap.height.boxed_clone(),
&peak_regret,
&realized_cap,
),
// === Sent in Profit/Loss ===
sent_in_profit: LazyComputedValueFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("realized_ath_regret"),
cfg.version + v2,
&cfg.name("sent_in_profit"),
cfg.version,
cfg.indexes,
cfg.price,
)?,
sent_in_profit_14d_ema: ValueFromDateLast::forced_import(
cfg.db,
&cfg.name("sent_in_profit_14d_ema"),
cfg.version,
cfg.compute_dollars(),
cfg.indexes,
)?,
sent_in_loss: LazyComputedValueFromHeightSumCum::forced_import(
cfg.db,
&cfg.name("sent_in_loss"),
cfg.version,
cfg.indexes,
cfg.price,
)?,
sent_in_loss_14d_ema: ValueFromDateLast::forced_import(
cfg.db,
&cfg.name("sent_in_loss_14d_ema"),
cfg.version,
cfg.compute_dollars(),
cfg.indexes,
)?,
})
@@ -532,7 +615,9 @@ impl RealizedMetrics {
.min(self.profit_value_destroyed.height.len())
.min(self.loss_value_created.height.len())
.min(self.loss_value_destroyed.height.len())
.min(self.ath_regret.height.len())
.min(self.peak_regret.height.len())
.min(self.sent_in_profit.sats.height.len())
.min(self.sent_in_loss.sats.height.len())
}
/// Push realized state values to height-indexed vectors.
@@ -568,9 +653,19 @@ impl RealizedMetrics {
.height
.truncate_push(height, state.loss_value_destroyed().to_dollars())?;
// Peak regret (formerly ATH regret)
self.ath_regret
self.peak_regret
.height
.truncate_push(height, state.ath_regret().to_dollars())?;
.truncate_push(height, state.peak_regret().to_dollars())?;
// Volume at profit/loss
self.sent_in_profit
.sats
.height
.truncate_push(height, state.sent_in_profit())?;
self.sent_in_loss
.sats
.height
.truncate_push(height, state.sent_in_loss())?;
Ok(())
}
@@ -591,7 +686,10 @@ impl RealizedMetrics {
&mut self.loss_value_created.height,
&mut self.loss_value_destroyed.height,
// Peak regret (formerly ATH regret)
&mut self.ath_regret.height,
&mut self.peak_regret.height,
// Sent in profit/loss
&mut self.sent_in_profit.sats.height,
&mut self.sent_in_loss.sats.height,
]
.into_par_iter()
}
@@ -725,11 +823,29 @@ impl RealizedMetrics {
exit,
)?;
// Peak regret (formerly ATH regret)
self.ath_regret.height.compute_sum_of_others(
self.peak_regret.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.ath_regret.height)
.map(|v| &v.peak_regret.height)
.collect::<Vec<_>>(),
exit,
)?;
// Volume at profit/loss
self.sent_in_profit.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.sent_in_profit.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.sent_in_loss.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.sent_in_loss.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -790,7 +906,13 @@ impl RealizedMetrics {
self.loss_value_destroyed
.compute_rest(indexes, starting_indexes, exit)?;
// Peak regret (formerly ATH regret)
self.ath_regret
self.peak_regret
.compute_rest(indexes, starting_indexes, exit)?;
// Volume at profit/loss
self.sent_in_profit
.compute_rest(indexes, starting_indexes, exit)?;
self.sent_in_loss
.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
@@ -856,6 +978,52 @@ impl RealizedMetrics {
exit,
)?;
// 7d EMA of realized profit/loss
self.realized_profit_7d_ema.compute_all(starting_indexes, exit, |v| {
Ok(v.compute_ema(
starting_indexes.dateindex,
&self.realized_profit.dateindex.sum.0,
7,
exit,
)?)
})?;
self.realized_loss_7d_ema.compute_all(starting_indexes, exit, |v| {
Ok(v.compute_ema(
starting_indexes.dateindex,
&self.realized_loss.dateindex.sum.0,
7,
exit,
)?)
})?;
self.net_realized_pnl_7d_ema.compute_all(starting_indexes, exit, |v| {
Ok(v.compute_ema(
starting_indexes.dateindex,
&self.net_realized_pnl.dateindex.sum.0,
7,
exit,
)?)
})?;
// 14-day EMA of sent in profit (sats and dollars)
self.sent_in_profit_14d_ema.compute_ema(
starting_indexes.dateindex,
&self.sent_in_profit.sats.dateindex.sum.0,
self.sent_in_profit.dollars.as_ref().map(|d| &d.dateindex.sum.0),
14,
exit,
)?;
// 14-day EMA of sent in loss (sats and dollars)
self.sent_in_loss_14d_ema.compute_ema(
starting_indexes.dateindex,
&self.sent_in_loss.sats.dateindex.sum.0,
self.sent_in_loss.dollars.as_ref().map(|d| &d.dateindex.sum.0),
14,
exit,
)?;
self.sopr_7d_ema
.compute_ema(starting_indexes.dateindex, &self.sopr, 7, exit)?;

View File

@@ -1,3 +1,4 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Sats, StoredF32, StoredF64, Version};
@@ -64,6 +65,10 @@ pub struct RelativeMetrics {
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
pub invested_capital_in_loss_pct:
Option<LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>>,
// === Unrealized Peak Regret Relative to Market Cap (date-only, lazy) ===
pub unrealized_peak_regret_rel_to_market_cap:
Option<LazyBinaryFromDateLast<StoredF32, Dollars, Dollars>>,
}
impl RelativeMetrics {
@@ -94,6 +99,11 @@ impl RelativeMetrics {
// Own market cap source
let own_market_cap = supply.total.dollars.as_ref();
// For "all" cohort, own_market_cap IS the global market cap
let market_cap = global_market_cap.or_else(|| {
matches!(cfg.filter, Filter::All).then_some(own_market_cap).flatten()
});
Ok(Self {
// === Supply Relative to Circulating Supply (lazy from global supply) ===
supply_rel_to_circulating_supply: (compute_rel_to_all
@@ -189,7 +199,7 @@ impl RelativeMetrics {
// === Unrealized vs Market Cap (lazy from global market cap) ===
unrealized_profit_rel_to_market_cap:
global_market_cap.map(|mc| {
market_cap.map(|mc| {
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::<
PercentageDollarsF32,
_,
@@ -202,7 +212,7 @@ impl RelativeMetrics {
)
}),
unrealized_loss_rel_to_market_cap:
global_market_cap.map(|mc| {
market_cap.map(|mc| {
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::<
PercentageDollarsF32,
_,
@@ -214,7 +224,7 @@ impl RelativeMetrics {
mc,
)
}),
neg_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
neg_unrealized_loss_rel_to_market_cap: market_cap.map(|mc| {
LazyBinaryFromHeightLast::from_computed_height_date_and_lazy_binary_block_last::<
NegPercentageDollarsF32,
_,
@@ -226,7 +236,7 @@ impl RelativeMetrics {
mc,
)
}),
net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| {
net_unrealized_pnl_rel_to_market_cap: market_cap.map(|mc| {
LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::<
PercentageDollarsF32,
_,
@@ -242,7 +252,7 @@ impl RelativeMetrics {
}),
// NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap
nupl: global_market_cap.map(|mc| {
nupl: market_cap.map(|mc| {
LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::<
PercentageDollarsF32,
_,
@@ -382,6 +392,21 @@ impl RelativeMetrics {
&r.realized_cap,
)
}),
// === Peak Regret Relative to Market Cap (date-only, lazy) ===
unrealized_peak_regret_rel_to_market_cap: unrealized
.peak_regret
.as_ref()
.zip(market_cap)
.map(|(pr, mc)| {
LazyBinaryFromDateLast::from_computed_and_derived_last::<PercentageDollarsF32>(
&cfg.name("unrealized_peak_regret_rel_to_market_cap"),
cfg.version,
pr,
mc.rest.dateindex.boxed_clone(),
&mc.rest.dates,
)
}),
})
}
}

View File

@@ -10,7 +10,7 @@ use crate::{
indexes,
internal::{
HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin,
LazyBinaryValueFromHeightLast, ValueFromHeightLast,
LazyBinaryValueFromHeightLast, ValueChangeFromDate, ValueFromHeightLast,
},
};
@@ -21,6 +21,8 @@ use super::ImportConfig;
pub struct SupplyMetrics {
pub total: ValueFromHeightLast,
pub halved: LazyBinaryValueFromHeightLast,
/// 30-day change in supply (net position change) - sats, btc, usd
pub _30d_change: ValueChangeFromDate,
}
impl SupplyMetrics {
@@ -41,9 +43,18 @@ impl SupplyMetrics {
HalveDollars,
>(&cfg.name("supply_halved"), &supply, cfg.price, cfg.version);
let _30d_change = ValueChangeFromDate::forced_import(
cfg.db,
&cfg.name("_30d_change"),
cfg.version,
cfg.compute_dollars(),
cfg.indexes,
)?;
Ok(Self {
total: supply,
halved: supply_halved,
_30d_change,
})
}
@@ -94,6 +105,17 @@ impl SupplyMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.total.compute_rest(indexes, starting_indexes, exit)
self.total.compute_rest(indexes, starting_indexes, exit)?;
// 30-day change in supply
self._30d_change.compute_change(
starting_indexes.dateindex,
&self.total.sats.dateindex.0,
self.total.dollars.as_ref().map(|d| &d.dateindex.0),
30,
exit,
)?;
Ok(())
}
}

View File

@@ -1,10 +1,10 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height, Sats};
use brk_types::{CentsSats, CentsSquaredSats, CentsUnsigned, DateIndex, Dollars, Height};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Exit, GenericStoredVec, ImportableVec, Negate,
TypedVecIterator, Version,
TypedVecIterator,
};
use crate::{
@@ -12,8 +12,8 @@ use crate::{
distribution::state::UnrealizedState,
indexes,
internal::{
ComputedFromHeightAndDateLast, ComputedFromHeightLast, DollarsMinus, DollarsPlus,
LazyBinaryFromHeightLast, LazyFromHeightLast, ValueFromHeightAndDateLast,
ComputedFromDateLast, ComputedFromHeightAndDateLast, ComputedFromHeightLast, DollarsMinus,
DollarsPlus, LazyBinaryFromHeightLast, LazyFromHeightLast, ValueFromHeightAndDateLast,
},
price,
};
@@ -60,10 +60,11 @@ pub struct UnrealizedMetrics {
pub net_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
pub total_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
// === ATH Regret ===
/// Unrealized ATH regret: (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
/// "How much more I'd have if I sold at ATH instead of now" (refined formula accounting for cost basis)
pub ath_regret: ComputedFromHeightLast<Dollars>,
// === Peak Regret (age_range cohorts only) ===
/// Unrealized peak regret: sum of (peak_price - reference_price) × supply
/// where reference_price = max(spot, cost_basis) and peak = max price during holding period.
/// Only computed for age_range cohorts, then aggregated for overlapping cohorts.
pub peak_regret: Option<ComputedFromDateLast<Dollars>>,
}
impl UnrealizedMetrics {
@@ -176,16 +177,18 @@ impl UnrealizedMetrics {
&unrealized_loss,
);
// === ATH Regret ===
// v2: Changed to use HIGH prices consistently for ATH instead of mixing HIGH/CLOSE
// v3: Changed to ComputedFromHeightLast to derive dateindex from height (avoids precision loss)
let v3 = Version::new(3);
let ath_regret = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_ath_regret"),
cfg.version + v3,
cfg.indexes,
)?;
// Peak regret: only for age-based UTXO cohorts
let peak_regret = cfg
.compute_peak_regret()
.then(|| {
ComputedFromDateLast::forced_import(
cfg.db,
&cfg.name("unrealized_peak_regret"),
cfg.version,
cfg.indexes,
)
})
.transpose()?;
Ok(Self {
supply_in_profit,
@@ -204,7 +207,7 @@ impl UnrealizedMetrics {
neg_unrealized_loss,
net_unrealized_pnl,
total_unrealized_pnl,
ath_regret,
peak_regret,
})
}
@@ -226,7 +229,8 @@ impl UnrealizedMetrics {
/// Get minimum length across dateindex-indexed vectors written in block loop.
pub fn min_stateful_dateindex_len(&self) -> usize {
self.supply_in_profit
let mut min = self
.supply_in_profit
.indexes
.sats_dateindex
.len()
@@ -234,7 +238,11 @@ impl UnrealizedMetrics {
.min(self.unrealized_profit.dateindex.len())
.min(self.unrealized_loss.dateindex.len())
.min(self.invested_capital_in_profit.dateindex.len())
.min(self.invested_capital_in_loss.dateindex.len())
.min(self.invested_capital_in_loss.dateindex.len());
if let Some(pr) = &self.peak_regret {
min = min.min(pr.dateindex.len());
}
min
}
/// Push unrealized state values to height-indexed vectors.
@@ -311,25 +319,28 @@ impl UnrealizedMetrics {
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.supply_in_profit.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.height as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss.height as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit_raw as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss_raw as &mut dyn AnyStoredVec,
&mut self.investor_cap_in_profit_raw as &mut dyn AnyStoredVec,
&mut self.investor_cap_in_loss_raw as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_profit.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.invested_capital_in_loss.rest.dateindex as &mut dyn AnyStoredVec,
]
.into_par_iter()
let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![
&mut self.supply_in_profit.height,
&mut self.supply_in_loss.height,
&mut self.unrealized_profit.height,
&mut self.unrealized_loss.height,
&mut self.invested_capital_in_profit.height,
&mut self.invested_capital_in_loss.height,
&mut self.invested_capital_in_profit_raw,
&mut self.invested_capital_in_loss_raw,
&mut self.investor_cap_in_profit_raw,
&mut self.investor_cap_in_loss_raw,
&mut self.supply_in_profit.indexes.sats_dateindex,
&mut self.supply_in_loss.indexes.sats_dateindex,
&mut self.unrealized_profit.rest.dateindex,
&mut self.unrealized_loss.rest.dateindex,
&mut self.invested_capital_in_profit.rest.dateindex,
&mut self.invested_capital_in_loss.rest.dateindex,
];
if let Some(pr) = &mut self.peak_regret {
vecs.push(&mut pr.dateindex);
}
vecs.into_par_iter()
}
/// Compute aggregate values from separate cohorts.
@@ -501,6 +512,22 @@ impl UnrealizedMetrics {
.collect::<Vec<_>>(),
exit,
)?;
// Peak regret aggregation (only if this cohort has peak_regret)
if let Some(pr) = &mut self.peak_regret {
let other_prs: Vec<_> = others.iter().filter_map(|v| v.peak_regret.as_ref()).collect();
if !other_prs.is_empty() {
pr.dateindex.compute_sum_of_others(
starting_indexes.dateindex,
&other_prs
.iter()
.map(|v| &v.dateindex)
.collect::<Vec<_>>(),
exit,
)?;
}
}
Ok(())
}
@@ -582,58 +609,6 @@ impl UnrealizedMetrics {
)?)
})?;
// ATH regret: (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
// This is the refined formula that accounts for cost basis:
// - For UTXOs in profit: regret = ATH - spot (they could have sold at ATH instead of now)
// - For UTXOs in loss: regret = ATH - cost_basis (they could have sold at ATH instead of holding)
// ath = running max of high prices
// Height computation
{
// Pre-compute ATH as running max of high prices
let height_ath: Vec<CentsUnsigned> = {
let mut ath = CentsUnsigned::ZERO;
price
.cents
.split
.height
.high
.into_iter()
.map(|high| {
if *high > ath {
ath = *high;
}
ath
})
.collect()
};
self.ath_regret.height.compute_transform4(
starting_indexes.height,
&price.cents.split.height.close,
&self.supply_in_profit.height,
&self.supply_in_loss.height,
&self.invested_capital_in_loss_raw,
|(h, spot, supply_profit, supply_loss, invested_loss_raw, ..)| {
let ath = height_ath[usize::from(h)];
// (ATH - spot) × supply_in_profit + ATH × supply_in_loss - invested_capital_in_loss
let ath_u128 = ath.as_u128();
let spot_u128 = spot.as_u128();
let profit_regret = (ath_u128 - spot_u128) * supply_profit.as_u128();
// invested_loss_raw is CentsSats (already in cents*sats scale)
let loss_regret = ath_u128 * supply_loss.as_u128() - invested_loss_raw.inner();
let regret_raw = profit_regret + loss_regret;
let regret_cents = CentsUnsigned::new((regret_raw / Sats::ONE_BTC_U128) as u64);
(h, regret_cents.to_dollars())
},
exit,
)?;
}
// DateIndex computation: derive from height values using last-value aggregation
self.ath_regret
.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
}
}

View File

@@ -237,7 +237,7 @@ impl CohortState {
let ath_ps = CentsSats::from_price_sats(ath_price, sats);
let prev_investor_cap = prev_ps.to_investor_cap(pp);
realized.send(current_ps, prev_ps, ath_ps, prev_investor_cap);
realized.send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap);
self.cost_basis_data.as_mut().unwrap().decrement(
pp,
@@ -284,7 +284,7 @@ impl CohortState {
let ath_ps = CentsSats::from_price_sats(ath, sats);
let prev_investor_cap = prev_ps.to_investor_cap(prev_price);
realized.send(current_ps, prev_ps, ath_ps, prev_investor_cap);
realized.send(sats, current_ps, prev_ps, ath_ps, prev_investor_cap);
if current.supply_state.value.is_not_zero() {
self.cost_basis_data.as_mut().unwrap().increment(

View File

@@ -23,8 +23,12 @@ pub struct RealizedState {
loss_value_created_raw: u128,
/// cost_basis × sats for loss cases (= capitulation_flow)
loss_value_destroyed_raw: u128,
/// Raw realized ATH regret: Σ((ath - sell_price) × sats)
ath_regret_raw: u128,
/// Raw realized peak regret: Σ((peak - sell_price) × sats)
peak_regret_raw: u128,
/// Sats sent in profit
sent_in_profit: Sats,
/// Sats sent in loss
sent_in_loss: Sats,
}
impl RealizedState {
@@ -137,12 +141,24 @@ impl RealizedState {
self.profit_value_destroyed()
}
/// Get realized ATH regret as CentsUnsigned.
/// This is Σ((ath - sell_price) × sats) - how much more could have been made
/// by selling at ATH instead of when actually sold.
/// Get realized peak regret as CentsUnsigned.
/// This is Σ((peak - sell_price) × sats) - how much more could have been made
/// by selling at peak instead of when actually sold.
#[inline]
pub fn ath_regret(&self) -> CentsUnsigned {
CentsUnsigned::new((self.ath_regret_raw / Sats::ONE_BTC_U128) as u64)
pub fn peak_regret(&self) -> CentsUnsigned {
CentsUnsigned::new((self.peak_regret_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get sats sent in profit.
///
/// Accumulated by `send` for profit-side and break-even spends; cleared
/// each iteration by `reset_single_iteration_values`.
#[inline]
pub fn sent_in_profit(&self) -> Sats {
self.sent_in_profit
}
/// Get sats sent in loss.
///
/// Accumulated by `send` for loss-side spends; cleared each iteration by
/// `reset_single_iteration_values`.
#[inline]
pub fn sent_in_loss(&self) -> Sats {
self.sent_in_loss
}
pub fn reset_single_iteration_values(&mut self) {
@@ -152,7 +168,9 @@ impl RealizedState {
self.profit_value_destroyed_raw = 0;
self.loss_value_created_raw = 0;
self.loss_value_destroyed_raw = 0;
self.ath_regret_raw = 0;
self.peak_regret_raw = 0;
self.sent_in_profit = Sats::ZERO;
self.sent_in_loss = Sats::ZERO;
}
/// Increment using pre-computed values (for UTXO path)
@@ -189,6 +207,7 @@ impl RealizedState {
#[inline]
pub fn send(
&mut self,
sats: Sats,
current_ps: CentsSats,
prev_ps: CentsSats,
ath_ps: CentsSats,
@@ -199,21 +218,24 @@ impl RealizedState {
self.profit_raw += (current_ps - prev_ps).as_u128();
self.profit_value_created_raw += current_ps.as_u128();
self.profit_value_destroyed_raw += prev_ps.as_u128();
self.sent_in_profit += sats;
}
Ordering::Less => {
self.loss_raw += (prev_ps - current_ps).as_u128();
self.loss_value_created_raw += current_ps.as_u128();
self.loss_value_destroyed_raw += prev_ps.as_u128();
self.sent_in_loss += sats;
}
Ordering::Equal => {
// Break-even: count as profit side (arbitrary but consistent)
self.profit_value_created_raw += current_ps.as_u128();
self.profit_value_destroyed_raw += prev_ps.as_u128();
self.sent_in_profit += sats;
}
}
// Track ATH regret: (ath - sell_price) × sats
self.ath_regret_raw += (ath_ps - current_ps).as_u128();
// Track peak regret: (peak - sell_price) × sats
self.peak_regret_raw += (ath_ps - current_ps).as_u128();
// Inline decrement to avoid recomputation
self.cap_raw -= prev_ps.as_u128();

View File

@@ -7,7 +7,7 @@ use brk_types::{
DateIndex, EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex,
SupplyState, Version,
};
use tracing::info;
use tracing::{debug, info};
use vecdb::{
AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
LazyVecFrom1, PAGE_SIZE, Stamp, TypedVecIterator, VecIndex,
@@ -38,7 +38,7 @@ pub struct Vecs {
#[traversable(skip)]
db: Database,
pub chain_state: BytesVec<Height, SupplyState>,
pub supply_state: BytesVec<Height, SupplyState>,
pub any_address_indexes: AnyAddressIndexesVecs,
pub addresses_data: AddressesDataVecs,
pub utxo_cohorts: UTXOCohorts,
@@ -139,8 +139,8 @@ impl Vecs {
GrowthRateVecs::forced_import(&db, version, indexes, &new_addr_count, &addr_count)?;
let this = Self {
chain_state: BytesVec::forced_import_with(
vecdb::ImportOptions::new(&db, "chain", version)
supply_state: BytesVec::forced_import_with(
vecdb::ImportOptions::new(&db, "supply_state", version)
.with_saved_stamped_changes(SAVED_STAMPED_CHANGES),
)?,
@@ -197,7 +197,7 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
// 1. Find minimum height we have data for across stateful vecs
let current_height = Height::from(self.chain_state.len());
let current_height = Height::from(self.supply_state.len());
let height_based_min = self.min_stateful_height_len();
let dateindex_min = self.min_stateful_dateindex_len();
let min_stateful = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?;
@@ -219,7 +219,7 @@ impl Vecs {
let stamp = Stamp::from(height);
// Rollback BytesVec state and capture results for validation
let chain_state_rollback = self.chain_state.rollback_before(stamp);
let chain_state_rollback = self.supply_state.rollback_before(stamp);
// Validate all rollbacks and imports are consistent
let recovered = recover_state(
@@ -234,14 +234,20 @@ impl Vecs {
if recovered.starting_height.is_zero() {
info!("State recovery validation failed, falling back to fresh start");
}
debug!(
"recover_state completed, starting_height={}",
recovered.starting_height
);
recovered.starting_height
}
StartMode::Fresh => Height::ZERO,
};
debug!("recovered_height={}", recovered_height);
// Fresh start: reset all state
let (starting_height, mut chain_state) = if recovered_height.is_zero() {
self.chain_state.reset()?;
self.supply_state.reset()?;
self.addr_count.reset_height()?;
self.empty_addr_count.reset_height()?;
self.address_activity.reset_height()?;
@@ -256,13 +262,15 @@ impl Vecs {
(Height::ZERO, vec![])
} else {
// Recover chain_state from stored values
debug!("recovering chain_state from stored values");
let height_to_timestamp = &blocks.time.timestamp_monotonic;
let height_to_price = price.map(|p| &p.cents.split.height.close);
let mut height_to_timestamp_iter = height_to_timestamp.into_iter();
let mut height_to_price_iter = height_to_price.map(|v| v.into_iter());
let mut chain_state_iter = self.chain_state.into_iter();
let mut chain_state_iter = self.supply_state.into_iter();
debug!("building supply_state vec for {} heights", recovered_height);
let chain_state = (0..recovered_height.to_usize())
.map(|h| {
let h = Height::from(h);
@@ -274,6 +282,7 @@ impl Vecs {
}
})
.collect();
debug!("chain_state vec built");
(recovered_height, chain_state)
};
@@ -293,16 +302,23 @@ impl Vecs {
}
// 2b. Validate computed versions
debug!("validating computed versions");
let base_version = VERSION;
self.utxo_cohorts.validate_computed_versions(base_version)?;
self.address_cohorts
.validate_computed_versions(base_version)?;
debug!("computed versions validated");
// 3. Get last height from indexer
let last_height = Height::from(indexer.vecs.blocks.blockhash.len().saturating_sub(1));
debug!(
"last_height={}, starting_height={}",
last_height, starting_height
);
// 4. Process blocks
if starting_height <= last_height {
debug!("calling process_blocks");
process_blocks(
self,
indexer,
@@ -401,7 +417,7 @@ impl Vecs {
self.utxo_cohorts
.min_separate_stateful_height_len()
.min(self.address_cohorts.min_separate_stateful_height_len())
.min(Height::from(self.chain_state.len()))
.min(Height::from(self.supply_state.len()))
.min(self.any_address_indexes.min_stamped_height())
.min(self.addresses_data.min_stamped_height())
.min(Height::from(self.addr_count.min_stateful_height()))

View File

@@ -832,4 +832,41 @@ where
decadeindex: period!(decadeindex),
}
}
/// Create from a `ComputedFromDateLast` and a `LazyDateDerivedLast`.
///
/// Pairs `source1` and `source2` through the binary transform `F` at every
/// granularity: the dateindex field is a direct lazy element-wise transform
/// of the two dateindex vectors, and each coarser period (week through
/// decade) combines the corresponding period vectors of the two sources.
pub fn from_computed_and_derived_last<F: BinaryTransform<S1T, S2T, T>>(
    name: &str,
    version: Version,
    source1: &ComputedFromDateLast<S1T>,
    dateindex_source2: IterableBoxedVec<DateIndex, S2T>,
    source2: &LazyDateDerivedLast<S2T>,
) -> Self {
    let ver = version + VERSION;
    Self {
        dateindex: LazyVecFrom2::transformed::<F>(
            name,
            ver,
            source1.dateindex.boxed_clone(),
            dateindex_source2,
        ),
        weekindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.weekindex,
            &source2.weekindex,
        ),
        monthindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.monthindex,
            &source2.monthindex,
        ),
        quarterindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.quarterindex,
            &source2.quarterindex,
        ),
        semesterindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.semesterindex,
            &source2.semesterindex,
        ),
        yearindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.yearindex,
            &source2.yearindex,
        ),
        decadeindex: LazyBinaryTransformLast::from_lazy_last::<F, _, _, _, _>(
            name,
            ver,
            &source1.decadeindex,
            &source2.decadeindex,
        ),
    }
}
}

View File

@@ -7,7 +7,10 @@ use brk_types::{
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{ComputedVecValue, ComputedHeightDerivedSum, LazyBinaryTransformSum, NumericValue};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryTransformSum,
LazyFromHeightLast, NumericValue,
};
const VERSION: Version = Version::ZERO;
@@ -91,4 +94,41 @@ where
decadeindex: period!(decadeindex),
}
}
/// Create from a SumCum source (using only its running sums) and a LazyLast
/// source.
///
/// `source1` follows the SumCum layout (each period exposes `.sum`), while
/// `source2` exposes last values through its `dates` field. Each period's
/// sum is paired with the matching last value via `F`.
pub fn from_sumcum_lazy_last<F, S2ST>(
    name: &str,
    version: Version,
    source1: &ComputedFromHeightSumCum<S1T>,
    source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
    F: BinaryTransform<S1T, S2T, T>,
    S2ST: ComputedVecValue + JsonSchema,
{
    let ver = version + VERSION;
    Self {
        dateindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.dateindex.sum.boxed_clone(),
            source2.dates.dateindex.boxed_clone(),
        ),
        weekindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.weekindex.sum.boxed_clone(),
            source2.dates.weekindex.boxed_clone(),
        ),
        monthindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.monthindex.sum.boxed_clone(),
            source2.dates.monthindex.boxed_clone(),
        ),
        quarterindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.quarterindex.sum.boxed_clone(),
            source2.dates.quarterindex.boxed_clone(),
        ),
        semesterindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.semesterindex.sum.boxed_clone(),
            source2.dates.semesterindex.boxed_clone(),
        ),
        yearindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.yearindex.sum.boxed_clone(),
            source2.dates.yearindex.boxed_clone(),
        ),
        decadeindex: LazyBinaryTransformSum::from_boxed::<F>(
            name,
            ver,
            source1.decadeindex.sum.boxed_clone(),
            source2.dates.decadeindex.boxed_clone(),
        ),
    }
}
}

View File

@@ -19,6 +19,8 @@ mod price;
mod ratio;
mod stddev;
mod unary_last;
mod value_change;
mod value_change_derived;
mod value_derived_last;
mod value_last;
mod value_lazy_last;
@@ -44,6 +46,8 @@ pub use price::*;
pub use ratio::*;
pub use stddev::*;
pub use unary_last::*;
pub use value_change::*;
pub use value_change_derived::*;
pub use value_derived_last::*;
pub use value_last::*;
pub use value_lazy_last::*;

View File

@@ -0,0 +1,77 @@
//! Change values from DateIndex - stores signed sats (changes can be negative).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Sats, SatsSigned, Version};
use vecdb::{CollectableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes, price};
use super::LazyValueChangeDateDerived;
const VERSION: Version = Version::ZERO;
/// Change values indexed by date - uses signed sats since changes can be negative.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ValueChangeFromDate {
    // Eagerly stored signed-sats change per date (the source of truth).
    #[traversable(rename = "sats")]
    pub sats: EagerVec<PcoVec<DateIndex, SatsSigned>>,
    // Lazily derived views: bitcoin, period aggregations, optional dollars.
    #[traversable(flatten)]
    pub rest: LazyValueChangeDateDerived,
}
impl ValueChangeFromDate {
    /// Open (or create) the backing vectors for `name` inside `db`.
    ///
    /// The eager signed-sats vector is imported first; the lazily derived
    /// views are then built on top of a clone of its handle. `compute_dollars`
    /// controls whether a USD vector is created at all.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        compute_dollars: bool,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let versioned = version + VERSION;
        let sats_vec = EagerVec::forced_import(db, name, versioned)?;
        let derived = LazyValueChangeDateDerived::from_source(
            db,
            name,
            sats_vec.boxed_clone(),
            versioned,
            compute_dollars,
            indexes,
        )?;
        Ok(Self {
            sats: sats_vec,
            rest: derived,
        })
    }

    /// Compute the N-day (`period`) change from an unsigned sats source and,
    /// when both a dollars vector and a dollars source exist, from the
    /// dollars source as well.
    pub fn compute_change(
        &mut self,
        starting_dateindex: DateIndex,
        sats_source: &impl CollectableVec<DateIndex, Sats>,
        dollars_source: Option<&impl CollectableVec<DateIndex, Dollars>>,
        period: usize,
        exit: &Exit,
    ) -> Result<()> {
        self.sats
            .compute_change(starting_dateindex, sats_source, period, exit)?;
        // Dollars update requires both the target vector and a source.
        if let Some(dollars_vec) = self.rest.dollars.as_mut() {
            if let Some(source) = dollars_source {
                dollars_vec
                    .dateindex
                    .compute_change(starting_dateindex, source, period, exit)?;
            }
        }
        Ok(())
    }

    /// Fill the dollars vector from price data after the sats change has
    /// been computed. Delegates to the lazily derived views.
    pub fn compute_dollars_from_price(
        &mut self,
        price: Option<&price::Vecs>,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.rest
            .compute_dollars_from_price(price, starting_indexes, exit)
    }
}

View File

@@ -0,0 +1,84 @@
//! Lazy derived values for change (bitcoin from sats, period aggregations).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, SatsSigned, Version};
use vecdb::{Database, Exit, IterableBoxedVec};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedFromDateLast, LazyDateDerivedLast, LazyFromDateLast, SatsSignedToBitcoin},
price,
traits::ComputeFromBitcoin,
utils::OptionExt,
};
const VERSION: Version = Version::ZERO;
/// Lazy derived values for change (bitcoin from sats, period aggregations).
#[derive(Clone, Traversable)]
pub struct LazyValueChangeDateDerived {
    // Signed-sats change with lazily derived period views.
    pub sats: LazyDateDerivedLast<SatsSigned>,
    // Bitcoin view of the same change, derived via `SatsSignedToBitcoin`.
    pub bitcoin: LazyFromDateLast<Bitcoin, SatsSigned>,
    // USD change; present only when dollar computation was requested at import.
    pub dollars: Option<ComputedFromDateLast<Dollars>>,
}
impl LazyValueChangeDateDerived {
    /// Build all derived views on top of `source` (the eager signed-sats vec).
    ///
    /// The bitcoin view reuses `source` through `SatsSignedToBitcoin`; the
    /// dollars vector is created in `db` only when `compute_dollars` is set.
    pub fn from_source(
        db: &Database,
        name: &str,
        source: IterableBoxedVec<DateIndex, SatsSigned>,
        version: Version,
        compute_dollars: bool,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let versioned = version + VERSION;
        let sats_views =
            LazyDateDerivedLast::from_source(name, versioned, source.clone(), indexes);
        let bitcoin_views = LazyFromDateLast::from_derived::<SatsSignedToBitcoin>(
            &format!("{name}_btc"),
            versioned,
            source,
            &sats_views,
        );
        let dollar_views = if compute_dollars {
            Some(ComputedFromDateLast::forced_import(
                db,
                &format!("{name}_usd"),
                versioned,
                indexes,
            )?)
        } else {
            None
        };
        Ok(Self {
            sats: sats_views,
            bitcoin: bitcoin_views,
            dollars: dollar_views,
        })
    }

    /// Fill the USD vector (if present) from the bitcoin change and the daily
    /// close price. No-op when dollars were not requested.
    pub fn compute_dollars_from_price(
        &mut self,
        price: Option<&price::Vecs>,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        let Some(dollars) = self.dollars.as_mut() else {
            return Ok(());
        };
        let btc_by_date = &*self.bitcoin.dateindex;
        // NOTE(review): `price.u()` assumes price is Some whenever dollars
        // exist — confirm against callers.
        let close_by_date = &price.u().usd.split.close.dateindex;
        dollars.compute_all(starting_indexes, exit, |vec| {
            vec.compute_from_bitcoin(starting_indexes.dateindex, btc_by_date, close_by_date, exit)
        })?;
        Ok(())
    }
}

View File

@@ -2,13 +2,13 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Sats, Version};
use brk_types::{DateIndex, Dollars, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use vecdb::{CollectableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes, price};
use super::LazyValueDateDerivedLast;
use super::{ComputedFromDateLast, LazyValueDateDerivedLast};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
@@ -70,7 +70,7 @@ impl ValueFromDateLast {
pub fn compute_dollars<F>(&mut self, compute: F) -> Result<()>
where
F: FnMut(&mut crate::internal::ComputedFromDateLast<brk_types::Dollars>) -> Result<()>,
F: FnMut(&mut ComputedFromDateLast<Dollars>) -> Result<()>,
{
self.rest.compute_dollars(compute)
}
@@ -84,4 +84,63 @@ impl ValueFromDateLast {
self.rest
.compute_dollars_from_price(price, starting_indexes, exit)
}
/// Compute both sats and dollars using provided closures.
///
/// `compute_sats` always runs against the eager sats vector;
/// `compute_dollars` runs only when a dollars vector exists.
pub fn compute_both<S, D>(
    &mut self,
    compute_sats: S,
    compute_dollars: D,
) -> Result<()>
where
    S: FnOnce(&mut EagerVec<PcoVec<DateIndex, Sats>>) -> Result<()>,
    D: FnOnce(&mut ComputedFromDateLast<Dollars>) -> Result<()>,
{
    compute_sats(&mut self.sats_dateindex)?;
    match self.rest.dollars.as_mut() {
        Some(dollars_vec) => compute_dollars(dollars_vec),
        None => Ok(()),
    }
}
/// Compute EMA for sats and optionally dollars from source vecs.
///
/// The dollars EMA is updated only when both the target vector and a
/// dollars source are present.
pub fn compute_ema(
    &mut self,
    starting_dateindex: DateIndex,
    sats_source: &impl CollectableVec<DateIndex, Sats>,
    dollars_source: Option<&impl CollectableVec<DateIndex, Dollars>>,
    period: usize,
    exit: &Exit,
) -> Result<()> {
    self.sats_dateindex
        .compute_ema(starting_dateindex, sats_source, period, exit)?;
    if let Some(dollars_vec) = self.rest.dollars.as_mut() {
        if let Some(source) = dollars_source {
            dollars_vec
                .dateindex
                .compute_ema(starting_dateindex, source, period, exit)?;
        }
    }
    Ok(())
}
/// Compute N-day change for sats and optionally dollars from source vecs.
///
/// The dollars change is updated only when both the target vector and a
/// dollars source are present.
pub fn compute_change(
    &mut self,
    starting_dateindex: DateIndex,
    sats_source: &impl CollectableVec<DateIndex, Sats>,
    dollars_source: Option<&impl CollectableVec<DateIndex, Dollars>>,
    period: usize,
    exit: &Exit,
) -> Result<()> {
    self.sats_dateindex
        .compute_change(starting_dateindex, sats_source, period, exit)?;
    if let Some((dollars_vec, source)) = self.rest.dollars.as_mut().zip(dollars_source) {
        dollars_vec
            .dateindex
            .compute_change(starting_dateindex, source, period, exit)?;
    }
    Ok(())
}
}

View File

@@ -7,8 +7,8 @@ use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedFromHeightSum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryHeightDerivedSum,
NumericValue,
ComputedFromHeightSum, ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue,
LazyBinaryHeightDerivedSum, LazyFromHeightLast, NumericValue,
};
const VERSION: Version = Version::ZERO;
@@ -100,4 +100,31 @@ where
),
}
}
/// Create from a SumCum source (using only sum) and a LazyLast source.
/// Produces sum-only output (no cumulative).
pub fn from_sumcum_lazy_last<F, S2ST>(
    name: &str,
    version: Version,
    height_source1: IterableBoxedVec<Height, S1T>,
    height_source2: IterableBoxedVec<Height, S2T>,
    source1: &ComputedFromHeightSumCum<S1T>,
    source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
    F: BinaryTransform<S1T, S2T, T>,
    S2ST: ComputedVecValue + JsonSchema,
{
    let ver = version + VERSION;
    // Height level: direct lazy element-wise transform of the two sources.
    let height = LazyVecFrom2::transformed::<F>(name, ver, height_source1, height_source2);
    // Derived levels: pair each period's running sum with the matching
    // last value.
    let rest =
        LazyBinaryHeightDerivedSum::from_sumcum_lazy_last::<F, S2ST>(name, ver, source1, source2);
    Self { height, rest }
}
}

View File

@@ -6,7 +6,10 @@ use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{ComputedVecValue, ComputedHeightDerivedSum, LazyBinaryFromDateSum, LazyBinaryTransformSum, NumericValue};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryFromDateSum,
LazyBinaryTransformSum, LazyFromHeightLast, NumericValue,
};
const VERSION: Version = Version::ZERO;
@@ -80,4 +83,33 @@ where
),
}
}
/// Create from a SumCum source (using only sum) and a LazyLast source.
pub fn from_sumcum_lazy_last<F, S2ST>(
    name: &str,
    version: Version,
    source1: &ComputedFromHeightSumCum<S1T>,
    source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
    F: BinaryTransform<S1T, S2T, T>,
    S2ST: ComputedVecValue + JsonSchema,
{
    let ver = version + VERSION;
    // Date-level periods are delegated; the difficulty-epoch level pairs the
    // epoch running sum with the epoch last value directly.
    let dates =
        LazyBinaryFromDateSum::from_sumcum_lazy_last::<F, S2ST>(name, ver, source1, source2);
    let difficultyepoch = LazyBinaryTransformSum::from_boxed::<F>(
        name,
        ver,
        source1.difficultyepoch.sum.boxed_clone(),
        source2.difficultyepoch.boxed_clone(),
    );
    Self {
        dates,
        difficultyepoch,
    }
}
}

View File

@@ -1,4 +1,4 @@
use brk_types::{Bitcoin, Sats};
use brk_types::{Bitcoin, Sats, SatsSigned};
use vecdb::UnaryTransform;
/// Sats -> Bitcoin (divide by 1e8)
@@ -10,3 +10,13 @@ impl UnaryTransform<Sats, Bitcoin> for SatsToBitcoin {
Bitcoin::from(sats)
}
}
/// SatsSigned -> Bitcoin (divide by 1e8, preserves sign)
pub struct SatsSignedToBitcoin;

impl UnaryTransform<SatsSigned, Bitcoin> for SatsSignedToBitcoin {
    // Converts through Bitcoin's `From<SatsSigned>` impl.
    #[inline(always)]
    fn apply(sats: SatsSigned) -> Bitcoin {
        sats.into()
    }
}