global: snapshot

This commit is contained in:
nym21
2026-03-05 16:11:25 +01:00
parent 6f2a87be4f
commit eedb8d22c1
61 changed files with 2035 additions and 2757 deletions

View File

@@ -111,13 +111,8 @@ impl AddressCohorts {
self.par_iter_mut()
.try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?;
// 2. Compute net_sentiment.height for separate cohorts (greed - pain)
self.par_iter_separate_mut().try_for_each(|v| {
v.metrics
.compute_net_sentiment_height(starting_indexes, exit)
})?;
// 3. Compute net_sentiment.height for aggregate cohorts (weighted average)
// 2. Compute net_sentiment.height for aggregate cohorts (weighted average).
// Separate cohorts already computed net_sentiment in step 1 (inside compute_rest_part1).
self.for_each_aggregate(|vecs, sources| {
let metrics: Vec<_> = sources.iter().map(|v| &v.metrics).collect();
vecs.metrics

View File

@@ -189,12 +189,14 @@ impl DynCohortVecs for AddressCohortVecs {
&mut self,
height: Height,
height_price: Cents,
is_day_boundary: bool,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics.compute_then_truncate_push_unrealized_states(
height,
height_price,
&mut state.inner,
is_day_boundary,
)?;
}
Ok(())
@@ -209,18 +211,15 @@ impl DynCohortVecs for AddressCohortVecs {
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)?;
// Separate cohorts (with state) compute net_sentiment = greed - pain directly.
// Aggregate cohorts get it via weighted average in groups.rs.
if self.state.is_some() {
self.metrics
.compute_net_sentiment_height(starting_indexes, exit)?;
}
Ok(())
}
fn compute_net_sentiment_height(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_net_sentiment_height(starting_indexes, exit)
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {
if let Some(state) = self.state.as_mut() {
state.inner.write(height, cleanup)?;

View File

@@ -28,6 +28,7 @@ pub trait DynCohortVecs: Send + Sync {
&mut self,
height: Height,
height_price: Cents,
is_day_boundary: bool,
) -> Result<()>;
/// First phase of post-processing computations.
@@ -39,13 +40,6 @@ pub trait DynCohortVecs: Send + Sync {
exit: &Exit,
) -> Result<()>;
/// Compute net_sentiment.height for separate cohorts (greed - pain).
fn compute_net_sentiment_height(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()>;
/// Write state checkpoint to disk.
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()>;

View File

@@ -2,7 +2,7 @@ use std::path::Path;
use brk_cohort::{
ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount, ByMaxAge, ByMinAge,
BySpendableType, ByYear, CohortContext, Filter, Term,
ByClass, BySpendableType, CohortContext, Filter, Term,
};
use brk_error::Result;
use brk_traversable::Traversable;
@@ -17,7 +17,7 @@ use crate::distribution::metrics::{
ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, ImportConfig, SupplyMetrics,
};
use super::vecs::UTXOCohortVecs;
use super::{percentiles::PercentileCache, vecs::UTXOCohortVecs};
use crate::distribution::state::UTXOCohortState;
@@ -27,7 +27,7 @@ const VERSION: Version = Version::new(0);
///
/// Each group uses a concrete metrics type matching its required features:
/// - age_range: extended realized + extended cost basis
/// - epoch/year/amount/type: basic metrics with relative
/// - epoch/class/amount/type: basic metrics with relative
/// - all: extended + adjusted (no rel_to_all)
/// - sth: extended + adjusted
/// - lth: extended
@@ -45,23 +45,22 @@ pub struct UTXOCohorts<M: StorageMode = Rw> {
pub amount_range: ByAmountRange<UTXOCohortVecs<BasicCohortMetrics<M>>>,
pub lt_amount: ByLowerThanAmount<UTXOCohortVecs<BasicCohortMetrics<M>>>,
pub epoch: ByEpoch<UTXOCohortVecs<BasicCohortMetrics<M>>>,
pub year: ByYear<UTXOCohortVecs<BasicCohortMetrics<M>>>,
pub class: ByClass<UTXOCohortVecs<BasicCohortMetrics<M>>>,
pub type_: BySpendableType<UTXOCohortVecs<BasicCohortMetrics<M>>>,
#[traversable(skip)]
pub(super) percentile_cache: PercentileCache,
/// Cached partition_point positions for tick_tock boundary searches.
/// Avoids O(log n) binary search per boundary per block; scans forward
/// from last known position (typically O(1) per boundary).
#[traversable(skip)]
pub(super) tick_tock_cached_positions: [usize; 20],
}
macro_rules! collect_separate {
($self:expr, $method:ident, $trait_ref:ty) => {{
let mut v: Vec<$trait_ref> = Vec::with_capacity(UTXOCohorts::SEPARATE_COHORT_CAPACITY);
v.extend($self.age_range.$method().map(|x| x as $trait_ref));
v.extend($self.epoch.$method().map(|x| x as $trait_ref));
v.extend($self.year.$method().map(|x| x as $trait_ref));
v.extend($self.amount_range.$method().map(|x| x as $trait_ref));
v.extend($self.type_.$method().map(|x| x as $trait_ref));
v
}};
}
impl UTXOCohorts<Rw> {
/// ~71 separate cohorts (21 age + 5 epoch + 18 class + 15 amount + 12 type)
const SEPARATE_COHORT_CAPACITY: usize = 80;
/// Import all UTXO cohorts from database.
pub(crate) fn forced_import(
db: &Database,
@@ -123,7 +122,7 @@ impl UTXOCohorts<Rw> {
let amount_range = ByAmountRange::try_new(&basic_separate)?;
let epoch = ByEpoch::try_new(&basic_separate)?;
let year = ByYear::try_new(&basic_separate)?;
let class = ByClass::try_new(&basic_separate)?;
let type_ = BySpendableType::try_new(&basic_separate)?;
// Phase 3: Import "all" cohort with pre-imported supply.
@@ -223,7 +222,7 @@ impl UTXOCohorts<Rw> {
sth,
lth,
epoch,
year,
class,
type_,
max_age,
min_age,
@@ -231,26 +230,39 @@ impl UTXOCohorts<Rw> {
amount_range,
lt_amount,
ge_amount,
percentile_cache: PercentileCache::default(),
tick_tock_cached_positions: [0; 20],
})
}
/// ~71 separate cohorts (21 age + 5 epoch + 18 year + 15 amount + 12 type)
const SEPARATE_COHORT_CAPACITY: usize = 80;
pub(crate) fn par_iter_separate_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn DynCohortVecs> {
collect_separate!(self, iter_mut, &mut dyn DynCohortVecs).into_par_iter()
let Self {
age_range, epoch, class, amount_range, type_, ..
} = self;
age_range
.par_iter_mut()
.map(|x| x as &mut dyn DynCohortVecs)
.chain(epoch.par_iter_mut().map(|x| x as &mut dyn DynCohortVecs))
.chain(class.par_iter_mut().map(|x| x as &mut dyn DynCohortVecs))
.chain(
amount_range
.par_iter_mut()
.map(|x| x as &mut dyn DynCohortVecs),
)
.chain(type_.par_iter_mut().map(|x| x as &mut dyn DynCohortVecs))
}
/// Immutable iterator over all separate (stateful) cohorts.
pub(crate) fn iter_separate(&self) -> impl Iterator<Item = &dyn DynCohortVecs> {
collect_separate!(self, iter, &dyn DynCohortVecs).into_iter()
}
/// Mutable iterator over all separate cohorts (non-parallel).
pub(crate) fn iter_separate_mut(&mut self) -> impl Iterator<Item = &mut dyn DynCohortVecs> {
collect_separate!(self, iter_mut, &mut dyn DynCohortVecs).into_iter()
self.age_range
.iter()
.map(|x| x as &dyn DynCohortVecs)
.chain(self.epoch.iter().map(|x| x as &dyn DynCohortVecs))
.chain(self.class.iter().map(|x| x as &dyn DynCohortVecs))
.chain(self.amount_range.iter().map(|x| x as &dyn DynCohortVecs))
.chain(self.type_.iter().map(|x| x as &dyn DynCohortVecs))
}
pub(crate) fn compute_overlapping_vecs(
@@ -258,90 +270,53 @@ impl UTXOCohorts<Rw> {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let age_range = &self.age_range;
let amount_range = &self.amount_range;
let Self {
all, sth, lth, age_range, max_age, min_age,
ge_amount, amount_range, lt_amount,
..
} = self;
// all: aggregate of all age_range
// Note: realized.extended rolling sums are computed from base in compute_rest_part2.
// Note: cost_basis.extended percentiles are computed in truncate_push_aggregate_percentiles.
{
let sources_dyn: Vec<&dyn CohortMetricsBase> = age_range
.iter()
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.all
.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)?;
}
let ar = &*age_range;
let amr = &*amount_range;
let si = starting_indexes;
// sth: aggregate of matching age_range
{
let sth_filter = self.sth.metrics.filter().clone();
let sources_dyn: Vec<&dyn CohortMetricsBase> = age_range
.iter()
.filter(|v| sth_filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.sth
.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)?;
}
let tasks: Vec<Box<dyn FnOnce() -> Result<()> + Send + '_>> = vec![
Box::new(|| {
let sources = filter_sources_from(ar.iter(), None);
all.metrics.compute_base_from_others(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(sth.metrics.filter()));
sth.metrics.compute_base_from_others(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(lth.metrics.filter()));
lth.metrics.compute_base_from_others(si, &sources, exit)
}),
Box::new(|| {
min_age.par_iter_mut().try_for_each(|vecs| {
let sources = filter_sources_from(ar.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_base_from_others(si, &sources, exit)
})
}),
Box::new(|| {
max_age.par_iter_mut().try_for_each(|vecs| {
let sources = filter_sources_from(ar.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_base_from_others(si, &sources, exit)
})
}),
Box::new(|| {
ge_amount.par_iter_mut().chain(lt_amount.par_iter_mut()).try_for_each(|vecs| {
let sources = filter_sources_from(amr.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_base_from_others(si, &sources, exit)
})
}),
];
// lth: aggregate of matching age_range
{
let lth_filter = self.lth.metrics.filter().clone();
let sources_dyn: Vec<&dyn CohortMetricsBase> = age_range
.iter()
.filter(|v| lth_filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.lth
.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)?;
}
// min_age: base from matching age_range
self.min_age
.par_iter_mut()
.try_for_each(|vecs| -> Result<()> {
let filter = vecs.metrics.filter().clone();
let sources_dyn: Vec<&dyn CohortMetricsBase> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)
})?;
// max_age: base + peak_regret from matching age_range
self.max_age
.par_iter_mut()
.try_for_each(|vecs| -> Result<()> {
let filter = vecs.metrics.filter().clone();
let sources_dyn: Vec<&dyn CohortMetricsBase> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)
})?;
// ge_amount, lt_amount: base only from matching amount_range
self.ge_amount
.par_iter_mut()
.chain(self.lt_amount.par_iter_mut())
.try_for_each(|vecs| {
let filter = vecs.metrics.filter().clone();
let sources_dyn: Vec<&dyn CohortMetricsBase> = amount_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics
.compute_base_from_others(starting_indexes, &sources_dyn, exit)
})?;
tasks
.into_par_iter()
.map(|f| f())
.collect::<Result<Vec<_>>>()?;
Ok(())
}
@@ -373,7 +348,7 @@ impl UTXOCohorts<Rw> {
.map(|x| x as &mut dyn DynCohortVecs),
);
all.extend(self.epoch.iter_mut().map(|x| x as &mut dyn DynCohortVecs));
all.extend(self.year.iter_mut().map(|x| x as &mut dyn DynCohortVecs));
all.extend(self.class.iter_mut().map(|x| x as &mut dyn DynCohortVecs));
all.extend(
self.amount_range
.iter_mut()
@@ -389,100 +364,56 @@ impl UTXOCohorts<Rw> {
.try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?;
}
// 2. Compute net_sentiment.height for separate cohorts (greed - pain)
self.par_iter_separate_mut()
.try_for_each(|v| v.compute_net_sentiment_height(starting_indexes, exit))?;
// 3. Compute net_sentiment.height for aggregate cohorts (weighted average)
// 2. Compute net_sentiment.height for aggregate cohorts (weighted average).
// Separate cohorts already computed net_sentiment in step 1 (inside compute_rest_part1).
{
let age_range = &self.age_range;
let amount_range = &self.amount_range;
let Self {
all, sth, lth, age_range, max_age, min_age,
ge_amount, amount_range, lt_amount,
..
} = self;
// all
{
let sources: Vec<_> = age_range
.iter()
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.all.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
let ar = &*age_range;
let amr = &*amount_range;
let si = starting_indexes;
// sth
{
let filter = self.sth.metrics.filter().clone();
let sources: Vec<_> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.sth.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
let tasks: Vec<Box<dyn FnOnce() -> Result<()> + Send + '_>> = vec![
Box::new(|| {
let sources = filter_sources_from(ar.iter(), None);
all.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(sth.metrics.filter()));
sth.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(lth.metrics.filter()));
lth.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
}),
Box::new(|| {
min_age.par_iter_mut().try_for_each(|vecs| {
let sources = filter_sources_from(ar.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
})
}),
Box::new(|| {
max_age.par_iter_mut().try_for_each(|vecs| {
let sources = filter_sources_from(ar.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
})
}),
Box::new(|| {
ge_amount.par_iter_mut().chain(lt_amount.par_iter_mut()).try_for_each(|vecs| {
let sources = filter_sources_from(amr.iter(), Some(vecs.metrics.filter()));
vecs.metrics.compute_net_sentiment_from_others_dyn(si, &sources, exit)
})
}),
];
// lth
{
let filter = self.lth.metrics.filter().clone();
let sources: Vec<_> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
self.lth.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
// min_age, max_age from age_range
for vecs in self.min_age.iter_mut() {
let filter = vecs.metrics.filter().clone();
let sources: Vec<_> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
for vecs in self.max_age.iter_mut() {
let filter = vecs.metrics.filter().clone();
let sources: Vec<_> = age_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
// ge_amount, lt_amount from amount_range
for vecs in self.ge_amount.iter_mut().chain(self.lt_amount.iter_mut()) {
let filter = vecs.metrics.filter().clone();
let sources: Vec<_> = amount_range
.iter()
.filter(|v| filter.includes(v.metrics.filter()))
.map(|v| &v.metrics as &dyn CohortMetricsBase)
.collect();
vecs.metrics.compute_net_sentiment_from_others_dyn(
starting_indexes,
&sources,
exit,
)?;
}
tasks
.into_par_iter()
.map(|f| f())
.collect::<Result<Vec<_>>>()?;
}
Ok(())
@@ -532,116 +463,37 @@ impl UTXOCohorts<Rw> {
// Clone all_supply_sats for non-all cohorts.
let all_supply_sats = self.all.metrics.supply.total.sats.height.read_only_clone();
self.sth.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&up_to_1h_value_created,
&up_to_1h_value_destroyed,
&all_supply_sats,
exit,
)?;
self.lth.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)?;
self.age_range.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.max_age.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&up_to_1h_value_created,
&up_to_1h_value_destroyed,
&all_supply_sats,
exit,
)
})?;
self.min_age.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.ge_amount.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.epoch.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.year.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.amount_range.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.lt_amount.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
self.type_.par_iter_mut().try_for_each(|v| {
v.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
// Destructure to allow parallel mutable access to independent fields.
let Self {
sth, lth, age_range, max_age, min_age,
ge_amount, amount_range, lt_amount, epoch, class, type_, ..
} = self;
// All remaining groups run in parallel. Each closure owns an exclusive &mut
// to its field and shares read-only references to common data.
let vc = &up_to_1h_value_created;
let vd = &up_to_1h_value_destroyed;
let ss = &all_supply_sats;
let tasks: Vec<Box<dyn FnOnce() -> Result<()> + Send + '_>> = vec![
Box::new(|| sth.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, vc, vd, ss, exit)),
Box::new(|| lth.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit)),
Box::new(|| age_range.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| max_age.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, vc, vd, ss, exit))),
Box::new(|| min_age.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| ge_amount.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| epoch.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| class.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| amount_range.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| lt_amount.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
Box::new(|| type_.par_iter_mut().try_for_each(|v| v.metrics.compute_rest_part2(blocks, prices, starting_indexes, height_to_market_cap, ss, exit))),
];
tasks
.into_par_iter()
.map(|f| f())
.collect::<Result<Vec<_>>>()?;
Ok(())
}
@@ -668,7 +520,7 @@ impl UTXOCohorts<Rw> {
for v in self.epoch.iter_mut() {
vecs.extend(v.metrics.collect_all_vecs_mut());
}
for v in self.year.iter_mut() {
for v in self.class.iter_mut() {
vecs.extend(v.metrics.collect_all_vecs_mut());
}
for v in self.amount_range.iter_mut() {
@@ -744,3 +596,20 @@ impl UTXOCohorts<Rw> {
Ok(())
}
}
/// Filter source cohorts by an optional filter, returning dyn CohortMetricsBase refs.
/// When `filter` is `None`, every source is included (used for the "all" aggregate).
fn filter_sources_from<'a, M: CohortMetricsBase + 'a>(
    sources: impl Iterator<Item = &'a UTXOCohortVecs<M>>,
    filter: Option<&Filter>,
) -> Vec<&'a dyn CohortMetricsBase> {
    sources
        .filter(|v| match filter {
            Some(f) => f.includes(v.metrics.filter()),
            None => true,
        })
        .map(|v| &v.metrics as &dyn CohortMetricsBase)
        .collect()
}

View File

@@ -2,45 +2,69 @@ use std::{cmp::Reverse, collections::BinaryHeap, fs, path::Path};
use brk_cohort::{Filtered, TERM_NAMES};
use brk_error::Result;
use brk_types::{
BasisPoints16, Cents, CentsCompact, CostBasisDistribution, Date, Height, Sats,
};
use vecdb::WritableVec;
use brk_types::{Cents, CentsCompact, CostBasisDistribution, Date, Height, Sats};
use crate::internal::{PERCENTILES, PERCENTILES_LEN, compute_spot_percentile_rank};
use crate::internal::{PERCENTILES, PERCENTILES_LEN};
use crate::distribution::metrics::{CohortMetricsBase, CostBasisExtended};
use super::groups::UTXOCohorts;
/// Significant digits for cost basis prices (after rounding to dollars).
const COST_BASIS_PRICE_DIGITS: i32 = 5;
/// Percentile arrays computed for one aggregate target (all / sth / lth),
/// kept so intermediate blocks can re-push them without a fresh K-way merge.
#[derive(Clone, Default)]
pub(super) struct CachedPercentiles {
    // Percentile prices in the sats dimension (first array pushed to push_arrays).
    sat_result: [Cents; PERCENTILES_LEN],
    // Percentile prices in the USD dimension (second array pushed to push_arrays).
    usd_result: [Cents; PERCENTILES_LEN],
}

impl CachedPercentiles {
    /// Push both cached percentile arrays into the extended cost-basis
    /// metrics at `height`.
    fn push(&self, height: Height, ext: &mut CostBasisExtended) -> Result<()> {
        ext.push_arrays(height, &self.sat_result, &self.usd_result)
    }
}
/// Cached percentile results for all/sth/lth.
/// Avoids re-merging 21 BTreeMaps on every block.
#[derive(Clone, Default)]
pub(super) struct PercentileCache {
    /// Cached arrays for the "all" aggregate cohort.
    all: CachedPercentiles,
    /// Cached arrays for the short-term (sth) aggregate cohort.
    sth: CachedPercentiles,
    /// Cached arrays for the long-term (lth) aggregate cohort.
    lth: CachedPercentiles,
    /// False until the first full merge populates the arrays; while false
    /// the slow (full-merge) path is taken regardless of day boundary.
    initialized: bool,
}
impl UTXOCohorts {
/// Compute and push percentiles for aggregate cohorts (all, sth, lth).
///
/// Single K-way merge pass over all age_range cohorts computes percentiles
/// for all 3 targets simultaneously, since each cohort belongs to exactly
/// one of STH/LTH and always contributes to ALL.
///
/// Uses BinaryHeap with direct BTreeMap iterators — O(log K) merge
/// with zero intermediate Vec allocation.
/// Full K-way merge only runs at day boundaries or when the cache is empty.
/// For intermediate blocks, pushes cached percentile arrays.
/// NOTE(review): `spot` is not used on either branch below — the
/// spot-percentile-rank pushes appear to have been dropped in this
/// revision; confirm whether the parameter is kept deliberately for
/// caller compatibility or should be removed/forwarded.
pub(crate) fn truncate_push_aggregate_percentiles(
    &mut self,
    height: Height,
    spot: Cents,
    date_opt: Option<Date>,
    states_path: &Path,
) -> Result<()> {
    // Slow path at day boundaries (full distribution is also serialized then)
    // or before the cache holds anything; fast path replays cached arrays.
    if date_opt.is_some() || !self.percentile_cache.initialized {
        self.merge_and_push_percentiles(height, date_opt, states_path)
    } else {
        self.push_cached_percentiles(height)
    }
}
/// Full K-way merge: compute percentiles from scratch, update cache, push.
fn merge_and_push_percentiles(
&mut self,
height: Height,
date_opt: Option<Date>,
states_path: &Path,
) -> Result<()> {
let collect_merged = date_opt.is_some();
// Phase 1: compute totals + merge.
// Scoped so age_range borrows release before push_target borrows self.all/sth/lth.
let targets = {
let sth_filter = self.sth.metrics.filter().clone();
let mut totals = AllSthLth::<(u64, u128)>::default();
// Collect BTreeMap refs from age_range, skip empty, compute totals.
let maps: Vec<_> = self
.age_range
.iter()
@@ -75,76 +99,121 @@ impl UTXOCohorts {
let all_has_data = totals.all.0 > 0;
let mut targets = totals.map(|(sats, usd)| PercTarget::new(sats, usd, cap));
// K-way merge via BinaryHeap + BTreeMap iterators (no Vec copies)
if all_has_data {
let mut iters: Vec<_> = maps
.iter()
.map(|(map, is_sth)| (map.iter().peekable(), *is_sth))
.collect();
let mut heap: BinaryHeap<Reverse<(CentsCompact, usize)>> =
BinaryHeap::with_capacity(iters.len());
for (i, (iter, _)) in iters.iter_mut().enumerate() {
if let Some(&(&price, _)) = iter.peek() {
heap.push(Reverse((price, i)));
}
}
let mut current_price: Option<CentsCompact> = None;
let mut early_exit = false;
while let Some(Reverse((price, ci))) = heap.pop() {
let (ref mut iter, is_sth) = iters[ci];
let (_, &sats) = iter.next().unwrap();
let amount = u64::from(sats);
let usd = Cents::from(price).as_u128() * amount as u128;
if let Some(prev) = current_price
&& prev != price
{
targets
.for_each_mut(|t| t.finalize_price(prev.into(), collect_merged));
if !collect_merged && targets.all_match(|t| t.done()) {
early_exit = true;
break;
}
}
current_price = Some(price);
targets.all.accumulate(amount, usd);
targets.term_mut(is_sth).accumulate(amount, usd);
if let Some(&(&next_price, _)) = iter.peek() {
heap.push(Reverse((next_price, ci)));
}
}
if !early_exit
&& let Some(price) = current_price
{
targets.for_each_mut(|t| t.finalize_price(price.into(), collect_merged));
}
merge_k_way(&maps, &mut targets, collect_merged);
}
targets
};
// Phase 2: push results (borrows self.all/sth/lth mutably)
push_target(
height, spot, date_opt, states_path, targets.all,
&mut self.all.metrics.cost_basis.extended, "all",
)?;
push_target(
height, spot, date_opt, states_path, targets.sth,
&mut self.sth.metrics.cost_basis.extended, TERM_NAMES.short.id,
)?;
push_target(
height, spot, date_opt, states_path, targets.lth,
&mut self.lth.metrics.cost_basis.extended, TERM_NAMES.long.id,
)?;
// Update cache + push
self.percentile_cache.all = targets.all.to_cached();
self.percentile_cache.sth = targets.sth.to_cached();
self.percentile_cache.lth = targets.lth.to_cached();
self.percentile_cache.initialized = true;
self.percentile_cache
.all
.push(height, &mut self.all.metrics.cost_basis.extended)?;
self.percentile_cache
.sth
.push(height, &mut self.sth.metrics.cost_basis.extended)?;
self.percentile_cache
.lth
.push(height, &mut self.lth.metrics.cost_basis.extended)?;
// Serialize full distribution at day boundaries
if let Some(date) = date_opt {
write_distribution(states_path, "all", date, targets.all.merged)?;
write_distribution(states_path, TERM_NAMES.short.id, date, targets.sth.merged)?;
write_distribution(states_path, TERM_NAMES.long.id, date, targets.lth.merged)?;
}
Ok(())
}
/// Fast path: push cached percentile arrays.
fn push_cached_percentiles(&mut self, height: Height) -> Result<()> {
self.percentile_cache
.all
.push(height, &mut self.all.metrics.cost_basis.extended)?;
self.percentile_cache
.sth
.push(height, &mut self.sth.metrics.cost_basis.extended)?;
self.percentile_cache
.lth
.push(height, &mut self.lth.metrics.cost_basis.extended)?;
Ok(())
}
}
/// Serialize a merged cost-basis distribution to
/// `{states_path}/utxo_{name}_cost_basis/by_date/{date}`.
fn write_distribution(
    states_path: &Path,
    name: &str,
    date: Date,
    merged: Vec<(CentsCompact, Sats)>,
) -> Result<()> {
    let dir = states_path.join(format!("utxo_{name}_cost_basis/by_date"));
    // Directory may not exist yet for a fresh cohort name.
    fs::create_dir_all(&dir)?;
    let file = dir.join(date.to_string());
    let bytes = CostBasisDistribution::serialize_iter(merged.into_iter())?;
    fs::write(file, bytes)?;
    Ok(())
}
/// K-way merge via BinaryHeap over BTreeMap iterators.
///
/// `maps` pairs each source cohort's price→sats map with its `is_sth` flag;
/// every entry is accumulated into `targets.all` plus exactly one of the
/// sth/lth targets. When `collect_merged` is false, the merge may stop early
/// once every target reports `done()`.
fn merge_k_way(
    maps: &[(&std::collections::BTreeMap<CentsCompact, Sats>, bool)],
    targets: &mut AllSthLth<PercTarget>,
    collect_merged: bool,
) {
    // One peekable iterator per source map, tagged with its STH/LTH flag.
    let mut iters: Vec<_> = maps
        .iter()
        .map(|(map, is_sth)| (map.iter().peekable(), *is_sth))
        .collect();
    // Reverse turns std's max-heap into a min-heap, so (price, iterator index)
    // entries pop in ascending price order.
    let mut heap: BinaryHeap<Reverse<(CentsCompact, usize)>> =
        BinaryHeap::with_capacity(iters.len());
    // Seed the heap with each non-empty iterator's first price.
    for (i, (iter, _)) in iters.iter_mut().enumerate() {
        if let Some(&(&price, _)) = iter.peek() {
            heap.push(Reverse((price, i)));
        }
    }
    let mut current_price: Option<CentsCompact> = None;
    let mut early_exit = false;
    while let Some(Reverse((price, ci))) = heap.pop() {
        let (ref mut iter, is_sth) = iters[ci];
        // A popped heap entry mirrors its iterator's head, so next() is Some.
        let (_, &sats) = iter.next().unwrap();
        let amount = u64::from(sats);
        let usd = Cents::from(price).as_u128() * amount as u128;
        // Crossed to a new price level: finalize the previous level first.
        if let Some(prev) = current_price
            && prev != price
        {
            targets
                .for_each_mut(|t| t.finalize_price(prev.into(), collect_merged));
            // All targets satisfied and no full distribution requested — stop.
            if !collect_merged && targets.all_match(|t| t.done()) {
                early_exit = true;
                break;
            }
        }
        current_price = Some(price);
        // Each entry counts toward `all` and toward its own term bucket.
        targets.all.accumulate(amount, usd);
        targets.term_mut(is_sth).accumulate(amount, usd);
        // Re-arm this iterator in the heap with its next price, if any.
        if let Some(&(&next_price, _)) = iter.peek() {
            heap.push(Reverse((next_price, ci)));
        }
    }
    // Flush the final price level unless the early-exit path already ran.
    if !early_exit
        && let Some(price) = current_price
    {
        targets.for_each_mut(|t| t.finalize_price(price.into(), collect_merged));
    }
}
struct AllSthLth<T> {
@@ -230,6 +299,13 @@ impl PercTarget {
}
}
/// Snapshot the current sat/usd percentile arrays into a `CachedPercentiles`
/// entry, so later blocks can replay them without redoing the merge.
fn to_cached(&self) -> CachedPercentiles {
    CachedPercentiles {
        sat_result: self.sat_result,
        usd_result: self.usd_result,
    }
}
#[inline]
fn accumulate(&mut self, amount: u64, usd: u128) {
self.price_sats += amount;
@@ -275,48 +351,3 @@ impl PercTarget {
&& (self.total_usd == 0 || self.usd_idx >= PERCENTILES_LEN)
}
}
#[allow(clippy::too_many_arguments)]
fn push_target(
height: Height,
spot: Cents,
date_opt: Option<Date>,
states_path: &Path,
target: PercTarget,
ext: &mut CostBasisExtended,
name: &str,
) -> Result<()> {
ext.percentiles.truncate_push(height, &target.sat_result)?;
ext.invested_capital
.truncate_push(height, &target.usd_result)?;
let sat_rank = if target.total_sats > 0 {
compute_spot_percentile_rank(&target.sat_result, spot)
} else {
BasisPoints16::ZERO
};
ext.spot_cost_basis_percentile
.bps
.height
.truncate_push(height, sat_rank)?;
let usd_rank = if target.total_usd > 0 {
compute_spot_percentile_rank(&target.usd_result, spot)
} else {
BasisPoints16::ZERO
};
ext.spot_invested_capital_percentile
.bps
.height
.truncate_push(height, usd_rank)?;
if let Some(date) = date_opt {
let dir = states_path.join(format!("utxo_{name}_cost_basis/by_date"));
fs::create_dir_all(&dir)?;
fs::write(
dir.join(date.to_string()),
CostBasisDistribution::serialize_iter(target.merged.into_iter())?,
)?;
}
Ok(())
}

View File

@@ -11,7 +11,7 @@ impl UTXOCohorts<Rw> {
/// New UTXOs are added to:
/// - The "up_to_1h" age cohort (all new UTXOs start at 0 hours old)
/// - The appropriate epoch cohort based on block height
/// - The appropriate year cohort based on block timestamp
/// - The appropriate class cohort based on block timestamp
/// - The appropriate output type cohort (P2PKH, P2SH, etc.)
/// - The appropriate amount range cohort based on value
pub(crate) fn receive(
@@ -26,7 +26,7 @@ impl UTXOCohorts<Rw> {
// Pre-compute snapshot once for the 3 cohorts sharing the same supply_state
let snapshot = CostBasisSnapshot::from_utxo(price, &supply_state);
// New UTXOs go into up_to_1h, current epoch, and current year
// New UTXOs go into up_to_1h, current epoch, and current class
self.age_range
.up_to_1h
.state
@@ -39,7 +39,7 @@ impl UTXOCohorts<Rw> {
.as_mut()
.unwrap()
.receive_utxo_snapshot(&supply_state, &snapshot);
self.year
self.class
.mut_vec_from_timestamp(timestamp)
.state
.as_mut()

View File

@@ -70,7 +70,7 @@ impl UTXOCohorts<Rw> {
.as_mut()
.unwrap()
.send_utxo_precomputed(&sent.spendable_supply, &pre);
self.year
self.class
.mut_vec_from_timestamp(block_state.timestamp)
.state
.as_mut()
@@ -86,7 +86,7 @@ impl UTXOCohorts<Rw> {
.as_mut()
.unwrap()
.supply -= &sent.spendable_supply;
self.year
self.class
.mut_vec_from_timestamp(block_state.timestamp)
.state
.as_mut()

View File

@@ -1,6 +1,6 @@
use brk_cohort::AGE_BOUNDARIES;
use brk_types::{CostBasisSnapshot, ONE_HOUR_IN_SEC, Timestamp};
use vecdb::Rw;
use vecdb::{Rw, unlikely};
use crate::distribution::state::BlockState;
@@ -12,10 +12,9 @@ impl UTXOCohorts<Rw> {
/// UTXOs age with each block. When they cross hour boundaries,
/// they move between age-based cohorts (e.g., from "0-1h" to "1h-1d").
///
/// Complexity: O(k * log n) where:
/// - k = 20 boundaries to check
/// - n = total blocks in chain_state
/// - Linear scan for end_idx is faster than binary search since typically 0-2 blocks cross each boundary
/// Uses cached positions per boundary to avoid binary search.
/// Since timestamps are monotonic, positions only advance forward.
/// Complexity: O(k * c) where k = 20 boundaries, c = ~1 (forward scan steps).
pub(crate) fn tick_tock_next_block(
&mut self,
chain_state: &[BlockState],
@@ -38,6 +37,7 @@ impl UTXOCohorts<Rw> {
// Cohort 0 covers [0, 1) hours
// Cohort 20 covers [15*365*24, infinity) hours
let mut age_cohorts: Vec<_> = self.age_range.iter_mut().map(|v| &mut v.state).collect();
let cached = &mut self.tick_tock_cached_positions;
// For each boundary (in hours), find blocks that just crossed it
for (boundary_idx, &boundary_hours) in AGE_BOUNDARIES.iter().enumerate() {
@@ -54,8 +54,24 @@ impl UTXOCohorts<Rw> {
continue;
}
// Binary search to find start, then linear scan for end (typically 0-2 blocks)
let start_idx = chain_state.partition_point(|b| *b.timestamp <= lower_timestamp);
// Find start_idx: use cached position + forward scan (O(1) typical).
// On first call after restart, cached is 0 so fall back to binary search.
let start_idx = if unlikely(cached[boundary_idx] == 0 && chain_state.len() > 1) {
let idx = chain_state.partition_point(|b| *b.timestamp <= lower_timestamp);
cached[boundary_idx] = idx;
idx
} else {
let mut idx = cached[boundary_idx];
while idx < chain_state.len()
&& *chain_state[idx].timestamp <= lower_timestamp
{
idx += 1;
}
cached[boundary_idx] = idx;
idx
};
// Linear scan for end (typically 0-2 blocks past start)
let end_idx = chain_state[start_idx..]
.iter()
.position(|b| *b.timestamp > upper_timestamp)

View File

@@ -110,12 +110,14 @@ impl<Metrics: CohortMetricsBase + Traversable> DynCohortVecs for UTXOCohortVecs<
&mut self,
height: Height,
height_price: Cents,
is_day_boundary: bool,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics.compute_then_truncate_push_unrealized_states(
height,
height_price,
state,
is_day_boundary,
)?;
}
Ok(())
@@ -129,16 +131,14 @@ impl<Metrics: CohortMetricsBase + Traversable> DynCohortVecs for UTXOCohortVecs<
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)
}
fn compute_net_sentiment_height(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_net_sentiment_height(starting_indexes, exit)
.compute_rest_part1(blocks, prices, starting_indexes, exit)?;
// Separate cohorts (with state) compute net_sentiment = greed - pain directly.
// Aggregate cohorts get it via weighted average in groups.rs.
if self.state.is_some() {
self.metrics
.compute_net_sentiment_height(starting_indexes, exit)?;
}
Ok(())
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -1,5 +1,3 @@
use std::thread;
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_indexer::Indexer;
@@ -95,6 +93,15 @@ pub(crate) fn process_blocks(
let height_to_timestamp_collected = &cached_timestamps[start_usize..end_usize];
let height_to_price_collected = &cached_prices[start_usize..end_usize];
// Pre-compute day boundaries to avoid per-block division in the hot loop
let is_last_of_day: Vec<bool> = (start_usize..end_usize)
.map(|h| {
h == end_usize - 1
|| *cached_timestamps[h] / ONE_DAY_IN_SEC
!= *cached_timestamps[h + 1] / ONE_DAY_IN_SEC
})
.collect();
debug!("creating VecsReaders");
let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data);
debug!("VecsReaders created");
@@ -246,14 +253,11 @@ pub(crate) fn process_blocks(
p2wsh: TypeIndex::from(first_p2wsh_vec[offset].to_usize()),
};
// Reset per-block values for all separate cohorts
reset_block_values(&mut vecs.utxo_cohorts, &mut vecs.address_cohorts);
// Reset per-block activity counts
activity_counts.reset();
// Collect output/input data using reusable iterators (16KB buffered reads)
// Must be done before thread::scope since iterators aren't Send
// Must be done before rayon::join since iterators aren't Send
let txoutdata_vec = txout_iters.collect_block_outputs(first_txoutindex, output_count);
let (input_values, input_prev_heights, input_outputtypes, input_typeindexes) =
@@ -263,55 +267,54 @@ pub(crate) fn process_blocks(
(&[][..], &[][..], &[][..], &[][..])
};
// Process outputs and inputs in parallel with tick-tock
let (outputs_result, inputs_result) = thread::scope(|scope| -> Result<_> {
// Tick-tock age transitions in background
scope.spawn(|| {
// Process outputs, inputs, and tick-tock in parallel via rayon::join
let (_, oi_result) = rayon::join(
|| {
vecs.utxo_cohorts
.tick_tock_next_block(chain_state, timestamp);
});
let outputs_handle = scope.spawn(|| {
// Process outputs (receive)
process_outputs(
txoutindex_to_txindex,
txoutdata_vec,
&first_addressindexes,
&cache,
&vr,
&vecs.any_address_indexes,
&vecs.addresses_data,
)
});
// Process inputs (send) - skip coinbase input
let inputs_result = if input_count > 1 {
process_inputs(
input_count - 1,
&txinindex_to_txindex[1..], // Skip coinbase
input_values,
input_outputtypes,
input_typeindexes,
input_prev_heights,
&first_addressindexes,
&cache,
&vr,
&vecs.any_address_indexes,
&vecs.addresses_data,
)?
} else {
InputsResult {
height_to_sent: Default::default(),
sent_data: Default::default(),
address_data: Default::default(),
txindex_vecs: Default::default(),
}
};
let outputs_result = outputs_handle.join().unwrap()?;
Ok((outputs_result, inputs_result))
})?;
},
|| -> Result<_> {
let (outputs_result, inputs_result) = rayon::join(
|| {
process_outputs(
txoutindex_to_txindex,
txoutdata_vec,
&first_addressindexes,
&cache,
&vr,
&vecs.any_address_indexes,
&vecs.addresses_data,
)
},
|| -> Result<_> {
if input_count > 1 {
process_inputs(
input_count - 1,
&txinindex_to_txindex[1..],
input_values,
input_outputtypes,
input_typeindexes,
input_prev_heights,
&first_addressindexes,
&cache,
&vr,
&vecs.any_address_indexes,
&vecs.addresses_data,
)
} else {
Ok(InputsResult {
height_to_sent: Default::default(),
sent_data: Default::default(),
address_data: Default::default(),
txindex_vecs: Default::default(),
})
}
},
);
Ok((outputs_result?, inputs_result?))
},
);
let (outputs_result, inputs_result) = oi_result?;
// Merge new address data into current cache
cache.merge_funded(outputs_result.address_data);
@@ -363,11 +366,20 @@ pub(crate) fn process_blocks(
}
// Process UTXO cohorts and Address cohorts in parallel
// - Main thread: UTXO cohorts receive/send
// - Spawned thread: Address cohorts process_received/process_sent
thread::scope(|scope| {
// Spawn address cohort processing in background thread
scope.spawn(|| {
let (_, addr_result) = rayon::join(
|| {
// UTXO cohorts receive/send
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
if let Some(min_h) =
vecs.utxo_cohorts
.send(height_to_sent, chain_state, ctx.price_range_max)
{
min_supply_modified =
Some(min_supply_modified.map_or(min_h, |cur| cur.min(min_h)));
}
},
|| -> Result<()> {
let mut lookup = cache.as_lookup();
// Process received outputs (addresses receiving funds)
@@ -382,7 +394,6 @@ pub(crate) fn process_blocks(
);
// Process sent inputs (addresses sending funds)
// Uses separate price/timestamp vecs to avoid borrowing chain_state
process_sent(
inputs_result.sent_data,
&mut vecs.address_cohorts,
@@ -399,19 +410,9 @@ pub(crate) fn process_blocks(
timestamp,
&mut seen_senders,
)
.unwrap();
});
// Main thread: Update UTXO cohorts
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
if let Some(min_h) =
vecs.utxo_cohorts
.send(height_to_sent, chain_state, ctx.price_range_max)
{
min_supply_modified = Some(min_supply_modified.map_or(min_h, |cur| cur.min(min_h)));
}
});
},
);
addr_result?;
// Push to height-indexed vectors
vecs.addr_count
@@ -424,9 +425,7 @@ pub(crate) fn process_blocks(
vecs.address_activity
.truncate_push_height(height, &activity_counts)?;
let h = height.to_usize();
let is_last_of_day = height == last_height
|| *cached_timestamps[h] / ONE_DAY_IN_SEC != *cached_timestamps[h + 1] / ONE_DAY_IN_SEC;
let is_last_of_day = is_last_of_day[offset];
let date_opt = is_last_of_day.then(|| Date::from(timestamp));
push_cohort_states(
@@ -434,11 +433,11 @@ pub(crate) fn process_blocks(
&mut vecs.address_cohorts,
height,
block_price,
date_opt.is_some(),
)?;
vecs.utxo_cohorts.truncate_push_aggregate_percentiles(
height,
block_price,
date_opt,
&vecs.states_path,
)?;
@@ -494,36 +493,42 @@ pub(crate) fn process_blocks(
Ok(())
}
/// Reset per-block values for all separate cohorts.
fn reset_block_values(utxo_cohorts: &mut UTXOCohorts, address_cohorts: &mut AddressCohorts) {
utxo_cohorts
.iter_separate_mut()
.for_each(|v| v.reset_single_iteration_values());
address_cohorts
.iter_separate_mut()
.for_each(|v| v.reset_single_iteration_values());
}
/// Push cohort states to height-indexed vectors.
/// Push cohort states to height-indexed vectors, then reset per-block values.
fn push_cohort_states(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
height: Height,
height_price: Cents,
is_day_boundary: bool,
) -> Result<()> {
let (r1, r2) = rayon::join(
|| {
utxo_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price)
})
utxo_cohorts
.par_iter_separate_mut()
.try_for_each(|v| -> Result<()> {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(
height,
height_price,
is_day_boundary,
)?;
v.reset_single_iteration_values();
Ok(())
})
},
|| {
address_cohorts.par_iter_separate_mut().try_for_each(|v| {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(height, height_price)
})
address_cohorts
.par_iter_separate_mut()
.try_for_each(|v| -> Result<()> {
v.truncate_push(height)?;
v.compute_then_truncate_push_unrealized_states(
height,
height_price,
is_day_boundary,
)?;
v.reset_single_iteration_values();
Ok(())
})
},
);
r1?;

View File

@@ -2,9 +2,7 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Height, Indexes, Sats, StoredF64, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode, WritableVec,
};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{
blocks,
@@ -22,16 +20,10 @@ pub struct ActivityMetrics<M: StorageMode = Rw> {
/// 14-day EMA of sent supply (sats, btc, usd)
pub sent_ema: RollingEmas2w<M>,
/// Satoshi-blocks destroyed (supply * blocks_old when spent)
pub satblocks_destroyed: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
/// Satoshi-days destroyed (supply * days_old when spent)
pub satdays_destroyed: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
/// Coin-blocks destroyed (in BTC rather than sats)
/// Coin-blocks destroyed (in BTC)
pub coinblocks_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
/// Coin-days destroyed (in BTC rather than sats)
/// Coin-days destroyed (in BTC)
pub coindays_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
}
@@ -42,20 +34,9 @@ impl ActivityMetrics {
sent: cfg.import_value_cumulative("sent", Version::ZERO)?,
sent_ema: cfg.import_emas_2w("sent", Version::ZERO)?,
satblocks_destroyed: EagerVec::forced_import(
cfg.db,
&cfg.name("satblocks_destroyed"),
cfg.version,
)?,
satdays_destroyed: EagerVec::forced_import(
cfg.db,
&cfg.name("satdays_destroyed"),
cfg.version,
)?,
coinblocks_destroyed: cfg
.import_cumulative_sum("coinblocks_destroyed", Version::ZERO)?,
coindays_destroyed: cfg.import_cumulative_sum("coindays_destroyed", Version::ZERO)?,
.import_cumulative_sum("coinblocks_destroyed", Version::ONE)?,
coindays_destroyed: cfg.import_cumulative_sum("coindays_destroyed", Version::ONE)?,
})
}
@@ -66,8 +47,8 @@ impl ActivityMetrics {
.sats
.height
.len()
.min(self.satblocks_destroyed.len())
.min(self.satdays_destroyed.len())
.min(self.coinblocks_destroyed.height.len())
.min(self.coindays_destroyed.height.len())
}
/// Push activity state values to height-indexed vectors.
@@ -79,10 +60,14 @@ impl ActivityMetrics {
satdays_destroyed: Sats,
) -> Result<()> {
self.sent.base.sats.height.truncate_push(height, sent)?;
self.satblocks_destroyed
.truncate_push(height, satblocks_destroyed)?;
self.satdays_destroyed
.truncate_push(height, satdays_destroyed)?;
self.coinblocks_destroyed.height.truncate_push(
height,
StoredF64::from(Bitcoin::from(satblocks_destroyed)),
)?;
self.coindays_destroyed.height.truncate_push(
height,
StoredF64::from(Bitcoin::from(satdays_destroyed)),
)?;
Ok(())
}
@@ -90,8 +75,8 @@ impl ActivityMetrics {
pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.sent.base.sats.height as &mut dyn AnyStoredVec,
&mut self.satblocks_destroyed as &mut dyn AnyStoredVec,
&mut self.satdays_destroyed as &mut dyn AnyStoredVec,
&mut self.coinblocks_destroyed.height as &mut dyn AnyStoredVec,
&mut self.coindays_destroyed.height as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
@@ -120,8 +105,8 @@ impl ActivityMetrics {
}
sum_others!(sent.base.sats.height);
sum_others!(satblocks_destroyed);
sum_others!(satdays_destroyed);
sum_others!(coinblocks_destroyed.height);
sum_others!(coindays_destroyed.height);
Ok(())
}
@@ -144,26 +129,10 @@ impl ActivityMetrics {
)?;
self.coinblocks_destroyed
.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satblocks_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
.compute_rest(starting_indexes.height, &window_starts, exit)?;
self.coindays_destroyed
.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satdays_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
.compute_rest(starting_indexes.height, &window_starts, exit)?;
Ok(())
}

View File

@@ -5,7 +5,7 @@ use brk_types::{Cents, Dollars, Height, Indexes, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, distribution::state::CohortState, prices};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics, RealizedBase,
@@ -73,18 +73,6 @@ impl CohortMetricsBase for AdjustedCohortMetrics {
self.activity.validate_computed_versions(base_version)?;
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new();
vecs.extend(self.supply.par_iter_mut().collect::<Vec<_>>());

View File

@@ -80,14 +80,12 @@ impl CohortMetricsBase for AllCohortMetrics {
height: Height,
height_price: Cents,
state: &mut CohortState,
is_day_boundary: bool,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
self.compute_and_push_unrealized_base(height, height_price, state)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;
.truncate_push_percentiles(height, state, is_day_boundary)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {

View File

@@ -1,11 +1,11 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Indexes, Sats, Version};
use brk_types::{Dollars, Height, Indexes, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, distribution::state::CohortState, prices};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics, RealizedBase,
@@ -72,18 +72,6 @@ impl CohortMetricsBase for BasicCohortMetrics {
self.activity.validate_computed_versions(base_version)?;
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new();
vecs.extend(self.supply.par_iter_mut().collect::<Vec<_>>());

View File

@@ -80,14 +80,12 @@ impl CohortMetricsBase for ExtendedCohortMetrics {
height: Height,
height_price: Cents,
state: &mut CohortState,
is_day_boundary: bool,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
self.compute_and_push_unrealized_base(height, height_price, state)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;
.truncate_push_percentiles(height, state, is_day_boundary)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {

View File

@@ -79,14 +79,12 @@ impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
height: Height,
height_price: Cents,
state: &mut CohortState,
is_day_boundary: bool,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
self.compute_and_push_unrealized_base(height, height_price, state)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;
.truncate_push_percentiles(height, state, is_day_boundary)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {

View File

@@ -1,11 +1,11 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Cents, Height, Version};
use vecdb::{AnyStoredVec, Rw, StorageMode, WritableVec};
use brk_types::{Cents, Height, Version};
use vecdb::{AnyStoredVec, Rw, StorageMode};
use crate::{
distribution::state::CohortState,
internal::{PERCENTILES_LEN, PercentFromHeight, PercentilesVecs, compute_spot_percentile_rank},
internal::{PERCENTILES_LEN, PercentilesVecs},
};
use crate::distribution::metrics::ImportConfig;
@@ -18,12 +18,6 @@ pub struct CostBasisExtended<M: StorageMode = Rw> {
/// Invested capital percentiles (USD-weighted)
pub invested_capital: PercentilesVecs<M>,
/// What percentile of cost basis is below spot (sat-weighted)
pub spot_cost_basis_percentile: PercentFromHeight<BasisPoints16, M>,
/// What percentile of invested capital is below spot (USD-weighted)
pub spot_invested_capital_percentile: PercentFromHeight<BasisPoints16, M>,
}
impl CostBasisExtended {
@@ -41,10 +35,6 @@ impl CostBasisExtended {
cfg.version,
cfg.indexes,
)?,
spot_cost_basis_percentile: cfg
.import_percent_bp16("spot_cost_basis_percentile", Version::ZERO)?,
spot_invested_capital_percentile: cfg
.import_percent_bp16("spot_invested_capital_percentile", Version::ZERO)?,
})
}
@@ -52,34 +42,36 @@ impl CostBasisExtended {
&mut self,
height: Height,
state: &mut CohortState,
spot: Cents,
is_day_boundary: bool,
) -> Result<()> {
let computed = state.compute_percentiles();
let computed = if is_day_boundary {
state.compute_percentiles()
} else {
state.cached_percentiles()
};
let sat_prices = computed
.as_ref()
.map(|p| p.sat_weighted)
.unwrap_or([Cents::ZERO; PERCENTILES_LEN]);
self.percentiles.truncate_push(height, &sat_prices)?;
let rank = compute_spot_percentile_rank(&sat_prices, spot);
self.spot_cost_basis_percentile
.bps
.height
.truncate_push(height, rank)?;
let usd_prices = computed
.as_ref()
.map(|p| p.usd_weighted)
.unwrap_or([Cents::ZERO; PERCENTILES_LEN]);
self.invested_capital.truncate_push(height, &usd_prices)?;
let rank = compute_spot_percentile_rank(&usd_prices, spot);
self.spot_invested_capital_percentile
.bps
.height
.truncate_push(height, rank)?;
self.push_arrays(height, &sat_prices, &usd_prices)
}
/// Push pre-computed percentile arrays.
/// Shared by both individual cohort and aggregate (K-way merge) paths.
pub(crate) fn push_arrays(
&mut self,
height: Height,
sat_prices: &[Cents; PERCENTILES_LEN],
usd_prices: &[Cents; PERCENTILES_LEN],
) -> Result<()> {
self.percentiles.truncate_push(height, sat_prices)?;
self.invested_capital.truncate_push(height, usd_prices)?;
Ok(())
}
@@ -97,8 +89,6 @@ impl CostBasisExtended {
.iter_mut()
.map(|v| &mut v.cents.height as &mut dyn AnyStoredVec),
);
vecs.push(&mut self.spot_cost_basis_percentile.bps.height);
vecs.push(&mut self.spot_invested_capital_percentile.bps.height);
vecs
}
@@ -107,14 +97,6 @@ impl CostBasisExtended {
.validate_computed_version_or_reset(base_version)?;
self.invested_capital
.validate_computed_version_or_reset(base_version)?;
self.spot_cost_basis_percentile
.bps
.height
.validate_computed_version_or_reset(base_version)?;
self.spot_invested_capital_percentile
.bps
.height
.validate_computed_version_or_reset(base_version)?;
Ok(())
}
}

View File

@@ -42,12 +42,32 @@ pub trait CohortMetricsBase: Send + Sync {
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()>;
/// Apply pending, push min/max cost basis, compute and push unrealized state.
fn compute_and_push_unrealized_base(
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis_base_mut()
.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized_base_mut()
.truncate_push(height, &unrealized_state)?;
Ok(())
}
/// Compute and push unrealized states. Extended types override to also push percentiles.
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()>;
_is_day_boundary: bool,
) -> Result<()> {
self.compute_and_push_unrealized_base(height, height_price, state)
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec>;

View File

@@ -1,7 +1,7 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
BasisPoints16, BasisPoints32, BasisPointsSigned32, Bitcoin, Cents, CentsSats, CentsSigned,
BasisPoints32, BasisPointsSigned32, Bitcoin, Cents, CentsSats, CentsSigned,
CentsSquaredSats, Dollars, Height, Indexes, Sats, StoredF32, StoredF64, Version,
};
use vecdb::{
@@ -16,7 +16,7 @@ use crate::{
CentsPlus, CentsUnsignedToDollars, ComputedFromHeight, ComputedFromHeightCumulative,
ComputedFromHeightRatio, FiatFromHeight, Identity, LazyFromHeight,
NegCentsUnsignedToDollars, PercentFromHeight, PercentRollingEmas1w1m,
PercentRollingWindows, Price, RatioCents64, RatioCentsBp16, RatioCentsBp32,
PercentRollingWindows, Price, RatioCents64, RatioCentsBp32,
RatioCentsSignedCentsBps32, RatioCentsSignedDollarsBps32, RollingEmas1w1m, RollingEmas2w,
RollingWindows, ValueFromHeightCumulative,
},
@@ -53,8 +53,8 @@ pub struct RealizedBase<M: StorageMode = Rw> {
pub net_realized_pnl_ema_1w: ComputedFromHeight<CentsSigned, M>,
pub gross_pnl: FiatFromHeight<Cents, M>,
pub realized_profit_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
pub realized_loss_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
pub realized_profit_rel_to_realized_cap: PercentFromHeight<BasisPoints32, M>,
pub realized_loss_rel_to_realized_cap: PercentFromHeight<BasisPoints32, M>,
pub net_realized_pnl_rel_to_realized_cap: PercentFromHeight<BasisPointsSigned32, M>,
pub profit_value_created: ComputedFromHeight<Cents, M>,
@@ -122,9 +122,9 @@ impl RealizedBase {
let gross_pnl = cfg.import_fiat("realized_gross_pnl", v0)?;
let realized_profit_rel_to_realized_cap =
cfg.import_percent_bp16("realized_profit_rel_to_realized_cap", v1)?;
cfg.import_percent_bp32("realized_profit_rel_to_realized_cap", Version::new(2))?;
let realized_loss_rel_to_realized_cap =
cfg.import_percent_bp16("realized_loss_rel_to_realized_cap", v1)?;
cfg.import_percent_bp32("realized_loss_rel_to_realized_cap", Version::new(2))?;
let net_realized_pnl_rel_to_realized_cap =
cfg.import_percent_bps32("net_realized_pnl_rel_to_realized_cap", Version::new(2))?;
@@ -649,14 +649,14 @@ impl RealizedBase {
// Realized profit/loss/net relative to realized cap
self.realized_profit_rel_to_realized_cap
.compute_binary::<Cents, Cents, RatioCentsBp16>(
.compute_binary::<Cents, Cents, RatioCentsBp32>(
starting_indexes.height,
&self.realized_profit.height,
&self.realized_cap_cents.height,
exit,
)?;
self.realized_loss_rel_to_realized_cap
.compute_binary::<Cents, Cents, RatioCentsBp16>(
.compute_binary::<Cents, Cents, RatioCentsBp32>(
starting_indexes.height,
&self.realized_loss.height,
&self.realized_cap_cents.height,

View File

@@ -185,7 +185,11 @@ impl CohortState {
let sats = supply.value;
let current_ps = CentsSats::from_price_sats(current_price, sats);
let prev_ps = CentsSats::from_price_sats(prev_price, sats);
let ath_ps = CentsSats::from_price_sats(ath, sats);
let ath_ps = if ath == current_price {
current_ps
} else {
CentsSats::from_price_sats(ath, sats)
};
let prev_investor_cap = prev_ps.to_investor_cap(prev_price);
Some(SendPrecomputed {
sats,
@@ -287,6 +291,10 @@ impl CohortState {
self.cost_basis_data.compute_percentiles()
}
pub(crate) fn cached_percentiles(&self) -> Option<Percentiles> {
self.cost_basis_data.cached_percentiles()
}
pub(crate) fn compute_unrealized_state(&mut self, height_price: Cents) -> UnrealizedState {
self.cost_basis_data.compute_unrealized_state(height_price)
}

View File

@@ -83,7 +83,7 @@ impl CostBasisData {
}
fn assert_pending_empty(&self) {
assert!(
debug_assert!(
self.pending.is_empty() && self.pending_raw_is_zero(),
"CostBasisData: pending not empty, call apply_pending first"
);
@@ -180,7 +180,7 @@ impl CostBasisData {
}
pub(crate) fn apply_pending(&mut self) {
if self.pending.is_empty() && self.pending_raw_is_zero() {
if self.pending.is_empty() {
return;
}
self.generation = self.generation.wrapping_add(1);
@@ -277,6 +277,10 @@ impl CostBasisData {
self.cached_percentiles = None;
}
pub(crate) fn cached_percentiles(&self) -> Option<Percentiles> {
self.cached_percentiles
}
pub(crate) fn compute_percentiles(&mut self) -> Option<Percentiles> {
self.assert_pending_empty();
if !self.percentiles_dirty {

View File

@@ -35,6 +35,9 @@ impl RealizedState {
/// Get realized cap as CentsUnsigned (divides by ONE_BTC).
#[inline]
pub(crate) fn cap(&self) -> Cents {
if self.cap_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.cap_raw / Sats::ONE_BTC_U128) as u64)
}
@@ -76,18 +79,27 @@ impl RealizedState {
/// Get realized profit as CentsUnsigned.
#[inline]
pub(crate) fn profit(&self) -> Cents {
if self.profit_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.profit_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get realized loss as CentsUnsigned.
#[inline]
pub(crate) fn loss(&self) -> Cents {
if self.loss_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.loss_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get profit value created as CentsUnsigned (sell_price × sats for profit cases).
#[inline]
pub(crate) fn profit_value_created(&self) -> Cents {
if self.profit_value_created_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.profit_value_created_raw / Sats::ONE_BTC_U128) as u64)
}
@@ -95,12 +107,18 @@ impl RealizedState {
/// This is also known as profit_flow.
#[inline]
pub(crate) fn profit_value_destroyed(&self) -> Cents {
if self.profit_value_destroyed_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.profit_value_destroyed_raw / Sats::ONE_BTC_U128) as u64)
}
/// Get loss value created as CentsUnsigned (sell_price × sats for loss cases).
#[inline]
pub(crate) fn loss_value_created(&self) -> Cents {
if self.loss_value_created_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.loss_value_created_raw / Sats::ONE_BTC_U128) as u64)
}
@@ -108,6 +126,9 @@ impl RealizedState {
/// This is also known as capitulation_flow.
#[inline]
pub(crate) fn loss_value_destroyed(&self) -> Cents {
if self.loss_value_destroyed_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.loss_value_destroyed_raw / Sats::ONE_BTC_U128) as u64)
}
@@ -116,6 +137,9 @@ impl RealizedState {
/// by selling at peak instead of when actually sold.
#[inline]
pub(crate) fn peak_regret(&self) -> Cents {
if self.peak_regret_raw == 0 {
return Cents::ZERO;
}
Cents::new((self.peak_regret_raw / Sats::ONE_BTC_U128) as u64)
}

View File

@@ -61,17 +61,22 @@ struct CachedStateRaw {
impl CachedStateRaw {
/// Convert raw values to final output by dividing by ONE_BTC.
fn to_output(&self) -> UnrealizedState {
#[inline(always)]
fn div_btc(raw: u128) -> Cents {
if raw == 0 {
Cents::ZERO
} else {
Cents::new((raw / Sats::ONE_BTC_U128) as u64)
}
}
UnrealizedState {
supply_in_profit: self.supply_in_profit,
supply_in_loss: self.supply_in_loss,
unrealized_profit: Cents::new((self.unrealized_profit / Sats::ONE_BTC_U128) as u64),
unrealized_loss: Cents::new((self.unrealized_loss / Sats::ONE_BTC_U128) as u64),
invested_capital_in_profit: Cents::new(
(self.invested_capital_in_profit / Sats::ONE_BTC_U128) as u64,
),
invested_capital_in_loss: Cents::new(
(self.invested_capital_in_loss / Sats::ONE_BTC_U128) as u64,
),
unrealized_profit: div_btc(self.unrealized_profit),
unrealized_loss: div_btc(self.unrealized_loss),
invested_capital_in_profit: div_btc(self.invested_capital_in_profit),
invested_capital_in_loss: div_btc(self.invested_capital_in_loss),
investor_cap_in_profit_raw: self.investor_cap_in_profit,
investor_cap_in_loss_raw: self.investor_cap_in_loss,
invested_capital_in_profit_raw: self.invested_capital_in_profit,

View File

@@ -2,6 +2,7 @@ use std::ops::{Add, AddAssign};
use brk_cohort::{ByAmountRange, GroupedByType};
use brk_types::{OutputType, Sats, SupplyState};
use vecdb::unlikely;
#[derive(Default, Debug)]
pub struct Transacted {
@@ -20,7 +21,7 @@ impl Transacted {
*self.by_type.get_mut(_type) += &supply;
if _type.is_unspendable() {
if unlikely(_type.is_unspendable()) {
return;
}

View File

@@ -2,41 +2,36 @@ use brk_types::StoredF32;
/// Fast expanding percentile tracker using a Fenwick tree (Binary Indexed Tree).
///
/// Values are discretized to BasisPoints32 precision (×10000) and tracked in
/// Values are discretized to 10 BPS (0.1%) resolution and tracked in
/// a fixed-size frequency array with Fenwick prefix sums. This gives:
/// - O(log N) insert (N = tree size, ~18 ops for 200k buckets)
/// - O(log N) insert (N = tree size, ~16 ops for 43k buckets)
/// - O(log N) percentile query via prefix-sum walk
/// - Exact at BasisPoints32 resolution (no approximation)
/// - 0.1% value resolution (10 BPS granularity)
#[derive(Clone)]
pub(crate) struct ExpandingPercentiles {
/// Fenwick tree storing cumulative frequency counts.
/// Index 0 is unused (1-indexed). tree[i] covers bucket (i - 1 + offset).
tree: Vec<u64>,
count: u64,
/// Offset so bucket 0 in the tree corresponds to BPS value `offset`.
offset: i32,
size: usize,
/// 1-indexed: tree[0] is unused, tree[1..=TREE_SIZE] hold data.
tree: Vec<u32>,
count: u32,
}
/// Max BPS value supported. Ratio of 42.0 = 420,000 BPS.
/// Bucket granularity in BPS. 10 BPS = 0.1% = 0.001 ratio.
const BUCKET_BPS: i32 = 10;
/// Max ratio supported: 43.0 = 430,000 BPS.
const MAX_BPS: i32 = 430_000;
/// Min BPS value supported (0 = ratio of 0.0).
const MIN_BPS: i32 = 0;
const TREE_SIZE: usize = (MAX_BPS - MIN_BPS) as usize + 1;
const TREE_SIZE: usize = (MAX_BPS / BUCKET_BPS) as usize + 1;
impl Default for ExpandingPercentiles {
fn default() -> Self {
Self {
tree: vec![0u64; TREE_SIZE + 1], // 1-indexed
tree: vec![0u32; TREE_SIZE + 1], // 1-indexed
count: 0,
offset: MIN_BPS,
size: TREE_SIZE,
}
}
}
impl ExpandingPercentiles {
pub fn count(&self) -> u64 {
pub fn count(&self) -> u32 {
self.count
}
@@ -47,29 +42,27 @@ impl ExpandingPercentiles {
/// Convert f32 ratio to bucket index (1-indexed for Fenwick).
#[inline]
fn to_bucket(&self, value: f32) -> usize {
fn to_bucket(value: f32) -> usize {
let bps = (value as f64 * 10000.0).round() as i32;
let clamped = bps.clamp(self.offset, self.offset + self.size as i32 - 1);
(clamped - self.offset) as usize + 1 // 1-indexed
let bucket = (bps / BUCKET_BPS).clamp(0, TREE_SIZE as i32 - 1);
bucket as usize + 1
}
/// Bulk-load values in O(n + N) instead of O(n log N).
/// Builds raw frequency counts, then converts to Fenwick in-place.
pub fn add_bulk(&mut self, values: &[StoredF32]) {
// Build raw frequency counts into tree (treated as flat array)
for &v in values {
let v = *v;
if v.is_nan() {
continue;
}
self.count += 1;
let bucket = self.to_bucket(v);
self.tree[bucket] += 1;
self.tree[Self::to_bucket(v)] += 1;
}
// Convert flat frequencies to Fenwick tree in O(N)
for i in 1..=self.size {
for i in 1..=TREE_SIZE {
let parent = i + (i & i.wrapping_neg());
if parent <= self.size {
if parent <= TREE_SIZE {
let val = self.tree[i];
self.tree[parent] += val;
}
@@ -83,47 +76,40 @@ impl ExpandingPercentiles {
return;
}
self.count += 1;
let mut i = self.to_bucket(value);
while i <= self.size {
let mut i = Self::to_bucket(value);
while i <= TREE_SIZE {
self.tree[i] += 1;
i += i & i.wrapping_neg(); // i += lowbit(i)
i += i & i.wrapping_neg();
}
}
/// Find the bucket containing the k-th element (1-indexed k).
/// Uses the standard Fenwick tree walk-down in O(log N).
#[inline]
fn kth(&self, mut k: u64) -> usize {
fn kth(&self, mut k: u32) -> usize {
let mut pos = 0;
let mut bit = 1 << (usize::BITS - 1 - self.size.leading_zeros()); // highest power of 2 <= size
let mut bit = 1 << (usize::BITS - 1 - TREE_SIZE.leading_zeros());
while bit > 0 {
let next = pos + bit;
if next <= self.size && self.tree[next] < k {
if next <= TREE_SIZE && self.tree[next] < k {
k -= self.tree[next];
pos = next;
}
bit >>= 1;
}
pos + 1 // 1-indexed bucket
}
/// Convert bucket index back to BPS u32 value.
#[inline]
fn bucket_to_bps(&self, bucket: usize) -> u32 {
(bucket as i32 - 1 + self.offset) as u32
pos + 1
}
/// Compute 6 percentiles in one call. O(6 × log N).
/// Quantiles q must be in (0, 1).
/// Quantiles q must be in (0, 1). Output is in BPS.
pub fn quantiles(&self, qs: &[f64; 6], out: &mut [u32; 6]) {
if self.count == 0 {
out.iter_mut().for_each(|o| *o = 0);
return;
}
for (i, &q) in qs.iter().enumerate() {
// k = ceil(q * count), clamped to [1, count]
let k = ((q * self.count as f64).ceil() as u64).clamp(1, self.count);
out[i] = self.bucket_to_bps(self.kth(k));
let k = ((q * self.count as f64).ceil() as u32).clamp(1, self.count);
out[i] = (self.kth(k) as u32 - 1) * BUCKET_BPS as u32;
}
}
}
@@ -141,30 +127,19 @@ mod tests {
#[test]
fn basic_quantiles() {
let mut ep = ExpandingPercentiles::default();
// Add ratios 0.01 to 1.0 (BPS 100 to 10000)
for i in 1..=1000 {
ep.add(i as f32 / 1000.0);
}
assert_eq!(ep.count(), 1000);
let median = quantile(&ep, 0.5);
// 0.5 ratio = 5000 BPS, median of 1..1000 ratios ≈ 500/1000 = 0.5 = 5000 BPS
assert!(
(median as i32 - 5000).abs() < 100,
"median was {median}"
);
assert!((median as i32 - 5000).abs() < 100, "median was {median}");
let p99 = quantile(&ep, 0.99);
assert!(
(p99 as i32 - 9900).abs() < 100,
"p99 was {p99}"
);
assert!((p99 as i32 - 9900).abs() < 100, "p99 was {p99}");
let p01 = quantile(&ep, 0.01);
assert!(
(p01 as i32 - 100).abs() < 100,
"p01 was {p01}"
);
assert!((p01 as i32 - 100).abs() < 100, "p01 was {p01}");
}
#[test]
@@ -177,10 +152,9 @@ mod tests {
#[test]
fn single_value() {
let mut ep = ExpandingPercentiles::default();
ep.add(0.42); // 4200 BPS
assert_eq!(quantile(&ep, 0.0001), 4200);
assert_eq!(quantile(&ep, 0.5), 4200);
assert_eq!(quantile(&ep, 0.9999), 4200);
ep.add(0.42);
let v = quantile(&ep, 0.5);
assert!((v as i32 - 4200).abs() <= BUCKET_BPS, "got {v}");
}
#[test]

View File

@@ -61,6 +61,19 @@ where
T: Default + SubAssign,
{
compute_height(&mut self.height)?;
self.compute_rest(max_from, windows, exit)
}
/// Compute cumulative + rolling sum from already-populated height data.
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()>
where
T: Default + SubAssign,
{
self.cumulative
.height
.compute_cumulative(max_from, &self.height, exit)?;

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_traversable::{Traversable, TreeNode};
use brk_types::{BasisPoints16, Cents, Height, Version};
use brk_types::{Cents, Height, Version};
use vecdb::{AnyExportableVec, Database, ReadOnlyClone, Ro, Rw, StorageMode, WritableVec};
use crate::indexes;
@@ -11,62 +11,6 @@ pub const PERCENTILES: [u8; 19] = [
];
pub const PERCENTILES_LEN: usize = PERCENTILES.len();
/// Interpolated percentile rank of `spot` within the distribution described
/// by `percentile_prices` (one price per entry of `PERCENTILES`).
/// Returned as a fraction in [0, 1] (`BasisPoints16`), 1 ≈ 100th percentile.
pub(crate) fn compute_spot_percentile_rank(
    percentile_prices: &[Cents; PERCENTILES_LEN],
    spot: Cents,
) -> BasisPoints16 {
    // Degenerate case: no price data at all.
    if spot == Cents::ZERO && percentile_prices[0] == Cents::ZERO {
        return BasisPoints16::ZERO;
    }
    let s = f64::from(spot);

    // Below the lowest tracked percentile (p5): extrapolate linearly towards 0,
    // i.e. rank = 5% * (spot / p5).
    let lowest = f64::from(percentile_prices[0]);
    if s <= lowest {
        return if lowest == 0.0 {
            BasisPoints16::ZERO
        } else {
            BasisPoints16::from((0.05 * s / lowest).max(0.0))
        };
    }

    // Above the highest tracked percentile (p95): extrapolate towards 100
    // using the p90→p95 slope, capped at 1.0.
    let highest = f64::from(percentile_prices[PERCENTILES_LEN - 1]);
    let second_highest = f64::from(percentile_prices[PERCENTILES_LEN - 2]);
    if s >= highest {
        if highest == second_highest {
            return BasisPoints16::ONE;
        }
        let per_unit = 0.05 / (highest - second_highest);
        return BasisPoints16::from((0.95 + (s - highest) * per_unit).min(1.0));
    }

    // Interior: locate the enclosing band and interpolate linearly inside it.
    for (band, pcts) in percentile_prices.windows(2).zip(PERCENTILES.windows(2)) {
        let (lo, hi) = (f64::from(band[0]), f64::from(band[1]));
        if s >= lo && s <= hi {
            let lo_pct = f64::from(pcts[0]) / 100.0;
            let hi_pct = f64::from(pcts[1]) / 100.0;
            if hi == lo {
                return BasisPoints16::from(lo_pct);
            }
            let t = (s - lo) / (hi - lo);
            return BasisPoints16::from(lo_pct + t * (hi_pct - lo_pct));
        }
    }

    // Not reachable for finite spot: it is strictly inside [p5, p95] here.
    BasisPoints16::ZERO
}
pub struct PercentilesVecs<M: StorageMode = Rw> {
pub vecs: [Price<ComputedFromHeight<Cents, M>>; PERCENTILES_LEN],
}

View File

@@ -5,20 +5,20 @@ use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::internal::{ComputedFromHeight, Price};
use crate::{blocks, indexes, prices};
use crate::{indexes, prices};
use super::ComputedFromHeightRatioExtended;
use super::ComputedFromHeightRatio;
#[derive(Deref, DerefMut, Traversable)]
pub struct ComputedFromHeightPriceWithRatioExtended<M: StorageMode = Rw> {
pub struct ComputedFromHeightPriceWithRatio<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub inner: ComputedFromHeightRatioExtended<M>,
pub inner: ComputedFromHeightRatio<M>,
pub price: Price<ComputedFromHeight<Cents, M>>,
}
impl ComputedFromHeightPriceWithRatioExtended {
impl ComputedFromHeightPriceWithRatio {
pub(crate) fn forced_import(
db: &Database,
name: &str,
@@ -27,15 +27,14 @@ impl ComputedFromHeightPriceWithRatioExtended {
) -> Result<Self> {
let v = version + Version::TWO;
Ok(Self {
inner: ComputedFromHeightRatioExtended::forced_import(db, name, version, indexes)?,
inner: ComputedFromHeightRatio::forced_import(db, name, version, indexes)?,
price: Price::forced_import(db, name, v, indexes)?,
})
}
/// Compute price via closure (in cents), then compute ratio + extended metrics.
/// Compute price via closure (in cents), then compute ratio.
pub(crate) fn compute_all<F>(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
@@ -45,13 +44,9 @@ impl ComputedFromHeightPriceWithRatioExtended {
F: FnMut(&mut EagerVec<PcoVec<Height, Cents>>) -> Result<()>,
{
compute_price(&mut self.price.cents.height)?;
self.inner.compute_rest(
blocks,
prices,
starting_indexes,
exit,
&self.price.cents.height,
)?;
let close_price = &prices.price.cents.height;
self.inner
.compute_ratio(starting_indexes, close_price, &self.price.cents.height, exit)?;
Ok(())
}
}

View File

@@ -23,7 +23,7 @@ pub use derived::{
RatioCents64, TimesSqrt,
};
pub use ratio::{
NegRatioDollarsBps32, RatioCentsBp16, RatioCentsBp32, RatioCentsSignedCentsBps32,
NegRatioDollarsBps32, RatioCentsBp32, RatioCentsSignedCentsBps32,
RatioCentsSignedDollarsBps32, RatioDiffCentsBps32, RatioDiffDollarsBps32, RatioDiffF32Bps32,
RatioDollarsBp16, RatioDollarsBp32, RatioDollarsBps32, RatioSatsBp16, RatioU32Bp16,
RatioU64Bp16,

View File

@@ -30,19 +30,6 @@ impl BinaryTransform<Sats, Sats, BasisPoints16> for RatioSatsBp16 {
}
}
/// Cents / Cents ratio expressed as 16-bit basis points; a zero denominator
/// maps to zero instead of dividing.
pub struct RatioCentsBp16;

impl BinaryTransform<Cents, Cents, BasisPoints16> for RatioCentsBp16 {
    #[inline(always)]
    fn apply(numerator: Cents, denominator: Cents) -> BasisPoints16 {
        // Guard the division: an empty denominator yields a zero ratio.
        if denominator == Cents::ZERO {
            return BasisPoints16::ZERO;
        }
        let ratio = numerator.inner() as f64 / denominator.inner() as f64;
        BasisPoints16::from(ratio)
    }
}
pub struct RatioCentsBp32;
impl BinaryTransform<Cents, Cents, BasisPoints32> for RatioCentsBp32 {
@@ -143,7 +130,12 @@ pub struct RatioDollarsBp32;
impl BinaryTransform<Dollars, Dollars, BasisPoints32> for RatioDollarsBp32 {
#[inline(always)]
fn apply(numerator: Dollars, denominator: Dollars) -> BasisPoints32 {
BasisPoints32::from(f64::from(numerator) / f64::from(denominator))
let ratio = f64::from(numerator) / f64::from(denominator);
if ratio.is_finite() {
BasisPoints32::from(ratio)
} else {
BasisPoints32::ZERO
}
}
}

View File

@@ -29,10 +29,6 @@ impl Vecs {
self.returns
.compute(prices, blocks, &self.lookback, starting_indexes, exit)?;
// Volatility (depends on returns)
self.volatility
.compute(&self.returns, starting_indexes.height, exit)?;
// Range metrics (independent)
self.range.compute(prices, blocks, starting_indexes, exit)?;

View File

@@ -66,16 +66,17 @@ impl Vecs {
self.period_cost_basis.zip_mut_with_days(&self.period_stack)
{
let days = days as usize;
let start = average_price.cents.height.len();
let stack_data = stack
.sats
.height
.collect_range_at(sh, stack.sats.height.len());
.collect_range_at(start, stack.sats.height.len());
average_price.cents.height.compute_transform(
starting_indexes.height,
h2d,
|(h, di, _)| {
let di_usize = di.to_usize();
let stack_sats = stack_data[h.to_usize() - sh];
let stack_sats = stack_data[h.to_usize() - start];
let avg = if di_usize > first_price_di {
let num_days = days.min(di_usize + 1).min(di_usize + 1 - first_price_di);
Cents::from(DCA_AMOUNT * num_days / Bitcoin::from(stack_sats))
@@ -123,15 +124,16 @@ impl Vecs {
self.period_lump_sum_stack.zip_mut_with_days(&lookback_dca)
{
let total_invested = DCA_AMOUNT * days as usize;
let ls_start = stack.sats.height.len();
let lookback_data = lookback_price
.cents
.height
.collect_range_at(sh, lookback_price.cents.height.len());
.collect_range_at(ls_start, lookback_price.cents.height.len());
stack.sats.height.compute_transform(
starting_indexes.height,
h2d,
|(h, _di, _)| {
let lp = lookback_data[h.to_usize() - sh];
let lp = lookback_data[h.to_usize() - ls_start];
let sats = if lp == Cents::ZERO {
Sats::ZERO
} else {
@@ -217,10 +219,11 @@ impl Vecs {
.zip(start_days)
{
let from_usize = from.to_usize();
let cls_start = average_price.cents.height.len();
let stack_data = stack
.sats
.height
.collect_range_at(sh, stack.sats.height.len());
.collect_range_at(cls_start, stack.sats.height.len());
average_price.cents.height.compute_transform(
starting_indexes.height,
h2d,
@@ -229,7 +232,7 @@ impl Vecs {
if di_usize < from_usize {
return (h, Cents::ZERO);
}
let stack_sats = stack_data[h.to_usize() - sh];
let stack_sats = stack_data[h.to_usize() - cls_start];
let num_days = di_usize + 1 - from_usize;
let avg = Cents::from(DCA_AMOUNT * num_days / Bitcoin::from(stack_sats));
(h, avg)

View File

@@ -25,7 +25,7 @@ impl Vecs {
let ath = AthVecs::forced_import(&db, version, indexes)?;
let lookback = LookbackVecs::forced_import(&db, version, indexes)?;
let returns = ReturnsVecs::forced_import(&db, version, indexes)?;
let volatility = VolatilityVecs::forced_import(&db, version, indexes, &returns)?;
let volatility = VolatilityVecs::forced_import(version, &returns)?;
let range = RangeVecs::forced_import(&db, version, indexes)?;
let moving_average = MovingAverageVecs::forced_import(&db, version, indexes)?;
let dca = DcaVecs::forced_import(&db, version, indexes)?;

View File

@@ -29,7 +29,7 @@ pub struct Vecs<M: StorageMode = Rw> {
pub ath: AthVecs<M>,
pub lookback: LookbackVecs<M>,
pub returns: ReturnsVecs<M>,
pub volatility: VolatilityVecs<M>,
pub volatility: VolatilityVecs,
pub range: RangeVecs<M>,
pub moving_average: MovingAverageVecs<M>,
pub dca: DcaVecs<M>,

View File

@@ -34,7 +34,7 @@ impl Vecs {
(&mut self.price_sma_4y, 4 * 365),
] {
let window_starts = blocks.count.start_vec(period);
sma.compute_all(blocks, prices, starting_indexes, exit, |v| {
sma.compute_all(prices, starting_indexes, exit, |v| {
v.compute_rolling_average(starting_indexes.height, window_starts, close, exit)?;
Ok(())
})?;
@@ -59,7 +59,7 @@ impl Vecs {
(&mut self.price_ema_4y, 4 * 365),
] {
let window_starts = blocks.count.start_vec(period);
ema.compute_all(blocks, prices, starting_indexes, exit, |v| {
ema.compute_all(prices, starting_indexes, exit, |v| {
v.compute_rolling_ema(starting_indexes.height, window_starts, close, exit)?;
Ok(())
})?;

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{CentsTimesTenths, ComputedFromHeightPriceWithRatioExtended, Price},
internal::{CentsTimesTenths, ComputedFromHeightPriceWithRatio, Price},
};
impl Vecs {
@@ -16,7 +16,7 @@ impl Vecs {
) -> Result<Self> {
macro_rules! import {
($name:expr) => {
ComputedFromHeightPriceWithRatioExtended::forced_import(
ComputedFromHeightPriceWithRatio::forced_import(
db, $name, version, indexes,
)?
};

View File

@@ -2,42 +2,42 @@ use brk_traversable::Traversable;
use brk_types::Cents;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightPriceWithRatioExtended, LazyFromHeight, Price};
use crate::internal::{ComputedFromHeightPriceWithRatio, LazyFromHeight, Price};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub price_sma_1w: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_8d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_13d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_21d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_1m: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_34d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_55d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_89d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_111d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_144d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_200d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_350d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_1y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_2y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_200w: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_4y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_sma_1w: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_8d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_13d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_21d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_1m: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_34d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_55d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_89d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_111d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_144d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_200d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_350d: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_1y: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_2y: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_200w: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_4y: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_1w: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_8d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_12d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_13d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_21d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_26d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_1m: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_34d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_55d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_89d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_144d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_200d: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_1y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_2y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_200w: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_4y: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_ema_1w: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_8d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_12d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_13d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_21d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_26d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_1m: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_34d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_55d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_89d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_144d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_200d: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_1y: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_2y: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_200w: ComputedFromHeightPriceWithRatio<M>,
pub price_ema_4y: ComputedFromHeightPriceWithRatio<M>,
pub price_sma_200d_x2_4: Price<LazyFromHeight<Cents, Cents>>,
pub price_sma_200d_x0_8: Price<LazyFromHeight<Cents, Cents>>,

View File

@@ -1,5 +1,5 @@
use brk_error::Result;
use brk_types::{BasisPointsSigned32, Dollars, Indexes, StoredF32};
use brk_types::{BasisPointsSigned32, Dollars, Indexes};
use vecdb::Exit;
use super::Vecs;
@@ -54,26 +54,6 @@ impl Vecs {
sd.compute_all(blocks, starting_indexes, exit, _24h_price_return_ratio)?;
}
// Downside returns: min(return, 0)
self.price_downside_24h.compute_transform(
starting_indexes.height,
_24h_price_return_ratio,
|(i, ret, ..)| {
let v = f32::from(ret).min(0.0);
(i, StoredF32::from(v))
},
exit,
)?;
// Downside deviation (SD of downside returns)
for sd in [
&mut self.price_downside_24h_sd_1w,
&mut self.price_downside_24h_sd_1m,
&mut self.price_downside_24h_sd_1y,
] {
sd.compute_all(blocks, starting_indexes, exit, &self.price_downside_24h)?;
}
Ok(())
}
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, EagerVec, ImportableVec};
use vecdb::Database;
use super::super::lookback::ByLookbackPeriod;
use super::Vecs;
@@ -52,42 +52,12 @@ impl Vecs {
indexes,
)?;
let price_downside_24h = EagerVec::forced_import(db, "price_downside_24h", version)?;
let price_downside_24h_sd_1w = ComputedFromHeightStdDev::forced_import(
db,
"price_downside_24h",
"1w",
7,
version + v1,
indexes,
)?;
let price_downside_24h_sd_1m = ComputedFromHeightStdDev::forced_import(
db,
"price_downside_24h",
"1m",
30,
version + v1,
indexes,
)?;
let price_downside_24h_sd_1y = ComputedFromHeightStdDev::forced_import(
db,
"price_downside_24h",
"1y",
365,
version + v1,
indexes,
)?;
Ok(Self {
price_return,
price_cagr,
price_return_24h_sd_1w,
price_return_24h_sd_1m,
price_return_24h_sd_1y,
price_downside_24h,
price_downside_24h_sd_1w,
price_downside_24h_sd_1m,
price_downside_24h_sd_1y,
})
}
}

View File

@@ -1,6 +1,6 @@
use brk_traversable::Traversable;
use brk_types::{BasisPointsSigned32, Height, StoredF32};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use brk_types::BasisPointsSigned32;
use vecdb::{Rw, StorageMode};
use crate::{
internal::{ComputedFromHeightStdDev, PercentFromHeight},
@@ -18,9 +18,4 @@ pub struct Vecs<M: StorageMode = Rw> {
pub price_return_24h_sd_1m: ComputedFromHeightStdDev<M>,
pub price_return_24h_sd_1y: ComputedFromHeightStdDev<M>,
// Downside returns and deviation (for Sortino ratio)
pub price_downside_24h: M::Stored<EagerVec<PcoVec<Height, StoredF32>>>,
pub price_downside_24h_sd_1w: ComputedFromHeightStdDev<M>,
pub price_downside_24h_sd_1m: ComputedFromHeightStdDev<M>,
pub price_downside_24h_sd_1y: ComputedFromHeightStdDev<M>,
}

View File

@@ -1,98 +0,0 @@
use brk_error::Result;
use brk_types::{Height, StoredF32};
use vecdb::{EagerVec, Exit, PcoVec, ReadableVec};
use super::super::returns;
use super::Vecs;
impl Vecs {
    /// Derive risk-adjusted return series from the returns vectors.
    ///
    /// Sharpe = period return / period volatility; Sortino = period return /
    /// (downside 24h std-dev scaled by sqrt(days in the period)).
    pub(crate) fn compute(
        &mut self,
        returns: &returns::Vecs,
        starting_indexes_height: Height,
        exit: &Exit,
    ) -> Result<()> {
        // Sharpe: return divided by volatility over the matching window.
        let sharpe_jobs = [
            (
                &mut self.price_sharpe_1w,
                &returns.price_return._1w.ratio.height,
                &self.price_volatility_1w.height,
            ),
            (
                &mut self.price_sharpe_1m,
                &returns.price_return._1m.ratio.height,
                &self.price_volatility_1m.height,
            ),
            (
                &mut self.price_sharpe_1y,
                &returns.price_return._1y.ratio.height,
                &self.price_volatility_1y.height,
            ),
        ];
        for (target, numerator, volatility) in sharpe_jobs {
            compute_divided(
                &mut target.height,
                starting_indexes_height,
                numerator,
                volatility,
                1.0,
                exit,
            )?;
        }

        // Sortino: return divided by downside deviation scaled to the window
        // length (sd * sqrt(days)).
        let sortino_jobs = [
            (
                &mut self.price_sortino_1w,
                &returns.price_return._1w.ratio.height,
                &returns.price_downside_24h_sd_1w.sd.height,
                7.0_f32.sqrt(),
            ),
            (
                &mut self.price_sortino_1m,
                &returns.price_return._1m.ratio.height,
                &returns.price_downside_24h_sd_1m.sd.height,
                30.0_f32.sqrt(),
            ),
            (
                &mut self.price_sortino_1y,
                &returns.price_return._1y.ratio.height,
                &returns.price_downside_24h_sd_1y.sd.height,
                365.0_f32.sqrt(),
            ),
        ];
        for (target, numerator, downside_sd, sqrt_days) in sortino_jobs {
            compute_divided(
                &mut target.height,
                starting_indexes_height,
                numerator,
                downside_sd,
                sqrt_days,
                exit,
            )?;
        }

        Ok(())
    }
}
/// Element-wise safe division of two height-indexed series:
/// `out[h] = ret[h] / (divisor[h] * divisor_scale)`, storing 0 wherever the
/// scaled divisor is zero (keeps NaN/inf out of the stored vector).
fn compute_divided(
    out: &mut EagerVec<PcoVec<Height, StoredF32>>,
    starting_indexes_height: Height,
    ret: &impl ReadableVec<Height, StoredF32>,
    divisor: &impl ReadableVec<Height, StoredF32>,
    divisor_scale: f32,
    exit: &Exit,
) -> Result<()> {
    out.compute_transform2(
        starting_indexes_height,
        ret,
        divisor,
        |(height, numerator, raw_divisor, ..)| {
            let scaled = *raw_divisor * divisor_scale;
            let value = match scaled {
                d if d == 0.0 => 0.0,
                d => *numerator / d,
            };
            (height, StoredF32::from(value))
        },
        exit,
    )?;
    Ok(())
}

View File

@@ -1,19 +1,13 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, ReadableCloneableVec};
use vecdb::ReadableCloneableVec;
use super::super::returns;
use super::Vecs;
use crate::indexes;
use crate::internal::{ComputedFromHeight, Days7, Days30, Days365, LazyFromHeight, TimesSqrt};
use crate::internal::{Days30, Days365, Days7, LazyFromHeight, TimesSqrt};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
returns: &returns::Vecs,
) -> Result<Self> {
pub(crate) fn forced_import(version: Version, returns: &returns::Vecs) -> Result<Self> {
let v2 = Version::TWO;
let price_volatility_1w = LazyFromHeight::from_computed::<TimesSqrt<Days7>>(
@@ -49,30 +43,10 @@ impl Vecs {
&returns.price_return_24h_sd_1y.sd,
);
let price_sharpe_1w =
ComputedFromHeight::forced_import(db, "price_sharpe_1w", version + v2, indexes)?;
let price_sharpe_1m =
ComputedFromHeight::forced_import(db, "price_sharpe_1m", version + v2, indexes)?;
let price_sharpe_1y =
ComputedFromHeight::forced_import(db, "price_sharpe_1y", version + v2, indexes)?;
let price_sortino_1w =
ComputedFromHeight::forced_import(db, "price_sortino_1w", version + v2, indexes)?;
let price_sortino_1m =
ComputedFromHeight::forced_import(db, "price_sortino_1m", version + v2, indexes)?;
let price_sortino_1y =
ComputedFromHeight::forced_import(db, "price_sortino_1y", version + v2, indexes)?;
Ok(Self {
price_volatility_1w,
price_volatility_1m,
price_volatility_1y,
price_sharpe_1w,
price_sharpe_1m,
price_sharpe_1y,
price_sortino_1w,
price_sortino_1m,
price_sortino_1y,
})
}
}

View File

@@ -1,4 +1,3 @@
mod compute;
mod import;
mod vecs;

View File

@@ -1,20 +1,11 @@
use brk_traversable::Traversable;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeight, LazyFromHeight};
use crate::internal::LazyFromHeight;
use brk_types::StoredF32;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[derive(Clone, Traversable)]
pub struct Vecs {
pub price_volatility_1w: LazyFromHeight<StoredF32>,
pub price_volatility_1m: LazyFromHeight<StoredF32>,
pub price_volatility_1y: LazyFromHeight<StoredF32>,
pub price_sharpe_1w: ComputedFromHeight<StoredF32, M>,
pub price_sharpe_1m: ComputedFromHeight<StoredF32, M>,
pub price_sharpe_1y: ComputedFromHeight<StoredF32, M>,
pub price_sortino_1w: ComputedFromHeight<StoredF32, M>,
pub price_sortino_1m: ComputedFromHeight<StoredF32, M>,
pub price_sortino_1y: ComputedFromHeight<StoredF32, M>,
}

View File

@@ -2,14 +2,14 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Height, Indexes, PoolSlug, StoredU32};
use vecdb::{
AnyVec, BinaryTransform, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, Version,
BinaryTransform, Database, Exit, ReadableVec, Rw, StorageMode, Version,
};
use crate::{
blocks, indexes,
internal::{
ComputedFromHeight, ComputedFromHeightCumulativeSum, MaskSats, PercentFromHeight,
PercentRollingWindows, RatioU32Bp16, RollingWindows, ValueFromHeightCumulativeSum,
ComputedFromHeightCumulativeSum, MaskSats, PercentFromHeight,
PercentRollingWindows, RatioU32Bp16, ValueFromHeightCumulativeSum,
},
mining, prices,
};
@@ -19,13 +19,9 @@ pub struct Vecs<M: StorageMode = Rw> {
slug: PoolSlug,
pub blocks_mined: ComputedFromHeightCumulativeSum<StoredU32, M>,
pub blocks_mined_sum: RollingWindows<StoredU32, M>,
pub subsidy: ValueFromHeightCumulativeSum<M>,
pub fee: ValueFromHeightCumulativeSum<M>,
pub coinbase: ValueFromHeightCumulativeSum<M>,
pub rewards: ValueFromHeightCumulativeSum<M>,
pub dominance: PercentFromHeight<BasisPoints16, M>,
pub dominance_rolling: PercentRollingWindows<BasisPoints16, M>,
pub blocks_since_last_mined: ComputedFromHeight<StoredU32, M>,
}
impl Vecs {
@@ -45,17 +41,8 @@ impl Vecs {
indexes,
)?;
let blocks_mined_sum =
RollingWindows::forced_import(db, &suffix("blocks_mined_sum"), version, indexes)?;
let subsidy =
ValueFromHeightCumulativeSum::forced_import(db, &suffix("subsidy"), version, indexes)?;
let fee =
ValueFromHeightCumulativeSum::forced_import(db, &suffix("fee"), version, indexes)?;
let coinbase =
ValueFromHeightCumulativeSum::forced_import(db, &suffix("coinbase"), version, indexes)?;
let rewards =
ValueFromHeightCumulativeSum::forced_import(db, &suffix("rewards"), version, indexes)?;
let dominance =
PercentFromHeight::forced_import(db, &suffix("dominance"), version, indexes)?;
@@ -67,16 +54,7 @@ impl Vecs {
dominance_rolling,
slug,
blocks_mined,
blocks_mined_sum,
coinbase,
subsidy,
fee,
blocks_since_last_mined: ComputedFromHeight::forced_import(
db,
&suffix("blocks_since_last_mined"),
version,
indexes,
)?,
rewards,
})
}
@@ -112,13 +90,6 @@ impl Vecs {
Ok(())
})?;
self.blocks_mined_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.blocks_mined.height,
exit,
)?;
self.dominance
.compute_binary::<StoredU32, StoredU32, RatioU32Bp16>(
starting_indexes.height,
@@ -131,7 +102,7 @@ impl Vecs {
.dominance_rolling
.as_mut_array()
.into_iter()
.zip(self.blocks_mined_sum.as_array())
.zip(self.blocks_mined.sum.as_array())
.zip(blocks.count.block_count_sum.as_array())
{
dom.compute_binary::<StoredU32, StoredU32, RatioU32Bp16>(
@@ -142,39 +113,7 @@ impl Vecs {
)?;
}
self.subsidy.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.subsidy.base.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
},
)?;
self.fee.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.fees.base.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
},
)?;
self.coinbase.compute(
self.rewards.compute(
starting_indexes.height,
&window_starts,
prices,
@@ -190,36 +129,6 @@ impl Vecs {
},
)?;
{
let resume_from = self
.blocks_since_last_mined
.height
.len()
.min(starting_indexes.height.to_usize());
let mut prev = if resume_from > 0 {
self.blocks_since_last_mined
.height
.collect_one_at(resume_from - 1)
.unwrap()
} else {
StoredU32::ZERO
};
self.blocks_since_last_mined.height.compute_transform(
starting_indexes.height,
&self.blocks_mined.height,
|(h, mined, ..)| {
let blocks = if mined.is_zero() {
prev + StoredU32::ONE
} else {
StoredU32::ZERO
};
prev = blocks;
(h, blocks)
},
exit,
)?;
}
Ok(())
}
}