computer: snapshot

This commit is contained in:
nym21
2026-02-27 01:23:36 +01:00
parent 78fc5ffcf7
commit 72c17096ea
52 changed files with 3011 additions and 2967 deletions

View File

@@ -67,8 +67,8 @@ pub(crate) fn process_blocks(
// From transactions and inputs/outputs (via .height or .height.sum_cumulative.sum patterns):
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.height.sum_cumulative.sum.0;
let height_to_input_count = &inputs.count.height.sum_cumulative.sum.0;
let height_to_output_count = &outputs.count.total_count.full.sum_cumulative.sum.0;
let height_to_input_count = &inputs.count.full.sum_cumulative.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.timestamp_monotonic;
let height_to_date = &blocks.time.date;

View File

@@ -10,7 +10,6 @@ use super::{CostBasisBase, CostBasisExtended};
/// Cost basis metrics with guaranteed extended (no Option).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct CostBasisWithExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
@@ -31,5 +30,4 @@ impl CostBasisWithExtended {
pub(crate) fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.extended.validate_computed_versions(base_version)
}
}

View File

@@ -12,7 +12,6 @@ use super::{RealizedAdjusted, RealizedBase};
/// Realized metrics with guaranteed adjusted (no Option).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RealizedWithAdjusted<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -12,7 +12,6 @@ use super::{RealizedBase, RealizedExtended};
/// Realized metrics with guaranteed extended (no Option).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RealizedWithExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -12,7 +12,6 @@ use super::{RealizedAdjusted, RealizedBase, RealizedExtended};
/// Realized metrics with guaranteed extended AND adjusted (no Options).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RealizedWithExtendedAdjusted<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -10,7 +10,6 @@ use super::{RelativeBase, RelativeExtendedOwnPnl, RelativePeakRegret};
/// Relative metrics for the "all" cohort (base + own_pnl + peak_regret, NO rel_to_all).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeForAll<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -7,14 +7,13 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{
RelativeBase, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl,
RelativePeakRegret, RelativeToAll,
RelativeBase, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl, RelativePeakRegret,
RelativeToAll,
};
/// Full extended relative metrics (base + rel_to_all + own_market_cap + own_pnl + peak_regret).
/// Used by: sth, lth, age_range cohorts.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
@@ -54,11 +53,26 @@ impl RelativeWithExtended {
peak_regret_val: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute(max_from, unrealized, realized, supply_total_sats, market_cap, exit)?;
self.rel_to_all.compute(max_from, unrealized, supply_total_sats, all_supply_sats, exit)?;
self.extended_own_market_cap.compute(max_from, unrealized, own_market_cap, exit)?;
self.base.compute(
max_from,
unrealized,
realized,
supply_total_sats,
market_cap,
exit,
)?;
self.rel_to_all.compute(
max_from,
unrealized,
supply_total_sats,
all_supply_sats,
exit,
)?;
self.extended_own_market_cap
.compute(max_from, unrealized, own_market_cap, exit)?;
self.extended_own_pnl.compute(max_from, unrealized, exit)?;
self.peak_regret.compute(max_from, peak_regret_val, market_cap, exit)?;
self.peak_regret
.compute(max_from, peak_regret_val, market_cap, exit)?;
Ok(())
}
}

View File

@@ -11,7 +11,6 @@ use super::{RelativeBase, RelativePeakRegret, RelativeToAll};
/// Relative metrics with rel_to_all + peak_regret (no extended).
/// Used by: max_age, min_age cohorts.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithPeakRegret<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -11,7 +11,6 @@ use super::{RelativeBase, RelativeToAll};
/// Relative metrics with rel_to_all (no extended, no peak_regret).
/// Used by: epoch, year, type, amount, address cohorts.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithRelToAll<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -9,7 +9,6 @@ use super::{UnrealizedBase, UnrealizedPeakRegret};
/// Unrealized metrics with guaranteed peak regret (no Option).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct UnrealizedWithPeakRegret<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
@@ -26,5 +25,4 @@ impl UnrealizedWithPeakRegret {
peak_regret_ext: UnrealizedPeakRegret::forced_import(cfg)?,
})
}
}

View File

@@ -5,11 +5,9 @@
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct BlockWindows<A, B = A> {
#[traversable(rename = "1h")]
pub _1h: A,
#[traversable(rename = "24h")]
pub _24h: B,
}

View File

@@ -5,7 +5,6 @@
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DistributionStats<A, B = A, C = A, D = A, E = A, F = A, G = A, H = A> {
pub average: A,
pub min: B,

View File

@@ -16,13 +16,11 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumulative<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
}

View File

@@ -17,15 +17,12 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumulativeFull<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
pub rolling: RollingFull<T, M>,
}

View File

@@ -18,15 +18,12 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumulativeSum<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
pub rolling: RollingWindows<T, M>,
}

View File

@@ -16,7 +16,6 @@ use crate::indexes;
use crate::internal::{ComputedVecValue, NumericValue, RollingDistribution, WindowStarts};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightDistribution<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,

View File

@@ -17,14 +17,12 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightFull<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[traversable(flatten)]
pub height: Full<Height, T, M>,
#[traversable(flatten)]
pub full: Full<Height, T, M>,
pub rolling: RollingFull<T, M>,
}
@@ -45,7 +43,10 @@ where
let height = Full::forced_import(db, name, v)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
Ok(Self { height, rolling })
Ok(Self {
full: height,
rolling,
})
}
/// Compute Full stats via closure, then rolling windows from the per-block sum.
@@ -60,11 +61,11 @@ where
T: From<f64> + Default + SubAssign + Copy + Ord,
f64: From<T>,
{
compute_full(&mut self.height)?;
compute_full(&mut self.full)?;
self.rolling.compute(
max_from,
windows,
self.height.sum_cumulative.sum.inner(),
self.full.sum_cumulative.sum.inner(),
exit,
)?;
Ok(())

View File

@@ -18,7 +18,6 @@ const VERSION: Version = Version::ZERO;
/// Block full aggregation with lazy height transform + cumulative + rolling windows.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyComputedFromHeightFull<T, S = T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
@@ -47,12 +46,7 @@ where
let height = LazyVecFrom1::transformed::<F>(name, v, source.read_only_boxed_clone());
let rest = ComputedHeightDerivedCumulativeFull::forced_import(
db,
name,
v,
indexes,
)?;
let rest = ComputedHeightDerivedCumulativeFull::forced_import(db, name, v, indexes)?;
Ok(Self {
height,

View File

@@ -17,9 +17,7 @@ use crate::{
/// Generic price metric with both USD and sats representations.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct Price<U> {
#[traversable(flatten)]
pub usd: U,
pub sats: LazyFromHeightLast<SatsFract, Dollars>,
}
@@ -65,4 +63,3 @@ where
Self { usd, sats }
}
}

View File

@@ -9,7 +9,6 @@ use crate::{ComputeIndexes, blocks, indexes, prices};
use super::{ComputedFromHeightRatio, ComputedFromHeightRatioExtension};
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightRatioExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
@@ -44,12 +43,8 @@ impl ComputedFromHeightRatioExtended {
let close_price = &prices.usd.price;
self.base
.compute_ratio(starting_indexes, close_price, metric_price, exit)?;
self.extended.compute_rest(
blocks,
starting_indexes,
exit,
&self.base.ratio.height,
)?;
self.extended
.compute_rest(blocks, starting_indexes, exit, &self.base.ratio.height)?;
self.extended
.compute_usd_bands(starting_indexes, metric_price, exit)?;
Ok(())

View File

@@ -10,7 +10,6 @@ use crate::{ComputeIndexes, blocks, indexes, prices};
use super::ComputedFromHeightRatioExtended;
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightPriceWithRatioExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -1,7 +1,10 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
WritableVec,
};
use crate::{ComputeIndexes, blocks, indexes};
@@ -10,7 +13,6 @@ use crate::internal::{ComputedFromHeightLast, Price};
use super::ComputedFromHeightStdDev;
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightStdDevExtended<M: StorageMode = Rw> {
#[traversable(flatten)]
pub base: ComputedFromHeightStdDev<M>,
@@ -68,12 +70,7 @@ impl ComputedFromHeightStdDevExtended {
macro_rules! import_usd {
($suffix:expr) => {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)?
Price::forced_import(db, &format!("{name}_{}", $suffix), version, indexes)?
};
}
@@ -115,7 +112,8 @@ impl ComputedFromHeightStdDevExtended {
exit: &Exit,
source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
self.base.compute_all(blocks, starting_indexes, exit, source)?;
self.base
.compute_all(blocks, starting_indexes, exit, source)?;
let sma_opt: Option<&EagerVec<PcoVec<Height, StoredF32>>> = None;
self.compute_bands(starting_indexes, exit, sma_opt, source)
@@ -148,37 +146,54 @@ impl ComputedFromHeightStdDevExtended {
let source_len = source.len();
let source_data = source.collect_range_at(start, source_len);
let sma_len = sma_opt.map(|s| s.len()).unwrap_or(self.base.sma.height.len());
let sma_len = sma_opt
.map(|s| s.len())
.unwrap_or(self.base.sma.height.len());
let sma_data: Vec<StoredF32> = if let Some(sma) = sma_opt {
sma.collect_range_at(start, sma_len)
} else {
self.base.sma.height.collect_range_at(start, sma_len)
};
let sd_data = self.base.sd.height.collect_range_at(start, self.base.sd.height.len());
let sd_data = self
.base
.sd
.height
.collect_range_at(start, self.base.sd.height.len());
for (offset, _ratio) in source_data.into_iter().enumerate() {
let index = start + offset;
let average = sma_data[offset];
let sd = sd_data[offset];
self.p0_5sd.height.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))?;
self.p0_5sd
.height
.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))?;
self.p1sd.height.truncate_push_at(index, average + sd)?;
self.p1_5sd.height.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))?;
self.p1_5sd
.height
.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))?;
self.p2sd.height.truncate_push_at(index, average + 2 * sd)?;
self.p2_5sd.height.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))?;
self.p2_5sd
.height
.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))?;
self.p3sd.height.truncate_push_at(index, average + 3 * sd)?;
self.m0_5sd.height.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))?;
self.m0_5sd
.height
.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))?;
self.m1sd.height.truncate_push_at(index, average - sd)?;
self.m1_5sd.height.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))?;
self.m1_5sd
.height
.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))?;
self.m2sd.height.truncate_push_at(index, average - 2 * sd)?;
self.m2_5sd.height.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))?;
self.m2_5sd
.height
.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))?;
self.m3sd.height.truncate_push_at(index, average - 3 * sd)?;
}
{
let _lock = exit.lock();
self.mut_band_height_vecs()
.try_for_each(|v| v.flush())?;
self.mut_band_height_vecs().try_for_each(|v| v.flush())?;
}
if let Some(sma) = sma_opt {
@@ -213,7 +228,8 @@ impl ComputedFromHeightStdDevExtended {
macro_rules! compute_band {
($usd_field:ident, $band_source:expr) => {
self.$usd_field.usd
self.$usd_field
.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,

View File

@@ -16,7 +16,6 @@ use crate::{
};
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ValueFromHeightLastRolling<M: StorageMode = Rw> {
#[deref]
#[deref_mut]

View File

@@ -1,11 +1,14 @@
//! LazyFromTxDistribution - lazy txindex source + computed distribution.
//! ComputedFromTxDistribution - stored per-tx EagerVec + computed distribution.
//!
//! Like LazyFromTxDistribution, but the per-tx source is eagerly computed
//! and stored rather than lazily derived.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::TxIndex;
use schemars::JsonSchema;
use vecdb::{Database, Exit, LazyVecFrom2, ReadableVec, Rw, StorageMode, Version};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode, Version};
use crate::{
ComputeIndexes, indexes,
@@ -13,30 +16,21 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct LazyFromTxDistribution<T, S1, S2, M: StorageMode = Rw>
pub struct ComputedFromTxDistribution<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1: ComputedVecValue,
S2: ComputedVecValue,
{
pub txindex: LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>,
pub txindex: M::Stored<EagerVec<PcoVec<TxIndex, T>>>,
#[traversable(flatten)]
pub distribution: TxDerivedDistribution<T, M>,
}
impl<T, S1, S2> LazyFromTxDistribution<T, S1, S2>
impl<T> ComputedFromTxDistribution<T>
where
T: NumericValue + JsonSchema,
S1: ComputedVecValue + JsonSchema,
S2: ComputedVecValue + JsonSchema,
{
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
txindex: LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>,
) -> Result<Self> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
let txindex = EagerVec::forced_import(db, name, version)?;
let distribution = TxDerivedDistribution::forced_import(db, name, version)?;
Ok(Self {
txindex,
@@ -44,26 +38,28 @@ where
})
}
pub(crate) fn derive_from(
#[allow(clippy::too_many_arguments)]
pub(crate) fn derive_from_with_skip(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
block_windows: &BlockWindowStarts<'_>,
exit: &Exit,
skip_count: usize,
) -> Result<()>
where
T: Copy + Ord + From<f64> + Default,
f64: From<T>,
LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>: ReadableVec<TxIndex, T>,
{
self.distribution.derive_from(
self.distribution.derive_from_with_skip(
indexer,
indexes,
starting_indexes,
block_windows,
&self.txindex,
exit,
skip_count,
)
}
}

View File

@@ -0,0 +1,68 @@
//! LazyFromTxDistribution - lazy txindex source + computed distribution.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::TxIndex;
use schemars::JsonSchema;
use vecdb::{Database, Exit, LazyVecFrom2, ReadableVec, Rw, StorageMode, Version};
use crate::{
ComputeIndexes, indexes,
internal::{BlockWindowStarts, ComputedVecValue, NumericValue, TxDerivedDistribution},
};
/// Lazy per-transaction source paired with a computed distribution.
///
/// `txindex` is a lazy vec derived on the fly from two tx-indexed sources
/// (`S1`, `S2`) rather than stored; `distribution` holds the derived
/// per-block / rolling distribution stats (see `derive_from`, which feeds
/// `txindex` into it). Storage mode `M` defaults to read-write (`Rw`).
#[derive(Traversable)]
pub struct LazyFromTxDistribution<T, S1, S2, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1: ComputedVecValue,
    S2: ComputedVecValue,
{
    // Lazily computed per-tx values; never persisted, unlike the
    // EagerVec-backed txindex in ComputedFromTxDistribution.
    pub txindex: LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>,
    // Flattened so the distribution's vecs traverse as direct children.
    #[traversable(flatten)]
    pub distribution: TxDerivedDistribution<T, M>,
}
impl<T, S1, S2> LazyFromTxDistribution<T, S1, S2>
where
    T: NumericValue + JsonSchema,
    S1: ComputedVecValue + JsonSchema,
    S2: ComputedVecValue + JsonSchema,
{
    /// Imports the stored distribution vecs from `db` under `name`/`version`
    /// and pairs them with the caller-built lazy `txindex` source.
    ///
    /// Only `distribution` touches the database; `txindex` is taken as-is.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        txindex: LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>,
    ) -> Result<Self> {
        let distribution = TxDerivedDistribution::forced_import(db, name, version)?;
        Ok(Self {
            txindex,
            distribution,
        })
    }

    /// Recomputes the distribution stats, using the lazy `self.txindex`
    /// as the per-tx value source.
    ///
    /// Thin forwarder to `TxDerivedDistribution::derive_from`; the extra
    /// `ReadableVec` bound is what lets the lazy vec stand in for a stored
    /// one there.
    pub(crate) fn derive_from(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        block_windows: &BlockWindowStarts<'_>,
        exit: &Exit,
    ) -> Result<()>
    where
        T: Copy + Ord + From<f64> + Default,
        f64: From<T>,
        LazyVecFrom2<TxIndex, T, TxIndex, S1, TxIndex, S2>: ReadableVec<TxIndex, T>,
    {
        self.distribution.derive_from(
            indexer,
            indexes,
            starting_indexes,
            block_windows,
            &self.txindex,
            exit,
        )
    }
}

View File

@@ -1,3 +1,5 @@
// Eager (`distribution`) and lazy (`lazy_distribution`) tx-distribution
// wrappers — presumably ComputedFromTxDistribution and LazyFromTxDistribution
// respectively (TODO confirm against module contents); both re-exported flat.
mod distribution;
mod lazy_distribution;
pub use distribution::*;
pub use lazy_distribution::*;

View File

@@ -17,12 +17,10 @@ use crate::{
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedCumulativeFull<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
pub rolling: RollingFull<T, M>,

View File

@@ -13,11 +13,12 @@ use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode, Version};
use crate::{
ComputeIndexes, indexes,
internal::{BlockRollingDistribution, BlockWindowStarts, ComputedVecValue, Distribution, NumericValue},
internal::{
BlockRollingDistribution, BlockWindowStarts, ComputedVecValue, Distribution, NumericValue,
},
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct TxDerivedDistribution<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
@@ -31,11 +32,7 @@ impl<T> TxDerivedDistribution<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
) -> Result<Self> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
let block = Distribution::forced_import(db, name, version)?;
let rolling = BlockRollingDistribution::forced_import(db, name, version)?;

View File

@@ -18,12 +18,10 @@ use crate::{
/// Sum (4 windows) + Distribution (8 stats × 4 windows) = 36 stored height vecs.
#[derive(Traversable)]
#[traversable(merge)]
pub struct RollingFull<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
{
#[traversable(flatten)]
pub sum: RollingWindows<T, M>,
#[traversable(flatten)]
pub distribution: RollingDistribution<T, M>,

View File

@@ -24,11 +24,6 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> CumulativeVec<I, T> {
)?))
}
#[inline]
pub(crate) fn inner(&self) -> &EagerVec<PcoVec<I, T>> {
&self.0
}
pub fn read_only_clone(&self) -> CumulativeVec<I, T, Ro> {
CumulativeVec(StoredVec::read_only_clone(&self.0))
}

View File

@@ -5,7 +5,6 @@
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct Windows<A, B = A, C = A, D = A> {
#[traversable(rename = "24h")]
pub _24h: A,

View File

@@ -359,7 +359,6 @@ impl Computer {
&self.blocks,
&self.prices,
&self.mining,
&self.transactions,
&starting_indexes_clone,
exit,
)?;

View File

@@ -28,12 +28,11 @@ impl Vecs {
exit,
)?;
// Hashrate metrics (disjoint field borrow via coinbase_sum)
self.hashrate.compute(
&blocks.count,
&blocks.difficulty,
&self.rewards.coinbase_sum._24h.sats.height,
&self.rewards.coinbase_sum._24h.usd.height,
&self.rewards.coinbase.sats.rolling.sum._24h.height,
&self.rewards.coinbase.usd.rolling.sum._24h.height,
starting_indexes,
exit,
)?;

View File

@@ -20,77 +20,93 @@ impl Vecs {
) -> Result<()> {
let window_starts = count_vecs.window_starts();
self.coinbase.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
// Cursors avoid per-height PcoVec page decompression for the
// tx-indexed lookups. Coinbase txindex values are strictly
// increasing, so the cursors only advance forward.
let mut txout_cursor = indexer.vecs.transactions.first_txoutindex.cursor();
let mut count_cursor = indexes.txindex.output_count.cursor();
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
|(height, txindex, ..)| {
let ti = txindex.to_usize();
txout_cursor.advance(ti - txout_cursor.position());
let first_txoutindex = txout_cursor.next().unwrap().to_usize();
count_cursor.advance(ti - count_cursor.position());
let output_count: usize = count_cursor.next().unwrap().into();
let sats = indexer.vecs.outputs.value.fold_range_at(
first_txoutindex,
first_txoutindex + output_count,
Sats::ZERO,
|acc, v| acc + v,
);
(height, sats)
},
exit,
)?;
Ok(())
})?;
self.coinbase_sum.compute_rolling_sum(
self.coinbase.compute(
starting_indexes.height,
&window_starts,
&self.coinbase.sats.height,
&self.coinbase.usd.height,
prices,
exit,
|vec| {
// Cursors avoid per-height PcoVec page decompression for the
// tx-indexed lookups. Coinbase txindex values are strictly
// increasing, so the cursors only advance forward.
let mut txout_cursor = indexer.vecs.transactions.first_txoutindex.cursor();
let mut count_cursor = indexes.txindex.output_count.cursor();
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
|(height, txindex, ..)| {
let ti = txindex.to_usize();
txout_cursor.advance(ti - txout_cursor.position());
let first_txoutindex = txout_cursor.next().unwrap().to_usize();
count_cursor.advance(ti - count_cursor.position());
let output_count: usize = count_cursor.next().unwrap().into();
let sats = indexer.vecs.outputs.value.fold_range_at(
first_txoutindex,
first_txoutindex + output_count,
Sats::ZERO,
|acc, v| acc + v,
);
(height, sats)
},
exit,
)?;
Ok(())
},
)?;
let fee_sats_source = transactions_fees.fee.height.sum_cumulative.sum.inner();
let fee_usd_source = &transactions_fees.fee_usd_sum;
self.fee_sum.compute_rolling_sum(
// Coinbase fee is 0, so including it in the sum doesn't affect the result
self.fees.compute(
starting_indexes.height,
&window_starts,
fee_sats_source,
fee_usd_source,
prices,
exit,
|vec| {
vec.compute_sum_from_indexes(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&transactions_fees.fee.txindex,
exit,
)?;
Ok(())
},
)?;
self.subsidy.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
vec.compute_transform2(
starting_indexes.height,
&self.coinbase.sats.height,
transactions_fees.fee.height.sum_cumulative.sum.inner(),
|(height, coinbase, fees, ..)| {
(
height,
coinbase.checked_sub(fees).unwrap_or_else(|| {
dbg!(height, coinbase, fees);
panic!()
}),
)
},
exit,
)?;
Ok(())
})?;
self.subsidy.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
vec.compute_transform2(
starting_indexes.height,
&self.coinbase.sats.height,
&self.fees.sats.height,
|(height, coinbase, fees, ..)| {
(
height,
coinbase.checked_sub(fees).unwrap_or_else(|| {
dbg!(height, coinbase, fees);
panic!()
}),
)
},
exit,
)?;
Ok(())
},
)?;
self.unclaimed_rewards
.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
self.unclaimed_rewards.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
vec.compute_transform(
starting_indexes.height,
&self.subsidy.sats.height,
@@ -102,12 +118,13 @@ impl Vecs {
exit,
)?;
Ok(())
})?;
},
)?;
// All-time cumulative fee dominance
self.fee_dominance.height.compute_percentage(
starting_indexes.height,
transactions_fees.fee.height.sum_cumulative.cumulative.inner(),
&self.fees.sats.cumulative.height,
&self.coinbase.sats.cumulative.height,
exit,
)?;
@@ -115,26 +132,26 @@ impl Vecs {
// Rolling fee dominance = sum(fees) / sum(coinbase) * 100
self.fee_dominance_24h.height.compute_percentage(
starting_indexes.height,
&self.fee_sum._24h.sats.height,
&self.coinbase_sum._24h.sats.height,
&self.fees.sats.rolling.sum._24h.height,
&self.coinbase.sats.rolling.sum._24h.height,
exit,
)?;
self.fee_dominance_7d.height.compute_percentage(
starting_indexes.height,
&self.fee_sum._7d.sats.height,
&self.coinbase_sum._7d.sats.height,
&self.fees.sats.rolling.sum._7d.height,
&self.coinbase.sats.rolling.sum._7d.height,
exit,
)?;
self.fee_dominance_30d.height.compute_percentage(
starting_indexes.height,
&self.fee_sum._30d.sats.height,
&self.coinbase_sum._30d.sats.height,
&self.fees.sats.rolling.sum._30d.height,
&self.coinbase.sats.rolling.sum._30d.height,
exit,
)?;
self.fee_dominance_1y.height.compute_percentage(
starting_indexes.height,
&self.fee_sum._1y.sats.height,
&self.coinbase_sum._1y.sats.height,
&self.fees.sats.rolling.sum._1y.height,
&self.coinbase.sats.rolling.sum._1y.height,
exit,
)?;

View File

@@ -5,7 +5,10 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, StoredValueRollingWindows, ValueFromHeightFull, ValueFromHeightSumCumulative},
internal::{
ComputedFromHeightLast, ValueFromHeightFull,
ValueFromHeightSumCumulative,
},
};
impl Vecs {
@@ -15,10 +18,9 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
coinbase_sum: StoredValueRollingWindows::forced_import(db, "coinbase_sum", version, indexes)?,
fee_sum: StoredValueRollingWindows::forced_import(db, "fee_sum", version, indexes)?,
coinbase: ValueFromHeightFull::forced_import(db, "coinbase", version, indexes)?,
subsidy: ValueFromHeightFull::forced_import(db, "subsidy", version, indexes)?,
fees: ValueFromHeightFull::forced_import(db, "fees", version, indexes)?,
unclaimed_rewards: ValueFromHeightSumCumulative::forced_import(
db,
"unclaimed_rewards",

View File

@@ -2,15 +2,17 @@ use brk_traversable::Traversable;
use brk_types::{Dollars, StoredF32};
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, StoredValueRollingWindows, ValueFromHeightFull, ValueFromHeightSumCumulative};
use crate::internal::{
ComputedFromHeightLast, ValueFromHeightFull,
ValueFromHeightSumCumulative,
};
/// Coinbase/subsidy/rewards metrics
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub coinbase_sum: StoredValueRollingWindows<M>,
pub fee_sum: StoredValueRollingWindows<M>,
pub coinbase: ValueFromHeightFull<M>,
pub subsidy: ValueFromHeightFull<M>,
pub fees: ValueFromHeightFull<M>,
pub unclaimed_rewards: ValueFromHeightSumCumulative<M>,
pub fee_dominance: ComputedFromHeightLast<StoredF32, M>,
pub fee_dominance_24h: ComputedFromHeightLast<StoredF32, M>,

View File

@@ -19,11 +19,8 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.count.window_starts();
self.total_count.compute(
starting_indexes.height,
&window_starts,
exit,
|full| {
self.total_count
.compute(starting_indexes.height, &window_starts, exit, |full| {
full.compute_with_skip(
starting_indexes.height,
&indexes.txindex.output_count,
@@ -32,13 +29,12 @@ impl Vecs {
exit,
0,
)
},
)?;
})?;
self.utxo_count.height.compute_transform3(
starting_indexes.height,
&*self.total_count.height.sum_cumulative.cumulative,
&*inputs_count.height.sum_cumulative.cumulative,
&*self.total_count.full.sum_cumulative.cumulative,
&*inputs_count.full.sum_cumulative.cumulative,
&scripts_count.opreturn.cumulative.height,
|(h, output_count, input_count, opreturn_count, ..)| {
let block_count = u64::from(h + 1_usize);

View File

@@ -16,7 +16,7 @@ mod vecs;
use crate::{
blocks,
indexes::{self, ComputeIndexes},
mining, prices, transactions,
mining, prices,
};
pub const DB_NAME: &str = "pools";
@@ -73,7 +73,6 @@ impl Vecs {
blocks: &blocks::Vecs,
prices: &prices::Vecs,
mining: &mining::Vecs,
transactions: &transactions::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -83,7 +82,6 @@ impl Vecs {
blocks,
prices,
mining,
transactions,
starting_indexes,
exit,
)?;
@@ -100,7 +98,6 @@ impl Vecs {
blocks: &blocks::Vecs,
prices: &prices::Vecs,
mining: &mining::Vecs,
transactions: &transactions::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -113,7 +110,6 @@ impl Vecs {
blocks,
prices,
mining,
transactions,
exit,
)
})?;

View File

@@ -10,7 +10,7 @@ use crate::{
ComputedFromHeightCumulativeSum, ComputedFromHeightLast, MaskSats, PercentageU32F32,
ValueFromHeightSumCumulative,
},
mining, prices, transactions,
mining, prices,
};
#[derive(Traversable)]
@@ -45,8 +45,12 @@ impl Vecs {
let suffix = |s: &str| format!("{}_{s}", slug);
let version = parent_version;
let blocks_mined =
ComputedFromHeightCumulativeSum::forced_import(db, &suffix("blocks_mined"), version, indexes)?;
let blocks_mined = ComputedFromHeightCumulativeSum::forced_import(
db,
&suffix("blocks_mined"),
version,
indexes,
)?;
let blocks_mined_24h_sum = ComputedFromHeightLast::forced_import(
db,
@@ -76,7 +80,8 @@ impl Vecs {
let subsidy =
ValueFromHeightSumCumulative::forced_import(db, &suffix("subsidy"), version, indexes)?;
let fee = ValueFromHeightSumCumulative::forced_import(db, &suffix("fee"), version, indexes)?;
let fee =
ValueFromHeightSumCumulative::forced_import(db, &suffix("fee"), version, indexes)?;
let coinbase =
ValueFromHeightSumCumulative::forced_import(db, &suffix("coinbase"), version, indexes)?;
@@ -130,29 +135,29 @@ impl Vecs {
blocks: &blocks::Vecs,
prices: &prices::Vecs,
mining: &mining::Vecs,
transactions: &transactions::Vecs,
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.count.window_starts();
self.blocks_mined.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
height_to_pool,
|(h, id, ..)| {
(
h,
if id == self.slug {
StoredU32::ONE
} else {
StoredU32::ZERO
},
)
},
exit,
)?;
Ok(())
})?;
self.blocks_mined
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
height_to_pool,
|(h, id, ..)| {
(
h,
if id == self.slug {
StoredU32::ONE
} else {
StoredU32::ZERO
},
)
},
exit,
)?;
Ok(())
})?;
// Compute rolling window blocks mined using the start heights from blocks.count
self.blocks_mined_24h_sum.height.compute_rolling_sum(
@@ -223,8 +228,12 @@ impl Vecs {
exit,
)?;
self.subsidy
.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
self.subsidy.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
@@ -232,20 +241,31 @@ impl Vecs {
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
})?;
},
)?;
self.fee.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&*transactions.fees.fee.height.sum_cumulative.sum,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
})?;
self.fee.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.fees.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
},
)?;
self.coinbase
.compute(starting_indexes.height, &window_starts, prices, exit, |vec| {
self.coinbase.compute(
starting_indexes.height,
&window_starts,
prices,
exit,
|vec| {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
@@ -253,7 +273,8 @@ impl Vecs {
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
})?;
},
)?;
{
let mut prev = StoredU32::ZERO;

View File

@@ -5,7 +5,7 @@ use brk_types::StoredU64;
use vecdb::Exit;
use super::Vecs;
use crate::{blocks, outputs, ComputeIndexes};
use crate::{ComputeIndexes, blocks, outputs};
impl Vecs {
pub(crate) fn compute(
@@ -18,133 +18,144 @@ impl Vecs {
) -> Result<()> {
let window_starts = count_vecs.window_starts();
self.p2a.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2aaddressindex,
&indexer.vecs.addresses.p2abytes,
exit,
)?)
})?;
self.p2a
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2aaddressindex,
&indexer.vecs.addresses.p2abytes,
exit,
)?)
})?;
self.p2ms.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_p2msoutputindex,
&indexer.vecs.scripts.p2ms_to_txindex,
exit,
)?)
})?;
self.p2ms
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_p2msoutputindex,
&indexer.vecs.scripts.p2ms_to_txindex,
exit,
)?)
})?;
self.p2pk33.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pk33addressindex,
&indexer.vecs.addresses.p2pk33bytes,
exit,
)?)
})?;
self.p2pk33
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pk33addressindex,
&indexer.vecs.addresses.p2pk33bytes,
exit,
)?)
})?;
self.p2pk65.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pk65addressindex,
&indexer.vecs.addresses.p2pk65bytes,
exit,
)?)
})?;
self.p2pk65
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pk65addressindex,
&indexer.vecs.addresses.p2pk65bytes,
exit,
)?)
})?;
self.p2pkh.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pkhaddressindex,
&indexer.vecs.addresses.p2pkhbytes,
exit,
)?)
})?;
self.p2pkh
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2pkhaddressindex,
&indexer.vecs.addresses.p2pkhbytes,
exit,
)?)
})?;
self.p2sh.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2shaddressindex,
&indexer.vecs.addresses.p2shbytes,
exit,
)?)
})?;
self.p2sh
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2shaddressindex,
&indexer.vecs.addresses.p2shbytes,
exit,
)?)
})?;
self.p2tr.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2traddressindex,
&indexer.vecs.addresses.p2trbytes,
exit,
)?)
})?;
self.p2tr
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2traddressindex,
&indexer.vecs.addresses.p2trbytes,
exit,
)?)
})?;
self.p2wpkh.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2wpkhaddressindex,
&indexer.vecs.addresses.p2wpkhbytes,
exit,
)?)
})?;
self.p2wpkh
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2wpkhaddressindex,
&indexer.vecs.addresses.p2wpkhbytes,
exit,
)?)
})?;
self.p2wsh.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2wshaddressindex,
&indexer.vecs.addresses.p2wshbytes,
exit,
)?)
})?;
self.p2wsh
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.addresses.first_p2wshaddressindex,
&indexer.vecs.addresses.p2wshbytes,
exit,
)?)
})?;
self.opreturn.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_opreturnindex,
&indexer.vecs.scripts.opreturn_to_txindex,
exit,
)?)
})?;
self.opreturn
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_opreturnindex,
&indexer.vecs.scripts.opreturn_to_txindex,
exit,
)?)
})?;
self.unknownoutput.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_unknownoutputindex,
&indexer.vecs.scripts.unknown_to_txindex,
exit,
)?)
})?;
self.unknownoutput
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_unknownoutputindex,
&indexer.vecs.scripts.unknown_to_txindex,
exit,
)?)
})?;
self.emptyoutput.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_emptyoutputindex,
&indexer.vecs.scripts.empty_to_txindex,
exit,
)?)
})?;
self.emptyoutput
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.scripts.first_emptyoutputindex,
&indexer.vecs.scripts.empty_to_txindex,
exit,
)?)
})?;
// Compute segwit = p2wpkh + p2wsh + p2tr
self.segwit.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_transform3(
starting_indexes.height,
&self.p2wpkh.height,
&self.p2wsh.height,
&self.p2tr.height,
|(h, p2wpkh, p2wsh, p2tr, ..)| {
(h, StoredU64::from(*p2wpkh + *p2wsh + *p2tr))
},
exit,
)?)
})?;
self.segwit
.compute(starting_indexes.height, &window_starts, exit, |v| {
Ok(v.compute_transform3(
starting_indexes.height,
&self.p2wpkh.height,
&self.p2wsh.height,
&self.p2tr.height,
|(h, p2wpkh, p2wsh, p2tr, ..)| (h, StoredU64::from(*p2wpkh + *p2wsh + *p2tr)),
exit,
)?)
})?;
// Adoption ratios: per-block ratio of script type / total outputs
self.taproot_adoption.height.compute_transform2(
starting_indexes.height,
&self.p2tr.height,
&outputs_count.total_count.height.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum_cumulative.sum.0,
|(h, p2tr, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*p2tr as f64 / *total as f64)
@@ -159,7 +170,7 @@ impl Vecs {
self.segwit_adoption.height.compute_transform2(
starting_indexes.height,
&self.segwit.height,
&outputs_count.total_count.height.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum_cumulative.sum.0,
|(h, segwit, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*segwit as f64 / *total as f64)

View File

@@ -31,14 +31,13 @@ impl Vecs {
self.size
.compute(indexer, indexes, &blocks.count, starting_indexes, exit)?;
// Fees depends on size, blocks (window starts), prices (USD conversion)
// Fees depends on size, blocks (window starts)
self.fees.compute(
indexer,
indexes,
inputs,
&self.size,
blocks,
prices,
starting_indexes,
exit,
)?;

View File

@@ -1,11 +1,11 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Bitcoin, FeeRate, Sats};
use brk_types::{FeeRate, Sats};
use vecdb::{Exit, unlikely};
use super::super::size;
use super::Vecs;
use crate::{blocks, indexes, inputs, prices, ComputeIndexes};
use crate::{ComputeIndexes, blocks, indexes, inputs};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -16,7 +16,6 @@ impl Vecs {
txins: &inputs::Vecs,
size_vecs: &size::Vecs,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -36,7 +35,7 @@ impl Vecs {
exit,
)?;
self.fee_txindex.compute_transform2(
self.fee.txindex.compute_transform2(
starting_indexes.txindex,
&self.input_value,
&self.output_value,
@@ -51,57 +50,34 @@ impl Vecs {
exit,
)?;
self.fee_rate_txindex.compute_transform2(
self.fee_rate.txindex.compute_transform2(
starting_indexes.txindex,
&self.fee_txindex,
&self.fee.txindex,
&size_vecs.vsize.txindex,
|(txindex, fee, vsize, ..)| (txindex, FeeRate::from((fee, vsize))),
exit,
)?;
// Skip coinbase (first tx per block) since it has no fee
let window_starts = blocks.count.window_starts();
self.fee.compute(
starting_indexes.height,
&window_starts,
exit,
|full| {
full.compute_with_skip(
starting_indexes.height,
&self.fee_txindex,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
1,
)
},
)?;
let block_windows = blocks.count.block_window_starts();
// Skip coinbase (first tx per block) since it has no feerate
self.fee_rate.compute_with_skip(
starting_indexes.height,
&self.fee_rate_txindex,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
// Skip coinbase (first tx per block) since it has fee=0
self.fee.derive_from_with_skip(
indexer,
indexes,
starting_indexes,
&block_windows,
exit,
1,
)?;
// Compute fee USD sum per block: price * Bitcoin::from(sats)
self.fee_usd_sum.compute_transform2(
starting_indexes.height,
self.fee.height.sum_cumulative.sum.inner(),
&prices.usd.price,
|(h, sats, price, ..)| (h, price * Bitcoin::from(sats)),
exit,
)?;
// Rolling fee rate distribution (from per-block average)
self.fee_rate_rolling.compute_distribution(
starting_indexes.height,
&window_starts,
&self.fee_rate.average.0,
// Skip coinbase (first tx per block) since it has no feerate
self.fee_rate.derive_from_with_skip(
indexer,
indexes,
starting_indexes,
&block_windows,
exit,
1,
)?;
Ok(())

View File

@@ -3,30 +3,19 @@ use brk_types::Version;
use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightFull, Distribution, RollingDistribution},
};
use crate::internal::ComputedFromTxDistribution;
/// Bump this when fee/feerate aggregation logic changes (e.g., skip coinbase).
const VERSION: Version = Version::new(2);
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
pub(crate) fn forced_import(db: &Database, version: Version) -> Result<Self> {
let v = version + VERSION;
Ok(Self {
input_value: EagerVec::forced_import(db, "input_value", version)?,
output_value: EagerVec::forced_import(db, "output_value", version)?,
fee_txindex: EagerVec::forced_import(db, "fee", v)?,
fee: ComputedFromHeightFull::forced_import(db, "fee", v, indexes)?,
fee_usd_sum: EagerVec::forced_import(db, "fee_usd_sum", v)?,
fee_rate_txindex: EagerVec::forced_import(db, "fee_rate", v)?,
fee_rate: Distribution::forced_import(db, "fee_rate", v)?,
fee_rate_rolling: RollingDistribution::forced_import(db, "fee_rate", v, indexes)?,
fee: ComputedFromTxDistribution::forced_import(db, "fee", v)?,
fee_rate: ComputedFromTxDistribution::forced_import(db, "fee_rate", v)?,
})
}
}

View File

@@ -1,17 +1,13 @@
use brk_traversable::Traversable;
use brk_types::{Dollars, FeeRate, Height, Sats, TxIndex};
use brk_types::{FeeRate, Sats, TxIndex};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use crate::internal::{ComputedFromHeightFull, Distribution, RollingDistribution};
use crate::internal::ComputedFromTxDistribution;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub input_value: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub output_value: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub fee_txindex: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub fee: ComputedFromHeightFull<Sats, M>,
pub fee_usd_sum: M::Stored<EagerVec<PcoVec<Height, Dollars>>>,
pub fee_rate_txindex: M::Stored<EagerVec<PcoVec<TxIndex, FeeRate>>>,
pub fee_rate: Distribution<Height, FeeRate, M>,
pub fee_rate_rolling: RollingDistribution<FeeRate, M>,
pub fee: ComputedFromTxDistribution<Sats, M>,
pub fee_rate: ComputedFromTxDistribution<FeeRate, M>,
}

View File

@@ -24,7 +24,7 @@ impl Vecs {
let count = CountVecs::forced_import(&db, version, indexer, indexes)?;
let size = SizeVecs::forced_import(&db, version, indexer)?;
let fees = FeesVecs::forced_import(&db, version, indexes)?;
let fees = FeesVecs::forced_import(&db, version)?;
let versions = VersionsVecs::forced_import(&db, version, indexes)?;
let volume = VolumeVecs::forced_import(&db, version, indexes)?;

View File

@@ -4,8 +4,8 @@ use brk_types::StoredF32;
use vecdb::Exit;
use super::Vecs;
use crate::{blocks, ComputeIndexes, indexes, inputs, outputs, prices};
use crate::transactions::{count, fees};
use crate::{ComputeIndexes, blocks, indexes, inputs, outputs, prices};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -87,7 +87,7 @@ impl Vecs {
// inputs_per_sec: per-block input count / block interval
self.inputs_per_sec.height.compute_transform2(
starting_indexes.height,
&inputs_count.height.sum_cumulative.sum.0,
&inputs_count.full.sum_cumulative.sum.0,
&blocks.interval.height,
|(h, input_count, interval, ..)| {
let interval_f64 = f64::from(*interval);
@@ -104,7 +104,7 @@ impl Vecs {
// outputs_per_sec: per-block output count / block interval
self.outputs_per_sec.height.compute_transform2(
starting_indexes.height,
&outputs_count.total_count.height.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum_cumulative.sum.0,
&blocks.interval.height,
|(h, output_count, interval, ..)| {
let interval_f64 = f64::from(*interval);

View File

@@ -9,11 +9,8 @@ use crate::internal::{
/// Volume metrics
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(flatten)]
pub sent_sum: ValueFromHeightLastRolling<M>,
#[traversable(flatten)]
pub received_sum: ValueFromHeightLastRolling<M>,
#[traversable(flatten)]
pub annualized_volume: ValueFromHeightLast<M>,
pub tx_per_sec: ComputedFromHeightLast<StoredF32, M>,
pub outputs_per_sec: ComputedFromHeightLast<StoredF32, M>,