global: big snapshot

This commit is contained in:
nym21
2026-03-13 12:47:01 +01:00
parent c83955eea7
commit 2b31c7f6b7
158 changed files with 4961 additions and 6939 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -61,6 +61,19 @@ impl<T> ByAddressType<T> {
})
}
pub fn map_with_name<U>(&self, f: impl Fn(&'static str, &T) -> U) -> ByAddressType<U> {
ByAddressType {
p2pk65: f(P2PK65, &self.p2pk65),
p2pk33: f(P2PK33, &self.p2pk33),
p2pkh: f(P2PKH, &self.p2pkh),
p2sh: f(P2SH, &self.p2sh),
p2wpkh: f(P2WPKH, &self.p2wpkh),
p2wsh: f(P2WSH, &self.p2wsh),
p2tr: f(P2TR, &self.p2tr),
p2a: f(P2A, &self.p2a),
}
}
pub fn new_with_index<F>(f: F) -> Result<Self>
where
F: Fn(usize) -> Result<T>,

View File

@@ -21,9 +21,9 @@ impl Vecs {
self.lookback
.compute(&self.time, starting_indexes, exit)?;
self.count
.compute(indexer, &self.lookback, starting_indexes, exit)?;
.compute(indexer, starting_indexes, exit)?;
self.interval
.compute(indexer, &self.lookback, starting_indexes, exit)?;
.compute(indexer, starting_indexes, exit)?;
self.size
.compute(indexer, &self.lookback, starting_indexes, exit)?;
self.weight

View File

@@ -5,13 +5,10 @@ use vecdb::Exit;
use super::Vecs;
use crate::blocks::lookback;
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
lookback: &lookback::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
@@ -22,20 +19,7 @@ impl Vecs {
|h| (h, StoredU32::from(1_u32)),
exit,
)?;
self.total.cumulative.height.compute_cumulative(
starting_indexes.height,
&self.total.raw.height,
exit,
)?;
// Rolling window block counts
let ws = lookback.window_starts();
self.total.sum.compute_rolling_sum(
starting_indexes.height,
&ws,
&self.total.raw.height,
exit,
)?;
self.total.compute_rest(starting_indexes.height, exit)?;
Ok(())
}

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{BlockCountTarget, ComputedPerBlockCumulativeSum, ConstantVecs},
internal::{BlockCountTarget, CachedWindowStarts, ComputedPerBlockCumulativeWithSums, ConstantVecs},
};
impl Vecs {
@@ -13,6 +13,7 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
target: ConstantVecs::new::<BlockCountTarget>(
@@ -20,11 +21,12 @@ impl Vecs {
version,
indexes,
),
total: ComputedPerBlockCumulativeSum::forced_import(
total: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"block_count",
version,
indexes,
cached_starts,
)?,
})
}

View File

@@ -2,10 +2,10 @@ use brk_traversable::Traversable;
use brk_types::{StoredU32, StoredU64};
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedPerBlockCumulativeSum, ConstantVecs};
use crate::internal::{ComputedPerBlockCumulativeWithSums, ConstantVecs};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub target: ConstantVecs<StoredU64>,
pub total: ComputedPerBlockCumulativeSum<StoredU32, M>,
pub total: ComputedPerBlockCumulativeWithSums<StoredU32, StoredU64, M>,
}

View File

@@ -24,11 +24,12 @@ impl Vecs {
let db = open_db(parent_path, super::DB_NAME, 50_000_000)?;
let version = parent_version;
let count = CountVecs::forced_import(&db, version, indexes)?;
let lookback = LookbackVecs::forced_import(&db, version)?;
let interval = IntervalVecs::forced_import(&db, version, indexes)?;
let size = SizeVecs::forced_import(&db, version, indexes)?;
let weight = WeightVecs::forced_import(&db, version, indexes)?;
let cached_starts = &lookback.cached_window_starts;
let count = CountVecs::forced_import(&db, version, indexes, cached_starts)?;
let interval = IntervalVecs::forced_import(&db, version, indexes, cached_starts)?;
let size = SizeVecs::forced_import(&db, version, indexes, cached_starts)?;
let weight = WeightVecs::forced_import(&db, version, indexes, cached_starts)?;
let time = TimeVecs::forced_import(&db, version, indexes)?;
let difficulty = DifficultyVecs::forced_import(&db, version, indexer, indexes)?;
let halving = HalvingVecs::forced_import(&db, version, indexes)?;

View File

@@ -4,20 +4,17 @@ use brk_types::{CheckedSub, Indexes, Timestamp};
use vecdb::{Exit, ReadableVec};
use super::Vecs;
use crate::blocks;
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
lookback: &blocks::LookbackVecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let mut prev_timestamp = None;
let window_starts = lookback.window_starts();
self.0
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.timestamp,

View File

@@ -3,19 +3,21 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedPerBlockRollingAverage};
use crate::{indexes, internal::{CachedWindowStarts, ComputedPerBlockRollingAverage}};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let interval = ComputedPerBlockRollingAverage::forced_import(
db,
"block_interval",
version,
indexes,
cached_starts,
)?;
Ok(Self(interval))

View File

@@ -1,14 +1,16 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Indexes, Timestamp, Version};
use vecdb::{AnyVec, Cursor, Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use vecdb::{AnyVec, CachedVec, Cursor, Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use crate::internal::WindowStarts;
use crate::internal::{CachedWindowStarts, Windows, WindowStarts};
use super::time;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(skip)]
pub cached_window_starts: CachedWindowStarts,
pub _1h: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub _24h: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1d
pub _3d: M::Stored<EagerVec<PcoVec<Height, Height>>>,
@@ -56,50 +58,63 @@ pub struct Vecs<M: StorageMode = Rw> {
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version) -> Result<Self> {
let _1h = ImportableVec::forced_import(db, "height_1h_ago", version)?;
let _24h = ImportableVec::forced_import(db, "height_24h_ago", version)?;
let _3d = ImportableVec::forced_import(db, "height_3d_ago", version)?;
let _1w = ImportableVec::forced_import(db, "height_1w_ago", version)?;
let _8d = ImportableVec::forced_import(db, "height_8d_ago", version)?;
let _9d = ImportableVec::forced_import(db, "height_9d_ago", version)?;
let _12d = ImportableVec::forced_import(db, "height_12d_ago", version)?;
let _13d = ImportableVec::forced_import(db, "height_13d_ago", version)?;
let _2w = ImportableVec::forced_import(db, "height_2w_ago", version)?;
let _21d = ImportableVec::forced_import(db, "height_21d_ago", version)?;
let _26d = ImportableVec::forced_import(db, "height_26d_ago", version)?;
let _1m = ImportableVec::forced_import(db, "height_1m_ago", version)?;
let _34d = ImportableVec::forced_import(db, "height_34d_ago", version)?;
let _55d = ImportableVec::forced_import(db, "height_55d_ago", version)?;
let _2m = ImportableVec::forced_import(db, "height_2m_ago", version)?;
let _9w = ImportableVec::forced_import(db, "height_9w_ago", version)?;
let _12w = ImportableVec::forced_import(db, "height_12w_ago", version)?;
let _89d = ImportableVec::forced_import(db, "height_89d_ago", version)?;
let _3m = ImportableVec::forced_import(db, "height_3m_ago", version)?;
let _14w = ImportableVec::forced_import(db, "height_14w_ago", version)?;
let _111d = ImportableVec::forced_import(db, "height_111d_ago", version)?;
let _144d = ImportableVec::forced_import(db, "height_144d_ago", version)?;
let _6m = ImportableVec::forced_import(db, "height_6m_ago", version)?;
let _26w = ImportableVec::forced_import(db, "height_26w_ago", version)?;
let _200d = ImportableVec::forced_import(db, "height_200d_ago", version)?;
let _9m = ImportableVec::forced_import(db, "height_9m_ago", version)?;
let _350d = ImportableVec::forced_import(db, "height_350d_ago", version)?;
let _12m = ImportableVec::forced_import(db, "height_12m_ago", version)?;
let _1y = ImportableVec::forced_import(db, "height_1y_ago", version)?;
let _14m = ImportableVec::forced_import(db, "height_14m_ago", version)?;
let _2y = ImportableVec::forced_import(db, "height_2y_ago", version)?;
let _26m = ImportableVec::forced_import(db, "height_26m_ago", version)?;
let _3y = ImportableVec::forced_import(db, "height_3y_ago", version)?;
let _200w = ImportableVec::forced_import(db, "height_200w_ago", version)?;
let _4y = ImportableVec::forced_import(db, "height_4y_ago", version)?;
let _5y = ImportableVec::forced_import(db, "height_5y_ago", version)?;
let _6y = ImportableVec::forced_import(db, "height_6y_ago", version)?;
let _8y = ImportableVec::forced_import(db, "height_8y_ago", version)?;
let _9y = ImportableVec::forced_import(db, "height_9y_ago", version)?;
let _10y = ImportableVec::forced_import(db, "height_10y_ago", version)?;
let _12y = ImportableVec::forced_import(db, "height_12y_ago", version)?;
let _14y = ImportableVec::forced_import(db, "height_14y_ago", version)?;
let _26y = ImportableVec::forced_import(db, "height_26y_ago", version)?;
let cached_window_starts = CachedWindowStarts(Windows {
_24h: CachedVec::new(&_24h),
_1w: CachedVec::new(&_1w),
_1m: CachedVec::new(&_1m),
_1y: CachedVec::new(&_1y),
});
Ok(Self {
_1h: ImportableVec::forced_import(db, "height_1h_ago", version)?,
_24h: ImportableVec::forced_import(db, "height_24h_ago", version)?,
_3d: ImportableVec::forced_import(db, "height_3d_ago", version)?,
_1w: ImportableVec::forced_import(db, "height_1w_ago", version)?,
_8d: ImportableVec::forced_import(db, "height_8d_ago", version)?,
_9d: ImportableVec::forced_import(db, "height_9d_ago", version)?,
_12d: ImportableVec::forced_import(db, "height_12d_ago", version)?,
_13d: ImportableVec::forced_import(db, "height_13d_ago", version)?,
_2w: ImportableVec::forced_import(db, "height_2w_ago", version)?,
_21d: ImportableVec::forced_import(db, "height_21d_ago", version)?,
_26d: ImportableVec::forced_import(db, "height_26d_ago", version)?,
_1m: ImportableVec::forced_import(db, "height_1m_ago", version)?,
_34d: ImportableVec::forced_import(db, "height_34d_ago", version)?,
_55d: ImportableVec::forced_import(db, "height_55d_ago", version)?,
_2m: ImportableVec::forced_import(db, "height_2m_ago", version)?,
_9w: ImportableVec::forced_import(db, "height_9w_ago", version)?,
_12w: ImportableVec::forced_import(db, "height_12w_ago", version)?,
_89d: ImportableVec::forced_import(db, "height_89d_ago", version)?,
_3m: ImportableVec::forced_import(db, "height_3m_ago", version)?,
_14w: ImportableVec::forced_import(db, "height_14w_ago", version)?,
_111d: ImportableVec::forced_import(db, "height_111d_ago", version)?,
_144d: ImportableVec::forced_import(db, "height_144d_ago", version)?,
_6m: ImportableVec::forced_import(db, "height_6m_ago", version)?,
_26w: ImportableVec::forced_import(db, "height_26w_ago", version)?,
_200d: ImportableVec::forced_import(db, "height_200d_ago", version)?,
_9m: ImportableVec::forced_import(db, "height_9m_ago", version)?,
_350d: ImportableVec::forced_import(db, "height_350d_ago", version)?,
_12m: ImportableVec::forced_import(db, "height_12m_ago", version)?,
_1y: ImportableVec::forced_import(db, "height_1y_ago", version)?,
_14m: ImportableVec::forced_import(db, "height_14m_ago", version)?,
_2y: ImportableVec::forced_import(db, "height_2y_ago", version)?,
_26m: ImportableVec::forced_import(db, "height_26m_ago", version)?,
_3y: ImportableVec::forced_import(db, "height_3y_ago", version)?,
_200w: ImportableVec::forced_import(db, "height_200w_ago", version)?,
_4y: ImportableVec::forced_import(db, "height_4y_ago", version)?,
_5y: ImportableVec::forced_import(db, "height_5y_ago", version)?,
_6y: ImportableVec::forced_import(db, "height_6y_ago", version)?,
_8y: ImportableVec::forced_import(db, "height_8y_ago", version)?,
_9y: ImportableVec::forced_import(db, "height_9y_ago", version)?,
_10y: ImportableVec::forced_import(db, "height_10y_ago", version)?,
_12y: ImportableVec::forced_import(db, "height_12y_ago", version)?,
_14y: ImportableVec::forced_import(db, "height_14y_ago", version)?,
_26y: ImportableVec::forced_import(db, "height_26y_ago", version)?,
cached_window_starts,
_1h, _24h, _3d, _1w, _8d, _9d, _12d, _13d, _2w, _21d, _26d,
_1m, _34d, _55d, _2m, _9w, _12w, _89d, _3m, _14w, _111d, _144d,
_6m, _26w, _200d, _9m, _350d, _12m, _1y, _14m, _2y, _26m, _3y,
_200w, _4y, _5y, _6y, _8y, _9y, _10y, _12y, _14y, _26y,
})
}
@@ -112,6 +127,7 @@ impl Vecs {
}
}
pub fn start_vec(&self, days: usize) -> &EagerVec<PcoVec<Height, Height>> {
match days {
1 => &self._24h,

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedPerBlockFull, ResolutionsFull},
internal::{CachedWindowStarts, ComputedPerBlockFull, ResolutionsFull},
};
impl Vecs {
@@ -13,10 +13,23 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
vbytes: ComputedPerBlockFull::forced_import(db, "block_vbytes", version, indexes)?,
size: ResolutionsFull::forced_import(db, "block_size", version, indexes)?,
vbytes: ComputedPerBlockFull::forced_import(
db,
"block_vbytes",
version,
indexes,
cached_starts,
)?,
size: ResolutionsFull::forced_import(
db,
"block_size",
version,
indexes,
cached_starts,
)?,
})
}
}

View File

@@ -24,7 +24,7 @@ impl Vecs {
)?;
self.fullness
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ResolutionsFull, PercentPerBlockRollingAverage},
internal::{CachedWindowStarts, ResolutionsFull, PercentPerBlockRollingAverage},
};
impl Vecs {
@@ -13,15 +13,17 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let weight =
ResolutionsFull::forced_import(db, "block_weight", version, indexes)?;
ResolutionsFull::forced_import(db, "block_weight", version, indexes, cached_starts)?;
let fullness = PercentPerBlockRollingAverage::forced_import(
db,
"block_fullness",
version,
indexes,
cached_starts,
)?;
Ok(Self { weight, fullness })

View File

@@ -3,23 +3,20 @@ use brk_types::{Bitcoin, CheckedSub, Indexes, StoredF64};
use vecdb::Exit;
use super::Vecs;
use crate::{blocks, distribution};
use crate::distribution;
impl Vecs {
pub(crate) fn compute(
&mut self,
starting_indexes: &Indexes,
blocks: &blocks::Vecs,
distribution: &distribution::Vecs,
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.lookback.window_starts();
let all_metrics = &distribution.utxo_cohorts.all.metrics;
let circulating_supply = &all_metrics.supply.total.sats.height;
self.coinblocks_created
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
circulating_supply,
@@ -30,7 +27,7 @@ impl Vecs {
})?;
self.coinblocks_stored
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
&self.coinblocks_created.raw.height,

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedPerBlock, ComputedPerBlockCumulativeSum},
internal::{CachedWindowStarts, ComputedPerBlock, ComputedPerBlockCumulativeWithSums},
};
impl Vecs {
@@ -13,19 +13,22 @@ impl Vecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
coinblocks_created: ComputedPerBlockCumulativeSum::forced_import(
coinblocks_created: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"coinblocks_created",
version,
indexes,
cached_starts,
)?,
coinblocks_stored: ComputedPerBlockCumulativeSum::forced_import(
coinblocks_stored: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"coinblocks_stored",
version,
indexes,
cached_starts,
)?,
liveliness: ComputedPerBlock::forced_import(db, "liveliness", version, indexes)?,
vaultedness: ComputedPerBlock::forced_import(db, "vaultedness", version, indexes)?,

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedPerBlock, ComputedPerBlockCumulativeSum};
use crate::internal::{ComputedPerBlock, ComputedPerBlockCumulativeWithSums};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub coinblocks_created: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub coinblocks_stored: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub coinblocks_created: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
pub coinblocks_stored: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
pub liveliness: ComputedPerBlock<StoredF64, M>,
pub vaultedness: ComputedPerBlock<StoredF64, M>,
pub ratio: ComputedPerBlock<StoredF64, M>,

View File

@@ -19,7 +19,7 @@ impl Vecs {
) -> Result<()> {
// Activity computes first (liveliness, vaultedness, etc.)
self.activity
.compute(starting_indexes, blocks, distribution, exit)?;
.compute(starting_indexes, distribution, exit)?;
// Phase 2: supply, adjusted, value are independent (all depend only on activity)
let (r1, r2) = rayon::join(
@@ -37,7 +37,6 @@ impl Vecs {
self.value.compute(
starting_indexes,
prices,
blocks,
distribution,
&self.activity,
exit,

View File

@@ -13,18 +13,21 @@ use super::{
ValueVecs, Vecs,
};
use crate::internal::CachedWindowStarts;
impl Vecs {
pub(crate) fn forced_import(
parent_path: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let db = open_db(parent_path, DB_NAME, 1_000_000)?;
let version = parent_version;
let v1 = version + Version::ONE;
let activity = ActivityVecs::forced_import(&db, version, indexes)?;
let activity = ActivityVecs::forced_import(&db, version, indexes, cached_starts)?;
let supply = SupplyVecs::forced_import(&db, v1, indexes)?;
let value = ValueVecs::forced_import(&db, v1, indexes)?;
let value = ValueVecs::forced_import(&db, v1, indexes, cached_starts)?;
let cap = CapVecs::forced_import(&db, v1, indexes)?;
let prices = PricesVecs::forced_import(&db, version, indexes)?;
let adjusted = AdjustedVecs::forced_import(&db, version, indexes)?;

View File

@@ -4,27 +4,24 @@ use vecdb::Exit;
use super::super::activity;
use super::Vecs;
use crate::{blocks, distribution, prices};
use crate::{distribution, prices};
impl Vecs {
pub(crate) fn compute(
&mut self,
starting_indexes: &Indexes,
prices: &prices::Vecs,
blocks: &blocks::Vecs,
distribution: &distribution::Vecs,
activity: &activity::Vecs,
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.lookback.window_starts();
let all_metrics = &distribution.utxo_cohorts.all.metrics;
let coinblocks_destroyed = &distribution.coinblocks_destroyed;
let coindays_destroyed = &all_metrics.activity.coindays_destroyed;
let circulating_supply = &all_metrics.supply.total.btc.height;
self.destroyed
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.spot.usd.height,
@@ -35,7 +32,7 @@ impl Vecs {
})?;
self.created
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.spot.usd.height,
@@ -46,7 +43,7 @@ impl Vecs {
})?;
self.stored
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.spot.usd.height,
@@ -60,7 +57,7 @@ impl Vecs {
// Supply-adjusted to account for growing supply over time
// This is a key input for Reserve Risk / HODL Bank calculation
self.vocdd
.compute(starting_indexes.height, &window_starts, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_transform3(
starting_indexes.height,
&prices.spot.usd.height,

View File

@@ -3,38 +3,46 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedPerBlockCumulativeSum};
use crate::{
indexes,
internal::{CachedWindowStarts, ComputedPerBlockCumulativeWithSums},
};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
destroyed: ComputedPerBlockCumulativeSum::forced_import(
destroyed: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"cointime_value_destroyed",
version,
indexes,
cached_starts,
)?,
created: ComputedPerBlockCumulativeSum::forced_import(
created: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"cointime_value_created",
version,
indexes,
cached_starts,
)?,
stored: ComputedPerBlockCumulativeSum::forced_import(
stored: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"cointime_value_stored",
version,
indexes,
cached_starts,
)?,
vocdd: ComputedPerBlockCumulativeSum::forced_import(
vocdd: ComputedPerBlockCumulativeWithSums::forced_import(
db,
"vocdd",
version + Version::ONE,
indexes,
cached_starts,
)?,
})
}

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::ComputedPerBlockCumulativeSum;
use crate::internal::ComputedPerBlockCumulativeWithSums;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub destroyed: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub created: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub stored: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub vocdd: ComputedPerBlockCumulativeSum<StoredF64, M>,
pub destroyed: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
pub created: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
pub stored: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
pub vocdd: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
}

View File

@@ -19,7 +19,7 @@ use vecdb::{AnyStoredVec, AnyVec, Database, Exit, Rw, StorageMode, WritableVec};
use crate::{
indexes,
internal::{ComputedPerBlockRollingAverage, WindowStarts},
internal::{CachedWindowStarts, ComputedPerBlockRollingAverage},
};
/// Per-block activity counts - reset each block.
@@ -77,6 +77,7 @@ impl ActivityCountVecs {
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
reactivated: ComputedPerBlockRollingAverage::forced_import(
@@ -84,24 +85,28 @@ impl ActivityCountVecs {
&format!("{name}_reactivated"),
version,
indexes,
cached_starts,
)?,
sending: ComputedPerBlockRollingAverage::forced_import(
db,
&format!("{name}_sending"),
version,
indexes,
cached_starts,
)?,
receiving: ComputedPerBlockRollingAverage::forced_import(
db,
&format!("{name}_receiving"),
version,
indexes,
cached_starts,
)?,
both: ComputedPerBlockRollingAverage::forced_import(
db,
&format!("{name}_both"),
version,
indexes,
cached_starts,
)?,
})
}
@@ -156,13 +161,12 @@ impl ActivityCountVecs {
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()> {
self.reactivated.compute_rest(max_from, windows, exit)?;
self.sending.compute_rest(max_from, windows, exit)?;
self.receiving.compute_rest(max_from, windows, exit)?;
self.both.compute_rest(max_from, windows, exit)?;
self.reactivated.compute_rest(max_from, exit)?;
self.sending.compute_rest(max_from, exit)?;
self.receiving.compute_rest(max_from, exit)?;
self.both.compute_rest(max_from, exit)?;
Ok(())
}
}
@@ -184,6 +188,7 @@ impl AddressTypeToActivityCountVecs {
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self::from(
ByAddressType::<ActivityCountVecs>::new_with_name(|type_name| {
@@ -192,6 +197,7 @@ impl AddressTypeToActivityCountVecs {
&format!("{type_name}_{name}"),
version,
indexes,
cached_starts,
)
})?,
))
@@ -228,11 +234,10 @@ impl AddressTypeToActivityCountVecs {
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()> {
for type_vecs in self.0.values_mut() {
type_vecs.compute_rest(max_from, windows, exit)?;
type_vecs.compute_rest(max_from, exit)?;
}
Ok(())
}
@@ -263,11 +268,12 @@ impl AddressActivityVecs {
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self {
all: ActivityCountVecs::forced_import(db, name, version, indexes)?,
all: ActivityCountVecs::forced_import(db, name, version, indexes, cached_starts)?,
by_addresstype: AddressTypeToActivityCountVecs::forced_import(
db, name, version, indexes,
db, name, version, indexes, cached_starts,
)?,
})
}
@@ -295,11 +301,10 @@ impl AddressActivityVecs {
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()> {
self.all.compute_rest(max_from, windows, exit)?;
self.by_addresstype.compute_rest(max_from, windows, exit)?;
self.all.compute_rest(max_from, exit)?;
self.by_addresstype.compute_rest(max_from, exit)?;
Ok(())
}

View File

@@ -1,61 +1,53 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredI64, StoredU64, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use brk_types::{BasisPoints32, StoredI64, StoredU64, Version};
use crate::{
indexes,
internal::{WindowStarts, RollingDelta},
internal::{CachedWindowStarts, LazyRollingDeltasFromHeight},
};
use super::AddressCountsVecs;
#[derive(Traversable)]
pub struct DeltaVecs<M: StorageMode = Rw> {
pub all: RollingDelta<StoredU64, StoredI64, M>,
type AddrDelta = LazyRollingDeltasFromHeight<StoredU64, StoredI64, BasisPoints32>;
#[derive(Clone, Traversable)]
pub struct DeltaVecs {
pub all: AddrDelta,
#[traversable(flatten)]
pub by_addresstype: ByAddressType<RollingDelta<StoredU64, StoredI64, M>>,
pub by_addresstype: ByAddressType<AddrDelta>,
}
impl DeltaVecs {
pub(crate) fn forced_import(
db: &Database,
pub(crate) fn new(
version: Version,
address_count: &AddressCountsVecs,
cached_starts: &CachedWindowStarts,
indexes: &indexes::Vecs,
) -> Result<Self> {
) -> Self {
let version = version + Version::TWO;
let all = RollingDelta::forced_import(db, "address_count", version, indexes)?;
let all = LazyRollingDeltasFromHeight::new(
"address_count",
version,
&address_count.all.0.height,
cached_starts,
indexes,
);
let by_addresstype = ByAddressType::new_with_name(|name| {
RollingDelta::forced_import(db, &format!("{name}_address_count"), version, indexes)
})?;
let by_addresstype = address_count.by_addresstype.map_with_name(|name, addr| {
LazyRollingDeltasFromHeight::new(
&format!("{name}_address_count"),
version,
&addr.0.height,
cached_starts,
indexes,
)
});
Ok(Self {
Self {
all,
by_addresstype,
})
}
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
address_count: &AddressCountsVecs,
exit: &Exit,
) -> Result<()> {
self.all
.compute(max_from, windows, &address_count.all.height, exit)?;
for ((_, growth), (_, addr)) in self
.by_addresstype
.iter_mut()
.zip(address_count.by_addresstype.iter())
{
growth.compute(max_from, windows, &addr.height, exit)?;
}
Ok(())
}
}

View File

@@ -6,7 +6,7 @@ use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedPerBlockSum, WindowStarts},
internal::{CachedWindowStarts, ComputedPerBlockCumulativeWithSums},
};
use super::TotalAddressCountVecs;
@@ -14,9 +14,9 @@ use super::TotalAddressCountVecs;
/// New address count per block (global + per-type)
#[derive(Traversable)]
pub struct NewAddressCountVecs<M: StorageMode = Rw> {
pub all: ComputedPerBlockSum<StoredU64, M>,
pub all: ComputedPerBlockCumulativeWithSums<StoredU64, StoredU64, M>,
#[traversable(flatten)]
pub by_addresstype: ByAddressType<ComputedPerBlockSum<StoredU64, M>>,
pub by_addresstype: ByAddressType<ComputedPerBlockCumulativeWithSums<StoredU64, StoredU64, M>>,
}
impl NewAddressCountVecs {
@@ -24,18 +24,25 @@ impl NewAddressCountVecs {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let all = ComputedPerBlockSum::forced_import(db, "new_address_count", version, indexes)?;
let all = ComputedPerBlockCumulativeWithSums::forced_import(
db,
"new_address_count",
version,
indexes,
cached_starts,
)?;
let by_addresstype: ByAddressType<ComputedPerBlockSum<StoredU64>> =
ByAddressType::new_with_name(|name| {
ComputedPerBlockSum::forced_import(
db,
&format!("{name}_new_address_count"),
version,
indexes,
)
})?;
let by_addresstype = ByAddressType::new_with_name(|name| {
ComputedPerBlockCumulativeWithSums::forced_import(
db,
&format!("{name}_new_address_count"),
version,
indexes,
cached_starts,
)
})?;
Ok(Self {
all,
@@ -46,11 +53,10 @@ impl NewAddressCountVecs {
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
total_address_count: &TotalAddressCountVecs,
exit: &Exit,
) -> Result<()> {
self.all.compute(max_from, windows, exit, |height_vec| {
self.all.compute(max_from, exit, |height_vec| {
Ok(height_vec.compute_change(max_from, &total_address_count.all.height, 1, exit)?)
})?;
@@ -59,7 +65,7 @@ impl NewAddressCountVecs {
.iter_mut()
.zip(total_address_count.by_addresstype.iter())
{
new.compute(max_from, windows, exit, |height_vec| {
new.compute(max_from, exit, |height_vec| {
Ok(height_vec.compute_change(max_from, &total.height, 1, exit)?)
})?;
}

View File

@@ -10,7 +10,7 @@ use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, Rw, StorageMode};
use crate::{blocks, distribution::DynCohortVecs, indexes, prices};
use crate::{distribution::DynCohortVecs, indexes, internal::CachedWindowStarts, prices};
use super::{super::traits::CohortVecs, vecs::AddressCohortVecs};
@@ -27,6 +27,7 @@ impl AddressCohorts {
version: Version,
indexes: &indexes::Vecs,
states_path: &Path,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let v = version + VERSION;
@@ -34,7 +35,7 @@ impl AddressCohorts {
let create =
|filter: Filter, name: &'static str, has_state: bool| -> Result<AddressCohortVecs> {
let sp = if has_state { Some(states_path) } else { None };
AddressCohortVecs::forced_import(db, filter, name, v, indexes, sp)
AddressCohortVecs::forced_import(db, filter, name, v, indexes, sp, cached_starts)
};
let full = |f: Filter, name: &'static str| create(f, name, true);
@@ -90,22 +91,12 @@ impl AddressCohorts {
/// First phase of post-processing: compute index transforms.
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.par_iter_mut().try_for_each(|v| {
v.address_count_delta.compute(
starting_indexes.height,
&blocks.lookback._1m,
&v.address_count.height,
exit,
)
})?;
self.par_iter_mut()
.try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?;
.try_for_each(|v| v.compute_rest_part1(prices, starting_indexes, exit))?;
Ok(())
}

View File

@@ -3,15 +3,14 @@ use std::path::Path;
use brk_cohort::{CohortContext, Filter, Filtered};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Height, Indexes, StoredI64, StoredU64, Version};
use brk_types::{BasisPointsSigned32, Cents, Height, Indexes, StoredI64, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, Rw, StorageMode, WritableVec};
use crate::{
blocks,
distribution::state::{AddressCohortState, MinimalRealizedState},
indexes,
internal::{ComputedPerBlock, RollingDelta1m},
internal::{CachedWindowStarts, ComputedPerBlockWithDeltas},
prices,
};
@@ -28,9 +27,7 @@ pub struct AddressCohortVecs<M: StorageMode = Rw> {
#[traversable(flatten)]
pub metrics: MinimalCohortMetrics<M>,
pub address_count: ComputedPerBlock<StoredU64, M>,
#[traversable(wrap = "address_count", rename = "delta")]
pub address_count_delta: RollingDelta1m<StoredU64, StoredI64, M>,
pub address_count: ComputedPerBlockWithDeltas<StoredU64, StoredI64, BasisPointsSigned32, M>,
}
impl AddressCohortVecs {
@@ -41,6 +38,7 @@ impl AddressCohortVecs {
version: Version,
indexes: &indexes::Vecs,
states_path: Option<&Path>,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let full_name = CohortContext::Address.full_name(&filter, name);
@@ -50,27 +48,23 @@ impl AddressCohortVecs {
full_name: &full_name,
version,
indexes,
cached_starts,
};
let address_count = ComputedPerBlockWithDeltas::forced_import(
db,
&cfg.name("address_count"),
version,
Version::ONE,
indexes,
cached_starts,
)?;
Ok(Self {
starting_height: None,
state: states_path.map(|path| Box::new(AddressCohortState::new(path, &full_name))),
metrics: MinimalCohortMetrics::forced_import(&cfg)?,
address_count: ComputedPerBlock::forced_import(
db,
&cfg.name("address_count"),
version,
indexes,
)?,
address_count_delta: RollingDelta1m::forced_import(
db,
&cfg.name("address_count_delta"),
version + Version::ONE,
indexes,
)?,
address_count,
})
}
@@ -189,13 +183,12 @@ impl DynCohortVecs for AddressCohortVecs {
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)
.compute_rest_part1(prices, starting_indexes, exit)
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -2,7 +2,7 @@ use brk_error::Result;
use brk_types::{Cents, Height, Indexes, Version};
use vecdb::Exit;
use crate::{blocks, prices};
use crate::prices;
/// Dynamic dispatch trait for cohort vectors.
///
@@ -34,7 +34,6 @@ pub trait DynCohortVecs: Send + Sync {
/// First phase of post-processing computations.
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,

View File

@@ -19,13 +19,13 @@ use crate::{
metrics::{
AllCohortMetrics, BasicCohortMetrics, CohortMetricsBase,
CoreCohortMetrics, ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, ImportConfig,
MinimalCohortMetrics, ProfitabilityMetrics, RealizedFullAccum, SupplyFull,
MinimalCohortMetrics, ProfitabilityMetrics, RealizedFullAccum, SupplyCore,
TypeCohortMetrics,
},
state::UTXOCohortState,
},
indexes,
internal::AmountPerBlock,
internal::{AmountPerBlock, CachedWindowStarts},
prices,
};
@@ -70,6 +70,7 @@ impl UTXOCohorts<Rw> {
version: Version,
indexes: &indexes::Vecs,
states_path: &Path,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let v = version + VERSION;
@@ -81,8 +82,9 @@ impl UTXOCohorts<Rw> {
full_name: &all_full_name,
version: v + Version::ONE,
indexes,
cached_starts,
};
let all_supply = SupplyFull::forced_import(&all_cfg)?;
let all_supply = SupplyCore::forced_import(&all_cfg)?;
// Phase 2: Import separate (stateful) cohorts.
@@ -96,6 +98,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
@@ -115,6 +118,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
@@ -136,6 +140,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
@@ -155,6 +160,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
@@ -186,6 +192,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
UTXOCohortVecs::new(None, ExtendedAdjustedCohortMetrics::forced_import(&cfg)?)
};
@@ -200,6 +207,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
UTXOCohortVecs::new(None, ExtendedCohortMetrics::forced_import(&cfg)?)
};
@@ -214,6 +222,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
Ok(UTXOCohortVecs::new(
None,
@@ -236,6 +245,7 @@ impl UTXOCohorts<Rw> {
full_name: &full_name,
version: v,
indexes,
cached_starts,
};
Ok(UTXOCohortVecs::new(
None,
@@ -459,7 +469,6 @@ impl UTXOCohorts<Rw> {
/// First phase of post-processing: compute index transforms.
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
@@ -497,7 +506,7 @@ impl UTXOCohorts<Rw> {
);
all.extend(self.type_.iter_mut().map(|x| x as &mut dyn DynCohortVecs));
all.into_par_iter()
.try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?;
.try_for_each(|v| v.compute_rest_part1(prices, starting_indexes, exit))?;
}
// Compute matured cents from sats × price
@@ -606,19 +615,19 @@ impl UTXOCohorts<Rw> {
Box::new(|| {
age_range.par_iter_mut().try_for_each(|v| {
v.metrics
.compute_rest_part2(blocks, prices, starting_indexes, ss, exit)
.compute_rest_part2(prices, starting_indexes, ss, exit)
})
}),
Box::new(|| {
under_age.par_iter_mut().try_for_each(|v| {
v.metrics
.compute_rest_part2(blocks, prices, starting_indexes, ss, exit)
.compute_rest_part2(prices, starting_indexes, ss, exit)
})
}),
Box::new(|| {
over_age.par_iter_mut().try_for_each(|v| {
v.metrics
.compute_rest_part2(blocks, prices, starting_indexes, ss, exit)
.compute_rest_part2(prices, starting_indexes, ss, exit)
})
}),
Box::new(|| {
@@ -629,13 +638,13 @@ impl UTXOCohorts<Rw> {
Box::new(|| {
epoch.par_iter_mut().try_for_each(|v| {
v.metrics
.compute_rest_part2(blocks, prices, starting_indexes, ss, exit)
.compute_rest_part2(prices, starting_indexes, ss, exit)
})
}),
Box::new(|| {
class.par_iter_mut().try_for_each(|v| {
v.metrics
.compute_rest_part2(blocks, prices, starting_indexes, ss, exit)
.compute_rest_part2(prices, starting_indexes, ss, exit)
})
}),
Box::new(|| {

View File

@@ -3,7 +3,7 @@ use brk_error::Result;
use brk_types::{Cents, Height, Indexes, Version};
use vecdb::{Exit, ReadableVec};
use crate::{blocks, distribution::{cohorts::traits::DynCohortVecs, metrics::CoreCohortMetrics}, prices};
use crate::{distribution::{cohorts::traits::DynCohortVecs, metrics::CoreCohortMetrics}, prices};
use super::UTXOCohortVecs;
@@ -64,13 +64,12 @@ impl DynCohortVecs for UTXOCohortVecs<CoreCohortMetrics> {
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)
.compute_rest_part1(prices, starting_indexes, exit)
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -4,7 +4,6 @@ use brk_types::{Cents, Height, Indexes, Version};
use vecdb::{Exit, ReadableVec};
use crate::{
blocks,
distribution::{cohorts::traits::DynCohortVecs, metrics::MinimalCohortMetrics},
prices,
};
@@ -57,13 +56,12 @@ impl DynCohortVecs for UTXOCohortVecs<MinimalCohortMetrics> {
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)
.compute_rest_part1(prices, starting_indexes, exit)
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -49,7 +49,6 @@ use brk_types::{Cents, Height, Indexes, Version};
use vecdb::{Exit, ReadableVec};
use crate::{
blocks,
distribution::{
cohorts::traits::DynCohortVecs,
metrics::{CohortMetricsBase, CohortMetricsState},
@@ -71,7 +70,10 @@ pub struct UTXOCohortVecs<M: CohortMetricsState> {
}
impl<M: CohortMetricsState> UTXOCohortVecs<M> {
pub(crate) fn new(state: Option<Box<UTXOCohortState<M::Realized, M::CostBasis>>>, metrics: M) -> Self {
pub(crate) fn new(
state: Option<Box<UTXOCohortState<M::Realized, M::CostBasis>>>,
metrics: M,
) -> Self {
Self {
state_starting_height: None,
state,
@@ -183,24 +185,20 @@ impl<M: CohortMetricsBase + Traversable> DynCohortVecs for UTXOCohortVecs<M> {
_is_day_boundary: bool,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics.compute_and_push_unrealized(
height,
height_price,
state,
)?;
self.metrics
.compute_and_push_unrealized(height, height_price, state)?;
}
Ok(())
}
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)?;
.compute_rest_part1(prices, starting_indexes, exit)?;
Ok(())
}

View File

@@ -3,7 +3,7 @@ use brk_error::Result;
use brk_types::{Cents, Height, Indexes, Version};
use vecdb::{Exit, ReadableVec};
use crate::{blocks, distribution::cohorts::traits::DynCohortVecs, distribution::metrics::TypeCohortMetrics, prices};
use crate::{distribution::cohorts::traits::DynCohortVecs, distribution::metrics::TypeCohortMetrics, prices};
use super::UTXOCohortVecs;
@@ -63,13 +63,12 @@ impl DynCohortVecs for UTXOCohortVecs<TypeCohortMetrics> {
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.metrics
.compute_rest_part1(blocks, prices, starting_indexes, exit)
.compute_rest_part1(prices, starting_indexes, exit)
}
fn write_state(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -1,84 +0,0 @@
use brk_error::Result;
use brk_types::{Dollars, Height, Indexes};
use tracing::info;
use vecdb::{Exit, ReadableVec};
use crate::{blocks, prices};
use super::super::cohorts::{AddressCohorts, UTXOCohorts};
/// Compute overlapping cohorts from component cohorts.
///
/// For example:
/// - ">=1d" UTXO cohort is computed from sum of age_range cohorts that match
/// - ">=1 BTC" address cohort is computed from sum of amount_range cohorts that match
pub(crate) fn compute_overlapping(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
info!("Computing overlapping cohorts...");
let (r1, r2) = rayon::join(
|| utxo_cohorts.compute_overlapping_vecs(starting_indexes, exit),
|| address_cohorts.compute_overlapping_vecs(starting_indexes, exit),
);
r1?;
r2?;
Ok(())
}
/// First phase of post-processing: compute index transforms.
///
/// Converts height-indexed data to day1-indexed data and other transforms.
pub(crate) fn compute_rest_part1(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
info!("Computing rest part 1...");
let (r1, r2) = rayon::join(
|| utxo_cohorts.compute_rest_part1(blocks, prices, starting_indexes, exit),
|| address_cohorts.compute_rest_part1(blocks, prices, starting_indexes, exit),
);
r1?;
r2?;
Ok(())
}
/// Second phase of post-processing: compute relative metrics.
///
/// Computes supply ratios, market cap ratios, etc. using total references.
pub(crate) fn compute_rest_part2<HM>(
utxo_cohorts: &mut UTXOCohorts,
address_cohorts: &mut AddressCohorts,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
height_to_market_cap: &HM,
exit: &Exit,
) -> Result<()>
where
HM: ReadableVec<Height, Dollars> + Sync,
{
info!("Computing rest part 2...");
utxo_cohorts.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
exit,
)?;
address_cohorts.compute_rest_part2(prices, starting_indexes, exit)?;
Ok(())
}

View File

@@ -1,4 +1,3 @@
pub mod aggregates;
mod block_loop;
mod context;
mod readers;

View File

@@ -4,20 +4,19 @@ use brk_types::{Bitcoin, Height, Indexes, Sats, StoredF64, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{
blocks,
distribution::{metrics::ImportConfig, state::{CohortState, CostBasisOps, RealizedOps}},
internal::{AmountPerBlockWithSum24h, PerBlockWithSum24h},
internal::{AmountPerBlockCumulativeWithSums, ComputedPerBlockCumulativeWithSums},
prices,
};
#[derive(Traversable)]
pub struct ActivityCore<M: StorageMode = Rw> {
pub sent: PerBlockWithSum24h<Sats, M>,
pub coindays_destroyed: PerBlockWithSum24h<StoredF64, M>,
pub sent: ComputedPerBlockCumulativeWithSums<Sats, Sats, M>,
pub coindays_destroyed: ComputedPerBlockCumulativeWithSums<StoredF64, StoredF64, M>,
#[traversable(wrap = "sent", rename = "in_profit")]
pub sent_in_profit: AmountPerBlockWithSum24h<M>,
pub sent_in_profit: AmountPerBlockCumulativeWithSums<M>,
#[traversable(wrap = "sent", rename = "in_loss")]
pub sent_in_loss: AmountPerBlockWithSum24h<M>,
pub sent_in_loss: AmountPerBlockCumulativeWithSums<M>,
}
impl ActivityCore {
@@ -103,54 +102,26 @@ impl ActivityCore {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.sent.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.sent.raw.height,
exit,
)?;
self.coindays_destroyed.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.coindays_destroyed.raw.height,
exit,
)?;
self.sent
.compute_rest(starting_indexes.height, exit)?;
self.coindays_destroyed
.compute_rest(starting_indexes.height, exit)?;
Ok(())
}
pub(crate) fn compute_sent_profitability(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.sent_in_profit
.raw
.compute(prices, starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, prices, exit)?;
self.sent_in_loss
.raw
.compute(prices, starting_indexes.height, exit)?;
self.sent_in_profit.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.sent_in_profit.raw.sats.height,
&self.sent_in_profit.raw.cents.height,
exit,
)?;
self.sent_in_loss.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.sent_in_loss.raw.sats.height,
&self.sent_in_loss.raw.cents.height,
exit,
)?;
.compute_rest(starting_indexes.height, prices, exit)?;
Ok(())
}
}

View File

@@ -4,9 +4,9 @@ use brk_types::{Bitcoin, Height, Indexes, Sats, StoredF32, StoredF64, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use crate::internal::{ComputedPerBlock, Identity, LazyPerBlock, RollingWindowsFrom1w};
use crate::internal::{ComputedPerBlock, Identity, LazyPerBlock};
use crate::{blocks, distribution::{metrics::ImportConfig, state::{CohortState, CostBasisOps, RealizedOps}}};
use crate::distribution::{metrics::ImportConfig, state::{CohortState, CostBasisOps, RealizedOps}};
use super::ActivityCore;
@@ -17,18 +17,6 @@ pub struct ActivityFull<M: StorageMode = Rw> {
#[traversable(flatten)]
pub inner: ActivityCore<M>,
#[traversable(wrap = "coindays_destroyed", rename = "cumulative")]
pub coindays_destroyed_cumulative: ComputedPerBlock<StoredF64, M>,
#[traversable(wrap = "coindays_destroyed", rename = "sum")]
pub coindays_destroyed_sum: RollingWindowsFrom1w<StoredF64, M>,
#[traversable(wrap = "sent", rename = "sum")]
pub sent_sum_extended: RollingWindowsFrom1w<Sats, M>,
#[traversable(wrap = "sent/in_profit", rename = "sum")]
pub sent_in_profit_sum_extended: RollingWindowsFrom1w<Sats, M>,
#[traversable(wrap = "sent/in_loss", rename = "sum")]
pub sent_in_loss_sum_extended: RollingWindowsFrom1w<Sats, M>,
pub coinyears_destroyed: LazyPerBlock<StoredF64, StoredF64>,
pub dormancy: ComputedPerBlock<StoredF32, M>,
@@ -38,23 +26,17 @@ pub struct ActivityFull<M: StorageMode = Rw> {
impl ActivityFull {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let coindays_destroyed_sum: RollingWindowsFrom1w<StoredF64> =
cfg.import("coindays_destroyed", v1)?;
let inner = ActivityCore::forced_import(cfg)?;
let coinyears_destroyed = LazyPerBlock::from_computed::<Identity<StoredF64>>(
let coinyears_destroyed = LazyPerBlock::from_height_source::<Identity<StoredF64>>(
&cfg.name("coinyears_destroyed"),
v1,
coindays_destroyed_sum._1y.height.read_only_boxed_clone(),
&coindays_destroyed_sum._1y,
cfg.version + v1,
inner.coindays_destroyed.sum._1y.height.read_only_boxed_clone(),
cfg.indexes,
);
Ok(Self {
inner: ActivityCore::forced_import(cfg)?,
coindays_destroyed_cumulative: cfg.import("coindays_destroyed_cumulative", v1)?,
coindays_destroyed_sum,
sent_sum_extended: cfg.import("sent", v1)?,
sent_in_profit_sum_extended: cfg.import("sent_in_profit", v1)?,
sent_in_loss_sum_extended: cfg.import("sent_in_loss", v1)?,
inner,
coinyears_destroyed,
dormancy: cfg.import("dormancy", v1)?,
velocity: cfg.import("velocity", v1)?,
@@ -92,50 +74,10 @@ impl ActivityFull {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.inner
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.coindays_destroyed_cumulative
.height
.compute_cumulative(
starting_indexes.height,
&self.inner.coindays_destroyed.raw.height,
exit,
)?;
let window_starts = blocks.lookback.window_starts();
self.coindays_destroyed_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.inner.coindays_destroyed.raw.height,
exit,
)?;
self.sent_sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.inner.sent.raw.height,
exit,
)?;
self.sent_in_profit_sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.inner.sent_in_profit.raw.sats.height,
exit,
)?;
self.sent_in_loss_sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.inner.sent_in_loss.raw.sats.height,
exit,
)?;
Ok(())
self.inner.compute_rest_part1(starting_indexes, exit)
}
pub(crate) fn compute_rest_part2(

View File

@@ -8,7 +8,7 @@ use brk_error::Result;
use brk_types::{Height, Indexes, Version};
use vecdb::Exit;
use crate::{blocks, distribution::state::{CohortState, CostBasisOps, RealizedOps}};
use crate::distribution::state::{CohortState, CostBasisOps, RealizedOps};
pub trait ActivityLike: Send + Sync {
fn as_core(&self) -> &ActivityCore;
@@ -28,7 +28,6 @@ pub trait ActivityLike: Send + Sync {
) -> Result<()>;
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()>;
@@ -47,8 +46,8 @@ impl ActivityLike for ActivityCore {
fn compute_from_stateful(&mut self, starting_indexes: &Indexes, others: &[&ActivityCore], exit: &Exit) -> Result<()> {
self.compute_from_stateful(starting_indexes, others, exit)
}
fn compute_rest_part1(&mut self, blocks: &blocks::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(blocks, starting_indexes, exit)
fn compute_rest_part1(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(starting_indexes, exit)
}
}
@@ -65,7 +64,7 @@ impl ActivityLike for ActivityFull {
fn compute_from_stateful(&mut self, starting_indexes: &Indexes, others: &[&ActivityCore], exit: &Exit) -> Result<()> {
self.compute_from_stateful(starting_indexes, others, exit)
}
fn compute_rest_part1(&mut self, blocks: &blocks::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(blocks, starting_indexes, exit)
fn compute_rest_part1(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(starting_indexes, exit)
}
}

View File

@@ -2,19 +2,18 @@ use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Cents, Dollars, Height, Indexes, Sats, SatsSigned, StoredI64, StoredU64,
Version,
Cents, Dollars, Height, Indexes, Version,
};
use vecdb::AnyStoredVec;
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, prices};
use crate::internal::RollingDeltaExcept1m;
use crate::distribution::metrics::{
ActivityFull, CohortMetricsBase, CostBasis, ImportConfig, OutputsFull,
AdjustedSopr, RealizedFull, RelativeForAll, SupplyFull, UnrealizedFull,
use crate::{
blocks,
distribution::metrics::{
ActivityFull, AdjustedSopr, CohortMetricsBase, CostBasis, ImportConfig, OutputsBase,
RealizedFull, RelativeForAll, SupplyCore, UnrealizedFull,
},
prices,
};
/// All-cohort metrics: extended realized + adjusted (as composable add-on),
@@ -24,8 +23,8 @@ use crate::distribution::metrics::{
pub struct AllCohortMetrics<M: StorageMode = Rw> {
#[traversable(skip)]
pub filter: Filter,
pub supply: Box<SupplyFull<M>>,
pub outputs: Box<OutputsFull<M>>,
pub supply: Box<SupplyCore<M>>,
pub outputs: Box<OutputsBase<M>>,
pub activity: Box<ActivityFull<M>>,
pub realized: Box<RealizedFull<M>>,
pub cost_basis: Box<CostBasis<M>>,
@@ -34,11 +33,6 @@ pub struct AllCohortMetrics<M: StorageMode = Rw> {
pub asopr: Box<AdjustedSopr<M>>,
#[traversable(flatten)]
pub relative: Box<RelativeForAll<M>>,
#[traversable(wrap = "supply", rename = "delta")]
pub supply_delta_extended: RollingDeltaExcept1m<Sats, SatsSigned, M>,
#[traversable(wrap = "outputs/unspent_count", rename = "delta")]
pub unspent_count_delta_extended: RollingDeltaExcept1m<StoredU64, StoredI64, M>,
}
impl CohortMetricsBase for AllCohortMetrics {
@@ -84,7 +78,7 @@ impl AllCohortMetrics {
/// reference for relative metric lazy vecs in other cohorts.
pub(crate) fn forced_import_with_supply(
cfg: &ImportConfig,
supply: SupplyFull,
supply: SupplyCore,
) -> Result<Self> {
let unrealized = UnrealizedFull::forced_import(cfg)?;
let realized = RealizedFull::forced_import(cfg)?;
@@ -95,15 +89,13 @@ impl AllCohortMetrics {
Ok(Self {
filter: cfg.filter.clone(),
supply: Box::new(supply),
outputs: Box::new(OutputsFull::forced_import(cfg)?),
outputs: Box::new(OutputsBase::forced_import(cfg)?),
activity: Box::new(ActivityFull::forced_import(cfg)?),
realized: Box::new(realized),
cost_basis: Box::new(CostBasis::forced_import(cfg)?),
unrealized: Box::new(unrealized),
asopr: Box::new(asopr),
relative: Box::new(relative),
supply_delta_extended: cfg.import("supply_delta", Version::ONE)?,
unspent_count_delta_extended: cfg.import("utxo_count_delta", Version::ONE)?,
})
}
@@ -135,7 +127,6 @@ impl AllCohortMetrics {
)?;
self.asopr.compute_rest_part2(
blocks,
starting_indexes,
&self.realized.minimal.sopr.value_created.raw.height,
&self.realized.minimal.sopr.value_destroyed.raw.height,
@@ -146,26 +137,12 @@ impl AllCohortMetrics {
self.relative.compute(
starting_indexes.height,
&self.supply.core,
&self.supply,
&self.unrealized,
height_to_market_cap,
exit,
)?;
let window_starts = blocks.lookback.window_starts();
self.supply_delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.supply.total.sats.height,
exit,
)?;
self.unspent_count_delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.outputs.unspent_count.height,
exit,
)?;
self.activity.compute_rest_part2(
starting_indexes,
&self.supply.total.sats.height,

View File

@@ -4,11 +4,12 @@ use brk_traversable::Traversable;
use brk_types::{Height, Indexes, Sats};
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityCore, CohortMetricsBase, ImportConfig, OutputsFull, RealizedCore,
RelativeToAll, SupplyFull, UnrealizedBase,
use crate::{
distribution::metrics::{
ActivityCore, CohortMetricsBase, ImportConfig, OutputsBase, RealizedCore, RelativeToAll,
SupplyCore, UnrealizedBase,
},
prices,
};
/// Basic cohort metrics: no extensions, with relative (rel_to_all).
@@ -17,8 +18,8 @@ use crate::distribution::metrics::{
pub struct BasicCohortMetrics<M: StorageMode = Rw> {
#[traversable(skip)]
pub filter: Filter,
pub supply: Box<SupplyFull<M>>,
pub outputs: Box<OutputsFull<M>>,
pub supply: Box<SupplyCore<M>>,
pub outputs: Box<OutputsBase<M>>,
pub activity: Box<ActivityCore<M>>,
pub realized: Box<RealizedCore<M>>,
pub unrealized: Box<UnrealizedBase<M>>,
@@ -46,7 +47,7 @@ impl CohortMetricsBase for BasicCohortMetrics {
impl BasicCohortMetrics {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyFull::forced_import(cfg)?;
let supply = SupplyCore::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedCore::forced_import(cfg)?;
@@ -55,7 +56,7 @@ impl BasicCohortMetrics {
Ok(Self {
filter: cfg.filter.clone(),
supply: Box::new(supply),
outputs: Box::new(OutputsFull::forced_import(cfg)?),
outputs: Box::new(OutputsBase::forced_import(cfg)?),
activity: Box::new(ActivityCore::forced_import(cfg)?),
realized: Box::new(realized),
unrealized: Box::new(unrealized),
@@ -65,14 +66,12 @@ impl BasicCohortMetrics {
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
blocks,
prices,
starting_indexes,
&self.supply.total.btc.height,
@@ -88,7 +87,7 @@ impl BasicCohortMetrics {
self.relative.compute(
starting_indexes.height,
&self.supply.core,
&self.supply,
all_supply_sats,
exit,
)?;

View File

@@ -4,19 +4,20 @@ use brk_traversable::Traversable;
use brk_types::{Height, Indexes, Sats, Version};
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityCore, CohortMetricsBase, RealizedCore, ImportConfig, OutputsFull,
RelativeToAll, SupplyFull, UnrealizedCore,
use crate::{
distribution::metrics::{
ActivityCore, CohortMetricsBase, ImportConfig, OutputsBase, RealizedCore, RelativeToAll,
SupplyCore, UnrealizedCore,
},
prices,
};
#[derive(Traversable)]
pub struct CoreCohortMetrics<M: StorageMode = Rw> {
#[traversable(skip)]
pub filter: Filter,
pub supply: Box<SupplyFull<M>>,
pub outputs: Box<OutputsFull<M>>,
pub supply: Box<SupplyCore<M>>,
pub outputs: Box<OutputsBase<M>>,
pub activity: Box<ActivityCore<M>>,
pub realized: Box<RealizedCore<M>>,
pub unrealized: Box<UnrealizedCore<M>>,
@@ -28,8 +29,8 @@ impl CoreCohortMetrics {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
filter: cfg.filter.clone(),
supply: Box::new(SupplyFull::forced_import(cfg)?),
outputs: Box::new(OutputsFull::forced_import(cfg)?),
supply: Box::new(SupplyCore::forced_import(cfg)?),
outputs: Box::new(OutputsBase::forced_import(cfg)?),
activity: Box::new(ActivityCore::forced_import(cfg)?),
realized: Box::new(RealizedCore::forced_import(cfg)?),
unrealized: Box::new(UnrealizedCore::forced_import(cfg)?),
@@ -100,41 +101,34 @@ impl CoreCohortMetrics {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.supply
.compute(prices, starting_indexes.height, exit)?;
self.supply
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.outputs
.compute_rest(blocks, starting_indexes, exit)?;
self.activity
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.activity
.compute_sent_profitability(blocks, prices, starting_indexes, exit)?;
.compute_sent_profitability(prices, starting_indexes, exit)?;
self.realized
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.unrealized.compute_rest(blocks, starting_indexes, exit)?;
self.unrealized.compute_rest(starting_indexes, exit)?;
Ok(())
}
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
blocks,
prices,
starting_indexes,
&self.supply.total.btc.height,
@@ -150,7 +144,7 @@ impl CoreCohortMetrics {
self.relative.compute(
starting_indexes.height,
&self.supply.core,
&self.supply,
all_supply_sats,
exit,
)?;

View File

@@ -2,18 +2,18 @@ use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Dollars, Height, Indexes, Sats, SatsSigned, StoredI64, StoredU64, Version,
Dollars, Height, Indexes, Sats, Version,
};
use vecdb::AnyStoredVec;
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, prices};
use crate::internal::RollingDeltaExcept1m;
use crate::distribution::metrics::{
ActivityFull, CohortMetricsBase, CostBasis, ImportConfig, OutputsFull,
RealizedFull, RelativeWithExtended, SupplyFull, UnrealizedFull,
use crate::{
blocks,
distribution::metrics::{
ActivityFull, CohortMetricsBase, CostBasis, ImportConfig, OutputsBase, RealizedFull,
RelativeWithExtended, SupplyCore, UnrealizedFull,
},
prices,
};
/// Cohort metrics with extended realized + extended cost basis (no adjusted).
@@ -22,19 +22,14 @@ use crate::distribution::metrics::{
pub struct ExtendedCohortMetrics<M: StorageMode = Rw> {
#[traversable(skip)]
pub filter: Filter,
pub supply: Box<SupplyFull<M>>,
pub outputs: Box<OutputsFull<M>>,
pub supply: Box<SupplyCore<M>>,
pub outputs: Box<OutputsBase<M>>,
pub activity: Box<ActivityFull<M>>,
pub realized: Box<RealizedFull<M>>,
pub cost_basis: Box<CostBasis<M>>,
pub unrealized: Box<UnrealizedFull<M>>,
#[traversable(flatten)]
pub relative: Box<RelativeWithExtended<M>>,
#[traversable(wrap = "supply", rename = "delta")]
pub supply_delta_extended: RollingDeltaExcept1m<Sats, SatsSigned, M>,
#[traversable(wrap = "outputs/unspent_count", rename = "delta")]
pub unspent_count_delta_extended: RollingDeltaExcept1m<StoredU64, StoredI64, M>,
}
impl CohortMetricsBase for ExtendedCohortMetrics {
@@ -75,7 +70,7 @@ impl CohortMetricsBase for ExtendedCohortMetrics {
impl ExtendedCohortMetrics {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyFull::forced_import(cfg)?;
let supply = SupplyCore::forced_import(cfg)?;
let unrealized = UnrealizedFull::forced_import(cfg)?;
let realized = RealizedFull::forced_import(cfg)?;
@@ -84,14 +79,12 @@ impl ExtendedCohortMetrics {
Ok(Self {
filter: cfg.filter.clone(),
supply: Box::new(supply),
outputs: Box::new(OutputsFull::forced_import(cfg)?),
outputs: Box::new(OutputsBase::forced_import(cfg)?),
activity: Box::new(ActivityFull::forced_import(cfg)?),
realized: Box::new(realized),
cost_basis: Box::new(CostBasis::forced_import(cfg)?),
unrealized: Box::new(unrealized),
relative: Box::new(relative),
supply_delta_extended: cfg.import("supply_delta", Version::ONE)?,
unspent_count_delta_extended: cfg.import("utxo_count_delta", Version::ONE)?,
})
}
@@ -122,7 +115,7 @@ impl ExtendedCohortMetrics {
self.relative.compute(
starting_indexes.height,
&self.supply.core,
&self.supply,
&self.unrealized,
height_to_market_cap,
all_supply_sats,
@@ -130,20 +123,6 @@ impl ExtendedCohortMetrics {
exit,
)?;
let window_starts = blocks.lookback.window_starts();
self.supply_delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.supply.total.sats.height,
exit,
)?;
self.unspent_count_delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.outputs.unspent_count.height,
exit,
)?;
self.activity.compute_rest_part2(
starting_indexes,
&self.supply.total.sats.height,

View File

@@ -4,11 +4,12 @@ use brk_types::{Cents, Dollars, Height, Indexes, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityFull, CohortMetricsBase, ImportConfig, AdjustedSopr,
RealizedFull, UnrealizedFull,
use crate::{
blocks,
distribution::metrics::{
ActivityFull, AdjustedSopr, CohortMetricsBase, ImportConfig, RealizedFull, UnrealizedFull,
},
prices,
};
use super::ExtendedCohortMetrics;
@@ -78,7 +79,6 @@ impl ExtendedAdjustedCohortMetrics {
)?;
self.asopr.compute_rest_part2(
blocks,
starting_indexes,
&self.inner.realized.minimal.sopr.value_created.raw.height,
&self.inner.realized.minimal.sopr.value_destroyed.raw.height,

View File

@@ -4,10 +4,11 @@ use brk_traversable::Traversable;
use brk_types::Indexes;
use vecdb::{AnyStoredVec, Exit, Rw, StorageMode};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ImportConfig, OutputsBase, RealizedMinimal, SupplyBase, UnrealizedMinimal,
use crate::{
distribution::metrics::{
ImportConfig, OutputsBase, RealizedMinimal, SupplyBase, UnrealizedMinimal,
},
prices,
};
/// MinimalCohortMetrics: supply, outputs, realized cap/price/mvrv/profit/loss + value_created/destroyed.
@@ -83,14 +84,13 @@ impl MinimalCohortMetrics {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.supply.compute(prices, starting_indexes.height, exit)?;
self.realized
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
Ok(())
}

View File

@@ -4,10 +4,11 @@ use brk_traversable::Traversable;
use brk_types::Indexes;
use vecdb::{AnyStoredVec, Exit, Rw, StorageMode};
use crate::{blocks, prices};
use crate::distribution::metrics::{
ImportConfig, OutputsBase, RealizedMinimal, SupplyCore, UnrealizedBasic,
use crate::{
distribution::metrics::{
ImportConfig, OutputsBase, RealizedMinimal, SupplyCore, UnrealizedBasic,
},
prices,
};
/// TypeCohortMetrics: supply(core), outputs(base), realized(minimal), unrealized(basic).
@@ -53,16 +54,15 @@ impl TypeCohortMetrics {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.supply.compute(prices, starting_indexes.height, exit)?;
self.realized
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.unrealized
.compute_rest(blocks, starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, exit)?;
Ok(())
}

View File

@@ -7,13 +7,13 @@ use vecdb::{BytesVec, BytesVecValue, Database, ImportableVec};
use crate::{
indexes,
internal::{
AmountPerBlock, AmountPerBlockCumulative, AmountPerBlockWithSum24h, CentsType, ComputedPerBlock,
ComputedPerBlockCumulative, ComputedPerBlockCumulativeSum, FiatPerBlockWithSum24h,
PerBlockWithSum24h, PriceWithRatioExtendedPerBlock, PriceWithRatioPerBlock, RatioPerBlock, RollingWindow24hAmountPerBlock,
RollingWindow24hFiatPerBlock, RollingWindow24hPerBlock,
FiatPerBlock, FiatRollingDelta1m, FiatRollingDeltaExcept1m, NumericValue,
PercentPerBlock, PercentRollingWindows, Price, RollingDelta1m, RollingDeltaExcept1m,
RollingWindows, RollingWindowsFrom1w,
AmountPerBlock, AmountPerBlockCumulative, AmountPerBlockCumulativeWithSums,
CachedWindowStarts, CentsType, ComputedPerBlock,
ComputedPerBlockCumulative, ComputedPerBlockCumulativeWithSums,
FiatPerBlock, FiatPerBlockCumulativeWithSums, NumericValue,
PercentPerBlock, PercentRollingWindows, Price,
PriceWithRatioExtendedPerBlock, PriceWithRatioPerBlock, RatioPerBlock,
RollingWindow24hPerBlock, RollingWindows, RollingWindowsFrom1w,
},
};
@@ -39,7 +39,6 @@ macro_rules! impl_config_import {
impl_config_import!(
AmountPerBlock,
AmountPerBlockCumulative,
RollingWindow24hAmountPerBlock,
PriceWithRatioPerBlock,
PriceWithRatioExtendedPerBlock,
RatioPerBlock<BasisPoints32>,
@@ -62,9 +61,19 @@ impl<T: NumericValue + JsonSchema> ConfigImport for ComputedPerBlockCumulative<T
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<T: NumericValue + JsonSchema> ConfigImport for ComputedPerBlockCumulativeSum<T> {
impl<T, C> ConfigImport for ComputedPerBlockCumulativeWithSums<T, C>
where
T: NumericValue + JsonSchema + Into<C>,
C: NumericValue + JsonSchema,
{
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
Self::forced_import(
cfg.db,
&cfg.name(suffix),
cfg.version + offset,
cfg.indexes,
cfg.cached_starts,
)
}
}
impl<T: NumericValue + JsonSchema> ConfigImport for RollingWindows<T> {
@@ -77,33 +86,26 @@ impl<T: NumericValue + JsonSchema> ConfigImport for RollingWindow24hPerBlock<T>
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<T: NumericValue + JsonSchema> ConfigImport for PerBlockWithSum24h<T> {
impl ConfigImport for AmountPerBlockCumulativeWithSums {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Ok(Self {
raw: ComputedPerBlock::config_import(cfg, suffix, offset)?,
sum: RollingWindow24hPerBlock::config_import(cfg, suffix, offset)?,
})
Self::forced_import(
cfg.db,
&cfg.name(suffix),
cfg.version + offset,
cfg.indexes,
cfg.cached_starts,
)
}
}
impl ConfigImport for AmountPerBlockWithSum24h {
impl<C: CentsType> ConfigImport for FiatPerBlockCumulativeWithSums<C> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Ok(Self {
raw: AmountPerBlock::config_import(cfg, suffix, offset)?,
sum: RollingWindow24hAmountPerBlock::config_import(cfg, suffix, offset)?,
})
}
}
impl<C: CentsType> ConfigImport for RollingWindow24hFiatPerBlock<C> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<C: CentsType> ConfigImport for FiatPerBlockWithSum24h<C> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Ok(Self {
raw: FiatPerBlock::config_import(cfg, suffix, offset)?,
sum: RollingWindow24hFiatPerBlock::config_import(cfg, suffix, offset)?,
})
Self::forced_import(
cfg.db,
&cfg.name(suffix),
cfg.version + offset,
cfg.indexes,
cfg.cached_starts,
)
}
}
impl<T: NumericValue + JsonSchema> ConfigImport for RollingWindowsFrom1w<T> {
@@ -116,30 +118,6 @@ impl<C: CentsType> ConfigImport for FiatPerBlock<C> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<S: NumericValue + JsonSchema, C: NumericValue + JsonSchema> ConfigImport
for RollingDelta1m<S, C>
{
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<S: NumericValue + JsonSchema, C: NumericValue + JsonSchema> ConfigImport
for RollingDeltaExcept1m<S, C>
{
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<S: NumericValue + JsonSchema, C: CentsType> ConfigImport for FiatRollingDelta1m<S, C> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<S: NumericValue + JsonSchema, C: CentsType> ConfigImport for FiatRollingDeltaExcept1m<S, C> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Self::forced_import(cfg.db, &cfg.name(suffix), cfg.version + offset, cfg.indexes)
}
}
impl<T: BytesVecValue> ConfigImport for BytesVec<Height, T> {
fn config_import(cfg: &ImportConfig, suffix: &str, offset: Version) -> Result<Self> {
Ok(Self::forced_import(
@@ -157,6 +135,7 @@ pub struct ImportConfig<'a> {
pub full_name: &'a str,
pub version: Version,
pub indexes: &'a indexes::Vecs,
pub cached_starts: &'a CachedWindowStarts,
}
impl<'a> ImportConfig<'a> {

View File

@@ -18,17 +18,39 @@ mod activity;
/// `ExtendedAdjustedCohortMetrics`, Rust's auto-deref resolves these through `Deref`.
macro_rules! impl_cohort_accessors {
() => {
fn filter(&self) -> &brk_cohort::Filter { &self.filter }
fn supply(&self) -> &$crate::distribution::metrics::SupplyFull { &self.supply }
fn supply_mut(&mut self) -> &mut $crate::distribution::metrics::SupplyFull { &mut self.supply }
fn outputs(&self) -> &$crate::distribution::metrics::OutputsFull { &self.outputs }
fn outputs_mut(&mut self) -> &mut $crate::distribution::metrics::OutputsFull { &mut self.outputs }
fn activity(&self) -> &Self::ActivityVecs { &self.activity }
fn activity_mut(&mut self) -> &mut Self::ActivityVecs { &mut self.activity }
fn realized(&self) -> &Self::RealizedVecs { &self.realized }
fn realized_mut(&mut self) -> &mut Self::RealizedVecs { &mut self.realized }
fn unrealized(&self) -> &Self::UnrealizedVecs { &self.unrealized }
fn unrealized_mut(&mut self) -> &mut Self::UnrealizedVecs { &mut self.unrealized }
fn filter(&self) -> &brk_cohort::Filter {
&self.filter
}
fn supply(&self) -> &$crate::distribution::metrics::SupplyCore {
&self.supply
}
fn supply_mut(&mut self) -> &mut $crate::distribution::metrics::SupplyCore {
&mut self.supply
}
fn outputs(&self) -> &$crate::distribution::metrics::OutputsBase {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut $crate::distribution::metrics::OutputsBase {
&mut self.outputs
}
fn activity(&self) -> &Self::ActivityVecs {
&self.activity
}
fn activity_mut(&mut self) -> &mut Self::ActivityVecs {
&mut self.activity
}
fn realized(&self) -> &Self::RealizedVecs {
&self.realized
}
fn realized_mut(&mut self) -> &mut Self::RealizedVecs {
&mut self.realized
}
fn unrealized(&self) -> &Self::UnrealizedVecs {
&self.unrealized
}
fn unrealized_mut(&mut self) -> &mut Self::UnrealizedVecs {
&mut self.unrealized
}
};
}
@@ -44,29 +66,35 @@ mod unrealized;
pub use activity::{ActivityCore, ActivityFull, ActivityLike};
pub use cohort::{
AllCohortMetrics, BasicCohortMetrics, CoreCohortMetrics,
ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, MinimalCohortMetrics, TypeCohortMetrics,
AllCohortMetrics, BasicCohortMetrics, CoreCohortMetrics, ExtendedAdjustedCohortMetrics,
ExtendedCohortMetrics, MinimalCohortMetrics, TypeCohortMetrics,
};
pub use config::ImportConfig;
pub use cost_basis::CostBasis;
pub use outputs::OutputsBase;
pub use profitability::ProfitabilityMetrics;
pub use outputs::{OutputsBase, OutputsFull};
pub use realized::{
AdjustedSopr, RealizedCore, RealizedFull, RealizedFullAccum, RealizedLike,
RealizedMinimal,
AdjustedSopr, RealizedCore, RealizedFull, RealizedFullAccum, RealizedLike, RealizedMinimal,
};
pub use relative::{
RelativeForAll, RelativeToAll, RelativeWithExtended,
pub use relative::{RelativeForAll, RelativeToAll, RelativeWithExtended};
pub use supply::{SupplyBase, SupplyCore};
pub use unrealized::{
UnrealizedBase, UnrealizedBasic, UnrealizedCore, UnrealizedFull, UnrealizedLike,
UnrealizedMinimal,
};
pub use supply::{SupplyBase, SupplyCore, SupplyFull};
pub use unrealized::{UnrealizedBase, UnrealizedBasic, UnrealizedCore, UnrealizedFull, UnrealizedLike, UnrealizedMinimal};
use brk_cohort::Filter;
use brk_error::Result;
use brk_types::{Cents, Height, Indexes, Version};
use vecdb::{AnyStoredVec, Exit, StorageMode};
use crate::{blocks, distribution::state::{WithoutCapital, WithCapital, CohortState, CostBasisData, CostBasisOps, CostBasisRaw, CoreRealizedState, MinimalRealizedState, RealizedOps, RealizedState}, prices};
use crate::{
distribution::state::{
CohortState, CoreRealizedState, CostBasisData, CostBasisOps, CostBasisRaw,
MinimalRealizedState, RealizedOps, RealizedState, WithCapital, WithoutCapital,
},
prices,
};
pub trait CohortMetricsState {
type Realized: RealizedOps;
@@ -102,16 +130,18 @@ impl<M: StorageMode> CohortMetricsState for AllCohortMetrics<M> {
type CostBasis = CostBasisData<WithCapital>;
}
pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState, CostBasis = CostBasisData<WithCapital>> + Send + Sync {
pub trait CohortMetricsBase:
CohortMetricsState<Realized = RealizedState, CostBasis = CostBasisData<WithCapital>> + Send + Sync
{
type ActivityVecs: ActivityLike;
type RealizedVecs: RealizedLike;
type UnrealizedVecs: UnrealizedLike;
fn filter(&self) -> &Filter;
fn supply(&self) -> &SupplyFull;
fn supply_mut(&mut self) -> &mut SupplyFull;
fn outputs(&self) -> &OutputsFull;
fn outputs_mut(&mut self) -> &mut OutputsFull;
fn supply(&self) -> &SupplyCore;
fn supply_mut(&mut self) -> &mut SupplyCore;
fn outputs(&self) -> &OutputsBase;
fn outputs_mut(&mut self) -> &mut OutputsBase;
fn activity(&self) -> &Self::ActivityVecs;
fn activity_mut(&mut self) -> &mut Self::ActivityVecs;
fn realized(&self) -> &Self::RealizedVecs;
@@ -120,20 +150,33 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState, CostBa
fn unrealized_mut(&mut self) -> &mut Self::UnrealizedVecs;
/// Convenience: access activity as `&ActivityCore` (via `ActivityLike::as_core`).
fn activity_core(&self) -> &ActivityCore { self.activity().as_core() }
fn activity_core_mut(&mut self) -> &mut ActivityCore { self.activity_mut().as_core_mut() }
fn activity_core(&self) -> &ActivityCore {
self.activity().as_core()
}
fn activity_core_mut(&mut self) -> &mut ActivityCore {
self.activity_mut().as_core_mut()
}
/// Convenience: access realized as `&RealizedCore` (via `RealizedLike::as_core`).
fn realized_core(&self) -> &RealizedCore { self.realized().as_core() }
fn realized_core_mut(&mut self) -> &mut RealizedCore { self.realized_mut().as_core_mut() }
fn realized_core(&self) -> &RealizedCore {
self.realized().as_core()
}
fn realized_core_mut(&mut self) -> &mut RealizedCore {
self.realized_mut().as_core_mut()
}
/// Convenience: access unrealized as `&UnrealizedBase` (via `UnrealizedLike::as_base`).
fn unrealized_base(&self) -> &UnrealizedBase { self.unrealized().as_base() }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { self.unrealized_mut().as_base_mut() }
fn unrealized_base(&self) -> &UnrealizedBase {
self.unrealized().as_base()
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
self.unrealized_mut().as_base_mut()
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply_mut().validate_computed_versions(base_version)?;
self.activity_mut().validate_computed_versions(base_version)?;
self.activity_mut()
.validate_computed_versions(base_version)?;
Ok(())
}
@@ -164,7 +207,11 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState, CostBa
.min(self.unrealized().min_stateful_len())
}
fn truncate_push(&mut self, height: Height, state: &CohortState<RealizedState, CostBasisData<WithCapital>>) -> Result<()> {
fn truncate_push(
&mut self,
height: Height,
state: &CohortState<RealizedState, CostBasisData<WithCapital>>,
) -> Result<()> {
self.supply_mut().truncate_push(height, state)?;
self.outputs_mut().truncate_push(height, state)?;
self.activity_mut().truncate_push(height, state)?;
@@ -175,27 +222,22 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState, CostBa
/// First phase of computed metrics (indexes from height).
fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.supply_mut()
.compute(prices, starting_indexes.height, exit)?;
self.supply_mut()
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.outputs_mut()
.compute_rest(blocks, starting_indexes, exit)?;
self.activity_mut()
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.activity_core_mut()
.compute_sent_profitability(blocks, prices, starting_indexes, exit)?;
.compute_sent_profitability(prices, starting_indexes, exit)?;
self.realized_mut()
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.unrealized_mut()
.compute_rest(blocks, prices, starting_indexes, exit)?;
.compute_rest(prices, starting_indexes, exit)?;
self.unrealized_mut()
.compute_net_sentiment_height(starting_indexes, exit)?;
@@ -232,7 +274,10 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState, CostBa
)?;
self.unrealized_base_mut().compute_from_stateful(
starting_indexes,
&others.iter().map(|v| v.unrealized_base()).collect::<Vec<_>>(),
&others
.iter()
.map(|v| v.unrealized_base())
.collect::<Vec<_>>(),
exit,
)?;
Ok(())

View File

@@ -1,22 +1,33 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Indexes, StoredU64, Version};
use brk_types::{BasisPointsSigned32, Height, Indexes, StoredI64, StoredU64, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{distribution::state::{CohortState, CostBasisOps, RealizedOps}, internal::ComputedPerBlock};
use crate::{
distribution::{
metrics::ImportConfig,
state::{CohortState, CostBasisOps, RealizedOps},
},
internal::ComputedPerBlockWithDeltas,
};
use crate::distribution::metrics::ImportConfig;
/// Base output metrics: utxo_count only (1 stored vec).
/// Base output metrics: utxo_count + delta.
#[derive(Traversable)]
pub struct OutputsBase<M: StorageMode = Rw> {
pub unspent_count: ComputedPerBlock<StoredU64, M>,
pub unspent_count: ComputedPerBlockWithDeltas<StoredU64, StoredI64, BasisPointsSigned32, M>,
}
impl OutputsBase {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
unspent_count: cfg.import("utxo_count", Version::ZERO)?,
unspent_count: ComputedPerBlockWithDeltas::forced_import(
cfg.db,
&cfg.name("utxo_count"),
cfg.version,
Version::ONE,
cfg.indexes,
cfg.cached_starts,
)?,
})
}

View File

@@ -1,65 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Indexes, StoredI64, StoredU64, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, Exit, Rw, StorageMode};
use crate::{blocks, internal::RollingDelta1m};
use crate::distribution::metrics::ImportConfig;
use super::OutputsBase;
/// Full output metrics: utxo_count + delta (3 stored vecs).
#[derive(Deref, DerefMut, Traversable)]
pub struct OutputsFull<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: OutputsBase<M>,
#[traversable(wrap = "unspent_count", rename = "delta")]
pub unspent_count_delta: RollingDelta1m<StoredU64, StoredI64, M>,
}
impl OutputsFull {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let base = OutputsBase::forced_import(cfg)?;
let unspent_count_delta = cfg.import("utxo_count_delta", Version::ONE)?;
Ok(Self {
base,
unspent_count_delta,
})
}
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
self.base.collect_vecs_mut()
}
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
others: &[&Self],
exit: &Exit,
) -> Result<()> {
let base_refs: Vec<&OutputsBase> = others.iter().map(|o| &o.base).collect();
self.base.compute_from_stateful(starting_indexes, &base_refs, exit)
}
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.unspent_count_delta.compute(
starting_indexes.height,
&blocks.lookback._1m,
&self.base.unspent_count.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,5 +1,3 @@
mod base;
mod full;
pub use base::OutputsBase;
pub use full::OutputsFull;

View File

@@ -4,30 +4,22 @@ use brk_types::{Cents, Height, Indexes, StoredF64, Version};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{
blocks,
internal::{ComputedPerBlock, RatioCents64, RollingWindows},
distribution::metrics::ImportConfig,
internal::{ComputedPerBlockCumulativeWithSums, RatioCents64, RollingWindows},
};
use crate::distribution::metrics::ImportConfig;
#[derive(Traversable)]
pub struct AdjustedSopr<M: StorageMode = Rw> {
pub value_created: ComputedPerBlock<Cents, M>,
pub value_destroyed: ComputedPerBlock<Cents, M>,
#[traversable(wrap = "value_created", rename = "sum")]
pub value_created_sum: RollingWindows<Cents, M>,
#[traversable(wrap = "value_destroyed", rename = "sum")]
pub value_destroyed_sum: RollingWindows<Cents, M>,
pub value_created: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub ratio: RollingWindows<StoredF64, M>,
}
impl AdjustedSopr {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
value_created: cfg.import("adj_value_created", Version::ZERO)?,
value_destroyed: cfg.import("adj_value_destroyed", Version::ZERO)?,
value_created_sum: cfg.import("adj_value_created", Version::ONE)?,
value_destroyed_sum: cfg.import("adj_value_destroyed", Version::ONE)?,
value_created: cfg.import("adj_value_created", Version::ONE)?,
value_destroyed: cfg.import("adj_value_destroyed", Version::ONE)?,
ratio: cfg.import("asopr", Version::ONE)?,
})
}
@@ -35,7 +27,6 @@ impl AdjustedSopr {
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
base_value_created: &impl ReadableVec<Height, Cents>,
base_value_destroyed: &impl ReadableVec<Height, Cents>,
@@ -44,41 +35,32 @@ impl AdjustedSopr {
exit: &Exit,
) -> Result<()> {
// Compute value_created = base.value_created - under_1h.value_created
self.value_created.height.compute_subtract(
self.value_created.raw.height.compute_subtract(
starting_indexes.height,
base_value_created,
under_1h_value_created,
exit,
)?;
self.value_destroyed.height.compute_subtract(
self.value_destroyed.raw.height.compute_subtract(
starting_indexes.height,
base_value_destroyed,
under_1h_value_destroyed,
exit,
)?;
// Adjusted value created/destroyed rolling sums
let window_starts = blocks.lookback.window_starts();
self.value_created_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.value_created.height,
exit,
)?;
self.value_destroyed_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.value_destroyed.height,
exit,
)?;
// Cumulatives (rolling sums are lazy)
self.value_created
.compute_rest(starting_indexes.height, exit)?;
self.value_destroyed
.compute_rest(starting_indexes.height, exit)?;
// SOPR ratios from rolling sums
// SOPR ratios from lazy rolling sums
for ((sopr, vc), vd) in self
.ratio
.as_mut_array()
.into_iter()
.zip(self.value_created_sum.as_array())
.zip(self.value_destroyed_sum.as_array())
.zip(self.value_created.sum.as_array())
.zip(self.value_destroyed.sum.as_array())
{
sopr.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height,

View File

@@ -1,18 +1,16 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Cents, CentsSigned, Dollars, Height, Indexes, StoredF64, Version};
use brk_types::{BasisPointsSigned32, Bitcoin, Cents, CentsSigned, Dollars, Height, Indexes, StoredF64, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{
AnyStoredVec, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode,
};
use crate::{
blocks,
distribution::state::{CohortState, CostBasisOps, RealizedOps},
internal::{
ComputedPerBlock, FiatRollingDelta1m, LazyPerBlock,
NegCentsUnsignedToDollars, PerBlockWithSum24h, RatioCents64,
RollingWindow24hPerBlock,
FiatPerBlockCumulativeWithSumsAndDeltas, LazyPerBlock, NegCentsUnsignedToDollars,
RatioCents64, RollingWindow24hPerBlock,
},
prices,
};
@@ -33,23 +31,14 @@ pub struct RealizedCore<M: StorageMode = Rw> {
#[traversable(flatten)]
pub minimal: RealizedMinimal<M>,
#[traversable(wrap = "profit", rename = "cumulative")]
pub profit_cumulative: ComputedPerBlock<Cents, M>,
#[traversable(wrap = "loss", rename = "cumulative")]
pub loss_cumulative: ComputedPerBlock<Cents, M>,
#[traversable(wrap = "cap", rename = "delta")]
pub cap_delta: FiatRollingDelta1m<Cents, CentsSigned, M>,
#[traversable(wrap = "loss", rename = "negative")]
pub neg_loss: LazyPerBlock<Dollars, Cents>,
pub net_pnl: PerBlockWithSum24h<CentsSigned, M>,
pub net_pnl: FiatPerBlockCumulativeWithSumsAndDeltas<CentsSigned, CentsSigned, BasisPointsSigned32, M>,
pub sopr: RealizedSoprCore<M>,
}
impl RealizedCore {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v0 = Version::ZERO;
let v1 = Version::ONE;
let minimal = RealizedMinimal::forced_import(cfg)?;
@@ -61,13 +50,19 @@ impl RealizedCore {
cfg.indexes,
);
let net_pnl = FiatPerBlockCumulativeWithSumsAndDeltas::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
cfg.version + v1,
Version::new(4),
cfg.indexes,
cfg.cached_starts,
)?;
Ok(Self {
minimal,
profit_cumulative: cfg.import("realized_profit_cumulative", v0)?,
loss_cumulative: cfg.import("realized_loss_cumulative", v0)?,
cap_delta: cfg.import("realized_cap_delta", v1)?,
neg_loss: neg_realized_loss,
net_pnl: cfg.import("net_realized_pnl", v1)?,
net_pnl,
sopr: RealizedSoprCore {
ratio: cfg.import("sopr", v1)?,
},
@@ -102,25 +97,13 @@ impl RealizedCore {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.minimal
.compute_rest_part1(blocks, starting_indexes, exit)?;
.compute_rest_part1(starting_indexes, exit)?;
self.profit_cumulative.height.compute_cumulative(
starting_indexes.height,
&self.minimal.profit.raw.cents.height,
exit,
)?;
self.loss_cumulative.height.compute_cumulative(
starting_indexes.height,
&self.minimal.loss.raw.cents.height,
exit,
)?;
self.net_pnl.raw.height.compute_transform2(
self.net_pnl.raw.cents.height.compute_transform2(
starting_indexes.height,
&self.minimal.profit.raw.cents.height,
&self.minimal.loss.raw.cents.height,
@@ -138,7 +121,6 @@ impl RealizedCore {
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
height_to_supply: &impl ReadableVec<Height, Bitcoin>,
@@ -147,19 +129,8 @@ impl RealizedCore {
self.minimal
.compute_rest_part2(prices, starting_indexes, height_to_supply, exit)?;
self.cap_delta.compute(
starting_indexes.height,
&blocks.lookback._1m,
&self.minimal.cap.cents.height,
exit,
)?;
self.net_pnl.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.net_pnl.raw.height,
exit,
)?;
self.net_pnl
.compute_rest(starting_indexes.height, exit)?;
self.sopr
.ratio

View File

@@ -14,9 +14,10 @@ use crate::{
blocks,
distribution::state::{WithCapital, CohortState, CostBasisData, RealizedState},
internal::{
CentsUnsignedToDollars, ComputedPerBlock, ComputedPerBlockCumulative, FiatPerBlock,
FiatRollingDelta1m, FiatRollingDeltaExcept1m, LazyPerBlock, PercentPerBlock,
PercentRollingWindows, Price, PriceWithRatioExtendedPerBlock, RatioCents64, RatioCentsBp32,
CentsUnsignedToDollars, ComputedPerBlock, ComputedPerBlockCumulative,
ComputedPerBlockCumulativeWithSums, FiatPerBlockCumulativeWithSums,
LazyPerBlock, PercentPerBlock, PercentRollingWindows, Price,
PriceWithRatioExtendedPerBlock, RatioCents64, RatioCentsBp32,
RatioCentsSignedCentsBps32, RatioCentsSignedDollarsBps32, RatioDollarsBp32,
RatioPerBlockPercentiles, RatioPerBlockStdDevBands, RatioSma, RollingWindows,
RollingWindowsFrom1w,
@@ -31,47 +32,22 @@ use super::RealizedCore;
#[derive(Traversable)]
pub struct RealizedProfit<M: StorageMode = Rw> {
pub rel_to_rcap: PercentPerBlock<BasisPoints32, M>,
pub value_created: ComputedPerBlock<Cents, M>,
pub value_destroyed: ComputedPerBlock<Cents, M>,
#[traversable(wrap = "value_created", rename = "sum")]
pub value_created_sum: RollingWindows<Cents, M>,
#[traversable(wrap = "value_destroyed", rename = "sum")]
pub value_destroyed_sum: RollingWindows<Cents, M>,
pub value_created: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub distribution_flow: LazyPerBlock<Dollars, Cents>,
#[traversable(rename = "sum")]
pub sum_extended: RollingWindowsFrom1w<Cents, M>,
}
#[derive(Traversable)]
pub struct RealizedLoss<M: StorageMode = Rw> {
pub rel_to_rcap: PercentPerBlock<BasisPoints32, M>,
pub value_created: ComputedPerBlock<Cents, M>,
pub value_destroyed: ComputedPerBlock<Cents, M>,
#[traversable(wrap = "value_created", rename = "sum")]
pub value_created_sum: RollingWindows<Cents, M>,
#[traversable(wrap = "value_destroyed", rename = "sum")]
pub value_destroyed_sum: RollingWindows<Cents, M>,
pub value_created: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub capitulation_flow: LazyPerBlock<Dollars, Cents>,
#[traversable(rename = "sum")]
pub sum_extended: RollingWindowsFrom1w<Cents, M>,
}
#[derive(Traversable)]
pub struct RealizedGrossPnl<M: StorageMode = Rw> {
pub raw: FiatPerBlock<Cents, M>,
pub sum: RollingWindows<Cents, M>,
pub sell_side_risk_ratio: PercentRollingWindows<BasisPoints32, M>,
}
#[derive(Traversable)]
pub struct RealizedNetPnl<M: StorageMode = Rw> {
pub rel_to_rcap: PercentPerBlock<BasisPointsSigned32, M>,
pub cumulative: ComputedPerBlock<CentsSigned, M>,
#[traversable(rename = "sum")]
pub sum_extended: RollingWindowsFrom1w<CentsSigned, M>,
pub delta: FiatRollingDelta1m<CentsSigned, CentsSigned, M>,
#[traversable(rename = "delta")]
pub delta_extended: FiatRollingDeltaExcept1m<CentsSigned, CentsSigned, M>,
#[traversable(wrap = "change_1m", rename = "rel_to_rcap")]
pub change_1m_rel_to_rcap: PercentPerBlock<BasisPointsSigned32, M>,
#[traversable(wrap = "change_1m", rename = "rel_to_mcap")]
@@ -80,10 +56,6 @@ pub struct RealizedNetPnl<M: StorageMode = Rw> {
#[derive(Traversable)]
pub struct RealizedSopr<M: StorageMode = Rw> {
#[traversable(wrap = "value_created", rename = "sum")]
pub value_created_sum_extended: RollingWindowsFrom1w<Cents, M>,
#[traversable(wrap = "value_destroyed", rename = "sum")]
pub value_destroyed_sum_extended: RollingWindowsFrom1w<Cents, M>,
#[traversable(rename = "ratio")]
pub ratio_extended: RollingWindowsFrom1w<StoredF64, M>,
}
@@ -100,7 +72,7 @@ pub struct RealizedInvestor<M: StorageMode = Rw> {
pub price: PriceWithRatioExtendedPerBlock<M>,
pub lower_price_band: Price<ComputedPerBlock<Cents, M>>,
pub upper_price_band: Price<ComputedPerBlock<Cents, M>>,
#[traversable(wrap = "cap", rename = "raw")]
#[traversable(hidden)]
pub cap_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
}
@@ -113,7 +85,8 @@ pub struct RealizedFull<M: StorageMode = Rw> {
pub profit: RealizedProfit<M>,
pub loss: RealizedLoss<M>,
pub gross_pnl: RealizedGrossPnl<M>,
pub gross_pnl: FiatPerBlockCumulativeWithSums<Cents, M>,
pub sell_side_risk_ratio: PercentRollingWindows<BasisPoints32, M>,
pub net_pnl: RealizedNetPnl<M>,
pub sopr: RealizedSopr<M>,
pub peak_regret: RealizedPeakRegret<M>,
@@ -121,10 +94,7 @@ pub struct RealizedFull<M: StorageMode = Rw> {
pub profit_to_loss_ratio: RollingWindows<StoredF64, M>,
#[traversable(wrap = "cap", rename = "delta")]
pub cap_delta_extended: FiatRollingDeltaExcept1m<Cents, CentsSigned, M>,
#[traversable(wrap = "cap", rename = "raw")]
#[traversable(hidden)]
pub cap_raw: M::Stored<BytesVec<Height, CentsSats>>,
#[traversable(wrap = "cap", rename = "rel_to_own_mcap")]
pub cap_rel_to_own_mcap: PercentPerBlock<BasisPoints32, M>,
@@ -145,58 +115,46 @@ impl RealizedFull {
let core = RealizedCore::forced_import(cfg)?;
// Profit
let profit_value_destroyed: ComputedPerBlock<Cents> =
cfg.import("profit_value_destroyed", v0)?;
let profit_value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents> =
cfg.import("profit_value_destroyed", v1)?;
let profit_flow = LazyPerBlock::from_computed::<CentsUnsignedToDollars>(
&cfg.name("profit_flow"),
&cfg.name("distribution_flow"),
cfg.version,
profit_value_destroyed.height.read_only_boxed_clone(),
&profit_value_destroyed,
profit_value_destroyed.raw.height.read_only_boxed_clone(),
&profit_value_destroyed.raw,
);
let profit = RealizedProfit {
rel_to_rcap: cfg.import("realized_profit_rel_to_rcap", Version::new(2))?,
value_created: cfg.import("profit_value_created", v0)?,
value_created: cfg.import("profit_value_created", v1)?,
value_destroyed: profit_value_destroyed,
value_created_sum: cfg.import("profit_value_created", v1)?,
value_destroyed_sum: cfg.import("profit_value_destroyed", v1)?,
distribution_flow: profit_flow,
sum_extended: cfg.import("realized_profit", v1)?,
};
// Loss
let loss_value_destroyed: ComputedPerBlock<Cents> =
cfg.import("loss_value_destroyed", v0)?;
let loss_value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents> =
cfg.import("loss_value_destroyed", v1)?;
let capitulation_flow = LazyPerBlock::from_computed::<CentsUnsignedToDollars>(
&cfg.name("capitulation_flow"),
cfg.version,
loss_value_destroyed.height.read_only_boxed_clone(),
&loss_value_destroyed,
loss_value_destroyed.raw.height.read_only_boxed_clone(),
&loss_value_destroyed.raw,
);
let loss = RealizedLoss {
rel_to_rcap: cfg.import("realized_loss_rel_to_rcap", Version::new(2))?,
value_created: cfg.import("loss_value_created", v0)?,
value_created: cfg.import("loss_value_created", v1)?,
value_destroyed: loss_value_destroyed,
value_created_sum: cfg.import("loss_value_created", v1)?,
value_destroyed_sum: cfg.import("loss_value_destroyed", v1)?,
capitulation_flow,
sum_extended: cfg.import("realized_loss", v1)?,
};
// Gross PnL
let gross_pnl = RealizedGrossPnl {
raw: cfg.import("realized_gross_pnl", v0)?,
sum: cfg.import("gross_pnl_sum", v1)?,
sell_side_risk_ratio: cfg.import("sell_side_risk_ratio", Version::new(2))?,
};
let gross_pnl: FiatPerBlockCumulativeWithSums<Cents> =
cfg.import("realized_gross_pnl", v1)?;
let sell_side_risk_ratio = cfg.import("sell_side_risk_ratio", Version::new(2))?;
// Net PnL
let net_pnl = RealizedNetPnl {
rel_to_rcap: cfg
.import("net_realized_pnl_rel_to_rcap", Version::new(2))?,
cumulative: cfg.import("net_realized_pnl_cumulative", v1)?,
sum_extended: cfg.import("net_realized_pnl", v1)?,
delta: cfg.import("net_pnl_delta", Version::new(5))?,
delta_extended: cfg.import("net_pnl_delta", Version::new(5))?,
change_1m_rel_to_rcap: cfg
.import("net_pnl_change_1m_rel_to_rcap", Version::new(4))?,
change_1m_rel_to_mcap: cfg
@@ -205,8 +163,6 @@ impl RealizedFull {
// SOPR
let sopr = RealizedSopr {
value_created_sum_extended: cfg.import("value_created", v1)?,
value_destroyed_sum_extended: cfg.import("value_destroyed", v1)?,
ratio_extended: cfg.import("sopr", v1)?,
};
@@ -234,12 +190,12 @@ impl RealizedFull {
profit,
loss,
gross_pnl,
sell_side_risk_ratio,
net_pnl,
sopr,
peak_regret,
investor,
profit_to_loss_ratio: cfg.import("realized_profit_to_loss_ratio", v1)?,
cap_delta_extended: cfg.import("realized_cap_delta", Version::new(5))?,
cap_raw: cfg.import("cap_raw", v0)?,
cap_rel_to_own_mcap: cfg.import("realized_cap_rel_to_own_mcap", v1)?,
price_ratio_percentiles: RatioPerBlockPercentiles::forced_import(
@@ -266,11 +222,12 @@ impl RealizedFull {
pub(crate) fn min_stateful_len(&self) -> usize {
self.profit
.value_created
.raw
.height
.len()
.min(self.profit.value_destroyed.height.len())
.min(self.loss.value_created.height.len())
.min(self.loss.value_destroyed.height.len())
.min(self.profit.value_destroyed.raw.height.len())
.min(self.loss.value_created.raw.height.len())
.min(self.loss.value_destroyed.raw.height.len())
.min(self.investor.price.cents.height.len())
.min(self.cap_raw.len())
.min(self.investor.cap_raw.len())
@@ -285,18 +242,22 @@ impl RealizedFull {
self.core.truncate_push(height, state)?;
self.profit
.value_created
.raw
.height
.truncate_push(height, state.realized.profit_value_created())?;
self.profit
.value_destroyed
.raw
.height
.truncate_push(height, state.realized.profit_value_destroyed())?;
self.loss
.value_created
.raw
.height
.truncate_push(height, state.realized.loss_value_created())?;
self.loss
.value_destroyed
.raw
.height
.truncate_push(height, state.realized.loss_value_destroyed())?;
self.investor
@@ -320,10 +281,10 @@ impl RealizedFull {
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
let mut vecs = self.core.collect_vecs_mut();
vecs.push(&mut self.profit.value_created.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.profit.value_destroyed.height);
vecs.push(&mut self.loss.value_created.height);
vecs.push(&mut self.loss.value_destroyed.height);
vecs.push(&mut self.profit.value_created.raw.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.profit.value_destroyed.raw.height);
vecs.push(&mut self.loss.value_created.raw.height);
vecs.push(&mut self.loss.value_destroyed.raw.height);
vecs.push(&mut self.investor.price.cents.height);
vecs.push(&mut self.cap_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.investor.cap_raw as &mut dyn AnyStoredVec);
@@ -350,18 +311,22 @@ impl RealizedFull {
) -> Result<()> {
self.profit
.value_created
.raw
.height
.truncate_push(height, accum.profit_value_created)?;
self.profit
.value_destroyed
.raw
.height
.truncate_push(height, accum.profit_value_destroyed)?;
self.loss
.value_created
.raw
.height
.truncate_push(height, accum.loss_value_created)?;
self.loss
.value_destroyed
.raw
.height
.truncate_push(height, accum.loss_value_destroyed)?;
self.cap_raw
@@ -395,18 +360,11 @@ impl RealizedFull {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.core
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.net_pnl.cumulative.height.compute_cumulative(
starting_indexes.height,
&self.core.net_pnl.raw.height,
exit,
)?;
.compute_rest_part1(starting_indexes, exit)?;
self.peak_regret
.value
@@ -424,45 +382,20 @@ impl RealizedFull {
exit: &Exit,
) -> Result<()> {
self.core.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_supply,
exit,
)?;
let window_starts = blocks.lookback.window_starts();
// Net PnL rolling sums (1w, 1m, 1y)
self.net_pnl.sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.core.net_pnl.raw.height,
exit,
)?;
// SOPR: value created/destroyed rolling sums and ratios
self.sopr.value_created_sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.core.minimal.sopr.value_created.raw.height,
exit,
)?;
self.sopr
.value_destroyed_sum_extended
.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.core.minimal.sopr.value_destroyed.raw.height,
exit,
)?;
// SOPR ratios from lazy rolling sums
for ((sopr, vc), vd) in self
.sopr
.ratio_extended
.as_mut_array()
.into_iter()
.zip(self.sopr.value_created_sum_extended.as_array())
.zip(self.sopr.value_destroyed_sum_extended.as_array())
.zip(self.core.minimal.sopr.value_created.sum.as_array()[1..].iter())
.zip(self.core.minimal.sopr.value_destroyed.sum.as_array()[1..].iter())
{
sopr.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height,
@@ -493,36 +426,24 @@ impl RealizedFull {
.rel_to_rcap
.compute_binary::<CentsSigned, Cents, RatioCentsSignedCentsBps32>(
starting_indexes.height,
&self.core.net_pnl.raw.height,
&self.core.net_pnl.raw.cents.height,
&self.core.minimal.cap.cents.height,
exit,
)?;
// Profit/loss value created/destroyed rolling sums
self.profit.value_created_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.profit.value_created.height,
exit,
)?;
self.profit.value_destroyed_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.profit.value_destroyed.height,
exit,
)?;
self.loss.value_created_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.loss.value_created.height,
exit,
)?;
self.loss.value_destroyed_sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.loss.value_destroyed.height,
exit,
)?;
// Profit/loss value created/destroyed cumulatives (rolling sums are lazy)
self.profit
.value_created
.compute_rest(starting_indexes.height, exit)?;
self.profit
.value_destroyed
.compute_rest(starting_indexes.height, exit)?;
self.loss
.value_created
.compute_rest(starting_indexes.height, exit)?;
self.loss
.value_destroyed
.compute_rest(starting_indexes.height, exit)?;
// Gross PnL
self.gross_pnl.raw.cents.height.compute_add(
@@ -531,32 +452,15 @@ impl RealizedFull {
&self.core.minimal.loss.raw.cents.height,
exit,
)?;
self.gross_pnl
.compute_rest(starting_indexes.height, exit)?;
self.gross_pnl.sum.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.gross_pnl.raw.cents.height,
exit,
)?;
// Net PnL delta (1m base + 24h/1w/1y extended)
self.net_pnl.delta.compute(
starting_indexes.height,
&blocks.lookback._1m,
&self.net_pnl.cumulative.height,
exit,
)?;
self.net_pnl.delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.net_pnl.cumulative.height,
exit,
)?;
// Net PnL 1m change relative to rcap and mcap
self.net_pnl
.change_1m_rel_to_rcap
.compute_binary::<CentsSigned, Cents, RatioCentsSignedCentsBps32>(
starting_indexes.height,
&self.net_pnl.delta.change_1m.cents.height,
&self.core.net_pnl.delta.change._1m.cents.height,
&self.core.minimal.cap.cents.height,
exit,
)?;
@@ -564,19 +468,11 @@ impl RealizedFull {
.change_1m_rel_to_mcap
.compute_binary::<CentsSigned, Dollars, RatioCentsSignedDollarsBps32>(
starting_indexes.height,
&self.net_pnl.delta.change_1m.cents.height,
&self.core.net_pnl.delta.change._1m.cents.height,
height_to_market_cap,
exit,
)?;
// Realized cap delta extended (24h/1w/1y — 1m is in RealizedCore)
self.cap_delta_extended.compute(
starting_indexes.height,
&window_starts,
&self.core.minimal.cap.cents.height,
exit,
)?;
// Peak regret rel to rcap
self.peak_regret
.rel_to_rcap
@@ -636,7 +532,6 @@ impl RealizedFull {
// Sell-side risk ratios
for (ssrr, rv) in self
.gross_pnl
.sell_side_risk_ratio
.as_mut_array()
.into_iter()
@@ -644,26 +539,12 @@ impl RealizedFull {
{
ssrr.compute_binary::<Cents, Cents, RatioCentsBp32>(
starting_indexes.height,
&rv.height,
&rv.cents.height,
&self.core.minimal.cap.cents.height,
exit,
)?;
}
// Profit/loss sum extended (1w, 1m, 1y)
self.profit.sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.core.minimal.profit.raw.cents.height,
exit,
)?;
self.loss.sum_extended.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.core.minimal.loss.raw.cents.height,
exit,
)?;
// Realized cap relative to own market cap
self.cap_rel_to_own_mcap
.compute_binary::<Dollars, Dollars, RatioDollarsBp32>(
@@ -674,25 +555,17 @@ impl RealizedFull {
)?;
// Realized profit to loss ratios
self.profit_to_loss_ratio
._24h
.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height,
&self.core.minimal.profit.sum._24h.cents.height,
&self.core.minimal.loss.sum._24h.cents.height,
exit,
)?;
for ((ratio, profit), loss) in self
.profit_to_loss_ratio
.as_mut_array_from_1w()
.as_mut_array()
.into_iter()
.zip(self.profit.sum_extended.as_array())
.zip(self.loss.sum_extended.as_array())
.zip(self.core.minimal.profit.sum.as_array())
.zip(self.core.minimal.loss.sum.as_array())
{
ratio.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height,
&profit.height,
&loss.height,
&profit.cents.height,
&loss.cents.height,
exit,
)?;
}

View File

@@ -1,7 +1,7 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
BasisPoints32, Bitcoin, Cents, Height, Indexes, Sats, StoredF32,
BasisPoints32, BasisPointsSigned32, Bitcoin, Cents, CentsSigned, Height, Indexes, Sats, StoredF32,
Version,
};
use vecdb::{
@@ -9,11 +9,10 @@ use vecdb::{
};
use crate::{
blocks,
distribution::state::{CohortState, CostBasisOps, RealizedOps},
internal::{
FiatPerBlock, FiatPerBlockWithSum24h, Identity, LazyPerBlock,
PerBlockWithSum24h, PriceWithRatioPerBlock,
ComputedPerBlockCumulativeWithSums, FiatPerBlockCumulativeWithSums,
FiatPerBlockWithDeltas, Identity, LazyPerBlock, PriceWithRatioPerBlock,
},
prices,
};
@@ -22,17 +21,15 @@ use crate::distribution::metrics::ImportConfig;
#[derive(Traversable)]
pub struct RealizedSoprMinimal<M: StorageMode = Rw> {
pub value_created: PerBlockWithSum24h<Cents, M>,
pub value_destroyed: PerBlockWithSum24h<Cents, M>,
pub value_created: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
pub value_destroyed: ComputedPerBlockCumulativeWithSums<Cents, Cents, M>,
}
/// Minimal realized metrics: cap (fiat), profit/loss (fiat + 24h sum),
/// price, mvrv, sopr (value_created/destroyed with 24h sums).
#[derive(Traversable)]
pub struct RealizedMinimal<M: StorageMode = Rw> {
pub cap: FiatPerBlock<Cents, M>,
pub profit: FiatPerBlockWithSum24h<Cents, M>,
pub loss: FiatPerBlockWithSum24h<Cents, M>,
pub cap: FiatPerBlockWithDeltas<Cents, CentsSigned, BasisPointsSigned32, M>,
pub profit: FiatPerBlockCumulativeWithSums<Cents, M>,
pub loss: FiatPerBlockCumulativeWithSums<Cents, M>,
pub price: PriceWithRatioPerBlock<M>,
pub mvrv: LazyPerBlock<StoredF32>,
@@ -43,7 +40,14 @@ impl RealizedMinimal {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let cap: FiatPerBlock<Cents> = cfg.import("realized_cap", Version::ZERO)?;
let cap = FiatPerBlockWithDeltas::forced_import(
cfg.db,
&cfg.name("realized_cap"),
cfg.version,
v1,
cfg.indexes,
cfg.cached_starts,
)?;
let price: PriceWithRatioPerBlock = cfg.import("realized_price", v1)?;
let mvrv = LazyPerBlock::from_lazy::<Identity<StoredF32>, BasisPoints32>(
@@ -119,34 +123,17 @@ impl RealizedMinimal {
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.profit.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.profit.raw.cents.height,
exit,
)?;
self.loss.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.loss.raw.cents.height,
exit,
)?;
self.sopr.value_created.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.sopr.value_created.raw.height,
exit,
)?;
self.sopr.value_destroyed.sum.compute_rolling_sum(
starting_indexes.height,
&blocks.lookback._24h,
&self.sopr.value_destroyed.raw.height,
exit,
)?;
self.profit.compute_rest(starting_indexes.height, exit)?;
self.loss.compute_rest(starting_indexes.height, exit)?;
self.sopr
.value_created
.compute_rest(starting_indexes.height, exit)?;
self.sopr
.value_destroyed
.compute_rest(starting_indexes.height, exit)?;
Ok(())
}

View File

@@ -12,19 +12,14 @@ use brk_error::Result;
use brk_types::{Height, Indexes};
use vecdb::Exit;
use crate::{blocks, distribution::state::{WithCapital, CohortState, CostBasisData, RealizedState}};
use crate::distribution::state::{WithCapital, CohortState, CostBasisData, RealizedState};
/// Polymorphic dispatch for realized metric types.
///
/// Both `RealizedCore` and `RealizedFull` have the same inherent methods
/// but with different behavior (Full checks/pushes more fields).
/// This trait enables `CohortMetricsBase` to dispatch correctly via associated type.
pub trait RealizedLike: Send + Sync {
fn as_core(&self) -> &RealizedCore;
fn as_core_mut(&mut self) -> &mut RealizedCore;
fn min_stateful_len(&self) -> usize;
fn truncate_push(&mut self, height: Height, state: &CohortState<RealizedState, CostBasisData<WithCapital>>) -> Result<()>;
fn compute_rest_part1(&mut self, blocks: &blocks::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()>;
fn compute_rest_part1(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()>;
fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
@@ -40,8 +35,8 @@ impl RealizedLike for RealizedCore {
fn truncate_push(&mut self, height: Height, state: &CohortState<RealizedState, CostBasisData<WithCapital>>) -> Result<()> {
self.truncate_push(height, state)
}
fn compute_rest_part1(&mut self, blocks: &blocks::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(blocks, starting_indexes, exit)
fn compute_rest_part1(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(starting_indexes, exit)
}
fn compute_from_stateful(&mut self, starting_indexes: &Indexes, others: &[&RealizedCore], exit: &Exit) -> Result<()> {
self.compute_from_stateful(starting_indexes, others, exit)
@@ -55,8 +50,8 @@ impl RealizedLike for RealizedFull {
fn truncate_push(&mut self, height: Height, state: &CohortState<RealizedState, CostBasisData<WithCapital>>) -> Result<()> {
self.truncate_push(height, state)
}
fn compute_rest_part1(&mut self, blocks: &blocks::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(blocks, starting_indexes, exit)
fn compute_rest_part1(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
self.compute_rest_part1(starting_indexes, exit)
}
fn compute_from_stateful(&mut self, starting_indexes: &Indexes, others: &[&RealizedCore], exit: &Exit) -> Result<()> {
self.compute_from_stateful(starting_indexes, others, exit)

View File

@@ -1,13 +1,13 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Indexes, Version};
use brk_types::{BasisPointsSigned32, Height, Indexes, Sats, SatsSigned, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{distribution::state::{CohortState, CostBasisOps, RealizedOps}, prices};
use crate::internal::{
AmountPerBlock, HalveCents, HalveDollars, HalveSats, HalveSatsToBitcoin,
LazyAmountPerBlock,
LazyAmountPerBlock, LazyRollingDeltasFromHeight,
};
use crate::distribution::metrics::ImportConfig;
@@ -17,6 +17,7 @@ use crate::distribution::metrics::ImportConfig;
pub struct SupplyBase<M: StorageMode = Rw> {
pub total: AmountPerBlock<M>,
pub half: LazyAmountPerBlock,
pub delta: LazyRollingDeltasFromHeight<Sats, SatsSigned, BasisPointsSigned32>,
}
impl SupplyBase {
@@ -30,9 +31,18 @@ impl SupplyBase {
HalveDollars,
>(&cfg.name("supply_half"), &supply, cfg.version);
let delta = LazyRollingDeltasFromHeight::new(
&cfg.name("supply_delta"),
cfg.version + Version::ONE,
&supply.sats.height,
cfg.cached_starts,
cfg.indexes,
);
Ok(Self {
total: supply,
half: supply_half,
delta,
})
}

View File

@@ -80,6 +80,10 @@ impl SupplyCore {
Ok(())
}
pub(crate) fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> {
Ok(())
}
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,

View File

@@ -1,64 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Indexes, Sats, SatsSigned, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, Exit, Rw, StorageMode};
use crate::{blocks, internal::RollingDelta1m};
use crate::distribution::metrics::ImportConfig;
use super::SupplyCore;
/// Full supply metrics: total + in_profit/in_loss + delta (6 stored vecs).
#[derive(Deref, DerefMut, Traversable)]
pub struct SupplyFull<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub core: SupplyCore<M>,
pub delta: RollingDelta1m<Sats, SatsSigned, M>,
}
impl SupplyFull {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let core = SupplyCore::forced_import(cfg)?;
let delta = cfg.import("supply_delta", Version::ONE)?;
Ok(Self { core, delta })
}
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
self.core.collect_vecs_mut()
}
pub(crate) fn validate_computed_versions(&mut self, _base_version: Version) -> Result<()> {
Ok(())
}
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
others: &[&Self],
exit: &Exit,
) -> Result<()> {
let core_refs: Vec<&SupplyCore> = others.iter().map(|o| &o.core).collect();
self.core
.compute_from_stateful(starting_indexes, &core_refs, exit)
}
pub(crate) fn compute_rest_part1(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.delta.compute(
starting_indexes.height,
&blocks.lookback._1m,
&self.core.total.sats.height,
exit,
)
}
}

View File

@@ -1,7 +1,5 @@
mod base;
mod core;
mod full;
pub use base::SupplyBase;
pub use self::core::SupplyCore;
pub use full::SupplyFull;

View File

@@ -4,7 +4,7 @@ use brk_types::{CentsSats, CentsSquaredSats, Height, Indexes, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, AnyVec, BytesVec, Exit, ReadableVec, Rw, StorageMode, WritableVec};
use crate::{blocks, distribution::{metrics::ImportConfig, state::UnrealizedState}};
use crate::distribution::{metrics::ImportConfig, state::UnrealizedState};
use super::UnrealizedCore;
@@ -15,13 +15,13 @@ pub struct UnrealizedBase<M: StorageMode = Rw> {
#[traversable(flatten)]
pub core: UnrealizedCore<M>,
#[traversable(wrap = "invested_capital/in_profit", rename = "raw")]
#[traversable(hidden)]
pub invested_capital_in_profit_raw: M::Stored<BytesVec<Height, CentsSats>>,
#[traversable(wrap = "invested_capital/in_loss", rename = "raw")]
#[traversable(hidden)]
pub invested_capital_in_loss_raw: M::Stored<BytesVec<Height, CentsSats>>,
#[traversable(wrap = "investor_cap/in_profit", rename = "raw")]
#[traversable(hidden)]
pub investor_cap_in_profit_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
#[traversable(wrap = "investor_cap/in_loss", rename = "raw")]
#[traversable(hidden)]
pub investor_cap_in_loss_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
}
@@ -165,11 +165,10 @@ impl UnrealizedBase {
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.core.compute_rest(blocks, starting_indexes, exit)?;
self.core.compute_rest(starting_indexes, exit)?;
Ok(())
}
}

View File

@@ -5,22 +5,20 @@ use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{
blocks,
distribution::{metrics::ImportConfig, state::UnrealizedState},
internal::FiatPerBlockWithSum24h,
internal::FiatPerBlockCumulativeWithSums,
};
use super::UnrealizedMinimal;
/// Basic unrealized metrics: nupl + unrealized profit/loss (fiat + 24h sums).
#[derive(Deref, DerefMut, Traversable)]
pub struct UnrealizedBasic<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub minimal: UnrealizedMinimal<M>,
pub profit: FiatPerBlockWithSum24h<Cents, M>,
pub loss: FiatPerBlockWithSum24h<Cents, M>,
pub profit: FiatPerBlockCumulativeWithSums<Cents, M>,
pub loss: FiatPerBlockCumulativeWithSums<Cents, M>,
}
impl UnrealizedBasic {
@@ -77,23 +75,11 @@ impl UnrealizedBasic {
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.profit.sum.compute_rolling_sum(
max_from,
&blocks.lookback._24h,
&self.profit.raw.cents.height,
exit,
)?;
self.loss.sum.compute_rolling_sum(
max_from,
&blocks.lookback._24h,
&self.loss.raw.cents.height,
exit,
)?;
self.profit.compute_rest(max_from, exit)?;
self.loss.compute_rest(max_from, exit)?;
Ok(())
}
}

View File

@@ -5,7 +5,6 @@ use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
blocks,
distribution::{
metrics::ImportConfig,
state::UnrealizedState,
@@ -78,15 +77,13 @@ impl UnrealizedCore {
Ok(())
}
/// Compute derived metrics from stored values.
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.basic
.compute_rest(blocks, starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, exit)?;
self.net_pnl
.cents

View File

@@ -6,7 +6,7 @@ use vecdb::{AnyStoredVec, Exit, Rw, StorageMode, WritableVec};
use crate::distribution::state::UnrealizedState;
use crate::internal::{CentsSubtractToCentsSigned, FiatPerBlock};
use crate::{blocks, distribution::metrics::ImportConfig, prices};
use crate::{distribution::metrics::ImportConfig, prices};
use super::UnrealizedBase;
@@ -88,12 +88,11 @@ impl UnrealizedFull {
pub(crate) fn compute_rest_all(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.inner.compute_rest(blocks, starting_indexes, exit)?;
self.inner.compute_rest(starting_indexes, exit)?;
self.gross_pnl.cents.height.compute_add(
starting_indexes.height,

View File

@@ -15,7 +15,12 @@ pub struct UnrealizedMinimal<M: StorageMode = Rw> {
impl UnrealizedMinimal {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
nupl: cfg.import("nupl", Version::ONE)?,
nupl: RatioPerBlock::forced_import_raw(
cfg.db,
&cfg.name("nupl"),
cfg.version + Version::ONE,
cfg.indexes,
)?,
})
}

View File

@@ -14,7 +14,7 @@ use brk_error::Result;
use brk_types::{Height, Indexes};
use vecdb::Exit;
use crate::{blocks, distribution::state::UnrealizedState, prices};
use crate::{distribution::state::UnrealizedState, prices};
pub trait UnrealizedLike: Send + Sync {
fn as_base(&self) -> &UnrealizedBase;
@@ -23,7 +23,6 @@ pub trait UnrealizedLike: Send + Sync {
fn truncate_push(&mut self, height: Height, state: &UnrealizedState) -> Result<()>;
fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
@@ -50,12 +49,11 @@ impl UnrealizedLike for UnrealizedBase {
}
fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
_prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.compute_rest(blocks, starting_indexes, exit)
self.compute_rest(starting_indexes, exit)
}
fn compute_net_sentiment_height(
&mut self,
@@ -81,12 +79,11 @@ impl UnrealizedLike for UnrealizedFull {
}
fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.compute_rest_all(blocks, prices, starting_indexes, exit)
self.compute_rest_all(prices, starting_indexes, exit)
}
fn compute_net_sentiment_height(
&mut self,

View File

@@ -23,7 +23,7 @@ use crate::{
state::BlockState,
},
indexes, inputs,
internal::{ComputedPerBlockCumulative, finalize_db, open_db},
internal::{CachedWindowStarts, ComputedPerBlockCumulative, finalize_db, open_db},
outputs, prices, transactions,
};
@@ -32,7 +32,6 @@ use super::{
address::{
AddressCountsVecs, AddressActivityVecs, DeltaVecs, NewAddressCountVecs, TotalAddressCountVecs,
},
compute::aggregates,
};
const VERSION: Version = Version::new(22);
@@ -44,7 +43,7 @@ pub struct AddressMetricsVecs<M: StorageMode = Rw> {
pub activity: AddressActivityVecs<M>,
pub total: TotalAddressCountVecs<M>,
pub new: NewAddressCountVecs<M>,
pub delta: DeltaVecs<M>,
pub delta: DeltaVecs,
#[traversable(wrap = "indexes", rename = "funded")]
pub funded_index:
LazyVecFrom1<FundedAddressIndex, FundedAddressIndex, FundedAddressIndex, FundedAddressData>,
@@ -100,6 +99,7 @@ impl Vecs {
parent: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let db_path = parent.join(super::DB_NAME);
let states_path = db_path.join("states");
@@ -109,9 +109,9 @@ impl Vecs {
let version = parent_version + VERSION;
let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, &states_path)?;
let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, &states_path, cached_starts)?;
let address_cohorts = AddressCohorts::forced_import(&db, version, indexes, &states_path)?;
let address_cohorts = AddressCohorts::forced_import(&db, version, indexes, &states_path, cached_starts)?;
// Create address data BytesVecs first so we can also use them for identity mappings
let fundedaddressindex_to_fundedaddressdata = BytesVec::forced_import_with(
@@ -141,16 +141,17 @@ impl Vecs {
let empty_address_count =
AddressCountsVecs::forced_import(&db, "empty_address_count", version, indexes)?;
let address_activity =
AddressActivityVecs::forced_import(&db, "address_activity", version, indexes)?;
AddressActivityVecs::forced_import(&db, "address_activity", version, indexes, cached_starts)?;
// Stored total = address_count + empty_address_count (global + per-type, with all derived indexes)
let total_address_count = TotalAddressCountVecs::forced_import(&db, version, indexes)?;
// Per-block delta of total (global + per-type)
let new_address_count = NewAddressCountVecs::forced_import(&db, version, indexes)?;
let new_address_count =
NewAddressCountVecs::forced_import(&db, version, indexes, cached_starts)?;
// Growth rate: new / address_count (global + per-type)
let delta = DeltaVecs::forced_import(&db, version, indexes)?;
// Growth rate: delta change + rate (global + per-type)
let delta = DeltaVecs::new(version, &address_count, cached_starts, indexes);
let this = Self {
supply_state: BytesVec::forced_import_with(
@@ -400,26 +401,30 @@ impl Vecs {
self.txindex_to_height = txindex_to_height;
// 5. Compute aggregates (overlapping cohorts from separate cohorts)
aggregates::compute_overlapping(
&mut self.utxo_cohorts,
&mut self.address_cohorts,
starting_indexes,
exit,
)?;
info!("Computing overlapping cohorts...");
{
let (r1, r2) = rayon::join(
|| self.utxo_cohorts.compute_overlapping_vecs(starting_indexes, exit),
|| self.address_cohorts.compute_overlapping_vecs(starting_indexes, exit),
);
r1?;
r2?;
}
// 5b. Compute coinblocks_destroyed cumulative from raw
self.coinblocks_destroyed
.compute_rest(starting_indexes.height, exit)?;
// 6. Compute rest part1 (day1 mappings)
aggregates::compute_rest_part1(
&mut self.utxo_cohorts,
&mut self.address_cohorts,
blocks,
prices,
starting_indexes,
exit,
)?;
info!("Computing rest part 1...");
{
let (r1, r2) = rayon::join(
|| self.utxo_cohorts.compute_rest_part1(prices, starting_indexes, exit),
|| self.address_cohorts.compute_rest_part1(prices, starting_indexes, exit),
);
r1?;
r2?;
}
// 6b. Compute address count sum (by addresstype → all)
self.addresses.funded.compute_rest(starting_indexes, exit)?;
@@ -433,25 +438,15 @@ impl Vecs {
exit,
)?;
let window_starts = blocks.lookback.window_starts();
self.addresses
.activity
.compute_rest(starting_indexes.height, &window_starts, exit)?;
.compute_rest(starting_indexes.height, exit)?;
self.addresses.new.compute(
starting_indexes.height,
&window_starts,
&self.addresses.total,
exit,
)?;
self.addresses.delta.compute(
starting_indexes.height,
&window_starts,
&self.addresses.funded,
exit,
)?;
// 7. Compute rest part2 (relative metrics)
let height_to_market_cap = self
.utxo_cohorts
@@ -463,15 +458,16 @@ impl Vecs {
.height
.read_only_clone();
aggregates::compute_rest_part2(
&mut self.utxo_cohorts,
&mut self.address_cohorts,
info!("Computing rest part 2...");
self.utxo_cohorts.compute_rest_part2(
blocks,
prices,
starting_indexes,
&height_to_market_cap,
exit,
)?;
self.address_cohorts
.compute_rest_part2(prices, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;

View File

@@ -69,7 +69,7 @@ impl Vecs {
.compute_binary::<Dollars, Dollars, RatioDollarsBp32>(
starting_indexes.height,
market_cap,
&transactions.volume.sent_sum.rolling._24h.usd.height,
&transactions.volume.sent_sum.sum._24h.usd.height,
exit,
)?;

View File

@@ -3,19 +3,21 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedPerBlockAggregated};
use crate::{indexes, internal::{CachedWindowStarts, ComputedPerBlockAggregated}};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
Ok(Self(ComputedPerBlockAggregated::forced_import(
db,
"input_count",
version,
indexes,
cached_starts,
)?))
}
}

View File

@@ -5,7 +5,7 @@ use brk_types::Version;
use crate::{
indexes,
internal::{finalize_db, open_db},
internal::{finalize_db, open_db, CachedWindowStarts},
};
use super::{CountVecs, SpentVecs, Vecs};
@@ -15,12 +15,13 @@ impl Vecs {
parent_path: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let db = open_db(parent_path, super::DB_NAME, 50_000_000)?;
let version = parent_version;
let spent = SpentVecs::forced_import(&db, version)?;
let count = CountVecs::forced_import(&db, version, indexes)?;
let count = CountVecs::forced_import(&db, version, indexes, cached_starts)?;
let this = Self { db, spent, count };
finalize_db(&this.db, &this)?;

View File

@@ -1,67 +0,0 @@
//! Amount type with height-level data only (no period-derived views).
//!
//! Stores sats and cents per index, plus lazy btc and usd transforms.
//! Use when period views are unnecessary (e.g., rolling windows provide windowed data).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Cents, Dollars, Height, Sats, Version};
use vecdb::{
Database, EagerVec, Exit, ImportableVec, LazyVecFrom1, PcoVec, ReadableCloneableVec, Rw,
StorageMode, VecIndex,
};
use crate::{
internal::{CentsUnsignedToDollars, SatsToBitcoin, SatsToCents},
prices,
};
const VERSION: Version = Version::TWO; // Match AmountPerBlock versioning
/// Height-only amount: stored sats and cents per index, with btc and usd
/// derived lazily from them. No period/aggregation views.
#[derive(Traversable)]
pub struct Amount<I: VecIndex, M: StorageMode = Rw> {
    // Stored satoshi amounts, one entry per `I`.
    pub sats: M::Stored<EagerVec<PcoVec<I, Sats>>>,
    // Lazy view over `sats` via the SatsToBitcoin transform; not stored.
    pub btc: LazyVecFrom1<I, Bitcoin, I, Sats>,
    // Stored fiat amounts in cents, filled by `compute_cents`.
    pub cents: M::Stored<EagerVec<PcoVec<I, Cents>>>,
    // Lazy view over `cents` via the CentsUnsignedToDollars transform; not stored.
    pub usd: LazyVecFrom1<I, Dollars, I, Cents>,
}

impl Amount<Height> {
    /// Opens (or creates) the `{name}_sats` and `{name}_cents` vecs in `db`
    /// and wires the lazy btc/usd transforms on top of their read-only clones.
    pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        // Fold this type's own VERSION into the caller-provided one.
        let v = version + VERSION;
        let sats: EagerVec<PcoVec<Height, Sats>> =
            EagerVec::forced_import(db, &format!("{name}_sats"), v)?;
        // btc reads through a boxed clone of `sats`; no extra storage.
        let btc = LazyVecFrom1::transformed::<SatsToBitcoin>(name, v, sats.read_only_boxed_clone());
        let cents: EagerVec<PcoVec<Height, Cents>> =
            EagerVec::forced_import(db, &format!("{name}_cents"), v)?;
        let usd = LazyVecFrom1::transformed::<CentsUnsignedToDollars>(
            &format!("{name}_usd"),
            v,
            cents.read_only_boxed_clone(),
        );
        Ok(Self {
            sats,
            btc,
            cents,
            usd,
        })
    }

    /// Eagerly compute cents height values: sats[h] * price_cents[h] / 1e8.
    ///
    /// Reads spot prices from `prices.spot.cents.height`; `max_from` is
    /// forwarded to `compute_binary` as the recomputation start point.
    /// `sats` is read as the left operand, so it must already be populated.
    pub(crate) fn compute_cents(
        &mut self,
        prices: &prices::Vecs,
        max_from: Height,
        exit: &Exit,
    ) -> Result<()> {
        self.cents.compute_binary::<Sats, Cents, SatsToCents>(
            max_from,
            &self.sats,
            &prices.spot.cents.height,
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -1,5 +1,3 @@
mod base;
mod lazy;
pub use base::*;
pub use lazy::*;

View File

@@ -2,12 +2,10 @@ mod distribution_stats;
mod per_resolution;
mod window_24h;
mod windows;
mod windows_except_1m;
mod windows_from_1w;
pub use distribution_stats::*;
pub use per_resolution::*;
pub use window_24h::*;
pub use windows::*;
pub use windows_except_1m::*;
pub use windows_from_1w::*;

View File

@@ -1,4 +1,11 @@
use brk_traversable::Traversable;
use brk_types::Height;
use vecdb::CachedVec;
/// Cached window starts for lazy rolling computations.
/// Clone-cheap (all fields are Arc-backed). Shared across all metrics.
///
/// One `CachedVec<Height, Height>` per window (24h / 1w / 1m / 1y); each maps
/// a block height to the height at which that height's window begins.
#[derive(Clone)]
pub struct CachedWindowStarts(pub Windows<CachedVec<Height, Height>>);
#[derive(Clone, Traversable)]
pub struct Windows<A> {
@@ -33,4 +40,32 @@ impl<A> Windows<A> {
pub fn as_mut_array_from_1w(&mut self) -> [&mut A; 3] {
[&mut self._1w, &mut self._1m, &mut self._1y]
}
    /// Maps each window slot to a new value, passing the slot's name suffix
    /// (from `Self::SUFFIXES`) alongside a reference to its contents.
    /// `f` is `FnMut` and is invoked exactly once per slot, in field order
    /// (24h, 1w, 1m, 1y).
    pub fn map_with_suffix<B>(&self, mut f: impl FnMut(&str, &A) -> B) -> Windows<B> {
        Windows {
            _24h: f(Self::SUFFIXES[0], &self._24h),
            _1w: f(Self::SUFFIXES[1], &self._1w),
            _1m: f(Self::SUFFIXES[2], &self._1m),
            _1y: f(Self::SUFFIXES[3], &self._1y),
        }
    }
}
impl<A, B> Windows<(A, B)> {
    /// Splits a window set of pairs into a pair of window sets,
    /// slot by slot (analogous to `Iterator::unzip`). Consumes `self`.
    pub fn unzip(self) -> (Windows<A>, Windows<B>) {
        let Windows { _24h, _1w, _1m, _1y } = self;
        let (a_24h, b_24h) = _24h;
        let (a_1w, b_1w) = _1w;
        let (a_1m, b_1m) = _1m;
        let (a_1y, b_1y) = _1y;
        (
            Windows {
                _24h: a_24h,
                _1w: a_1w,
                _1m: a_1m,
                _1y: a_1y,
            },
            Windows {
                _24h: b_24h,
                _1w: b_1w,
                _1m: b_1m,
                _1y: b_1y,
            },
        )
    }
}

View File

@@ -1,30 +0,0 @@
use brk_traversable::Traversable;
/// Three-window container (24h / 1w / 1y) that deliberately omits the 1m slot.
#[derive(Clone, Traversable)]
pub struct WindowsExcept1m<A> {
    pub _24h: A,
    pub _1w: A,
    pub _1y: A,
}

impl<A> WindowsExcept1m<A> {
    /// Window name suffixes, in field order.
    pub const SUFFIXES: [&'static str; 3] = ["24h", "1w", "1y"];

    /// Builds each slot by calling `f` with its suffix — "24h", then "1w",
    /// then "1y" — short-circuiting on the first error.
    pub fn try_from_fn<E>(
        mut f: impl FnMut(&str) -> std::result::Result<A, E>,
    ) -> std::result::Result<Self, E> {
        let [s_24h, s_1w, s_1y] = Self::SUFFIXES;
        let _24h = f(s_24h)?;
        let _1w = f(s_1w)?;
        let _1y = f(s_1y)?;
        Ok(Self { _24h, _1w, _1y })
    }

    /// Shared references to the three slots, in field order.
    pub fn as_array(&self) -> [&A; 3] {
        let Self { _24h, _1w, _1y } = self;
        [_24h, _1w, _1y]
    }

    /// Mutable references to the three slots, in field order.
    pub fn as_mut_array(&mut self) -> [&mut A; 3] {
        let Self { _24h, _1w, _1y } = self;
        [_24h, _1w, _1y]
    }
}

View File

@@ -1,13 +1,12 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Cents, Dollars, Height, Sats, Version};
use vecdb::{AnyVec, Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use vecdb::{AnyVec, Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
CentsUnsignedToDollars, ComputedPerBlock, LazyPerBlock, SatsToBitcoin, SatsToCents,
Windows,
},
prices,
};
@@ -74,33 +73,5 @@ impl AmountPerBlock {
Ok(())
}
pub(crate) fn compute_rolling_sum(
&mut self,
max_from: Height,
window_starts: &impl ReadableVec<Height, Height>,
sats_source: &impl ReadableVec<Height, Sats>,
cents_source: &impl ReadableVec<Height, Cents>,
exit: &Exit,
) -> Result<()> {
self.sats
.height
.compute_rolling_sum(max_from, window_starts, sats_source, exit)?;
self.cents
.height
.compute_rolling_sum(max_from, window_starts, cents_source, exit)?;
Ok(())
}
}
impl Windows<AmountPerBlock> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Windows::try_from_fn(|suffix| {
AmountPerBlock::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})
}
}

View File

@@ -5,61 +5,76 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{AmountPerBlock, RollingSumAmountPerBlock, SatsToCents, WindowStarts},
internal::{AmountPerBlock, CachedWindowStarts, LazyRollingSumsAmountFromHeight, SatsToCents},
prices,
};
#[derive(Traversable)]
pub struct AmountPerBlockCumulativeSum<M: StorageMode = Rw> {
pub base: AmountPerBlock<M>,
pub struct AmountPerBlockCumulativeWithSums<M: StorageMode = Rw> {
pub raw: AmountPerBlock<M>,
pub cumulative: AmountPerBlock<M>,
pub sum: RollingSumAmountPerBlock<M>,
pub sum: LazyRollingSumsAmountFromHeight,
}
const VERSION: Version = Version::TWO;
impl AmountPerBlockCumulativeSum {
impl AmountPerBlockCumulativeWithSums {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let v = version + VERSION;
let raw = AmountPerBlock::forced_import(db, name, v, indexes)?;
let cumulative =
AmountPerBlock::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let sum = LazyRollingSumsAmountFromHeight::new(
&format!("{name}_sum"),
v,
&cumulative.sats.height,
&cumulative.cents.height,
cached_starts,
indexes,
);
Ok(Self {
base: AmountPerBlock::forced_import(db, name, v, indexes)?,
cumulative: AmountPerBlock::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?,
sum: RollingSumAmountPerBlock::forced_import(db, name, v, indexes)?,
raw,
cumulative,
sum,
})
}
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
prices: &prices::Vecs,
exit: &Exit,
compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
) -> Result<()> {
compute_sats(&mut self.base.sats.height)?;
compute_sats(&mut self.raw.sats.height)?;
self.compute_rest(max_from, prices, exit)
}
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
prices: &prices::Vecs,
exit: &Exit,
) -> Result<()> {
self.cumulative
.sats
.height
.compute_cumulative(max_from, &self.base.sats.height, exit)?;
.compute_cumulative(max_from, &self.raw.sats.height, exit)?;
self.base
self.raw
.cents
.height
.compute_binary::<Sats, Cents, SatsToCents>(
max_from,
&self.base.sats.height,
&self.raw.sats.height,
&prices.spot.cents.height,
exit,
)?;
@@ -67,15 +82,7 @@ impl AmountPerBlockCumulativeSum {
self.cumulative
.cents
.height
.compute_cumulative(max_from, &self.base.cents.height, exit)?;
self.sum.compute_rolling_sum(
max_from,
windows,
&self.base.sats.height,
&self.base.cents.height,
exit,
)?;
.compute_cumulative(max_from, &self.raw.cents.height, exit)?;
Ok(())
}

View File

@@ -5,7 +5,10 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{AmountPerBlock, RollingFullAmountPerBlock, SatsToCents, WindowStarts},
internal::{
AmountPerBlock, CachedWindowStarts, LazyRollingSumsAmountFromHeight,
RollingDistributionAmountPerBlock, SatsToCents, WindowStarts,
},
prices,
};
@@ -13,8 +16,9 @@ use crate::{
pub struct AmountPerBlockFull<M: StorageMode = Rw> {
pub base: AmountPerBlock<M>,
pub cumulative: AmountPerBlock<M>,
pub sum: LazyRollingSumsAmountFromHeight,
#[traversable(flatten)]
pub rolling: RollingFullAmountPerBlock<M>,
pub rolling: RollingDistributionAmountPerBlock<M>,
}
const VERSION: Version = Version::TWO;
@@ -25,18 +29,33 @@ impl AmountPerBlockFull {
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let v = version + VERSION;
let base = AmountPerBlock::forced_import(db, name, v, indexes)?;
let cumulative = AmountPerBlock::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?;
let sum = LazyRollingSumsAmountFromHeight::new(
&format!("{name}_sum"),
v,
&cumulative.sats.height,
&cumulative.cents.height,
cached_starts,
indexes,
);
let rolling =
RollingDistributionAmountPerBlock::forced_import(db, name, v, indexes)?;
Ok(Self {
base: AmountPerBlock::forced_import(db, name, v, indexes)?,
cumulative: AmountPerBlock::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?,
rolling: RollingFullAmountPerBlock::forced_import(db, name, v, indexes)?,
base,
cumulative,
sum,
rolling,
})
}

View File

@@ -0,0 +1,124 @@
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Cents, Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{DeltaSub, LazyDeltaVec, LazyVecFrom1, ReadableCloneableVec};
use crate::{
indexes,
internal::{
CachedWindowStarts, CentsUnsignedToDollars, DerivedResolutions, LazyPerBlock,
LazyRollingSumFromHeight, Resolutions, SatsToBitcoin, Windows,
},
};
/// Single window slot: lazy rolling sum for Amount (sats + btc + cents + usd).
#[derive(Clone, Traversable)]
pub struct LazyRollingSumAmountFromHeight {
    // Rolling sum of sats, derived lazily from a cumulative vec (see `new`).
    pub sats: LazyRollingSumFromHeight<Sats>,
    // BTC view computed on read from `sats`.
    pub btc: LazyPerBlock<Bitcoin, Sats>,
    // Rolling sum of cents, derived lazily from a cumulative vec.
    pub cents: LazyRollingSumFromHeight<Cents>,
    // USD view computed on read from `cents`.
    pub usd: LazyPerBlock<Dollars, Cents>,
}

/// Lazy rolling sums for all 4 windows, for Amount (sats + btc + cents + usd).
/// Built by [`LazyRollingSumsAmountFromHeight::new`].
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyRollingSumsAmountFromHeight(pub Windows<LazyRollingSumAmountFromHeight>);
impl LazyRollingSumsAmountFromHeight {
    /// Builds the lazy rolling sums for every window.
    ///
    /// Each window's sum is a `LazyDeltaVec` with `DeltaSub` over the
    /// cumulative vec — i.e. computed on read as
    /// `cumulative[h] - cumulative[window_start(h)]` — for both sats and
    /// cents; btc/usd are lazy unit conversions layered on top. Nothing in
    /// this constructor writes to the database.
    ///
    /// `cached_starts` provides one window-start vec per window.
    /// NOTE(review): each start vec's `version()` is passed into the
    /// corresponding `LazyDeltaVec` — presumably to tie cache validity to
    /// the starts vec; confirm against vecdb.
    pub fn new(
        name: &str,
        version: Version,
        cumulative_sats: &(impl ReadableCloneableVec<Height, Sats> + 'static),
        cumulative_cents: &(impl ReadableCloneableVec<Height, Cents> + 'static),
        cached_starts: &CachedWindowStarts,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Boxed read-only clones so each per-window slot can own its handle.
        let cum_sats = cumulative_sats.read_only_boxed_clone();
        let cum_cents = cumulative_cents.read_only_boxed_clone();
        // Invoked once per window by `map_with_suffix` below.
        let make_slot = |suffix: &str, cached_start: &vecdb::CachedVec<Height, Height>| {
            let full_name = format!("{name}_{suffix}");
            let cached = cached_start.clone();
            let starts_version = cached.version();
            // Sats lazy rolling sum: cumulative minus cumulative-at-window-start.
            let sats_sum = LazyDeltaVec::<Height, Sats, Sats, DeltaSub>::new(
                &format!("{full_name}_sats"),
                version,
                cum_sats.clone(),
                starts_version,
                {
                    // Clone here: `cached` itself is moved into the cents
                    // closure further down.
                    let cached = cached.clone();
                    move || cached.get()
                },
            );
            let sats_resolutions = Resolutions::forced_import(
                &format!("{full_name}_sats"),
                sats_sum.read_only_boxed_clone(),
                version,
                indexes,
            );
            let sats = LazyRollingSumFromHeight {
                height: sats_sum,
                resolutions: Box::new(sats_resolutions),
            };
            // Btc: lazy sats→btc transform, plus derived resolutions.
            let btc = LazyPerBlock {
                height: LazyVecFrom1::transformed::<SatsToBitcoin>(
                    &full_name,
                    version,
                    sats.height.read_only_boxed_clone(),
                ),
                resolutions: Box::new(DerivedResolutions::from_derived_computed::<SatsToBitcoin>(
                    &full_name,
                    version,
                    &sats.resolutions,
                )),
            };
            // Cents rolling sum — same delta construction as sats.
            let cents_sum = LazyDeltaVec::<Height, Cents, Cents, DeltaSub>::new(
                &format!("{full_name}_cents"),
                version,
                cum_cents.clone(),
                starts_version,
                // Last use of `cached`: moved into this closure.
                move || cached.get(),
            );
            let cents_resolutions = Resolutions::forced_import(
                &format!("{full_name}_cents"),
                cents_sum.read_only_boxed_clone(),
                version,
                indexes,
            );
            let cents = LazyRollingSumFromHeight {
                height: cents_sum,
                resolutions: Box::new(cents_resolutions),
            };
            // Usd: lazy cents→dollars transform, plus derived resolutions.
            let usd = LazyPerBlock {
                height: LazyVecFrom1::transformed::<CentsUnsignedToDollars>(
                    &format!("{full_name}_usd"),
                    version,
                    cents.height.read_only_boxed_clone(),
                ),
                resolutions: Box::new(DerivedResolutions::from_derived_computed::<
                    CentsUnsignedToDollars,
                >(
                    &format!("{full_name}_usd"), version, &cents.resolutions
                )),
            };
            LazyRollingSumAmountFromHeight {
                sats,
                btc,
                cents,
                usd,
            }
        };
        Self(cached_starts.0.map_with_suffix(make_slot))
    }
}

View File

@@ -4,11 +4,8 @@ mod cumulative_sum;
mod full;
mod lazy;
mod lazy_derived_resolutions;
mod rolling;
mod rolling_full;
mod rolling_sum;
mod windows;
mod with_sum_24h;
mod lazy_rolling_sum;
mod rolling_distribution;
pub use base::*;
pub use cumulative::*;
@@ -16,8 +13,5 @@ pub use cumulative_sum::*;
pub use full::*;
pub use lazy::*;
pub use lazy_derived_resolutions::*;
pub use rolling::*;
pub use rolling_full::*;
pub use rolling_sum::*;
pub use windows::*;
pub use with_sum_24h::*;
pub use lazy_rolling_sum::*;
pub use rolling_distribution::*;

View File

@@ -1,61 +0,0 @@
//! Value type for Height + Rolling pattern.
//!
//! Combines Value (sats/btc/usd per height, no period views) with
//! AmountPerBlockWindows (rolling sums across 4 windows).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{Amount, AmountPerBlockWindows, WindowStarts},
prices,
};
/// Height-level amount (sats/btc/cents/usd) paired with rolling-window sums.
/// `Deref`s to the inner [`Amount`], so callers reach `sats`/`cents` directly;
/// both fields are flattened for traversal.
#[derive(Deref, DerefMut, Traversable)]
pub struct AmountPerBlockRolling<M: StorageMode = Rw> {
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub amount: Amount<Height, M>,
    #[traversable(flatten)]
    pub rolling: AmountPerBlockWindows<M>,
}
impl AmountPerBlockRolling {
    /// Opens the base amount vecs plus one rolling-window set, both under `name`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let amount = Amount::forced_import(db, name, version)?;
        let rolling = AmountPerBlockWindows::forced_import(db, name, version, indexes)?;
        Ok(Self { amount, rolling })
    }

    /// Fill sats via the `compute_sats` closure, derive cents from spot
    /// prices, then refresh every rolling window's sums from both.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        prices: &prices::Vecs,
        exit: &Exit,
        compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
    ) -> Result<()> {
        compute_sats(&mut self.amount.sats)?;
        self.amount.compute_cents(prices, max_from, exit)?;
        // Split borrows: `rolling` mutably, `amount` immutably.
        let Self { amount, rolling } = self;
        rolling.compute_rolling_sum(max_from, windows, &amount.sats, &amount.cents, exit)?;
        Ok(())
    }
}

View File

@@ -7,21 +7,21 @@ use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
AmountPerBlock, DistributionStats, WindowStarts, Windows, compute_rolling_distribution_from_starts,
AmountPerBlock, DistributionStats, WindowStarts, Windows,
compute_rolling_distribution_from_starts,
},
};
/// One window slot: sum + 8 distribution stats, each a AmountPerBlock.
/// One window slot: 8 distribution stats, each a AmountPerBlock.
///
/// Tree: `sum.sats.height`, `average.sats.height`, etc.
/// Tree: `average.sats.height`, `min.sats.height`, etc.
#[derive(Traversable)]
pub struct RollingFullSlot<M: StorageMode = Rw> {
pub sum: AmountPerBlock<M>,
pub struct RollingDistributionSlot<M: StorageMode = Rw> {
#[traversable(flatten)]
pub distribution: DistributionStats<AmountPerBlock<M>>,
}
impl RollingFullSlot {
impl RollingDistributionSlot {
pub(crate) fn forced_import(
db: &Database,
name: &str,
@@ -29,7 +29,6 @@ impl RollingFullSlot {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
sum: AmountPerBlock::forced_import(db, &format!("{name}_sum"), version, indexes)?,
distribution: DistributionStats::try_from_fn(|suffix| {
AmountPerBlock::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})?,
@@ -44,15 +43,6 @@ impl RollingFullSlot {
cents_source: &impl ReadableVec<Height, Cents>,
exit: &Exit,
) -> Result<()> {
self.sum
.sats
.height
.compute_rolling_sum(max_from, starts, sats_source, exit)?;
self.sum
.cents
.height
.compute_rolling_sum(max_from, starts, cents_source, exit)?;
let d = &mut self.distribution;
macro_rules! compute_unit {
@@ -80,14 +70,16 @@ impl RollingFullSlot {
}
}
/// Rolling sum + distribution across 4 windows, window-first.
/// Rolling distribution across 4 windows, window-first.
///
/// Tree: `_24h.sum.sats.height`, `_24h.average.sats.height`, etc.
/// Tree: `_24h.average.sats.height`, `_24h.min.sats.height`, etc.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingFullAmountPerBlock<M: StorageMode = Rw>(pub Windows<RollingFullSlot<M>>);
pub struct RollingDistributionAmountPerBlock<M: StorageMode = Rw>(
pub Windows<RollingDistributionSlot<M>>,
);
impl RollingFullAmountPerBlock {
impl RollingDistributionAmountPerBlock {
pub(crate) fn forced_import(
db: &Database,
name: &str,
@@ -95,7 +87,12 @@ impl RollingFullAmountPerBlock {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self(Windows::try_from_fn(|suffix| {
RollingFullSlot::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
RollingDistributionSlot::forced_import(
db,
&format!("{name}_{suffix}"),
version,
indexes,
)
})?))
}

View File

@@ -1,86 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{AmountPerBlock, RollingWindow24h, WindowStarts, Windows},
};
/// Single 24h rolling sum as amount (sats + btc + cents + usd).
///
/// Tree: `_24h.sats.height`, `_24h.btc.height`, etc.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingWindow24hAmountPerBlock<M: StorageMode = Rw>(
    pub RollingWindow24h<AmountPerBlock<M>>,
);

impl RollingWindow24hAmountPerBlock {
    /// Opens the single 24h slot's vecs under `{name}_24h`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self(RollingWindow24h {
            _24h: AmountPerBlock::forced_import(db, &format!("{name}_24h"), version, indexes)?,
        }))
    }

    /// Recompute the 24h sums of sats and cents from `max_from` onward.
    /// `height_24h_ago[h]` is the window-start height for height `h`.
    pub(crate) fn compute_rolling_sum(
        &mut self,
        max_from: Height,
        height_24h_ago: &impl ReadableVec<Height, Height>,
        sats_source: &impl ReadableVec<Height, Sats>,
        cents_source: &impl ReadableVec<Height, Cents>,
        exit: &Exit,
    ) -> Result<()> {
        // Straight delegation to the slot's own rolling-sum computation.
        self._24h
            .compute_rolling_sum(max_from, height_24h_ago, sats_source, cents_source, exit)
    }
}
/// Rolling sum only, window-first then unit.
///
/// Tree: `_24h.sats.height`, `_24h.btc.height`, etc.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingSumAmountPerBlock<M: StorageMode = Rw>(pub Windows<AmountPerBlock<M>>);

impl RollingSumAmountPerBlock {
    /// Opens one `AmountPerBlock` per window under `{name}_sum_{suffix}`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self(Windows::<AmountPerBlock>::forced_import(
            db,
            &format!("{name}_sum"),
            version,
            indexes,
        )?))
    }

    /// Recompute every window's rolling sums (sats and cents) from `max_from`.
    /// `windows` yields one window-start vec per window, in the same order as
    /// the window slots, so the zip below pairs them correctly.
    pub(crate) fn compute_rolling_sum(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        sats_source: &impl ReadableVec<Height, Sats>,
        cents_source: &impl ReadableVec<Height, Cents>,
        exit: &Exit,
    ) -> Result<()> {
        for (w, starts) in self.0.as_mut_array().into_iter().zip(windows.as_array()) {
            w.sats
                .height
                .compute_rolling_sum(max_from, *starts, sats_source, exit)?;
            w.cents
                .height
                .compute_rolling_sum(max_from, *starts, cents_source, exit)?;
        }
        Ok(())
    }
}

View File

@@ -1,52 +0,0 @@
//! AmountPerBlockWindows - window-first ordering.
//!
//! Access pattern: `coinbase_sum._24h.sats.height`
//! Each window (24h, 7d, 30d, 1y) contains sats (stored) + btc (lazy) + usd (stored).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use brk_types::{Cents, Sats};
use crate::{
indexes,
internal::{AmountPerBlock, WindowStarts, Windows},
};
/// Value rolling windows — window-first, currency-last.
///
/// Each window contains `AmountPerBlock` (sats + btc lazy + usd).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct AmountPerBlockWindows<M: StorageMode = Rw>(pub Windows<AmountPerBlock<M>>);

impl AmountPerBlockWindows {
    /// Opens one `AmountPerBlock` per window under `{name}_{suffix}`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self(Windows::try_from_fn(|suffix| {
            AmountPerBlock::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
        })?))
    }

    /// Recompute every window's rolling sums from `max_from`. Windows and
    /// their start vecs are zipped in matching slot order.
    pub(crate) fn compute_rolling_sum(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        sats_source: &impl ReadableVec<Height, Sats>,
        cents_source: &impl ReadableVec<Height, Cents>,
        exit: &Exit,
    ) -> Result<()> {
        for (w, starts) in self.0.as_mut_array().into_iter().zip(windows.as_array()) {
            w.compute_rolling_sum(max_from, *starts, sats_source, cents_source, exit)?;
        }
        Ok(())
    }
}

View File

@@ -1,13 +0,0 @@
//! AmountPerBlockWithSum24h - AmountPerBlock raw + RollingWindow24hAmountPerBlock sum.
use brk_traversable::Traversable;
use vecdb::{Rw, StorageMode};
use crate::internal::{AmountPerBlock, RollingWindow24hAmountPerBlock};
/// Amount per-block value (sats + cents) with 24h rolling sum (also amount).
#[derive(Traversable)]
pub struct AmountPerBlockWithSum24h<M: StorageMode = Rw> {
    // Per-block values.
    pub raw: AmountPerBlock<M>,
    // 24h rolling sum over `raw`.
    pub sum: RollingWindow24hAmountPerBlock<M>,
}

View File

@@ -3,8 +3,6 @@
//! For metrics aggregated per-block from finer-grained sources (e.g., per-tx data),
//! where we want full per-block stats plus rolling window stats.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
@@ -13,7 +11,7 @@ use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{Full, NumericValue, RollingFull, WindowStarts},
internal::{CachedWindowStarts, Full, NumericValue, RollingFull, WindowStarts},
};
#[derive(Traversable)]
@@ -35,17 +33,22 @@ where
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let height = Full::forced_import(db, name, version)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
let full = Full::forced_import(db, name, version)?;
let rolling = RollingFull::forced_import(
db,
name,
version,
indexes,
&full.cumulative,
cached_starts,
)?;
Ok(Self {
full: height,
rolling,
})
Ok(Self { full, rolling })
}
/// Compute Full stats via closure, then rolling windows from the per-block sum.
/// Compute Full stats via closure, then rolling distribution from the per-block sum.
pub(crate) fn compute(
&mut self,
max_from: Height,
@@ -54,7 +57,7 @@ where
compute_full: impl FnOnce(&mut Full<Height, T>) -> Result<()>,
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
T: From<f64> + Default + Copy + Ord,
f64: From<T>,
{
compute_full(&mut self.full)?;

View File

@@ -1,13 +1,13 @@
//! ComputedPerBlockCumulative - raw ComputedPerBlock + cumulative ComputedPerBlock.
//!
//! Like ComputedPerBlockCumulativeSum but without RollingWindows.
//! Like ComputedPerBlockCumulativeWithSums but without RollingWindows.
//! Used for distribution metrics where rolling is optional per cohort.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
@@ -40,20 +40,6 @@ where
Ok(Self { raw, cumulative })
}
/// Compute raw data via closure, then cumulative only (no rolling).
pub(crate) fn compute(
&mut self,
max_from: Height,
exit: &Exit,
compute_raw: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()>
where
T: Default,
{
compute_raw(&mut self.raw.height)?;
self.compute_rest(max_from, exit)
}
/// Compute cumulative from already-filled raw vec.
pub(crate) fn compute_rest(&mut self, max_from: Height, exit: &Exit) -> Result<()>
where

View File

@@ -1,9 +1,13 @@
//! ComputedPerBlockCumulativeSum - raw ComputedPerBlock + cumulative ComputedPerBlock + RollingWindows (sum).
//! ComputedPerBlockCumulativeWithSums - raw ComputedPerBlock + cumulative ComputedPerBlock + lazy rolling sums.
//!
//! Like ComputedPerBlockFull but with rolling sum only (no distribution).
//! Used for count metrics where distribution stats aren't meaningful.
use std::ops::SubAssign;
//! Rolling sums are derived lazily from the cumulative vec via LazyDeltaVec.
//! No rolling sum vecs are stored on disk.
//!
//! Type parameters:
//! - `T`: per-block value type (e.g., `StoredU32` for tx counts)
//! - `M`: storage mode (`Rw` or `Ro`)
//! - `C`: cumulative type, defaults to `T`. Use a wider type (e.g., `StoredU64`)
//! when the prefix sum of `T` values could overflow `T`.
use brk_error::Result;
use brk_traversable::Traversable;
@@ -13,33 +17,42 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedPerBlock, NumericValue, RollingWindows, WindowStarts},
internal::{CachedWindowStarts, ComputedPerBlock, LazyRollingSumsFromHeight, NumericValue},
};
#[derive(Traversable)]
pub struct ComputedPerBlockCumulativeSum<T, M: StorageMode = Rw>
pub struct ComputedPerBlockCumulativeWithSums<T, C, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
C: NumericValue + JsonSchema,
{
pub raw: ComputedPerBlock<T, M>,
pub cumulative: ComputedPerBlock<T, M>,
pub sum: RollingWindows<T, M>,
pub cumulative: ComputedPerBlock<C, M>,
pub sum: LazyRollingSumsFromHeight<C>,
}
impl<T> ComputedPerBlockCumulativeSum<T>
impl<T, C> ComputedPerBlockCumulativeWithSums<T, C>
where
T: NumericValue + JsonSchema,
T: NumericValue + JsonSchema + Into<C>,
C: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let raw = ComputedPerBlock::forced_import(db, name, version, indexes)?;
let cumulative =
ComputedPerBlock::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let sum = RollingWindows::forced_import(db, name, version, indexes)?;
let sum = LazyRollingSumsFromHeight::new(
&format!("{name}_sum"),
version,
&cumulative.height,
cached_starts,
indexes,
);
Ok(Self {
raw,
@@ -48,36 +61,28 @@ where
})
}
/// Compute raw data via closure, then cumulative + rolling sum.
/// Compute raw data via closure, then cumulative. Rolling sums are lazy.
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
compute_raw: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()>
where
T: Default + SubAssign,
C: Default,
{
compute_raw(&mut self.raw.height)?;
self.compute_rest(max_from, windows, exit)
self.compute_rest(max_from, exit)
}
/// Compute cumulative + rolling sum from already-populated raw data.
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()>
/// Compute cumulative from already-populated raw data. Rolling sums are lazy.
pub(crate) fn compute_rest(&mut self, max_from: Height, exit: &Exit) -> Result<()>
where
T: Default + SubAssign,
C: Default,
{
self.cumulative
.height
.compute_cumulative(max_from, &self.raw.height, exit)?;
self.sum
.compute_rolling_sum(max_from, windows, &self.raw.height, exit)?;
Ok(())
}
}

View File

@@ -1,292 +0,0 @@
//! RollingDelta - raw change + growth rate (%) across time windows.
//!
//! Three tiers:
//! - `RollingDelta1m` — 1m window only (2 stored vecs: change + rate). Default for all cohorts.
//! - `RollingDeltaExcept1m` — 24h + 1w + 1y windows (6 stored vecs). Extended tier only.
//! - `RollingDelta` — all 4 windows (8 stored vecs). Used for standalone global metrics.
//!
//! For a monotonic source (e.g., cumulative address count):
//! - `change._24h` = count_now - count_24h_ago
//! - `rate._24h` = (count_now - count_24h_ago) / count_24h_ago in BPS
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints32, BasisPointsSigned32, Height, Version};
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use crate::{
indexes,
internal::{
ComputedPerBlock, NumericValue, PercentPerBlock, PercentRollingWindows,
RollingWindows, WindowStarts, WindowsExcept1m,
},
};
/// Pre-collect source data starting at the earliest offset the caller can
/// still need, so closures can index into it without re-reading the vec.
/// Returns `(source_data, offset)` for use in `compute_delta_window`:
/// `source_data[i]` corresponds to `source[i + offset]`.
fn collect_source<S: NumericValue>(
    source: &impl ReadableVec<Height, S>,
    skip: usize,
    earliest_starts: &impl ReadableVec<Height, Height>,
) -> (Vec<S>, usize) {
    let total = source.len();
    // Nothing computed yet, or `skip` is out of range for the starts vec:
    // fall back to reading from the very beginning.
    let offset = if skip == 0 || skip >= earliest_starts.len() {
        0
    } else {
        earliest_starts.collect_one_at(skip).unwrap().to_usize()
    };
    let data = source.collect_range_at(offset, total);
    (data, offset)
}
/// Shared computation: change = current - ago, rate = change / ago.
///
/// Writes two height-indexed vecs:
/// - `change_h[h] = source[h] - source[starts[h]]`
/// - `rate_bps_h[h] = change / source[starts[h]]` (0 when the base is 0)
///
/// `starts[h]` maps each height to the start of its window. Arithmetic is
/// done in f64; results convert back through `From<f64>` (any scaling, e.g.
/// to basis points, is up to `C`/`B`'s conversion).
pub(super) fn compute_delta_window<S, C, B>(
    change_h: &mut EagerVec<PcoVec<Height, C>>,
    rate_bps_h: &mut EagerVec<PcoVec<Height, B>>,
    max_from: Height,
    starts: &impl ReadableVec<Height, Height>,
    source: &impl ReadableVec<Height, S>,
    exit: &Exit,
) -> Result<()>
where
    S: NumericValue,
    C: NumericValue,
    B: NumericValue,
{
    // Resume point: rows before `skip` already exist in `change_h`.
    let skip = change_h.len();
    // Pre-collected source window; `source_data[i]` == `source[i + offset]`.
    let (mut source_data, mut offset) = collect_source(source, skip, starts);
    change_h.compute_transform(
        max_from,
        starts,
        |(h, ago_h, ..)| {
            if h.to_usize() < offset || ago_h.to_usize() < offset {
                // Version reset cleared the vec — re-collect from scratch
                source_data = source.collect();
                offset = 0;
            }
            let current: f64 = source_data[h.to_usize() - offset].into();
            let ago: f64 = source_data[ago_h.to_usize() - offset].into();
            (h, C::from(current - ago))
        },
        exit,
    )?;
    // Second pass reuses the (possibly re-collected) source buffer and reads
    // the change values just written above.
    rate_bps_h.compute_transform(
        max_from,
        &*change_h,
        |(h, change, ..)| {
            if h.to_usize() < offset {
                // Version reset cleared the vec — re-collect from scratch
                source_data = source.collect();
                offset = 0;
            }
            let current_f: f64 = source_data[h.to_usize() - offset].into();
            let change_f: f64 = change.into();
            // Recover the window-start baseline from current and change.
            let ago = current_f - change_f;
            let rate = if ago == 0.0 { 0.0 } else { change_f / ago };
            (h, B::from(rate))
        },
        exit,
    )?;
    Ok(())
}
/// Full-tier delta: change + growth rate across all 4 windows (8 stored vecs).
///
/// NOTE(review): `rate` uses unsigned `BasisPoints32` while the 1m and
/// except-1m variants use `BasisPointsSigned32` — fine for monotonic
/// sources (per the module doc), but verify callers never feed a
/// decreasing source here.
#[derive(Traversable)]
pub struct RollingDelta<S, C = S, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    // change[window][h] = source[h] - source[window_start(h)]
    pub change: RollingWindows<C, M>,
    // rate[window][h] = change / source[window_start(h)]
    pub rate: PercentRollingWindows<BasisPoints32, M>,
    // `S` only parameterizes `compute`'s source type; nothing of type `S` is stored.
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> RollingDelta<S, C>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    /// Opens `{name}_change_*` and `{name}_rate_*` vecs for all windows.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            change: RollingWindows::forced_import(
                db,
                &format!("{name}_change"),
                version,
                indexes,
            )?,
            rate: PercentRollingWindows::forced_import(
                db,
                &format!("{name}_rate"),
                version,
                indexes,
            )?,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute change + rate for every window from `max_from` onward.
    /// The change, rate, and window-start arrays are zipped in matching
    /// slot order.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        for ((change_w, rate_w), starts) in self
            .change
            .0
            .as_mut_array()
            .into_iter()
            .zip(self.rate.0.as_mut_array())
            .zip(windows.as_array())
        {
            compute_delta_window(
                &mut change_w.height,
                &mut rate_w.bps.height,
                max_from,
                *starts,
                source,
                exit,
            )?;
        }
        Ok(())
    }
}
/// 1m-only delta: change + growth rate for the 1-month window.
/// Default tier for all cohorts (2 stored vecs).
#[derive(Traversable)]
pub struct RollingDelta1m<S, C = S, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    // change[h] = source[h] - source[1m_ago(h)]; exposed as `change.1m`.
    #[traversable(wrap = "change", rename = "1m")]
    pub change_1m: ComputedPerBlock<C, M>,
    // Signed growth rate; exposed as `rate.1m`.
    #[traversable(wrap = "rate", rename = "1m")]
    pub rate_1m: PercentPerBlock<BasisPointsSigned32, M>,
    // `S` only parameterizes `compute`'s source type; nothing of type `S` is stored.
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> RollingDelta1m<S, C>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    /// Opens the `{name}_change_1m` and `{name}_rate_1m` vecs.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            change_1m: ComputedPerBlock::forced_import(
                db,
                &format!("{name}_change_1m"),
                version,
                indexes,
            )?,
            rate_1m: PercentPerBlock::forced_import(
                db,
                &format!("{name}_rate_1m"),
                version,
                indexes,
            )?,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute change + rate from `max_from` onward.
    /// `height_1m_ago[h]` is the window-start height for height `h`.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        height_1m_ago: &impl ReadableVec<Height, Height>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        compute_delta_window(
            &mut self.change_1m.height,
            &mut self.rate_1m.bps.height,
            max_from,
            height_1m_ago,
            source,
            exit,
        )
    }
}
/// Extended delta: 24h + 1w + 1y windows (6 stored vecs).
/// Only for All/LTH/STH cohorts (Extended tier).
#[derive(Traversable)]
pub struct RollingDeltaExcept1m<S, C = S, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    // change[window][h] = source[h] - source[window_start(h)]
    pub change: WindowsExcept1m<ComputedPerBlock<C, M>>,
    // Signed growth rate per window.
    pub rate: WindowsExcept1m<PercentPerBlock<BasisPointsSigned32, M>>,
    // `S` only parameterizes `compute`'s source type; nothing of type `S` is stored.
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> RollingDeltaExcept1m<S, C>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    /// Opens `{name}_change_{suffix}` and `{name}_rate_{suffix}` vecs for
    /// the 24h / 1w / 1y windows.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            change: WindowsExcept1m::try_from_fn(|suffix| {
                ComputedPerBlock::forced_import(
                    db,
                    &format!("{name}_change_{suffix}"),
                    version,
                    indexes,
                )
            })?,
            rate: WindowsExcept1m::try_from_fn(|suffix| {
                PercentPerBlock::forced_import(
                    db,
                    &format!("{name}_rate_{suffix}"),
                    version,
                    indexes,
                )
            })?,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute change + rate for all three windows from `max_from` onward.
    /// The starts array below is built in the same 24h/1w/1y order as
    /// `WindowsExcept1m`'s fields, so the zip pairs slots correctly.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        let changes = self.change.as_mut_array();
        let rates = self.rate.as_mut_array();
        let starts = [windows._24h, windows._1w, windows._1y];
        for ((change_w, rate_w), starts) in changes.into_iter().zip(rates).zip(starts) {
            compute_delta_window(
                &mut change_w.height,
                &mut rate_w.bps.height,
                max_from,
                starts,
                source,
                exit,
            )?;
        }
        Ok(())
    }
}

View File

@@ -1,214 +0,0 @@
//! Fiat delta variants — same as RollingDelta* but change is FiatPerBlock<C>
//! (stored cents + lazy USD) instead of ComputedPerBlock<C> (stored cents only).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPointsSigned32, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
CentsType, FiatPerBlock, NumericValue, PercentPerBlock, PercentRollingWindows,
Windows, WindowStarts, WindowsExcept1m,
},
};
use super::delta::compute_delta_window;
/// Fiat variant of the 1-month-only delta: the change is stored as cents
/// (with a lazy USD view) plus the growth rate for the 1-month window.
#[derive(Traversable)]
pub struct FiatRollingDelta1m<S, C, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    #[traversable(wrap = "change", rename = "1m")]
    pub change_1m: FiatPerBlock<C, M>,
    #[traversable(wrap = "rate", rename = "1m")]
    pub rate_1m: PercentPerBlock<BasisPointsSigned32, M>,
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> FiatRollingDelta1m<S, C>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    /// Open (or create) the backing vecs under `{name}_change_1m` /
    /// `{name}_rate_1m`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let change_1m =
            FiatPerBlock::forced_import(db, &format!("{name}_change_1m"), version, indexes)?;
        let rate_1m =
            PercentPerBlock::forced_import(db, &format!("{name}_rate_1m"), version, indexes)?;
        Ok(Self {
            change_1m,
            rate_1m,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute the 1-month fiat change (cents) and rate (bps) from `source`.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        height_1m_ago: &impl ReadableVec<Height, Height>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        compute_delta_window(
            &mut self.change_1m.cents.height,
            &mut self.rate_1m.bps.height,
            max_from,
            height_1m_ago,
            source,
            exit,
        )
    }
}
/// Fiat extended delta over the 24h, 1w and 1y windows (the 1m window
/// lives in `FiatRollingDelta1m`): cents change (with lazy USD) plus rate.
#[derive(Traversable)]
pub struct FiatRollingDeltaExcept1m<S, C, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    pub change: WindowsExcept1m<FiatPerBlock<C, M>>,
    pub rate: WindowsExcept1m<PercentPerBlock<BasisPointsSigned32, M>>,
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> FiatRollingDeltaExcept1m<S, C>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    /// Open (or create) one fiat change and one rate vec per window suffix.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let change = WindowsExcept1m::try_from_fn(|suffix| {
            FiatPerBlock::forced_import(db, &format!("{name}_change_{suffix}"), version, indexes)
        })?;
        let rate = WindowsExcept1m::try_from_fn(|suffix| {
            PercentPerBlock::forced_import(db, &format!("{name}_rate_{suffix}"), version, indexes)
        })?;
        Ok(Self {
            change,
            rate,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute fiat change + rate for every window from `source`.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        // Window-start order must match WindowsExcept1m's slot order: 24h, 1w, 1y.
        for ((change_slot, rate_slot), start) in self
            .change
            .as_mut_array()
            .into_iter()
            .zip(self.rate.as_mut_array())
            .zip([windows._24h, windows._1w, windows._1y])
        {
            compute_delta_window(
                &mut change_slot.cents.height,
                &mut rate_slot.bps.height,
                max_from,
                start,
                source,
                exit,
            )?;
        }
        Ok(())
    }
}
/// Full fiat rolling delta across all 4 windows: cents change (with lazy
/// USD view) plus rate per window.
#[derive(Traversable)]
pub struct FiatRollingDelta<S, C, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    pub change: Windows<FiatPerBlock<C, M>>,
    pub rate: PercentRollingWindows<BasisPointsSigned32, M>,
    _phantom: std::marker::PhantomData<S>,
}

impl<S, C> FiatRollingDelta<S, C>
where
    S: NumericValue + JsonSchema,
    C: CentsType,
{
    /// Open (or create) a fiat change vec per window plus the rate windows.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let change = Windows::try_from_fn(|suffix| {
            FiatPerBlock::forced_import(db, &format!("{name}_change_{suffix}"), version, indexes)
        })?;
        let rate =
            PercentRollingWindows::forced_import(db, &format!("{name}_rate"), version, indexes)?;
        Ok(Self {
            change,
            rate,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Recompute fiat change + rate for every rolling window from `source`.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()> {
        for ((change_slot, rate_slot), start) in self
            .change
            .as_mut_array()
            .into_iter()
            .zip(self.rate.0.as_mut_array())
            .zip(windows.as_array())
        {
            compute_delta_window(
                &mut change_slot.cents.height,
                &mut rate_slot.bps.height,
                max_from,
                *start,
                source,
                exit,
            )?;
        }
        Ok(())
    }
}

View File

@@ -2,8 +2,6 @@
//!
//! For metrics with stored per-block data, cumulative sums, and rolling windows.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
@@ -12,7 +10,7 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedPerBlock, NumericValue, RollingFull, WindowStarts},
internal::{CachedWindowStarts, ComputedPerBlock, NumericValue, RollingFull, WindowStarts},
};
#[derive(Traversable)]
@@ -35,11 +33,19 @@ where
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let raw = ComputedPerBlock::forced_import(db, name, version, indexes)?;
let cumulative =
ComputedPerBlock::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
let rolling = RollingFull::forced_import(
db,
name,
version,
indexes,
&cumulative.height,
cached_starts,
)?;
Ok(Self {
raw,
@@ -48,7 +54,7 @@ where
})
}
/// Compute raw data via closure, then cumulative + rolling.
/// Compute raw data via closure, then cumulative + rolling distribution.
pub(crate) fn compute(
&mut self,
max_from: Height,
@@ -57,7 +63,7 @@ where
compute_raw: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
T: From<f64> + Default + Copy + Ord,
f64: From<T>,
{
compute_raw(&mut self.raw.height)?;

View File

@@ -3,25 +3,19 @@ mod base;
mod constant;
mod cumulative;
mod cumulative_sum;
mod delta;
mod resolutions;
mod resolutions_full;
mod fiat_delta;
mod full;
mod rolling_average;
mod sum;
mod with_sum_24h;
mod with_deltas;
pub use aggregated::*;
pub use base::*;
pub use constant::*;
pub use cumulative::*;
pub use cumulative_sum::*;
pub use delta::*;
pub use resolutions::*;
pub use resolutions_full::*;
pub use fiat_delta::*;
pub use full::*;
pub use rolling_average::*;
pub use sum::*;
pub use with_sum_24h::*;
pub use with_deltas::*;

View File

@@ -3,8 +3,6 @@
//! For metrics derived from indexer sources (no stored height vec).
//! Cumulative gets its own ComputedPerBlock so it has LazyAggVec index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
@@ -13,7 +11,7 @@ use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedPerBlock, NumericValue, RollingFull, WindowStarts},
internal::{CachedWindowStarts, ComputedPerBlock, NumericValue, RollingFull, WindowStarts},
};
#[derive(Traversable)]
@@ -35,10 +33,18 @@ where
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let cumulative =
ComputedPerBlock::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
let rolling = RollingFull::forced_import(
db,
name,
version,
indexes,
&cumulative.height,
cached_starts,
)?;
Ok(Self {
cumulative,
@@ -54,7 +60,7 @@ where
exit: &Exit,
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
T: From<f64> + Default + Copy + Ord,
f64: From<T>,
{
self.cumulative

View File

@@ -1,8 +1,7 @@
//! ComputedPerBlock with rolling average (no distribution stats).
//!
//! Stored height data + 4-window rolling averages (24h, 1w, 1m, 1y).
//! Use instead of ComputedPerBlockDistribution when only the average
//! is analytically useful (e.g., block interval, activity counts).
//! Stored height data + f64 cumulative + lazy 4-window rolling averages.
//! Rolling averages are computed on-the-fly from the cumulative via DeltaAvg.
use brk_error::Result;
@@ -13,7 +12,7 @@ use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::indexes;
use crate::internal::{NumericValue, RollingWindows, WindowStarts};
use crate::internal::{CachedWindowStarts, LazyRollingAvgsFromHeight, NumericValue};
#[derive(Traversable)]
pub struct ComputedPerBlockRollingAverage<T, M: StorageMode = Rw>
@@ -21,8 +20,10 @@ where
T: NumericValue + JsonSchema,
{
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(hidden)]
pub cumulative: M::Stored<EagerVec<PcoVec<Height, f64>>>,
#[traversable(flatten)]
pub average: RollingWindows<T, M>,
pub average: LazyRollingAvgsFromHeight<T>,
}
impl<T> ComputedPerBlockRollingAverage<T>
@@ -34,45 +35,41 @@ where
name: &str,
version: Version,
indexes: &indexes::Vecs,
cached_starts: &CachedWindowStarts,
) -> Result<Self> {
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let average =
RollingWindows::forced_import(db, &format!("{name}_average"), version + Version::ONE, indexes)?;
let cumulative: EagerVec<PcoVec<Height, f64>> =
EagerVec::forced_import(db, &format!("{name}_cumulative"), version)?;
let average = LazyRollingAvgsFromHeight::new(
&format!("{name}_average"),
version + Version::ONE,
&cumulative,
cached_starts,
indexes,
);
Ok(Self { height, average })
Ok(Self {
height,
cumulative,
average,
})
}
/// Compute height data via closure, then rolling averages.
/// Compute height data via closure, then cumulative. Rolling averages are lazy.
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()>
where
T: Default,
f64: From<T>,
{
) -> Result<()> {
compute_height(&mut self.height)?;
self.compute_rest(max_from, windows, exit)
self.compute_rest(max_from, exit)
}
/// Compute rolling averages from already-populated height data.
pub(crate) fn compute_rest(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()>
where
T: Default,
f64: From<T>,
{
for (w, starts) in self.average.0.as_mut_array().into_iter().zip(windows.as_array()) {
w.height
.compute_rolling_average(max_from, *starts, &self.height, exit)?;
}
/// Compute cumulative from already-populated height data. Rolling averages are lazy.
pub(crate) fn compute_rest(&mut self, max_from: Height, exit: &Exit) -> Result<()> {
self.cumulative
.compute_cumulative(max_from, &self.height, exit)?;
Ok(())
}
}

View File

@@ -1,59 +0,0 @@
//! ComputedPerBlockSum - raw ComputedPerBlock + RollingWindows (sum only).
//!
//! Like ComputedPerBlockCumulativeSum but without the cumulative vec.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedPerBlock, NumericValue, RollingWindows, WindowStarts},
};
/// Raw per-block values plus their rolling-window sums (no cumulative vec).
///
/// Lighter sibling of `ComputedPerBlockCumulativeSum`.
#[derive(Traversable)]
pub struct ComputedPerBlockSum<T, M: StorageMode = Rw>
where
    T: NumericValue + JsonSchema,
{
    pub raw: ComputedPerBlock<T, M>,
    pub sum: RollingWindows<T, M>,
}

impl<T> ComputedPerBlockSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the raw vec under `name` and sums under `{name}_sum`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            raw: ComputedPerBlock::forced_import(db, name, version, indexes)?,
            sum: RollingWindows::forced_import(db, &format!("{name}_sum"), version, indexes)?,
        })
    }

    /// Fill `raw` via the closure, then derive the rolling sums from it.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
        compute_raw: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()>
    where
        T: Default + SubAssign,
    {
        compute_raw(&mut self.raw.height)?;
        self.sum
            .compute_rolling_sum(max_from, windows, &self.raw.height, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,55 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Rw, StorageMode};
use crate::{
indexes,
internal::{
BpsType, CachedWindowStarts, ComputedPerBlock, LazyRollingDeltasFromHeight, NumericValue,
},
};
/// `ComputedPerBlock` augmented with lazy rolling deltas derived from the
/// stored height vec.
#[derive(Deref, DerefMut, Traversable)]
pub struct ComputedPerBlockWithDeltas<S, C, B, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema + Into<f64>,
    C: NumericValue + JsonSchema + From<f64>,
    B: BpsType + From<f64>,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub inner: ComputedPerBlock<S, M>,
    pub delta: LazyRollingDeltasFromHeight<S, C, B>,
}

impl<S, C, B> ComputedPerBlockWithDeltas<S, C, B>
where
    S: NumericValue + JsonSchema + Into<f64>,
    C: NumericValue + JsonSchema + From<f64>,
    B: BpsType + From<f64>,
{
    /// Open (or create) the inner per-block vecs, then wire the lazy deltas
    /// on top of the stored height data.
    pub(crate) fn forced_import(
        db: &vecdb::Database,
        name: &str,
        version: Version,
        delta_version_offset: Version,
        indexes: &indexes::Vecs,
        cached_starts: &CachedWindowStarts,
    ) -> Result<Self> {
        // The deltas read straight from the stored height vec, so import it first.
        let inner = ComputedPerBlock::forced_import(db, name, version, indexes)?;
        let delta_name = format!("{name}_delta");
        let delta = LazyRollingDeltasFromHeight::new(
            &delta_name,
            version + delta_version_offset,
            &inner.height,
            cached_starts,
            indexes,
        );
        Ok(Self { inner, delta })
    }
}

View File

@@ -1,19 +0,0 @@
//! PerBlockWithSum24h - ComputedPerBlock + RollingWindow24hPerBlock rolling sum.
//!
//! Generic building block for metrics that store a per-block value
//! plus its 24h rolling sum. Used across activity and realized metrics.
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedPerBlock, ComputedVecValue, RollingWindow24hPerBlock};
/// Per-block value paired with its 24h rolling sum.
#[derive(Traversable)]
pub struct PerBlockWithSum24h<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Raw per-block values.
    pub raw: ComputedPerBlock<T, M>,
    // 24h rolling sum of `raw`.
    pub sum: RollingWindow24hPerBlock<T, M>,
}

View File

@@ -0,0 +1,53 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{CachedWindowStarts, CentsType, FiatPerBlock, LazyRollingSumsFiatFromHeight},
};
/// Fiat per-block metric bundling the raw values, a running cumulative,
/// and lazy rolling sums derived from that cumulative.
#[derive(Traversable)]
pub struct FiatPerBlockCumulativeWithSums<C: CentsType, M: StorageMode = Rw> {
    pub raw: FiatPerBlock<C, M>,
    pub cumulative: FiatPerBlock<C, M>,
    pub sum: LazyRollingSumsFiatFromHeight<C>,
}

impl<C: CentsType> FiatPerBlockCumulativeWithSums<C> {
    /// Open (or create) the raw and cumulative fiat vecs, then wire the lazy
    /// sums on top of the cumulative cents vec.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        cached_starts: &CachedWindowStarts,
    ) -> Result<Self> {
        let raw = FiatPerBlock::forced_import(db, name, version, indexes)?;
        let cumulative =
            FiatPerBlock::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
        let sum = LazyRollingSumsFiatFromHeight::new(
            &format!("{name}_sum"),
            version,
            &cumulative.cents.height,
            cached_starts,
            indexes,
        );
        Ok(Self {
            raw,
            cumulative,
            sum,
        })
    }

    /// Refresh the cumulative from already-populated raw data; the rolling
    /// sums stay lazy and need no recomputation here.
    pub(crate) fn compute_rest(&mut self, max_from: Height, exit: &Exit) -> Result<()>
    where
        C: Default,
    {
        let raw_cents = &self.raw.cents.height;
        self.cumulative
            .cents
            .height
            .compute_cumulative(max_from, raw_cents, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,55 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Rw, StorageMode};
use crate::{
indexes,
internal::{BpsType, CachedWindowStarts, LazyRollingDeltasFiatFromHeight},
};
use super::{CentsType, FiatPerBlockCumulativeWithSums};
/// `FiatPerBlockCumulativeWithSums` plus lazy rolling deltas read from the
/// cumulative cents vec.
#[derive(Deref, DerefMut, Traversable)]
pub struct FiatPerBlockCumulativeWithSumsAndDeltas<C, CS, B, M: StorageMode = Rw>
where
    C: CentsType + Into<f64>,
    CS: CentsType + From<f64>,
    B: BpsType + From<f64>,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub inner: FiatPerBlockCumulativeWithSums<C, M>,
    pub delta: LazyRollingDeltasFiatFromHeight<C, CS, B>,
}

impl<C, CS, B> FiatPerBlockCumulativeWithSumsAndDeltas<C, CS, B>
where
    C: CentsType + Into<f64>,
    CS: CentsType + From<f64>,
    B: BpsType + From<f64>,
{
    /// Open (or create) the inner bundle, then wire the lazy deltas on top.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        delta_version_offset: Version,
        indexes: &indexes::Vecs,
        cached_starts: &CachedWindowStarts,
    ) -> Result<Self> {
        let inner =
            FiatPerBlockCumulativeWithSums::forced_import(db, name, version, indexes, cached_starts)?;
        let delta_name = format!("{name}_delta");
        // The deltas are derived from the cumulative cents, not the raw values.
        let delta = LazyRollingDeltasFiatFromHeight::new(
            &delta_name,
            version + delta_version_offset,
            &inner.cumulative.cents.height,
            cached_starts,
            indexes,
        );
        Ok(Self { inner, delta })
    }
}

View File

@@ -0,0 +1,77 @@
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{DeltaSub, LazyDeltaVec, LazyVecFrom1, ReadableCloneableVec};
use crate::{
indexes,
internal::{
CachedWindowStarts, CentsType, DerivedResolutions, LazyPerBlock, LazyRollingSumFromHeight,
Resolutions, Windows,
},
};
/// One rolling-sum window slot: the cents sum plus a lazy USD view of it.
#[derive(Clone, Traversable)]
pub struct LazyRollingSumFiatFromHeight<C: CentsType> {
    pub cents: LazyRollingSumFromHeight<C>,
    pub usd: LazyPerBlock<Dollars, C>,
}

/// All rolling-sum windows, each derived lazily from a cumulative cents vec.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyRollingSumsFiatFromHeight<C: CentsType>(
    pub Windows<LazyRollingSumFiatFromHeight<C>>,
);

impl<C: CentsType> LazyRollingSumsFiatFromHeight<C> {
    /// Build lazy rolling sums named `{name}_{suffix}` for each window,
    /// reading from `cumulative_cents` with per-window start heights taken
    /// from `cached_starts`.
    pub fn new(
        name: &str,
        version: Version,
        cumulative_cents: &(impl ReadableCloneableVec<Height, C> + 'static),
        cached_starts: &CachedWindowStarts,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Read-only handle on the cumulative, cloned into every window slot.
        let cum_cents = cumulative_cents.read_only_boxed_clone();
        let make_slot = |suffix: &str, cached_start: &vecdb::CachedVec<Height, Height>| {
            let full_name = format!("{name}_{suffix}");
            // Clone the cached start vec so the closure below owns it.
            let cached = cached_start.clone();
            let starts_version = cached.version();
            // Windowed cents sum expressed as a DeltaSub over the cumulative,
            // keyed by the window-start height from the cached starts.
            let cents_sum = LazyDeltaVec::<Height, C, C, DeltaSub>::new(
                &format!("{full_name}_cents"),
                version,
                cum_cents.clone(),
                starts_version,
                move || cached.get(),
            );
            let cents_resolutions = Resolutions::forced_import(
                &format!("{full_name}_cents"),
                cents_sum.read_only_boxed_clone(),
                version,
                indexes,
            );
            let cents = LazyRollingSumFromHeight {
                height: cents_sum,
                resolutions: Box::new(cents_resolutions),
            };
            // The USD side is a lazy cents→dollars transform of the same data,
            // both at height resolution and for the derived resolutions.
            let usd = LazyPerBlock {
                height: LazyVecFrom1::transformed::<C::ToDollars>(
                    &full_name,
                    version,
                    cents.height.read_only_boxed_clone(),
                ),
                resolutions: Box::new(DerivedResolutions::from_derived_computed::<C::ToDollars>(
                    &full_name,
                    version,
                    &cents.resolutions,
                )),
            };
            LazyRollingSumFiatFromHeight { cents, usd }
        };
        Self(cached_starts.0.map_with_suffix(make_slot))
    }
}

View File

@@ -1,7 +1,12 @@
mod base;
mod cumulative_sum;
mod cumulative_sum_with_deltas;
mod lazy;
mod with_sum_24h;
mod lazy_rolling_sum;
mod with_deltas;
pub use base::*;
pub use cumulative_sum::*;
pub use cumulative_sum_with_deltas::*;
pub use lazy::*;
pub use with_sum_24h::*;
pub use lazy_rolling_sum::*;
pub use with_deltas::*;

View File

@@ -0,0 +1,55 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Rw, StorageMode};
use crate::{
indexes,
internal::{BpsType, CachedWindowStarts, LazyRollingDeltasFiatFromHeight},
};
use super::{CentsType, FiatPerBlock};
/// `FiatPerBlock` plus lazy rolling deltas derived from the stored cents
/// height vec.
#[derive(Deref, DerefMut, Traversable)]
pub struct FiatPerBlockWithDeltas<C, CS, B, M: StorageMode = Rw>
where
    C: CentsType + Into<f64>,
    CS: CentsType + From<f64>,
    B: BpsType + From<f64>,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub inner: FiatPerBlock<C, M>,
    pub delta: LazyRollingDeltasFiatFromHeight<C, CS, B>,
}

// NOTE(review): the impl previously also demanded `C: JsonSchema`, a bound
// neither the struct definition nor any sibling reaching the same
// `FiatPerBlock::forced_import` (e.g. `FiatPerBlockCumulativeWithSumsAndDeltas`)
// requires; dropped for consistency.
impl<C, CS, B> FiatPerBlockWithDeltas<C, CS, B>
where
    C: CentsType + Into<f64>,
    CS: CentsType + From<f64>,
    B: BpsType + From<f64>,
{
    /// Open (or create) the inner fiat vecs, then wire the lazy deltas on
    /// top of the stored cents height vec.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        delta_version_offset: Version,
        indexes: &indexes::Vecs,
        cached_starts: &CachedWindowStarts,
    ) -> Result<Self> {
        let inner = FiatPerBlock::forced_import(db, name, version, indexes)?;
        let delta = LazyRollingDeltasFiatFromHeight::new(
            &format!("{name}_delta"),
            version + delta_version_offset,
            &inner.cents.height,
            cached_starts,
            indexes,
        );
        Ok(Self { inner, delta })
    }
}

Some files were not shown because too many files have changed in this diff Show More