global: snapshot

This commit is contained in:
nym21
2026-03-04 17:10:15 +01:00
parent 891f0dad9e
commit 9e23de4ba1
313 changed files with 9087 additions and 4918 deletions

View File

@@ -22,7 +22,8 @@ impl Vecs {
.compute(indexer, &self.time, starting_indexes, exit)?;
self.interval
.compute(indexer, &self.count, starting_indexes, exit)?;
self.size.compute(indexer, &self.count, starting_indexes, exit)?;
self.size
.compute(indexer, &self.count, starting_indexes, exit)?;
self.weight
.compute(indexer, &self.count, starting_indexes, exit)?;
self.difficulty

View File

@@ -32,120 +32,54 @@ impl Vecs {
self.compute_rolling_start_hours(time, starting_indexes, exit, 1, |s| {
&mut s.height_1h_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1, |s| {
&mut s.height_24h_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 3, |s| {
&mut s.height_3d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 7, |s| {
&mut s.height_1w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 8, |s| {
&mut s.height_8d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 9, |s| {
&mut s.height_9d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 12, |s| {
&mut s.height_12d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 13, |s| {
&mut s.height_13d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 14, |s| {
&mut s.height_2w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 21, |s| {
&mut s.height_21d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 26, |s| {
&mut s.height_26d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 30, |s| {
&mut s.height_1m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 34, |s| {
&mut s.height_34d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 55, |s| {
&mut s.height_55d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 60, |s| {
&mut s.height_2m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 63, |s| {
&mut s.height_9w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 84, |s| {
&mut s.height_12w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 89, |s| {
&mut s.height_89d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 90, |s| {
&mut s.height_3m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 98, |s| {
&mut s.height_14w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1, |s| &mut s.height_24h_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 3, |s| &mut s.height_3d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 7, |s| &mut s.height_1w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 8, |s| &mut s.height_8d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 9, |s| &mut s.height_9d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 12, |s| &mut s.height_12d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 13, |s| &mut s.height_13d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 14, |s| &mut s.height_2w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 21, |s| &mut s.height_21d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 26, |s| &mut s.height_26d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 30, |s| &mut s.height_1m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 34, |s| &mut s.height_34d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 55, |s| &mut s.height_55d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 60, |s| &mut s.height_2m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 63, |s| &mut s.height_9w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 84, |s| &mut s.height_12w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 89, |s| &mut s.height_89d_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 90, |s| &mut s.height_3m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 98, |s| &mut s.height_14w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 111, |s| {
&mut s.height_111d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 144, |s| {
&mut s.height_144d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 180, |s| {
&mut s.height_6m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 182, |s| {
&mut s.height_26w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 180, |s| &mut s.height_6m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 182, |s| &mut s.height_26w_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 200, |s| {
&mut s.height_200d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 270, |s| {
&mut s.height_9m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 270, |s| &mut s.height_9m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 350, |s| {
&mut s.height_350d_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 360, |s| {
&mut s.height_12m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 365, |s| {
&mut s.height_1y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 420, |s| {
&mut s.height_14m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 730, |s| {
&mut s.height_2y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 780, |s| {
&mut s.height_26m_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1095, |s| {
&mut s.height_3y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 360, |s| &mut s.height_12m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 365, |s| &mut s.height_1y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 420, |s| &mut s.height_14m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 730, |s| &mut s.height_2y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 780, |s| &mut s.height_26m_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 1095, |s| &mut s.height_3y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 1400, |s| {
&mut s.height_200w_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1460, |s| {
&mut s.height_4y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1825, |s| {
&mut s.height_5y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 2190, |s| {
&mut s.height_6y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 2920, |s| {
&mut s.height_8y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 3285, |s| {
&mut s.height_9y_ago
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1460, |s| &mut s.height_4y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 1825, |s| &mut s.height_5y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 2190, |s| &mut s.height_6y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 2920, |s| &mut s.height_8y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 3285, |s| &mut s.height_9y_ago)?;
self.compute_rolling_start(time, starting_indexes, exit, 3650, |s| {
&mut s.height_10y_ago
})?;
@@ -193,13 +127,9 @@ impl Vecs {
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
self.compute_rolling_start_inner(
time,
starting_indexes,
exit,
get_field,
|t, prev_ts| t.difference_in_days_between(prev_ts) >= days,
)
self.compute_rolling_start_inner(time, starting_indexes, exit, get_field, |t, prev_ts| {
t.difference_in_days_between(prev_ts) >= days
})
}
fn compute_rolling_start_hours<F>(
@@ -213,13 +143,9 @@ impl Vecs {
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
self.compute_rolling_start_inner(
time,
starting_indexes,
exit,
get_field,
|t, prev_ts| t.difference_in_hours_between(prev_ts) >= hours,
)
self.compute_rolling_start_inner(time, starting_indexes, exit, get_field, |t, prev_ts| {
t.difference_in_hours_between(prev_ts) >= hours
})
}
fn compute_rolling_start_inner<F, D>(

View File

@@ -9,7 +9,11 @@ use crate::{
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
block_count_target: ConstantVecs::new::<BlockCountTarget>(
"block_count_target",

View File

@@ -12,54 +12,52 @@ pub struct Vecs<M: StorageMode = Rw> {
pub block_count: ComputedFromHeightCumulativeSum<StoredU32, M>,
pub block_count_sum: RollingWindows<StoredU32, M>,
// Window starts sorted by duration
pub height_1h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_24h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1d
pub height_24h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1d
pub height_3d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 7d
pub height_1w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 7d
pub height_8d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_9d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_12d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_13d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 14d
pub height_2w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 14d
pub height_21d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_26d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 30d
pub height_1m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 30d
pub height_34d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_55d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 60d
pub height_9w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 63d
pub height_12w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 84d
pub height_2m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 60d
pub height_9w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 63d
pub height_12w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 84d
pub height_89d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_3m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 90d
pub height_14w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 98d
pub height_3m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 90d
pub height_14w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 98d
pub height_111d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_144d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_6m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 180d
pub height_26w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 182d
pub height_6m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 180d
pub height_26w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 182d
pub height_200d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_9m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 270d
pub height_9m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 270d
pub height_350d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_12m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 360d
pub height_1y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 365d
pub height_14m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 420d
pub height_2y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 730d
pub height_26m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 780d
pub height_3y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1095d
pub height_200w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1400d
pub height_4y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1460d
pub height_5y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1825d
pub height_6y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2190d
pub height_8y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2920d
pub height_9y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3285d
pub height_10y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3650d
pub height_12y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 4380d
pub height_14y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 5110d
pub height_26y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 9490d
pub height_12m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 360d
pub height_1y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 365d
pub height_14m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 420d
pub height_2y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 730d
pub height_26m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 780d
pub height_3y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1095d
pub height_200w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1400d
pub height_4y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1460d
pub height_5y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1825d
pub height_6y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2190d
pub height_8y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2920d
pub height_9y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3285d
pub height_10y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3650d
pub height_12y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 4380d
pub height_14y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 5110d
pub height_26y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 9490d
}
impl Vecs {
/// Get the standard 4 rolling window start heights (24h, 1w, 1m, 1y).
pub fn window_starts(&self) -> WindowStarts<'_> {
WindowStarts {
_24h: &self.height_24h_ago,

View File

@@ -43,12 +43,14 @@ impl Vecs {
)?;
// Compute blocks before next adjustment
self.blocks_before_next_adjustment.height.compute_transform(
starting_indexes.height,
&indexes.height.identity,
|(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())),
exit,
)?;
self.blocks_before_next_adjustment
.height
.compute_transform(
starting_indexes.height,
&indexes.height.identity,
|(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())),
exit,
)?;
// Compute days before next adjustment
self.days_before_next_adjustment.height.compute_transform(

View File

@@ -26,7 +26,12 @@ impl Vecs {
indexes,
),
as_hash: ComputedFromHeight::forced_import(db, "difficulty_as_hash", version, indexes)?,
adjustment: PercentFromHeight::forced_import(db, "difficulty_adjustment", version, indexes)?,
adjustment: PercentFromHeight::forced_import(
db,
"difficulty_adjustment",
version,
indexes,
)?,
epoch: ComputedFromHeight::forced_import(db, "difficulty_epoch", version, indexes)?,
blocks_before_next_adjustment: ComputedFromHeight::forced_import(
db,

View File

@@ -3,8 +3,6 @@ use brk_types::{BasisPointsSigned32, DifficultyEpoch, StoredF32, StoredF64, Stor
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeight, ComputedHeightDerived, PercentFromHeight};
/// Difficulty metrics: raw difficulty, derived stats, adjustment, and countdown
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub raw: ComputedHeightDerived<StoredF64>,

View File

@@ -3,8 +3,6 @@ use brk_types::{HalvingEpoch, StoredF32, StoredU32};
use vecdb::{Rw, StorageMode};
use crate::internal::ComputedFromHeight;
/// Halving epoch metrics and countdown
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub epoch: ComputedFromHeight<HalvingEpoch, M>,

View File

@@ -2,15 +2,15 @@ use std::path::Path;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::Version;
use vecdb::{Database, PAGE_SIZE};
use crate::indexes;
use crate::{
indexes,
internal::{finalize_db, open_db},
};
use super::{
CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, SizeVecs,
TimeVecs, Vecs, WeightVecs,
CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, SizeVecs, TimeVecs, Vecs, WeightVecs,
};
impl Vecs {
@@ -20,9 +20,7 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join(super::DB_NAME))?;
db.set_min_len(PAGE_SIZE * 50_000_000)?;
let db = open_db(parent_path, super::DB_NAME, 50_000_000)?;
let version = parent_version;
let count = CountVecs::forced_import(&db, version, indexes)?;
@@ -43,14 +41,7 @@ impl Vecs {
difficulty,
halving,
};
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
finalize_db(&this.db, &this)?;
Ok(this)
}
}

View File

@@ -16,11 +16,8 @@ impl Vecs {
) -> Result<()> {
let mut prev_timestamp = None;
let window_starts = count_vecs.window_starts();
self.0.compute(
starting_indexes.height,
&window_starts,
exit,
|vec| {
self.0
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.timestamp,
@@ -39,8 +36,7 @@ impl Vecs {
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -8,6 +8,5 @@ use crate::internal::ComputedFromHeightDistribution;
#[derive(Deref, DerefMut, Traversable)]
pub struct Vecs<M: StorageMode = Rw>(
#[traversable(flatten)]
pub ComputedFromHeightDistribution<Timestamp, M>,
#[traversable(flatten)] pub ComputedFromHeightDistribution<Timestamp, M>,
);

View File

@@ -17,19 +17,15 @@ impl Vecs {
let window_starts = count_vecs.window_starts();
// vbytes = floor(weight / 4), stored at height level
self.vbytes.compute(
starting_indexes.height,
&window_starts,
exit,
|height| {
self.vbytes
.compute(starting_indexes.height, &window_starts, exit, |height| {
Ok(height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|(h, weight, ..)| (h, StoredU64::from(weight.to_vbytes_floor())),
exit,
)?)
},
)?;
})?;
// size from indexer total_size
self.size.compute(

View File

@@ -15,18 +15,8 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
vbytes: ComputedFromHeightFull::forced_import(
db,
"block_vbytes",
version,
indexes,
)?,
size: ComputedHeightDerivedFull::forced_import(
db,
"block_size",
version,
indexes,
)?,
vbytes: ComputedFromHeightFull::forced_import(db, "block_vbytes", version, indexes)?,
size: ComputedHeightDerivedFull::forced_import(db, "block_size", version, indexes)?,
})
}
}

View File

@@ -11,8 +11,7 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let timestamp_monotonic =
EagerVec::forced_import(db, "timestamp_monotonic", version)?;
let timestamp_monotonic = EagerVec::forced_import(db, "timestamp_monotonic", version)?;
Ok(Self {
date: LazyVecFrom1::init(
@@ -28,11 +27,7 @@ impl Vecs {
}
impl TimestampIndexes {
fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
macro_rules! period {
($field:ident) => {
LazyVecFrom1::init(
@@ -50,6 +45,22 @@ impl TimestampIndexes {
};
}
Ok(Self(crate::indexes_from!(period, epoch)))
Ok(Self(crate::internal::PerPeriod {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
}))
}
}

View File

@@ -1,15 +1,13 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Date, Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4,
Indexes, Minute10, Minute30, Month1, Month3, Month6, Timestamp, Week1, Year1, Year10,
Date, Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12, Indexes,
Minute10, Minute30, Month1, Month3, Month6, Timestamp, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use vecdb::{EagerVec, Exit, LazyVecFrom1, PcoVec, ReadableVec, Rw, StorageMode};
use crate::{indexes, internal::PerPeriod};
/// Timestamp and date metrics for blocks
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub date: LazyVecFrom1<Height, Date, Height, Timestamp>,
@@ -58,13 +56,21 @@ impl TimestampIndexes {
) -> Result<()> {
let prev_height = starting_indexes.height.decremented().unwrap_or_default();
self.halvingepoch.compute_indirect_sequential(
indexes.height.halvingepoch.collect_one(prev_height).unwrap_or_default(),
indexes
.height
.halvingepoch
.collect_one(prev_height)
.unwrap_or_default(),
&indexes.halvingepoch.first_height,
&indexer.vecs.blocks.timestamp,
exit,
)?;
self.difficultyepoch.compute_indirect_sequential(
indexes.height.difficultyepoch.collect_one(prev_height).unwrap_or_default(),
indexes
.height
.difficultyepoch
.collect_one(prev_height)
.unwrap_or_default(),
&indexes.difficultyepoch.first_height,
&indexer.vecs.blocks.timestamp,
exit,

View File

@@ -23,11 +23,8 @@ impl Vecs {
exit,
)?;
self.fullness.compute(
starting_indexes.height,
&window_starts,
exit,
|vec| {
self.fullness
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
@@ -35,8 +32,7 @@ impl Vecs {
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -14,12 +14,8 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let weight = ComputedHeightDerivedFull::forced_import(
db,
"block_weight",
version,
indexes,
)?;
let weight =
ComputedHeightDerivedFull::forced_import(db, "block_weight", version, indexes)?;
let fullness =
PercentFromHeightDistribution::forced_import(db, "block_fullness", version, indexes)?;

View File

@@ -5,11 +5,15 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightCumulativeSum, ComputedFromHeight},
internal::{ComputedFromHeight, ComputedFromHeightCumulativeSum},
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
coinblocks_created: ComputedFromHeightCumulativeSum::forced_import(
db,

View File

@@ -2,7 +2,7 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightCumulativeSum, ComputedFromHeight};
use crate::internal::{ComputedFromHeight, ComputedFromHeightCumulativeSum};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {

View File

@@ -14,31 +14,35 @@ impl Vecs {
activity: &activity::Vecs,
exit: &Exit,
) -> Result<()> {
self.cointime_adj_inflation_rate.bps.height.compute_transform2(
self.cointime_adj_inflation_rate
.bps
.height
.compute_transform2(
starting_indexes.height,
&activity.liveliness.height,
&supply.inflation_rate.bps.height,
|(h, liveliness, inflation, ..)| {
(
h,
BasisPointsSigned32::from(f64::from(liveliness) * f64::from(inflation)),
)
},
exit,
)?;
self.cointime_adj_tx_velocity_btc.height.compute_multiply(
starting_indexes.height,
&activity.liveliness.height,
&supply.inflation_rate.bps.height,
|(h, liveliness, inflation, ..)| (h, BasisPointsSigned32::from(f64::from(liveliness) * f64::from(inflation))),
&activity.activity_to_vaultedness_ratio.height,
&supply.velocity.btc.height,
exit,
)?;
self.cointime_adj_tx_velocity_btc
.height
.compute_multiply(
starting_indexes.height,
&activity.activity_to_vaultedness_ratio.height,
&supply.velocity.btc.height,
exit,
)?;
self.cointime_adj_tx_velocity_usd
.height
.compute_multiply(
starting_indexes.height,
&activity.activity_to_vaultedness_ratio.height,
&supply.velocity.usd.height,
exit,
)?;
self.cointime_adj_tx_velocity_usd.height.compute_multiply(
starting_indexes.height,
&activity.activity_to_vaultedness_ratio.height,
&supply.velocity.usd.height,
exit,
)?;
Ok(())
}

View File

@@ -9,7 +9,11 @@ use crate::{
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
cointime_adj_inflation_rate: PercentFromHeight::forced_import(
db,

View File

@@ -6,7 +6,11 @@ use super::Vecs;
use crate::{indexes, internal::FiatFromHeight};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
thermo_cap: FiatFromHeight::forced_import(db, "thermo_cap", version, indexes)?,
investor_cap: FiatFromHeight::forced_import(db, "investor_cap", version, indexes)?,

View File

@@ -22,12 +22,8 @@ impl Vecs {
.compute(starting_indexes, blocks, distribution, exit)?;
// Supply computes next (depends on activity)
self.supply.compute(
starting_indexes,
distribution,
&self.activity,
exit,
)?;
self.supply
.compute(starting_indexes, distribution, &self.activity, exit)?;
// Adjusted velocity metrics (BTC) - can compute without price
self.adjusted

View File

@@ -1,15 +1,17 @@
use std::path::Path;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::Version;
use vecdb::{Database, PAGE_SIZE};
use crate::{
indexes,
internal::{finalize_db, open_db},
};
use super::{
ActivityVecs, AdjustedVecs, CapVecs, DB_NAME, PricingVecs, ReserveRiskVecs, SupplyVecs,
ValueVecs, Vecs,
};
use crate::indexes;
impl Vecs {
pub(crate) fn forced_import(
@@ -17,9 +19,7 @@ impl Vecs {
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let db = open_db(parent_path, DB_NAME, 1_000_000)?;
let version = parent_version;
let v1 = version + Version::ONE;
let activity = ActivityVecs::forced_import(&db, version, indexes)?;
@@ -40,14 +40,7 @@ impl Vecs {
adjusted,
reserve_risk,
};
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
finalize_db(&this.db, &this)?;
Ok(this)
}
}

View File

@@ -3,7 +3,7 @@ use brk_types::{Indexes, StoredF64};
use vecdb::Exit;
use super::{super::value, Vecs};
use crate::{blocks, prices, traits::ComputeRollingMedianFromStarts};
use crate::{blocks, internal::ComputeRollingMedianFromStarts, prices};
impl Vecs {
pub(crate) fn compute(

View File

@@ -12,18 +12,8 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
vaulted_supply: ValueFromHeight::forced_import(
db,
"vaulted_supply",
version,
indexes,
)?,
active_supply: ValueFromHeight::forced_import(
db,
"active_supply",
version,
indexes,
)?,
vaulted_supply: ValueFromHeight::forced_import(db, "vaulted_supply", version, indexes)?,
active_supply: ValueFromHeight::forced_import(db, "active_supply", version, indexes)?,
})
}
}

View File

@@ -41,8 +41,11 @@ impl Vecs {
.btc
.height;
self.cointime_value_destroyed
.compute(starting_indexes.height, &window_starts, exit, |vec| {
self.cointime_value_destroyed.compute(
starting_indexes.height,
&window_starts,
exit,
|vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.price.usd.height,
@@ -50,10 +53,14 @@ impl Vecs {
exit,
)?;
Ok(())
})?;
},
)?;
self.cointime_value_created
.compute(starting_indexes.height, &window_starts, exit, |vec| {
self.cointime_value_created.compute(
starting_indexes.height,
&window_starts,
exit,
|vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.price.usd.height,
@@ -61,10 +68,14 @@ impl Vecs {
exit,
)?;
Ok(())
})?;
},
)?;
self.cointime_value_stored
.compute(starting_indexes.height, &window_starts, exit, |vec| {
self.cointime_value_stored.compute(
starting_indexes.height,
&window_starts,
exit,
|vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.price.usd.height,
@@ -72,7 +83,8 @@ impl Vecs {
exit,
)?;
Ok(())
})?;
},
)?;
// VOCDD: Value of Coin Days Destroyed = price × (CDD / circulating_supply)
// Supply-adjusted to account for growing supply over time

View File

@@ -6,7 +6,11 @@ use super::Vecs;
use crate::{indexes, internal::ComputedFromHeightCumulativeSum};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
cointime_value_destroyed: ComputedFromHeightCumulativeSum::forced_import(
db,

View File

@@ -22,7 +22,10 @@ use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, Rw, StorageMode, WritableVec};
use crate::{indexes, internal::{ComputedFromHeightDistribution, WindowStarts}};
use crate::{
indexes,
internal::{ComputedFromHeightDistribution, WindowStarts},
};
/// Per-block activity counts - reset each block.
///
@@ -137,7 +140,9 @@ impl ActivityCountVecs {
.min(self.both.height.len())
}
pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
[
&mut self.reactivated.height as &mut dyn AnyStoredVec,
&mut self.sending.height as &mut dyn AnyStoredVec,
@@ -181,9 +186,7 @@ impl ActivityCountVecs {
self.balance_decreased
.height
.truncate_push(height, (counts.sending - counts.both).into())?;
self.both
.height
.truncate_push(height, counts.both.into())?;
self.both.height.truncate_push(height, counts.both.into())?;
Ok(())
}
@@ -196,8 +199,10 @@ impl ActivityCountVecs {
self.reactivated.compute_rest(max_from, windows, exit)?;
self.sending.compute_rest(max_from, windows, exit)?;
self.receiving.compute_rest(max_from, windows, exit)?;
self.balance_increased.compute_rest(max_from, windows, exit)?;
self.balance_decreased.compute_rest(max_from, windows, exit)?;
self.balance_increased
.compute_rest(max_from, windows, exit)?;
self.balance_decreased
.compute_rest(max_from, windows, exit)?;
self.both.compute_rest(max_from, windows, exit)?;
Ok(())
}
@@ -223,16 +228,27 @@ impl AddressTypeToActivityCountVecs {
) -> Result<Self> {
Ok(Self::from(
ByAddressType::<ActivityCountVecs>::new_with_name(|type_name| {
ActivityCountVecs::forced_import(db, &format!("{type_name}_{name}"), version, indexes)
ActivityCountVecs::forced_import(
db,
&format!("{type_name}_{name}"),
version,
indexes,
)
})?,
))
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.0.values().map(|v| v.min_stateful_height()).min().unwrap_or(0)
self.0
.values()
.map(|v| v.min_stateful_height())
.min()
.unwrap_or(0)
}
pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new();
for type_vecs in self.0.values_mut() {
vecs.push(&mut type_vecs.reactivated.height);
@@ -274,7 +290,6 @@ impl AddressTypeToActivityCountVecs {
}
Ok(())
}
}
/// Storage for activity metrics (global + per type).
@@ -301,10 +316,14 @@ impl AddressActivityVecs {
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.all.min_stateful_height().min(self.by_addresstype.min_stateful_height())
self.all
.min_stateful_height()
.min(self.by_addresstype.min_stateful_height())
}
pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
self.all
.par_iter_height_mut()
.chain(self.by_addresstype.par_iter_height_mut())
@@ -337,5 +356,4 @@ impl AddressActivityVecs {
self.by_addresstype.truncate_push_height(height, counts)?;
Ok(())
}
}

View File

@@ -70,14 +70,62 @@ impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount {
fn from((groups, starting_height): (&AddressTypeToAddrCountVecs, Height)) -> Self {
if let Some(prev_height) = starting_height.decremented() {
Self(ByAddressType {
p2pk65: groups.p2pk65.count.height.collect_one(prev_height).unwrap().into(),
p2pk33: groups.p2pk33.count.height.collect_one(prev_height).unwrap().into(),
p2pkh: groups.p2pkh.count.height.collect_one(prev_height).unwrap().into(),
p2sh: groups.p2sh.count.height.collect_one(prev_height).unwrap().into(),
p2wpkh: groups.p2wpkh.count.height.collect_one(prev_height).unwrap().into(),
p2wsh: groups.p2wsh.count.height.collect_one(prev_height).unwrap().into(),
p2tr: groups.p2tr.count.height.collect_one(prev_height).unwrap().into(),
p2a: groups.p2a.count.height.collect_one(prev_height).unwrap().into(),
p2pk65: groups
.p2pk65
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2pk33: groups
.p2pk33
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2pkh: groups
.p2pkh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2sh: groups
.p2sh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2wpkh: groups
.p2wpkh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2wsh: groups
.p2wsh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2tr: groups
.p2tr
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2a: groups
.p2a
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
})
} else {
Default::default()
@@ -103,24 +151,23 @@ impl AddressTypeToAddrCountVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self::from(
ByAddressType::<AddrCountVecs>::new_with_name(|type_name| {
AddrCountVecs::forced_import(
db,
&format!("{type_name}_{name}"),
version,
indexes,
)
})?,
))
Ok(Self::from(ByAddressType::<AddrCountVecs>::new_with_name(
|type_name| {
AddrCountVecs::forced_import(db, &format!("{type_name}_{name}"), version, indexes)
},
)?))
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.0.values().map(|v| v.count.height.len()).min().unwrap()
}
pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
self.0.par_values_mut().map(|v| &mut v.count.height as &mut dyn AnyStoredVec)
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
self.0
.par_values_mut()
.map(|v| &mut v.count.height as &mut dyn AnyStoredVec)
}
pub(crate) fn truncate_push_height(
@@ -180,10 +227,16 @@ impl AddrCountsVecs {
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.all.count.height.len().min(self.by_addresstype.min_stateful_height())
self.all
.count
.height
.len()
.min(self.by_addresstype.min_stateful_height())
}
pub(crate) fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
rayon::iter::once(&mut self.all.count.height as &mut dyn AnyStoredVec)
.chain(self.by_addresstype.par_iter_height_mut())
}

View File

@@ -4,9 +4,7 @@ use brk_types::{
EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height,
};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, BytesVec, Rw, Stamp, StorageMode, WritableVec,
};
use vecdb::{AnyStoredVec, BytesVec, Rw, Stamp, StorageMode, WritableVec};
/// Storage for both funded and empty address data.
#[derive(Traversable)]

View File

@@ -1,5 +1,3 @@
//! Growth rate: new_addr_count / addr_count (global + per-type)
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
@@ -27,12 +25,8 @@ impl GrowthRateVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let all = PercentFromHeightDistribution::forced_import(
db,
"growth_rate",
version,
indexes,
)?;
let all =
PercentFromHeightDistribution::forced_import(db, "growth_rate", version, indexes)?;
let by_addresstype = ByAddressType::new_with_name(|name| {
PercentFromHeightDistribution::forced_import(
@@ -43,7 +37,10 @@ impl GrowthRateVecs {
)
})?;
Ok(Self { all, by_addresstype })
Ok(Self {
all,
by_addresstype,
})
}
pub(crate) fn compute(
@@ -64,24 +61,14 @@ impl GrowthRateVecs {
)
})?;
for ((_, growth), ((_, new), (_, addr))) in self
.by_addresstype
.iter_mut()
.zip(
new_addr_count
.by_addresstype
.iter()
.zip(addr_count.by_addresstype.iter()),
)
{
for ((_, growth), ((_, new), (_, addr))) in self.by_addresstype.iter_mut().zip(
new_addr_count
.by_addresstype
.iter()
.zip(addr_count.by_addresstype.iter()),
) {
growth.compute(max_from, windows, exit, |target| {
compute_ratio(
target,
max_from,
&new.height,
&addr.count.height,
exit,
)
compute_ratio(target, max_from, &new.height, &addr.count.height, exit)
})?;
}

View File

@@ -1,12 +1,13 @@
//! New address count: per-block delta of total_addr_count (global + per-type)
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{indexes, internal::{ComputedFromHeightFull, WindowStarts}};
use crate::{
indexes,
internal::{ComputedFromHeightFull, WindowStarts},
};
use super::TotalAddrCountVecs;
@@ -24,12 +25,7 @@ impl NewAddrCountVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let all = ComputedFromHeightFull::forced_import(
db,
"new_addr_count",
version,
indexes,
)?;
let all = ComputedFromHeightFull::forced_import(db, "new_addr_count", version, indexes)?;
let by_addresstype: ByAddressType<ComputedFromHeightFull<StoredU64>> =
ByAddressType::new_with_name(|name| {

View File

@@ -1,5 +1,3 @@
//! Total address count: addr_count + empty_addr_count (global + per-type)
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
@@ -24,25 +22,22 @@ impl TotalAddrCountVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let all = ComputedFromHeight::forced_import(
db,
"total_addr_count",
version,
indexes,
)?;
let all = ComputedFromHeight::forced_import(db, "total_addr_count", version, indexes)?;
let by_addresstype: ByAddressType<ComputedFromHeight<StoredU64>> = ByAddressType::new_with_name(
|name| {
let by_addresstype: ByAddressType<ComputedFromHeight<StoredU64>> =
ByAddressType::new_with_name(|name| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_total_addr_count"),
version,
indexes,
)
},
)?;
})?;
Ok(Self { all, by_addresstype })
Ok(Self {
all,
by_addresstype,
})
}
/// Eagerly compute total = addr_count + empty_addr_count.
@@ -60,22 +55,15 @@ impl TotalAddrCountVecs {
exit,
)?;
for ((_, total), ((_, addr), (_, empty))) in self
.by_addresstype
.iter_mut()
.zip(
addr_count
.by_addresstype
.iter()
.zip(empty_addr_count.by_addresstype.iter()),
)
{
total.height.compute_add(
max_from,
&addr.count.height,
&empty.count.height,
exit,
)?;
for ((_, total), ((_, addr), (_, empty))) in self.by_addresstype.iter_mut().zip(
addr_count
.by_addresstype
.iter()
.zip(empty_addr_count.by_addresstype.iter()),
) {
total
.height
.compute_add(max_from, &addr.count.height, &empty.count.height, exit)?;
}
Ok(())

View File

@@ -60,7 +60,12 @@ impl<T> AddressTypeToTypeIndexMap<T> {
}
/// Insert a value for a specific address type and typeindex.
pub(crate) fn insert_for_type(&mut self, address_type: OutputType, typeindex: TypeIndex, value: T) {
pub(crate) fn insert_for_type(
&mut self,
address_type: OutputType,
typeindex: TypeIndex,
value: T,
) {
self.get_mut(address_type).unwrap().insert(typeindex, value);
}
@@ -76,7 +81,9 @@ impl<T> AddressTypeToTypeIndexMap<T> {
}
/// Iterate mutably over entries by address type.
pub(crate) fn iter_mut(&mut self) -> impl Iterator<Item = (OutputType, &mut FxHashMap<TypeIndex, T>)> {
pub(crate) fn iter_mut(
&mut self,
) -> impl Iterator<Item = (OutputType, &mut FxHashMap<TypeIndex, T>)> {
self.0.iter_mut()
}
}

View File

@@ -49,7 +49,10 @@ impl AddressCache {
/// Merge address data into funded cache.
#[inline]
pub(crate) fn merge_funded(&mut self, data: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>) {
pub(crate) fn merge_funded(
&mut self,
data: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
) {
self.funded.merge_mut(data);
}
@@ -63,7 +66,10 @@ impl AddressCache {
}
/// Update transaction counts for addresses.
pub(crate) fn update_tx_counts(&mut self, txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>) {
pub(crate) fn update_tx_counts(
&mut self,
txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>,
) {
update_tx_counts(&mut self.funded, &mut self.empty, txindex_vecs);
}
@@ -97,7 +103,9 @@ pub(crate) fn load_uncached_address_data(
// Check if this is a new address (typeindex >= first for this height)
let first = *first_addressindexes.get(address_type).unwrap();
if first <= typeindex {
return Ok(Some(WithAddressDataSource::New(FundedAddressData::default())));
return Ok(Some(WithAddressDataSource::New(
FundedAddressData::default(),
)));
}
// Skip if already in cache

View File

@@ -26,7 +26,10 @@ impl<'a> AddressLookup<'a> {
&mut self,
output_type: OutputType,
type_index: TypeIndex,
) -> (&mut WithAddressDataSource<FundedAddressData>, TrackingStatus) {
) -> (
&mut WithAddressDataSource<FundedAddressData>,
TrackingStatus,
) {
use std::collections::hash_map::Entry;
let map = self.funded.get_mut(output_type).unwrap();

View File

@@ -150,14 +150,7 @@ pub(crate) fn process_sent(
.state
.as_mut()
.unwrap()
.send(
addr_data,
value,
current_price,
prev_price,
peak_price,
age,
)?;
.send(addr_data, value, current_price, prev_price, peak_price, age)?;
}
}
}

View File

@@ -102,7 +102,9 @@ pub(crate) fn process_inputs(
);
let mut sent_data = HeightToAddressTypeToVec::with_capacity(estimated_unique_heights);
let mut address_data =
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(
estimated_per_type,
);
let mut txindex_vecs =
AddressTypeToTypeIndexMap::<SmallVec<[TxIndex; 4]>>::with_capacity(estimated_per_type);

View File

@@ -52,7 +52,9 @@ pub(crate) fn process_outputs(
let mut transacted = Transacted::default();
let mut received_data = AddressTypeToVec::with_capacity(estimated_per_type);
let mut address_data =
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(
estimated_per_type,
);
let mut txindex_vecs =
AddressTypeToTypeIndexMap::<SmallVec<[TxIndex; 4]>>::with_capacity(estimated_per_type);

View File

@@ -33,13 +33,11 @@ impl AddressCohorts {
let v = version + VERSION;
// Helper to create a cohort - only amount_range cohorts have state
let create = |filter: Filter,
name: &'static str,
has_state: bool|
-> Result<AddressCohortVecs> {
let sp = if has_state { Some(states_path) } else { None };
AddressCohortVecs::forced_import(db, filter, name, v, indexes, sp)
};
let create =
|filter: Filter, name: &'static str, has_state: bool| -> Result<AddressCohortVecs> {
let sp = if has_state { Some(states_path) } else { None };
AddressCohortVecs::forced_import(db, filter, name, v, indexes, sp)
};
let full = |f: Filter, name: &'static str| create(f, name, true);
let none = |f: Filter, name: &'static str| create(f, name, false);
@@ -156,7 +154,9 @@ impl AddressCohorts {
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub(crate) fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_vecs_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
// Collect all vecs from all cohorts
self.0
.iter_mut()

View File

@@ -5,21 +5,15 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Indexes, Sats, StoredF64, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, Rw, StorageMode};
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, Rw, StorageMode, WritableVec};
use crate::{
blocks,
distribution::state::AddressCohortState,
indexes,
internal::ComputedFromHeight,
prices,
blocks, distribution::state::AddressCohortState, indexes, internal::ComputedFromHeight, prices,
};
use crate::distribution::metrics::{BasicCohortMetrics, CohortMetricsBase, ImportConfig};
use super::super::traits::{CohortVecs, DynCohortVecs};
/// Address cohort with metrics and optional runtime state.
#[derive(Traversable)]
pub struct AddressCohortVecs<M: StorageMode = Rw> {
/// Starting height when state was imported
@@ -60,8 +54,7 @@ impl AddressCohortVecs {
Ok(Self {
starting_height: None,
state: states_path
.map(|path| Box::new(AddressCohortState::new(path, &full_name))),
state: states_path.map(|path| Box::new(AddressCohortState::new(path, &full_name))),
metrics: BasicCohortMetrics::forced_import(&cfg)?,
@@ -86,7 +79,9 @@ impl AddressCohortVecs {
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub(crate) fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
pub(crate) fn par_iter_vecs_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
rayon::iter::once(&mut self.addr_count.height as &mut dyn AnyStoredVec)
.chain(self.metrics.par_iter_mut())
}

View File

@@ -1,8 +1,8 @@
use std::{cmp::Reverse, collections::BinaryHeap, fs, path::Path};
use brk_cohort::{
ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount,
ByMaxAge, ByMinAge, BySpendableType, ByYear, CohortContext, Filter, Filtered, TERM_NAMES, Term,
ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount, ByMaxAge, ByMinAge,
BySpendableType, ByYear, CohortContext, Filter, Filtered, TERM_NAMES, Term,
};
use brk_error::Result;
use brk_traversable::Traversable;
@@ -11,7 +11,9 @@ use brk_types::{
Sats, Version,
};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, ReadOnlyClone, ReadableVec, Rw, StorageMode, WritableVec};
use vecdb::{
AnyStoredVec, Database, Exit, ReadOnlyClone, ReadableVec, Rw, StorageMode, WritableVec,
};
use crate::{
blocks,
@@ -23,8 +25,7 @@ use crate::{
use crate::distribution::metrics::{
AdjustedCohortMetrics, AllCohortMetrics, BasicCohortMetrics, CohortMetricsBase,
ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, ImportConfig,
SupplyMetrics,
ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, ImportConfig, SupplyMetrics,
};
use super::vecs::UTXOCohortVecs;
@@ -146,12 +147,7 @@ impl UTXOCohorts<Rw> {
version: v,
indexes,
};
UTXOCohortVecs::new(
None,
ExtendedAdjustedCohortMetrics::forced_import(
&cfg,
)?,
)
UTXOCohortVecs::new(None, ExtendedAdjustedCohortMetrics::forced_import(&cfg)?)
};
// lth: ExtendedCohortMetrics
@@ -165,10 +161,7 @@ impl UTXOCohorts<Rw> {
version: v,
indexes,
};
UTXOCohortVecs::new(
None,
ExtendedCohortMetrics::forced_import(&cfg)?,
)
UTXOCohortVecs::new(None, ExtendedCohortMetrics::forced_import(&cfg)?)
};
// max_age: AdjustedCohortMetrics (adjusted + peak_regret)
@@ -243,9 +236,6 @@ impl UTXOCohorts<Rw> {
})
}
// === Iteration helpers ===
/// Parallel iterator over all separate (stateful) cohorts.
pub(crate) fn par_iter_separate_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn DynCohortVecs> {
@@ -296,9 +286,6 @@ impl UTXOCohorts<Rw> {
v.into_iter()
}
// === Computation methods ===
/// Compute overlapping cohorts from component age/amount range cohorts.
pub(crate) fn compute_overlapping_vecs(
&mut self,
starting_indexes: &Indexes,
@@ -573,8 +560,22 @@ impl UTXOCohorts<Rw> {
HM: ReadableVec<Height, Dollars> + Sync,
{
// Get up_to_1h value sources for adjusted computation (cloned to avoid borrow conflicts).
let up_to_1h_value_created = self.age_range.up_to_1h.metrics.realized.value_created.height.read_only_clone();
let up_to_1h_value_destroyed = self.age_range.up_to_1h.metrics.realized.value_destroyed.height.read_only_clone();
let up_to_1h_value_created = self
.age_range
.up_to_1h
.metrics
.realized
.value_created
.height
.read_only_clone();
let up_to_1h_value_destroyed = self
.age_range
.up_to_1h
.metrics
.realized
.value_destroyed
.height
.read_only_clone();
// "all" cohort computed first (no all_supply_sats needed).
self.all.metrics.compute_rest_part2(
@@ -1024,5 +1025,4 @@ impl UTXOCohorts<Rw> {
Ok(())
}
}

View File

@@ -24,19 +24,32 @@ impl UTXOCohorts<Rw> {
let supply_state = received.spendable_supply;
// New UTXOs go into up_to_1h, current epoch, and current year
self.age_range.up_to_1h.state.as_mut().unwrap().receive_utxo(&supply_state, price);
self.epoch.mut_vec_from_height(height).state.as_mut().unwrap().receive_utxo(&supply_state, price);
self.year.mut_vec_from_timestamp(timestamp).state.as_mut().unwrap().receive_utxo(&supply_state, price);
self.age_range
.up_to_1h
.state
.as_mut()
.unwrap()
.receive_utxo(&supply_state, price);
self.epoch
.mut_vec_from_height(height)
.state
.as_mut()
.unwrap()
.receive_utxo(&supply_state, price);
self.year
.mut_vec_from_timestamp(timestamp)
.state
.as_mut()
.unwrap()
.receive_utxo(&supply_state, price);
// Update output type cohorts
self.type_
.iter_typed_mut()
.for_each(|(output_type, vecs)| {
vecs.state
.as_mut()
.unwrap()
.receive_utxo(received.by_type.get(output_type), price)
});
self.type_.iter_typed_mut().for_each(|(output_type, vecs)| {
vecs.state
.as_mut()
.unwrap()
.receive_utxo(received.by_type.get(output_type), price)
});
// Update amount range cohorts
received

View File

@@ -36,9 +36,8 @@ impl UTXOCohorts<Rw> {
let mut min_receive_height: Option<Height> = None;
for (receive_height, sent) in height_to_sent {
min_receive_height = Some(
min_receive_height.map_or(receive_height, |cur| cur.min(receive_height)),
);
min_receive_height =
Some(min_receive_height.map_or(receive_height, |cur| cur.min(receive_height)));
// Update chain_state to reflect spent supply
chain_state[receive_height.to_usize()].supply -= &sent.spendable_supply;
@@ -52,19 +51,25 @@ impl UTXOCohorts<Rw> {
let peak_price = price_range_max.max_between(receive_height, send_height);
// Update age range cohort (direct index lookup)
self.age_range.get_mut(age).state.as_mut().unwrap().send_utxo(
&sent.spendable_supply,
current_price,
prev_price,
peak_price,
age,
);
self.age_range
.get_mut(age)
.state
.as_mut()
.unwrap()
.send_utxo(
&sent.spendable_supply,
current_price,
prev_price,
peak_price,
age,
);
// Update epoch cohort (direct lookup by height)
self.epoch
.mut_vec_from_height(receive_height)
.state
.as_mut().unwrap()
.as_mut()
.unwrap()
.send_utxo(
&sent.spendable_supply,
current_price,
@@ -77,7 +82,8 @@ impl UTXOCohorts<Rw> {
self.year
.mut_vec_from_timestamp(block_state.timestamp)
.state
.as_mut().unwrap()
.as_mut()
.unwrap()
.send_utxo(
&sent.spendable_supply,
current_price,
@@ -91,26 +97,24 @@ impl UTXOCohorts<Rw> {
.spendable
.iter_typed()
.for_each(|(output_type, supply_state)| {
self.type_.get_mut(output_type).state.as_mut().unwrap().send_utxo(
supply_state,
current_price,
prev_price,
peak_price,
age,
)
self.type_
.get_mut(output_type)
.state
.as_mut()
.unwrap()
.send_utxo(supply_state, current_price, prev_price, peak_price, age)
});
// Update amount range cohorts
sent.by_size_group
.iter_typed()
.for_each(|(group, supply_state)| {
self.amount_range.get_mut(group).state.as_mut().unwrap().send_utxo(
supply_state,
current_price,
prev_price,
peak_price,
age,
);
self.amount_range
.get_mut(group)
.state
.as_mut()
.unwrap()
.send_utxo(supply_state, current_price, prev_price, peak_price, age);
});
}

View File

@@ -16,7 +16,11 @@ impl UTXOCohorts<Rw> {
/// - k = 20 boundaries to check
/// - n = total blocks in chain_state
/// - Linear scan for end_idx is faster than binary search since typically 0-2 blocks cross each boundary
pub(crate) fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) {
pub(crate) fn tick_tock_next_block(
&mut self,
chain_state: &[BlockState],
timestamp: Timestamp,
) {
if chain_state.is_empty() {
return;
}

View File

@@ -10,10 +10,6 @@ use crate::distribution::metrics::CohortMetricsBase;
use super::super::traits::DynCohortVecs;
/// UTXO cohort with metrics and optional runtime state.
///
/// Generic over the metrics type to support different cohort configurations
/// (e.g. AllCohortMetrics, ExtendedCohortMetrics, BasicCohortMetrics, etc.)
#[derive(Traversable)]
pub struct UTXOCohortVecs<Metrics> {
/// Starting height when state was imported
@@ -38,7 +34,6 @@ impl<Metrics> UTXOCohortVecs<Metrics> {
metrics,
}
}
}
impl<Metrics: CohortMetricsBase + Traversable> Filtered for UTXOCohortVecs<Metrics> {
@@ -117,8 +112,11 @@ impl<Metrics: CohortMetricsBase + Traversable> DynCohortVecs for UTXOCohortVecs<
height_price: Cents,
) -> Result<()> {
if let Some(state) = self.state.as_mut() {
self.metrics
.compute_then_truncate_push_unrealized_states(height, height_price, state)?;
self.metrics.compute_then_truncate_push_unrealized_states(
height,
height_price,
state,
)?;
}
Ok(())
}

View File

@@ -3,7 +3,9 @@ use std::thread;
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Cents, Date, Height, OutputType, Sats, Timestamp, TxIndex, TypeIndex, ONE_DAY_IN_SEC};
use brk_types::{
Cents, Date, Height, ONE_DAY_IN_SEC, OutputType, Sats, Timestamp, TxIndex, TypeIndex,
};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use tracing::{debug, info};
@@ -401,12 +403,11 @@ pub(crate) fn process_blocks(
// Main thread: Update UTXO cohorts
vecs.utxo_cohorts
.receive(transacted, height, timestamp, block_price);
if let Some(min_h) = vecs.utxo_cohorts
.send(height_to_sent, chain_state, ctx.price_range_max)
if let Some(min_h) =
vecs.utxo_cohorts
.send(height_to_sent, chain_state, ctx.price_range_max)
{
min_supply_modified = Some(
min_supply_modified.map_or(min_h, |cur| cur.min(min_h)),
);
min_supply_modified = Some(min_supply_modified.map_or(min_h, |cur| cur.min(min_h)));
}
});
@@ -423,8 +424,7 @@ pub(crate) fn process_blocks(
let h = height.to_usize();
let is_last_of_day = height == last_height
|| *cached_timestamps[h] / ONE_DAY_IN_SEC
!= *cached_timestamps[h + 1] / ONE_DAY_IN_SEC;
|| *cached_timestamps[h] / ONE_DAY_IN_SEC != *cached_timestamps[h + 1] / ONE_DAY_IN_SEC;
let date_opt = is_last_of_day.then(|| Date::from(timestamp));
push_cohort_states(

View File

@@ -1,9 +1,7 @@
use brk_cohort::{ByAddressType, ByAnyAddress};
use brk_indexer::Indexer;
use brk_types::{
Height, OutPoint, OutputType, Sats, StoredU64, TxIndex, TypeIndex,
};
use vecdb::{Reader, ReadableVec, VecIndex};
use brk_types::{Height, OutPoint, OutputType, Sats, StoredU64, TxIndex, TypeIndex};
use vecdb::{ReadableVec, Reader, VecIndex};
use crate::{
distribution::{
@@ -46,9 +44,21 @@ impl<'a> TxOutReaders<'a> {
output_count: usize,
) -> Vec<TxOutData> {
let end = first_txoutindex + output_count;
self.indexer.vecs.outputs.value.collect_range_into_at(first_txoutindex, end, &mut self.values_buf);
self.indexer.vecs.outputs.outputtype.collect_range_into_at(first_txoutindex, end, &mut self.outputtypes_buf);
self.indexer.vecs.outputs.typeindex.collect_range_into_at(first_txoutindex, end, &mut self.typeindexes_buf);
self.indexer.vecs.outputs.value.collect_range_into_at(
first_txoutindex,
end,
&mut self.values_buf,
);
self.indexer.vecs.outputs.outputtype.collect_range_into_at(
first_txoutindex,
end,
&mut self.outputtypes_buf,
);
self.indexer.vecs.outputs.typeindex.collect_range_into_at(
first_txoutindex,
end,
&mut self.typeindexes_buf,
);
self.values_buf
.iter()
@@ -94,12 +104,31 @@ impl<'a> TxInReaders<'a> {
current_height: Height,
) -> (Vec<Sats>, Vec<Height>, Vec<OutputType>, Vec<TypeIndex>) {
let end = first_txinindex + input_count;
let values: Vec<Sats> = self.txins.spent.value.collect_range_at(first_txinindex, end);
self.indexer.vecs.inputs.outpoint.collect_range_into_at(first_txinindex, end, &mut self.outpoints_buf);
let outputtypes: Vec<OutputType> = self.indexer.vecs.inputs.outputtype.collect_range_at(first_txinindex, end);
let typeindexes: Vec<TypeIndex> = self.indexer.vecs.inputs.typeindex.collect_range_at(first_txinindex, end);
let values: Vec<Sats> = self
.txins
.spent
.value
.collect_range_at(first_txinindex, end);
self.indexer.vecs.inputs.outpoint.collect_range_into_at(
first_txinindex,
end,
&mut self.outpoints_buf,
);
let outputtypes: Vec<OutputType> = self
.indexer
.vecs
.inputs
.outputtype
.collect_range_at(first_txinindex, end);
let typeindexes: Vec<TypeIndex> = self
.indexer
.vecs
.inputs
.typeindex
.collect_range_at(first_txinindex, end);
let prev_heights: Vec<Height> = self.outpoints_buf
let prev_heights: Vec<Height> = self
.outpoints_buf
.iter()
.map(|outpoint| {
if outpoint.is_coinbase() {
@@ -175,7 +204,11 @@ impl IndexToTxIndexBuf {
txindex_to_count: &impl ReadableVec<TxIndex, StoredU64>,
) -> &[TxIndex] {
let first = block_first_txindex.to_usize();
txindex_to_count.collect_range_into_at(first, first + block_tx_count as usize, &mut self.counts);
txindex_to_count.collect_range_into_at(
first,
first + block_tx_count as usize,
&mut self.counts,
);
let total: u64 = self.counts.iter().map(|c| u64::from(*c)).sum();
self.result.clear();
@@ -183,7 +216,8 @@ impl IndexToTxIndexBuf {
for (offset, count) in self.counts.iter().enumerate() {
let txindex = TxIndex::from(first + offset);
self.result.extend(std::iter::repeat_n(txindex, u64::from(*count) as usize));
self.result
.extend(std::iter::repeat_n(txindex, u64::from(*count) as usize));
}
&self.result

View File

@@ -71,9 +71,15 @@ pub(crate) fn recover_state(
}
// Import UTXO cohort states - all must succeed
debug!("importing UTXO cohort states at height {}", consistent_height);
debug!(
"importing UTXO cohort states at height {}",
consistent_height
);
if !utxo_cohorts.import_separate_states(consistent_height) {
warn!("UTXO cohort state import failed at height {}", consistent_height);
warn!(
"UTXO cohort state import failed at height {}",
consistent_height
);
return Ok(RecoveredState {
starting_height: Height::ZERO,
});
@@ -81,9 +87,15 @@ pub(crate) fn recover_state(
debug!("UTXO cohort states imported");
// Import address cohort states - all must succeed
debug!("importing address cohort states at height {}", consistent_height);
debug!(
"importing address cohort states at height {}",
consistent_height
);
if !address_cohorts.import_separate_states(consistent_height) {
warn!("Address cohort state import failed at height {}", consistent_height);
warn!(
"Address cohort state import failed at height {}",
consistent_height
);
return Ok(RecoveredState {
starting_height: Height::ZERO,
});
@@ -163,16 +175,25 @@ fn rollback_states(
return Height::ZERO;
};
let chain_height = Height::from(s).incremented();
debug!("chain_state rolled back to stamp {:?}, height {}", s, chain_height);
debug!(
"chain_state rolled back to stamp {:?}, height {}",
s, chain_height
);
heights.insert(chain_height);
let Ok(stamps) = address_indexes_rollbacks else {
warn!("address_indexes rollback failed: {:?}", address_indexes_rollbacks);
warn!(
"address_indexes rollback failed: {:?}",
address_indexes_rollbacks
);
return Height::ZERO;
};
for (i, s) in stamps.iter().enumerate() {
let h = Height::from(*s).incremented();
debug!("address_indexes[{}] rolled back to stamp {:?}, height {}", i, s, h);
debug!(
"address_indexes[{}] rolled back to stamp {:?}, height {}",
i, s, h
);
heights.insert(h);
}
@@ -182,7 +203,10 @@ fn rollback_states(
};
for (i, s) in stamps.iter().enumerate() {
let h = Height::from(*s).incremented();
debug!("address_data[{}] rolled back to stamp {:?}, height {}", i, s, h);
debug!(
"address_data[{}] rolled back to stamp {:?}, height {}",
i, s, h
);
heights.insert(h);
}

View File

@@ -4,7 +4,7 @@ use brk_error::Result;
use brk_types::{EmptyAddressData, FundedAddressData, Height};
use rayon::prelude::*;
use tracing::info;
use vecdb::{AnyStoredVec, AnyVec, VecIndex, WritableVec, Stamp};
use vecdb::{AnyStoredVec, AnyVec, Stamp, VecIndex, WritableVec};
use crate::distribution::{
Vecs,
@@ -65,8 +65,8 @@ pub(crate) fn write(
// Incremental supply_state write: only rewrite from the earliest modified height
let supply_state_len = vecs.supply_state.len();
let truncate_to = min_supply_modified
.map_or(supply_state_len, |h| h.to_usize().min(supply_state_len));
let truncate_to =
min_supply_modified.map_or(supply_state_len, |h| h.to_usize().min(supply_state_len));
vecs.supply_state
.truncate_if_needed(Height::from(truncate_to))?;
for block_state in &chain_state[truncate_to..] {

View File

@@ -2,7 +2,9 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Height, Indexes, Sats, StoredF64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode, WritableVec};
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode, WritableVec,
};
use crate::{
blocks,
@@ -51,7 +53,8 @@ impl ActivityMetrics {
cfg.version,
)?,
coinblocks_destroyed: cfg.import_cumulative_sum("coinblocks_destroyed", Version::ZERO)?,
coinblocks_destroyed: cfg
.import_cumulative_sum("coinblocks_destroyed", Version::ZERO)?,
coindays_destroyed: cfg.import_cumulative_sum("coindays_destroyed", Version::ZERO)?,
})
}
@@ -151,25 +154,27 @@ impl ActivityMetrics {
exit,
)?;
self.coinblocks_destroyed.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satblocks_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
self.coinblocks_destroyed
.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satblocks_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
self.coindays_destroyed.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satdays_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
self.coindays_destroyed
.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satdays_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
Ok(())
}

View File

@@ -28,19 +28,45 @@ pub struct AdjustedCohortMetrics<M: StorageMode = Rw> {
}
impl CohortMetricsBase for AdjustedCohortMetrics {
fn filter(&self) -> &Filter { &self.filter }
fn supply(&self) -> &SupplyMetrics { &self.supply }
fn supply_mut(&mut self) -> &mut SupplyMetrics { &mut self.supply }
fn outputs(&self) -> &OutputsMetrics { &self.outputs }
fn outputs_mut(&mut self) -> &mut OutputsMetrics { &mut self.outputs }
fn activity(&self) -> &ActivityMetrics { &self.activity }
fn activity_mut(&mut self) -> &mut ActivityMetrics { &mut self.activity }
fn realized_base(&self) -> &RealizedBase { &self.realized }
fn realized_base_mut(&mut self) -> &mut RealizedBase { &mut self.realized }
fn unrealized_base(&self) -> &UnrealizedBase { &self.unrealized }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.unrealized }
fn cost_basis_base(&self) -> &CostBasisBase { &self.cost_basis }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { &mut self.cost_basis }
fn filter(&self) -> &Filter {
&self.filter
}
fn supply(&self) -> &SupplyMetrics {
&self.supply
}
fn supply_mut(&mut self) -> &mut SupplyMetrics {
&mut self.supply
}
fn outputs(&self) -> &OutputsMetrics {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut OutputsMetrics {
&mut self.outputs
}
fn activity(&self) -> &ActivityMetrics {
&self.activity
}
fn activity_mut(&mut self) -> &mut ActivityMetrics {
&mut self.activity
}
fn realized_base(&self) -> &RealizedBase {
&self.realized
}
fn realized_base_mut(&mut self) -> &mut RealizedBase {
&mut self.realized
}
fn unrealized_base(&self) -> &UnrealizedBase {
&self.unrealized
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
&mut self.unrealized
}
fn cost_basis_base(&self) -> &CostBasisBase {
&self.cost_basis
}
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase {
&mut self.cost_basis
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply.validate_computed_versions(base_version)?;
@@ -48,7 +74,10 @@ impl CohortMetricsBase for AdjustedCohortMetrics {
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self, height: Height, height_price: Cents, state: &mut CohortState,
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
@@ -69,9 +98,7 @@ impl CohortMetricsBase for AdjustedCohortMetrics {
}
impl AdjustedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedWithAdjusted::forced_import(cfg)?;
@@ -125,5 +152,4 @@ impl AdjustedCohortMetrics {
Ok(())
}
}

View File

@@ -84,8 +84,7 @@ impl CohortMetricsBase for AllCohortMetrics {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized
.truncate_push(height, &unrealized_state)?;
self.unrealized.truncate_push(height, &unrealized_state)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;

View File

@@ -8,8 +8,8 @@ use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{blocks, distribution::state::CohortState, prices};
use crate::distribution::metrics::{
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics,
RealizedBase, RelativeWithRelToAll, SupplyMetrics, UnrealizedBase,
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics, RealizedBase,
RelativeWithRelToAll, SupplyMetrics, UnrealizedBase,
};
/// Basic cohort metrics: no extensions, with relative (rel_to_all).
@@ -28,26 +28,55 @@ pub struct BasicCohortMetrics<M: StorageMode = Rw> {
}
impl CohortMetricsBase for BasicCohortMetrics {
fn filter(&self) -> &Filter { &self.filter }
fn supply(&self) -> &SupplyMetrics { &self.supply }
fn supply_mut(&mut self) -> &mut SupplyMetrics { &mut self.supply }
fn outputs(&self) -> &OutputsMetrics { &self.outputs }
fn outputs_mut(&mut self) -> &mut OutputsMetrics { &mut self.outputs }
fn activity(&self) -> &ActivityMetrics { &self.activity }
fn activity_mut(&mut self) -> &mut ActivityMetrics { &mut self.activity }
fn realized_base(&self) -> &RealizedBase { &self.realized }
fn realized_base_mut(&mut self) -> &mut RealizedBase { &mut self.realized }
fn unrealized_base(&self) -> &UnrealizedBase { &self.unrealized }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.unrealized }
fn cost_basis_base(&self) -> &CostBasisBase { &self.cost_basis }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { &mut self.cost_basis }
fn filter(&self) -> &Filter {
&self.filter
}
fn supply(&self) -> &SupplyMetrics {
&self.supply
}
fn supply_mut(&mut self) -> &mut SupplyMetrics {
&mut self.supply
}
fn outputs(&self) -> &OutputsMetrics {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut OutputsMetrics {
&mut self.outputs
}
fn activity(&self) -> &ActivityMetrics {
&self.activity
}
fn activity_mut(&mut self) -> &mut ActivityMetrics {
&mut self.activity
}
fn realized_base(&self) -> &RealizedBase {
&self.realized
}
fn realized_base_mut(&mut self) -> &mut RealizedBase {
&mut self.realized
}
fn unrealized_base(&self) -> &UnrealizedBase {
&self.unrealized
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
&mut self.unrealized
}
fn cost_basis_base(&self) -> &CostBasisBase {
&self.cost_basis
}
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase {
&mut self.cost_basis
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply.validate_computed_versions(base_version)?;
self.activity.validate_computed_versions(base_version)?;
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self, height: Height, height_price: Cents, state: &mut CohortState,
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
@@ -68,9 +97,7 @@ impl CohortMetricsBase for BasicCohortMetrics {
}
impl BasicCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedBase::forced_import(cfg)?;

View File

@@ -29,19 +29,45 @@ pub struct ExtendedCohortMetrics<M: StorageMode = Rw> {
}
impl CohortMetricsBase for ExtendedCohortMetrics {
fn filter(&self) -> &Filter { &self.filter }
fn supply(&self) -> &SupplyMetrics { &self.supply }
fn supply_mut(&mut self) -> &mut SupplyMetrics { &mut self.supply }
fn outputs(&self) -> &OutputsMetrics { &self.outputs }
fn outputs_mut(&mut self) -> &mut OutputsMetrics { &mut self.outputs }
fn activity(&self) -> &ActivityMetrics { &self.activity }
fn activity_mut(&mut self) -> &mut ActivityMetrics { &mut self.activity }
fn realized_base(&self) -> &RealizedBase { &self.realized }
fn realized_base_mut(&mut self) -> &mut RealizedBase { &mut self.realized }
fn unrealized_base(&self) -> &UnrealizedBase { &self.unrealized }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.unrealized }
fn cost_basis_base(&self) -> &CostBasisBase { &self.cost_basis }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { &mut self.cost_basis }
fn filter(&self) -> &Filter {
&self.filter
}
fn supply(&self) -> &SupplyMetrics {
&self.supply
}
fn supply_mut(&mut self) -> &mut SupplyMetrics {
&mut self.supply
}
fn outputs(&self) -> &OutputsMetrics {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut OutputsMetrics {
&mut self.outputs
}
fn activity(&self) -> &ActivityMetrics {
&self.activity
}
fn activity_mut(&mut self) -> &mut ActivityMetrics {
&mut self.activity
}
fn realized_base(&self) -> &RealizedBase {
&self.realized
}
fn realized_base_mut(&mut self) -> &mut RealizedBase {
&mut self.realized
}
fn unrealized_base(&self) -> &UnrealizedBase {
&self.unrealized
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
&mut self.unrealized
}
fn cost_basis_base(&self) -> &CostBasisBase {
&self.cost_basis
}
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase {
&mut self.cost_basis
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply.validate_computed_versions(base_version)?;
@@ -50,13 +76,18 @@ impl CohortMetricsBase for ExtendedCohortMetrics {
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self, height: Height, height_price: Cents, state: &mut CohortState,
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
self.cost_basis.extended.truncate_push_percentiles(height, state, height_price)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
@@ -73,9 +104,7 @@ impl CohortMetricsBase for ExtendedCohortMetrics {
}
impl ExtendedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedWithExtended::forced_import(cfg)?;
@@ -125,5 +154,4 @@ impl ExtendedCohortMetrics {
Ok(())
}
}

View File

@@ -29,19 +29,45 @@ pub struct ExtendedAdjustedCohortMetrics<M: StorageMode = Rw> {
}
impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
fn filter(&self) -> &Filter { &self.filter }
fn supply(&self) -> &SupplyMetrics { &self.supply }
fn supply_mut(&mut self) -> &mut SupplyMetrics { &mut self.supply }
fn outputs(&self) -> &OutputsMetrics { &self.outputs }
fn outputs_mut(&mut self) -> &mut OutputsMetrics { &mut self.outputs }
fn activity(&self) -> &ActivityMetrics { &self.activity }
fn activity_mut(&mut self) -> &mut ActivityMetrics { &mut self.activity }
fn realized_base(&self) -> &RealizedBase { &self.realized }
fn realized_base_mut(&mut self) -> &mut RealizedBase { &mut self.realized }
fn unrealized_base(&self) -> &UnrealizedBase { &self.unrealized }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.unrealized }
fn cost_basis_base(&self) -> &CostBasisBase { &self.cost_basis }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { &mut self.cost_basis }
fn filter(&self) -> &Filter {
&self.filter
}
fn supply(&self) -> &SupplyMetrics {
&self.supply
}
fn supply_mut(&mut self) -> &mut SupplyMetrics {
&mut self.supply
}
fn outputs(&self) -> &OutputsMetrics {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut OutputsMetrics {
&mut self.outputs
}
fn activity(&self) -> &ActivityMetrics {
&self.activity
}
fn activity_mut(&mut self) -> &mut ActivityMetrics {
&mut self.activity
}
fn realized_base(&self) -> &RealizedBase {
&self.realized
}
fn realized_base_mut(&mut self) -> &mut RealizedBase {
&mut self.realized
}
fn unrealized_base(&self) -> &UnrealizedBase {
&self.unrealized
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
&mut self.unrealized
}
fn cost_basis_base(&self) -> &CostBasisBase {
&self.cost_basis
}
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase {
&mut self.cost_basis
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply.validate_computed_versions(base_version)?;
self.activity.validate_computed_versions(base_version)?;
@@ -49,13 +75,18 @@ impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self, height: Height, height_price: Cents, state: &mut CohortState,
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized.truncate_push(height, &unrealized_state)?;
self.cost_basis.extended.truncate_push_percentiles(height, state, height_price)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, height_price)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
@@ -72,9 +103,7 @@ impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
}
impl ExtendedAdjustedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedWithExtendedAdjusted::forced_import(cfg)?;
@@ -129,5 +158,4 @@ impl ExtendedAdjustedCohortMetrics {
Ok(())
}
}

View File

@@ -7,14 +7,14 @@ use vecdb::{BytesVec, BytesVecValue, Database, ImportableVec};
use crate::{
indexes,
internal::{
CentsType, ComputedFromHeight, ComputedFromHeightCumulative, ComputedFromHeightCumulativeSum,
ComputedFromHeightRatio, FiatFromHeight, NumericValue, PercentFromHeight,
PercentRollingEmas1w1m, PercentRollingWindows, Price, RollingEmas1w1m, RollingEmas2w,
RollingWindows, ValueFromHeight, ValueFromHeightChange, ValueFromHeightCumulative,
CentsType, ComputedFromHeight, ComputedFromHeightCumulative,
ComputedFromHeightCumulativeSum, ComputedFromHeightRatio, FiatFromHeight, NumericValue,
PercentFromHeight, PercentRollingEmas1w1m, PercentRollingWindows, Price, RollingEmas1w1m,
RollingEmas2w, RollingWindows, ValueFromHeight, ValueFromHeightChange,
ValueFromHeightCumulative,
},
};
/// Configuration for importing metrics.
#[derive(Clone, Copy)]
pub struct ImportConfig<'a> {
pub db: &'a Database,
@@ -25,7 +25,6 @@ pub struct ImportConfig<'a> {
}
impl<'a> ImportConfig<'a> {
/// Get full metric name with filter prefix.
pub(crate) fn name(&self, suffix: &str) -> String {
if self.full_name.is_empty() {
suffix.to_string()
@@ -36,14 +35,17 @@ impl<'a> ImportConfig<'a> {
}
}
// --- Computed types ---
pub(crate) fn import_computed<T: NumericValue + JsonSchema>(
&self,
suffix: &str,
offset: Version,
) -> Result<ComputedFromHeight<T>> {
ComputedFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
ComputedFromHeight::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_cumulative<T: NumericValue + JsonSchema>(
@@ -51,7 +53,12 @@ impl<'a> ImportConfig<'a> {
suffix: &str,
offset: Version,
) -> Result<ComputedFromHeightCumulative<T>> {
ComputedFromHeightCumulative::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
ComputedFromHeightCumulative::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_cumulative_sum<T: NumericValue + JsonSchema>(
@@ -59,17 +66,25 @@ impl<'a> ImportConfig<'a> {
suffix: &str,
offset: Version,
) -> Result<ComputedFromHeightCumulativeSum<T>> {
ComputedFromHeightCumulativeSum::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
ComputedFromHeightCumulativeSum::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
// --- Percent types ---
pub(crate) fn import_percent_bp16(
&self,
suffix: &str,
offset: Version,
) -> Result<PercentFromHeight<BasisPoints16>> {
PercentFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentFromHeight::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_percent_bps16(
@@ -77,62 +92,158 @@ impl<'a> ImportConfig<'a> {
suffix: &str,
offset: Version,
) -> Result<PercentFromHeight<BasisPointsSigned16>> {
PercentFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentFromHeight::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
// --- Value types ---
pub(crate) fn import_fiat<C: CentsType>(&self, suffix: &str, offset: Version) -> Result<FiatFromHeight<C>> {
FiatFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_fiat<C: CentsType>(
&self,
suffix: &str,
offset: Version,
) -> Result<FiatFromHeight<C>> {
FiatFromHeight::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_value(&self, suffix: &str, offset: Version) -> Result<ValueFromHeight> {
ValueFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
ValueFromHeight::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_value_cumulative(&self, suffix: &str, offset: Version) -> Result<ValueFromHeightCumulative> {
ValueFromHeightCumulative::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_value_cumulative(
&self,
suffix: &str,
offset: Version,
) -> Result<ValueFromHeightCumulative> {
ValueFromHeightCumulative::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_value_change(&self, suffix: &str, offset: Version) -> Result<ValueFromHeightChange> {
ValueFromHeightChange::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_value_change(
&self,
suffix: &str,
offset: Version,
) -> Result<ValueFromHeightChange> {
ValueFromHeightChange::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
// --- Price and ratio ---
pub(crate) fn import_price(&self, suffix: &str, offset: Version) -> Result<Price<ComputedFromHeight<Cents>>> {
Price::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_price(
&self,
suffix: &str,
offset: Version,
) -> Result<Price<ComputedFromHeight<Cents>>> {
Price::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_ratio(&self, suffix: &str, offset: Version) -> Result<ComputedFromHeightRatio> {
ComputedFromHeightRatio::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_ratio(
&self,
suffix: &str,
offset: Version,
) -> Result<ComputedFromHeightRatio> {
ComputedFromHeightRatio::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
// --- Bytes ---
pub(crate) fn import_bytes<T: BytesVecValue>(&self, suffix: &str, offset: Version) -> Result<BytesVec<Height, T>> {
Ok(BytesVec::forced_import(self.db, &self.name(suffix), self.version + offset)?)
pub(crate) fn import_bytes<T: BytesVecValue>(
&self,
suffix: &str,
offset: Version,
) -> Result<BytesVec<Height, T>> {
Ok(BytesVec::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
)?)
}
// --- Rolling ---
pub(crate) fn import_rolling<T: NumericValue + JsonSchema>(&self, suffix: &str, offset: Version) -> Result<RollingWindows<T>> {
RollingWindows::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_rolling<T: NumericValue + JsonSchema>(
&self,
suffix: &str,
offset: Version,
) -> Result<RollingWindows<T>> {
RollingWindows::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_percent_rolling_bp16(&self, suffix: &str, offset: Version) -> Result<PercentRollingWindows<BasisPoints16>> {
PercentRollingWindows::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_percent_rolling_bp16(
&self,
suffix: &str,
offset: Version,
) -> Result<PercentRollingWindows<BasisPoints16>> {
PercentRollingWindows::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_emas_1w_1m<T: NumericValue + JsonSchema>(&self, suffix: &str, offset: Version) -> Result<RollingEmas1w1m<T>> {
RollingEmas1w1m::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_emas_1w_1m<T: NumericValue + JsonSchema>(
&self,
suffix: &str,
offset: Version,
) -> Result<RollingEmas1w1m<T>> {
RollingEmas1w1m::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_percent_emas_1w_1m_bp16(&self, suffix: &str, offset: Version) -> Result<PercentRollingEmas1w1m<BasisPoints16>> {
PercentRollingEmas1w1m::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
pub(crate) fn import_percent_emas_1w_1m_bp16(
&self,
suffix: &str,
offset: Version,
) -> Result<PercentRollingEmas1w1m<BasisPoints16>> {
PercentRollingEmas1w1m::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
pub(crate) fn import_emas_2w(&self, suffix: &str, offset: Version) -> Result<RollingEmas2w> {
RollingEmas2w::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
RollingEmas2w::forced_import(
self.db,
&self.name(suffix),
self.version + offset,
self.indexes,
)
}
}

View File

@@ -69,12 +69,18 @@ impl CostBasisBase {
) -> Result<()> {
self.min.cents.height.compute_min_of_others(
starting_indexes.height,
&others.iter().map(|v| &v.min.cents.height).collect::<Vec<_>>(),
&others
.iter()
.map(|v| &v.min.cents.height)
.collect::<Vec<_>>(),
exit,
)?;
self.max.cents.height.compute_max_of_others(
starting_indexes.height,
&others.iter().map(|v| &v.max.cents.height).collect::<Vec<_>>(),
&others
.iter()
.map(|v| &v.max.cents.height)
.collect::<Vec<_>>(),
exit,
)?;
Ok(())

View File

@@ -5,10 +5,7 @@ use vecdb::{AnyStoredVec, Rw, StorageMode, WritableVec};
use crate::{
distribution::state::CohortState,
internal::{
PERCENTILES_LEN, PercentFromHeight, PercentilesVecs,
compute_spot_percentile_rank,
},
internal::{PERCENTILES_LEN, PercentFromHeight, PercentilesVecs, compute_spot_percentile_rank},
};
use crate::distribution::metrics::ImportConfig;
@@ -44,8 +41,10 @@ impl CostBasisExtended {
cfg.version,
cfg.indexes,
)?,
spot_cost_basis_percentile: cfg.import_percent_bp16("spot_cost_basis_percentile", Version::ZERO)?,
spot_invested_capital_percentile: cfg.import_percent_bp16("spot_invested_capital_percentile", Version::ZERO)?,
spot_cost_basis_percentile: cfg
.import_percent_bp16("spot_cost_basis_percentile", Version::ZERO)?,
spot_invested_capital_percentile: cfg
.import_percent_bp16("spot_invested_capital_percentile", Version::ZERO)?,
})
}

View File

@@ -25,11 +25,6 @@ use vecdb::{AnyStoredVec, Exit};
use crate::{blocks, distribution::state::CohortState, prices};
/// Trait defining the interface for cohort metrics containers.
///
/// Provides typed accessor methods for base sub-metric components, default
/// implementations for shared operations that only use base fields, and
/// required methods for operations that vary by extension level.
pub trait CohortMetricsBase: Send + Sync {
fn filter(&self) -> &Filter;
fn supply(&self) -> &SupplyMetrics;
@@ -45,14 +40,8 @@ pub trait CohortMetricsBase: Send + Sync {
fn cost_basis_base(&self) -> &CostBasisBase;
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase;
// === Required methods (vary by extension level) ===
/// Validate computed versions against base version.
/// Extended types also validate cost_basis extended versions.
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()>;
/// Compute and push unrealized states.
/// Extended types also push cost_basis percentiles.
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
@@ -60,12 +49,8 @@ pub trait CohortMetricsBase: Send + Sync {
state: &mut CohortState,
) -> Result<()>;
/// Collect all stored vecs for parallel writing.
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec>;
// === Default methods (shared across all cohort metric types, use base fields only) ===
/// Get minimum length across height-indexed vectors written in block loop.
fn min_stateful_height_len(&self) -> usize {
self.supply()
.min_len()
@@ -76,7 +61,6 @@ pub trait CohortMetricsBase: Send + Sync {
.min(self.cost_basis_base().min_stateful_height_len())
}
/// Push state values to height-indexed vectors.
fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> {
self.supply_mut()
.truncate_push(height, state.supply.value)?;
@@ -225,12 +209,18 @@ pub trait CohortMetricsBase: Send + Sync {
)?;
self.unrealized_base_mut().compute_from_stateful(
starting_indexes,
&others.iter().map(|v| v.unrealized_base()).collect::<Vec<_>>(),
&others
.iter()
.map(|v| v.unrealized_base())
.collect::<Vec<_>>(),
exit,
)?;
self.cost_basis_base_mut().compute_from_stateful(
starting_indexes,
&others.iter().map(|v| v.cost_basis_base()).collect::<Vec<_>>(),
&others
.iter()
.map(|v| v.cost_basis_base())
.collect::<Vec<_>>(),
exit,
)?;
Ok(())

View File

@@ -10,18 +10,14 @@ use crate::{
use crate::distribution::metrics::ImportConfig;
/// Adjusted realized metrics (only for adjusted cohorts: all, sth, max_age).
#[derive(Traversable)]
pub struct RealizedAdjusted<M: StorageMode = Rw> {
// === Adjusted Value (computed: cohort - up_to_1h) ===
pub adjusted_value_created: ComputedFromHeight<Cents, M>,
pub adjusted_value_destroyed: ComputedFromHeight<Cents, M>,
// === Adjusted Value Created/Destroyed Rolling Sums ===
pub adjusted_value_created_sum: RollingWindows<Cents, M>,
pub adjusted_value_destroyed_sum: RollingWindows<Cents, M>,
// === Adjusted SOPR (rolling window ratios) ===
pub adjusted_sopr: RollingWindows<StoredF64, M>,
pub adjusted_sopr_ema: RollingEmas1w1m<StoredF64, M>,
}
@@ -30,9 +26,12 @@ impl RealizedAdjusted {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(RealizedAdjusted {
adjusted_value_created: cfg.import_computed("adjusted_value_created", Version::ZERO)?,
adjusted_value_destroyed: cfg.import_computed("adjusted_value_destroyed", Version::ZERO)?,
adjusted_value_created_sum: cfg.import_rolling("adjusted_value_created", Version::ONE)?,
adjusted_value_destroyed_sum: cfg.import_rolling("adjusted_value_destroyed", Version::ONE)?,
adjusted_value_destroyed: cfg
.import_computed("adjusted_value_destroyed", Version::ZERO)?,
adjusted_value_created_sum: cfg
.import_rolling("adjusted_value_created", Version::ONE)?,
adjusted_value_destroyed_sum: cfg
.import_rolling("adjusted_value_destroyed", Version::ONE)?,
adjusted_sopr: cfg.import_rolling("adjusted_sopr", Version::ONE)?,
adjusted_sopr_ema: cfg.import_emas_1w_1m("adjusted_sopr_24h", Version::ONE)?,
})
@@ -66,19 +65,31 @@ impl RealizedAdjusted {
// Adjusted value created/destroyed rolling sums
let window_starts = blocks.count.window_starts();
self.adjusted_value_created_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &self.adjusted_value_created.height, exit,
starting_indexes.height,
&window_starts,
&self.adjusted_value_created.height,
exit,
)?;
self.adjusted_value_destroyed_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &self.adjusted_value_destroyed.height, exit,
starting_indexes.height,
&window_starts,
&self.adjusted_value_destroyed.height,
exit,
)?;
// SOPR ratios from rolling sums
for ((sopr, vc), vd) in self.adjusted_sopr.as_mut_array().into_iter()
for ((sopr, vc), vd) in self
.adjusted_sopr
.as_mut_array()
.into_iter()
.zip(self.adjusted_value_created_sum.as_array())
.zip(self.adjusted_value_destroyed_sum.as_array())
{
sopr.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height, &vc.height, &vd.height, exit,
starting_indexes.height,
&vc.height,
&vd.height,
exit,
)?;
}

View File

@@ -1,56 +1,49 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
BasisPoints16, BasisPoints32, BasisPointsSigned16,
Bitcoin, Cents, CentsSats, CentsSigned, CentsSquaredSats, Dollars, Height, Indexes, Sats, StoredF32, StoredF64, Version,
BasisPoints16, BasisPoints32, BasisPointsSigned16, Bitcoin, Cents, CentsSats, CentsSigned,
CentsSquaredSats, Dollars, Height, Indexes, Sats, StoredF32, StoredF64, Version,
};
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Exit, ReadableCloneableVec,
ReadableVec, Rw, StorageMode, WritableVec,
AnyStoredVec, AnyVec, BytesVec, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode,
WritableVec,
};
use crate::{
blocks,
distribution::state::RealizedState,
internal::{
CentsPlus, CentsUnsignedToDollars, ComputedFromHeightCumulative, ComputedFromHeight,
ComputedFromHeightRatio, FiatFromHeight, NegCentsUnsignedToDollars, PercentFromHeight,
PercentRollingEmas1w1m, PercentRollingWindows, ValueFromHeightCumulative, LazyFromHeight,
Price,
RatioCentsBp16, RatioCentsSignedCentsBps16, RatioCentsSignedDollarsBps16, RatioCents64,
RollingEmas1w1m, RollingEmas2w, RollingWindows, Identity,
CentsPlus, CentsUnsignedToDollars, ComputedFromHeight, ComputedFromHeightCumulative,
ComputedFromHeightRatio, FiatFromHeight, Identity, LazyFromHeight,
NegCentsUnsignedToDollars, PercentFromHeight, PercentRollingEmas1w1m,
PercentRollingWindows, Price, RatioCents64, RatioCentsBp16, RatioCentsSignedCentsBps16,
RatioCentsSignedDollarsBps16, RollingEmas1w1m, RollingEmas2w, RollingWindows,
ValueFromHeightCumulative,
},
prices,
};
use crate::distribution::metrics::ImportConfig;
/// Base realized metrics (always computed).
#[derive(Traversable)]
pub struct RealizedBase<M: StorageMode = Rw> {
// === Realized Cap ===
pub realized_cap_cents: ComputedFromHeight<Cents, M>,
pub realized_cap: LazyFromHeight<Dollars, Cents>,
pub realized_price: Price<ComputedFromHeight<Cents, M>>,
pub realized_price_ratio: ComputedFromHeightRatio<M>,
pub realized_cap_change_1m: ComputedFromHeight<CentsSigned, M>,
// === Investor Price ===
pub investor_price: Price<ComputedFromHeight<Cents, M>>,
pub investor_price_ratio: ComputedFromHeightRatio<M>,
// === Floor/Ceiling Price Bands ===
pub lower_price_band: Price<ComputedFromHeight<Cents, M>>,
pub upper_price_band: Price<ComputedFromHeight<Cents, M>>,
// === Raw values for aggregation ===
pub cap_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub investor_cap_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
// === MVRV ===
pub mvrv: LazyFromHeight<StoredF32>,
// === Realized Profit/Loss ===
pub realized_profit: ComputedFromHeightCumulative<Cents, M>,
pub realized_profit_ema_1w: ComputedFromHeight<Cents, M>,
pub realized_loss: ComputedFromHeightCumulative<Cents, M>,
@@ -60,50 +53,38 @@ pub struct RealizedBase<M: StorageMode = Rw> {
pub net_realized_pnl_ema_1w: ComputedFromHeight<CentsSigned, M>,
pub gross_pnl: FiatFromHeight<Cents, M>,
// === Realized vs Realized Cap Ratios ===
pub realized_profit_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
pub realized_loss_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
pub net_realized_pnl_rel_to_realized_cap: PercentFromHeight<BasisPointsSigned16, M>,
// === Value Created/Destroyed Splits (stored) ===
pub profit_value_created: ComputedFromHeight<Cents, M>,
pub profit_value_destroyed: ComputedFromHeight<Cents, M>,
pub loss_value_created: ComputedFromHeight<Cents, M>,
pub loss_value_destroyed: ComputedFromHeight<Cents, M>,
// === Value Created/Destroyed Totals ===
pub value_created: ComputedFromHeight<Cents, M>,
pub value_destroyed: ComputedFromHeight<Cents, M>,
// === Capitulation/Profit Flow (lazy aliases) ===
pub capitulation_flow: LazyFromHeight<Dollars, Cents>,
pub profit_flow: LazyFromHeight<Dollars, Cents>,
// === Value Created/Destroyed Rolling Sums ===
pub value_created_sum: RollingWindows<Cents, M>,
pub value_destroyed_sum: RollingWindows<Cents, M>,
// === SOPR (rolling window ratios) ===
pub sopr: RollingWindows<StoredF64, M>,
pub sopr_24h_ema: RollingEmas1w1m<StoredF64, M>,
// === Sell Side Risk ===
pub gross_pnl_sum: RollingWindows<Cents, M>,
pub sell_side_risk_ratio: PercentRollingWindows<BasisPoints16, M>,
pub sell_side_risk_ratio_24h_ema: PercentRollingEmas1w1m<BasisPoints16, M>,
// === Net Realized PnL Deltas ===
pub net_pnl_change_1m: ComputedFromHeight<CentsSigned, M>,
pub net_pnl_change_1m_rel_to_realized_cap:
PercentFromHeight<BasisPointsSigned16, M>,
pub net_pnl_change_1m_rel_to_market_cap:
PercentFromHeight<BasisPointsSigned16, M>,
pub net_pnl_change_1m_rel_to_realized_cap: PercentFromHeight<BasisPointsSigned16, M>,
pub net_pnl_change_1m_rel_to_market_cap: PercentFromHeight<BasisPointsSigned16, M>,
// === Peak Regret ===
pub peak_regret: ComputedFromHeightCumulative<Cents, M>,
pub peak_regret_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
// === Sent in Profit/Loss ===
pub sent_in_profit: ValueFromHeightCumulative<M>,
pub sent_in_profit_ema: RollingEmas2w<M>,
pub sent_in_loss: ValueFromHeightCumulative<M>,
@@ -111,15 +92,16 @@ pub struct RealizedBase<M: StorageMode = Rw> {
}
impl RealizedBase {
/// Import realized base metrics from database.
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v0 = Version::ZERO;
let v1 = Version::ONE;
let realized_cap_cents = cfg.import_computed("realized_cap_cents", v0)?;
let realized_cap = LazyFromHeight::from_computed::<CentsUnsignedToDollars>(
&cfg.name("realized_cap"), cfg.version,
realized_cap_cents.height.read_only_boxed_clone(), &realized_cap_cents,
&cfg.name("realized_cap"),
cfg.version,
realized_cap_cents.height.read_only_boxed_clone(),
&realized_cap_cents,
);
let realized_profit = cfg.import_cumulative("realized_profit", v0)?;
@@ -128,8 +110,10 @@ impl RealizedBase {
let realized_loss_ema_1w = cfg.import_computed("realized_loss_ema_1w", v0)?;
let neg_realized_loss = LazyFromHeight::from_height_source::<NegCentsUnsignedToDollars>(
&cfg.name("neg_realized_loss"), cfg.version + Version::ONE,
realized_loss.height.read_only_boxed_clone(), cfg.indexes,
&cfg.name("neg_realized_loss"),
cfg.version + Version::ONE,
realized_loss.height.read_only_boxed_clone(),
cfg.indexes,
);
let net_realized_pnl = cfg.import_cumulative("net_realized_pnl", v0)?;
@@ -161,17 +145,23 @@ impl RealizedBase {
let value_destroyed = cfg.import_computed("value_destroyed", v0)?;
let capitulation_flow = LazyFromHeight::from_computed::<CentsUnsignedToDollars>(
&cfg.name("capitulation_flow"), cfg.version,
loss_value_destroyed.height.read_only_boxed_clone(), &loss_value_destroyed,
&cfg.name("capitulation_flow"),
cfg.version,
loss_value_destroyed.height.read_only_boxed_clone(),
&loss_value_destroyed,
);
let profit_flow = LazyFromHeight::from_computed::<CentsUnsignedToDollars>(
&cfg.name("profit_flow"), cfg.version,
profit_value_destroyed.height.read_only_boxed_clone(), &profit_value_destroyed,
&cfg.name("profit_flow"),
cfg.version,
profit_value_destroyed.height.read_only_boxed_clone(),
&profit_value_destroyed,
);
let realized_price_ratio = cfg.import_ratio("realized_price", v1)?;
let mvrv = LazyFromHeight::from_lazy::<Identity<StoredF32>, BasisPoints32>(
&cfg.name("mvrv"), cfg.version, &realized_price_ratio.ratio,
&cfg.name("mvrv"),
cfg.version,
&realized_price_ratio.ratio,
);
// Rolling windows
@@ -183,7 +173,8 @@ impl RealizedBase {
// EMAs
let sopr_24h_ema = cfg.import_emas_1w_1m("sopr_24h", v1)?;
let sell_side_risk_ratio_24h_ema = cfg.import_percent_emas_1w_1m_bp16("sell_side_risk_ratio_24h", v1)?;
let sell_side_risk_ratio_24h_ema =
cfg.import_percent_emas_1w_1m_bp16("sell_side_risk_ratio_24h", v1)?;
let peak_regret_rel_to_realized_cap =
cfg.import_percent_bp16("realized_peak_regret_rel_to_realized_cap", v1)?;
@@ -228,10 +219,10 @@ impl RealizedBase {
sell_side_risk_ratio,
sell_side_risk_ratio_24h_ema,
net_pnl_change_1m: cfg.import_computed("net_pnl_change_1m", Version::new(3))?,
net_pnl_change_1m_rel_to_realized_cap:
cfg.import_percent_bps16("net_pnl_change_1m_rel_to_realized_cap", Version::new(3))?,
net_pnl_change_1m_rel_to_market_cap:
cfg.import_percent_bps16("net_pnl_change_1m_rel_to_market_cap", Version::new(3))?,
net_pnl_change_1m_rel_to_realized_cap: cfg
.import_percent_bps16("net_pnl_change_1m_rel_to_realized_cap", Version::new(3))?,
net_pnl_change_1m_rel_to_market_cap: cfg
.import_percent_bps16("net_pnl_change_1m_rel_to_market_cap", Version::new(3))?,
peak_regret,
peak_regret_rel_to_realized_cap,
sent_in_profit: cfg.import_value_cumulative("sent_in_profit", v0)?,
@@ -241,7 +232,6 @@ impl RealizedBase {
})
}
/// Get minimum length across height-indexed vectors written in block loop.
pub(crate) fn min_stateful_height_len(&self) -> usize {
self.realized_cap
.height
@@ -260,7 +250,6 @@ impl RealizedBase {
.min(self.sent_in_loss.base.sats.height.len())
}
/// Push realized state values to height-indexed vectors.
pub(crate) fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> {
self.realized_cap_cents
.height
@@ -271,7 +260,8 @@ impl RealizedBase {
self.realized_loss
.height
.truncate_push(height, state.loss())?;
self.investor_price.cents
self.investor_price
.cents
.height
.truncate_push(height, state.investor_price())?;
self.cap_raw.truncate_push(height, state.cap_raw())?;
@@ -306,7 +296,6 @@ impl RealizedBase {
Ok(())
}
/// Returns a Vec of mutable references to all stored vecs for parallel writing.
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.realized_cap_cents.height as &mut dyn AnyStoredVec,
@@ -325,7 +314,6 @@ impl RealizedBase {
]
}
/// Compute aggregate values from separate cohorts.
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
@@ -362,7 +350,8 @@ impl RealizedBase {
.iter()
.map(|o| o.investor_price.cents.height.version())
.fold(vecdb::Version::ZERO, |acc, v| acc + v);
self.investor_price.cents
self.investor_price
.cents
.height
.validate_computed_version_or_reset(investor_price_dep_version)?;
@@ -404,7 +393,8 @@ impl RealizedBase {
} else {
Cents::new((sum_investor_cap / sum_cap.inner()) as u64)
};
self.investor_price.cents
self.investor_price
.cents
.height
.truncate_push(height, investor_price)?;
}
@@ -474,7 +464,6 @@ impl RealizedBase {
Ok(())
}
/// First phase of computed metrics (indexes from height).
pub(crate) fn compute_rest_part1(
&mut self,
starting_indexes: &Indexes,
@@ -492,7 +481,10 @@ impl RealizedBase {
&self.realized_profit.height,
&self.realized_loss.height,
|(i, profit, loss, ..)| {
(i, CentsSigned::new(profit.inner() as i64 - loss.inner() as i64))
(
i,
CentsSigned::new(profit.inner() as i64 - loss.inner() as i64),
)
},
exit,
)?;
@@ -512,7 +504,6 @@ impl RealizedBase {
Ok(())
}
/// Second phase of computed metrics (base-only parts: realized price, rolling sums, EMAs).
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2_base(
&mut self,
@@ -608,34 +599,54 @@ impl RealizedBase {
exit,
)?;
// === Rolling sum intermediates ===
let window_starts = blocks.count.window_starts();
self.value_created_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &self.value_created.height, exit,
starting_indexes.height,
&window_starts,
&self.value_created.height,
exit,
)?;
self.value_destroyed_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &self.value_destroyed.height, exit,
starting_indexes.height,
&window_starts,
&self.value_destroyed.height,
exit,
)?;
self.gross_pnl_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &self.gross_pnl.cents.height, exit,
starting_indexes.height,
&window_starts,
&self.gross_pnl.cents.height,
exit,
)?;
// Compute SOPR from rolling sums
for ((sopr, vc), vd) in self.sopr.as_mut_array().into_iter()
for ((sopr, vc), vd) in self
.sopr
.as_mut_array()
.into_iter()
.zip(self.value_created_sum.as_array())
.zip(self.value_destroyed_sum.as_array())
{
sopr.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height, &vc.height, &vd.height, exit,
starting_indexes.height,
&vc.height,
&vd.height,
exit,
)?;
}
// Compute sell-side risk ratios
for (ssrr, rv) in self.sell_side_risk_ratio.as_mut_array().into_iter()
for (ssrr, rv) in self
.sell_side_risk_ratio
.as_mut_array()
.into_iter()
.zip(self.gross_pnl_sum.as_array())
{
ssrr.compute_binary::<Cents, Cents, RatioCentsBp16>(
starting_indexes.height, &rv.height, &self.realized_cap_cents.height, exit,
starting_indexes.height,
&rv.height,
&self.realized_cap_cents.height,
exit,
)?;
}
@@ -652,14 +663,12 @@ impl RealizedBase {
&self.realized_loss.height,
exit,
)?;
self.net_realized_pnl_ema_1w
.height
.compute_rolling_ema(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.net_realized_pnl.height,
exit,
)?;
self.net_realized_pnl_ema_1w.height.compute_rolling_ema(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.net_realized_pnl.height,
exit,
)?;
// 14-day EMA of sent in profit/loss
self.sent_in_profit_ema.compute(
@@ -726,14 +735,12 @@ impl RealizedBase {
)?;
// Net realized PnL cumulative 30d delta
self.net_pnl_change_1m
.height
.compute_rolling_change(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.net_realized_pnl.cumulative.height,
exit,
)?;
self.net_pnl_change_1m.height.compute_rolling_change(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.net_realized_pnl.cumulative.height,
exit,
)?;
self.net_pnl_change_1m_rel_to_realized_cap
.compute_binary::<CentsSigned, Cents, RatioCentsSignedCentsBps16>(

View File

@@ -6,8 +6,8 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{
blocks,
internal::{
ComputedFromHeightRatioExtension, PercentFromHeight,
RatioCents64, RatioDollarsBp16, RollingWindows,
ComputedFromHeightRatioExtension, PercentFromHeight, RatioCents64, RatioDollarsBp16,
RollingWindows,
},
};
@@ -15,19 +15,15 @@ use crate::distribution::metrics::ImportConfig;
use super::RealizedBase;
/// Extended realized metrics (only for extended cohorts: all, sth, lth, age_range).
#[derive(Traversable)]
pub struct RealizedExtended<M: StorageMode = Rw> {
pub realized_cap_rel_to_own_market_cap: PercentFromHeight<BasisPoints16, M>,
// === Realized Profit/Loss Rolling Sums ===
pub realized_profit_sum: RollingWindows<Cents, M>,
pub realized_loss_sum: RollingWindows<Cents, M>,
// === Realized Profit to Loss Ratio (from rolling sums) ===
pub realized_profit_to_loss_ratio: RollingWindows<StoredF64, M>,
// === Extended ratio metrics for realized/investor price ===
pub realized_price_ratio_ext: ComputedFromHeightRatioExtension<M>,
pub investor_price_ratio_ext: ComputedFromHeightRatioExtension<M>,
}
@@ -35,15 +31,23 @@ pub struct RealizedExtended<M: StorageMode = Rw> {
impl RealizedExtended {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(RealizedExtended {
realized_cap_rel_to_own_market_cap: cfg.import_percent_bp16("realized_cap_rel_to_own_market_cap", Version::ZERO)?,
realized_cap_rel_to_own_market_cap: cfg
.import_percent_bp16("realized_cap_rel_to_own_market_cap", Version::ZERO)?,
realized_profit_sum: cfg.import_rolling("realized_profit", Version::ONE)?,
realized_loss_sum: cfg.import_rolling("realized_loss", Version::ONE)?,
realized_profit_to_loss_ratio: cfg.import_rolling("realized_profit_to_loss_ratio", Version::ONE)?,
realized_profit_to_loss_ratio: cfg
.import_rolling("realized_profit_to_loss_ratio", Version::ONE)?,
realized_price_ratio_ext: ComputedFromHeightRatioExtension::forced_import(
cfg.db, &cfg.name("realized_price"), cfg.version + Version::ONE, cfg.indexes,
cfg.db,
&cfg.name("realized_price"),
cfg.version + Version::ONE,
cfg.indexes,
)?,
investor_price_ratio_ext: ComputedFromHeightRatioExtension::forced_import(
cfg.db, &cfg.name("investor_price"), cfg.version, cfg.indexes,
cfg.db,
&cfg.name("investor_price"),
cfg.version,
cfg.indexes,
)?,
})
}
@@ -60,10 +64,16 @@ impl RealizedExtended {
// Realized profit/loss rolling sums
let window_starts = blocks.count.window_starts();
self.realized_profit_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &base.realized_profit.height, exit,
starting_indexes.height,
&window_starts,
&base.realized_profit.height,
exit,
)?;
self.realized_loss_sum.compute_rolling_sum(
starting_indexes.height, &window_starts, &base.realized_loss.height, exit,
starting_indexes.height,
&window_starts,
&base.realized_loss.height,
exit,
)?;
// Realized cap relative to own market cap
@@ -76,12 +86,18 @@ impl RealizedExtended {
)?;
// Realized profit to loss ratios
for ((ratio, profit), loss) in self.realized_profit_to_loss_ratio.as_mut_array().into_iter()
for ((ratio, profit), loss) in self
.realized_profit_to_loss_ratio
.as_mut_array()
.into_iter()
.zip(self.realized_profit_sum.as_array())
.zip(self.realized_loss_sum.as_array())
{
ratio.compute_binary::<Cents, Cents, RatioCents64>(
starting_indexes.height, &profit.height, &loss.height, exit,
starting_indexes.height,
&profit.height,
&loss.height,
exit,
)?;
}

View File

@@ -4,29 +4,23 @@ use brk_types::{BasisPoints16, BasisPointsSigned16, Dollars, Height, Sats, Store
use vecdb::{Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use crate::internal::{
Bps16ToFloat, LazyFromHeight,
NegRatioDollarsBps16, PercentFromHeight, RatioDollarsBp16, RatioDollarsBps16, RatioSatsBp16,
Bps16ToFloat, LazyFromHeight, NegRatioDollarsBps16, PercentFromHeight, RatioDollarsBp16,
RatioDollarsBps16, RatioSatsBp16,
};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
/// Base relative metrics (always computed when relative is enabled).
/// All fields are non-Optional - market_cap and realized_cap are always
/// available when relative metrics are enabled.
#[derive(Traversable)]
pub struct RelativeBase<M: StorageMode = Rw> {
// === Supply in Profit/Loss Relative to Own Supply ===
pub supply_in_profit_rel_to_own_supply: PercentFromHeight<BasisPoints16, M>,
pub supply_in_loss_rel_to_own_supply: PercentFromHeight<BasisPoints16, M>,
// === Unrealized vs Market Cap ===
pub unrealized_profit_rel_to_market_cap: PercentFromHeight<BasisPoints16, M>,
pub unrealized_loss_rel_to_market_cap: PercentFromHeight<BasisPoints16, M>,
pub neg_unrealized_loss_rel_to_market_cap: PercentFromHeight<BasisPointsSigned16, M>,
pub net_unrealized_pnl_rel_to_market_cap: PercentFromHeight<BasisPointsSigned16, M>,
pub nupl: LazyFromHeight<StoredF32, BasisPointsSigned16>,
// === Invested Capital in Profit/Loss as % of Realized Cap ===
pub invested_capital_in_profit_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
pub invested_capital_in_loss_rel_to_realized_cap: PercentFromHeight<BasisPoints16, M>,
}
@@ -42,27 +36,34 @@ impl RelativeBase {
let nupl = LazyFromHeight::from_computed::<Bps16ToFloat>(
&cfg.name("nupl"),
cfg.version + v2,
net_unrealized_pnl_rel_to_market_cap.bps.height.read_only_boxed_clone(),
net_unrealized_pnl_rel_to_market_cap
.bps
.height
.read_only_boxed_clone(),
&net_unrealized_pnl_rel_to_market_cap.bps,
);
Ok(Self {
supply_in_profit_rel_to_own_supply:
cfg.import_percent_bp16("supply_in_profit_rel_to_own_supply", v1)?,
supply_in_loss_rel_to_own_supply:
cfg.import_percent_bp16("supply_in_loss_rel_to_own_supply", v1)?,
unrealized_profit_rel_to_market_cap:
cfg.import_percent_bp16("unrealized_profit_rel_to_market_cap", v2)?,
unrealized_loss_rel_to_market_cap:
cfg.import_percent_bp16("unrealized_loss_rel_to_market_cap", v2)?,
neg_unrealized_loss_rel_to_market_cap:
cfg.import_percent_bps16("neg_unrealized_loss_rel_to_market_cap", v2)?,
supply_in_profit_rel_to_own_supply: cfg
.import_percent_bp16("supply_in_profit_rel_to_own_supply", v1)?,
supply_in_loss_rel_to_own_supply: cfg
.import_percent_bp16("supply_in_loss_rel_to_own_supply", v1)?,
unrealized_profit_rel_to_market_cap: cfg
.import_percent_bp16("unrealized_profit_rel_to_market_cap", v2)?,
unrealized_loss_rel_to_market_cap: cfg
.import_percent_bp16("unrealized_loss_rel_to_market_cap", v2)?,
neg_unrealized_loss_rel_to_market_cap: cfg
.import_percent_bps16("neg_unrealized_loss_rel_to_market_cap", v2)?,
net_unrealized_pnl_rel_to_market_cap,
nupl,
invested_capital_in_profit_rel_to_realized_cap:
cfg.import_percent_bp16("invested_capital_in_profit_rel_to_realized_cap", Version::ZERO)?,
invested_capital_in_loss_rel_to_realized_cap:
cfg.import_percent_bp16("invested_capital_in_loss_rel_to_realized_cap", Version::ZERO)?,
invested_capital_in_profit_rel_to_realized_cap: cfg.import_percent_bp16(
"invested_capital_in_profit_rel_to_realized_cap",
Version::ZERO,
)?,
invested_capital_in_loss_rel_to_realized_cap: cfg.import_percent_bp16(
"invested_capital_in_loss_rel_to_realized_cap",
Version::ZERO,
)?,
})
}
@@ -77,35 +78,59 @@ impl RelativeBase {
) -> Result<()> {
self.supply_in_profit_rel_to_own_supply
.compute_binary::<Sats, Sats, RatioSatsBp16>(
max_from, &unrealized.supply_in_profit.sats.height, supply_total_sats, exit,
max_from,
&unrealized.supply_in_profit.sats.height,
supply_total_sats,
exit,
)?;
self.supply_in_loss_rel_to_own_supply
.compute_binary::<Sats, Sats, RatioSatsBp16>(
max_from, &unrealized.supply_in_loss.sats.height, supply_total_sats, exit,
max_from,
&unrealized.supply_in_loss.sats.height,
supply_total_sats,
exit,
)?;
self.unrealized_profit_rel_to_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_profit.usd.height, market_cap, exit,
max_from,
&unrealized.unrealized_profit.usd.height,
market_cap,
exit,
)?;
self.unrealized_loss_rel_to_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_loss.usd.height, market_cap, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
market_cap,
exit,
)?;
self.neg_unrealized_loss_rel_to_market_cap
.compute_binary::<Dollars, Dollars, NegRatioDollarsBps16>(
max_from, &unrealized.unrealized_loss.usd.height, market_cap, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
market_cap,
exit,
)?;
self.net_unrealized_pnl_rel_to_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBps16>(
max_from, &unrealized.net_unrealized_pnl.usd.height, market_cap, exit,
max_from,
&unrealized.net_unrealized_pnl.usd.height,
market_cap,
exit,
)?;
self.invested_capital_in_profit_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.invested_capital_in_profit.usd.height, &realized.realized_cap.height, exit,
max_from,
&unrealized.invested_capital_in_profit.usd.height,
&realized.realized_cap.height,
exit,
)?;
self.invested_capital_in_loss_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.invested_capital_in_loss.usd.height, &realized.realized_cap.height, exit,
max_from,
&unrealized.invested_capital_in_loss.usd.height,
&realized.realized_cap.height,
exit,
)?;
Ok(())
}

View File

@@ -12,14 +12,10 @@ use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own market cap (extended && rel_to_all).
#[derive(Traversable)]
pub struct RelativeExtendedOwnMarketCap<M: StorageMode = Rw> {
pub unrealized_profit_rel_to_own_market_cap:
PercentFromHeight<BasisPoints16, M>,
pub unrealized_loss_rel_to_own_market_cap:
PercentFromHeight<BasisPoints16, M>,
pub neg_unrealized_loss_rel_to_own_market_cap:
PercentFromHeight<BasisPointsSigned16, M>,
pub net_unrealized_pnl_rel_to_own_market_cap:
PercentFromHeight<BasisPointsSigned16, M>,
pub unrealized_profit_rel_to_own_market_cap: PercentFromHeight<BasisPoints16, M>,
pub unrealized_loss_rel_to_own_market_cap: PercentFromHeight<BasisPoints16, M>,
pub neg_unrealized_loss_rel_to_own_market_cap: PercentFromHeight<BasisPointsSigned16, M>,
pub net_unrealized_pnl_rel_to_own_market_cap: PercentFromHeight<BasisPointsSigned16, M>,
}
impl RelativeExtendedOwnMarketCap {
@@ -27,14 +23,14 @@ impl RelativeExtendedOwnMarketCap {
let v2 = Version::new(2);
Ok(Self {
unrealized_profit_rel_to_own_market_cap:
cfg.import_percent_bp16("unrealized_profit_rel_to_own_market_cap", v2)?,
unrealized_loss_rel_to_own_market_cap:
cfg.import_percent_bp16("unrealized_loss_rel_to_own_market_cap", v2)?,
neg_unrealized_loss_rel_to_own_market_cap:
cfg.import_percent_bps16("neg_unrealized_loss_rel_to_own_market_cap", v2)?,
net_unrealized_pnl_rel_to_own_market_cap:
cfg.import_percent_bps16("net_unrealized_pnl_rel_to_own_market_cap", v2)?,
unrealized_profit_rel_to_own_market_cap: cfg
.import_percent_bp16("unrealized_profit_rel_to_own_market_cap", v2)?,
unrealized_loss_rel_to_own_market_cap: cfg
.import_percent_bp16("unrealized_loss_rel_to_own_market_cap", v2)?,
neg_unrealized_loss_rel_to_own_market_cap: cfg
.import_percent_bps16("neg_unrealized_loss_rel_to_own_market_cap", v2)?,
net_unrealized_pnl_rel_to_own_market_cap: cfg
.import_percent_bps16("net_unrealized_pnl_rel_to_own_market_cap", v2)?,
})
}
@@ -47,19 +43,31 @@ impl RelativeExtendedOwnMarketCap {
) -> Result<()> {
self.unrealized_profit_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_profit.usd.height, own_market_cap, exit,
max_from,
&unrealized.unrealized_profit.usd.height,
own_market_cap,
exit,
)?;
self.unrealized_loss_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_loss.usd.height, own_market_cap, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
own_market_cap,
exit,
)?;
self.neg_unrealized_loss_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, NegRatioDollarsBps16>(
max_from, &unrealized.unrealized_loss.usd.height, own_market_cap, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
own_market_cap,
exit,
)?;
self.net_unrealized_pnl_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, RatioDollarsBps16>(
max_from, &unrealized.net_unrealized_pnl.usd.height, own_market_cap, exit,
max_from,
&unrealized.net_unrealized_pnl.usd.height,
own_market_cap,
exit,
)?;
Ok(())
}

View File

@@ -12,14 +12,10 @@ use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own total unrealized PnL (extended only).
#[derive(Traversable)]
pub struct RelativeExtendedOwnPnl<M: StorageMode = Rw> {
pub unrealized_profit_rel_to_own_gross_pnl:
PercentFromHeight<BasisPoints16, M>,
pub unrealized_loss_rel_to_own_gross_pnl:
PercentFromHeight<BasisPoints16, M>,
pub neg_unrealized_loss_rel_to_own_gross_pnl:
PercentFromHeight<BasisPointsSigned16, M>,
pub net_unrealized_pnl_rel_to_own_gross_pnl:
PercentFromHeight<BasisPointsSigned16, M>,
pub unrealized_profit_rel_to_own_gross_pnl: PercentFromHeight<BasisPoints16, M>,
pub unrealized_loss_rel_to_own_gross_pnl: PercentFromHeight<BasisPoints16, M>,
pub neg_unrealized_loss_rel_to_own_gross_pnl: PercentFromHeight<BasisPointsSigned16, M>,
pub net_unrealized_pnl_rel_to_own_gross_pnl: PercentFromHeight<BasisPointsSigned16, M>,
}
impl RelativeExtendedOwnPnl {
@@ -28,14 +24,14 @@ impl RelativeExtendedOwnPnl {
let v2 = Version::new(2);
Ok(Self {
unrealized_profit_rel_to_own_gross_pnl:
cfg.import_percent_bp16("unrealized_profit_rel_to_own_gross_pnl", v1)?,
unrealized_loss_rel_to_own_gross_pnl:
cfg.import_percent_bp16("unrealized_loss_rel_to_own_gross_pnl", v1)?,
neg_unrealized_loss_rel_to_own_gross_pnl:
cfg.import_percent_bps16("neg_unrealized_loss_rel_to_own_gross_pnl", v1)?,
net_unrealized_pnl_rel_to_own_gross_pnl:
cfg.import_percent_bps16("net_unrealized_pnl_rel_to_own_gross_pnl", v2)?,
unrealized_profit_rel_to_own_gross_pnl: cfg
.import_percent_bp16("unrealized_profit_rel_to_own_gross_pnl", v1)?,
unrealized_loss_rel_to_own_gross_pnl: cfg
.import_percent_bp16("unrealized_loss_rel_to_own_gross_pnl", v1)?,
neg_unrealized_loss_rel_to_own_gross_pnl: cfg
.import_percent_bps16("neg_unrealized_loss_rel_to_own_gross_pnl", v1)?,
net_unrealized_pnl_rel_to_own_gross_pnl: cfg
.import_percent_bps16("net_unrealized_pnl_rel_to_own_gross_pnl", v2)?,
})
}
@@ -47,19 +43,31 @@ impl RelativeExtendedOwnPnl {
) -> Result<()> {
self.unrealized_profit_rel_to_own_gross_pnl
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_profit.usd.height, &unrealized.gross_pnl.usd.height, exit,
max_from,
&unrealized.unrealized_profit.usd.height,
&unrealized.gross_pnl.usd.height,
exit,
)?;
self.unrealized_loss_rel_to_own_gross_pnl
.compute_binary::<Dollars, Dollars, RatioDollarsBp16>(
max_from, &unrealized.unrealized_loss.usd.height, &unrealized.gross_pnl.usd.height, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
&unrealized.gross_pnl.usd.height,
exit,
)?;
self.neg_unrealized_loss_rel_to_own_gross_pnl
.compute_binary::<Dollars, Dollars, NegRatioDollarsBps16>(
max_from, &unrealized.unrealized_loss.usd.height, &unrealized.gross_pnl.usd.height, exit,
max_from,
&unrealized.unrealized_loss.usd.height,
&unrealized.gross_pnl.usd.height,
exit,
)?;
self.net_unrealized_pnl_rel_to_own_gross_pnl
.compute_binary::<Dollars, Dollars, RatioDollarsBps16>(
max_from, &unrealized.net_unrealized_pnl.usd.height, &unrealized.gross_pnl.usd.height, exit,
max_from,
&unrealized.net_unrealized_pnl.usd.height,
&unrealized.gross_pnl.usd.height,
exit,
)?;
Ok(())
}

View File

@@ -10,23 +10,20 @@ use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Relative-to-all metrics (not present for the "all" cohort itself).
#[derive(Traversable)]
pub struct RelativeToAll<M: StorageMode = Rw> {
pub supply_rel_to_circulating_supply:
PercentFromHeight<BasisPoints16, M>,
pub supply_in_profit_rel_to_circulating_supply:
PercentFromHeight<BasisPoints16, M>,
pub supply_in_loss_rel_to_circulating_supply:
PercentFromHeight<BasisPoints16, M>,
pub supply_rel_to_circulating_supply: PercentFromHeight<BasisPoints16, M>,
pub supply_in_profit_rel_to_circulating_supply: PercentFromHeight<BasisPoints16, M>,
pub supply_in_loss_rel_to_circulating_supply: PercentFromHeight<BasisPoints16, M>,
}
impl RelativeToAll {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
supply_rel_to_circulating_supply:
cfg.import_percent_bp16("supply_rel_to_circulating_supply", Version::ONE)?,
supply_in_profit_rel_to_circulating_supply:
cfg.import_percent_bp16("supply_in_profit_rel_to_circulating_supply", Version::ONE)?,
supply_in_loss_rel_to_circulating_supply:
cfg.import_percent_bp16("supply_in_loss_rel_to_circulating_supply", Version::ONE)?,
supply_rel_to_circulating_supply: cfg
.import_percent_bp16("supply_rel_to_circulating_supply", Version::ONE)?,
supply_in_profit_rel_to_circulating_supply: cfg
.import_percent_bp16("supply_in_profit_rel_to_circulating_supply", Version::ONE)?,
supply_in_loss_rel_to_circulating_supply: cfg
.import_percent_bp16("supply_in_loss_rel_to_circulating_supply", Version::ONE)?,
})
}
@@ -40,15 +37,24 @@ impl RelativeToAll {
) -> Result<()> {
self.supply_rel_to_circulating_supply
.compute_binary::<Sats, Sats, RatioSatsBp16>(
max_from, supply_total_sats, all_supply_sats, exit,
max_from,
supply_total_sats,
all_supply_sats,
exit,
)?;
self.supply_in_profit_rel_to_circulating_supply
.compute_binary::<Sats, Sats, RatioSatsBp16>(
max_from, &unrealized.supply_in_profit.sats.height, all_supply_sats, exit,
max_from,
&unrealized.supply_in_profit.sats.height,
all_supply_sats,
exit,
)?;
self.supply_in_loss_rel_to_circulating_supply
.compute_binary::<Sats, Sats, RatioSatsBp16>(
max_from, &unrealized.supply_in_loss.sats.height, all_supply_sats, exit,
max_from,
&unrealized.supply_in_loss.sats.height,
all_supply_sats,
exit,
)?;
Ok(())
}

View File

@@ -6,10 +6,7 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{
RelativeBase, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl,
RelativeToAll,
};
use super::{RelativeBase, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl, RelativeToAll};
/// Full extended relative metrics (base + rel_to_all + own_market_cap + own_pnl).
/// Used by: sth, lth, age_range cohorts.

View File

@@ -7,8 +7,8 @@ use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::internal::{
HalveCents, HalveDollars, HalveSats, HalveSatsToBitcoin,
LazyValueFromHeight, ValueFromHeightChange, ValueFromHeight,
HalveCents, HalveDollars, HalveSats, HalveSatsToBitcoin, LazyValueFromHeight, ValueFromHeight,
ValueFromHeightChange,
};
use super::ImportConfig;

View File

@@ -2,8 +2,8 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, CentsSats, CentsSigned, CentsSquaredSats, Height, Indexes, Version};
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Exit, ReadableCloneableVec, ReadableVec,
Rw, StorageMode, WritableVec,
AnyStoredVec, AnyVec, BytesVec, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode,
WritableVec,
};
use crate::{
@@ -19,36 +19,28 @@ use brk_types::Dollars;
use crate::distribution::metrics::ImportConfig;
/// Base unrealized profit/loss metrics (always computed).
#[derive(Traversable)]
pub struct UnrealizedBase<M: StorageMode = Rw> {
// === Supply in Profit/Loss ===
pub supply_in_profit: ValueFromHeight<M>,
pub supply_in_loss: ValueFromHeight<M>,
// === Unrealized Profit/Loss ===
pub unrealized_profit: FiatFromHeight<Cents, M>,
pub unrealized_loss: FiatFromHeight<Cents, M>,
// === Invested Capital in Profit/Loss ===
pub invested_capital_in_profit: FiatFromHeight<Cents, M>,
pub invested_capital_in_loss: FiatFromHeight<Cents, M>,
// === Raw values for precise aggregation (used to compute pain/greed indices) ===
pub invested_capital_in_profit_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub invested_capital_in_loss_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub investor_cap_in_profit_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
pub investor_cap_in_loss_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
// === Pain/Greed Indices ===
pub pain_index: FiatFromHeight<Cents, M>,
pub greed_index: FiatFromHeight<Cents, M>,
pub net_sentiment: FiatFromHeight<CentsSigned, M>,
// === Negated ===
pub neg_unrealized_loss: LazyFromHeight<Dollars, Cents>,
// === Net and Total ===
pub net_unrealized_pnl: FiatFromHeight<CentsSigned, M>,
pub gross_pnl: FiatFromHeight<Cents, M>,
}
@@ -65,7 +57,8 @@ impl UnrealizedBase {
let invested_capital_in_profit = cfg.import_fiat("invested_capital_in_profit", v0)?;
let invested_capital_in_loss = cfg.import_fiat("invested_capital_in_loss", v0)?;
let invested_capital_in_profit_raw = cfg.import_bytes("invested_capital_in_profit_raw", v0)?;
let invested_capital_in_profit_raw =
cfg.import_bytes("invested_capital_in_profit_raw", v0)?;
let invested_capital_in_loss_raw = cfg.import_bytes("invested_capital_in_loss_raw", v0)?;
let investor_cap_in_profit_raw = cfg.import_bytes("investor_cap_in_profit_raw", v0)?;
let investor_cap_in_loss_raw = cfg.import_bytes("investor_cap_in_loss_raw", v0)?;
@@ -193,39 +186,30 @@ impl UnrealizedBase {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.supply_in_profit
.sats
.height
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.supply_in_profit.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.supply_in_loss
.sats
.height
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.supply_in_loss.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.unrealized_profit
.cents
.height
.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.unrealized_profit.cents.height)
.collect::<Vec<_>>(),
exit,
)?;
self.supply_in_profit.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.supply_in_profit.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.supply_in_loss.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.supply_in_loss.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.unrealized_profit.cents.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.unrealized_profit.cents.height)
.collect::<Vec<_>>(),
exit,
)?;
self.unrealized_loss.cents.height.compute_sum_of_others(
starting_indexes.height,
&others
@@ -273,7 +257,10 @@ impl UnrealizedBase {
// Pre-collect all cohort data to avoid per-element BytesVec reads in nested loop
let invested_profit_ranges: Vec<Vec<CentsSats>> = others
.iter()
.map(|o| o.invested_capital_in_profit_raw.collect_range_at(start, end))
.map(|o| {
o.invested_capital_in_profit_raw
.collect_range_at(start, end)
})
.collect();
let invested_loss_ranges: Vec<Vec<CentsSats>> = others
.iter()
@@ -336,10 +323,7 @@ impl UnrealizedBase {
}
let investor_price_losers = investor_cap.inner() / invested_cap.inner();
let spot_u128 = spot.as_u128();
(
h,
Cents::new((investor_price_losers - spot_u128) as u64),
)
(h, Cents::new((investor_price_losers - spot_u128) as u64))
},
exit,
)?;
@@ -356,10 +340,7 @@ impl UnrealizedBase {
}
let investor_price_winners = investor_cap.inner() / invested_cap.inner();
let spot_u128 = spot.as_u128();
(
h,
Cents::new((spot_u128 - investor_price_winners) as u64),
)
(h, Cents::new((spot_u128 - investor_price_winners) as u64))
},
exit,
)?;

View File

@@ -18,8 +18,7 @@ impl AddressCohortState {
pub(crate) fn new(path: &Path, name: &str) -> Self {
Self {
addr_count: 0,
inner: CohortState::new(path, name)
.with_price_rounding(COST_BASIS_PRICE_DIGITS),
inner: CohortState::new(path, name).with_price_rounding(COST_BASIS_PRICE_DIGITS),
}
}
@@ -136,5 +135,4 @@ impl AddressCohortState {
self.inner.decrement_snapshot(&snapshot);
}
}

View File

@@ -1,7 +1,7 @@
use std::path::Path;
use brk_error::Result;
use brk_types::{Age, CentsSats, Cents, CostBasisSnapshot, Height, Sats, SupplyState};
use brk_types::{Age, Cents, CentsSats, CostBasisSnapshot, Height, Sats, SupplyState};
use super::super::cost_basis::{CostBasisData, Percentiles, RealizedState, UnrealizedState};
@@ -256,8 +256,7 @@ impl CohortState {
}
pub(crate) fn compute_unrealized_state(&mut self, height_price: Cents) -> UnrealizedState {
self.cost_basis_data
.compute_unrealized_state(height_price)
self.cost_basis_data.compute_unrealized_state(height_price)
}
pub(crate) fn write(&mut self, height: Height, cleanup: bool) -> Result<()> {

View File

@@ -6,7 +6,7 @@ use std::{
use brk_error::{Error, Result};
use brk_types::{
CentsCompact, CentsSats, CentsSquaredSats, Cents, CostBasisDistribution, Height, Sats,
Cents, CentsCompact, CentsSats, CentsSquaredSats, CostBasisDistribution, Height, Sats,
};
use rustc_hash::FxHashMap;
use vecdb::Bytes;
@@ -95,7 +95,13 @@ impl CostBasisData {
pub(crate) fn iter(&self) -> impl Iterator<Item = (CentsCompact, &Sats)> {
self.assert_pending_empty();
self.state.as_ref().unwrap().base.map.iter().map(|(&k, v)| (k, v))
self.state
.as_ref()
.unwrap()
.base
.map
.iter()
.map(|(&k, v)| (k, v))
}
pub(crate) fn is_empty(&self) -> bool {
@@ -105,7 +111,8 @@ impl CostBasisData {
pub(crate) fn first_key_value(&self) -> Option<(CentsCompact, &Sats)> {
self.assert_pending_empty();
self.state
.as_ref().unwrap()
.as_ref()
.unwrap()
.base
.map
.first_key_value()
@@ -115,7 +122,8 @@ impl CostBasisData {
pub(crate) fn last_key_value(&self) -> Option<(CentsCompact, &Sats)> {
self.assert_pending_empty();
self.state
.as_ref().unwrap()
.as_ref()
.unwrap()
.base
.map
.last_key_value()
@@ -179,7 +187,14 @@ impl CostBasisData {
self.percentiles_dirty = true;
}
for (cents, (inc, dec)) in self.pending.drain() {
let entry = self.state.as_mut().unwrap().base.map.entry(cents).or_default();
let entry = self
.state
.as_mut()
.unwrap()
.base
.map
.entry(cents)
.or_default();
*entry += inc;
if *entry < dec {
panic!(
@@ -322,7 +337,10 @@ impl CostBasisData {
}
}
fs::write(self.path_state(height), self.state.as_ref().unwrap().serialize()?)?;
fs::write(
self.path_state(height),
self.state.as_ref().unwrap().serialize()?,
)?;
Ok(())
}

View File

@@ -1,6 +1,6 @@
use std::cmp::Ordering;
use brk_types::{CentsSats, CentsSquaredSats, Cents, Sats};
use brk_types::{Cents, CentsSats, CentsSquaredSats, Sats};
/// Realized state using u128 for raw cent*sat values internally.
/// This avoids overflow and defers division to output time for efficiency.
@@ -156,14 +156,22 @@ impl RealizedState {
/// Increment using pre-computed snapshot values (for address path)
#[inline]
pub(crate) fn increment_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) {
pub(crate) fn increment_snapshot(
&mut self,
price_sats: CentsSats,
investor_cap: CentsSquaredSats,
) {
self.cap_raw += price_sats.as_u128();
self.investor_cap_raw += investor_cap;
}
/// Decrement using pre-computed snapshot values (for address path)
#[inline]
pub(crate) fn decrement_snapshot(&mut self, price_sats: CentsSats, investor_cap: CentsSquaredSats) {
pub(crate) fn decrement_snapshot(
&mut self,
price_sats: CentsSats,
investor_cap: CentsSquaredSats,
) {
self.cap_raw -= price_sats.as_u128();
self.investor_cap_raw -= investor_cap;
}

View File

@@ -35,7 +35,6 @@ impl UnrealizedState {
invested_capital_in_profit_raw: 0,
invested_capital_in_loss_raw: 0,
};
}
/// Internal cache state using u128 for raw cent*sat values.
@@ -279,5 +278,4 @@ impl CachedUnrealizedState {
state
}
}

View File

@@ -4,12 +4,12 @@ use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
Cents, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex,
Height, Indexes, SupplyState, Timestamp, TxIndex, Version,
Cents, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height,
Indexes, SupplyState, Timestamp, TxIndex, Version,
};
use tracing::{debug, info};
use vecdb::{
AnyVec, BytesVec, Database, Exit, ImportableVec, LazyVecFrom1, PAGE_SIZE, ReadOnlyClone,
AnyVec, BytesVec, Database, Exit, ImportableVec, LazyVecFrom1, ReadOnlyClone,
ReadableCloneableVec, ReadableVec, Rw, Stamp, StorageMode, WritableVec,
};
@@ -22,7 +22,9 @@ use crate::{
},
state::BlockState,
},
indexes, inputs, outputs, prices, transactions,
indexes, inputs,
internal::{finalize_db, open_db},
outputs, prices, transactions,
};
use super::{
@@ -34,8 +36,6 @@ use super::{
};
const VERSION: Version = Version::new(22);
/// Main struct holding all computed vectors and state for stateful computation.
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(skip)]
@@ -95,8 +95,7 @@ impl Vecs {
let db_path = parent.join(super::DB_NAME);
let states_path = db_path.join("states");
let db = Database::open(&db_path)?;
db.set_min_len(PAGE_SIZE * 20_000_000)?;
let db = open_db(parent, super::DB_NAME, 20_000_000)?;
db.set_min_regions(50_000)?;
let version = parent_version + VERSION;
@@ -139,8 +138,7 @@ impl Vecs {
let total_addr_count = TotalAddrCountVecs::forced_import(&db, version, indexes)?;
// Per-block delta of total (global + per-type)
let new_addr_count =
NewAddrCountVecs::forced_import(&db, version, indexes)?;
let new_addr_count = NewAddrCountVecs::forced_import(&db, version, indexes)?;
// Growth rate: new / addr_count (global + per-type)
let growth_rate = GrowthRateVecs::forced_import(&db, version, indexes)?;
@@ -180,13 +178,7 @@ impl Vecs {
states_path,
};
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
finalize_db(&this.db, &this)?;
Ok(this)
}
@@ -308,7 +300,10 @@ impl Vecs {
Height::ZERO
} else if chain_state.len() == usize::from(recovered_height) {
// Normal resume: chain_state already matches, reuse as-is
debug!("reusing in-memory chain_state ({} entries)", chain_state.len());
debug!(
"reusing in-memory chain_state ({} entries)",
chain_state.len()
);
recovered_height
} else {
debug!("rebuilding chain_state from stored values");
@@ -359,8 +354,7 @@ impl Vecs {
let cached_prices = std::mem::take(&mut self.cached_prices);
let cached_timestamps = std::mem::take(&mut self.cached_timestamps);
let cached_price_range_max =
std::mem::take(&mut self.cached_price_range_max);
let cached_price_range_max = std::mem::take(&mut self.cached_price_range_max);
process_blocks(
self,
@@ -424,8 +418,12 @@ impl Vecs {
self.address_activity
.compute_rest(starting_indexes.height, &window_starts, exit)?;
self.new_addr_count
.compute(starting_indexes.height, &window_starts, &self.total_addr_count, exit)?;
self.new_addr_count.compute(
starting_indexes.height,
&window_starts,
&self.total_addr_count,
exit,
)?;
// 6e. Compute growth_rate = new_addr_count / addr_count
self.growth_rate.compute(

View File

@@ -6,7 +6,7 @@ use brk_types::{
P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex,
P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex, Version,
};
use vecdb::{ReadableCloneableVec, LazyVecFrom1};
use vecdb::{LazyVecFrom1, ReadableCloneableVec};
#[derive(Clone, Traversable)]
pub struct Vecs {
@@ -26,12 +26,14 @@ pub struct Vecs {
#[derive(Clone, Traversable)]
pub struct P2PK33Vecs {
pub identity: LazyVecFrom1<P2PK33AddressIndex, P2PK33AddressIndex, P2PK33AddressIndex, P2PK33Bytes>,
pub identity:
LazyVecFrom1<P2PK33AddressIndex, P2PK33AddressIndex, P2PK33AddressIndex, P2PK33Bytes>,
}
#[derive(Clone, Traversable)]
pub struct P2PK65Vecs {
pub identity: LazyVecFrom1<P2PK65AddressIndex, P2PK65AddressIndex, P2PK65AddressIndex, P2PK65Bytes>,
pub identity:
LazyVecFrom1<P2PK65AddressIndex, P2PK65AddressIndex, P2PK65AddressIndex, P2PK65Bytes>,
}
#[derive(Clone, Traversable)]
@@ -51,7 +53,8 @@ pub struct P2TRVecs {
#[derive(Clone, Traversable)]
pub struct P2WPKHVecs {
pub identity: LazyVecFrom1<P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHBytes>,
pub identity:
LazyVecFrom1<P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHBytes>,
}
#[derive(Clone, Traversable)]
@@ -163,7 +166,11 @@ impl Vecs {
identity: LazyVecFrom1::init(
"emptyoutputindex",
version,
indexer.vecs.scripts.empty_to_txindex.read_only_boxed_clone(),
indexer
.vecs
.scripts
.empty_to_txindex
.read_only_boxed_clone(),
|index, _| index,
),
},
@@ -171,7 +178,11 @@ impl Vecs {
identity: LazyVecFrom1::init(
"unknownoutputindex",
version,
indexer.vecs.scripts.unknown_to_txindex.read_only_boxed_clone(),
indexer
.vecs
.scripts
.unknown_to_txindex
.read_only_boxed_clone(),
|index, _| index,
),
},
@@ -179,7 +190,11 @@ impl Vecs {
identity: LazyVecFrom1::init(
"opreturnindex",
version,
indexer.vecs.scripts.opreturn_to_txindex.read_only_boxed_clone(),
indexer
.vecs
.scripts
.opreturn_to_txindex
.read_only_boxed_clone(),
|index, _| index,
),
},

View File

@@ -1,8 +1,7 @@
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, Year10, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4,
Minute10, Minute30, Month1, Month3, Month6, StoredU64, Version, Week1,
Year1,
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12, Minute10, Minute30,
Month1, Month3, Month6, StoredU64, Version, Week1, Year1, Year10,
};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode};

View File

@@ -25,12 +25,15 @@ use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
Date, Day1, Day3, Hour1, Hour4, Hour12, Indexes, Minute10, Minute30, Month1,
Month3, Month6, Version, Week1, Year1, Year10,
Date, Day1, Day3, Height, Hour1, Hour4, Hour12, Indexes, Minute10, Minute30, Month1, Month3,
Month6, Version, Week1, Year1, Year10,
};
use vecdb::{Database, Exit, PAGE_SIZE, ReadableVec, Rw, StorageMode};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::blocks;
use crate::{
blocks,
internal::{finalize_db, open_db},
};
pub use address::Vecs as AddressVecs;
pub use day1::Vecs as Day1Vecs;
@@ -86,8 +89,7 @@ impl Vecs {
parent_version: Version,
indexer: &Indexer,
) -> Result<Self> {
let db = Database::open(&parent.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
let db = open_db(parent, DB_NAME, 10_000_000)?;
let version = parent_version;
@@ -115,13 +117,7 @@ impl Vecs {
db,
};
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
finalize_db(&this.db, &this)?;
Ok(this)
}
@@ -148,7 +144,39 @@ impl Vecs {
starting_indexes: Indexes,
exit: &Exit,
) -> Result<Indexes> {
// Transaction indexes - compute input/output counts
self.compute_tx_indexes(indexer, &starting_indexes, exit)?;
self.compute_height_indexes(indexer, &starting_indexes, exit)?;
let prev_height = starting_indexes.height.decremented().unwrap_or_default();
self.compute_timestamp_mappings(blocks_time, &starting_indexes, exit)?;
let starting_day1 = self.compute_calendar_mappings(
indexer,
blocks_time,
&starting_indexes,
prev_height,
exit,
)?;
self.compute_period_vecs(
indexer,
blocks_time,
&starting_indexes,
prev_height,
starting_day1,
exit,
)?;
Ok(starting_indexes)
}
fn compute_tx_indexes(
&mut self,
indexer: &Indexer,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.txindex.input_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.transactions.first_txinindex,
@@ -161,80 +189,68 @@ impl Vecs {
&indexer.vecs.outputs.value,
exit,
)?;
Ok(())
}
// Height indexes
fn compute_height_indexes(
&mut self,
indexer: &Indexer,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.height.txindex_count.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexer.vecs.transactions.txid,
exit,
)?;
self.height.identity.compute_from_index(
starting_indexes.height,
&indexer.vecs.blocks.weight,
exit,
)?;
Ok(())
}
let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default();
fn compute_timestamp_mappings(
&mut self,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
macro_rules! from_timestamp {
($field:ident, $period:ty) => {
self.height.$field.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, <$period>::from_timestamp(ts)),
exit,
)?;
};
}
// --- Timestamp-based height → period mappings ---
from_timestamp!(minute10, Minute10);
from_timestamp!(minute30, Minute30);
from_timestamp!(hour1, Hour1);
from_timestamp!(hour4, Hour4);
from_timestamp!(hour12, Hour12);
from_timestamp!(day3, Day3);
// Minute10
self.height.minute10.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Minute10::from_timestamp(ts)),
exit,
)?;
Ok(())
}
// Minute30
self.height.minute30.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Minute30::from_timestamp(ts)),
exit,
)?;
// Hour1
self.height.hour1.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Hour1::from_timestamp(ts)),
exit,
)?;
// Hour4
self.height.hour4.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Hour4::from_timestamp(ts)),
exit,
)?;
// Hour12
self.height.hour12.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Hour12::from_timestamp(ts)),
exit,
)?;
// Day3
self.height.day3.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
|(h, ts, _)| (h, Day3::from_timestamp(ts)),
exit,
)?;
// --- Calendar-based height → period mappings ---
// Day1 (uses blocks_time.date computed in blocks::time::compute_early)
fn compute_calendar_mappings(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
exit: &Exit,
) -> Result<Day1> {
let starting_day1 = self
.height
.day1
.collect_one(decremented_starting_height)
.collect_one(prev_height)
.unwrap_or_default();
self.height.day1.compute_transform(
@@ -244,73 +260,14 @@ impl Vecs {
exit,
)?;
let starting_day1 =
if let Some(day1) = self.height.day1.collect_one(decremented_starting_height) {
starting_day1.min(day1)
} else {
starting_day1
};
let starting_day1 = if let Some(day1) = self.height.day1.collect_one(prev_height) {
starting_day1.min(day1)
} else {
starting_day1
};
// Difficulty epoch
let starting_difficultyepoch = self
.height
.difficultyepoch
.collect_one(decremented_starting_height)
.unwrap_or_default();
self.compute_epoch(indexer, blocks_time, starting_indexes, prev_height, exit)?;
self.height.difficultyepoch.compute_from_index(
starting_indexes.height,
&indexer.vecs.blocks.weight,
exit,
)?;
self.difficultyepoch.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.difficultyepoch,
exit,
)?;
self.difficultyepoch.identity.compute_from_index(
starting_difficultyepoch,
&self.difficultyepoch.first_height,
exit,
)?;
self.difficultyepoch
.height_count
.compute_count_from_indexes(
starting_difficultyepoch,
&self.difficultyepoch.first_height,
&blocks_time.date,
exit,
)?;
// Halving epoch
let starting_halvingepoch = self
.height
.halvingepoch
.collect_one(decremented_starting_height)
.unwrap_or_default();
self.height.halvingepoch.compute_from_index(
starting_indexes.height,
&indexer.vecs.blocks.weight,
exit,
)?;
self.halvingepoch.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.halvingepoch,
exit,
)?;
self.halvingepoch.identity.compute_from_index(
starting_halvingepoch,
&self.halvingepoch.first_height,
exit,
)?;
// Height → period mappings (calendar-based, derived from height.day1)
self.height.week1.compute_transform(
starting_indexes.height,
&self.height.day1,
@@ -348,81 +305,114 @@ impl Vecs {
exit,
)?;
// --- Compute period-level vecs (first_height + identity) ---
Ok(starting_day1)
}
let prev_height = decremented_starting_height;
fn compute_epoch(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
exit: &Exit,
) -> Result<()> {
let starting_difficultyepoch = self
.height
.difficultyepoch
.collect_one(prev_height)
.unwrap_or_default();
// Minute10
self.minute10.first_height.compute_first_per_index(
self.height.difficultyepoch.compute_from_index(
starting_indexes.height,
&self.height.minute10,
&indexer.vecs.blocks.weight,
exit,
)?;
self.minute10.identity.compute_from_index(
self.height.minute10.collect_one(prev_height).unwrap_or_default(),
&self.minute10.first_height,
exit,
)?;
// Minute30
self.minute30.first_height.compute_first_per_index(
self.difficultyepoch.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.minute30,
&self.height.difficultyepoch,
exit,
)?;
self.minute30.identity.compute_from_index(
self.height.minute30.collect_one(prev_height).unwrap_or_default(),
&self.minute30.first_height,
self.difficultyepoch.identity.compute_from_index(
starting_difficultyepoch,
&self.difficultyepoch.first_height,
exit,
)?;
self.difficultyepoch
.height_count
.compute_count_from_indexes(
starting_difficultyepoch,
&self.difficultyepoch.first_height,
&blocks_time.date,
exit,
)?;
// Hour1
self.hour1.first_height.compute_first_per_index(
let starting_halvingepoch = self
.height
.halvingepoch
.collect_one(prev_height)
.unwrap_or_default();
self.height.halvingepoch.compute_from_index(
starting_indexes.height,
&self.height.hour1,
&indexer.vecs.blocks.weight,
exit,
)?;
self.hour1.identity.compute_from_index(
self.height.hour1.collect_one(prev_height).unwrap_or_default(),
&self.hour1.first_height,
exit,
)?;
// Hour4
self.hour4.first_height.compute_first_per_index(
self.halvingepoch.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.hour4,
&self.height.halvingepoch,
exit,
)?;
self.hour4.identity.compute_from_index(
self.height.hour4.collect_one(prev_height).unwrap_or_default(),
&self.hour4.first_height,
self.halvingepoch.identity.compute_from_index(
starting_halvingepoch,
&self.halvingepoch.first_height,
exit,
)?;
// Hour12
self.hour12.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.hour12,
exit,
)?;
self.hour12.identity.compute_from_index(
self.height.hour12.collect_one(prev_height).unwrap_or_default(),
&self.hour12.first_height,
exit,
)?;
Ok(())
}
fn compute_period_vecs(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
starting_day1: Day1,
exit: &Exit,
) -> Result<()> {
macro_rules! basic_period {
($period:ident) => {
self.$period.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.$period,
exit,
)?;
self.$period.identity.compute_from_index(
self.height
.$period
.collect_one(prev_height)
.unwrap_or_default(),
&self.$period.first_height,
exit,
)?;
};
}
basic_period!(minute10);
basic_period!(minute30);
basic_period!(hour1);
basic_period!(hour4);
basic_period!(hour12);
basic_period!(day3);
// Day1
self.day1.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.day1,
exit,
)?;
self.day1.identity.compute_from_index(
starting_day1,
&self.day1.first_height,
exit,
)?;
self.day1
.identity
.compute_from_index(starting_day1, &self.day1.first_height, exit)?;
self.day1.date.compute_transform(
starting_day1,
&self.day1.identity,
@@ -436,134 +426,41 @@ impl Vecs {
exit,
)?;
// Day3
self.day3.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.day3,
exit,
)?;
self.day3.identity.compute_from_index(
self.height.day3.collect_one(prev_height).unwrap_or_default(),
&self.day3.first_height,
exit,
)?;
let date = &blocks_time.date;
let blocks_time_date = &blocks_time.date;
macro_rules! dated_period {
($period:ident) => {{
self.$period.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.$period,
exit,
)?;
let start = self
.height
.$period
.collect_one(prev_height)
.unwrap_or_default();
self.$period.identity.compute_from_index(
start,
&self.$period.first_height,
exit,
)?;
self.$period.date.compute_transform(
start,
&self.$period.first_height,
|(idx, first_h, _)| (idx, date.collect_one(first_h).unwrap()),
exit,
)?;
}};
}
// Week
self.week1.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.week1,
exit,
)?;
let starting_week1 = self.height.week1.collect_one(prev_height).unwrap_or_default();
self.week1.identity.compute_from_index(
starting_week1,
&self.week1.first_height,
exit,
)?;
self.week1.date.compute_transform(
starting_week1,
&self.week1.first_height,
|(wi, first_h, _)| (wi, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
dated_period!(week1);
dated_period!(month1);
dated_period!(month3);
dated_period!(month6);
dated_period!(year1);
dated_period!(year10);
// Month
self.month1.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.month1,
exit,
)?;
let starting_month1 = self.height.month1.collect_one(prev_height).unwrap_or_default();
self.month1.identity.compute_from_index(
starting_month1,
&self.month1.first_height,
exit,
)?;
self.month1.date.compute_transform(
starting_month1,
&self.month1.first_height,
|(mi, first_h, _)| (mi, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
// Quarter
self.month3.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.month3,
exit,
)?;
let starting_month3 = self.height.month3.collect_one(prev_height).unwrap_or_default();
self.month3.identity.compute_from_index(
starting_month3,
&self.month3.first_height,
exit,
)?;
self.month3.date.compute_transform(
starting_month3,
&self.month3.first_height,
|(qi, first_h, _)| (qi, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
// Semester
self.month6.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.month6,
exit,
)?;
let starting_month6 = self.height.month6.collect_one(prev_height).unwrap_or_default();
self.month6.identity.compute_from_index(
starting_month6,
&self.month6.first_height,
exit,
)?;
self.month6.date.compute_transform(
starting_month6,
&self.month6.first_height,
|(si, first_h, _)| (si, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
// Year
self.year1.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.year1,
exit,
)?;
let starting_year1 = self.height.year1.collect_one(prev_height).unwrap_or_default();
self.year1.identity.compute_from_index(
starting_year1,
&self.year1.first_height,
exit,
)?;
self.year1.date.compute_transform(
starting_year1,
&self.year1.first_height,
|(yi, first_h, _)| (yi, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
// Decade
self.year10.first_height.compute_first_per_index(
starting_indexes.height,
&self.height.year10,
exit,
)?;
let starting_year10 = self.height.year10.collect_one(prev_height).unwrap_or_default();
self.year10.identity.compute_from_index(
starting_year10,
&self.year10.first_height,
exit,
)?;
self.year10.date.compute_transform(
starting_year10,
&self.year10.first_height,
|(di, first_h, _)| (di, blocks_time_date.collect_one(first_h).unwrap()),
exit,
)?;
Ok(starting_indexes)
Ok(())
}
}

View File

@@ -1,7 +1,9 @@
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{StoredU64, TxIndex, Txid, Version};
use vecdb::{Database, EagerVec, ImportableVec, ReadableCloneableVec, LazyVecFrom1, PcoVec, Rw, StorageMode};
use vecdb::{
Database, EagerVec, ImportableVec, LazyVecFrom1, PcoVec, ReadableCloneableVec, Rw, StorageMode,
};
use brk_error::Result;
@@ -13,7 +15,11 @@ pub struct Vecs<M: StorageMode = Rw> {
}
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexer: &Indexer) -> Result<Self> {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexer: &Indexer,
) -> Result<Self> {
Ok(Self {
identity: LazyVecFrom1::init(
"txindex",

View File

@@ -1,7 +1,7 @@
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{OutPoint, TxInIndex, Version};
use vecdb::{ReadableCloneableVec, LazyVecFrom1};
use vecdb::{LazyVecFrom1, ReadableCloneableVec};
#[derive(Clone, Traversable)]
pub struct Vecs {

View File

@@ -1,7 +1,7 @@
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{Sats, TxOutIndex, Version};
use vecdb::{ReadableCloneableVec, LazyVecFrom1};
use vecdb::{LazyVecFrom1, ReadableCloneableVec};
#[derive(Clone, Traversable)]
pub struct Vecs {

View File

@@ -1,5 +1,5 @@
use brk_traversable::Traversable;
use brk_types::{Date, Year10, Height, Version};
use brk_types::{Date, Height, Version, Year10};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, Rw, StorageMode};
use brk_error::Result;

View File

@@ -16,11 +16,8 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.count.window_starts();
self.0.compute(
starting_indexes.height,
&window_starts,
exit,
|full| {
self.0
.compute(starting_indexes.height, &window_starts, exit, |full| {
full.compute_with_skip(
starting_indexes.height,
&indexes.txindex.input_count,
@@ -29,8 +26,7 @@ impl Vecs {
exit,
0,
)
},
)?;
})?;
Ok(())
}

View File

@@ -1,12 +1,14 @@
use std::path::Path;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::Version;
use vecdb::{Database, PAGE_SIZE};
use crate::{
indexes,
internal::{finalize_db, open_db},
};
use super::{CountVecs, SpentVecs, Vecs};
use crate::indexes;
impl Vecs {
pub(crate) fn forced_import(
@@ -14,23 +16,14 @@ impl Vecs {
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join(super::DB_NAME))?;
db.set_min_len(PAGE_SIZE * 50_000_000)?;
let db = open_db(parent_path, super::DB_NAME, 50_000_000)?;
let version = parent_version;
let spent = SpentVecs::forced_import(&db, version)?;
let count = CountVecs::forced_import(&db, version, indexes)?;
let this = Self { db, spent, count };
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
finalize_db(&this.db, &this)?;
Ok(this)
}
}

View File

@@ -2,7 +2,7 @@ use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Indexes, Sats, TxInIndex, TxIndex, TxOutIndex, Vout};
use tracing::info;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, VecIndex};
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, VecIndex, WritableVec};
use super::Vecs;
@@ -41,7 +41,7 @@ impl Vecs {
let first_txoutindex_reader = indexer.vecs.transactions.first_txoutindex.reader();
let value_reader = indexer.vecs.outputs.value.reader();
let actual_total = target - min;
let mut entries: Vec<Entry> = Vec::with_capacity(actual_total.min(BATCH_SIZE));
let mut entries: Vec<Entry> = Vec::with_capacity(actual_total.min(BATCH_SIZE));
let mut batch_start = min;
while batch_start < target {
@@ -49,16 +49,20 @@ impl Vecs {
entries.clear();
let mut j = 0usize;
indexer.vecs.inputs.outpoint.for_each_range_at(batch_start, batch_end, |outpoint| {
entries.push(Entry {
txinindex: TxInIndex::from(batch_start + j),
txindex: outpoint.txindex(),
vout: outpoint.vout(),
txoutindex: TxOutIndex::COINBASE,
value: Sats::MAX,
indexer
.vecs
.inputs
.outpoint
.for_each_range_at(batch_start, batch_end, |outpoint| {
entries.push(Entry {
txinindex: TxInIndex::from(batch_start + j),
txindex: outpoint.txindex(),
vout: outpoint.vout(),
txoutindex: TxOutIndex::COINBASE,
value: Sats::MAX,
});
j += 1;
});
j += 1;
});
// Coinbase entries (txindex MAX) sorted to end
entries.sort_unstable_by_key(|e| e.txindex);
@@ -66,7 +70,8 @@ impl Vecs {
if entry.txindex.is_coinbase() {
break;
}
entry.txoutindex = first_txoutindex_reader.get(entry.txindex.to_usize()) + entry.vout;
entry.txoutindex =
first_txoutindex_reader.get(entry.txindex.to_usize()) + entry.vout;
}
entries.sort_unstable_by_key(|e| e.txoutindex);

View File

@@ -1,21 +1,14 @@
//! Compute functions for aggregation - take optional vecs, compute what's needed.
//!
//! These functions replace the Option-based compute logic in flexible builders.
//! Each function takes optional mutable references and computes only for Some() vecs.
use brk_error::Result;
use brk_types::{CheckedSub, StoredU64};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, WritableVec, ReadableVec, PcoVec, VecIndex,
VecValue,
AnyStoredVec, AnyVec, EagerVec, Exit, PcoVec, ReadableVec, VecIndex, VecValue, WritableVec,
};
use brk_types::get_percentile;
use crate::internal::ComputedVecValue;
/// Helper to validate and get starting index for a single vec
fn validate_and_start<I: VecIndex, T: ComputedVecValue + JsonSchema>(
vec: &mut EagerVec<PcoVec<I, T>>,
combined_version: vecdb::Version,
@@ -25,14 +18,6 @@ fn validate_and_start<I: VecIndex, T: ComputedVecValue + JsonSchema>(
Ok(current_start.min(I::from(vec.len())))
}
/// Compute aggregations from a source vec into target vecs.
///
/// This function computes all requested aggregations in a single pass when possible,
/// optimizing for the common case where multiple aggregations are needed.
///
/// The `skip_count` parameter allows skipping the first N items from ALL calculations.
/// This is useful for excluding coinbase transactions (which have 0 fee) from
/// fee/feerate aggregations.
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_aggregations<I, T, A>(
max_from: I,
@@ -97,7 +82,9 @@ where
let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| {
index.decremented().map_or(T::from(0_usize), |idx| {
cumulative_vec.collect_one_at(idx.to_usize()).unwrap_or(T::from(0_usize))
cumulative_vec
.collect_one_at(idx.to_usize())
.unwrap_or(T::from(0_usize))
})
});
@@ -106,7 +93,11 @@ where
let first_indexes_batch: Vec<A> = first_indexes.collect_range_at(start, fi_len);
let count_indexes_batch: Vec<StoredU64> = count_indexes.collect_range_at(start, fi_len);
first_indexes_batch.into_iter().zip(count_indexes_batch).enumerate().try_for_each(|(j, (first_index, count_index))| -> Result<()> {
first_indexes_batch
.into_iter()
.zip(count_indexes_batch)
.enumerate()
.try_for_each(|(j, (first_index, count_index))| -> Result<()> {
let idx = start + j;
let count = u64::from(count_index) as usize;
@@ -116,7 +107,9 @@ where
if let Some(ref mut first_vec) = first {
let f = if effective_count > 0 {
source.collect_one_at(effective_first_index.to_usize()).unwrap()
source
.collect_one_at(effective_first_index.to_usize())
.unwrap()
} else {
T::from(0_usize)
};
@@ -259,10 +252,19 @@ where
} else if needs_aggregates {
// Aggregates only (sum/average/cumulative) — no Vec allocation needed
let efi = effective_first_index.to_usize();
let (sum_val, len) = source.fold_range_at(efi, efi + effective_count, (T::from(0_usize), 0_usize), |(acc, cnt), val| (acc + val, cnt + 1));
let (sum_val, len) = source.fold_range_at(
efi,
efi + effective_count,
(T::from(0_usize), 0_usize),
|(acc, cnt), val| (acc + val, cnt + 1),
);
if let Some(ref mut average_vec) = average {
let avg = if len > 0 { sum_val / len } else { T::from(0_usize) };
let avg = if len > 0 {
sum_val / len
} else {
T::from(0_usize)
};
average_vec.truncate_push_at(idx, avg)?;
}
@@ -296,10 +298,6 @@ where
Ok(())
}
/// Compute distribution stats from a fixed n-block rolling window.
///
/// For each height `h`, aggregates all source items from blocks `max(0, h - n_blocks + 1)..=h`
/// and computes average, min, max, median, and percentiles across the full window.
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_aggregations_nblock_window<I, T, A>(
max_from: I,
@@ -322,11 +320,19 @@ where
T: ComputedVecValue + JsonSchema,
A: VecIndex + VecValue + CheckedSub<A>,
{
let combined_version =
source.version() + first_indexes.version() + count_indexes.version();
let combined_version = source.version() + first_indexes.version() + count_indexes.version();
let mut idx = max_from;
for vec in [&mut *min, &mut *max, &mut *average, &mut *median, &mut *pct10, &mut *pct25, &mut *pct75, &mut *pct90] {
for vec in [
&mut *min,
&mut *max,
&mut *average,
&mut *median,
&mut *pct10,
&mut *pct25,
&mut *pct75,
&mut *pct90,
] {
idx = validate_and_start(vec, combined_version, idx)?;
}
let index = idx;
@@ -362,7 +368,16 @@ where
let effective_count = range_end_usize.saturating_sub(range_start_usize);
if effective_count == 0 {
for vec in [&mut *min, &mut *max, &mut *average, &mut *median, &mut *pct10, &mut *pct25, &mut *pct75, &mut *pct90] {
for vec in [
&mut *min,
&mut *max,
&mut *average,
&mut *median,
&mut *pct10,
&mut *pct25,
&mut *pct75,
&mut *pct90,
] {
vec.truncate_push_at(idx, zero)?;
}
} else {

View File

@@ -0,0 +1,52 @@
use brk_error::Result;
use brk_types::BasisPointsSigned16;
use vecdb::{EagerVec, Exit, PcoVec, ReadableVec, VecIndex, VecValue};
pub trait ComputeDrawdown<I: VecIndex> {
fn compute_drawdown<C, A>(
&mut self,
max_from: I,
current: &impl ReadableVec<I, C>,
ath: &impl ReadableVec<I, A>,
exit: &Exit,
) -> Result<()>
where
C: VecValue,
A: VecValue,
f64: From<C> + From<A>;
}
impl<I> ComputeDrawdown<I> for EagerVec<PcoVec<I, BasisPointsSigned16>>
where
    I: VecIndex,
{
    /// Derives the drawdown as `(current - ath) / ath`, stored as signed
    /// basis points. A zero all-time high yields the default value so we
    /// never divide by zero.
    fn compute_drawdown<C, A>(
        &mut self,
        max_from: I,
        current: &impl ReadableVec<I, C>,
        ath: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        C: VecValue,
        A: VecValue,
        f64: From<C> + From<A>,
    {
        self.compute_transform2(
            max_from,
            current,
            ath,
            |(idx, value, peak, _)| {
                let peak_f64 = f64::from(peak);
                if peak_f64 == 0.0 {
                    // No meaningful high yet: report the default (no drawdown).
                    (idx, BasisPointsSigned16::default())
                } else {
                    let ratio = (f64::from(value) - peak_f64) / peak_f64;
                    (idx, BasisPointsSigned16::from(ratio))
                }
            },
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -1,6 +1,12 @@
mod aggregation;
mod drawdown;
mod sliding_distribution;
mod sliding_median;
pub(crate) mod sliding_window;
mod tdigest;
pub(crate) use aggregation::*;
pub(crate) use drawdown::*;
pub(crate) use sliding_distribution::*;
pub(crate) use sliding_median::*;
pub(crate) use tdigest::*;

View File

@@ -0,0 +1,158 @@
use brk_error::Result;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, PcoVec, PcoVecValue, ReadableVec, VecIndex, VecValue,
WritableVec,
};
use super::sliding_window::SlidingWindowSorted;
/// Compute all 8 rolling distribution stats (avg, min, max, p10, p25, median, p75, p90)
/// in a single sorted-vec pass per window.
///
/// `window_starts[i]` is the index of the first element of the window ending
/// at `i`, so output `i` summarizes `values[window_starts[i]..=i]`. All eight
/// output vectors advance in lockstep and are flushed together.
#[allow(clippy::too_many_arguments)]
pub fn compute_rolling_distribution_from_starts<I, T, A>(
    max_from: I,
    window_starts: &impl ReadableVec<I, I>,
    values: &impl ReadableVec<I, A>,
    average_out: &mut EagerVec<PcoVec<I, T>>,
    min_out: &mut EagerVec<PcoVec<I, T>>,
    max_out: &mut EagerVec<PcoVec<I, T>>,
    p10_out: &mut EagerVec<PcoVec<I, T>>,
    p25_out: &mut EagerVec<PcoVec<I, T>>,
    median_out: &mut EagerVec<PcoVec<I, T>>,
    p75_out: &mut EagerVec<PcoVec<I, T>>,
    p90_out: &mut EagerVec<PcoVec<I, T>>,
    exit: &Exit,
) -> Result<()>
where
    I: VecIndex,
    T: PcoVecValue + From<f64>,
    A: VecValue + Copy,
    f64: From<A>,
{
    // Invalidate cached output whenever either input's version changes.
    let version = window_starts.version() + values.version();
    for v in [
        &mut *average_out,
        &mut *min_out,
        &mut *max_out,
        &mut *p10_out,
        &mut *p25_out,
        &mut *median_out,
        &mut *p75_out,
        &mut *p90_out,
    ] {
        v.validate_and_truncate(version, max_from)?;
    }
    // Resume from the shortest output so all eight vectors stay aligned:
    // anything at or past `skip` gets (re)computed.
    let skip = [
        average_out.len(),
        min_out.len(),
        max_out.len(),
        p10_out.len(),
        p25_out.len(),
        median_out.len(),
        p75_out.len(),
        p90_out.len(),
    ]
    .into_iter()
    .min()
    .unwrap();
    let end = window_starts.len().min(values.len());
    if skip >= end {
        // Nothing new to compute.
        return Ok(());
    }
    // Earliest source index any remaining window reaches back to: the start
    // of the window that ended at `skip - 1`.
    let range_start = if skip > 0 {
        window_starts.collect_one_at(skip - 1).unwrap().to_usize()
    } else {
        0
    };
    // Materialize the needed slice of `values` once, converted to f64.
    let partial_values: Vec<f64> = values
        .collect_range_at(range_start, end)
        .into_iter()
        .map(|a| f64::from(a))
        .collect();
    // Pre-size the sorted window: when resuming, use the width of the first
    // window to process; otherwise a capped guess (1024) avoids a huge
    // up-front allocation.
    let capacity = if skip > 0 && skip < end {
        let first_start = window_starts.collect_one_at(skip).unwrap().to_usize();
        (skip + 1).saturating_sub(first_start)
    } else if !partial_values.is_empty() {
        partial_values.len().min(1024)
    } else {
        0
    };
    let mut window = SlidingWindowSorted::with_capacity(capacity);
    if skip > 0 {
        // Rebuild the in-flight window state as of the last completed index.
        window.reconstruct(&partial_values, range_start, skip);
    }
    let starts_batch = window_starts.collect_range_at(skip, end);
    for (j, start) in starts_batch.into_iter().enumerate() {
        let i = skip + j;
        let v = partial_values[i - range_start];
        let start_usize = start.to_usize();
        // Insert the new value and evict everything before the window start.
        window.advance(v, start_usize, &partial_values, range_start);
        if window.is_empty() {
            // Empty window: emit zeros so all outputs stay index-aligned.
            let zero = T::from(0.0);
            for v in [
                &mut *average_out,
                &mut *min_out,
                &mut *max_out,
                &mut *p10_out,
                &mut *p25_out,
                &mut *median_out,
                &mut *p75_out,
                &mut *p90_out,
            ] {
                v.checked_push_at(i, zero)?;
            }
        } else {
            // One sorted-window state feeds all eight statistics.
            average_out.checked_push_at(i, T::from(window.average()))?;
            min_out.checked_push_at(i, T::from(window.min()))?;
            max_out.checked_push_at(i, T::from(window.max()))?;
            p10_out.checked_push_at(i, T::from(window.percentile(0.10)))?;
            p25_out.checked_push_at(i, T::from(window.percentile(0.25)))?;
            median_out.checked_push_at(i, T::from(window.percentile(0.50)))?;
            p75_out.checked_push_at(i, T::from(window.percentile(0.75)))?;
            p90_out.checked_push_at(i, T::from(window.percentile(0.90)))?;
        }
        // Outputs grow in lockstep, so checking one batch limit suffices.
        if average_out.batch_limit_reached() {
            // NOTE(review): holding the exit lock while persisting presumably
            // blocks shutdown mid-flush — confirm `Exit` semantics.
            let _lock = exit.lock();
            for v in [
                &mut *average_out,
                &mut *min_out,
                &mut *max_out,
                &mut *p10_out,
                &mut *p25_out,
                &mut *median_out,
                &mut *p75_out,
                &mut *p90_out,
            ] {
                v.write()?;
            }
        }
    }
    // Final flush
    let _lock = exit.lock();
    for v in [
        average_out,
        min_out,
        max_out,
        p10_out,
        p25_out,
        median_out,
        p75_out,
        p90_out,
    ] {
        v.write()?;
    }
    Ok(())
}

View File

@@ -0,0 +1,90 @@
use brk_error::Result;
use vecdb::{
AnyVec, EagerVec, Exit, PcoVec, PcoVecValue, ReadableVec, VecIndex, VecValue, WritableVec,
};
use super::sliding_window::SlidingWindowSorted;
/// Extension trait for computing a rolling median over windows whose start
/// index per element is given by a companion `window_starts` vector.
pub trait ComputeRollingMedianFromStarts<I: VecIndex, T> {
    /// Computes, for each index `i` from `max_from` on, the median of
    /// `values[window_starts[i]..=i]`.
    ///
    /// - `max_from`: first index that may need (re)computation.
    /// - `window_starts`: per-index start of the rolling window.
    /// - `values`: source series; must convert to `f64` for the median math.
    /// - `exit`: handle passed to the batched-write loop (presumably for
    ///   graceful shutdown — confirm `Exit` semantics).
    fn compute_rolling_median_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Copy,
        f64: From<A>;
}
impl<I, T> ComputeRollingMedianFromStarts<I, T> for EagerVec<PcoVec<I, T>>
where
    I: VecIndex,
    T: PcoVecValue + From<f64>,
{
    /// Rolling median via a sorted sliding window: output `i` is the median
    /// of `values[window_starts[i]..=i]`, computed batch-by-batch.
    fn compute_rolling_median_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Copy,
        f64: From<A>,
    {
        // Drop any stale tail if either input's version changed.
        self.validate_and_truncate(window_starts.version() + values.version(), max_from)?;
        // NOTE(review): `repeat_until_complete` appears to re-invoke the
        // closure after each batched write until no progress remains
        // (inferred from the `batch_limit_reached` + `break` pattern below) —
        // confirm against vecdb.
        self.repeat_until_complete(exit, |this| {
            // Resume where the previous pass stopped.
            let skip = this.len();
            let end = window_starts.len().min(values.len());
            // Earliest source index the first remaining window reaches back
            // to: the start of the window that ended at `skip - 1`.
            let range_start = if skip > 0 {
                window_starts.collect_one_at(skip - 1).unwrap().to_usize()
            } else {
                0
            };
            // Materialize the needed slice of `values` once, as f64.
            // NOTE(review): unlike the distribution variant there is no
            // `skip >= end` early return here; verify `collect_range_at`
            // tolerates an empty/inverted range.
            let partial_values: Vec<f64> = values
                .collect_range_at(range_start, end)
                .into_iter()
                .map(|a| f64::from(a))
                .collect();
            // Pre-size the sorted window: resuming uses the first pending
            // window's width, otherwise a capped guess (1024).
            let capacity = if skip > 0 && skip < end {
                let first_start = window_starts.collect_one_at(skip).unwrap().to_usize();
                (skip + 1).saturating_sub(first_start)
            } else if !partial_values.is_empty() {
                partial_values.len().min(1024)
            } else {
                0
            };
            let mut window = SlidingWindowSorted::with_capacity(capacity);
            if skip > 0 {
                // Rebuild the in-flight window state for the last completed index.
                window.reconstruct(&partial_values, range_start, skip);
            }
            let starts_batch = window_starts.collect_range_at(skip, end);
            for (j, start) in starts_batch.into_iter().enumerate() {
                let i = skip + j;
                let v = partial_values[i - range_start];
                let start_usize = start.to_usize();
                // Insert the new value, evict entries before the window start.
                window.advance(v, start_usize, &partial_values, range_start);
                let median = window.percentile(0.50);
                this.checked_push_at(i, T::from(median))?;
                if this.batch_limit_reached() {
                    // Yield so the driver can flush; the next pass resumes
                    // from `this.len()`.
                    break;
                }
            }
            Ok(())
        })?;
        Ok(())
    }
}

View File

@@ -37,9 +37,11 @@ impl SortedBlocks {
}
// Find the block where value belongs: first block whose max >= value
let block_idx = self.blocks.iter().position(|b| {
*b.last().unwrap() >= value
}).unwrap_or(self.blocks.len() - 1);
let block_idx = self
.blocks
.iter()
.position(|b| *b.last().unwrap() >= value)
.unwrap_or(self.blocks.len() - 1);
let block = &mut self.blocks[block_idx];
let pos = block.partition_point(|a| *a < value);
@@ -131,7 +133,13 @@ impl SlidingWindowSorted {
}
/// Add a new value and remove all expired values up to `new_start`.
pub fn advance(&mut self, value: f64, new_start: usize, partial_values: &[f64], range_start: usize) {
pub fn advance(
&mut self,
value: f64,
new_start: usize,
partial_values: &[f64],
range_start: usize,
) {
self.running_sum += value;
self.sorted.insert(value);
@@ -159,12 +167,20 @@ impl SlidingWindowSorted {
#[inline]
pub fn min(&self) -> f64 {
if self.sorted.is_empty() { 0.0 } else { self.sorted.first() }
if self.sorted.is_empty() {
0.0
} else {
self.sorted.first()
}
}
#[inline]
pub fn max(&self) -> f64 {
if self.sorted.is_empty() { 0.0 } else { self.sorted.last() }
if self.sorted.is_empty() {
0.0
} else {
self.sorted.last()
}
}
/// Extract a percentile (0.0-1.0) using linear interpolation.

View File

@@ -67,9 +67,11 @@ impl TDigest {
}
// Single binary search: unclamped position doubles as insert point
let search = self
.centroids
.binary_search_by(|c| c.mean.partial_cmp(&value).unwrap_or(std::cmp::Ordering::Equal));
let search = self.centroids.binary_search_by(|c| {
c.mean
.partial_cmp(&value)
.unwrap_or(std::cmp::Ordering::Equal)
});
let insert_pos = match search {
Ok(i) | Err(i) => i,
};

View File

@@ -0,0 +1,26 @@
use std::path::Path;
use brk_error::Result;
use brk_traversable::Traversable;
use vecdb::{Database, PAGE_SIZE};
/// Opens (or creates) the database `db_name` under `parent_path` and
/// reserves a minimum size of `page_multiplier` pages.
pub(crate) fn open_db(
    parent_path: &Path,
    db_name: &str,
    page_multiplier: usize,
) -> Result<Database> {
    let path = parent_path.join(db_name);
    let db = Database::open(&path)?;
    db.set_min_len(page_multiplier * PAGE_SIZE)?;
    Ok(db)
}
/// Keeps only the regions still referenced by `traversable`'s exportable
/// vectors, then compacts the database.
pub(crate) fn finalize_db(db: &Database, traversable: &impl Traversable) -> Result<()> {
    let live_regions = traversable
        .iter_any_exportable()
        .flat_map(|v| v.region_names())
        .collect();
    db.retain_regions(live_regions)?;
    db.compact()?;
    Ok(())
}

View File

@@ -1,5 +1,3 @@
//! ComputedHeightDerived — sparse time periods + dense epochs (last value).
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, FromCoarserIndex, HalvingEpoch, Height, Hour1, Hour4, Hour12,
@@ -12,7 +10,7 @@ use vecdb::{
};
use crate::{
indexes, indexes_from,
indexes,
internal::{ComputedVecValue, NumericValue, PerPeriod},
};
@@ -41,7 +39,6 @@ pub struct ComputedHeightDerived<T>(
where
T: ComputedVecValue + PartialOrd + JsonSchema;
/// Already read-only (no StorageMode); cloning is sufficient.
impl<T> ReadOnlyClone for ComputedHeightDerived<T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
@@ -116,6 +113,22 @@ where
};
}
Self(indexes_from!(period, epoch))
Self(PerPeriod {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
})
}
}

View File

@@ -1,59 +1,20 @@
//! LazyHeightDerived — unary transform of height-derived last values.
use std::marker::PhantomData;
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12,
Minute10, Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12, Minute10, Minute30,
Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
LazyVecFrom1, ReadableBoxedVec, ReadableCloneableVec, UnaryTransform, VecIndex, VecValue,
};
use vecdb::{ReadableBoxedVec, ReadableCloneableVec, UnaryTransform, VecValue};
use crate::{
indexes, indexes_from,
indexes,
internal::{
ComputedFromHeight, ComputedHeightDerived, ComputedVecValue, NumericValue, PerPeriod,
},
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyTransformLast<I, T, S1T = T>(pub LazyVecFrom1<I, T, I, S1T>)
where
I: VecIndex,
T: VecValue + PartialOrd + JsonSchema,
S1T: VecValue;
impl<I, T, S1T> LazyTransformLast<I, T, S1T>
where
I: VecIndex,
T: VecValue + PartialOrd + JsonSchema + 'static,
S1T: VecValue + JsonSchema,
{
fn from_boxed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: ReadableBoxedVec<I, S1T>,
) -> Self {
Self(LazyVecFrom1::transformed::<F>(name, version, source))
}
}
struct MapOption<F>(PhantomData<F>);
impl<F, S, T> UnaryTransform<Option<S>, Option<T>> for MapOption<F>
where
F: UnaryTransform<S, T>,
{
#[inline(always)]
fn apply(value: Option<S>) -> Option<T> {
value.map(F::apply)
}
}
use super::{LazyTransformLast, MapOption};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
@@ -106,8 +67,7 @@ where
where
S1T: NumericValue,
{
let derived =
ComputedHeightDerived::forced_import(name, height_source, version, indexes);
let derived = ComputedHeightDerived::forced_import(name, height_source, version, indexes);
Self::from_derived_computed::<F>(name, version, &derived)
}
@@ -135,7 +95,23 @@ where
};
}
Self(indexes_from!(period, epoch))
Self(PerPeriod {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
})
}
pub(crate) fn from_lazy<F, S2T>(
@@ -163,6 +139,22 @@ where
};
}
Self(indexes_from!(period, epoch))
Self(PerPeriod {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
})
}
}

View File

@@ -1,5 +1,3 @@
//! Lazy value type for Last pattern across all height-derived indexes.
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Cents, Dollars, Sats, Version};
use vecdb::UnaryTransform;
@@ -15,7 +13,12 @@ pub struct LazyValueHeightDerived {
}
impl LazyValueHeightDerived {
pub(crate) fn from_block_source<SatsTransform, BitcoinTransform, CentsTransform, DollarsTransform>(
pub(crate) fn from_block_source<
SatsTransform,
BitcoinTransform,
CentsTransform,
DollarsTransform,
>(
name: &str,
source: &ValueFromHeight,
version: Version,
@@ -50,6 +53,11 @@ impl LazyValueHeightDerived {
&source.usd.rest,
);
Self { sats, btc, cents, usd }
Self {
sats,
btc,
cents,
usd,
}
}
}

View File

@@ -0,0 +1,15 @@
use std::marker::PhantomData;
use vecdb::UnaryTransform;
/// Lifts a `UnaryTransform<S, T>` so it operates on optional values.
pub struct MapOption<F>(PhantomData<F>);

impl<F, S, T> UnaryTransform<Option<S>, Option<T>> for MapOption<F>
where
    F: UnaryTransform<S, T>,
{
    /// `Some(x)` becomes `Some(F::apply(x))`; `None` passes through untouched.
    #[inline(always)]
    fn apply(maybe: Option<S>) -> Option<T> {
        maybe.map(F::apply)
    }
}

View File

@@ -2,8 +2,12 @@ mod full;
mod last;
mod lazy_last;
mod lazy_value;
mod map_option;
mod transform_last;
pub use full::*;
pub use last::*;
pub use lazy_last::*;
pub use lazy_value::*;
pub use map_option::*;
pub use transform_last::*;

Some files were not shown because too many files have changed in this diff Show More