global: MASSIVE snapshot

This commit is contained in:
nym21
2026-01-07 01:16:37 +01:00
parent e832ffbe23
commit cb0abc324e
487 changed files with 21155 additions and 13627 deletions

View File

@@ -47,38 +47,35 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_1w_block_count
.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
7,
exit,
)?;
Ok(())
})?;
self.indexes_to_1w_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
7,
exit,
)?;
Ok(())
})?;
self.indexes_to_1m_block_count
.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
30,
exit,
)?;
Ok(())
})?;
self.indexes_to_1m_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
30,
exit,
)?;
Ok(())
})?;
self.indexes_to_1y_block_count
.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.unwrap_sum(),
365,
exit,
)?;
Ok(())
})?;
self.indexes_to_1y_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
365,
exit,
)?;
Ok(())
})?;
Ok(())
}

View File

@@ -10,14 +10,11 @@ use crate::{
TARGET_BLOCKS_PER_YEAR,
},
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedBlockSumCum, ComputedDateLast},
};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative();
Ok(Self {
dateindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
@@ -62,37 +59,29 @@ impl Vecs {
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)),
),
height_to_24h_block_count: EagerVec::forced_import(db, "24h_block_count", version)?,
indexes_to_block_count: ComputedVecsFromHeight::forced_import(
indexes_to_block_count: ComputedBlockSumCum::forced_import(
db,
"block_count",
Source::Compute,
version,
indexes,
sum_cum(),
)?,
indexes_to_1w_block_count: ComputedVecsFromDateIndex::forced_import(
indexes_to_1w_block_count: ComputedDateLast::forced_import(
db,
"1w_block_count",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_1m_block_count: ComputedVecsFromDateIndex::forced_import(
indexes_to_1m_block_count: ComputedDateLast::forced_import(
db,
"1m_block_count",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_1y_block_count: ComputedVecsFromDateIndex::forced_import(
indexes_to_1y_block_count: ComputedDateLast::forced_import(
db,
"1y_block_count",
Source::Compute,
version,
indexes,
last(),
)?,
})
}

View File

@@ -1,24 +1,28 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex,
StoredU32, StoredU64, WeekIndex, YearIndex,
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, StoredU32, StoredU64,
WeekIndex, YearIndex,
};
use vecdb::LazyVecFrom1;
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
use crate::internal::{ComputedBlockSumCum, ComputedDateLast};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub dateindex_to_block_count_target: LazyVecFrom1<DateIndex, StoredU64, DateIndex, DateIndex>,
pub weekindex_to_block_count_target: LazyVecFrom1<WeekIndex, StoredU64, WeekIndex, WeekIndex>,
pub monthindex_to_block_count_target: LazyVecFrom1<MonthIndex, StoredU64, MonthIndex, MonthIndex>,
pub quarterindex_to_block_count_target: LazyVecFrom1<QuarterIndex, StoredU64, QuarterIndex, QuarterIndex>,
pub semesterindex_to_block_count_target: LazyVecFrom1<SemesterIndex, StoredU64, SemesterIndex, SemesterIndex>,
pub monthindex_to_block_count_target:
LazyVecFrom1<MonthIndex, StoredU64, MonthIndex, MonthIndex>,
pub quarterindex_to_block_count_target:
LazyVecFrom1<QuarterIndex, StoredU64, QuarterIndex, QuarterIndex>,
pub semesterindex_to_block_count_target:
LazyVecFrom1<SemesterIndex, StoredU64, SemesterIndex, SemesterIndex>,
pub yearindex_to_block_count_target: LazyVecFrom1<YearIndex, StoredU64, YearIndex, YearIndex>,
pub decadeindex_to_block_count_target: LazyVecFrom1<DecadeIndex, StoredU64, DecadeIndex, DecadeIndex>,
pub decadeindex_to_block_count_target:
LazyVecFrom1<DecadeIndex, StoredU64, DecadeIndex, DecadeIndex>,
pub height_to_24h_block_count: vecdb::EagerVec<vecdb::PcoVec<brk_types::Height, StoredU32>>,
pub indexes_to_block_count: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_1w_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_1m_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_1y_block_count: ComputedVecsFromDateIndex<StoredU32>,
pub indexes_to_block_count: ComputedBlockSumCum<StoredU32>,
pub indexes_to_1w_block_count: ComputedDateLast<StoredU32>,
pub indexes_to_1m_block_count: ComputedDateLast<StoredU32>,
pub indexes_to_1y_block_count: ComputedDateLast<StoredU32>,
}

View File

@@ -15,8 +15,7 @@ impl Vecs {
) -> Result<()> {
let mut height_to_difficultyepoch_iter =
indexes.block.height_to_difficultyepoch.into_iter();
self.indexes_to_difficultyepoch
.compute_all(starting_indexes, exit, |vec| {
self.indexes_to_difficultyepoch.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
vec.compute_transform(
starting_indexes.dateindex,
@@ -48,10 +47,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
self.indexes_to_blocks_before_next_difficulty_adjustment
.height
.as_ref()
.unwrap(),
&self.indexes_to_blocks_before_next_difficulty_adjustment.height,
|(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
exit,
)?;

View File

@@ -5,41 +5,32 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedBlockLast, ComputedDateLast},
};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let v2 = Version::TWO;
let last = || VecBuilderOptions::default().add_last();
Ok(Self {
indexes_to_difficultyepoch: ComputedVecsFromDateIndex::forced_import(
indexes_to_difficultyepoch: ComputedDateLast::forced_import(
db,
"difficultyepoch",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_blocks_before_next_difficulty_adjustment:
ComputedVecsFromHeight::forced_import(
db,
"blocks_before_next_difficulty_adjustment",
Source::Compute,
version + v2,
indexes,
last(),
)?,
indexes_to_days_before_next_difficulty_adjustment:
ComputedVecsFromHeight::forced_import(
db,
"days_before_next_difficulty_adjustment",
Source::Compute,
version + v2,
indexes,
last(),
)?,
indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
db,
"blocks_before_next_difficulty_adjustment",
version + v2,
indexes,
)?,
indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
db,
"days_before_next_difficulty_adjustment",
version + v2,
indexes,
)?,
})
}
}

View File

@@ -1,12 +1,12 @@
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, StoredF32, StoredU32};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
use crate::internal::{ComputedBlockLast, ComputedDateLast};
/// Difficulty epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_difficultyepoch: ComputedVecsFromDateIndex<DifficultyEpoch>,
pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_days_before_next_difficulty_adjustment: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_difficultyepoch: ComputedDateLast<DifficultyEpoch>,
pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast<StoredU32>,
pub indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast<StoredF32>,
}

View File

@@ -14,8 +14,7 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let mut height_to_halvingepoch_iter = indexes.block.height_to_halvingepoch.into_iter();
self.indexes_to_halvingepoch
.compute_all(starting_indexes, exit, |vec| {
self.indexes_to_halvingepoch.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
vec.compute_transform(
starting_indexes.dateindex,
@@ -54,10 +53,7 @@ impl Vecs {
|v| {
v.compute_transform(
starting_indexes.height,
self.indexes_to_blocks_before_next_halving
.height
.as_ref()
.unwrap(),
&self.indexes_to_blocks_before_next_halving.height,
|(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
exit,
)?;

View File

@@ -5,38 +5,31 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedBlockLast, ComputedDateLast},
};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let v2 = Version::TWO;
let last = || VecBuilderOptions::default().add_last();
Ok(Self {
indexes_to_halvingepoch: ComputedVecsFromDateIndex::forced_import(
indexes_to_halvingepoch: ComputedDateLast::forced_import(
db,
"halvingepoch",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_blocks_before_next_halving: ComputedVecsFromHeight::forced_import(
indexes_to_blocks_before_next_halving: ComputedBlockLast::forced_import(
db,
"blocks_before_next_halving",
Source::Compute,
version + v2,
indexes,
last(),
)?,
indexes_to_days_before_next_halving: ComputedVecsFromHeight::forced_import(
indexes_to_days_before_next_halving: ComputedBlockLast::forced_import(
db,
"days_before_next_halving",
Source::Compute,
version + v2,
indexes,
last(),
)?,
})
}

View File

@@ -1,12 +1,12 @@
use brk_traversable::Traversable;
use brk_types::{HalvingEpoch, StoredF32, StoredU32};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
use crate::internal::{ComputedBlockLast, ComputedDateLast};
/// Halving epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_halvingepoch: ComputedVecsFromDateIndex<HalvingEpoch>,
pub indexes_to_blocks_before_next_halving: ComputedVecsFromHeight<StoredU32>,
pub indexes_to_days_before_next_halving: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_halvingepoch: ComputedDateLast<HalvingEpoch>,
pub indexes_to_blocks_before_next_halving: ComputedBlockLast<StoredU32>,
pub indexes_to_days_before_next_halving: ComputedBlockLast<StoredF32>,
}

View File

@@ -2,7 +2,7 @@ use brk_error::Result;
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, indexes};
use crate::{indexes, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -11,11 +11,11 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_interval.compute_rest(
self.indexes_to_block_interval.derive_from(
indexes,
starting_indexes,
&self.height_to_interval,
exit,
Some(&self.height_to_interval),
)?;
Ok(())

View File

@@ -4,10 +4,7 @@ use brk_types::{CheckedSub, Height, Timestamp, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
use crate::{indexes, internal::DerivedComputedBlockDistribution};
impl Vecs {
pub fn forced_import(
@@ -16,13 +13,6 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let stats = || {
VecBuilderOptions::default()
.add_average()
.add_minmax()
.add_percentiles()
};
let height_to_interval = LazyVecFrom1::init(
"interval",
version,
@@ -40,16 +30,17 @@ impl Vecs {
},
);
let indexes_to_block_interval = DerivedComputedBlockDistribution::forced_import(
db,
"block_interval",
height_to_interval.boxed_clone(),
version,
indexes,
)?;
Ok(Self {
indexes_to_block_interval: ComputedVecsFromHeight::forced_import(
db,
"block_interval",
Source::Vec(height_to_interval.boxed_clone()),
version,
indexes,
stats(),
)?,
height_to_interval,
indexes_to_block_interval,
})
}
}

View File

@@ -2,10 +2,10 @@ use brk_traversable::Traversable;
use brk_types::{Height, Timestamp};
use vecdb::LazyVecFrom1;
use crate::internal::ComputedVecsFromHeight;
use crate::internal::DerivedComputedBlockDistribution;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_interval: LazyVecFrom1<Height, Timestamp, Height, Timestamp>,
pub indexes_to_block_interval: ComputedVecsFromHeight<Timestamp>,
pub indexes_to_block_interval: DerivedComputedBlockDistribution<Timestamp>,
}

View File

@@ -5,11 +5,7 @@ use vecdb::Exit;
use super::Vecs;
use super::super::{count, rewards, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64};
use crate::{
indexes,
utils::OptionExt,
ComputeIndexes,
};
use crate::{indexes, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -21,11 +17,11 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_difficulty.compute_rest(
self.indexes_to_difficulty.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_difficulty,
exit,
Some(&indexer.vecs.block.height_to_difficulty),
)?;
self.indexes_to_difficulty_as_hash
@@ -45,7 +41,7 @@ impl Vecs {
v.compute_transform2(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
self.indexes_to_difficulty_as_hash.height.u(),
&self.indexes_to_difficulty_as_hash.height,
|(i, block_count_sum, difficulty_as_hash, ..)| {
(
i,
@@ -60,44 +56,40 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_hash_rate_1w_sma
.compute_all(starting_indexes, exit, |v| {
self.indexes_to_hash_rate_1w_sma.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.unwrap_last(),
self.indexes_to_hash_rate.dateindex.inner(),
7,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_1m_sma
.compute_all(starting_indexes, exit, |v| {
self.indexes_to_hash_rate_1m_sma.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.unwrap_last(),
self.indexes_to_hash_rate.dateindex.inner(),
30,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_2m_sma
.compute_all(starting_indexes, exit, |v| {
self.indexes_to_hash_rate_2m_sma.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.unwrap_last(),
self.indexes_to_hash_rate.dateindex.inner(),
2 * 30,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_1y_sma
.compute_all(starting_indexes, exit, |v| {
self.indexes_to_hash_rate_1y_sma.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.unwrap_last(),
self.indexes_to_hash_rate.dateindex.inner(),
365,
exit,
)?;
@@ -124,7 +116,7 @@ impl Vecs {
v.compute_transform2(
starting_indexes.height,
&rewards_vecs.height_to_24h_coinbase_usd_sum,
self.indexes_to_hash_rate.height.u(),
&self.indexes_to_hash_rate.height,
|(i, coinbase_sum, hashrate, ..)| {
let hashrate_ths = *hashrate / ONE_TERA_HASH;
let price = if hashrate_ths == 0.0 {
@@ -143,7 +135,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
self.indexes_to_hash_price_ths.height.u(),
&self.indexes_to_hash_price_ths.height,
|(i, price, ..)| (i, (*price * 1000.0).into()),
exit,
)?;
@@ -155,7 +147,7 @@ impl Vecs {
v.compute_transform2(
starting_indexes.height,
&rewards_vecs.height_to_24h_coinbase_sum,
self.indexes_to_hash_rate.height.u(),
&self.indexes_to_hash_rate.height,
|(i, coinbase_sum, hashrate, ..)| {
let hashrate_ths = *hashrate / ONE_TERA_HASH;
let value = if hashrate_ths == 0.0 {
@@ -174,7 +166,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
self.indexes_to_hash_value_ths.height.u(),
&self.indexes_to_hash_value_ths.height,
|(i, value, ..)| (i, (*value * 1000.0).into()),
exit,
)?;
@@ -185,7 +177,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
self.indexes_to_hash_price_ths.height.u(),
&self.indexes_to_hash_price_ths.height,
exit,
true,
)?;
@@ -196,7 +188,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
self.indexes_to_hash_price_phs.height.u(),
&self.indexes_to_hash_price_phs.height,
exit,
true,
)?;
@@ -207,7 +199,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
self.indexes_to_hash_value_ths.height.u(),
&self.indexes_to_hash_value_ths.height,
exit,
true,
)?;
@@ -218,7 +210,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
self.indexes_to_hash_value_phs.height.u(),
&self.indexes_to_hash_value_phs.height,
exit,
true,
)?;
@@ -229,8 +221,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_percentage_difference(
starting_indexes.height,
self.indexes_to_hash_price_phs.height.u(),
self.indexes_to_hash_price_phs_min.height.u(),
&self.indexes_to_hash_price_phs.height,
&self.indexes_to_hash_price_phs_min.height,
exit,
)?;
Ok(())
@@ -240,8 +232,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_percentage_difference(
starting_indexes.height,
self.indexes_to_hash_value_phs.height.u(),
self.indexes_to_hash_value_phs_min.height.u(),
&self.indexes_to_hash_value_phs.height,
&self.indexes_to_hash_value_phs_min.height,
exit,
)?;
Ok(())

View File

@@ -6,7 +6,7 @@ use vecdb::{Database, IterableCloneableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedBlockLast, ComputedBlockSum, ComputedDateLast, DerivedComputedBlockLast},
};
impl Vecs {
@@ -19,153 +19,116 @@ impl Vecs {
let v4 = Version::new(4);
let v5 = Version::new(5);
let last = || VecBuilderOptions::default().add_last();
let sum = || VecBuilderOptions::default().add_sum();
Ok(Self {
indexes_to_hash_rate: ComputedVecsFromHeight::forced_import(
indexes_to_hash_rate: ComputedBlockLast::forced_import(
db,
"hash_rate",
Source::Compute,
version + v5,
indexes,
last(),
)?,
indexes_to_hash_rate_1w_sma: ComputedVecsFromDateIndex::forced_import(
indexes_to_hash_rate_1w_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1w_sma",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_hash_rate_1m_sma: ComputedVecsFromDateIndex::forced_import(
indexes_to_hash_rate_1m_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1m_sma",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_hash_rate_2m_sma: ComputedVecsFromDateIndex::forced_import(
indexes_to_hash_rate_2m_sma: ComputedDateLast::forced_import(
db,
"hash_rate_2m_sma",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_hash_rate_1y_sma: ComputedVecsFromDateIndex::forced_import(
indexes_to_hash_rate_1y_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1y_sma",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_hash_price_ths: ComputedVecsFromHeight::forced_import(
indexes_to_hash_price_ths: ComputedBlockLast::forced_import(
db,
"hash_price_ths",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_price_ths_min: ComputedVecsFromHeight::forced_import(
indexes_to_hash_price_ths_min: ComputedBlockLast::forced_import(
db,
"hash_price_ths_min",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_price_phs: ComputedVecsFromHeight::forced_import(
indexes_to_hash_price_phs: ComputedBlockLast::forced_import(
db,
"hash_price_phs",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_price_phs_min: ComputedVecsFromHeight::forced_import(
indexes_to_hash_price_phs_min: ComputedBlockLast::forced_import(
db,
"hash_price_phs_min",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_price_rebound: ComputedVecsFromHeight::forced_import(
indexes_to_hash_price_rebound: ComputedBlockLast::forced_import(
db,
"hash_price_rebound",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_value_ths: ComputedVecsFromHeight::forced_import(
indexes_to_hash_value_ths: ComputedBlockLast::forced_import(
db,
"hash_value_ths",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_value_ths_min: ComputedVecsFromHeight::forced_import(
indexes_to_hash_value_ths_min: ComputedBlockLast::forced_import(
db,
"hash_value_ths_min",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_value_phs: ComputedVecsFromHeight::forced_import(
indexes_to_hash_value_phs: ComputedBlockLast::forced_import(
db,
"hash_value_phs",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_value_phs_min: ComputedVecsFromHeight::forced_import(
indexes_to_hash_value_phs_min: ComputedBlockLast::forced_import(
db,
"hash_value_phs_min",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_hash_value_rebound: ComputedVecsFromHeight::forced_import(
indexes_to_hash_value_rebound: ComputedBlockLast::forced_import(
db,
"hash_value_rebound",
Source::Compute,
version + v4,
indexes,
last(),
)?,
indexes_to_difficulty: ComputedVecsFromHeight::forced_import(
// Derived from external indexer data - no height storage needed
indexes_to_difficulty: DerivedComputedBlockLast::forced_import(
db,
"difficulty",
Source::Vec(indexer.vecs.block.height_to_difficulty.boxed_clone()),
indexer.vecs.block.height_to_difficulty.boxed_clone(),
version,
indexes,
last(),
)?,
indexes_to_difficulty_as_hash: ComputedVecsFromHeight::forced_import(
indexes_to_difficulty_as_hash: ComputedBlockLast::forced_import(
db,
"difficulty_as_hash",
Source::Compute,
version,
indexes,
last(),
)?,
indexes_to_difficulty_adjustment: ComputedVecsFromHeight::forced_import(
indexes_to_difficulty_adjustment: ComputedBlockSum::forced_import(
db,
"difficulty_adjustment",
Source::Compute,
version,
indexes,
sum(),
)?,
})
}

View File

@@ -1,27 +1,30 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, StoredF64};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
use crate::internal::{
ComputedBlockLast, ComputedBlockSum, ComputedDateLast, DerivedComputedBlockLast,
};
/// Mining-related metrics: hash rate, hash price, hash value, difficulty
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_hash_rate: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_hash_rate_1w_sma: ComputedVecsFromDateIndex<StoredF64>,
pub indexes_to_hash_rate_1m_sma: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_hash_rate_2m_sma: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_hash_rate_1y_sma: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_hash_price_ths: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_price_ths_min: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_price_phs: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_price_phs_min: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_price_rebound: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_value_ths: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_value_ths_min: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_value_phs: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_value_phs_min: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_value_rebound: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_difficulty: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_difficulty_as_hash: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_difficulty_adjustment: ComputedVecsFromHeight<StoredF32>,
pub indexes_to_hash_rate: ComputedBlockLast<StoredF64>,
pub indexes_to_hash_rate_1w_sma: ComputedDateLast<StoredF64>,
pub indexes_to_hash_rate_1m_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_rate_2m_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_rate_1y_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_price_ths: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_ths_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_phs: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_phs_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_rebound: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_ths: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_ths_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_phs: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_phs_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_rebound: ComputedBlockLast<StoredF32>,
/// Derived from indexer - no height storage needed
pub indexes_to_difficulty: DerivedComputedBlockLast<StoredF64>,
pub indexes_to_difficulty_as_hash: ComputedBlockLast<StoredF32>,
pub indexes_to_difficulty_adjustment: ComputedBlockSum<StoredF32>,
}

View File

@@ -3,14 +3,9 @@ use brk_indexer::Indexer;
use brk_types::{CheckedSub, Dollars, HalvingEpoch, Height, Sats, StoredF32, TxOutIndex};
use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex};
use super::Vecs;
use crate::{
transactions,
ComputeIndexes,
indexes, price,
utils::OptionExt,
};
use super::super::count;
use super::Vecs;
use crate::{indexes, price, transactions, ComputeIndexes};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -53,13 +48,7 @@ impl Vecs {
Ok(())
})?;
let mut height_to_coinbase_iter = self
.indexes_to_coinbase
.sats
.height
.as_ref()
.unwrap()
.into_iter();
let mut height_to_coinbase_iter = self.indexes_to_coinbase.sats.height.into_iter();
self.height_to_24h_coinbase_sum.compute_transform(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
@@ -75,12 +64,8 @@ impl Vecs {
)?;
drop(height_to_coinbase_iter);
if let Some(mut height_to_coinbase_iter) = self
.indexes_to_coinbase
.dollars
.as_ref()
.map(|c| c.height.u().into_iter())
{
if let Some(ref dollars) = self.indexes_to_coinbase.dollars {
let mut height_to_coinbase_iter = dollars.height.into_iter();
self.height_to_24h_coinbase_usd_sum.compute_transform(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
@@ -98,10 +83,11 @@ impl Vecs {
self.indexes_to_subsidy
.compute_all(indexes, price, starting_indexes, exit, |vec| {
// KISS: height.sum_cum.sum.0 is now a concrete field
vec.compute_transform2(
starting_indexes.height,
self.indexes_to_coinbase.sats.height.u(),
transactions_fees.indexes_to_fee.sats.height.unwrap_sum(),
&self.indexes_to_coinbase.sats.height,
&transactions_fees.indexes_to_fee.sats.height.sum_cum.sum.0,
|(height, coinbase, fees, ..)| {
(
height,
@@ -124,7 +110,7 @@ impl Vecs {
|vec| {
vec.compute_transform(
starting_indexes.height,
self.indexes_to_subsidy.sats.height.u(),
&self.indexes_to_subsidy.sats.height,
|(height, subsidy, ..)| {
let halving = HalvingEpoch::from(height);
let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32);
@@ -136,10 +122,11 @@ impl Vecs {
},
)?;
// KISS: dateindex.sum_cum.sum.0 is now a concrete field
self.dateindex_to_fee_dominance.compute_transform2(
starting_indexes.dateindex,
transactions_fees.indexes_to_fee.sats.dateindex.unwrap_sum(),
self.indexes_to_coinbase.sats.dateindex.unwrap_sum(),
&transactions_fees.indexes_to_fee.sats.dateindex.sum_cum.sum.0,
&self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0,
|(i, fee, coinbase, ..)| {
let coinbase_f64 = u64::from(coinbase) as f64;
let dominance = if coinbase_f64 == 0.0 {
@@ -154,8 +141,8 @@ impl Vecs {
self.dateindex_to_subsidy_dominance.compute_transform2(
starting_indexes.dateindex,
self.indexes_to_subsidy.sats.dateindex.unwrap_sum(),
self.indexes_to_coinbase.sats.dateindex.unwrap_sum(),
&self.indexes_to_subsidy.sats.dateindex.sum_cum.sum.0,
&self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0,
|(i, subsidy, coinbase, ..)| {
let coinbase_f64 = u64::from(coinbase) as f64;
let dominance = if coinbase_f64 == 0.0 {
@@ -169,13 +156,15 @@ impl Vecs {
)?;
if let Some(sma) = self.indexes_to_subsidy_usd_1y_sma.as_mut() {
let date_to_coinbase_usd_sum = self
let date_to_coinbase_usd_sum = &self
.indexes_to_coinbase
.dollars
.as_ref()
.unwrap()
.dateindex
.unwrap_sum();
.sum_cum
.sum
.0;
sma.compute_all(starting_indexes, exit, |v| {
v.compute_sma(

View File

@@ -5,7 +5,7 @@ use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions},
internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum},
};
impl Vecs {
@@ -15,8 +15,6 @@ impl Vecs {
indexes: &indexes::Vecs,
compute_dollars: bool,
) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
Ok(Self {
height_to_24h_coinbase_sum: EagerVec::forced_import(db, "24h_coinbase_sum", version)?,
height_to_24h_coinbase_usd_sum: EagerVec::forced_import(
@@ -24,42 +22,26 @@ impl Vecs {
"24h_coinbase_usd_sum",
version,
)?,
indexes_to_coinbase: ComputedValueVecsFromHeight::forced_import(
indexes_to_coinbase: ValueBlockFull::forced_import(
db,
"coinbase",
Source::Compute,
version,
VecBuilderOptions::default()
.add_sum()
.add_cumulative()
.add_percentiles()
.add_minmax()
.add_average(),
compute_dollars,
indexes,
compute_dollars,
)?,
indexes_to_subsidy: ComputedValueVecsFromHeight::forced_import(
indexes_to_subsidy: ValueBlockFull::forced_import(
db,
"subsidy",
Source::Compute,
version,
VecBuilderOptions::default()
.add_percentiles()
.add_sum()
.add_cumulative()
.add_minmax()
.add_average(),
compute_dollars,
indexes,
compute_dollars,
)?,
indexes_to_unclaimed_rewards: ComputedValueVecsFromHeight::forced_import(
indexes_to_unclaimed_rewards: ValueBlockSumCum::forced_import(
db,
"unclaimed_rewards",
Source::Compute,
version,
VecBuilderOptions::default().add_sum().add_cumulative(),
compute_dollars,
indexes,
compute_dollars,
)?,
dateindex_to_fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?,
dateindex_to_subsidy_dominance: EagerVec::forced_import(
@@ -69,14 +51,7 @@ impl Vecs {
)?,
indexes_to_subsidy_usd_1y_sma: compute_dollars
.then(|| {
ComputedVecsFromDateIndex::forced_import(
db,
"subsidy_usd_1y_sma",
Source::Compute,
version,
indexes,
last(),
)
ComputedDateLast::forced_import(db, "subsidy_usd_1y_sma", version, indexes)
})
.transpose()?,
})

View File

@@ -2,17 +2,17 @@ use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32};
use vecdb::{EagerVec, PcoVec};
use crate::internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex};
use crate::internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum};
/// Coinbase/subsidy/rewards metrics
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_24h_coinbase_sum: EagerVec<PcoVec<Height, Sats>>,
pub height_to_24h_coinbase_usd_sum: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_coinbase: ComputedValueVecsFromHeight,
pub indexes_to_subsidy: ComputedValueVecsFromHeight,
pub indexes_to_unclaimed_rewards: ComputedValueVecsFromHeight,
pub indexes_to_coinbase: ValueBlockFull,
pub indexes_to_subsidy: ValueBlockFull,
pub indexes_to_unclaimed_rewards: ValueBlockSumCum,
pub dateindex_to_fee_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub dateindex_to_subsidy_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub indexes_to_subsidy_usd_1y_sma: Option<ComputedVecsFromDateIndex<Dollars>>,
pub indexes_to_subsidy_usd_1y_sma: Option<ComputedDateLast<Dollars>>,
}

View File

@@ -13,18 +13,18 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_size.compute_rest(
self.indexes_to_block_size.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_total_size,
exit,
Some(&indexer.vecs.block.height_to_total_size),
)?;
self.indexes_to_block_vbytes.compute_rest(
self.indexes_to_block_vbytes.derive_from(
indexes,
starting_indexes,
&self.height_to_vbytes,
exit,
Some(&self.height_to_vbytes),
)?;
Ok(())

View File

@@ -6,7 +6,7 @@ use vecdb::{Database, IterableCloneableVec, LazyVecFrom1, VecIndex};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::DerivedComputedBlockFull,
};
impl Vecs {
@@ -16,15 +16,6 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let full_stats = || {
VecBuilderOptions::default()
.add_average()
.add_minmax()
.add_percentiles()
.add_sum()
.add_cumulative()
};
let height_to_vbytes = LazyVecFrom1::init(
"vbytes",
version,
@@ -37,21 +28,19 @@ impl Vecs {
);
Ok(Self {
indexes_to_block_size: ComputedVecsFromHeight::forced_import(
indexes_to_block_size: DerivedComputedBlockFull::forced_import(
db,
"block_size",
Source::Vec(indexer.vecs.block.height_to_total_size.boxed_clone()),
indexer.vecs.block.height_to_total_size.boxed_clone(),
version,
indexes,
full_stats(),
)?,
indexes_to_block_vbytes: ComputedVecsFromHeight::forced_import(
indexes_to_block_vbytes: DerivedComputedBlockFull::forced_import(
db,
"block_vbytes",
Source::Vec(height_to_vbytes.boxed_clone()),
height_to_vbytes.boxed_clone(),
version,
indexes,
full_stats(),
)?,
height_to_vbytes,
})

View File

@@ -2,11 +2,11 @@ use brk_traversable::Traversable;
use brk_types::{Height, StoredU64, Weight};
use vecdb::LazyVecFrom1;
use crate::internal::ComputedVecsFromHeight;
use crate::internal::DerivedComputedBlockFull;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_vbytes: LazyVecFrom1<Height, StoredU64, Height, Weight>,
pub indexes_to_block_size: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_block_vbytes: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_block_size: DerivedComputedBlockFull<StoredU64>,
pub indexes_to_block_vbytes: DerivedComputedBlockFull<StoredU64>,
}

View File

@@ -6,10 +6,7 @@ use vecdb::{
};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions},
};
use crate::{indexes, internal::ComputedVecsDateFirst};
impl Vecs {
pub fn forced_import(
@@ -47,13 +44,11 @@ impl Vecs {
.and_then(|h: Height| timestamp_iter.get(h))
},
),
timeindexes_to_timestamp: ComputedVecsFromDateIndex::forced_import(
timeindexes_to_timestamp: ComputedVecsDateFirst::forced_import(
db,
"timestamp",
Source::Compute,
version,
indexes,
VecBuilderOptions::default().add_first(),
)?,
})
}

View File

@@ -2,7 +2,7 @@ use brk_traversable::Traversable;
use brk_types::{Date, DifficultyEpoch, Height, Timestamp};
use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec};
use crate::internal::ComputedVecsFromDateIndex;
use crate::internal::ComputedVecsDateFirst;
/// Timestamp and date metrics for blocks
#[derive(Clone, Traversable)]
@@ -12,5 +12,5 @@ pub struct Vecs {
pub height_to_timestamp_fixed: EagerVec<PcoVec<Height, Timestamp>>,
pub difficultyepoch_to_timestamp:
LazyVecFrom2<DifficultyEpoch, Timestamp, DifficultyEpoch, Height, Height, Timestamp>,
pub timeindexes_to_timestamp: ComputedVecsFromDateIndex<Timestamp>,
pub timeindexes_to_timestamp: ComputedVecsDateFirst<Timestamp>,
}

View File

@@ -13,11 +13,11 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_weight.compute_rest(
self.indexes_to_block_weight.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_weight,
exit,
Some(&indexer.vecs.block.height_to_weight),
)?;
Ok(())

View File

@@ -7,7 +7,7 @@ use super::Vecs;
use crate::{
indexes,
internal::{
ComputedVecsFromHeight, LazyVecsFromHeight, Source, VecBuilderOptions, WeightToFullness,
DerivedComputedBlockFull, LazyBlockFull, WeightToFullness,
},
};
@@ -18,30 +18,21 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let full_stats = || {
VecBuilderOptions::default()
.add_average()
.add_minmax()
.add_percentiles()
.add_sum()
.add_cumulative()
};
let indexes_to_block_weight = ComputedVecsFromHeight::forced_import(
let indexes_to_block_weight = DerivedComputedBlockFull::forced_import(
db,
"block_weight",
Source::Vec(indexer.vecs.block.height_to_weight.boxed_clone()),
indexer.vecs.block.height_to_weight.boxed_clone(),
version,
indexes,
full_stats(),
)?;
let indexes_to_block_fullness = LazyVecsFromHeight::from_computed::<WeightToFullness>(
"block_fullness",
version,
indexer.vecs.block.height_to_weight.boxed_clone(),
&indexes_to_block_weight,
);
let indexes_to_block_fullness =
LazyBlockFull::from_derived::<WeightToFullness>(
"block_fullness",
version,
indexer.vecs.block.height_to_weight.boxed_clone(),
&indexes_to_block_weight,
);
Ok(Self {
indexes_to_block_weight,

View File

@@ -1,11 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, Weight};
use crate::internal::{ComputedVecsFromHeight, LazyVecsFromHeight};
use crate::internal::{DerivedComputedBlockFull, LazyBlockFull};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_block_weight: ComputedVecsFromHeight<Weight>,
pub indexes_to_block_weight: DerivedComputedBlockFull<Weight>,
/// Block fullness as percentage of max block weight (0-100%)
pub indexes_to_block_fullness: LazyVecsFromHeight<StoredF32, Weight>,
pub indexes_to_block_fullness: LazyBlockFull<StoredF32, Weight>,
}

View File

@@ -3,7 +3,7 @@ use brk_types::{Bitcoin, CheckedSub, StoredF64};
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use crate::{distribution, indexes, utils::OptionExt, ComputeIndexes};
use crate::{distribution, indexes, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -37,12 +37,10 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |vec| {
let mut coinblocks_destroyed_iter = indexes_to_coinblocks_destroyed
.height
.as_ref()
.unwrap()
.into_iter();
vec.compute_transform(
starting_indexes.height,
self.indexes_to_coinblocks_created.height.u(),
&self.indexes_to_coinblocks_created.height,
|(i, created, ..)| {
let destroyed = coinblocks_destroyed_iter.get_unwrap(i);
(i, created.checked_sub(destroyed).unwrap())
@@ -56,12 +54,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
indexes_to_coinblocks_destroyed
.height_extra
.unwrap_cumulative(),
self.indexes_to_coinblocks_created
.height_extra
.unwrap_cumulative(),
indexes_to_coinblocks_destroyed.height_cumulative.inner(),
self.indexes_to_coinblocks_created.height_cumulative.inner(),
exit,
)?;
Ok(())
@@ -71,7 +65,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
self.indexes_to_liveliness.height.u(),
&self.indexes_to_liveliness.height,
|(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()),
exit,
)?;
@@ -85,8 +79,8 @@ impl Vecs {
|vec| {
vec.compute_divide(
starting_indexes.height,
self.indexes_to_liveliness.height.u(),
self.indexes_to_vaultedness.height.u(),
&self.indexes_to_liveliness.height,
&self.indexes_to_vaultedness.height,
exit,
)?;
Ok(())

View File

@@ -5,36 +5,42 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedBlockLast, ComputedBlockSumCum},
};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative();
macro_rules! computed_h {
($name:expr, $opts:expr) => {
ComputedVecsFromHeight::forced_import(
db,
$name,
Source::Compute,
version,
indexes,
$opts,
)?
};
}
Ok(Self {
indexes_to_coinblocks_created: computed_h!("coinblocks_created", sum_cum()),
indexes_to_coinblocks_stored: computed_h!("coinblocks_stored", sum_cum()),
indexes_to_liveliness: computed_h!("liveliness", last()),
indexes_to_vaultedness: computed_h!("vaultedness", last()),
indexes_to_activity_to_vaultedness_ratio: computed_h!(
indexes_to_coinblocks_created: ComputedBlockSumCum::forced_import(
db,
"coinblocks_created",
version,
indexes,
)?,
indexes_to_coinblocks_stored: ComputedBlockSumCum::forced_import(
db,
"coinblocks_stored",
version,
indexes,
)?,
indexes_to_liveliness: ComputedBlockLast::forced_import(
db,
"liveliness",
version,
indexes,
)?,
indexes_to_vaultedness: ComputedBlockLast::forced_import(
db,
"vaultedness",
version,
indexes,
)?,
indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast::forced_import(
db,
"activity_to_vaultedness_ratio",
last()
),
version,
indexes,
)?,
})
}
}

View File

@@ -1,13 +1,13 @@
use brk_traversable::Traversable;
use brk_types::StoredF64;
use crate::internal::ComputedVecsFromHeight;
use crate::internal::{ComputedBlockLast, ComputedBlockSumCum};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_coinblocks_created: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_coinblocks_stored: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_liveliness: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_vaultedness: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_activity_to_vaultedness_ratio: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_coinblocks_created: ComputedBlockSumCum<StoredF64>,
pub indexes_to_coinblocks_stored: ComputedBlockSumCum<StoredF64>,
pub indexes_to_liveliness: ComputedBlockLast<StoredF64>,
pub indexes_to_vaultedness: ComputedBlockLast<StoredF64>,
pub indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast<StoredF64>,
}

View File

@@ -3,7 +3,7 @@ use vecdb::Exit;
use super::Vecs;
use super::super::activity;
use crate::{supply, ComputeIndexes, utils::OptionExt};
use crate::{supply, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -18,11 +18,8 @@ impl Vecs {
.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity
.indexes_to_activity_to_vaultedness_ratio
.dateindex
.unwrap_last(),
supply.inflation.indexes.dateindex.u(),
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.inflation.indexes.dateindex,
exit,
)?;
Ok(())
@@ -32,33 +29,23 @@ impl Vecs {
.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity
.indexes_to_activity_to_vaultedness_ratio
.dateindex
.unwrap_last(),
supply.velocity.indexes_to_btc.dateindex.u(),
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.indexes_to_btc.dateindex,
exit,
)?;
Ok(())
})?;
if has_price {
self.indexes_to_cointime_adj_tx_usd_velocity.compute_all(
starting_indexes,
exit,
|v| {
v.compute_multiply(
starting_indexes.dateindex,
activity
.indexes_to_activity_to_vaultedness_ratio
.dateindex
.unwrap_last(),
supply.velocity.indexes_to_usd.u().dateindex.u(),
exit,
)?;
Ok(())
},
)?;
self.indexes_to_cointime_adj_tx_usd_velocity.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.indexes_to_usd.as_ref().unwrap().dateindex,
exit,
)?;
Ok(())
})?;
}
Ok(())

View File

@@ -3,32 +3,29 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions},
};
use crate::{indexes, internal::ComputedDateLast};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
macro_rules! computed_di {
($name:expr) => {
ComputedVecsFromDateIndex::forced_import(
db,
$name,
Source::Compute,
version,
indexes,
last(),
)?
};
}
Ok(Self {
indexes_to_cointime_adj_inflation_rate: computed_di!("cointime_adj_inflation_rate"),
indexes_to_cointime_adj_tx_btc_velocity: computed_di!("cointime_adj_tx_btc_velocity"),
indexes_to_cointime_adj_tx_usd_velocity: computed_di!("cointime_adj_tx_usd_velocity"),
indexes_to_cointime_adj_inflation_rate: ComputedDateLast::forced_import(
db,
"cointime_adj_inflation_rate",
version,
indexes,
)?,
indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast::forced_import(
db,
"cointime_adj_tx_btc_velocity",
version,
indexes,
)?,
indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast::forced_import(
db,
"cointime_adj_tx_usd_velocity",
version,
indexes,
)?,
})
}
}

View File

@@ -1,11 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, StoredF64};
use crate::internal::ComputedVecsFromDateIndex;
use crate::internal::ComputedDateLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_cointime_adj_inflation_rate: ComputedVecsFromDateIndex<StoredF32>,
pub indexes_to_cointime_adj_tx_btc_velocity: ComputedVecsFromDateIndex<StoredF64>,
pub indexes_to_cointime_adj_tx_usd_velocity: ComputedVecsFromDateIndex<StoredF64>,
pub indexes_to_cointime_adj_inflation_rate: ComputedDateLast<StoredF32>,
pub indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast<StoredF64>,
pub indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast<StoredF64>,
}

View File

@@ -4,7 +4,7 @@ use vecdb::Exit;
use super::super::{activity, value};
use super::Vecs;
use crate::{ComputeIndexes, blocks, distribution, indexes, utils::OptionExt};
use crate::{blocks, distribution, indexes, utils::OptionExt, ComputeIndexes};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -36,16 +36,17 @@ impl Vecs {
self.indexes_to_thermo_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
// KISS: height_cumulative is now a concrete field (not Option)
vec.compute_transform(
starting_indexes.height,
blocks
&blocks
.rewards
.indexes_to_subsidy
.dollars
.as_ref()
.unwrap()
.height_extra
.unwrap_cumulative(),
.height_cumulative
.0,
|(i, v, ..)| (i, v),
exit,
)?;
@@ -57,7 +58,7 @@ impl Vecs {
vec.compute_subtract(
starting_indexes.height,
realized_cap,
self.indexes_to_thermo_cap.height.u(),
&self.indexes_to_thermo_cap.height,
exit,
)?;
Ok(())
@@ -68,7 +69,7 @@ impl Vecs {
vec.compute_divide(
starting_indexes.height,
realized_cap,
activity.indexes_to_vaultedness.height.u(),
&activity.indexes_to_vaultedness.height,
exit,
)?;
Ok(())
@@ -79,7 +80,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
realized_cap,
activity.indexes_to_liveliness.height.u(),
&activity.indexes_to_liveliness.height,
exit,
)?;
Ok(())
@@ -90,15 +91,9 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform3(
starting_indexes.height,
value
.indexes_to_cointime_value_destroyed
.height_extra
.unwrap_cumulative(),
value.indexes_to_cointime_value_destroyed.height_cumulative.inner(),
circulating_supply,
activity
.indexes_to_coinblocks_stored
.height_extra
.unwrap_cumulative(),
activity.indexes_to_coinblocks_stored.height_cumulative.inner(),
|(i, destroyed, supply, stored, ..)| {
let destroyed: f64 = *destroyed;
let supply: f64 = supply.into();

View File

@@ -3,34 +3,41 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
use crate::{indexes, internal::ComputedBlockLast};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
macro_rules! computed_h {
($name:expr) => {
ComputedVecsFromHeight::forced_import(
db,
$name,
Source::Compute,
version,
indexes,
last(),
)?
};
}
Ok(Self {
indexes_to_thermo_cap: computed_h!("thermo_cap"),
indexes_to_investor_cap: computed_h!("investor_cap"),
indexes_to_vaulted_cap: computed_h!("vaulted_cap"),
indexes_to_active_cap: computed_h!("active_cap"),
indexes_to_cointime_cap: computed_h!("cointime_cap"),
indexes_to_thermo_cap: ComputedBlockLast::forced_import(
db,
"thermo_cap",
version,
indexes,
)?,
indexes_to_investor_cap: ComputedBlockLast::forced_import(
db,
"investor_cap",
version,
indexes,
)?,
indexes_to_vaulted_cap: ComputedBlockLast::forced_import(
db,
"vaulted_cap",
version,
indexes,
)?,
indexes_to_active_cap: ComputedBlockLast::forced_import(
db,
"active_cap",
version,
indexes,
)?,
indexes_to_cointime_cap: ComputedBlockLast::forced_import(
db,
"cointime_cap",
version,
indexes,
)?,
})
}
}

View File

@@ -1,13 +1,13 @@
use brk_traversable::Traversable;
use brk_types::Dollars;
use crate::internal::ComputedVecsFromHeight;
use crate::internal::ComputedBlockLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_thermo_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_investor_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_vaulted_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_active_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_cointime_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_thermo_cap: ComputedBlockLast<Dollars>,
pub indexes_to_investor_cap: ComputedBlockLast<Dollars>,
pub indexes_to_vaulted_cap: ComputedBlockLast<Dollars>,
pub indexes_to_active_cap: ComputedBlockLast<Dollars>,
pub indexes_to_cointime_cap: ComputedBlockLast<Dollars>,
}

View File

@@ -19,22 +19,21 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply_value.bitcoin;
let realized_price = distribution
let realized_price = &distribution
.utxo_cohorts
.all
.metrics
.realized
.u()
.indexes_to_realized_price
.height
.u();
.height;
self.indexes_to_vaulted_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
realized_price,
activity.indexes_to_vaultedness.height.u(),
&activity.indexes_to_vaultedness.height,
exit,
)?;
Ok(())
@@ -44,7 +43,7 @@ impl Vecs {
price,
starting_indexes,
exit,
Some(self.indexes_to_vaulted_price.dateindex.unwrap_last()),
Some(&self.indexes_to_vaulted_price.dateindex.0),
)?;
self.indexes_to_active_price
@@ -52,7 +51,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
realized_price,
activity.indexes_to_liveliness.height.u(),
&activity.indexes_to_liveliness.height,
exit,
)?;
Ok(())
@@ -62,7 +61,7 @@ impl Vecs {
price,
starting_indexes,
exit,
Some(self.indexes_to_active_price.dateindex.unwrap_last()),
Some(&self.indexes_to_active_price.dateindex.0),
)?;
self.indexes_to_true_market_mean.compute_all(
@@ -72,7 +71,7 @@ impl Vecs {
|vec| {
vec.compute_divide(
starting_indexes.height,
cap.indexes_to_investor_cap.height.u(),
&cap.indexes_to_investor_cap.height,
&supply.indexes_to_active_supply.bitcoin.height,
exit,
)?;
@@ -84,7 +83,7 @@ impl Vecs {
price,
starting_indexes,
exit,
Some(self.indexes_to_true_market_mean.dateindex.unwrap_last()),
Some(&self.indexes_to_true_market_mean.dateindex.0),
)?;
// cointime_price = cointime_cap / circulating_supply
@@ -92,7 +91,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
cap.indexes_to_cointime_cap.height.u(),
&cap.indexes_to_cointime_cap.height,
circulating_supply,
exit,
)?;
@@ -103,7 +102,7 @@ impl Vecs {
price,
starting_indexes,
exit,
Some(self.indexes_to_cointime_price.dateindex.unwrap_last()),
Some(&self.indexes_to_cointime_price.dateindex.0),
)?;
Ok(())

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes, price,
internal::{ComputedRatioVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::{ComputedRatioVecsDate, ComputedBlockLast},
};
impl Vecs {
@@ -15,17 +15,13 @@ impl Vecs {
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
macro_rules! computed_h {
($name:expr) => {
ComputedVecsFromHeight::forced_import(
ComputedBlockLast::forced_import(
db,
$name,
Source::Compute,
version,
indexes,
last(),
)?
};
}
@@ -38,7 +34,7 @@ impl Vecs {
macro_rules! ratio_di {
($name:expr, $source:expr) => {
ComputedRatioVecsFromDateIndex::forced_import(
ComputedRatioVecsDate::forced_import(
db,
$name,
Some($source),

View File

@@ -1,16 +1,16 @@
use brk_traversable::Traversable;
use brk_types::Dollars;
use crate::internal::{ComputedRatioVecsFromDateIndex, ComputedVecsFromHeight};
use crate::internal::{ComputedRatioVecsDate, ComputedBlockLast};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_vaulted_price: ComputedVecsFromHeight<Dollars>,
pub indexes_to_vaulted_price_ratio: ComputedRatioVecsFromDateIndex,
pub indexes_to_active_price: ComputedVecsFromHeight<Dollars>,
pub indexes_to_active_price_ratio: ComputedRatioVecsFromDateIndex,
pub indexes_to_true_market_mean: ComputedVecsFromHeight<Dollars>,
pub indexes_to_true_market_mean_ratio: ComputedRatioVecsFromDateIndex,
pub indexes_to_cointime_price: ComputedVecsFromHeight<Dollars>,
pub indexes_to_cointime_price_ratio: ComputedRatioVecsFromDateIndex,
pub indexes_to_vaulted_price: ComputedBlockLast<Dollars>,
pub indexes_to_vaulted_price_ratio: ComputedRatioVecsDate,
pub indexes_to_active_price: ComputedBlockLast<Dollars>,
pub indexes_to_active_price_ratio: ComputedRatioVecsDate,
pub indexes_to_true_market_mean: ComputedBlockLast<Dollars>,
pub indexes_to_true_market_mean_ratio: ComputedRatioVecsDate,
pub indexes_to_cointime_price: ComputedBlockLast<Dollars>,
pub indexes_to_cointime_price_ratio: ComputedRatioVecsDate,
}

View File

@@ -3,7 +3,7 @@ use vecdb::Exit;
use super::Vecs;
use super::super::activity;
use crate::{distribution, indexes, price, ComputeIndexes, utils::OptionExt};
use crate::{distribution, indexes, price, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -26,7 +26,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
circulating_supply,
activity.indexes_to_vaultedness.height.u(),
&activity.indexes_to_vaultedness.height,
exit,
)?;
Ok(())
@@ -42,7 +42,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
circulating_supply,
activity.indexes_to_liveliness.height.u(),
&activity.indexes_to_liveliness.height,
exit,
)?;
Ok(())

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedValueVecsFromHeight, Source, VecBuilderOptions},
internal::ValueBlockLast,
};
impl Vecs {
@@ -15,25 +15,21 @@ impl Vecs {
indexes: &indexes::Vecs,
compute_dollars: bool,
) -> Result<Self> {
let last = || VecBuilderOptions::default().add_last();
macro_rules! value_h {
($name:expr) => {
ComputedValueVecsFromHeight::forced_import(
db,
$name,
Source::Compute,
version,
last(),
compute_dollars,
indexes,
)?
};
}
Ok(Self {
indexes_to_vaulted_supply: value_h!("vaulted_supply"),
indexes_to_active_supply: value_h!("active_supply"),
indexes_to_vaulted_supply: ValueBlockLast::forced_import(
db,
"vaulted_supply",
version,
indexes,
compute_dollars,
)?,
indexes_to_active_supply: ValueBlockLast::forced_import(
db,
"active_supply",
version,
indexes,
compute_dollars,
)?,
})
}
}

View File

@@ -1,9 +1,9 @@
use brk_traversable::Traversable;
use crate::internal::ComputedValueVecsFromHeight;
use crate::internal::ValueBlockLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_vaulted_supply: ComputedValueVecsFromHeight,
pub indexes_to_active_supply: ComputedValueVecsFromHeight,
pub indexes_to_vaulted_supply: ValueBlockLast,
pub indexes_to_active_supply: ValueBlockLast,
}

View File

@@ -3,7 +3,7 @@ use vecdb::Exit;
use super::super::activity;
use super::Vecs;
use crate::{distribution, indexes, price, utils::OptionExt, ComputeIndexes};
use crate::{distribution, indexes, price, ComputeIndexes};
impl Vecs {
pub fn compute(
@@ -30,7 +30,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
indexes_to_coinblocks_destroyed.height.u(),
&indexes_to_coinblocks_destroyed.height,
exit,
)?;
Ok(())
@@ -45,7 +45,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
activity.indexes_to_coinblocks_created.height.u(),
&activity.indexes_to_coinblocks_created.height,
exit,
)?;
Ok(())
@@ -60,7 +60,7 @@ impl Vecs {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
activity.indexes_to_coinblocks_stored.height.u(),
&activity.indexes_to_coinblocks_stored.height,
exit,
)?;
Ok(())

View File

@@ -3,32 +3,29 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
use crate::{indexes, internal::ComputedBlockSumCum};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative();
macro_rules! computed_h {
($name:expr) => {
ComputedVecsFromHeight::forced_import(
db,
$name,
Source::Compute,
version,
indexes,
sum_cum(),
)?
};
}
Ok(Self {
indexes_to_cointime_value_destroyed: computed_h!("cointime_value_destroyed"),
indexes_to_cointime_value_created: computed_h!("cointime_value_created"),
indexes_to_cointime_value_stored: computed_h!("cointime_value_stored"),
indexes_to_cointime_value_destroyed: ComputedBlockSumCum::forced_import(
db,
"cointime_value_destroyed",
version,
indexes,
)?,
indexes_to_cointime_value_created: ComputedBlockSumCum::forced_import(
db,
"cointime_value_created",
version,
indexes,
)?,
indexes_to_cointime_value_stored: ComputedBlockSumCum::forced_import(
db,
"cointime_value_stored",
version,
indexes,
)?,
})
}
}

View File

@@ -1,11 +1,11 @@
use brk_traversable::Traversable;
use brk_types::StoredF64;
use crate::internal::ComputedVecsFromHeight;
use crate::internal::ComputedBlockSumCum;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_cointime_value_destroyed: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_cointime_value_created: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_cointime_value_stored: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_cointime_value_destroyed: ComputedBlockSumCum<StoredF64>,
pub indexes_to_cointime_value_created: ComputedBlockSumCum<StoredF64>,
pub indexes_to_cointime_value_stored: ComputedBlockSumCum<StoredF64>,
}

View File

@@ -2,17 +2,14 @@ use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64, Version};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableCloneableVec, PcoVec, TypedVecIterator,
};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
use crate::{ComputeIndexes, indexes, internal::DerivedComputedBlockLast};
/// Address count per address type (runtime state).
#[derive(Debug, Default, Deref, DerefMut)]
@@ -142,11 +139,13 @@ impl AddressTypeToHeightToAddressCount {
/// Address count per address type, indexed by various indexes (dateindex, etc.).
#[derive(Clone, Deref, DerefMut, Traversable)]
pub struct AddressTypeToIndexesToAddressCount(ByAddressType<ComputedVecsFromHeight<StoredU64>>);
pub struct AddressTypeToIndexesToAddressCount(ByAddressType<DerivedComputedBlockLast<StoredU64>>);
impl From<ByAddressType<ComputedVecsFromHeight<StoredU64>>> for AddressTypeToIndexesToAddressCount {
impl From<ByAddressType<DerivedComputedBlockLast<StoredU64>>>
for AddressTypeToIndexesToAddressCount
{
#[inline]
fn from(value: ByAddressType<ComputedVecsFromHeight<StoredU64>>) -> Self {
fn from(value: ByAddressType<DerivedComputedBlockLast<StoredU64>>) -> Self {
Self(value)
}
}
@@ -160,17 +159,16 @@ impl AddressTypeToIndexesToAddressCount {
sources: &AddressTypeToHeightToAddressCount,
) -> Result<Self> {
Ok(Self::from(ByAddressType::<
ComputedVecsFromHeight<StoredU64>,
DerivedComputedBlockLast<StoredU64>,
>::try_zip_with_name(
sources,
|type_name, source| {
ComputedVecsFromHeight::forced_import(
DerivedComputedBlockLast::forced_import(
db,
&format!("{type_name}_{name}"),
Source::Vec(source.boxed_clone()),
source.boxed_clone(),
version,
indexes,
VecBuilderOptions::default().add_last(),
)
},
)?))
@@ -183,53 +181,53 @@ impl AddressTypeToIndexesToAddressCount {
exit: &Exit,
addresstype_to_height_to_addresscount: &AddressTypeToHeightToAddressCount,
) -> Result<()> {
self.p2pk65.compute_rest(
self.p2pk65.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2pk65,
exit,
Some(&addresstype_to_height_to_addresscount.p2pk65),
)?;
self.p2pk33.compute_rest(
self.p2pk33.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2pk33,
exit,
Some(&addresstype_to_height_to_addresscount.p2pk33),
)?;
self.p2pkh.compute_rest(
self.p2pkh.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2pkh,
exit,
Some(&addresstype_to_height_to_addresscount.p2pkh),
)?;
self.p2sh.compute_rest(
self.p2sh.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2sh,
exit,
Some(&addresstype_to_height_to_addresscount.p2sh),
)?;
self.p2wpkh.compute_rest(
self.p2wpkh.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2wpkh,
exit,
Some(&addresstype_to_height_to_addresscount.p2wpkh),
)?;
self.p2wsh.compute_rest(
self.p2wsh.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2wsh,
exit,
Some(&addresstype_to_height_to_addresscount.p2wsh),
)?;
self.p2tr.compute_rest(
self.p2tr.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2tr,
exit,
Some(&addresstype_to_height_to_addresscount.p2tr),
)?;
self.p2a.compute_rest(
self.p2a.derive_from(
indexes,
starting_indexes,
&addresstype_to_height_to_addresscount.p2a,
exit,
Some(&addresstype_to_height_to_addresscount.p2a),
)?;
Ok(())
}

View File

@@ -1,5 +1,5 @@
use brk_types::Height;
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use rustc_hash::FxHashMap;
use super::vec::AddressTypeToVec;

View File

@@ -2,7 +2,7 @@ use std::{collections::hash_map::Entry, mem};
use brk_cohort::ByAddressType;
use brk_types::{OutputType, TypeIndex};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use rustc_hash::FxHashMap;
use smallvec::{Array, SmallVec};

View File

@@ -1,5 +1,5 @@
use brk_cohort::ByAddressType;
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
/// A vector for each address type.
#[derive(Debug, Deref, DerefMut)]

View File

@@ -6,7 +6,7 @@ use brk_cohort::{
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Version};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, IterableVec};

View File

@@ -14,7 +14,7 @@ use crate::{
ComputeIndexes,
distribution::state::AddressCohortState,
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::DerivedComputedBlockLast,
price,
};
@@ -42,7 +42,7 @@ pub struct AddressCohortVecs {
pub height_to_addr_count: EagerVec<PcoVec<Height, StoredU64>>,
/// Address count indexed by various dimensions
pub indexes_to_addr_count: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_addr_count: DerivedComputedBlockLast<StoredU64>,
}
impl AddressCohortVecs {
@@ -86,13 +86,12 @@ impl AddressCohortVecs {
metrics: CohortMetrics::forced_import(&cfg, all_supply)?,
indexes_to_addr_count: ComputedVecsFromHeight::forced_import(
indexes_to_addr_count: DerivedComputedBlockLast::forced_import(
db,
&cfg.name("addr_count"),
Source::Vec(height_to_addr_count.boxed_clone()),
height_to_addr_count.boxed_clone(),
version + VERSION,
indexes,
VecBuilderOptions::default().add_last(),
)?,
height_to_addr_count,
})
@@ -248,11 +247,11 @@ impl DynCohortVecs for AddressCohortVecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_addr_count.compute_rest(
self.indexes_to_addr_count.derive_from(
indexes,
starting_indexes,
&self.height_to_addr_count,
exit,
Some(&self.height_to_addr_count),
)?;
self.metrics
.compute_rest_part1(indexes, price, starting_indexes, exit)?;

View File

@@ -7,7 +7,7 @@ use brk_cohort::{
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, Version};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, IterableVec};

View File

@@ -20,7 +20,6 @@ use crate::{
state::{BlockState, Transacted},
},
inputs, outputs,
utils::OptionExt,
};
use super::{
@@ -63,10 +62,10 @@ pub fn process_blocks(
let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex;
// From transactions and inputs/outputs (via .height.u() or .height.unwrap_sum() patterns):
let height_to_tx_count = transactions.count.indexes_to_tx_count.height.u();
let height_to_output_count = outputs.count.indexes_to_count.height.unwrap_sum();
let height_to_input_count = inputs.count.indexes_to_count.height.unwrap_sum();
// From transactions and inputs/outputs (via .height or .height.sum_cum.sum patterns):
let height_to_tx_count = &transactions.count.indexes_to_tx_count.height;
let height_to_output_count = &outputs.count.indexes_to_count.height.sum_cum.sum.0;
let height_to_input_count = &inputs.count.indexes_to_count.height.sum_cum.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.height_to_timestamp_fixed;
let height_to_date = &blocks.time.height_to_date_fixed;
@@ -77,7 +76,7 @@ pub fn process_blocks(
// From price (optional):
let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height);
let dateindex_to_price = price.map(|p| p.usd.timeindexes_to_price_close.dateindex.u());
let dateindex_to_price = price.map(|p| &p.usd.timeindexes_to_price_close.dateindex);
// Access pre-computed vectors from context for thread-safe access
let height_to_price_vec = &ctx.height_to_price;

View File

@@ -9,8 +9,7 @@ use vecdb::{
use crate::{
ComputeIndexes, indexes,
internal::{ComputedValueVecsFromHeight, ComputedVecsFromHeight, Source, VecBuilderOptions},
price,
internal::{ComputedBlockSumCum, DerivedValueBlockSumCum},
};
use super::ImportConfig;
@@ -21,8 +20,8 @@ pub struct ActivityMetrics {
/// Total satoshis sent at each height
pub height_to_sent: EagerVec<PcoVec<Height, Sats>>,
/// Sent amounts indexed by various dimensions
pub indexes_to_sent: ComputedValueVecsFromHeight,
/// Sent amounts indexed by various dimensions (derives from height_to_sent)
pub indexes_to_sent: DerivedValueBlockSumCum,
/// Satoshi-blocks destroyed (supply * blocks_old when spent)
pub height_to_satblocks_destroyed: EagerVec<PcoVec<Height, Sats>>,
@@ -31,28 +30,24 @@ pub struct ActivityMetrics {
pub height_to_satdays_destroyed: EagerVec<PcoVec<Height, Sats>>,
/// Coin-blocks destroyed (in BTC rather than sats)
pub indexes_to_coinblocks_destroyed: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_coinblocks_destroyed: ComputedBlockSumCum<StoredF64>,
/// Coin-days destroyed (in BTC rather than sats)
pub indexes_to_coindays_destroyed: ComputedVecsFromHeight<StoredF64>,
pub indexes_to_coindays_destroyed: ComputedBlockSumCum<StoredF64>,
}
impl ActivityMetrics {
/// Import activity metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let compute_dollars = cfg.compute_dollars();
let sum_cum = VecBuilderOptions::default().add_sum().add_cumulative();
let height_to_sent: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("sent"), cfg.version)?;
let indexes_to_sent = ComputedValueVecsFromHeight::forced_import(
let indexes_to_sent = DerivedValueBlockSumCum::forced_import(
cfg.db,
&cfg.name("sent"),
Source::Vec(height_to_sent.boxed_clone()),
cfg.version,
sum_cum,
compute_dollars,
cfg.indexes,
height_to_sent.boxed_clone(),
cfg.price,
)?;
Ok(Self {
@@ -71,22 +66,18 @@ impl ActivityMetrics {
cfg.version,
)?,
indexes_to_coinblocks_destroyed: ComputedVecsFromHeight::forced_import(
indexes_to_coinblocks_destroyed: ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("coinblocks_destroyed"),
Source::Compute,
cfg.version,
cfg.indexes,
sum_cum,
)?,
indexes_to_coindays_destroyed: ComputedVecsFromHeight::forced_import(
indexes_to_coindays_destroyed: ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("coindays_destroyed"),
Source::Compute,
cfg.version,
cfg.indexes,
sum_cum,
)?,
})
}
@@ -174,16 +165,14 @@ impl ActivityMetrics {
pub fn compute_rest_part1(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_sent.compute_rest(
self.indexes_to_sent.derive_from(
indexes,
price,
starting_indexes,
&self.height_to_sent,
exit,
Some(&self.height_to_sent),
)?;
self.indexes_to_coinblocks_destroyed

View File

@@ -10,7 +10,7 @@ use vecdb::{
use crate::{
ComputeIndexes,
distribution::state::CohortState,
internal::{ComputedVecsFromHeight, CostBasisPercentiles, Source, VecBuilderOptions},
internal::{CostBasisPercentiles, DerivedComputedBlockLast},
};
use super::ImportConfig;
@@ -20,11 +20,11 @@ use super::ImportConfig;
pub struct CostBasisMetrics {
/// Minimum cost basis for any UTXO at this height
pub height_to_min_cost_basis: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_min_cost_basis: ComputedVecsFromHeight<Dollars>,
pub indexes_to_min_cost_basis: DerivedComputedBlockLast<Dollars>,
/// Maximum cost basis for any UTXO at this height
pub height_to_max_cost_basis: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_max_cost_basis: ComputedVecsFromHeight<Dollars>,
pub indexes_to_max_cost_basis: DerivedComputedBlockLast<Dollars>,
/// Cost basis distribution percentiles (median, quartiles, etc.)
pub percentiles: Option<CostBasisPercentiles>,
@@ -34,7 +34,6 @@ impl CostBasisMetrics {
/// Import cost basis metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let extended = cfg.extended();
let last = VecBuilderOptions::default().add_last();
let height_to_min_cost_basis =
EagerVec::forced_import(cfg.db, &cfg.name("min_cost_basis"), cfg.version)?;
@@ -43,21 +42,19 @@ impl CostBasisMetrics {
EagerVec::forced_import(cfg.db, &cfg.name("max_cost_basis"), cfg.version)?;
Ok(Self {
indexes_to_min_cost_basis: ComputedVecsFromHeight::forced_import(
indexes_to_min_cost_basis: DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("min_cost_basis"),
Source::Vec(height_to_min_cost_basis.boxed_clone()),
height_to_min_cost_basis.boxed_clone(),
cfg.version,
cfg.indexes,
last,
)?,
indexes_to_max_cost_basis: ComputedVecsFromHeight::forced_import(
indexes_to_max_cost_basis: DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("max_cost_basis"),
Source::Vec(height_to_max_cost_basis.boxed_clone()),
height_to_max_cost_basis.boxed_clone(),
cfg.version,
cfg.indexes,
last,
)?,
height_to_min_cost_basis,
height_to_max_cost_basis,
@@ -145,8 +142,7 @@ impl CostBasisMetrics {
.vecs
.iter_mut()
.flatten()
.filter_map(|v| v.dateindex.as_mut())
.map(|v| v as &mut dyn AnyStoredVec),
.map(|v| &mut v.dateindex as &mut dyn AnyStoredVec),
);
}
vecs.into_par_iter()
@@ -193,18 +189,18 @@ impl CostBasisMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_min_cost_basis.compute_rest(
self.indexes_to_min_cost_basis.derive_from(
indexes,
starting_indexes,
&self.height_to_min_cost_basis,
exit,
Some(&self.height_to_min_cost_basis),
)?;
self.indexes_to_max_cost_basis.compute_rest(
self.indexes_to_max_cost_basis.derive_from(
indexes,
starting_indexes,
&self.height_to_max_cost_basis,
exit,
Some(&self.height_to_max_cost_basis),
)?;
Ok(())

View File

@@ -295,7 +295,7 @@ impl CohortMetrics {
self.supply
.compute_rest_part1(indexes, price, starting_indexes, exit)?;
self.activity
.compute_rest_part1(indexes, price, starting_indexes, exit)?;
.compute_rest_part1(indexes, starting_indexes, exit)?;
if let Some(realized) = self.realized.as_mut() {
realized.compute_rest_part1(indexes, starting_indexes, exit)?;

View File

@@ -12,12 +12,12 @@ use crate::{
distribution::state::RealizedState,
indexes,
internal::{
ComputedRatioVecsFromDateIndex, ComputedVecsFromDateIndex, ComputedVecsFromHeight,
DollarsMinus, LazyVecsFrom2FromHeight, LazyVecsFromDateIndex, LazyVecsFromHeight,
PercentageDollarsF32, Source, StoredF32Identity, VecBuilderOptions,
BinaryBlockSum, BinaryBlockSumCumLast, ComputedBlockLast, ComputedBlockSum,
ComputedBlockSumCum, ComputedDateLast, ComputedRatioVecsDate, DerivedComputedBlockLast,
DerivedComputedBlockSum, DerivedComputedBlockSumCum, DollarsMinus, LazyBlockSum,
LazyBlockSumCum, LazyDateLast, PercentageDollarsF32, StoredF32Identity,
},
price,
utils::OptionExt,
};
use super::ImportConfig;
@@ -27,48 +27,48 @@ use super::ImportConfig;
pub struct RealizedMetrics {
// === Realized Cap ===
pub height_to_realized_cap: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_cap: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_price: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_price_extra: ComputedRatioVecsFromDateIndex,
pub indexes_to_realized_cap_rel_to_own_market_cap: Option<ComputedVecsFromHeight<StoredF32>>,
pub indexes_to_realized_cap_30d_delta: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_realized_cap: DerivedComputedBlockLast<Dollars>,
pub indexes_to_realized_price: ComputedBlockLast<Dollars>,
pub indexes_to_realized_price_extra: ComputedRatioVecsDate,
pub indexes_to_realized_cap_rel_to_own_market_cap: Option<ComputedBlockLast<StoredF32>>,
pub indexes_to_realized_cap_30d_delta: ComputedDateLast<Dollars>,
// === MVRV (Market Value to Realized Value) ===
// Proxy for indexes_to_realized_price_extra.ratio (close / realized_price = market_cap / realized_cap)
pub indexes_to_mvrv: LazyVecsFromDateIndex<StoredF32>,
pub indexes_to_mvrv: LazyDateLast<StoredF32>,
// === Realized Profit/Loss ===
pub height_to_realized_profit: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_profit: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_profit: DerivedComputedBlockSumCum<Dollars>,
pub height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_loss: ComputedVecsFromHeight<Dollars>,
pub indexes_to_neg_realized_loss: LazyVecsFromHeight<Dollars>,
pub indexes_to_net_realized_pnl: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_value: ComputedVecsFromHeight<Dollars>,
pub indexes_to_realized_loss: DerivedComputedBlockSumCum<Dollars>,
pub indexes_to_neg_realized_loss: LazyBlockSumCum<Dollars>,
pub indexes_to_net_realized_pnl: ComputedBlockSumCum<Dollars>,
pub indexes_to_realized_value: ComputedBlockSum<Dollars>,
// === Realized vs Realized Cap Ratios (lazy) ===
pub indexes_to_realized_profit_rel_to_realized_cap:
LazyVecsFrom2FromHeight<StoredF32, Dollars, Dollars>,
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
pub indexes_to_realized_loss_rel_to_realized_cap:
LazyVecsFrom2FromHeight<StoredF32, Dollars, Dollars>,
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
pub indexes_to_net_realized_pnl_rel_to_realized_cap:
LazyVecsFrom2FromHeight<StoredF32, Dollars, Dollars>,
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
// === Total Realized PnL ===
pub indexes_to_total_realized_pnl: LazyVecsFromHeight<Dollars>,
pub indexes_to_total_realized_pnl: LazyBlockSum<Dollars>,
pub dateindex_to_realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
// === Value Created/Destroyed ===
pub height_to_value_created: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_value_created: ComputedVecsFromHeight<Dollars>,
#[traversable(rename = "value_created_sum")]
pub indexes_to_value_created: DerivedComputedBlockSum<Dollars>,
pub height_to_value_destroyed: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_value_destroyed: ComputedVecsFromHeight<Dollars>,
#[traversable(rename = "value_destroyed_sum")]
pub indexes_to_value_destroyed: DerivedComputedBlockSum<Dollars>,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub indexes_to_adjusted_value_created:
Option<LazyVecsFrom2FromHeight<Dollars, Dollars, Dollars>>,
pub indexes_to_adjusted_value_destroyed:
Option<LazyVecsFrom2FromHeight<Dollars, Dollars, Dollars>>,
pub indexes_to_adjusted_value_created: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
pub indexes_to_adjusted_value_destroyed: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
// === SOPR (Spent Output Profit Ratio) ===
pub dateindex_to_sopr: EagerVec<PcoVec<DateIndex, StoredF64>>,
@@ -84,11 +84,11 @@ pub struct RealizedMetrics {
pub dateindex_to_sell_side_risk_ratio_30d_ema: EagerVec<PcoVec<DateIndex, StoredF32>>,
// === Net Realized PnL Deltas ===
pub indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast<Dollars>,
pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
ComputedVecsFromDateIndex<StoredF32>,
ComputedDateLast<StoredF32>,
pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
ComputedVecsFromDateIndex<StoredF32>,
ComputedDateLast<StoredF32>,
}
impl RealizedMetrics {
@@ -98,23 +98,19 @@ impl RealizedMetrics {
let v3 = Version::new(3);
let extended = cfg.extended();
let compute_adjusted = cfg.compute_adjusted();
let last = VecBuilderOptions::default().add_last();
let sum = VecBuilderOptions::default().add_sum();
let sum_cum = VecBuilderOptions::default().add_sum().add_cumulative();
let height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_loss"), cfg.version)?;
let indexes_to_realized_loss = ComputedVecsFromHeight::forced_import(
let indexes_to_realized_loss = DerivedComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("realized_loss"),
Source::Vec(height_to_realized_loss.boxed_clone()),
height_to_realized_loss.boxed_clone(),
cfg.version,
cfg.indexes,
sum_cum,
)?;
let indexes_to_neg_realized_loss = LazyVecsFromHeight::from_computed::<Negate>(
let indexes_to_neg_realized_loss = LazyBlockSumCum::from_derived::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
height_to_realized_loss.boxed_clone(),
@@ -122,24 +118,18 @@ impl RealizedMetrics {
);
// realized_value is the source for total_realized_pnl (they're identical)
let indexes_to_realized_value = ComputedVecsFromHeight::forced_import(
let indexes_to_realized_value = ComputedBlockSum::forced_import(
cfg.db,
&cfg.name("realized_value"),
Source::Compute,
cfg.version,
cfg.indexes,
sum,
)?;
// total_realized_pnl is a lazy alias to realized_value
let indexes_to_total_realized_pnl = LazyVecsFromHeight::from_computed::<Ident>(
let indexes_to_total_realized_pnl = LazyBlockSum::from_computed::<Ident>(
&cfg.name("total_realized_pnl"),
cfg.version + v1,
indexes_to_realized_value
.height
.as_ref()
.unwrap()
.boxed_clone(),
indexes_to_realized_value.height.boxed_clone(),
&indexes_to_realized_value,
);
@@ -147,39 +137,35 @@ impl RealizedMetrics {
let height_to_realized_cap: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_cap"), cfg.version)?;
let indexes_to_realized_cap = ComputedVecsFromHeight::forced_import(
let indexes_to_realized_cap = DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_cap"),
Source::Vec(height_to_realized_cap.boxed_clone()),
height_to_realized_cap.boxed_clone(),
cfg.version,
cfg.indexes,
last,
)?;
let height_to_realized_profit: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_profit"), cfg.version)?;
let indexes_to_realized_profit = ComputedVecsFromHeight::forced_import(
let indexes_to_realized_profit = DerivedComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("realized_profit"),
Source::Vec(height_to_realized_profit.boxed_clone()),
height_to_realized_profit.boxed_clone(),
cfg.version,
cfg.indexes,
sum_cum,
)?;
let indexes_to_net_realized_pnl = ComputedVecsFromHeight::forced_import(
let indexes_to_net_realized_pnl = ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
Source::Compute,
cfg.version,
cfg.indexes,
sum_cum,
)?;
// Construct lazy ratio vecs (before struct assignment to satisfy borrow checker)
let indexes_to_realized_profit_rel_to_realized_cap =
LazyVecsFrom2FromHeight::from_computed::<PercentageDollarsF32>(
BinaryBlockSumCumLast::from_derived::<PercentageDollarsF32>(
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
height_to_realized_profit.boxed_clone(),
@@ -189,7 +175,7 @@ impl RealizedMetrics {
);
let indexes_to_realized_loss_rel_to_realized_cap =
LazyVecsFrom2FromHeight::from_computed::<PercentageDollarsF32>(
BinaryBlockSumCumLast::from_derived::<PercentageDollarsF32>(
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
height_to_realized_loss.boxed_clone(),
@@ -199,26 +185,20 @@ impl RealizedMetrics {
);
let indexes_to_net_realized_pnl_rel_to_realized_cap =
LazyVecsFrom2FromHeight::from_computed::<PercentageDollarsF32>(
BinaryBlockSumCumLast::from_computed_derived::<PercentageDollarsF32>(
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
indexes_to_net_realized_pnl
.height
.as_ref()
.unwrap()
.boxed_clone(),
indexes_to_net_realized_pnl.height.boxed_clone(),
height_to_realized_cap.boxed_clone(),
&indexes_to_net_realized_pnl,
&indexes_to_realized_cap,
);
let indexes_to_realized_price = ComputedVecsFromHeight::forced_import(
let indexes_to_realized_price = ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_price"),
Source::Compute,
cfg.version + v1,
cfg.indexes,
last,
)?;
let height_to_value_created =
@@ -226,28 +206,26 @@ impl RealizedMetrics {
let height_to_value_destroyed =
EagerVec::forced_import(cfg.db, &cfg.name("value_destroyed"), cfg.version)?;
let indexes_to_value_created = ComputedVecsFromHeight::forced_import(
let indexes_to_value_created = DerivedComputedBlockSum::forced_import(
cfg.db,
&cfg.name("value_created"),
Source::Vec(height_to_value_created.boxed_clone()),
height_to_value_created.boxed_clone(),
cfg.version,
cfg.indexes,
sum,
)?;
let indexes_to_value_destroyed = ComputedVecsFromHeight::forced_import(
let indexes_to_value_destroyed = DerivedComputedBlockSum::forced_import(
cfg.db,
&cfg.name("value_destroyed"),
Source::Vec(height_to_value_destroyed.boxed_clone()),
height_to_value_destroyed.boxed_clone(),
cfg.version,
cfg.indexes,
sum,
)?;
// Create lazy adjusted vecs if compute_adjusted and up_to_1h is available
let indexes_to_adjusted_value_created = (compute_adjusted && cfg.up_to_1h_realized.is_some())
.then(|| {
let indexes_to_adjusted_value_created =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyVecsFrom2FromHeight::from_computed::<DollarsMinus>(
BinaryBlockSum::from_derived::<DollarsMinus>(
&cfg.name("adjusted_value_created"),
cfg.version,
height_to_value_created.boxed_clone(),
@@ -259,7 +237,7 @@ impl RealizedMetrics {
let indexes_to_adjusted_value_destroyed =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyVecsFrom2FromHeight::from_computed::<DollarsMinus>(
BinaryBlockSum::from_derived::<DollarsMinus>(
&cfg.name("adjusted_value_destroyed"),
cfg.version,
height_to_value_destroyed.boxed_clone(),
@@ -270,7 +248,7 @@ impl RealizedMetrics {
});
// Create realized_price_extra first so we can reference its ratio for MVRV proxy
let indexes_to_realized_price_extra = ComputedRatioVecsFromDateIndex::forced_import(
let indexes_to_realized_price_extra = ComputedRatioVecsDate::forced_import(
cfg.db,
&cfg.name("realized_price"),
Some(&indexes_to_realized_price),
@@ -282,14 +260,9 @@ impl RealizedMetrics {
// MVRV is a lazy proxy for realized_price_extra.ratio
// ratio = close / realized_price = market_cap / realized_cap = MVRV
let indexes_to_mvrv = LazyVecsFromDateIndex::from_computed::<StoredF32Identity>(
let indexes_to_mvrv = LazyDateLast::from_source::<StoredF32Identity>(
&cfg.name("mvrv"),
cfg.version,
indexes_to_realized_price_extra
.ratio
.dateindex
.as_ref()
.map(|v| v.boxed_clone()),
&indexes_to_realized_price_extra.ratio,
);
@@ -303,23 +276,19 @@ impl RealizedMetrics {
indexes_to_mvrv,
indexes_to_realized_cap_rel_to_own_market_cap: extended
.then(|| {
ComputedVecsFromHeight::forced_import(
ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_cap_rel_to_own_market_cap"),
Source::Compute,
cfg.version,
cfg.indexes,
last,
)
})
.transpose()?,
indexes_to_realized_cap_30d_delta: ComputedVecsFromDateIndex::forced_import(
indexes_to_realized_cap_30d_delta: ComputedDateLast::forced_import(
cfg.db,
&cfg.name("realized_cap_30d_delta"),
Source::Compute,
cfg.version,
cfg.indexes,
last,
)?,
// === Realized Profit/Loss ===
@@ -416,32 +385,25 @@ impl RealizedMetrics {
)?,
// === Net Realized PnL Deltas ===
indexes_to_net_realized_pnl_cumulative_30d_delta:
ComputedVecsFromDateIndex::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta"),
Source::Compute,
cfg.version + v3,
cfg.indexes,
last,
)?,
indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta"),
cfg.version + v3,
cfg.indexes,
)?,
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
ComputedVecsFromDateIndex::forced_import(
ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap"),
Source::Compute,
cfg.version + v3,
cfg.indexes,
last,
)?,
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
ComputedVecsFromDateIndex::forced_import(
ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_market_cap"),
Source::Compute,
cfg.version + v3,
cfg.indexes,
last,
)?,
})
}
@@ -558,25 +520,25 @@ impl RealizedMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_realized_cap.compute_rest(
self.indexes_to_realized_cap.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_cap,
exit,
Some(&self.height_to_realized_cap),
)?;
self.indexes_to_realized_profit.compute_rest(
self.indexes_to_realized_profit.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_profit,
exit,
Some(&self.height_to_realized_profit),
)?;
self.indexes_to_realized_loss.compute_rest(
self.indexes_to_realized_loss.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_loss,
exit,
Some(&self.height_to_realized_loss),
)?;
// net_realized_pnl = profit - loss
@@ -605,18 +567,18 @@ impl RealizedMetrics {
Ok(())
})?;
self.indexes_to_value_created.compute_rest(
self.indexes_to_value_created.derive_from(
indexes,
starting_indexes,
&self.height_to_value_created,
exit,
Some(&self.height_to_value_created),
)?;
self.indexes_to_value_destroyed.compute_rest(
self.indexes_to_value_destroyed.derive_from(
indexes,
starting_indexes,
&self.height_to_value_destroyed,
exit,
Some(&self.height_to_value_destroyed),
)?;
Ok(())
@@ -651,7 +613,7 @@ impl RealizedMetrics {
price,
starting_indexes,
exit,
Some(self.indexes_to_realized_price.dateindex.unwrap_last()),
Some(&self.indexes_to_realized_price.dateindex.0),
)?;
}
@@ -660,7 +622,7 @@ impl RealizedMetrics {
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
self.indexes_to_realized_cap.dateindex.unwrap_last(),
&self.indexes_to_realized_cap.dateindex.0,
30,
exit,
)?;
@@ -670,8 +632,8 @@ impl RealizedMetrics {
// SOPR = value_created / value_destroyed
self.dateindex_to_sopr.compute_divide(
starting_indexes.dateindex,
self.indexes_to_value_created.dateindex.unwrap_sum(),
self.indexes_to_value_destroyed.dateindex.unwrap_sum(),
&self.indexes_to_value_created.dateindex.0,
&self.indexes_to_value_destroyed.dateindex.0,
exit,
)?;
@@ -692,17 +654,13 @@ impl RealizedMetrics {
// Optional: adjusted SOPR (lazy: cohort - up_to_1h)
if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = (
self.dateindex_to_adjusted_sopr.as_mut(),
self.indexes_to_adjusted_value_created
.as_ref()
.and_then(|v| v.dateindex.sum.as_ref()),
self.indexes_to_adjusted_value_destroyed
.as_ref()
.and_then(|v| v.dateindex.sum.as_ref()),
self.indexes_to_adjusted_value_created.as_ref(),
self.indexes_to_adjusted_value_destroyed.as_ref(),
) {
adjusted_sopr.compute_divide(
starting_indexes.dateindex,
adj_created.as_ref(),
adj_destroyed.as_ref(),
&*adj_created.dateindex,
&*adj_destroyed.dateindex,
exit,
)?;
@@ -728,8 +686,8 @@ impl RealizedMetrics {
// sell_side_risk_ratio = realized_value / realized_cap
self.dateindex_to_sell_side_risk_ratio.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_realized_value.dateindex.unwrap_sum(),
self.indexes_to_realized_cap.dateindex.unwrap_last(),
&self.indexes_to_realized_value.dateindex.0,
&self.indexes_to_realized_cap.dateindex.0,
exit,
)?;
@@ -752,9 +710,7 @@ impl RealizedMetrics {
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl
.dateindex
.unwrap_cumulative(),
&self.indexes_to_net_realized_pnl.dateindex.cumulative.0,
30,
exit,
)?;
@@ -766,10 +722,10 @@ impl RealizedMetrics {
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex
.u(),
self.indexes_to_realized_cap.dateindex.unwrap_last(),
&self
.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex,
&self.indexes_to_realized_cap.dateindex.0,
exit,
)?;
Ok(())
@@ -781,9 +737,9 @@ impl RealizedMetrics {
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
self.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex
.u(),
&self
.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex,
dateindex_to_market_cap,
exit,
)?;
@@ -811,8 +767,8 @@ impl RealizedMetrics {
if let Some(ratio) = self.dateindex_to_realized_profit_to_loss_ratio.as_mut() {
ratio.compute_divide(
starting_indexes.dateindex,
self.indexes_to_realized_profit.dateindex.unwrap_sum(),
self.indexes_to_realized_loss.dateindex.unwrap_sum(),
&self.indexes_to_realized_profit.dateindex.sum.0,
&self.indexes_to_realized_loss.dateindex.sum.0,
exit,
)?;
}

View File

@@ -4,7 +4,7 @@ use brk_types::{Bitcoin, Dollars, Height, Sats, StoredF32, StoredF64, Version};
use vecdb::{IterableCloneableVec, LazyVecFrom2};
use crate::internal::{
LazyVecsFrom2FromDateIndex, NegPercentageDollarsF32, NegRatio32, PercentageBtcF64,
BinaryDateLast, NegPercentageDollarsF32, NegRatio32, PercentageBtcF64,
PercentageDollarsF32, PercentageSatsF64, Ratio32,
};
@@ -15,28 +15,31 @@ use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics};
#[derive(Clone, Traversable)]
pub struct RelativeMetrics {
// === Supply Relative to Circulating Supply (lazy from global supply) ===
// KISS: both sources are ComputedVecsDateLast<Sats>
pub indexes_to_supply_rel_to_circulating_supply:
Option<LazyVecsFrom2FromDateIndex<StoredF64, Sats, Sats>>,
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
// === Supply in Profit/Loss Relative to Own Supply (lazy) ===
pub height_to_supply_in_profit_rel_to_own_supply:
LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>,
pub height_to_supply_in_loss_rel_to_own_supply:
LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>,
// KISS: both unrealized and supply are now KISS types
pub indexes_to_supply_in_profit_rel_to_own_supply:
LazyVecsFrom2FromDateIndex<StoredF64, Sats, Sats>,
BinaryDateLast<StoredF64, Sats, Sats>,
pub indexes_to_supply_in_loss_rel_to_own_supply:
LazyVecsFrom2FromDateIndex<StoredF64, Sats, Sats>,
BinaryDateLast<StoredF64, Sats, Sats>,
// === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) ===
pub height_to_supply_in_profit_rel_to_circulating_supply:
Option<LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>>,
pub height_to_supply_in_loss_rel_to_circulating_supply:
Option<LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>>,
// KISS: both unrealized and global_supply are now KISS types
pub indexes_to_supply_in_profit_rel_to_circulating_supply:
Option<LazyVecsFrom2FromDateIndex<StoredF64, Sats, Sats>>,
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
pub indexes_to_supply_in_loss_rel_to_circulating_supply:
Option<LazyVecsFrom2FromDateIndex<StoredF64, Sats, Sats>>,
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
// === Unrealized vs Market Cap (lazy from global market cap) ===
pub height_to_unrealized_profit_rel_to_market_cap:
@@ -47,18 +50,20 @@ pub struct RelativeMetrics {
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + ComputedVecsDateLast
pub indexes_to_unrealized_profit_rel_to_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// KISS: both ComputedVecsDateLast
pub indexes_to_net_unrealized_pnl_rel_to_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// === NUPL (Net Unrealized Profit/Loss) ===
// Proxy for indexes_to_net_unrealized_pnl_rel_to_market_cap
pub indexes_to_nupl: Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
// KISS: both ComputedVecsDateLast
pub indexes_to_nupl: Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// === Unrealized vs Own Market Cap (lazy) ===
pub height_to_unrealized_profit_rel_to_own_market_cap:
@@ -69,14 +74,16 @@ pub struct RelativeMetrics {
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + ComputedVecsDateLast
pub indexes_to_unrealized_profit_rel_to_own_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_own_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// KISS: both ComputedVecsDateLast
pub indexes_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// === Unrealized vs Own Total Unrealized PnL (lazy) ===
pub height_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
@@ -87,14 +94,15 @@ pub struct RelativeMetrics {
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + DerivedDateLast
pub indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<LazyVecsFrom2FromDateIndex<StoredF32, Dollars, Dollars>>,
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
}
impl RelativeMetrics {
@@ -115,6 +123,7 @@ impl RelativeMetrics {
// Global sources from "all" cohort
let global_supply_sats = all_supply.map(|s| &s.indexes_to_supply.sats);
let global_supply_sats_dateindex = all_supply.map(|s| &s.indexes_to_supply.sats_dateindex);
let global_supply_btc = all_supply.map(|s| &s.height_to_supply_value.bitcoin);
let global_market_cap = all_supply.and_then(|s| s.indexes_to_supply.dollars.as_ref());
let global_market_cap_height =
@@ -129,10 +138,12 @@ impl RelativeMetrics {
indexes_to_supply_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageSatsF64>(
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_rel_to_circulating_supply"),
cfg.version + v1,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
)
}),
@@ -159,20 +170,23 @@ impl RelativeMetrics {
supply.height_to_supply_value.bitcoin.boxed_clone(),
),
indexes_to_supply_in_profit_rel_to_own_supply:
LazyVecsFrom2FromDateIndex::from_computed::<PercentageSatsF64>(
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_own_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_profit.boxed_clone(),
&unrealized.indexes_to_supply_in_profit.sats,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
),
indexes_to_supply_in_loss_rel_to_own_supply:
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_own_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_loss.boxed_clone(),
&unrealized.indexes_to_supply_in_loss.sats,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
),
indexes_to_supply_in_loss_rel_to_own_supply: LazyVecsFrom2FromDateIndex::from_computed::<
PercentageSatsF64,
>(
&cfg.name("supply_in_loss_rel_to_own_supply"),
cfg.version + v1,
&unrealized.indexes_to_supply_in_loss.sats,
&supply.indexes_to_supply.sats,
),
// === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) ===
height_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
@@ -204,20 +218,24 @@ impl RelativeMetrics {
indexes_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageSatsF64>(
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_profit.boxed_clone(),
&unrealized.indexes_to_supply_in_profit.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
)
}),
indexes_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageSatsF64>(
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_loss.boxed_clone(),
&unrealized.indexes_to_supply_in_loss.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
)
}),
@@ -255,32 +273,36 @@ impl RelativeMetrics {
mc.boxed_clone(),
)
}),
// KISS: market_cap is now ComputedVecsDateLast
indexes_to_unrealized_profit_rel_to_market_cap: global_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
&cfg.name("unrealized_profit_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
mc,
)
}),
indexes_to_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
mc,
)
}),
indexes_to_neg_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<NegPercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<NegPercentageDollarsF32>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
mc,
)
}),
indexes_to_net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
&cfg.name("net_unrealized_pnl_rel_to_market_cap"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
@@ -290,7 +312,7 @@ impl RelativeMetrics {
// NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap
indexes_to_nupl: global_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
&cfg.name("nupl"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
@@ -347,12 +369,14 @@ impl RelativeMetrics {
})
})
.flatten(),
// KISS: own_market_cap is now ComputedVecsDateLast
indexes_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
&cfg.name("unrealized_profit_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
mc,
)
@@ -362,9 +386,10 @@ impl RelativeMetrics {
indexes_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
mc,
)
@@ -374,9 +399,10 @@ impl RelativeMetrics {
indexes_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<NegPercentageDollarsF32>(
BinaryDateLast::from_derived_last_and_computed_last::<NegPercentageDollarsF32>(
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
mc,
)
@@ -386,7 +412,7 @@ impl RelativeMetrics {
indexes_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
LazyVecsFrom2FromDateIndex::from_computed::<PercentageDollarsF32>(
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
&cfg.name("net_unrealized_pnl_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
@@ -430,31 +456,34 @@ impl RelativeMetrics {
)
}),
indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
BinaryDateLast::from_derived_last_and_computed_last::<Ratio32>(
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
BinaryDateLast::from_derived_last_and_computed_last::<Ratio32>(
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<NegRatio32>(
BinaryDateLast::from_derived_last_and_computed_last::<NegRatio32>(
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecsFrom2FromDateIndex::from_computed::<Ratio32>(
BinaryDateLast::from_computed_both_last::<Ratio32>(
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_net_unrealized_pnl,

View File

@@ -10,9 +10,8 @@ use vecdb::{
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromHeight,
HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, LazyHeightValueVecs,
LazyValueVecsFromDateIndex, Source, VecBuilderOptions,
DerivedComputedBlockLast, HalfClosePriceTimesSats, HalveDollars, HalveSats,
HalveSatsToBitcoin, LazyBlockValue, LazyDerivedBlockValue, LazyValueDateLast, ValueDateLast,
},
price,
};
@@ -22,26 +21,13 @@ use super::ImportConfig;
/// Supply and UTXO count metrics for a cohort.
#[derive(Clone, Traversable)]
pub struct SupplyMetrics {
/// Total supply at each height
pub height_to_supply: EagerVec<PcoVec<Height, Sats>>,
/// Supply value in BTC and USD (computed from height_to_supply)
pub height_to_supply_value: ComputedHeightValueVecs,
/// Supply indexed by date
pub indexes_to_supply: ComputedValueVecsFromDateIndex,
/// UTXO count at each height
pub height_to_supply_value: LazyDerivedBlockValue,
pub indexes_to_supply: ValueDateLast,
pub height_to_utxo_count: EagerVec<PcoVec<Height, StoredU64>>,
/// UTXO count indexed by various dimensions
pub indexes_to_utxo_count: ComputedVecsFromHeight<StoredU64>,
/// Half of supply value (used for computing median) - lazy from supply_value
pub height_to_supply_half_value: LazyHeightValueVecs,
/// Half of supply indexed by date - lazy from indexes_to_supply
pub indexes_to_supply_half: LazyValueVecsFromDateIndex,
pub indexes_to_utxo_count: DerivedComputedBlockLast<StoredU64>,
pub height_to_supply_half_value: LazyBlockValue,
pub indexes_to_supply_half: LazyValueDateLast,
}
impl SupplyMetrics {
@@ -49,7 +35,6 @@ impl SupplyMetrics {
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let compute_dollars = cfg.compute_dollars();
let last = VecBuilderOptions::default().add_last();
let height_to_supply: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("supply"), cfg.version)?;
@@ -58,26 +43,23 @@ impl SupplyMetrics {
.price
.map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone());
let height_to_supply_value = ComputedHeightValueVecs::forced_import(
cfg.db,
let height_to_supply_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply"),
Source::Vec(height_to_supply.boxed_clone()),
height_to_supply.boxed_clone(),
cfg.version,
price_source.clone(),
)?;
);
let indexes_to_supply = ComputedValueVecsFromDateIndex::forced_import(
let indexes_to_supply = ValueDateLast::forced_import(
cfg.db,
&cfg.name("supply"),
Source::Compute,
cfg.version + v1,
last,
compute_dollars,
cfg.indexes,
)?;
// Create lazy supply_half from supply sources
let height_to_supply_half_value = LazyHeightValueVecs::from_sources::<
let height_to_supply_half_value = LazyBlockValue::from_sources::<
HalveSats,
HalveSatsToBitcoin,
HalfClosePriceTimesSats,
@@ -89,7 +71,7 @@ impl SupplyMetrics {
);
let indexes_to_supply_half =
LazyValueVecsFromDateIndex::from_source::<HalveSats, HalveSatsToBitcoin, HalveDollars>(
LazyValueDateLast::from_source::<HalveSats, HalveSatsToBitcoin, HalveDollars>(
&cfg.name("supply_half"),
&indexes_to_supply,
cfg.version,
@@ -99,13 +81,12 @@ impl SupplyMetrics {
EagerVec::forced_import(cfg.db, &cfg.name("utxo_count"), cfg.version)?;
Ok(Self {
indexes_to_utxo_count: ComputedVecsFromHeight::forced_import(
indexes_to_utxo_count: DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("utxo_count"),
Source::Vec(height_to_utxo_count.boxed_clone()),
height_to_utxo_count.boxed_clone(),
cfg.version,
cfg.indexes,
last,
)?,
height_to_supply,
height_to_supply_value,
@@ -208,11 +189,11 @@ impl SupplyMetrics {
Ok(())
})?;
self.indexes_to_utxo_count.compute_rest(
self.indexes_to_utxo_count.derive_from(
indexes,
starting_indexes,
&self.height_to_utxo_count,
exit,
Some(&self.height_to_utxo_count),
)?;
Ok(())

View File

@@ -11,8 +11,8 @@ use crate::{
ComputeIndexes,
distribution::state::UnrealizedState,
internal::{
ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromDateIndex,
DollarsMinus, DollarsPlus, LazyVecsFromDateIndex, Source, VecBuilderOptions,
ComputedDateLast, DerivedDateLast, DollarsMinus, DollarsPlus, LazyDateLast,
LazyDerivedBlockValue, ValueDerivedDateLast,
},
};
@@ -23,42 +23,41 @@ use super::ImportConfig;
pub struct UnrealizedMetrics {
// === Supply in Profit/Loss ===
pub height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>>,
pub indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex,
pub indexes_to_supply_in_profit: ValueDerivedDateLast,
pub height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>>,
pub indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex,
pub indexes_to_supply_in_loss: ValueDerivedDateLast,
pub dateindex_to_supply_in_profit: EagerVec<PcoVec<DateIndex, Sats>>,
pub dateindex_to_supply_in_loss: EagerVec<PcoVec<DateIndex, Sats>>,
pub height_to_supply_in_profit_value: ComputedHeightValueVecs,
pub height_to_supply_in_loss_value: ComputedHeightValueVecs,
pub height_to_supply_in_profit_value: LazyDerivedBlockValue,
pub height_to_supply_in_loss_value: LazyDerivedBlockValue,
// === Unrealized Profit/Loss ===
pub height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_unrealized_profit: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_unrealized_profit: DerivedDateLast<Dollars>,
pub height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_unrealized_loss: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_unrealized_loss: DerivedDateLast<Dollars>,
pub dateindex_to_unrealized_profit: EagerVec<PcoVec<DateIndex, Dollars>>,
pub dateindex_to_unrealized_loss: EagerVec<PcoVec<DateIndex, Dollars>>,
// === Negated and Net ===
pub height_to_neg_unrealized_loss: LazyVecFrom1<Height, Dollars, Height, Dollars>,
pub indexes_to_neg_unrealized_loss: LazyVecsFromDateIndex<Dollars>,
pub indexes_to_neg_unrealized_loss: LazyDateLast<Dollars>,
// net = profit - loss (height is lazy, indexes computed)
pub height_to_net_unrealized_pnl:
LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
pub indexes_to_net_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_net_unrealized_pnl: ComputedDateLast<Dollars>,
// total = profit + loss (height is lazy, indexes computed)
pub height_to_total_unrealized_pnl:
LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
pub indexes_to_total_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
pub indexes_to_total_unrealized_pnl: ComputedDateLast<Dollars>,
}
impl UnrealizedMetrics {
/// Import unrealized metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let compute_dollars = cfg.compute_dollars();
let last = VecBuilderOptions::default().add_last();
let dateindex_to_supply_in_profit =
EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version)?;
@@ -76,33 +75,29 @@ impl UnrealizedMetrics {
height_to_unrealized_loss.boxed_clone(),
);
let indexes_to_unrealized_loss = ComputedVecsFromDateIndex::forced_import(
cfg.db,
let indexes_to_unrealized_loss = DerivedDateLast::from_source(
&cfg.name("unrealized_loss"),
Source::Vec(dateindex_to_unrealized_loss.boxed_clone()),
cfg.version,
dateindex_to_unrealized_loss.boxed_clone(),
cfg.indexes,
last,
)?;
);
let indexes_to_neg_unrealized_loss = LazyVecsFromDateIndex::from_computed::<Negate>(
let indexes_to_neg_unrealized_loss = LazyDateLast::from_derived::<Negate>(
&cfg.name("neg_unrealized_loss"),
cfg.version,
Some(dateindex_to_unrealized_loss.boxed_clone()),
dateindex_to_unrealized_loss.boxed_clone(),
&indexes_to_unrealized_loss,
);
// Extract profit sources for lazy net/total vecs
let height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version)?;
let indexes_to_unrealized_profit = ComputedVecsFromDateIndex::forced_import(
cfg.db,
let indexes_to_unrealized_profit = DerivedDateLast::from_source(
&cfg.name("unrealized_profit"),
Source::Vec(dateindex_to_unrealized_profit.boxed_clone()),
cfg.version,
dateindex_to_unrealized_profit.boxed_clone(),
cfg.indexes,
last,
)?;
);
// Create lazy height vecs from profit/loss sources
let height_to_net_unrealized_pnl = LazyVecFrom2::transformed::<DollarsMinus>(
@@ -119,21 +114,17 @@ impl UnrealizedMetrics {
);
// indexes_to_net/total remain computed (needed by relative.rs)
let indexes_to_net_unrealized_pnl = ComputedVecsFromDateIndex::forced_import(
let indexes_to_net_unrealized_pnl = ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl"),
Source::Compute,
cfg.version,
cfg.indexes,
last,
)?;
let indexes_to_total_unrealized_pnl = ComputedVecsFromDateIndex::forced_import(
let indexes_to_total_unrealized_pnl = ComputedDateLast::forced_import(
cfg.db,
&cfg.name("total_unrealized_pnl"),
Source::Compute,
cfg.version,
cfg.indexes,
last,
)?;
let height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>> =
@@ -145,40 +136,36 @@ impl UnrealizedMetrics {
.price
.map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone());
let height_to_supply_in_profit_value = ComputedHeightValueVecs::forced_import(
cfg.db,
let height_to_supply_in_profit_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply_in_profit"),
Source::Vec(height_to_supply_in_profit.boxed_clone()),
height_to_supply_in_profit.boxed_clone(),
cfg.version,
price_source.clone(),
)?;
let height_to_supply_in_loss_value = ComputedHeightValueVecs::forced_import(
cfg.db,
);
let height_to_supply_in_loss_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply_in_loss"),
Source::Vec(height_to_supply_in_loss.boxed_clone()),
height_to_supply_in_loss.boxed_clone(),
cfg.version,
price_source,
)?;
);
Ok(Self {
// === Supply in Profit/Loss ===
height_to_supply_in_profit,
indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex::forced_import(
indexes_to_supply_in_profit: ValueDerivedDateLast::from_source(
cfg.db,
&cfg.name("supply_in_profit"),
Source::Vec(dateindex_to_supply_in_profit.boxed_clone()),
dateindex_to_supply_in_profit.boxed_clone(),
cfg.version,
last,
compute_dollars,
cfg.indexes,
)?,
height_to_supply_in_loss,
indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex::forced_import(
indexes_to_supply_in_loss: ValueDerivedDateLast::from_source(
cfg.db,
&cfg.name("supply_in_loss"),
Source::Vec(dateindex_to_supply_in_loss.boxed_clone()),
dateindex_to_supply_in_loss.boxed_clone(),
cfg.version,
last,
compute_dollars,
cfg.indexes,
)?,
@@ -362,31 +349,14 @@ impl UnrealizedMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_supply_in_profit.compute_rest(
price,
starting_indexes,
exit,
Some(&self.dateindex_to_supply_in_profit),
)?;
// KISS: compute_rest doesn't need source vec - lazy vecs are set up during import
self.indexes_to_supply_in_profit
.compute_rest(price, starting_indexes, exit)?;
self.indexes_to_supply_in_loss.compute_rest(
price,
starting_indexes,
exit,
Some(&self.dateindex_to_supply_in_loss),
)?;
self.indexes_to_supply_in_loss
.compute_rest(price, starting_indexes, exit)?;
self.indexes_to_unrealized_profit.compute_rest(
starting_indexes,
exit,
Some(&self.dateindex_to_unrealized_profit),
)?;
self.indexes_to_unrealized_loss.compute_rest(
starting_indexes,
exit,
Some(&self.dateindex_to_unrealized_loss),
)?;
// indexes_to_unrealized_profit/loss are Derived - no compute needed (lazy only)
// height_to_net/total are lazy, but indexes still need compute
// total_unrealized_pnl = profit + loss

View File

@@ -2,12 +2,9 @@ use std::path::Path;
use brk_error::Result;
use brk_types::{Sats, SupplyState};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use super::{
super::cost_basis::RealizedState,
base::CohortState,
};
use super::{super::cost_basis::RealizedState, base::CohortState};
#[derive(Clone, Deref, DerefMut)]
pub struct UTXOCohortState(CohortState);

View File

@@ -7,7 +7,7 @@ use std::{
use brk_error::{Error, Result};
use brk_types::{CentsCompact, Dollars, Height, Sats, SupplyState};
use derive_deref::{Deref, DerefMut};
use derive_more::{Deref, DerefMut};
use pco::standalone::{simple_decompress, simpler_compress};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};

View File

@@ -20,7 +20,7 @@ use crate::{
state::BlockState,
},
indexes, inputs,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
internal::ComputedBlockLast,
outputs, price, transactions,
};
@@ -49,8 +49,8 @@ pub struct Vecs {
pub addresstype_to_indexes_to_addr_count: AddressTypeToIndexesToAddressCount,
pub addresstype_to_indexes_to_empty_addr_count: AddressTypeToIndexesToAddressCount,
pub indexes_to_addr_count: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_empty_addr_count: ComputedVecsFromHeight<StoredU64>,
pub indexes_to_addr_count: ComputedBlockLast<StoredU64>,
pub indexes_to_empty_addr_count: ComputedBlockLast<StoredU64>,
pub loadedaddressindex_to_loadedaddressindex:
LazyVecFrom1<LoadedAddressIndex, LoadedAddressIndex, LoadedAddressIndex, LoadedAddressData>,
pub emptyaddressindex_to_emptyaddressindex:
@@ -123,21 +123,17 @@ impl Vecs {
.with_saved_stamped_changes(SAVED_STAMPED_CHANGES),
)?,
indexes_to_addr_count: ComputedVecsFromHeight::forced_import(
indexes_to_addr_count: ComputedBlockLast::forced_import(
&db,
"addr_count",
Source::Compute,
version,
indexes,
VecBuilderOptions::default().add_last(),
)?,
indexes_to_empty_addr_count: ComputedVecsFromHeight::forced_import(
indexes_to_empty_addr_count: ComputedBlockLast::forced_import(
&db,
"empty_addr_count",
Source::Compute,
version,
indexes,
VecBuilderOptions::default().add_last(),
)?,
addresstype_to_indexes_to_addr_count:
@@ -380,11 +376,12 @@ impl Vecs {
.as_ref()
.cloned();
// KISS: dateindex is no longer Option, just clone directly
let dateindex_to_market_cap = supply_metrics
.indexes_to_supply
.dollars
.as_ref()
.and_then(|v| v.dateindex.as_ref().cloned());
.map(|v| v.dateindex.clone());
let height_to_market_cap_ref = height_to_market_cap.as_ref();
let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref();

View File

@@ -13,12 +13,12 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_count.compute_rest(
self.indexes_to_count.derive_from(
indexer,
indexes,
starting_indexes,
&indexes.transaction.txindex_to_input_count,
exit,
Some(&indexes.transaction.txindex_to_input_count),
)?;
Ok(())

View File

@@ -1,33 +1,15 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, IterableCloneableVec};
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromTxindex, Source, VecBuilderOptions},
};
use crate::{indexes, internal::DerivedTxFull};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let full_stats = || {
VecBuilderOptions::default()
.add_average()
.add_minmax()
.add_percentiles()
.add_sum()
.add_cumulative()
};
let indexes_to_count =
DerivedTxFull::forced_import(db, "input_count", version, indexes)?;
Ok(Self {
indexes_to_count: ComputedVecsFromTxindex::forced_import(
db,
"input_count",
Source::Vec(indexes.transaction.txindex_to_input_count.boxed_clone()),
version,
indexes,
full_stats(),
)?,
})
Ok(Self { indexes_to_count })
}
}

View File

@@ -1,9 +1,9 @@
use brk_traversable::Traversable;
use brk_types::StoredU64;
use crate::internal::ComputedVecsFromTxindex;
use crate::internal::DerivedTxFull;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_count: ComputedVecsFromTxindex<StoredU64>,
pub indexes_to_count: DerivedTxFull<StoredU64>,
}

View File

@@ -0,0 +1,59 @@
//! Lazy average-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy average over a finer-indexed source.
///
/// Newtype around a [`LazyVecFrom2`]: for each coarse index `I` it averages
/// the `T` values of the finer `S1I`-indexed source that `I` covers (see
/// `from_source`). The second input (`I`-indexed, values `S2T`) is used only
/// to bound the valid coarse range, not for its values.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "average")]
pub struct LazyAverage<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyAverage<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Build the lazy `{name}_average` vec from a finer-indexed `source`.
    ///
    /// `len_source` only bounds the valid coarse range; its values are never
    /// read. Returns `None` past that bound or when `i` covers no values.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_average"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Out of the coarse range: nothing to aggregate.
                if i.to_usize() >= len_source.vec_len() {
                    return None;
                }
                // Fold the finer-indexed values covered by `i` into (sum, count).
                let (total, count) = S1I::inclusive_range_from(i, source.vec_len())
                    .filter_map(|idx| source.get_at(idx))
                    .fold((T::from(0), 0usize), |(mut acc, n), v| {
                        acc += v;
                        (acc, n + 1)
                    });
                // An empty covered range has no average.
                if count == 0 { None } else { Some(total / count) }
            },
        ))
    }
}

View File

@@ -0,0 +1,48 @@
//! Lazy cumulative-only aggregation (takes last value from cumulative source).
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy cumulative view over an already-cumulative finer-indexed source.
///
/// Newtype around a [`LazyVecFrom2`]: for each coarse index `I` it reads the
/// last finer `S1I` value covered by `I` (the running total up to that point
/// — see `from_source`). The `I`-indexed `S2T` input only bounds the valid
/// coarse range.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "cumulative")]
pub struct LazyCumulative<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyCumulative<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Build the lazy `{name}_cumulative` vec.
    ///
    /// `cumulative_source` must already hold running totals at the finer
    /// index; this vec simply surfaces the last covered entry per coarse
    /// index. `len_source` only bounds the valid coarse range.
    pub fn from_source(
        name: &str,
        version: Version,
        cumulative_source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_cumulative"),
            version + VERSION,
            cumulative_source,
            len_source,
            |i: I, cum, len_source| {
                // Valid only while `len_source` has an entry for `i`; the
                // running total at `i` is the last finer value it covers.
                (i.to_usize() < len_source.vec_len())
                    .then(|| cum.get_at(S1I::max_from(i, cum.vec_len())))
                    .flatten()
            },
        ))
    }
}

View File

@@ -0,0 +1,52 @@
//! Lazy distribution pattern (average, min, max).
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex};
use super::{LazyAverage, LazyMax, LazyMin};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Bundle of the lazy distribution aggregates (average, min, max) over one
/// finer-indexed source family; built via `from_distribution`.
#[derive(Clone, Traversable)]
pub struct LazyDistribution<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: ComputedVecValue,
{
    // Per-coarse-index mean of the covered finer values.
    #[traversable(flatten)]
    pub average: LazyAverage<I, T, S1I, S2T>,
    // Per-coarse-index minimum of the covered finer values.
    #[traversable(flatten)]
    pub min: LazyMin<I, T, S1I, S2T>,
    // Per-coarse-index maximum of the covered finer values.
    #[traversable(flatten)]
    pub max: LazyMax<I, T, S1I, S2T>,
}
impl<I, T, S1I, S2T> LazyDistribution<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: ComputedVecValue,
{
    /// Wire the three lazy aggregates to their respective finer sources.
    ///
    /// All share `name`, the same bumped version, and the same coarse
    /// `len_source` (cloned cheaply for the first two, moved into the last).
    pub fn from_distribution(
        name: &str,
        version: Version,
        source_average: IterableBoxedVec<S1I, T>,
        source_min: IterableBoxedVec<S1I, T>,
        source_max: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let versioned = version + VERSION;
        let average =
            LazyAverage::from_source(name, versioned, source_average, len_source.clone());
        let min = LazyMin::from_source(name, versioned, source_min, len_source.clone());
        let max = LazyMax::from_source(name, versioned, source_max, len_source);
        Self { average, min, max }
    }
}

View File

@@ -0,0 +1,48 @@
//! Lazy first-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy first-value view over a finer-indexed source.
///
/// Newtype around a [`LazyVecFrom2`]: for each coarse index `I` it reads the
/// earliest finer `S1I` value covered by `I` (see `from_source`). The
/// `I`-indexed `S2T` input only bounds the valid coarse range.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "first")]
pub struct LazyFirst<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyFirst<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Build the lazy "first" vec: per coarse index, the earliest finer value
    /// it covers. `len_source` only bounds the valid coarse range.
    ///
    /// NOTE(review): unlike `LazyAverage`/`LazyMin`/`LazyMax`, the vec is
    /// registered under `name` with no `_first` suffix — confirm intended.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            name,
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Valid only while `len_source` has an entry for `i`.
                (i.to_usize() < len_source.vec_len())
                    .then(|| source.get_at(S1I::min_from(i)))
                    .flatten()
            },
        ))
    }
}

View File

@@ -0,0 +1,49 @@
//! Lazy last-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy last-value view over a finer-indexed source.
///
/// Newtype around a [`LazyVecFrom2`]: for each coarse index `I` it reads the
/// latest finer `S1I` value covered by `I` (see `from_source`). The
/// `I`-indexed `S2T` input only bounds the valid coarse range.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "last")]
pub struct LazyLast<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyLast<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Build the lazy "last" vec: per coarse index, the latest finer value it
    /// covers. Registered under plain `name` (last is the canonical
    /// representation here, matching the `wrap = "last"` traversal).
    /// `len_source` only bounds the valid coarse range.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            name,
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Valid only while `len_source` has an entry for `i`.
                (i.to_usize() < len_source.vec_len())
                    .then(|| source.get_at(S1I::max_from(i, source.vec_len())))
                    .flatten()
            },
        ))
    }
}

View File

@@ -0,0 +1,50 @@
//! Lazy max-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy maximum over a finer-indexed source.
///
/// Newtype around a [`LazyVecFrom2`]: for each coarse index `I` it takes the
/// maximum of the finer `S1I` values covered by `I` (see `from_source`). The
/// `I`-indexed `S2T` input only bounds the valid coarse range.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "max")]
pub struct LazyMax<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyMax<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Build the lazy `{name}_max` vec: per coarse index, the maximum of the
    /// finer values it covers. `len_source` only bounds the valid coarse
    /// range; an empty covered range yields `None`.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_max"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Valid only while `len_source` has an entry for `i`;
                // `Iterator::max` already returns None on an empty range.
                (i.to_usize() < len_source.vec_len())
                    .then(|| {
                        S1I::inclusive_range_from(i, source.vec_len())
                            .filter_map(|idx| source.get_at(idx))
                            .max()
                    })
                    .flatten()
            },
        ))
    }
}

View File

@@ -0,0 +1,50 @@
//! Lazy min-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;

/// Lazily exposes, for each coarser index, the minimum value found among the
/// finer-indexed `source` entries that fall inside it.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "min")]
pub struct LazyMin<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;

impl<I, T, S1I, S2T> LazyMin<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds the lazy view; the vector is named `{name}_min` and its
    /// materialized length is bounded by `len_source`.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let inner = LazyVecFrom2::init(
            &format!("{name}_min"),
            version + VERSION,
            source,
            len_source,
            |coarse: I, source, len_source| {
                // Nothing materialized yet for this coarse index.
                if coarse.to_usize() >= len_source.vec_len() {
                    return None;
                }
                let mut best: Option<T> = None;
                for fine in S1I::inclusive_range_from(coarse, source.vec_len()) {
                    if let Some(v) = source.get_at(fine) {
                        best = Some(match best {
                            // Like `Iterator::min`, prefer the earlier of equal values.
                            Some(b) if b <= v => b,
                            _ => v,
                        });
                    }
                }
                best
            },
        );
        Self(inner)
    }
}

View File

@@ -0,0 +1,23 @@
//! Lazy aggregation primitives (finer index → coarser index).
mod average;
mod cumulative;
mod distribution;
mod first;
mod last;
mod max;
mod min;
mod stats_aggregate;
mod sum;
mod sum_cum;
pub use average::*;
pub use cumulative::*;
pub use distribution::*;
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;
pub use stats_aggregate::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,61 @@
//! Lazy stats aggregate pattern (average, min, max, sum, cumulative).
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue};
use super::{LazyAverage, LazyCumulative, LazyMax, LazyMin, LazySum};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;

/// Bundle of the five lazy aggregations (average/min/max/sum/cumulative)
/// computed over the same coarser index, each fed by its own source vector.
#[derive(Clone, Traversable)]
pub struct LazyFull<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue,
{
    #[traversable(flatten)]
    pub average: LazyAverage<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub min: LazyMin<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub max: LazyMax<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub sum: LazySum<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub cumulative: LazyCumulative<I, T, S1I, S2T>,
}

impl<I, T, S1I, S2T> LazyFull<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Wires each statistic to its dedicated source vector; all five share
    /// the same `len_source`, which bounds the number of coarse entries.
    #[allow(clippy::too_many_arguments)]
    pub fn from_stats_aggregate(
        name: &str,
        version: Version,
        source_average: IterableBoxedVec<S1I, T>,
        source_min: IterableBoxedVec<S1I, T>,
        source_max: IterableBoxedVec<S1I, T>,
        source_sum: IterableBoxedVec<S1I, T>,
        source_cumulative: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        // Stamp the per-module version once for every member.
        let version = version + VERSION;
        let average = LazyAverage::from_source(name, version, source_average, len_source.clone());
        let min = LazyMin::from_source(name, version, source_min, len_source.clone());
        let max = LazyMax::from_source(name, version, source_max, len_source.clone());
        let sum = LazySum::from_source(name, version, source_sum, len_source.clone());
        let cumulative = LazyCumulative::from_source(name, version, source_cumulative, len_source);
        Self {
            average,
            min,
            max,
            sum,
            cumulative,
        }
    }
}

View File

@@ -0,0 +1,60 @@
//! Lazy sum-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;

/// Lazily exposes, for each coarser index, the sum of the finer-indexed
/// `source` entries that fall inside it.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "sum")]
pub struct LazySum<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;

impl<I, T, S1I, S2T> LazySum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds the lazy view; the vector is named `{name}_sum` and its
    /// materialized length is bounded by `len_source`.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let inner = LazyVecFrom2::init(
            &format!("{name}_sum"),
            version + VERSION,
            source,
            len_source,
            |coarse: I, source, len_source| {
                // Nothing materialized yet for this coarse index.
                if coarse.to_usize() >= len_source.vec_len() {
                    return None;
                }
                let mut total = T::from(0);
                let mut seen = false;
                S1I::inclusive_range_from(coarse, source.vec_len())
                    .flat_map(|fine| source.get_at(fine))
                    .for_each(|v| {
                        total += v;
                        seen = true;
                    });
                // Distinguish "no values in the period" from "sums to zero".
                seen.then_some(total)
            },
        );
        Self(inner)
    }
}

View File

@@ -0,0 +1,51 @@
//! Lazy sum + cumulative aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue};
use crate::internal::{ComputedVecValue, LazyCumulative, LazySum};
const VERSION: Version = Version::ZERO;

/// Pairs a lazy per-period sum with its lazy cumulative counterpart, each fed
/// by its own source vector but sharing the same length vector.
#[derive(Clone, Traversable)]
pub struct LazySumCum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue,
{
    #[traversable(flatten)]
    pub sum: LazySum<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub cumulative: LazyCumulative<I, T, S1I, S2T>,
}

impl<I, T, S1I, S2T> LazySumCum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds both members from their respective source vectors.
    pub fn from_sources(
        name: &str,
        version: Version,
        sum_source: IterableBoxedVec<S1I, T>,
        cumulative_source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        // Compute the stamped version once for both members.
        let v = version + VERSION;
        let sum = LazySum::from_source(name, v, sum_source, len_source.clone());
        let cumulative = LazyCumulative::from_source(name, v, cumulative_source, len_source);
        Self { sum, cumulative }
    }
}

View File

@@ -1,843 +0,0 @@
use brk_error::{Error, Result};
use brk_traversable::Traversable;
use brk_types::{CheckedSub, StoredU64, Version};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec,
VecIndex, VecValue,
};
use crate::utils::{OptionExt, get_percentile};
use super::super::ComputedVecValue;
const VERSION: Version = Version::ZERO;

/// Bundle of optional eagerly-computed aggregate vectors for one index/value
/// pair. Which fields are `Some` is decided by `VecBuilderOptions` at import
/// time; `compute`/`from_aligned` then fill exactly the requested ones.
#[derive(Clone, Debug, Traversable)]
pub struct EagerVecsBuilder<I, T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
{
    // Per-period positional values.
    pub first: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub last: Option<Box<EagerVec<PcoVec<I, T>>>>,
    // Per-period extremes.
    pub min: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub max: Option<Box<EagerVec<PcoVec<I, T>>>>,
    // Per-period percentile family (requires sorting the period's values).
    pub pct90: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct75: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub median: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct25: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct10: Option<Box<EagerVec<PcoVec<I, T>>>>,
    // Per-period totals: `average`/`sum` reset each period, `cumulative`
    // is a running total carried across periods.
    pub average: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub sum: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub cumulative: Option<Box<EagerVec<PcoVec<I, T>>>>,
}
impl<I, T> EagerVecsBuilder<I, T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
{
/// Open (or create) under `db` every aggregate vector enabled in `options`.
///
/// Naming scheme: if exactly one statistic is active, its vector takes the
/// bare `name`; otherwise each vector gets a `_<stat>` suffix. Exceptions:
/// `last` always uses the bare name, `sum` uses the bare name when no
/// competing stat (last/average/min/max) is active, and `cumulative` is
/// always suffixed.
pub fn forced_import(
    db: &Database,
    name: &str,
    version: Version,
    options: VecBuilderOptions,
) -> Result<Self> {
    let only_one_active = options.is_only_one_active();
    let suffix = |s: &str| format!("{name}_{s}");
    let maybe_suffix = |s: &str| {
        if only_one_active {
            name.to_string()
        } else {
            suffix(s)
        }
    };
    let v = version + VERSION;
    // Shorthand for importing one (possibly suffixed) vector.
    macro_rules! import {
        ($s:expr) => {
            Box::new(EagerVec::forced_import(db, &maybe_suffix($s), v).unwrap())
        };
    }
    let s = Self {
        first: options.first.then(|| import!("first")),
        // `last` is always stored under the bare name, never suffixed.
        last: options
            .last
            .then(|| Box::new(EagerVec::forced_import(db, name, v).unwrap())),
        min: options.min.then(|| import!("min")),
        max: options.max.then(|| import!("max")),
        median: options.median.then(|| import!("median")),
        average: options.average.then(|| import!("avg")),
        sum: options.sum.then(|| {
            // `sum` claims the bare name when no competing stat is active.
            let sum_name = if !options.last && !options.average && !options.min && !options.max
            {
                name.to_string()
            } else {
                maybe_suffix("sum")
            };
            Box::new(EagerVec::forced_import(db, &sum_name, v).unwrap())
        }),
        cumulative: options
            .cumulative
            .then(|| Box::new(EagerVec::forced_import(db, &suffix("cumulative"), v).unwrap())),
        pct90: options.pct90.then(|| import!("pct90")),
        pct75: options.pct75.then(|| import!("pct75")),
        pct25: options.pct25.then(|| import!("pct25")),
        pct10: options.pct10.then(|| import!("pct10")),
    };
    Ok(s)
}
/// True when any percentile vector (p90/p75/median/p25/p10) is requested.
#[inline]
fn needs_percentiles(&self) -> bool {
    self.pct90.is_some()
        || self.pct75.is_some()
        || self.median.is_some()
        || self.pct25.is_some()
        || self.pct10.is_some()
}
/// True when either extreme (min or max) is requested.
#[inline]
fn needs_minmax(&self) -> bool {
    self.max.is_some() || self.min.is_some()
}
/// True when a total (sum or cumulative) is requested.
#[inline]
fn needs_sum_or_cumulative(&self) -> bool {
    self.sum.is_some() || self.cumulative.is_some()
}
/// True when any statistic needing a full pass over the period's values
/// (average, sum, or cumulative) is requested.
#[inline]
fn needs_average_sum_or_cumulative(&self) -> bool {
    self.needs_sum_or_cumulative() || self.average.is_some()
}
/// Compute min/max in O(n) without sorting or collecting.
///
/// NOTE(review): the `unwrap()`s below panic if `iter` is empty while
/// min/max is requested — callers appear to only pass non-empty periods
/// (counts of 0 already panic in the `last` branch of `compute`); confirm.
#[inline]
fn compute_minmax_streaming(
    &mut self,
    index: usize,
    iter: impl Iterator<Item = T>,
) -> Result<()> {
    let mut min_val: Option<T> = None;
    let mut max_val: Option<T> = None;
    // Read the flags once so the loop body stays cheap.
    let need_min = self.min.is_some();
    let need_max = self.max.is_some();
    for val in iter {
        if need_min {
            min_val = Some(min_val.map_or(val, |m| if val < m { val } else { m }));
        }
        if need_max {
            max_val = Some(max_val.map_or(val, |m| if val > m { val } else { m }));
        }
    }
    if let Some(min) = self.min.as_mut() {
        min.truncate_push_at(index, min_val.unwrap())?;
    }
    if let Some(max) = self.max.as_mut() {
        max.truncate_push_at(index, max_val.unwrap())?;
    }
    Ok(())
}
/// Compute min/max from collected values in O(n) without sorting.
///
/// NOTE(review): `unwrap()` panics when `values` is empty and min/max is
/// requested — callers must guarantee non-empty slices; confirm.
#[inline]
fn compute_minmax_from_slice(&mut self, index: usize, values: &[T]) -> Result<()> {
    if let Some(min) = self.min.as_mut() {
        min.truncate_push_at(index, *values.iter().min().unwrap())?;
    }
    if let Some(max) = self.max.as_mut() {
        max.truncate_push_at(index, *values.iter().max().unwrap())?;
    }
    Ok(())
}
/// Compute percentiles from sorted values (assumes values is already sorted)
fn compute_percentiles_from_sorted(&mut self, index: usize, values: &[T]) -> Result<()> {
if let Some(max) = self.max.as_mut() {
max.truncate_push_at(
index,
*values
.last()
.ok_or(Error::Internal("Empty values for percentiles"))?,
)?;
}
if let Some(pct90) = self.pct90.as_mut() {
pct90.truncate_push_at(index, get_percentile(values, 0.90))?;
}
if let Some(pct75) = self.pct75.as_mut() {
pct75.truncate_push_at(index, get_percentile(values, 0.75))?;
}
if let Some(median) = self.median.as_mut() {
median.truncate_push_at(index, get_percentile(values, 0.50))?;
}
if let Some(pct25) = self.pct25.as_mut() {
pct25.truncate_push_at(index, get_percentile(values, 0.25))?;
}
if let Some(pct10) = self.pct10.as_mut() {
pct10.truncate_push_at(index, get_percentile(values, 0.10))?;
}
if let Some(min) = self.min.as_mut() {
min.truncate_push_at(index, *values.first().unwrap())?;
}
Ok(())
}
/// Compute sum, average, and cumulative from `values` and push them at
/// `index`.
///
/// `cumulative` is the running total carried across calls; it must be
/// `Some` whenever the cumulative vector exists (the `unwrap` below relies
/// on the caller having seeded it), and is updated in place.
fn compute_aggregates(
    &mut self,
    index: usize,
    values: Vec<T>,
    cumulative: &mut Option<T>,
) -> Result<()> {
    let len = values.len();
    let sum = values.into_iter().fold(T::from(0), |a, b| a + b);
    if let Some(average) = self.average.as_mut() {
        // len == 0 handled by T's Div<usize> returning NaN
        average.truncate_push_at(index, sum / len)?;
    }
    if self.needs_sum_or_cumulative() {
        if let Some(sum_vec) = self.sum.as_mut() {
            sum_vec.truncate_push_at(index, sum)?;
        }
        if let Some(cumulative_vec) = self.cumulative.as_mut() {
            // Advance the running total and persist it.
            let t = cumulative.unwrap() + sum;
            cumulative.replace(t);
            cumulative_vec.truncate_push_at(index, t)?;
        }
    }
    Ok(())
}
/// Extend only the cumulative vector from `source`, resuming from the last
/// written cumulative value. No-op when cumulative was not requested.
pub fn extend(
    &mut self,
    max_from: I,
    source: &impl IterableVec<I, T>,
    exit: &Exit,
) -> Result<()> {
    if self.cumulative.is_none() {
        return Ok(());
    };
    self.validate_computed_version_or_reset(source.version())?;
    let index = self.starting_index(max_from);
    // `um()` comes from `crate::utils::OptionExt` — presumably
    // unwrap-as-mut; safe here thanks to the `is_none` guard above. Confirm.
    let cumulative_vec = self.cumulative.um();
    // Seed the running total with the value just before the resume point
    // (0 when starting from the very beginning).
    let mut cumulative = index.decremented().map_or(T::from(0_usize), |index| {
        cumulative_vec.iter().get_unwrap(index)
    });
    source
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(i, v)| -> Result<()> {
            cumulative += v;
            cumulative_vec.truncate_push_at(i, cumulative)?;
            Ok(())
        })?;
    // Hold the exit lock while flushing so shutdown can't interleave.
    let _lock = exit.lock();
    self.write()?;
    Ok(())
}
/// Recompute every requested aggregate from `source`, where each coarse
/// index `I` covers the half-open run of finer indexes `A` described by
/// `first_indexes` (start) and `count_indexes` (length).
///
/// Resumes from `starting_index(max_from)` and flushes at the end under the
/// exit lock.
pub fn compute<A>(
    &mut self,
    max_from: I,
    source: &impl IterableVec<A, T>,
    first_indexes: &impl IterableVec<I, A>,
    count_indexes: &impl IterableVec<I, StoredU64>,
    exit: &Exit,
) -> Result<()>
where
    A: VecIndex + VecValue + CheckedSub<A>,
{
    self.validate_computed_version_or_reset(
        source.version() + first_indexes.version() + count_indexes.version(),
    )?;
    let index = self.starting_index(max_from);
    let mut source_iter = source.iter();
    let cumulative_vec = self.cumulative.as_mut();
    // Seed the running cumulative total from the value just before the
    // resume point (0 at the very beginning).
    let mut cumulative = cumulative_vec.map(|cumulative_vec| {
        index.decremented().map_or(T::from(0_usize), |index| {
            cumulative_vec.iter().get_unwrap(index)
        })
    });
    // Walked in lockstep with `first_indexes` below.
    let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
    first_indexes
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(index, first_index)| -> Result<()> {
            let count_index = count_indexes_iter.next().unwrap();
            if let Some(first) = self.first.as_mut() {
                // Missing first value falls back to 0.
                let f = source_iter
                    .get(first_index)
                    .unwrap_or_else(|| T::from(0_usize));
                first.truncate_push_at(index, f)?;
            }
            if let Some(last) = self.last.as_mut() {
                let count_index = *count_index as usize;
                if count_index == 0 {
                    panic!("should compute last if count can be 0")
                }
                let last_index = first_index + (count_index - 1);
                let v = source_iter.get_unwrap(last_index);
                last.truncate_push_at(index, v)?;
            }
            let needs_percentiles = self.needs_percentiles();
            let needs_minmax = self.needs_minmax();
            let needs_aggregates = self.needs_average_sum_or_cumulative();
            // Fast path: only min/max needed, no sorting or allocation required
            if needs_minmax && !needs_percentiles && !needs_aggregates {
                source_iter.set_position(first_index);
                self.compute_minmax_streaming(
                    index,
                    (&mut source_iter).take(*count_index as usize),
                )?;
            } else if needs_percentiles || needs_aggregates {
                // Slow path: collect the period's values once and share
                // them between percentile and aggregate computation.
                source_iter.set_position(first_index);
                let mut values = (&mut source_iter)
                    .take(*count_index as usize)
                    .collect::<Vec<_>>();
                if needs_percentiles {
                    values.sort_unstable();
                    self.compute_percentiles_from_sorted(index, &values)?;
                } else if needs_minmax {
                    // We have values collected but only need min/max (along with aggregates)
                    self.compute_minmax_from_slice(index, &values)?;
                }
                if needs_aggregates {
                    self.compute_aggregates(index, values, &mut cumulative)?;
                }
            }
            Ok(())
        })?;
    // Hold the exit lock while flushing so shutdown can't interleave.
    let _lock = exit.lock();
    self.write()?;
    Ok(())
}
/// Recompute this builder's aggregates from an already-aggregated builder
/// at a finer index `A` (e.g. derive monthly stats from daily stats).
///
/// Each target statistic reads its same-named source statistic (first from
/// first, max from max, …). Percentiles cannot be derived this way and
/// panic. `um()` (from `crate::utils::OptionExt`) presumably unwraps the
/// matching source iterator — the source builder must therefore have every
/// statistic this builder requests; confirm.
#[allow(clippy::wrong_self_convention)]
pub fn from_aligned<A>(
    &mut self,
    max_from: I,
    source: &EagerVecsBuilder<A, T>,
    first_indexes: &impl IterableVec<I, A>,
    count_indexes: &impl IterableVec<I, StoredU64>,
    exit: &Exit,
) -> Result<()>
where
    A: VecIndex + VecValue + CheckedSub<A>,
{
    if self.needs_percentiles() {
        panic!("percentiles unsupported in from_aligned");
    }
    self.validate_computed_version_or_reset(
        VERSION + first_indexes.version() + count_indexes.version(),
    )?;
    let index = self.starting_index(max_from);
    // One positioned iterator per source statistic we might read.
    let mut source_first_iter = source.first.as_ref().map(|f| f.iter());
    let mut source_last_iter = source.last.as_ref().map(|f| f.iter());
    let mut source_max_iter = source.max.as_ref().map(|f| f.iter());
    let mut source_min_iter = source.min.as_ref().map(|f| f.iter());
    let mut source_average_iter = source.average.as_ref().map(|f| f.iter());
    let mut source_sum_iter = source.sum.as_ref().map(|f| f.iter());
    // Seed the running cumulative total from just before the resume point.
    let mut cumulative = self.cumulative.as_mut().map(|cumulative_vec| {
        index.decremented().map_or(T::from(0_usize), |index| {
            cumulative_vec.iter().get_unwrap(index)
        })
    });
    let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
    first_indexes
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(index, first_index, ..)| -> Result<()> {
            let count_index = count_indexes_iter.next().unwrap();
            if let Some(first) = self.first.as_mut() {
                let v = source_first_iter.um().get_unwrap(first_index);
                first.truncate_push_at(index, v)?;
            }
            if let Some(last) = self.last.as_mut() {
                let count_index = *count_index as usize;
                if count_index == 0 {
                    panic!("should compute last if count can be 0")
                }
                let last_index = first_index + (count_index - 1);
                let v = source_last_iter.um().get_unwrap(last_index);
                last.truncate_push_at(index, v)?;
            }
            let needs_minmax = self.needs_minmax();
            let needs_aggregates = self.needs_average_sum_or_cumulative();
            if needs_minmax || needs_aggregates {
                // Min/max: use streaming O(n) instead of sort O(n log n)
                if needs_minmax {
                    if let Some(max) = self.max.as_mut() {
                        let source_max_iter = source_max_iter.um();
                        source_max_iter.set_position(first_index);
                        let max_val =
                            source_max_iter.take(*count_index as usize).max().unwrap();
                        max.truncate_push_at(index, max_val)?;
                    }
                    if let Some(min) = self.min.as_mut() {
                        let source_min_iter = source_min_iter.um();
                        source_min_iter.set_position(first_index);
                        let min_val =
                            source_min_iter.take(*count_index as usize).min().unwrap();
                        min.truncate_push_at(index, min_val)?;
                    }
                }
                if needs_aggregates {
                    if let Some(average) = self.average.as_mut() {
                        let source_average_iter = source_average_iter.um();
                        source_average_iter.set_position(first_index);
                        let mut len = 0usize;
                        let sum = (&mut *source_average_iter)
                            .take(*count_index as usize)
                            .inspect(|_| len += 1)
                            .fold(T::from(0), |a, b| a + b);
                        // TODO: Multiply by count then divide by cumulative
                        // Right now it's not 100% accurate as there could be more or less elements in the lower timeframe (28 days vs 31 days in a month for example)
                        // len == 0 handled by T's Div<usize> returning NaN
                        let avg = sum / len;
                        average.truncate_push_at(index, avg)?;
                    }
                    if self.needs_sum_or_cumulative() {
                        let source_sum_iter = source_sum_iter.um();
                        source_sum_iter.set_position(first_index);
                        let sum = source_sum_iter
                            .take(*count_index as usize)
                            .fold(T::from(0), |a, b| a + b);
                        if let Some(sum_vec) = self.sum.as_mut() {
                            sum_vec.truncate_push_at(index, sum)?;
                        }
                        if let Some(cumulative_vec) = self.cumulative.as_mut() {
                            // Advance the running total and persist it.
                            let t = cumulative.unwrap() + sum;
                            cumulative.replace(t);
                            cumulative_vec.truncate_push_at(index, t)?;
                        }
                    }
                }
            }
            Ok(())
        })?;
    // Hold the exit lock while flushing so shutdown can't interleave.
    let _lock = exit.lock();
    self.write()?;
    Ok(())
}
/// First index that still needs computing: the caller's `max_from`, capped
/// by the shortest already-materialized vector.
pub fn starting_index(&self, max_from: I) -> I {
    max_from.min(I::from(
        self.iter_any_exportable().map(|v| v.len()).min().unwrap(),
    ))
}
// The `unwrap_*` accessors below panic (via `OptionExt::u`, presumably an
// unwrap helper — confirm) when the statistic was not enabled in the
// builder's options.
#[inline]
pub fn unwrap_first(&self) -> &EagerVec<PcoVec<I, T>> {
    self.first.u()
}
#[inline]
pub fn unwrap_average(&self) -> &EagerVec<PcoVec<I, T>> {
    self.average.u()
}
#[inline]
pub fn unwrap_sum(&self) -> &EagerVec<PcoVec<I, T>> {
    self.sum.u()
}
#[inline]
pub fn unwrap_max(&self) -> &EagerVec<PcoVec<I, T>> {
    self.max.u()
}
#[inline]
pub fn unwrap_pct90(&self) -> &EagerVec<PcoVec<I, T>> {
    self.pct90.u()
}
#[inline]
pub fn unwrap_pct75(&self) -> &EagerVec<PcoVec<I, T>> {
    self.pct75.u()
}
#[inline]
pub fn unwrap_median(&self) -> &EagerVec<PcoVec<I, T>> {
    self.median.u()
}
#[inline]
pub fn unwrap_pct25(&self) -> &EagerVec<PcoVec<I, T>> {
    self.pct25.u()
}
#[inline]
pub fn unwrap_pct10(&self) -> &EagerVec<PcoVec<I, T>> {
    self.pct10.u()
}
#[inline]
pub fn unwrap_min(&self) -> &EagerVec<PcoVec<I, T>> {
    self.min.u()
}
#[inline]
pub fn unwrap_last(&self) -> &EagerVec<PcoVec<I, T>> {
    self.last.u()
}
#[inline]
pub fn unwrap_cumulative(&self) -> &EagerVec<PcoVec<I, T>> {
    self.cumulative.u()
}
/// Flush every materialized aggregate vector to storage, stopping at the
/// first write error.
pub fn write(&mut self) -> Result<()> {
    // Expand one `if let Some(..) { ..write()? }` per field, in the given order.
    macro_rules! write_each {
        ($($field:ident),* $(,)?) => {
            $(
                if let Some(vec) = self.$field.as_mut() {
                    vec.write()?;
                }
            )*
        };
    }
    write_each!(
        first, last, min, max, median, average, sum, cumulative,
        pct90, pct75, pct25, pct10,
    );
    Ok(())
}
/// Propagate `dep_version` to every materialized vector so each can reset
/// itself when its stored version no longer matches its dependencies.
pub fn validate_computed_version_or_reset(&mut self, dep_version: Version) -> Result<()> {
    // Expand one validation call per field, in the given order.
    macro_rules! validate_each {
        ($($field:ident),* $(,)?) => {
            $(
                if let Some(vec) = self.$field.as_mut() {
                    vec.validate_computed_version_or_reset(dep_version)?;
                }
            )*
        };
    }
    validate_each!(
        first, last, min, max, median, average, sum, cumulative,
        pct90, pct75, pct25, pct10,
    );
    Ok(())
}
}
/// Flag set selecting which aggregate vectors an `EagerVecsBuilder`
/// materializes. All flags default to off; enable them with the chained
/// `add_*` builder methods.
#[derive(Default, Clone, Copy)]
pub struct VecBuilderOptions {
    average: bool,
    sum: bool,
    max: bool,
    pct90: bool,
    pct75: bool,
    median: bool,
    pct25: bool,
    pct10: bool,
    min: bool,
    first: bool,
    last: bool,
    cumulative: bool,
}
// Flag-plumbing helpers for `VecBuilderOptions`: generate the repetitive
// getters and builder-style setters from flag lists.
macro_rules! vbo_getters {
    ($($flag:ident),* $(,)?) => {
        $(
            pub fn $flag(&self) -> bool {
                self.$flag
            }
        )*
    };
}
macro_rules! vbo_set {
    ($on:expr => $($method:ident -> $flag:ident),* $(,)?) => {
        $(
            pub fn $method(mut self) -> Self {
                self.$flag = $on;
                self
            }
        )*
    };
}

impl VecBuilderOptions {
    vbo_getters!(
        average, sum, max, pct90, pct75, median, pct25, pct10, min, first, last, cumulative,
    );

    vbo_set!(true =>
        add_first -> first,
        add_last -> last,
        add_min -> min,
        add_max -> max,
        add_median -> median,
        add_average -> average,
        add_sum -> sum,
        add_pct90 -> pct90,
        add_pct75 -> pct75,
        add_pct25 -> pct25,
        add_pct10 -> pct10,
        add_cumulative -> cumulative,
    );

    vbo_set!(false =>
        rm_min -> min,
        rm_max -> max,
        rm_median -> median,
        rm_average -> average,
        rm_sum -> sum,
        rm_pct90 -> pct90,
        rm_pct75 -> pct75,
        rm_pct25 -> pct25,
        rm_pct10 -> pct10,
        rm_cumulative -> cumulative,
    );

    /// Enable both extremes in one call.
    pub fn add_minmax(mut self) -> Self {
        self.min = true;
        self.max = true;
        self
    }

    /// Enable the whole percentile family (p10/p25/median/p75/p90).
    pub fn add_percentiles(mut self) -> Self {
        self.pct90 = true;
        self.pct75 = true;
        self.median = true;
        self.pct25 = true;
        self.pct10 = true;
        self
    }

    /// Disable the whole percentile family.
    pub fn remove_percentiles(mut self) -> Self {
        self.pct90 = false;
        self.pct75 = false;
        self.median = false;
        self.pct25 = false;
        self.pct10 = false;
        self
    }

    /// True when exactly one statistic is enabled (the vector then takes
    /// the bare name instead of a suffixed one).
    pub fn is_only_one_active(&self) -> bool {
        let flags = [
            self.average,
            self.sum,
            self.max,
            self.pct90,
            self.pct75,
            self.median,
            self.pct25,
            self.pct10,
            self.min,
            self.first,
            self.last,
            self.cumulative,
        ];
        flags.into_iter().filter(|&b| b).count() == 1
    }

    /// Keep only the flags that must propagate to derived builders
    /// (currently just `cumulative`).
    pub fn copy_self_extra(&self) -> Self {
        Self {
            cumulative: self.cumulative,
            ..Self::default()
        }
    }
}

View File

@@ -1,361 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex};
use crate::internal::{EagerVecsBuilder, VecBuilderOptions};
use crate::utils::OptionExt;
use super::super::ComputedVecValue;
/// Lazy counterpart of `EagerVecsBuilder`: each enabled statistic is a
/// `LazyVecFrom2` view computed on demand from a finer-indexed source plus a
/// length vector. Note there are no percentile fields — percentiles are only
/// available eagerly.
#[allow(clippy::type_complexity)]
#[derive(Clone, Traversable)]
pub struct LazyVecsBuilder<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: ComputedVecValue,
{
    pub first: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub average: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub sum: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub max: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub min: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub last: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
    pub cumulative: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
}
const VERSION: Version = Version::ZERO;
impl<I, T, S1I, S2T> LazyVecsBuilder<I, T, S1I, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1I: VecIndex + 'static + FromCoarserIndex<I>,
S2T: ComputedVecValue,
{
/// Assemble the requested lazy aggregation views.
///
/// Each enabled statistic prefers its eager counterpart in `source_extra`
/// as input and falls back to the raw `source` when that is absent (for
/// `last` a missing fallback panics with a debug dump; `cumulative` has no
/// fallback at all — it requires the eager cumulative vector).
/// `.u()` is the unwrap helper from `crate::utils::OptionExt` (presumably
/// panics on `None` — confirm). Naming follows the eager builder's rules:
/// bare `name` when only one stat is active, `_<stat>` suffix otherwise,
/// with the same `last`/`sum`/`cumulative` exceptions.
#[allow(clippy::too_many_arguments)]
pub fn forced_import(
    name: &str,
    version: Version,
    source: Option<IterableBoxedVec<S1I, T>>,
    source_extra: &EagerVecsBuilder<S1I, T>,
    len_source: IterableBoxedVec<I, S2T>,
    options: LazyVecBuilderOptions,
) -> Self {
    let only_one_active = options.is_only_one_active();
    let suffix = |s: &str| format!("{name}_{s}");
    let maybe_suffix = |s: &str| {
        if only_one_active {
            name.to_string()
        } else {
            suffix(s)
        }
    };
    Self {
        first: options.first.then(|| {
            Box::new(LazyVecFrom2::init(
                &maybe_suffix("first"),
                version + VERSION,
                source_extra
                    .first
                    .as_ref()
                    .map_or_else(|| source.u().clone(), |v| v.clone()),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    // First value of the period: smallest finer index in `i`.
                    source.get_at(S1I::min_from(i))
                },
            ))
        }),
        last: options.last.then(|| {
            Box::new(LazyVecFrom2::init(
                name,
                version + VERSION,
                source_extra.last.as_ref().map_or_else(
                    || {
                        source
                            .as_ref()
                            .unwrap_or_else(|| {
                                dbg!(name, I::to_string());
                                panic!()
                            })
                            .clone()
                    },
                    |v| v.clone(),
                ),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    // Last value of the period: greatest finer index in `i`.
                    source.get_at(S1I::max_from(i, source.vec_len()))
                },
            ))
        }),
        min: options.min.then(|| {
            Box::new(LazyVecFrom2::init(
                &maybe_suffix("min"),
                version + VERSION,
                source_extra
                    .min
                    .as_ref()
                    .map_or_else(|| source.u().clone(), |v| v.clone()),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    S1I::inclusive_range_from(i, source.vec_len())
                        .flat_map(|i| source.get_at(i))
                        .min()
                },
            ))
        }),
        max: options.max.then(|| {
            Box::new(LazyVecFrom2::init(
                &maybe_suffix("max"),
                version + VERSION,
                source_extra
                    .max
                    .as_ref()
                    .map_or_else(|| source.u().clone(), |v| v.clone()),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    S1I::inclusive_range_from(i, source.vec_len())
                        .flat_map(|i| source.get_at(i))
                        .max()
                },
            ))
        }),
        average: options.average.then(|| {
            Box::new(LazyVecFrom2::init(
                &maybe_suffix("avg"),
                version + VERSION,
                source_extra
                    .average
                    .as_ref()
                    .map_or_else(|| source.u().clone(), |v| v.clone()),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    let mut sum = T::from(0);
                    let mut len = 0usize;
                    for v in S1I::inclusive_range_from(i, source.vec_len())
                        .flat_map(|i| source.get_at(i))
                    {
                        sum += v;
                        len += 1;
                    }
                    // Empty period yields None rather than dividing by zero.
                    if len == 0 {
                        return None;
                    }
                    Some(sum / len)
                },
            ))
        }),
        sum: options.sum.then(|| {
            Box::new(LazyVecFrom2::init(
                // `sum` claims the bare name when no competing stat is active.
                &(if !options.last && !options.average && !options.min && !options.max {
                    name.to_string()
                } else {
                    maybe_suffix("sum")
                }),
                version + VERSION,
                source_extra
                    .sum
                    .as_ref()
                    .map_or_else(|| source.u().clone(), |v| v.clone()),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    let mut sum = T::from(0);
                    let mut has_values = false;
                    for v in S1I::inclusive_range_from(i, source.vec_len())
                        .flat_map(|i| source.get_at(i))
                    {
                        sum += v;
                        has_values = true;
                    }
                    // Empty period yields None rather than a zero sum.
                    if !has_values {
                        return None;
                    }
                    Some(sum)
                },
            ))
        }),
        cumulative: options.cumulative.then(|| {
            Box::new(LazyVecFrom2::init(
                &suffix("cumulative"),
                version + VERSION,
                // Cumulative requires the eager cumulative vector; no raw fallback.
                source_extra.cumulative.u().boxed_clone(),
                len_source.clone(),
                |i: I, source, len_source| {
                    if i.to_usize() >= len_source.vec_len() {
                        return None;
                    }
                    // Running total at the end of the period.
                    source.get_at(S1I::max_from(i, source.vec_len()))
                },
            ))
        }),
    }
}
/// First index that still needs computing: the caller's `max_from`, capped
/// by the shortest already-materialized vector.
pub fn starting_index(&self, max_from: I) -> I {
    max_from.min(I::from(
        self.iter_any_exportable().map(|v| v.len()).min().unwrap(),
    ))
}
// The `unwrap_*` accessors below panic (via `OptionExt::u`, presumably an
// unwrap helper — confirm) when the statistic was not enabled.
pub fn unwrap_first(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.first.u()
}
pub fn unwrap_average(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.average.u()
}
pub fn unwrap_sum(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.sum.u()
}
pub fn unwrap_max(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.max.u()
}
pub fn unwrap_min(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.min.u()
}
pub fn unwrap_last(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.last.u()
}
pub fn unwrap_cumulative(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
    self.cumulative.u()
}
}
/// Flag set selecting which lazy views a `LazyVecsBuilder` exposes. A subset
/// of `VecBuilderOptions`: no percentile flags, since percentiles are only
/// computed eagerly. All flags default to off.
#[derive(Default, Clone, Copy)]
pub struct LazyVecBuilderOptions {
    average: bool,
    sum: bool,
    max: bool,
    min: bool,
    first: bool,
    last: bool,
    cumulative: bool,
}
// Converts eager options to the lazy subset. The percentile flags are
// intentionally dropped: `LazyVecBuilderOptions` has no percentile fields.
impl From<VecBuilderOptions> for LazyVecBuilderOptions {
    #[inline]
    fn from(value: VecBuilderOptions) -> Self {
        Self {
            average: value.average(),
            sum: value.sum(),
            max: value.max(),
            min: value.min(),
            first: value.first(),
            last: value.last(),
            cumulative: value.cumulative(),
        }
    }
}
// Generate the repetitive builder-style flag setters for
// `LazyVecBuilderOptions` from a `method -> field` list.
macro_rules! lvbo_set {
    ($on:expr => $($method:ident -> $flag:ident),* $(,)?) => {
        $(
            pub fn $method(mut self) -> Self {
                self.$flag = $on;
                self
            }
        )*
    };
}

impl LazyVecBuilderOptions {
    lvbo_set!(true =>
        add_first -> first,
        add_last -> last,
        add_min -> min,
        add_max -> max,
        add_average -> average,
        add_sum -> sum,
        add_cumulative -> cumulative,
    );

    lvbo_set!(false =>
        rm_min -> min,
        rm_max -> max,
        rm_average -> average,
        rm_sum -> sum,
        rm_cumulative -> cumulative,
    );

    /// Enable both extremes in one call.
    pub fn add_minmax(mut self) -> Self {
        self.min = true;
        self.max = true;
        self
    }

    /// True when exactly one statistic is enabled (the built view then
    /// takes the bare name instead of a suffixed one).
    pub fn is_only_one_active(&self) -> bool {
        let flags = [
            self.average,
            self.sum,
            self.max,
            self.min,
            self.first,
            self.last,
            self.cumulative,
        ];
        flags.into_iter().filter(|&b| b).count() == 1
    }

    /// Keep only the flags that must propagate to derived builders
    /// (currently just `cumulative`).
    pub fn copy_self_extra(&self) -> Self {
        Self {
            cumulative: self.cumulative,
            ..Self::default()
        }
    }
}

View File

@@ -1,9 +0,0 @@
mod eager;
mod lazy;
mod transform;
mod transform2;
pub use eager::*;
pub use lazy::*;
pub use transform::*;
pub use transform2::*;

View File

@@ -1,224 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex};
use super::{
super::ComputedVecValue,
eager::EagerVecsBuilder,
lazy::LazyVecsBuilder,
};
const VERSION: Version = Version::ZERO;
/// Lazy transform version of `EagerVecsBuilder`.
/// Each group is a `LazyVecFrom1` that transforms from the corresponding stored group.
/// S1T is the source type, T is the output type (can be the same for transforms like negation).
#[derive(Clone, Traversable)]
pub struct LazyTransformBuilder<I, T, S1T = T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    // One optional transformed view per statistic, mirroring the eager
    // builder's field set (including percentiles).
    pub first: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub average: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub sum: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub max: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub pct90: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub pct75: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub median: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub pct25: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub pct10: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub min: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub last: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
    pub cumulative: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy transform from a stored `EagerVecsBuilder`.
    /// F is the transform type (e.g., `Negate`, `Halve`).
    ///
    /// Each output group is `Some` iff the corresponding source group is `Some`.
    /// Note: the `last` group uses the bare `name` (canonical vec name), while
    /// every other group gets a `name_<group>` suffix.
    pub fn from_eager<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &EagerVecsBuilder<I, S1T>,
    ) -> Self {
        let v = version + VERSION;
        // Builds "name_<group>" identifiers for the derived vecs.
        let suffix = |s: &str| format!("{name}_{s}");
        Self {
            first: source.first.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("first"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            average: source.average.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("avg"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            sum: source.sum.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("sum"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            max: source.max.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("max"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            pct90: source.pct90.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("pct90"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            pct75: source.pct75.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("pct75"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            median: source.median.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("median"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            pct25: source.pct25.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("pct25"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            pct10: source.pct10.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("pct10"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            min: source.min.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("min"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            // `last` intentionally keeps the unsuffixed name.
            last: source
                .last
                .as_ref()
                .map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
            cumulative: source.cumulative.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("cumulative"),
                    v,
                    s.boxed_clone(),
                ))
            }),
        }
    }
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Returns the `sum` group.
    ///
    /// # Panics
    /// Panics if the `sum` group was not built for this builder.
    pub fn unwrap_sum(&self) -> &LazyVecFrom1<I, T, I, S1T> {
        self.sum.as_ref().unwrap()
    }
    /// Returns the `cumulative` group.
    ///
    /// # Panics
    /// Panics if the `cumulative` group was not built for this builder.
    pub fn unwrap_cumulative(&self) -> &LazyVecFrom1<I, T, I, S1T> {
        self.cumulative.as_ref().unwrap()
    }
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy transform from a `LazyVecsBuilder`.
    /// Note: LazyVecsBuilder doesn't have percentiles, so those will be None.
    ///
    /// Naming mirrors `from_eager`: every group is `name_<group>` except
    /// `last`, which keeps the bare `name`.
    pub fn from_lazy<F: UnaryTransform<S1T, T>, S1I: VecIndex, S2T: ComputedVecValue>(
        name: &str,
        version: Version,
        source: &LazyVecsBuilder<I, S1T, S1I, S2T>,
    ) -> Self {
        let v = version + VERSION;
        // Use same suffix pattern as EagerVecsBuilder
        let suffix = |s: &str| format!("{name}_{s}");
        Self {
            first: source.first.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("first"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            average: source.average.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("avg"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            sum: source.sum.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("sum"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            max: source.max.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("max"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            // Percentiles cannot be derived lazily from a lazy source.
            pct90: None,
            pct75: None,
            median: None,
            pct25: None,
            pct10: None,
            min: source.min.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("min"),
                    v,
                    s.boxed_clone(),
                ))
            }),
            last: source
                .last
                .as_ref()
                .map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
            cumulative: source.cumulative.as_ref().map(|s| {
                Box::new(LazyVecFrom1::transformed::<F>(
                    &suffix("cumulative"),
                    v,
                    s.boxed_clone(),
                ))
            }),
        }
    }
}

View File

@@ -1,240 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec, LazyVecFrom2, VecIndex};
use super::{
super::ComputedVecValue,
eager::EagerVecsBuilder,
lazy::LazyVecsBuilder,
};
const VERSION: Version = Version::ZERO;
/// Lazy binary transform builder.
/// Each group is a `LazyVecFrom2` that transforms from two corresponding stored groups.
///
/// Unlike `LazyTransformBuilder` there are no percentile groups at all:
/// a binary transform of two percentile series is not meaningful here.
#[derive(Clone, Traversable)]
#[allow(clippy::type_complexity)]
pub struct LazyTransform2Builder<I, T, S1T, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    // Each group is Some only when BOTH source builders have that group.
    pub first: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub average: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub sum: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub max: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub min: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub last: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
    pub cumulative: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
}
impl<I, T, S1T, S2T> LazyTransform2Builder<I, T, S1T, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Create a lazy binary transform from two stored `EagerVecsBuilder`.
    ///
    /// Each group is built only when both sources provide it (`zip`).
    /// Note: the `last` group uses the bare `name`; every other group is
    /// suffixed `name_<group>`.
    pub fn from_eager<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &EagerVecsBuilder<I, S1T>,
        source2: &EagerVecsBuilder<I, S2T>,
    ) -> Self {
        let v = version + VERSION;
        let suffix = |s: &str| format!("{name}_{s}");
        Self {
            first: source1
                .first
                .as_ref()
                .zip(source2.first.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("first"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            average: source1
                .average
                .as_ref()
                .zip(source2.average.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("avg"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            sum: source1
                .sum
                .as_ref()
                .zip(source2.sum.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("sum"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            max: source1
                .max
                .as_ref()
                .zip(source2.max.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("max"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            min: source1
                .min
                .as_ref()
                .zip(source2.min.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("min"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            // `last` intentionally keeps the unsuffixed name.
            last: source1
                .last
                .as_ref()
                .zip(source2.last.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        name,
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            cumulative: source1
                .cumulative
                .as_ref()
                .zip(source2.cumulative.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("cumulative"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
        }
    }
    /// Create a lazy binary transform from two `LazyVecsBuilder`.
    ///
    /// Same group/`zip` semantics and naming as `from_eager`.
    pub fn from_lazy<
        F: BinaryTransform<S1T, S2T, T>,
        S1I: VecIndex,
        S1E: ComputedVecValue,
        S2I: VecIndex,
        S2E: ComputedVecValue,
    >(
        name: &str,
        version: Version,
        source1: &LazyVecsBuilder<I, S1T, S1I, S1E>,
        source2: &LazyVecsBuilder<I, S2T, S2I, S2E>,
    ) -> Self {
        let v = version + VERSION;
        let suffix = |s: &str| format!("{name}_{s}");
        Self {
            first: source1
                .first
                .as_ref()
                .zip(source2.first.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("first"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            average: source1
                .average
                .as_ref()
                .zip(source2.average.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("avg"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            sum: source1
                .sum
                .as_ref()
                .zip(source2.sum.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("sum"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            max: source1
                .max
                .as_ref()
                .zip(source2.max.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("max"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            min: source1
                .min
                .as_ref()
                .zip(source2.min.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("min"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            last: source1
                .last
                .as_ref()
                .zip(source2.last.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        name,
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
            cumulative: source1
                .cumulative
                .as_ref()
                .zip(source2.cumulative.as_ref())
                .map(|(s1, s2)| {
                    Box::new(LazyVecFrom2::transformed::<F>(
                        &suffix("cumulative"),
                        v,
                        s1.boxed_clone(),
                        s2.boxed_clone(),
                    ))
                }),
        }
    }
}

View File

@@ -0,0 +1,521 @@
//! Compute functions for aggregation - take optional vecs, compute what's needed.
//!
//! These functions replace the Option-based compute logic in flexible builders.
//! Each function takes optional mutable references and computes only for Some() vecs.
use brk_error::{Error, Result};
use brk_types::{CheckedSub, StoredU64};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, IterableVec, PcoVec, VecIndex, VecValue,
};
use crate::utils::get_percentile;
use super::ComputedVecValue;
/// Validate a target vec's computed version (resetting it on mismatch) and
/// clamp the resume index so we never skip past data the vec doesn't have yet.
fn validate_and_start<I: VecIndex, T: ComputedVecValue + JsonSchema>(
    vec: &mut EagerVec<PcoVec<I, T>>,
    combined_version: vecdb::Version,
    current_start: I,
) -> Result<I> {
    vec.validate_computed_version_or_reset(combined_version)?;
    // Resume from whichever comes first: the requested start or the first
    // index not yet present in `vec`.
    let already_computed = I::from(vec.len());
    Ok(already_computed.min(current_start))
}
/// Compute aggregations from a source vec into target vecs.
///
/// This function computes all requested aggregations in a single pass when possible,
/// optimizing for the common case where multiple aggregations are needed.
///
/// For each target index `i`, the group of source values is
/// `source[first_indexes[i] .. first_indexes[i] + count_indexes[i]]`.
/// Only the `Some(..)` targets are computed; `None` targets are skipped.
/// All targets are version-validated against the combined source versions and
/// the resume point is the minimum over all targets.
///
/// # Panics
/// Panics if `last` is requested for a group with `count == 0`, and
/// (via `unwrap`) if min/max are requested for an empty group.
#[allow(clippy::too_many_arguments)]
pub fn compute_aggregations<I, T, A>(
    max_from: I,
    source: &impl IterableVec<A, T>,
    first_indexes: &impl IterableVec<I, A>,
    count_indexes: &impl IterableVec<I, StoredU64>,
    exit: &Exit,
    mut first: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut last: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut min: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut max: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut average: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut sum: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut cumulative: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut median: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut pct10: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut pct25: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut pct75: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut pct90: Option<&mut EagerVec<PcoVec<I, T>>>,
) -> Result<()>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    A: VecIndex + VecValue + CheckedSub<A>,
{
    // Phase 1: validate every requested target and find the earliest index
    // any of them still needs (the common resume point).
    let combined_version = source.version() + first_indexes.version() + count_indexes.version();
    let mut starting_index = max_from;
    if let Some(ref mut v) = first {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = last {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = min {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = max {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = average {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = sum {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = cumulative {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = median {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = pct10 {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = pct25 {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = pct75 {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = pct90 {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    // Freeze the resume point; `starting_index` is no longer mutated.
    let index = starting_index;
    let needs_first = first.is_some();
    let needs_last = last.is_some();
    let needs_min = min.is_some();
    let needs_max = max.is_some();
    let needs_average = average.is_some();
    let needs_sum = sum.is_some();
    let needs_cumulative = cumulative.is_some();
    let needs_percentiles = median.is_some()
        || pct10.is_some()
        || pct25.is_some()
        || pct75.is_some()
        || pct90.is_some();
    let needs_minmax = needs_min || needs_max;
    let needs_sum_or_cumulative = needs_sum || needs_cumulative;
    let needs_aggregates = needs_sum_or_cumulative || needs_average;
    // Nothing requested at all: done.
    if !needs_first && !needs_last && !needs_minmax && !needs_aggregates && !needs_percentiles {
        return Ok(());
    }
    let mut source_iter = source.iter();
    // Seed the running cumulative total from the value just before the resume
    // point (zero when resuming from the very start).
    let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| {
        index.decremented().map_or(T::from(0_usize), |idx| {
            cumulative_vec.iter().get_unwrap(idx)
        })
    });
    let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
    // Phase 2: one pass over the target indexes; each iteration aggregates
    // one group of source values.
    first_indexes
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(idx, first_index)| -> Result<()> {
            let count_index = count_indexes_iter.next().unwrap();
            let count = *count_index as usize;
            if let Some(ref mut first_vec) = first {
                // NOTE(review): missing first entry silently falls back to
                // zero — presumably unreachable in practice; TODO confirm.
                let f = source_iter
                    .get(first_index)
                    .unwrap_or_else(|| T::from(0_usize));
                first_vec.truncate_push_at(idx, f)?;
            }
            if let Some(ref mut last_vec) = last {
                if count == 0 {
                    panic!("should not compute last if count can be 0");
                }
                let last_index = first_index + (count - 1);
                let v = source_iter.get_unwrap(last_index);
                last_vec.truncate_push_at(idx, v)?;
            }
            // Fast path: only min/max needed, no sorting or allocation required
            if needs_minmax && !needs_percentiles && !needs_aggregates {
                source_iter.set_position(first_index);
                let mut min_val: Option<T> = None;
                let mut max_val: Option<T> = None;
                for val in (&mut source_iter).take(count) {
                    if needs_min {
                        min_val = Some(min_val.map_or(val, |m| if val < m { val } else { m }));
                    }
                    if needs_max {
                        max_val = Some(max_val.map_or(val, |m| if val > m { val } else { m }));
                    }
                }
                if let Some(ref mut min_vec) = min {
                    min_vec.truncate_push_at(idx, min_val.unwrap())?;
                }
                if let Some(ref mut max_vec) = max {
                    max_vec.truncate_push_at(idx, max_val.unwrap())?;
                }
            } else if needs_percentiles || needs_aggregates || needs_minmax {
                // General path: materialize the group once, then derive
                // everything (sorting only when percentiles were requested).
                source_iter.set_position(first_index);
                let mut values: Vec<T> = (&mut source_iter).take(count).collect();
                if needs_percentiles {
                    values.sort_unstable();
                    // Sorted order gives max/min as last/first elements.
                    if let Some(ref mut max_vec) = max {
                        max_vec.truncate_push_at(
                            idx,
                            *values
                                .last()
                                .ok_or(Error::Internal("Empty values for percentiles"))?,
                        )?;
                    }
                    if let Some(ref mut pct90_vec) = pct90 {
                        pct90_vec.truncate_push_at(idx, get_percentile(&values, 0.90))?;
                    }
                    if let Some(ref mut pct75_vec) = pct75 {
                        pct75_vec.truncate_push_at(idx, get_percentile(&values, 0.75))?;
                    }
                    if let Some(ref mut median_vec) = median {
                        median_vec.truncate_push_at(idx, get_percentile(&values, 0.50))?;
                    }
                    if let Some(ref mut pct25_vec) = pct25 {
                        pct25_vec.truncate_push_at(idx, get_percentile(&values, 0.25))?;
                    }
                    if let Some(ref mut pct10_vec) = pct10 {
                        pct10_vec.truncate_push_at(idx, get_percentile(&values, 0.10))?;
                    }
                    if let Some(ref mut min_vec) = min {
                        min_vec.truncate_push_at(idx, *values.first().unwrap())?;
                    }
                } else if needs_minmax {
                    // Unsorted: scan for min/max.
                    if let Some(ref mut min_vec) = min {
                        min_vec.truncate_push_at(idx, *values.iter().min().unwrap())?;
                    }
                    if let Some(ref mut max_vec) = max {
                        max_vec.truncate_push_at(idx, *values.iter().max().unwrap())?;
                    }
                }
                if needs_aggregates {
                    let len = values.len();
                    let sum_val = values.into_iter().fold(T::from(0), |a, b| a + b);
                    if let Some(ref mut average_vec) = average {
                        average_vec.truncate_push_at(idx, sum_val / len)?;
                    }
                    if needs_sum_or_cumulative {
                        if let Some(ref mut sum_vec) = sum {
                            sum_vec.truncate_push_at(idx, sum_val)?;
                        }
                        if let Some(ref mut cumulative_vec) = cumulative {
                            let t = cumulative_val.unwrap() + sum_val;
                            cumulative_val.replace(t);
                            cumulative_vec.truncate_push_at(idx, t)?;
                        }
                    }
                }
            }
            Ok(())
        })?;
    // Phase 3: flush all written targets while holding the exit guard so a
    // shutdown can't interrupt mid-write.
    let _lock = exit.lock();
    if let Some(v) = first {
        v.write()?;
    }
    if let Some(v) = last {
        v.write()?;
    }
    if let Some(v) = min {
        v.write()?;
    }
    if let Some(v) = max {
        v.write()?;
    }
    if let Some(v) = average {
        v.write()?;
    }
    if let Some(v) = sum {
        v.write()?;
    }
    if let Some(v) = cumulative {
        v.write()?;
    }
    if let Some(v) = median {
        v.write()?;
    }
    if let Some(v) = pct10 {
        v.write()?;
    }
    if let Some(v) = pct25 {
        v.write()?;
    }
    if let Some(v) = pct75 {
        v.write()?;
    }
    if let Some(v) = pct90 {
        v.write()?;
    }
    Ok(())
}
/// Compute cumulative extension from a source vec.
///
/// Used when only cumulative needs to be extended from an existing source.
/// Resumes from `max_from` (clamped to what `cumulative` already holds),
/// seeding the running total from the previous cumulative entry, then flushes
/// under the exit guard.
pub fn compute_cumulative_extend<I, T>(
    max_from: I,
    source: &impl IterableVec<I, T>,
    cumulative: &mut EagerVec<PcoVec<I, T>>,
    exit: &Exit,
) -> Result<()>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
{
    cumulative.validate_computed_version_or_reset(source.version())?;
    // Never skip past data the target doesn't have yet.
    let start = max_from.min(I::from(cumulative.len()));
    // Running total = cumulative value just before the resume point, or zero
    // when starting from the beginning.
    let mut running = match start.decremented() {
        Some(prev) => cumulative.iter().get_unwrap(prev),
        None => T::from(0_usize),
    };
    for (i, v) in source.iter().enumerate().skip(start.to_usize()) {
        running += v;
        cumulative.truncate_push_at(i, running)?;
    }
    // Flush under the exit guard so shutdown can't interrupt the write.
    let _lock = exit.lock();
    cumulative.write()?;
    Ok(())
}
/// Compute coarser aggregations from already-aggregated source data.
///
/// This is used for dateindex → weekindex, monthindex, etc. where we derive
/// coarser aggregations from finer ones.
///
/// NOTE: Percentiles are NOT supported - they cannot be derived from finer percentiles.
///
/// For each coarse index `i`, the fine-grained range is
/// `first_indexes[i] .. first_indexes[i] + count_indexes[i]`, and each target
/// aggregate is re-aggregated from its same-named source aggregate
/// (min of mins, max of maxes, sum of sums, ...). A `source_*` vec is only
/// required when the matching target is `Some` (enforced by `expect`).
///
/// # Panics
/// Panics if `last` is requested for a group with `count == 0`, if a required
/// `source_*` vec is missing, or (via `unwrap`) on an empty min/max group.
#[allow(clippy::too_many_arguments)]
pub fn compute_aggregations_from_aligned<I, T, A>(
    max_from: I,
    first_indexes: &impl IterableVec<I, A>,
    count_indexes: &impl IterableVec<I, StoredU64>,
    exit: &Exit,
    // Source vecs (already aggregated at finer level)
    source_first: Option<&EagerVec<PcoVec<A, T>>>,
    source_last: Option<&EagerVec<PcoVec<A, T>>>,
    source_min: Option<&EagerVec<PcoVec<A, T>>>,
    source_max: Option<&EagerVec<PcoVec<A, T>>>,
    source_average: Option<&EagerVec<PcoVec<A, T>>>,
    source_sum: Option<&EagerVec<PcoVec<A, T>>>,
    // Target vecs
    mut first: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut last: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut min: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut max: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut average: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut sum: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut cumulative: Option<&mut EagerVec<PcoVec<I, T>>>,
) -> Result<()>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    A: VecIndex + VecValue + CheckedSub<A>,
{
    // Phase 1: validate every requested target; the resume point is the
    // earliest index any of them still needs.
    let combined_version = first_indexes.version() + count_indexes.version();
    let mut starting_index = max_from;
    if let Some(ref mut v) = first {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = last {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = min {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = max {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = average {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = sum {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = cumulative {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    let index = starting_index;
    let needs_first = first.is_some();
    let needs_last = last.is_some();
    let needs_min = min.is_some();
    let needs_max = max.is_some();
    let needs_average = average.is_some();
    let needs_sum = sum.is_some();
    let needs_cumulative = cumulative.is_some();
    // Nothing requested at all: done.
    if !needs_first
        && !needs_last
        && !needs_min
        && !needs_max
        && !needs_average
        && !needs_sum
        && !needs_cumulative
    {
        return Ok(());
    }
    let mut source_first_iter = source_first.map(|f| f.iter());
    let mut source_last_iter = source_last.map(|f| f.iter());
    let mut source_min_iter = source_min.map(|f| f.iter());
    let mut source_max_iter = source_max.map(|f| f.iter());
    let mut source_average_iter = source_average.map(|f| f.iter());
    let mut source_sum_iter = source_sum.map(|f| f.iter());
    // Seed the running cumulative from the entry just before the resume point.
    let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| {
        index.decremented().map_or(T::from(0_usize), |idx| {
            cumulative_vec.iter().get_unwrap(idx)
        })
    });
    let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
    // Phase 2: re-aggregate each fine-grained range into one coarse entry.
    first_indexes
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(idx, first_index)| -> Result<()> {
            let count_index = count_indexes_iter.next().unwrap();
            let count = *count_index as usize;
            if let Some(ref mut first_vec) = first {
                let source_iter = source_first_iter
                    .as_mut()
                    .expect("source_first required for first");
                let v = source_iter.get_unwrap(first_index);
                first_vec.truncate_push_at(idx, v)?;
            }
            if let Some(ref mut last_vec) = last {
                if count == 0 {
                    panic!("should not compute last if count can be 0");
                }
                let last_index = first_index + (count - 1);
                let source_iter = source_last_iter
                    .as_mut()
                    .expect("source_last required for last");
                let v = source_iter.get_unwrap(last_index);
                last_vec.truncate_push_at(idx, v)?;
            }
            if let Some(ref mut min_vec) = min {
                // Min of the finer mins.
                let source_iter = source_min_iter
                    .as_mut()
                    .expect("source_min required for min");
                source_iter.set_position(first_index);
                let min_val = source_iter.take(count).min().unwrap();
                min_vec.truncate_push_at(idx, min_val)?;
            }
            if let Some(ref mut max_vec) = max {
                // Max of the finer maxes.
                let source_iter = source_max_iter
                    .as_mut()
                    .expect("source_max required for max");
                source_iter.set_position(first_index);
                let max_val = source_iter.take(count).max().unwrap();
                max_vec.truncate_push_at(idx, max_val)?;
            }
            if let Some(ref mut average_vec) = average {
                // Average of the finer averages — an unweighted mean, not a
                // true mean of the underlying raw values (see TODO below).
                let source_iter = source_average_iter
                    .as_mut()
                    .expect("source_average required for average");
                source_iter.set_position(first_index);
                let mut len = 0usize;
                let sum_val = (&mut *source_iter)
                    .take(count)
                    .inspect(|_| len += 1)
                    .fold(T::from(0), |a, b| a + b);
                // TODO: Multiply by count then divide by cumulative for accuracy
                let average = sum_val / len;
                average_vec.truncate_push_at(idx, average)?;
            }
            if needs_sum || needs_cumulative {
                // Cumulative is derived from the same group sum.
                let source_iter = source_sum_iter
                    .as_mut()
                    .expect("source_sum required for sum/cumulative");
                source_iter.set_position(first_index);
                let sum_val = source_iter.take(count).fold(T::from(0), |a, b| a + b);
                if let Some(ref mut sum_vec) = sum {
                    sum_vec.truncate_push_at(idx, sum_val)?;
                }
                if let Some(ref mut cumulative_vec) = cumulative {
                    let t = cumulative_val.unwrap() + sum_val;
                    cumulative_val.replace(t);
                    cumulative_vec.truncate_push_at(idx, t)?;
                }
            }
            Ok(())
        })?;
    // Phase 3: flush all written targets under the exit guard.
    let _lock = exit.lock();
    if let Some(v) = first {
        v.write()?;
    }
    if let Some(v) = last {
        v.write()?;
    }
    if let Some(v) = min {
        v.write()?;
    }
    if let Some(v) = max {
        v.write()?;
    }
    if let Some(v) = average {
        v.write()?;
    }
    if let Some(v) = sum {
        v.write()?;
    }
    if let Some(v) = cumulative {
        v.write()?;
    }
    Ok(())
}

View File

@@ -0,0 +1,69 @@
//! ComputedBlock with full stats aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockFull, NumericValue};
/// Per-height series plus the full set of derived aggregates
/// (see `DerivedComputedBlockFull`), reachable directly via `Deref`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockFull<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // The stored source-of-truth series, keyed by block height.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    // Everything derived from `height`; `Deref` exposes its fields on `Self`.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockFull<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedBlockFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the height vec and every derived aggregate under `name`.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let version = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
        let rest = DerivedComputedBlockFull::forced_import(
            db,
            name,
            height.boxed_clone(),
            version,
            indexes,
        )?;
        Ok(Self { height, rest })
    }

    /// Fill the height vec via `compute`, then derive every other index from it.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedBlock using only LastVec aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockLast, NumericValue};
/// Per-height series with only the `last` derived aggregate
/// (see `DerivedComputedBlockLast`), reachable directly via `Deref`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // The stored source-of-truth series, keyed by block height.
    pub height: EagerVec<PcoVec<Height, T>>,
    // Everything derived from `height`; `Deref` exposes its fields on `Self`.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockLast<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedBlockLast<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the height vec and its last-only derived views under `name`.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let version = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
        let rest = DerivedComputedBlockLast::forced_import(
            db,
            name,
            height.boxed_clone(),
            version,
            indexes,
        )?;
        Ok(Self { height, rest })
    }

    /// Fill the height vec via `compute`, then derive every other index from it.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,13 @@
//! Block-level computed types (height + dateindex + periods + difficultyepoch).
//!
//! For simpler chain-level types (height + difficultyepoch only), see `chain/`.
//!
//! Variants by which aggregates they maintain: full (all), last, sum, sum+cumulative.
mod full;
mod last;
mod sum;
mod sum_cum;
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,69 @@
//! ComputedBlock using Sum-only aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, NumericValue};
/// Per-height series with only the `sum` derived aggregate
/// (see `DerivedComputedBlockSum`), reachable directly via `Deref`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockSum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // The stored source-of-truth series, keyed by block height.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    // Everything derived from `height`; `Deref` exposes its fields on `Self`.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockSum<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedBlockSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the height vec and its sum-only derived views under `name`.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let version = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
        let rest = DerivedComputedBlockSum::forced_import(
            db,
            name,
            height.boxed_clone(),
            version,
            indexes,
        )?;
        Ok(Self { height, rest })
    }

    /// Fill the height vec via `compute`, then derive every other index from it.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,97 @@
//! ComputedBlock using SumCum aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableCloneableVec, IterableVec, PcoVec, VecIndex,
};
use crate::{indexes, ComputeIndexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSumCum, NumericValue};
/// Per-height series with `sum` and `cumulative` derived aggregates
/// (see `DerivedComputedBlockSumCum`), reachable directly via `Deref`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockSumCum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // The stored source-of-truth series, keyed by block height.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    // Everything derived from `height`; `Deref` exposes its fields on `Self`.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockSumCum<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedBlockSumCum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the height vec and its sum/cumulative derived views under `name`.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
        let rest = DerivedComputedBlockSumCum::forced_import(
            db,
            name,
            height.boxed_clone(),
            v,
            indexes,
        )?;
        Ok(Self { height, rest })
    }
    /// Fill the height vec via `compute`, then derive every other index from it.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest.derive_from(indexes, starting_indexes, &self.height, exit)
    }
    /// Derive from an external height source (e.g., a LazyVec).
    ///
    /// Copies `source` into the stored height vec (resuming from
    /// `starting_indexes.height`, clamped to what's already stored), then
    /// derives the rest from the fresh copy.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        let target_len = source.len();
        // Don't resume past what the stored vec already contains.
        let starting_height = starting_indexes.height.to_usize().min(self.height.len());
        self.height
            .validate_computed_version_or_reset(source.version())?;
        let mut source_iter = source.iter();
        for h_idx in starting_height..target_len {
            let height = Height::from(h_idx);
            let value = source_iter.get_unwrap(height);
            self.height.truncate_push(height, value)?;
        }
        // NOTE(review): unlike the compute_* helpers in this crate, this
        // write() is not guarded by `exit.lock()` — confirm that's intentional.
        self.height.write()?;
        self.rest.derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for first-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyFirst, NumericValue};
/// Per-height series plus a lazy first-value-per-difficulty-epoch view.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // The stored source-of-truth series, keyed by block height.
    pub height: EagerVec<PcoVec<Height, T>>,
    // Lazily derived: first `height` value of each difficulty epoch.
    pub difficultyepoch: LazyFirst<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainFirst<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the height vec and wire up the lazy per-epoch view.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let version = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
        let epoch_index = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch =
            LazyFirst::from_source(name, version, height.boxed_clone(), epoch_index);
        Ok(Self {
            height,
            difficultyepoch,
        })
    }

    /// Fill the height vec via `compute`; the epoch view derives lazily on read.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for last-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyLast, NumericValue};
/// Per-height values stored eagerly, plus a lazy last-value-per-difficulty-epoch
/// view derived from them.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub height: EagerVec<PcoVec<Height, T>>,
    pub difficultyepoch: LazyLast<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainLast<T>
where
    T: NumericValue + JsonSchema,
{
    /// Opens (creating if needed) the height vec named `name` in `db` and
    /// attaches the per-difficulty-epoch last-value view sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let epoch_source = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        Ok(Self {
            difficultyepoch: LazyLast::from_source(
                name,
                ver,
                height.boxed_clone(),
                epoch_source,
            ),
            height,
        })
    }
    /// Runs `compute` against the eager height vec. The epoch view is lazy
    /// (it reads `height` through `LazyLast::from_source`) and is left alone.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for max-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyMax, NumericValue};
/// Per-height values stored eagerly, plus a lazy max-value-per-difficulty-epoch
/// view derived from them.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub height: EagerVec<PcoVec<Height, T>>,
    pub difficultyepoch: LazyMax<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainMax<T>
where
    T: NumericValue + JsonSchema,
{
    /// Opens (creating if needed) the height vec `name` inside `db`, then
    /// builds the lazy per-difficulty-epoch max view over it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let combined = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> =
            EagerVec::forced_import(db, name, combined)?;
        let epochs = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch =
            LazyMax::from_source(name, combined, height.boxed_clone(), epochs);
        Ok(Self {
            height,
            difficultyepoch,
        })
    }
    /// Applies `compute` to the eager height vec only; the epoch max view is
    /// lazy (sourced from `height` via `LazyMax::from_source`).
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for min-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyMin, NumericValue};
/// Per-height values stored eagerly, plus a lazy min-value-per-difficulty-epoch
/// view derived from them.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub height: EagerVec<PcoVec<Height, T>>,
    pub difficultyepoch: LazyMin<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainMin<T>
where
    T: NumericValue + JsonSchema,
{
    /// Opens (creating if needed) the height vec `name` inside `db`, then
    /// builds the lazy per-difficulty-epoch min view over it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v_total = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> =
            EagerVec::forced_import(db, name, v_total)?;
        let epoch_vec = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        Ok(Self {
            difficultyepoch: LazyMin::from_source(
                name,
                v_total,
                height.boxed_clone(),
                epoch_vec,
            ),
            height,
        })
    }
    /// Applies `compute` to the eager height vec only; the epoch min view is
    /// lazy (sourced from `height` via `LazyMin::from_source`).
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,13 @@
//! Chain-level computed types (height + difficultyepoch only).
//!
//! These are simpler than block-level types which include dateindex + periods.
mod first;
mod last;
mod max;
mod min;
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only average-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateAverage};
/// Date-indexed eager storage plus the derived average-aggregation date views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateAverage<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateAverage<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateAverage<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or creates) the dateindex vec under `name` and builds the
    /// derived average views sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Combine the caller's version with this wrapper's exactly once,
        // matching the `let v = version + VERSION;` convention used by the
        // chain-level computed types.
        let v = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, v)?;
        Ok(Self {
            rest: DerivedDateAverage::from_source(name, v, dateindex.boxed_clone(), indexes),
            dateindex,
        })
    }
    /// Runs `compute` on the eager dateindex vec; the `rest` views are
    /// sourced from it (see `from_source`) and get no explicit pass here.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only first-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateFirst};
/// Date-indexed eager storage plus the derived first-value date views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateFirst<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateFirst<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or creates) the dateindex vec under `name` and builds the
    /// derived first-value views sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Combine the caller's version with this wrapper's exactly once,
        // matching the `let v = version + VERSION;` convention used by the
        // chain-level computed types.
        let v = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, v)?;
        Ok(Self {
            rest: DerivedDateFirst::from_source(name, v, dateindex.boxed_clone(), indexes),
            dateindex,
        })
    }
    /// Runs `compute` on the eager dateindex vec; the `rest` views are
    /// sourced from it (see `from_source`) and get no explicit pass here.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,73 @@
//! ComputedVecsDate using only last-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateLast};
/// Date-indexed eager storage plus the derived last-value date views.
///
/// NOTE(review): naming is inconsistent with siblings (`ComputedVecsDate*`);
/// kept as `ComputedDateLast` because callers already import this name.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedDateLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateLast<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedDateLast<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or creates) the dateindex vec under `name` and builds the
    /// derived last-value views sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Combine the caller's version with this wrapper's exactly once,
        // matching the `let v = version + VERSION;` convention used by the
        // chain-level computed types.
        let v = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, v)?;
        Ok(Self {
            rest: DerivedDateLast::from_source(name, v, dateindex.boxed_clone(), indexes),
            dateindex,
        })
    }
    /// Runs `compute` on the eager dateindex vec; the `rest` views are
    /// sourced from it (see `from_source`) and get no explicit pass here.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
    /// Intentional no-op kept for signature parity with eager variants; all
    /// parameters are ignored.
    /// NOTE(review): assumes the derived views never need an eager pass here
    /// — confirm against the eager implementations' `compute_rest`.
    pub fn compute_rest(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        _dateindex: Option<&impl IterableVec<DateIndex, T>>,
    ) -> Result<()> {
        Ok(())
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only max-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateMax};
/// Date-indexed eager storage plus the derived max-value date views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateMax<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateMax<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or creates) the dateindex vec under `name` and builds the
    /// derived max views sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Combine the caller's version with this wrapper's exactly once,
        // matching the `let v = version + VERSION;` convention used by the
        // chain-level computed types.
        let v = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, v)?;
        Ok(Self {
            rest: DerivedDateMax::from_source(name, v, dateindex.boxed_clone(), indexes),
            dateindex,
        })
    }
    /// Runs `compute` on the eager dateindex vec; the `rest` views are
    /// sourced from it (see `from_source`) and get no explicit pass here.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only min-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateMin};
/// Date-indexed eager storage plus the derived min-value date views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateMin<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateMin<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or creates) the dateindex vec under `name` and builds the
    /// derived min views sourced from it.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Combine the caller's version with this wrapper's exactly once,
        // matching the `let v = version + VERSION;` convention used by the
        // chain-level computed types.
        let v = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, v)?;
        Ok(Self {
            rest: DerivedDateMin::from_source(name, v, dateindex.boxed_clone(), indexes),
            dateindex,
        })
    }
    /// Runs `compute` on the eager dateindex vec; the `rest` views are
    /// sourced from it (see `from_source`) and get no explicit pass here.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,11 @@
//! Date-level computed wrappers, one submodule per aggregation kind
//! (average / first / last / max / min), re-exported flat.
mod average;
mod first;
mod last;
mod max;
mod min;
pub use average::*;
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;

View File

@@ -0,0 +1,93 @@
//! DerivedComputedBlockDistribution - dateindex storage + lazy time periods + difficultyepoch.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, DerivedDateDistribution, Distribution, LazyDistribution, NumericValue,
},
};
/// Distribution stats kept eagerly at dateindex granularity, with lazily
/// built date-period views and a lazy per-difficulty-epoch view that reads
/// the height-level source directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockDistribution<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Eagerly computed per-day distribution (filled by `derive_from`).
    pub dateindex: Distribution<DateIndex, T>,
    /// Date-period views built from the per-day avg/min/max channels.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateDistribution<T>,
    /// Per-difficulty-epoch view built from the height-level source.
    pub difficultyepoch: LazyDistribution<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> DerivedComputedBlockDistribution<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the eager dateindex distribution from `db` and wires both
    /// derived views.
    ///
    /// `height_source` feeds the epoch view; it is boxed-cloned once per
    /// distribution channel, with the original moved in as the last one.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let dateindex = Distribution::forced_import(db, name, version + VERSION)?;
        let v = version + VERSION;
        // Date-period views read the already-aggregated per-day avg/min/max.
        let dates = DerivedDateDistribution::from_sources(
            name,
            v,
            dateindex.average.0.boxed_clone(),
            dateindex.minmax.min.0.boxed_clone(),
            dateindex.minmax.max.0.boxed_clone(),
            indexes,
        );
        // The epoch view aggregates straight from height-level values; the
        // same source backs all three channels.
        let difficultyepoch = LazyDistribution::from_distribution(
            name,
            v,
            height_source.boxed_clone(),
            height_source.boxed_clone(),
            height_source,
            indexes
                .block
                .difficultyepoch_to_difficultyepoch
                .boxed_clone(),
        );
        Ok(Self {
            dateindex,
            dates,
            difficultyepoch,
        })
    }
    /// Recomputes the eager per-day distribution from `height_source`,
    /// starting at `starting_indexes.dateindex`; only `self.dateindex` is
    /// written here — the other fields are views over existing sources.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.dateindex.compute(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,110 @@
//! DerivedComputedBlockFull - height_cumulative + dateindex storage + difficultyepoch + lazy time periods.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, CumulativeVec, DerivedDateFull, Full, LazyFull, NumericValue,
compute_cumulative_extend,
},
};
/// Full aggregation bundle: eager cumulative-by-height vec, eager per-day
/// stats, lazily derived date-period views, and a lazy per-difficulty-epoch
/// view over the height-level source.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockFull<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Running cumulative of the height-level values (filled by `derive_from`).
    pub height_cumulative: CumulativeVec<Height, T>,
    /// Eagerly computed per-day stats (filled by `derive_from`).
    pub dateindex: Full<DateIndex, T>,
    /// Date-period views built from the per-day stat channels.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateFull<T>,
    /// Per-difficulty-epoch view built from the height-level source and the
    /// cumulative vec.
    pub difficultyepoch: LazyFull<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> DerivedComputedBlockFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the eager vecs from `db` and wires the derived views.
    ///
    /// Field-init order matters: `difficultyepoch` boxed-clones from
    /// `height_source` and `height_cumulative.0` before both are moved into
    /// the struct.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height_cumulative = CumulativeVec::forced_import(db, name, v)?;
        let dateindex = Full::forced_import(db, name, v)?;
        Ok(Self {
            // Date-period views read the already-aggregated per-day channels
            // (avg, min, max, sum, cumulative).
            dates: DerivedDateFull::from_sources(
                name,
                v,
                dateindex.distribution.average.0.boxed_clone(),
                dateindex.distribution.minmax.min.0.boxed_clone(),
                dateindex.distribution.minmax.max.0.boxed_clone(),
                dateindex.sum_cum.sum.0.boxed_clone(),
                dateindex.sum_cum.cumulative.0.boxed_clone(),
                indexes,
            ),
            // Epoch view: the same height source backs the four stat
            // channels; the cumulative channel comes from height_cumulative.
            difficultyepoch: LazyFull::from_stats_aggregate(
                name,
                v,
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_cumulative.0.boxed_clone(),
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
            ),
            height_cumulative,
            dateindex,
        })
    }
    /// Recomputes the two eager vecs (`height_cumulative`, `dateindex`) from
    /// `height_source`; the remaining fields are views over existing sources.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        // Compute height_cumulative from external source
        self.compute_height_cumulative(starting_indexes.height, height_source, exit)?;
        // Compute dateindex aggregations
        self.dateindex.compute(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )?;
        Ok(())
    }
    /// Extends the cumulative vec from `height_source` starting no later
    /// than `max_from`.
    fn compute_height_cumulative(
        &mut self,
        max_from: Height,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        compute_cumulative_extend(max_from, height_source, &mut self.height_cumulative.0, exit)
    }
}

Some files were not shown because too many files have changed in this diff Show More