computer: simplified a bunch of things

This commit is contained in:
nym21
2026-02-26 19:37:22 +01:00
parent 9e4fe62de2
commit cccaf6b206
252 changed files with 3788 additions and 7279 deletions

View File

@@ -15,17 +15,24 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
// Block count height + cumulative first (rolling computed after window starts)
self.block_count.height.compute_range(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|h| (h, StoredU32::from(1_u32)),
exit,
)?;
self.block_count
.compute_cumulative(starting_indexes, exit)?;
self.block_count.cumulative.height.compute_cumulative(
starting_indexes.height,
&self.block_count.height,
exit,
)?;
// Compute rolling window starts (collect monotonic data once for all windows)
let monotonic_data: Vec<Timestamp> = time.timestamp_monotonic.collect();
self.compute_rolling_start_hours(&monotonic_data, time, starting_indexes, exit, 1, |s| {
&mut s.height_1h_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1, |s| {
&mut s.height_24h_ago
})?;
@@ -157,13 +164,19 @@ impl Vecs {
|s| &mut s.height_10y_ago,
)?;
// Compute rolling window block counts
// Compute rolling window block counts (both block_count's own rolling + separate block_count_sum)
let ws = crate::internal::WindowStarts {
_24h: &self.height_24h_ago,
_7d: &self.height_1w_ago,
_30d: &self.height_1m_ago,
_1y: &self.height_1y_ago,
};
self.block_count.rolling.compute_rolling_sum(
starting_indexes.height,
&ws,
&self.block_count.height,
exit,
)?;
self.block_count_sum.compute_rolling_sum(
starting_indexes.height,
&ws,
@@ -202,4 +215,33 @@ impl Vecs {
exit,
)?)
}
fn compute_rolling_start_hours<F>(
&mut self,
monotonic_data: &[Timestamp],
time: &time::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
hours: usize,
get_field: F,
) -> Result<()>
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
let mut prev = Height::ZERO;
Ok(get_field(self).compute_transform(
starting_indexes.height,
&time.timestamp_monotonic,
|(h, t, ..)| {
while t.difference_in_hours_between(monotonic_data[prev.to_usize()]) >= hours {
prev.increment();
if prev > h {
unreachable!()
}
}
(h, prev)
},
exit,
)?)
}
}

View File

@@ -5,7 +5,7 @@ use vecdb::{Database, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{BlockCountTarget, ComputedFromHeightSumCum, ConstantVecs, RollingWindows},
internal::{BlockCountTarget, ComputedFromHeightCumulativeSum, ConstantVecs, RollingWindows},
};
impl Vecs {
@@ -16,12 +16,13 @@ impl Vecs {
version,
indexes,
),
block_count: ComputedFromHeightSumCum::forced_import(
block_count: ComputedFromHeightCumulativeSum::forced_import(
db,
"block_count",
version,
indexes,
)?,
height_1h_ago: ImportableVec::forced_import(db, "height_1h_ago", version)?,
height_24h_ago: ImportableVec::forced_import(db, "height_24h_ago", version)?,
height_3d_ago: ImportableVec::forced_import(db, "height_3d_ago", version)?,
height_1w_ago: ImportableVec::forced_import(db, "height_1w_ago", version)?,

View File

@@ -2,14 +2,17 @@ use brk_traversable::Traversable;
use brk_types::{Height, StoredU32, StoredU64};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use crate::internal::{ComputedFromHeightSumCum, ConstantVecs, RollingWindows, WindowStarts};
use crate::internal::{
BlockWindowStarts, ComputedFromHeightCumulativeSum, ConstantVecs, RollingWindows, WindowStarts,
};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub block_count_target: ConstantVecs<StoredU64>,
pub block_count: ComputedFromHeightSumCum<StoredU32, M>,
pub block_count: ComputedFromHeightCumulativeSum<StoredU32, M>,
pub block_count_sum: RollingWindows<StoredU32, M>,
pub height_1h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_24h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_3d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
@@ -53,6 +56,14 @@ impl Vecs {
}
}
/// Get the 2 block-count rolling window start heights (1h, 24h) for tx-derived metrics.
pub fn block_window_starts(&self) -> BlockWindowStarts<'_> {
BlockWindowStarts {
_1h: &self.height_1h_ago,
_24h: &self.height_24h_ago,
}
}
pub fn start_vec(&self, days: usize) -> &EagerVec<PcoVec<Height, Height>> {
match days {
1 => &self.height_24h_ago,

View File

@@ -27,8 +27,8 @@ impl Vecs {
let count = CountVecs::forced_import(&db, version, indexes)?;
let interval = IntervalVecs::forced_import(&db, version, indexes)?;
let size = SizeVecs::forced_import(&db, version, indexer, indexes)?;
let weight = WeightVecs::forced_import(&db, version, indexer, indexes)?;
let size = SizeVecs::forced_import(&db, version, indexes)?;
let weight = WeightVecs::forced_import(&db, version, indexes)?;
let time = TimeVecs::forced_import(&db, version)?;
let difficulty = DifficultyVecs::forced_import(&db, version, indexer, indexes)?;
let halving = HalvingVecs::forced_import(&db, version, indexes)?;

View File

@@ -1,32 +1,29 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::Version;
use vecdb::{Database, ReadableCloneableVec};
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightCumFull, ComputedHeightDerivedCumFull},
internal::{ComputedFromHeightCumulativeFull, ComputedHeightDerivedCumulativeFull},
};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
vbytes: ComputedFromHeightCumFull::forced_import(
vbytes: ComputedFromHeightCumulativeFull::forced_import(
db,
"block_vbytes",
version,
indexes,
)?,
size: ComputedHeightDerivedCumFull::forced_import(
size: ComputedHeightDerivedCumulativeFull::forced_import(
db,
"block_size",
indexer.vecs.blocks.total_size.read_only_boxed_clone(),
version,
indexes,
)?,

View File

@@ -2,10 +2,10 @@ use brk_traversable::Traversable;
use brk_types::StoredU64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightCumFull, ComputedHeightDerivedCumFull};
use crate::internal::{ComputedFromHeightCumulativeFull, ComputedHeightDerivedCumulativeFull};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub vbytes: ComputedFromHeightCumFull<StoredU64, M>,
pub size: ComputedHeightDerivedCumFull<StoredU64, M>,
pub vbytes: ComputedFromHeightCumulativeFull<StoredU64, M>,
pub size: ComputedHeightDerivedCumulativeFull<StoredU64, M>,
}

View File

@@ -1,25 +1,22 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::Version;
use vecdb::{Database, ReadableCloneableVec};
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, ComputedHeightDerivedCumFull, RollingDistribution},
internal::{ComputedFromHeightLast, ComputedHeightDerivedCumulativeFull, RollingDistribution},
};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let weight = ComputedHeightDerivedCumFull::forced_import(
let weight = ComputedHeightDerivedCumulativeFull::forced_import(
db,
"block_weight",
indexer.vecs.blocks.weight.read_only_boxed_clone(),
version,
indexes,
)?;

View File

@@ -3,12 +3,12 @@ use brk_types::{StoredF32, Weight};
use vecdb::{Rw, StorageMode};
use crate::internal::{
ComputedFromHeightLast, ComputedHeightDerivedCumFull, RollingDistribution,
ComputedFromHeightLast, ComputedHeightDerivedCumulativeFull, RollingDistribution,
};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub weight: ComputedHeightDerivedCumFull<Weight, M>,
pub weight: ComputedHeightDerivedCumulativeFull<Weight, M>,
pub fullness: ComputedFromHeightLast<StoredF32, M>,
pub fullness_rolling: RollingDistribution<StoredF32, M>,
}

View File

@@ -5,19 +5,19 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightCumSum, ComputedFromHeightLast},
internal::{ComputedFromHeightCumulativeSum, ComputedFromHeightLast},
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
coinblocks_created: ComputedFromHeightCumSum::forced_import(
coinblocks_created: ComputedFromHeightCumulativeSum::forced_import(
db,
"coinblocks_created",
version,
indexes,
)?,
coinblocks_stored: ComputedFromHeightCumSum::forced_import(
coinblocks_stored: ComputedFromHeightCumulativeSum::forced_import(
db,
"coinblocks_stored",
version,

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightCumSum, ComputedFromHeightLast};
use crate::internal::{ComputedFromHeightCumulativeSum, ComputedFromHeightLast};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub coinblocks_created: ComputedFromHeightCumSum<StoredF64, M>,
pub coinblocks_stored: ComputedFromHeightCumSum<StoredF64, M>,
pub coinblocks_created: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub coinblocks_stored: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub liveliness: ComputedFromHeightLast<StoredF64, M>,
pub vaultedness: ComputedFromHeightLast<StoredF64, M>,
pub activity_to_vaultedness_ratio: ComputedFromHeightLast<StoredF64, M>,

View File

@@ -36,7 +36,7 @@ impl Vecs {
self.thermo_cap.height.compute_transform(
starting_indexes.height,
&*mining.rewards.subsidy.usd.height_cumulative,
&mining.rewards.subsidy.usd.cumulative.height,
|(i, v, ..)| (i, v),
exit,
)?;

View File

@@ -9,14 +9,13 @@ use super::{
ActivityVecs, AdjustedVecs, CapVecs, DB_NAME, PricingVecs, ReserveRiskVecs, SupplyVecs,
VERSION, ValueVecs, Vecs,
};
use crate::{indexes, prices};
use crate::indexes;
impl Vecs {
pub(crate) fn forced_import(
parent_path: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
@@ -24,7 +23,7 @@ impl Vecs {
let version = parent_version + VERSION;
let v1 = version + Version::ONE;
let activity = ActivityVecs::forced_import(&db, version, indexes)?;
let supply = SupplyVecs::forced_import(&db, v1, indexes, prices)?;
let supply = SupplyVecs::forced_import(&db, v1, indexes)?;
let value = ValueVecs::forced_import(&db, v1, indexes)?;
let cap = CapVecs::forced_import(&db, v1, indexes)?;
let pricing = PricingVecs::forced_import(&db, version, indexes)?;

View File

@@ -32,9 +32,10 @@ impl Vecs {
.metrics
.realized
.realized_price
.usd
.height;
self.vaulted_price.height.compute_divide(
self.vaulted_price.usd.height.compute_divide(
starting_indexes.height,
realized_price,
&activity.vaultedness.height,
@@ -46,10 +47,10 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.vaulted_price.height),
Some(&self.vaulted_price.usd.height),
)?;
self.active_price.height.compute_multiply(
self.active_price.usd.height.compute_multiply(
starting_indexes.height,
realized_price,
&activity.liveliness.height,
@@ -61,10 +62,10 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.active_price.height),
Some(&self.active_price.usd.height),
)?;
self.true_market_mean.height.compute_divide(
self.true_market_mean.usd.height.compute_divide(
starting_indexes.height,
&cap.investor_cap.height,
&supply.active_supply.btc.height,
@@ -76,11 +77,11 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.true_market_mean.height),
Some(&self.true_market_mean.usd.height),
)?;
// cointime_price = cointime_cap / circulating_supply
self.cointime_price.height.compute_divide(
self.cointime_price.usd.height.compute_divide(
starting_indexes.height,
&cap.cointime_cap.height,
circulating_supply,
@@ -92,7 +93,7 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.cointime_price.height),
Some(&self.cointime_price.usd.height),
)?;
Ok(())

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightRatio, PriceFromHeight},
internal::{ComputedFromHeightRatio, Price},
};
impl Vecs {
@@ -14,43 +14,43 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let vaulted_price = PriceFromHeight::forced_import(db, "vaulted_price", version, indexes)?;
let vaulted_price = Price::forced_import(db, "vaulted_price", version, indexes)?;
let vaulted_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"vaulted_price",
Some(&vaulted_price),
Some(&vaulted_price.usd),
version,
indexes,
true,
)?;
let active_price = PriceFromHeight::forced_import(db, "active_price", version, indexes)?;
let active_price = Price::forced_import(db, "active_price", version, indexes)?;
let active_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"active_price",
Some(&active_price),
Some(&active_price.usd),
version,
indexes,
true,
)?;
let true_market_mean =
PriceFromHeight::forced_import(db, "true_market_mean", version, indexes)?;
Price::forced_import(db, "true_market_mean", version, indexes)?;
let true_market_mean_ratio = ComputedFromHeightRatio::forced_import(
db,
"true_market_mean",
Some(&true_market_mean),
Some(&true_market_mean.usd),
version,
indexes,
true,
)?;
let cointime_price =
PriceFromHeight::forced_import(db, "cointime_price", version, indexes)?;
Price::forced_import(db, "cointime_price", version, indexes)?;
let cointime_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"cointime_price",
Some(&cointime_price),
Some(&cointime_price.usd),
version,
indexes,
true,

View File

@@ -3,14 +3,13 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ValueFromHeightLast, prices};
use crate::{indexes, internal::ValueFromHeightLast};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
) -> Result<Self> {
Ok(Self {
vaulted_supply: ValueFromHeightLast::forced_import(
@@ -18,14 +17,12 @@ impl Vecs {
"vaulted_supply",
version,
indexes,
prices,
)?,
active_supply: ValueFromHeightLast::forced_import(
db,
"active_supply",
version,
indexes,
prices,
)?,
})
}

View File

@@ -3,30 +3,30 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedFromHeightCumSum};
use crate::{indexes, internal::ComputedFromHeightCumulativeSum};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
cointime_value_destroyed: ComputedFromHeightCumSum::forced_import(
cointime_value_destroyed: ComputedFromHeightCumulativeSum::forced_import(
db,
"cointime_value_destroyed",
version,
indexes,
)?,
cointime_value_created: ComputedFromHeightCumSum::forced_import(
cointime_value_created: ComputedFromHeightCumulativeSum::forced_import(
db,
"cointime_value_created",
version,
indexes,
)?,
cointime_value_stored: ComputedFromHeightCumSum::forced_import(
cointime_value_stored: ComputedFromHeightCumulativeSum::forced_import(
db,
"cointime_value_stored",
version,
indexes,
)?,
vocdd: ComputedFromHeightCumSum::forced_import(
vocdd: ComputedFromHeightCumulativeSum::forced_import(
db,
"vocdd",
version + Version::ONE,

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::ComputedFromHeightCumSum;
use crate::internal::ComputedFromHeightCumulativeSum;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub cointime_value_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
pub cointime_value_created: ComputedFromHeightCumSum<StoredF64, M>,
pub cointime_value_stored: ComputedFromHeightCumSum<StoredF64, M>,
pub vocdd: ComputedFromHeightCumSum<StoredF64, M>,
pub cointime_value_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub cointime_value_created: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub cointime_value_stored: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub vocdd: ComputedFromHeightCumulativeSum<StoredF64, M>,
}

View File

@@ -1,75 +1,113 @@
//! Growth rate: new_addr_count / addr_count (global + per-type)
use brk_cohort::{ByAddressType, zip2_by_addresstype};
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredF32, StoredU64, Version};
use vecdb::ReadableCloneableVec;
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{LazyBinaryComputedFromHeightDistribution, RatioU64F32},
internal::{ComputedFromHeightDistribution, WindowStarts},
};
use super::{AddrCountsVecs, NewAddrCountVecs};
/// Growth rate by type - lazy ratio with distribution stats
pub type GrowthRateByType =
ByAddressType<LazyBinaryComputedFromHeightDistribution<StoredF32, StoredU64, StoredU64>>;
/// Growth rate: new_addr_count / addr_count (global + per-type)
#[derive(Clone, Traversable)]
pub struct GrowthRateVecs {
pub all: LazyBinaryComputedFromHeightDistribution<StoredF32, StoredU64, StoredU64>,
#[derive(Traversable)]
pub struct GrowthRateVecs<M: StorageMode = Rw> {
pub all: ComputedFromHeightDistribution<StoredF32, M>,
#[traversable(flatten)]
pub by_addresstype: GrowthRateByType,
pub by_addresstype: ByAddressType<ComputedFromHeightDistribution<StoredF32, M>>,
}
impl GrowthRateVecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
new_addr_count: &NewAddrCountVecs,
addr_count: &AddrCountsVecs,
) -> Result<Self> {
let all = make_growth_rate(
let all = ComputedFromHeightDistribution::forced_import(
db,
"growth_rate",
version,
indexes,
&new_addr_count.all.height,
&addr_count.all.count.height,
);
)?;
let by_addresstype: GrowthRateByType = zip2_by_addresstype(
&new_addr_count.by_addresstype,
&addr_count.by_addresstype,
|name, new, addr| {
Ok(make_growth_rate(
let by_addresstype: ByAddressType<ComputedFromHeightDistribution<StoredF32>> =
ByAddressType::new_with_name(|name| {
ComputedFromHeightDistribution::forced_import(
db,
&format!("{name}_growth_rate"),
version,
indexes,
&new.height,
&addr.count.height,
))
},
)?;
)
})?;
Ok(Self { all, by_addresstype })
}
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
new_addr_count: &NewAddrCountVecs,
addr_count: &AddrCountsVecs,
exit: &Exit,
) -> Result<()> {
self.all.compute(max_from, windows, exit, |target| {
compute_ratio(
target,
max_from,
&new_addr_count.all.height,
&addr_count.all.count.height,
exit,
)
})?;
for ((_, growth), ((_, new), (_, addr))) in self
.by_addresstype
.iter_mut()
.zip(
new_addr_count
.by_addresstype
.iter()
.zip(addr_count.by_addresstype.iter()),
)
{
growth.compute(max_from, windows, exit, |target| {
compute_ratio(
target,
max_from,
&new.height,
&addr.count.height,
exit,
)
})?;
}
Ok(())
}
}
fn make_growth_rate<V1, V2>(
name: &str,
version: Version,
indexes: &indexes::Vecs,
new: &V1,
addr: &V2,
) -> LazyBinaryComputedFromHeightDistribution<StoredF32, StoredU64, StoredU64>
where
V1: ReadableCloneableVec<Height, StoredU64>,
V2: ReadableCloneableVec<Height, StoredU64>,
{
LazyBinaryComputedFromHeightDistribution::<StoredF32, StoredU64, StoredU64>::forced_import::<
RatioU64F32,
>(name, version, new.read_only_boxed_clone(), addr.read_only_boxed_clone(), indexes)
fn compute_ratio(
target: &mut EagerVec<PcoVec<Height, StoredF32>>,
max_from: Height,
numerator: &impl ReadableVec<Height, StoredU64>,
denominator: &impl ReadableVec<Height, StoredU64>,
exit: &Exit,
) -> Result<()> {
target.compute_transform2(
max_from,
numerator,
denominator,
|(h, num, den, ..)| {
let n = *num as f64;
let d = *den as f64;
let ratio = if d == 0.0 { 0.0 } else { n / d };
(h, StoredF32::from(ratio))
},
exit,
)?;
Ok(())
}

View File

@@ -11,8 +11,8 @@ use brk_types::{
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Database, ReadableVec, WritableVec, ImportOptions, ImportableVec,
Reader, Rw, Stamp, StorageMode,
AnyStoredVec, AnyVec, BytesVec, Database, ImportOptions, ImportableVec, ReadableVec, Reader,
Rw, Stamp, StorageMode, WritableVec,
};
use super::super::AddressTypeToTypeIndexMap;

View File

@@ -1,25 +1,23 @@
//! New address count: delta of total_addr_count (global + per-type)
//! New address count: delta of total_addr_count (global + per-type)
use brk_cohort::{ByAddressType, zip_by_addresstype};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{StoredU64, Version};
use brk_types::{Height, StoredU64, Version};
use vecdb::{Database, Exit, Ident, Rw, StorageMode};
use crate::{ComputeIndexes, indexes, internal::LazyComputedFromHeightFull};
use crate::{indexes, internal::{LazyComputedFromHeightFull, WindowStarts}};
use super::TotalAddrCountVecs;
/// New addresses by type - identity transform with stored day1 stats
/// The delta is computed at the compute step, not lazily
pub type NewAddrCountByType<M = Rw> = ByAddressType<LazyComputedFromHeightFull<StoredU64, StoredU64, M>>;
/// New address count per block (global + per-type)
#[derive(Traversable)]
pub struct NewAddrCountVecs<M: StorageMode = Rw> {
pub all: LazyComputedFromHeightFull<StoredU64, StoredU64, M>,
#[traversable(flatten)]
pub by_addresstype: NewAddrCountByType<M>,
pub by_addresstype: ByAddressType<LazyComputedFromHeightFull<StoredU64, StoredU64, M>>,
}
impl NewAddrCountVecs {
@@ -37,7 +35,7 @@ impl NewAddrCountVecs {
indexes,
)?;
let by_addresstype: NewAddrCountByType =
let by_addresstype: ByAddressType<LazyComputedFromHeightFull<StoredU64, StoredU64>> =
zip_by_addresstype(&total_addr_count.by_addresstype, |name, total| {
LazyComputedFromHeightFull::forced_import::<Ident>(
db,
@@ -54,14 +52,15 @@ impl NewAddrCountVecs {
})
}
pub(crate) fn compute_cumulative(
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()> {
self.all.compute_cumulative(starting_indexes, exit)?;
self.all.compute(max_from, windows, exit)?;
for vecs in self.by_addresstype.values_mut() {
vecs.compute_cumulative(starting_indexes, exit)?;
vecs.compute(max_from, windows, exit)?;
}
Ok(())
}

View File

@@ -1,57 +1,85 @@
//! Total address count: addr_count + empty_addr_count (global + per-type)
use brk_cohort::{ByAddressType, zip2_by_addresstype};
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{StoredU64, Version};
use vecdb::ReadableCloneableVec;
use brk_types::{Height, StoredU64, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{indexes, internal::{LazyBinaryComputedFromHeightLast, U64Plus}};
use crate::{indexes, internal::ComputedFromHeightLast};
use super::AddrCountsVecs;
/// Total addresses by type - lazy sum with all derived indexes
pub type TotalAddrCountByType =
ByAddressType<LazyBinaryComputedFromHeightLast<StoredU64, StoredU64, StoredU64>>;
/// Total address count (global + per-type) with all derived indexes
#[derive(Clone, Traversable)]
pub struct TotalAddrCountVecs {
pub all: LazyBinaryComputedFromHeightLast<StoredU64, StoredU64, StoredU64>,
#[derive(Traversable)]
pub struct TotalAddrCountVecs<M: StorageMode = Rw> {
pub all: ComputedFromHeightLast<StoredU64, M>,
#[traversable(flatten)]
pub by_addresstype: TotalAddrCountByType,
pub by_addresstype: ByAddressType<ComputedFromHeightLast<StoredU64, M>>,
}
impl TotalAddrCountVecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
addr_count: &AddrCountsVecs,
empty_addr_count: &AddrCountsVecs,
) -> Result<Self> {
let all = LazyBinaryComputedFromHeightLast::forced_import::<U64Plus>(
let all = ComputedFromHeightLast::forced_import(
db,
"total_addr_count",
version,
addr_count.all.count.height.read_only_boxed_clone(),
empty_addr_count.all.count.height.read_only_boxed_clone(),
indexes,
);
)?;
let by_addresstype: TotalAddrCountByType = zip2_by_addresstype(
&addr_count.by_addresstype,
&empty_addr_count.by_addresstype,
|name, addr, empty| {
Ok(LazyBinaryComputedFromHeightLast::forced_import::<U64Plus>(
let by_addresstype: ByAddressType<ComputedFromHeightLast<StoredU64>> = ByAddressType::new_with_name(
|name| {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_total_addr_count"),
version,
addr.count.height.read_only_boxed_clone(),
empty.count.height.read_only_boxed_clone(),
indexes,
))
)
},
)?;
Ok(Self { all, by_addresstype })
}
/// Eagerly compute total = addr_count + empty_addr_count.
pub(crate) fn compute(
&mut self,
max_from: Height,
addr_count: &AddrCountsVecs,
empty_addr_count: &AddrCountsVecs,
exit: &Exit,
) -> Result<()> {
self.all.height.compute_transform2(
max_from,
&addr_count.all.count.height,
&empty_addr_count.all.count.height,
|(h, a, b, ..)| (h, StoredU64::from(*a + *b)),
exit,
)?;
for ((_, total), ((_, addr), (_, empty))) in self
.by_addresstype
.iter_mut()
.zip(
addr_count
.by_addresstype
.iter()
.zip(empty_addr_count.by_addresstype.iter()),
)
{
total.height.compute_transform2(
max_from,
&addr.count.height,
&empty.count.height,
|(h, a, b, ..)| (h, StoredU64::from(*a + *b)),
exit,
)?;
}
Ok(())
}
}

View File

@@ -1,24 +1,24 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_types::{AnyAddressDataIndexEnum, FundedAddressData, OutputType, TypeIndex};
use brk_types::{
AnyAddressDataIndexEnum, EmptyAddressData, FundedAddressData, OutputType, TxIndex, TypeIndex,
};
use smallvec::SmallVec;
use crate::distribution::{
address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs},
compute::VecsReaders,
};
use super::super::cohort::{
EmptyAddressDataWithSource, FundedAddressDataWithSource, TxIndexVec, WithAddressDataSource,
update_tx_counts,
};
use super::super::cohort::{WithAddressDataSource, update_tx_counts};
use super::lookup::AddressLookup;
/// Cache for address data within a flush interval.
pub struct AddressCache {
/// Addresses with non-zero balance
funded: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
funded: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
/// Addresses that became empty (zero balance)
empty: AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
empty: AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
}
impl Default for AddressCache {
@@ -49,7 +49,7 @@ impl AddressCache {
/// Merge address data into funded cache.
#[inline]
pub(crate) fn merge_funded(&mut self, data: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>) {
pub(crate) fn merge_funded(&mut self, data: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>) {
self.funded.merge_mut(data);
}
@@ -63,7 +63,7 @@ impl AddressCache {
}
/// Update transaction counts for addresses.
pub(crate) fn update_tx_counts(&mut self, txindex_vecs: AddressTypeToTypeIndexMap<TxIndexVec>) {
pub(crate) fn update_tx_counts(&mut self, txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>) {
update_tx_counts(&mut self.funded, &mut self.empty, txindex_vecs);
}
@@ -71,8 +71,8 @@ impl AddressCache {
pub(crate) fn take(
&mut self,
) -> (
AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
) {
(
std::mem::take(&mut self.empty),
@@ -93,7 +93,7 @@ pub(crate) fn load_uncached_address_data(
vr: &VecsReaders,
any_address_indexes: &AnyAddressIndexesVecs,
addresses_data: &AddressesDataVecs,
) -> Result<Option<FundedAddressDataWithSource>> {
) -> Result<Option<WithAddressDataSource<FundedAddressData>>> {
// Check if this is a new address (typeindex >= first for this height)
let first = *first_addressindexes.get(address_type).unwrap();
if first <= typeindex {

View File

@@ -1,10 +1,8 @@
use brk_types::{FundedAddressData, OutputType, TypeIndex};
use brk_types::{EmptyAddressData, FundedAddressData, OutputType, TypeIndex};
use crate::distribution::address::AddressTypeToTypeIndexMap;
use super::super::cohort::{
EmptyAddressDataWithSource, FundedAddressDataWithSource, WithAddressDataSource,
};
use super::super::cohort::WithAddressDataSource;
/// Tracking status of an address - determines cohort update strategy.
#[derive(Clone, Copy)]
@@ -19,8 +17,8 @@ pub enum TrackingStatus {
/// Context for looking up and storing address data during block processing.
pub struct AddressLookup<'a> {
pub funded: &'a mut AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
pub empty: &'a mut AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
pub funded: &'a mut AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
pub empty: &'a mut AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
}
impl<'a> AddressLookup<'a> {
@@ -28,7 +26,7 @@ impl<'a> AddressLookup<'a> {
&mut self,
output_type: OutputType,
type_index: TypeIndex,
) -> (&mut FundedAddressDataWithSource, TrackingStatus) {
) -> (&mut WithAddressDataSource<FundedAddressData>, TrackingStatus) {
use std::collections::hash_map::Entry;
let map = self.funded.get_mut(output_type).unwrap();
@@ -83,7 +81,7 @@ impl<'a> AddressLookup<'a> {
&mut self,
output_type: OutputType,
type_index: TypeIndex,
) -> &mut FundedAddressDataWithSource {
) -> &mut WithAddressDataSource<FundedAddressData> {
self.funded
.get_mut(output_type)
.unwrap()

View File

@@ -7,7 +7,7 @@ use vecdb::AnyVec;
use crate::distribution::{AddressTypeToTypeIndexMap, AddressesDataVecs};
use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource};
use super::with_source::WithAddressDataSource;
/// Process funded address data updates.
///
@@ -17,7 +17,7 @@ use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource
/// - Transition empty -> funded: delete from empty, push to funded
pub(crate) fn process_funded_addresses(
addresses_data: &mut AddressesDataVecs,
funded_updates: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
funded_updates: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
) -> Result<AddressTypeToTypeIndexMap<AnyAddressIndex>> {
let total: usize = funded_updates.iter().map(|(_, m)| m.len()).sum();
@@ -28,13 +28,13 @@ pub(crate) fn process_funded_addresses(
for (address_type, items) in funded_updates.into_iter() {
for (typeindex, source) in items {
match source {
FundedAddressDataWithSource::New(data) => {
WithAddressDataSource::New(data) => {
pushes.push((address_type, typeindex, data));
}
FundedAddressDataWithSource::FromFunded(index, data) => {
WithAddressDataSource::FromFunded(index, data) => {
updates.push((index, data));
}
FundedAddressDataWithSource::FromEmpty(empty_index, data) => {
WithAddressDataSource::FromEmpty(empty_index, data) => {
deletes.push(empty_index);
pushes.push((address_type, typeindex, data));
}
@@ -88,7 +88,7 @@ pub(crate) fn process_funded_addresses(
/// - Transition funded -> empty: delete from funded, push to empty
pub(crate) fn process_empty_addresses(
addresses_data: &mut AddressesDataVecs,
empty_updates: AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
empty_updates: AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
) -> Result<AddressTypeToTypeIndexMap<AnyAddressIndex>> {
let total: usize = empty_updates.iter().map(|(_, m)| m.len()).sum();
@@ -99,13 +99,13 @@ pub(crate) fn process_empty_addresses(
for (address_type, items) in empty_updates.into_iter() {
for (typeindex, source) in items {
match source {
EmptyAddressDataWithSource::New(data) => {
WithAddressDataSource::New(data) => {
pushes.push((address_type, typeindex, data));
}
EmptyAddressDataWithSource::FromEmpty(index, data) => {
WithAddressDataSource::FromEmpty(index, data) => {
updates.push((index, data));
}
EmptyAddressDataWithSource::FromFunded(funded_index, data) => {
WithAddressDataSource::FromFunded(funded_index, data) => {
deletes.push(funded_index);
pushes.push((address_type, typeindex, data));
}

View File

@@ -1,6 +1,9 @@
use brk_types::{EmptyAddressData, FundedAddressData, TxIndex};
use smallvec::SmallVec;
use crate::distribution::address::AddressTypeToTypeIndexMap;
use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource, TxIndexVec};
use super::with_source::WithAddressDataSource;
/// Update tx_count for addresses based on unique transactions they participated in.
///
@@ -11,9 +14,9 @@ use super::with_source::{EmptyAddressDataWithSource, FundedAddressDataWithSource
/// Addresses are looked up in funded_cache first, then empty_cache.
/// NOTE: This should be called AFTER merging parallel-fetched address data into funded_cache.
pub(crate) fn update_tx_counts(
funded_cache: &mut AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
empty_cache: &mut AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
mut txindex_vecs: AddressTypeToTypeIndexMap<TxIndexVec>,
funded_cache: &mut AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
empty_cache: &mut AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
mut txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>,
) {
// First, deduplicate txindex_vecs for addresses that appear multiple times in a block
for (_, map) in txindex_vecs.iter_mut() {

View File

@@ -1,16 +1,4 @@
use brk_types::{
EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, TxIndex,
};
use smallvec::SmallVec;
/// Funded address data with source tracking for flush operations.
pub type FundedAddressDataWithSource = WithAddressDataSource<FundedAddressData>;
/// Empty address data with source tracking for flush operations.
pub type EmptyAddressDataWithSource = WithAddressDataSource<EmptyAddressData>;
/// SmallVec for transaction indexes - most addresses have few transactions per block.
pub type TxIndexVec = SmallVec<[TxIndex; 4]>;
use brk_types::{EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex};
/// Address data wrapped with its source location for flush operations.
///

View File

@@ -1,8 +1,9 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_types::{Height, OutputType, Sats, TxIndex, TypeIndex};
use brk_types::{FundedAddressData, Height, OutputType, Sats, TxIndex, TypeIndex};
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use crate::distribution::{
address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs},
@@ -14,7 +15,7 @@ use crate::distribution::address::HeightToAddressTypeToVec;
use super::super::{
cache::{AddressCache, load_uncached_address_data},
cohort::{FundedAddressDataWithSource, TxIndexVec},
cohort::WithAddressDataSource,
};
/// Result of processing inputs for a block.
@@ -24,9 +25,9 @@ pub struct InputsResult {
/// Per-height, per-address-type sent data: (typeindex, value) for each address.
pub sent_data: HeightToAddressTypeToVec<(TypeIndex, Sats)>,
/// Address data looked up during processing, keyed by (address_type, typeindex).
pub address_data: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
pub address_data: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
/// Transaction indexes per address for tx_count tracking.
pub txindex_vecs: AddressTypeToTypeIndexMap<TxIndexVec>,
pub txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>,
}
/// Process inputs (spent UTXOs) for a block.
@@ -101,9 +102,9 @@ pub(crate) fn process_inputs(
);
let mut sent_data = HeightToAddressTypeToVec::with_capacity(estimated_unique_heights);
let mut address_data =
AddressTypeToTypeIndexMap::<FundedAddressDataWithSource>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(estimated_per_type);
let mut txindex_vecs =
AddressTypeToTypeIndexMap::<TxIndexVec>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<SmallVec<[TxIndex; 4]>>::with_capacity(estimated_per_type);
for (prev_height, value, output_type, addr_info) in items {
height_to_sent

View File

@@ -1,6 +1,7 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_types::{Sats, TxIndex, TypeIndex};
use brk_types::{FundedAddressData, Sats, TxIndex, TypeIndex};
use smallvec::SmallVec;
use crate::distribution::{
address::{
@@ -12,7 +13,7 @@ use crate::distribution::{
use super::super::{
cache::{AddressCache, load_uncached_address_data},
cohort::{FundedAddressDataWithSource, TxIndexVec},
cohort::WithAddressDataSource,
};
/// Result of processing outputs for a block.
@@ -22,9 +23,9 @@ pub struct OutputsResult {
/// Per-address-type received data: (typeindex, value) for each address.
pub received_data: AddressTypeToVec<(TypeIndex, Sats)>,
/// Address data looked up during processing, keyed by (address_type, typeindex).
pub address_data: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
pub address_data: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
/// Transaction indexes per address for tx_count tracking.
pub txindex_vecs: AddressTypeToTypeIndexMap<TxIndexVec>,
pub txindex_vecs: AddressTypeToTypeIndexMap<SmallVec<[TxIndex; 4]>>,
}
/// Process outputs (new UTXOs) for a block.
@@ -51,9 +52,9 @@ pub(crate) fn process_outputs(
let mut transacted = Transacted::default();
let mut received_data = AddressTypeToVec::with_capacity(estimated_per_type);
let mut address_data =
AddressTypeToTypeIndexMap::<FundedAddressDataWithSource>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<WithAddressDataSource<FundedAddressData>>::with_capacity(estimated_per_type);
let mut txindex_vecs =
AddressTypeToTypeIndexMap::<TxIndexVec>::with_capacity(estimated_per_type);
AddressTypeToTypeIndexMap::<SmallVec<[TxIndex; 4]>>::with_capacity(estimated_per_type);
// Single pass: read from pre-collected vecs and accumulate
for (local_idx, txoutdata) in txoutdata_vec.iter().enumerate() {

View File

@@ -5,14 +5,14 @@ use brk_cohort::{
};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Version};
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{ComputeIndexes, blocks, distribution::DynCohortVecs, indexes, prices};
use crate::distribution::metrics::{CohortMetricsBase, SupplyMetrics};
use crate::distribution::metrics::CohortMetricsBase;
use super::{super::traits::CohortVecs, vecs::AddressCohortVecs};
@@ -24,16 +24,11 @@ pub struct AddressCohorts<M: StorageMode = Rw>(AddressGroups<AddressCohortVecs<M
impl AddressCohorts {
/// Import all Address cohorts from database.
///
/// `all_supply` is the supply metrics from the UTXO "all" cohort, used as global
/// sources for `*_rel_to_market_cap` ratios.
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
states_path: &Path,
all_supply: &SupplyMetrics,
) -> Result<Self> {
let v = version + VERSION;
@@ -43,7 +38,7 @@ impl AddressCohorts {
has_state: bool|
-> Result<AddressCohortVecs> {
let sp = if has_state { Some(states_path) } else { None };
AddressCohortVecs::forced_import(db, filter, name, v, indexes, prices, sp, all_supply)
AddressCohortVecs::forced_import(db, filter, name, v, indexes, sp)
};
let full = |f: Filter, name: &'static str| create(f, name, true);
@@ -135,16 +130,18 @@ impl AddressCohorts {
}
/// Second phase of post-processing: compute relative metrics.
pub(crate) fn compute_rest_part2<HM>(
pub(crate) fn compute_rest_part2<HM, AS>(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &HM,
all_supply_sats: &AS,
exit: &Exit,
) -> Result<()>
where
HM: ReadableVec<Height, Dollars> + Sync,
AS: ReadableVec<Height, Sats> + Sync,
{
self.0.par_iter_mut().try_for_each(|v| {
v.compute_rest_part2(
@@ -152,6 +149,7 @@ impl AddressCohorts {
prices,
starting_indexes,
height_to_market_cap,
all_supply_sats,
exit,
)
})

View File

@@ -3,7 +3,7 @@ use std::path::Path;
use brk_cohort::{CohortContext, Filter, Filtered};
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, StoredF64, StoredU64, Version};
use brk_types::{Cents, Dollars, Height, Sats, StoredF64, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, WritableVec, ReadableVec, Rw, StorageMode};
@@ -15,7 +15,7 @@ use crate::{
prices,
};
use crate::distribution::metrics::{BasicCohortMetrics, CohortMetricsBase, ImportConfig, SupplyMetrics};
use crate::distribution::metrics::{BasicCohortMetrics, CohortMetricsBase, ImportConfig};
use super::super::traits::{CohortVecs, DynCohortVecs};
@@ -41,19 +41,13 @@ pub struct AddressCohortVecs<M: StorageMode = Rw> {
impl AddressCohortVecs {
/// Import address cohort from database.
///
/// `all_supply` is the supply metrics from the "all" cohort, used as global
/// sources for `*_rel_to_market_cap` ratios.
#[allow(clippy::too_many_arguments)]
pub(crate) fn forced_import(
db: &Database,
filter: Filter,
name: &str,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
states_path: Option<&Path>,
all_supply: &SupplyMetrics,
) -> Result<Self> {
let full_name = CohortContext::Address.full_name(&filter, name);
@@ -64,7 +58,6 @@ impl AddressCohortVecs {
context: CohortContext::Address,
version,
indexes,
prices,
};
Ok(Self {
@@ -73,7 +66,7 @@ impl AddressCohortVecs {
state: states_path
.map(|path| Box::new(AddressCohortState::new(path, &full_name))),
metrics: BasicCohortMetrics::forced_import(&cfg, all_supply)?,
metrics: BasicCohortMetrics::forced_import(&cfg)?,
addr_count: ComputedFromHeightLast::forced_import(
db,
@@ -287,6 +280,7 @@ impl CohortVecs for AddressCohortVecs {
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.metrics.compute_rest_part2(
@@ -294,6 +288,7 @@ impl CohortVecs for AddressCohortVecs {
prices,
starting_indexes,
height_to_market_cap,
all_supply_sats,
exit,
)?;
Ok(())

View File

@@ -1,5 +1,5 @@
use brk_error::Result;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use vecdb::{Exit, ReadableVec};
use crate::{ComputeIndexes, blocks, prices};
@@ -75,6 +75,7 @@ pub trait CohortVecs: DynCohortVecs {
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()>;
}

View File

@@ -11,7 +11,7 @@ use brk_types::{
StoredF32, Timestamp, Version,
};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use vecdb::{AnyStoredVec, Database, Exit, ReadOnlyClone, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use crate::{
ComputeIndexes, blocks,
@@ -24,7 +24,7 @@ use crate::{
use crate::distribution::metrics::{
AdjustedCohortMetrics, AllCohortMetrics, BasicCohortMetrics, CohortMetricsBase,
ExtendedAdjustedCohortMetrics, ExtendedCohortMetrics, ImportConfig, PeakRegretCohortMetrics,
RealizedBase, SupplyMetrics,
SupplyMetrics,
};
use super::vecs::UTXOCohortVecs;
@@ -68,7 +68,6 @@ impl UTXOCohorts<Rw> {
db: &Database,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
states_path: &Path,
) -> Result<Self> {
let v = version + VERSION;
@@ -82,7 +81,6 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v + Version::ONE,
indexes,
prices,
};
let all_supply = SupplyMetrics::forced_import(&all_cfg)?;
@@ -90,7 +88,6 @@ impl UTXOCohorts<Rw> {
// age_range: ExtendedCohortMetrics with full state
let age_range = {
let s = &all_supply;
ByAgeRange::try_new(&|f: Filter, name: &'static str| -> Result<_> {
let full_name = CohortContext::Utxo.full_name(&f, name);
let cfg = ImportConfig {
@@ -100,12 +97,11 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
state,
ExtendedCohortMetrics::forced_import(&cfg, s)?,
ExtendedCohortMetrics::forced_import(&cfg)?,
))
})?
};
@@ -121,12 +117,11 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
let state = Some(Box::new(UTXOCohortState::new(states_path, &full_name)));
Ok(UTXOCohortVecs::new(
state,
BasicCohortMetrics::forced_import(&cfg, &all_supply)?,
BasicCohortMetrics::forced_import(&cfg)?,
))
};
@@ -135,18 +130,13 @@ impl UTXOCohorts<Rw> {
let year = ByYear::try_new(&basic_separate)?;
let type_ = BySpendableType::try_new(&basic_separate)?;
// Phase 3: Get up_to_1h realized for adjusted computation.
let up_to_1h_realized: &RealizedBase = &age_range.up_to_1h.metrics.realized;
// Phase 4: Import "all" cohort with pre-imported supply.
// Phase 3: Import "all" cohort with pre-imported supply.
let all = UTXOCohortVecs::new(
None,
AllCohortMetrics::forced_import_with_supply(&all_cfg, all_supply, up_to_1h_realized)?,
AllCohortMetrics::forced_import_with_supply(&all_cfg, all_supply)?,
);
let all_supply_ref = &all.metrics.supply;
// Phase 5: Import aggregate cohorts.
// Phase 4: Import aggregate cohorts.
// sth: ExtendedAdjustedCohortMetrics
let sth = {
@@ -159,14 +149,11 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
UTXOCohortVecs::new(
None,
ExtendedAdjustedCohortMetrics::forced_import(
&cfg,
all_supply_ref,
up_to_1h_realized,
)?,
)
};
@@ -182,17 +169,15 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
UTXOCohortVecs::new(
None,
ExtendedCohortMetrics::forced_import(&cfg, all_supply_ref)?,
ExtendedCohortMetrics::forced_import(&cfg)?,
)
};
// max_age: AdjustedCohortMetrics (adjusted + peak_regret)
let max_age = {
let s = all_supply_ref;
ByMaxAge::try_new(&|f: Filter, name: &'static str| -> Result<_> {
let full_name = CohortContext::Utxo.full_name(&f, name);
let cfg = ImportConfig {
@@ -202,18 +187,16 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
Ok(UTXOCohortVecs::new(
None,
AdjustedCohortMetrics::forced_import(&cfg, s, up_to_1h_realized)?,
AdjustedCohortMetrics::forced_import(&cfg)?,
))
})?
};
// min_age: PeakRegretCohortMetrics
let min_age = {
let s = all_supply_ref;
ByMinAge::try_new(&|f: Filter, name: &'static str| -> Result<_> {
let full_name = CohortContext::Utxo.full_name(&f, name);
let cfg = ImportConfig {
@@ -223,11 +206,10 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
Ok(UTXOCohortVecs::new(
None,
PeakRegretCohortMetrics::forced_import(&cfg, s)?,
PeakRegretCohortMetrics::forced_import(&cfg)?,
))
})?
};
@@ -243,11 +225,10 @@ impl UTXOCohorts<Rw> {
context: CohortContext::Utxo,
version: v,
indexes,
prices,
};
Ok(UTXOCohortVecs::new(
None,
BasicCohortMetrics::forced_import(&cfg, all_supply_ref)?,
BasicCohortMetrics::forced_import(&cfg)?,
))
};
@@ -647,18 +628,32 @@ impl UTXOCohorts<Rw> {
where
HM: ReadableVec<Height, Dollars> + Sync,
{
// Get up_to_1h value sources for adjusted computation (cloned to avoid borrow conflicts).
let up_to_1h_value_created = self.age_range.up_to_1h.metrics.realized.value_created.height.read_only_clone();
let up_to_1h_value_destroyed = self.age_range.up_to_1h.metrics.realized.value_destroyed.height.read_only_clone();
// "all" cohort computed first (no all_supply_sats needed).
self.all.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&up_to_1h_value_created,
&up_to_1h_value_destroyed,
exit,
)?;
// Clone all_supply_sats for non-all cohorts.
let all_supply_sats = self.all.metrics.supply.total.sats.height.read_only_clone();
self.sth.metrics.compute_rest_part2(
blocks,
prices,
starting_indexes,
height_to_market_cap,
&up_to_1h_value_created,
&up_to_1h_value_destroyed,
&all_supply_sats,
exit,
)?;
self.lth.metrics.compute_rest_part2(
@@ -666,6 +661,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)?;
self.age_range.par_iter_mut().try_for_each(|v| {
@@ -674,6 +670,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -683,6 +680,9 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&up_to_1h_value_created,
&up_to_1h_value_destroyed,
&all_supply_sats,
exit,
)
})?;
@@ -692,6 +692,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -701,6 +702,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -710,6 +712,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -719,6 +722,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -728,6 +732,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -737,6 +742,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;
@@ -746,6 +752,7 @@ impl UTXOCohorts<Rw> {
prices,
starting_indexes,
height_to_market_cap,
&all_supply_sats,
exit,
)
})?;

View File

@@ -75,6 +75,7 @@ where
prices,
starting_indexes,
height_to_market_cap,
&utxo_cohorts.all.metrics.supply.total.sats.height,
exit,
)?;

View File

@@ -65,10 +65,10 @@ pub(crate) fn process_blocks(
let height_to_first_txoutindex = &indexer.vecs.outputs.first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.inputs.first_txinindex;
// From transactions and inputs/outputs (via .height or .height.sum_cum.sum patterns):
// From transactions and inputs/outputs (via .height or .height.sum_cumulative.sum patterns):
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.sum_cum.sum.0;
let height_to_input_count = &inputs.count.height.sum_cum.sum.0;
let height_to_output_count = &outputs.count.total_count.sum_cumulative.sum.0;
let height_to_input_count = &inputs.count.height.sum_cumulative.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.timestamp_monotonic;
let height_to_date = &blocks.time.date;

View File

@@ -1,17 +1,14 @@
use std::time::Instant;
use brk_error::Result;
use brk_types::Height;
use brk_types::{EmptyAddressData, FundedAddressData, Height};
use rayon::prelude::*;
use tracing::info;
use vecdb::{AnyStoredVec, WritableVec, Stamp};
use crate::distribution::{
Vecs,
block::{
EmptyAddressDataWithSource, FundedAddressDataWithSource, process_empty_addresses,
process_funded_addresses,
},
block::{WithAddressDataSource, process_empty_addresses, process_funded_addresses},
state::BlockState,
};
@@ -28,8 +25,8 @@ use super::super::address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAdd
pub(crate) fn process_address_updates(
addresses_data: &mut AddressesDataVecs,
address_indexes: &mut AnyAddressIndexesVecs,
empty_updates: AddressTypeToTypeIndexMap<EmptyAddressDataWithSource>,
funded_updates: AddressTypeToTypeIndexMap<FundedAddressDataWithSource>,
empty_updates: AddressTypeToTypeIndexMap<WithAddressDataSource<EmptyAddressData>>,
funded_updates: AddressTypeToTypeIndexMap<WithAddressDataSource<FundedAddressData>>,
) -> Result<()> {
info!("Processing address updates...");

View File

@@ -6,7 +6,7 @@ use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, Sto
use crate::{
ComputeIndexes, blocks,
internal::{ComputedFromHeightCumSum, LazyComputedValueFromHeightCum, ValueEmaFromHeight},
internal::{ComputedFromHeightCumulativeSum, LazyComputedValueFromHeightCumulative, ValueEmaFromHeight},
};
use super::ImportConfig;
@@ -15,7 +15,7 @@ use super::ImportConfig;
#[derive(Traversable)]
pub struct ActivityMetrics<M: StorageMode = Rw> {
/// Total satoshis sent at each height + derived indexes
pub sent: LazyComputedValueFromHeightCum<M>,
pub sent: LazyComputedValueFromHeightCumulative<M>,
/// 14-day EMA of sent supply (sats, btc, usd)
pub sent_14d_ema: ValueEmaFromHeight<M>,
@@ -27,22 +27,21 @@ pub struct ActivityMetrics<M: StorageMode = Rw> {
pub satdays_destroyed: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
/// Coin-blocks destroyed (in BTC rather than sats)
pub coinblocks_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
pub coinblocks_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
/// Coin-days destroyed (in BTC rather than sats)
pub coindays_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
pub coindays_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
}
impl ActivityMetrics {
/// Import activity metrics from database.
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
sent: LazyComputedValueFromHeightCum::forced_import(
sent: LazyComputedValueFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("sent"),
cfg.version,
cfg.indexes,
cfg.prices,
)?,
sent_14d_ema: ValueEmaFromHeight::forced_import(
@@ -64,14 +63,14 @@ impl ActivityMetrics {
cfg.version,
)?,
coinblocks_destroyed: ComputedFromHeightCumSum::forced_import(
coinblocks_destroyed: ComputedFromHeightCumulativeSum::forced_import(
cfg.db,
&cfg.name("coinblocks_destroyed"),
cfg.version,
cfg.indexes,
)?,
coindays_destroyed: ComputedFromHeightCumSum::forced_import(
coindays_destroyed: ComputedFromHeightCumulativeSum::forced_import(
cfg.db,
&cfg.name("coindays_destroyed"),
cfg.version,
@@ -165,8 +164,6 @@ impl ActivityMetrics {
) -> Result<()> {
let window_starts = blocks.count.window_starts();
self.sent.compute_cumulative(starting_indexes.height, exit)?;
// 14-day rolling average of sent (sats and dollars)
self.sent_14d_ema.compute_rolling_average(
starting_indexes.height,

View File

@@ -1,7 +1,7 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
@@ -25,7 +25,7 @@ pub struct AdjustedCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedWithAdjusted<M>>,
pub cost_basis: Box<CostBasisBase<M>>,
pub unrealized: Box<UnrealizedWithPeakRegret<M>>,
pub relative: Box<RelativeWithPeakRegret>,
pub relative: Box<RelativeWithPeakRegret<M>>,
}
impl CohortMetricsBase for AdjustedCohortMetrics {
@@ -73,21 +73,12 @@ impl CohortMetricsBase for AdjustedCohortMetrics {
impl AdjustedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
all_supply: &SupplyMetrics,
up_to_1h: &RealizedBase,
) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedWithPeakRegret::forced_import(cfg)?;
let realized = RealizedWithAdjusted::forced_import(cfg, up_to_1h)?;
let realized = RealizedWithAdjusted::forced_import(cfg)?;
let relative = RelativeWithPeakRegret::forced_import(
cfg,
&unrealized.base,
&supply,
all_supply,
&realized.base,
&unrealized.peak_regret_ext.peak_regret,
);
let relative = RelativeWithPeakRegret::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -101,12 +92,16 @@ impl AdjustedCohortMetrics {
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
@@ -115,8 +110,23 @@ impl AdjustedCohortMetrics {
starting_indexes,
&self.supply.total.btc.height,
height_to_market_cap,
up_to_1h_value_created,
up_to_1h_value_destroyed,
exit,
)
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized.base,
&self.realized.base,
&self.supply.total.sats.height,
height_to_market_cap,
all_supply_sats,
&self.unrealized.peak_regret_ext.peak_regret.height,
exit,
)?;
Ok(())
}
}

View File

@@ -26,23 +26,49 @@ pub struct AllCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedWithExtendedAdjusted<M>>,
pub cost_basis: Box<CostBasisWithExtended<M>>,
pub unrealized: Box<UnrealizedWithPeakRegret<M>>,
pub relative: Box<RelativeForAll>,
pub relative: Box<RelativeForAll<M>>,
}
impl CohortMetricsBase for AllCohortMetrics {
fn filter(&self) -> &Filter { &self.filter }
fn supply(&self) -> &SupplyMetrics { &self.supply }
fn supply_mut(&mut self) -> &mut SupplyMetrics { &mut self.supply }
fn outputs(&self) -> &OutputsMetrics { &self.outputs }
fn outputs_mut(&mut self) -> &mut OutputsMetrics { &mut self.outputs }
fn activity(&self) -> &ActivityMetrics { &self.activity }
fn activity_mut(&mut self) -> &mut ActivityMetrics { &mut self.activity }
fn realized_base(&self) -> &RealizedBase { &self.realized }
fn realized_base_mut(&mut self) -> &mut RealizedBase { &mut self.realized }
fn unrealized_base(&self) -> &UnrealizedBase { &self.unrealized }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.unrealized }
fn cost_basis_base(&self) -> &CostBasisBase { &self.cost_basis }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { &mut self.cost_basis }
fn filter(&self) -> &Filter {
&self.filter
}
fn supply(&self) -> &SupplyMetrics {
&self.supply
}
fn supply_mut(&mut self) -> &mut SupplyMetrics {
&mut self.supply
}
fn outputs(&self) -> &OutputsMetrics {
&self.outputs
}
fn outputs_mut(&mut self) -> &mut OutputsMetrics {
&mut self.outputs
}
fn activity(&self) -> &ActivityMetrics {
&self.activity
}
fn activity_mut(&mut self) -> &mut ActivityMetrics {
&mut self.activity
}
fn realized_base(&self) -> &RealizedBase {
&self.realized
}
fn realized_base_mut(&mut self) -> &mut RealizedBase {
&mut self.realized
}
fn unrealized_base(&self) -> &UnrealizedBase {
&self.unrealized
}
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase {
&mut self.unrealized
}
fn cost_basis_base(&self) -> &CostBasisBase {
&self.cost_basis
}
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase {
&mut self.cost_basis
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.supply.validate_computed_versions(base_version)?;
self.activity.validate_computed_versions(base_version)?;
@@ -50,14 +76,21 @@ impl CohortMetricsBase for AllCohortMetrics {
Ok(())
}
fn compute_then_truncate_push_unrealized_states(
&mut self, height: Height, height_price: Cents, state: &mut CohortState,
&mut self,
height: Height,
height_price: Cents,
state: &mut CohortState,
) -> Result<()> {
state.apply_pending();
self.cost_basis.truncate_push_minmax(height, state)?;
let (height_unrealized_state, _) = state.compute_unrealized_states(height_price, None);
self.unrealized.base.truncate_push(height, &height_unrealized_state)?;
self.unrealized
.base
.truncate_push(height, &height_unrealized_state)?;
let spot = height_price.to_dollars();
self.cost_basis.extended.truncate_push_percentiles(height, state, spot)?;
self.cost_basis
.extended
.truncate_push_percentiles(height, state, spot)?;
Ok(())
}
fn collect_all_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
@@ -82,18 +115,11 @@ impl AllCohortMetrics {
pub(crate) fn forced_import_with_supply(
cfg: &ImportConfig,
supply: SupplyMetrics,
up_to_1h: &RealizedBase,
) -> Result<Self> {
let unrealized = UnrealizedWithPeakRegret::forced_import(cfg)?;
let realized = RealizedWithExtendedAdjusted::forced_import(cfg, up_to_1h)?;
let realized = RealizedWithExtendedAdjusted::forced_import(cfg)?;
let relative = RelativeForAll::forced_import(
cfg,
&unrealized.base,
&supply,
&realized.base,
&unrealized.peak_regret_ext.peak_regret,
);
let relative = RelativeForAll::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -107,12 +133,15 @@ impl AllCohortMetrics {
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
@@ -121,8 +150,21 @@ impl AllCohortMetrics {
starting_indexes,
&self.supply.total.btc.height,
height_to_market_cap,
up_to_1h_value_created,
up_to_1h_value_destroyed,
exit,
)
}
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized.base,
&self.realized.base,
&self.supply.total.sats.height,
height_to_market_cap,
&self.unrealized.peak_regret_ext.peak_regret.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,15 +1,15 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
use crate::{ComputeIndexes, blocks, distribution::state::CohortState, prices};
use crate::distribution::metrics::{
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics, RealizedBase,
RelativeWithRelToAll, SupplyMetrics, UnrealizedBase,
ActivityMetrics, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics,
RealizedBase, RelativeWithRelToAll, SupplyMetrics, UnrealizedBase,
};
/// Basic cohort metrics: no extensions, with relative (rel_to_all).
@@ -24,7 +24,7 @@ pub struct BasicCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedBase<M>>,
pub cost_basis: Box<CostBasisBase<M>>,
pub unrealized: Box<UnrealizedBase<M>>,
pub relative: Box<RelativeWithRelToAll>,
pub relative: Box<RelativeWithRelToAll<M>>,
}
impl CohortMetricsBase for BasicCohortMetrics {
@@ -70,15 +70,12 @@ impl CohortMetricsBase for BasicCohortMetrics {
impl BasicCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
all_supply: &SupplyMetrics,
) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedBase::forced_import(cfg)?;
let relative = RelativeWithRelToAll::forced_import(
cfg, &unrealized, &supply, all_supply, &realized,
);
let relative = RelativeWithRelToAll::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -102,6 +99,7 @@ impl BasicCohortMetrics {
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2_base(
@@ -111,7 +109,19 @@ impl BasicCohortMetrics {
&self.supply.total.btc.height,
height_to_market_cap,
exit,
)
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized,
&self.realized,
&self.supply.total.sats.height,
height_to_market_cap,
all_supply_sats,
exit,
)?;
Ok(())
}
pub(crate) fn compute_from_stateful(

View File

@@ -1,7 +1,7 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
@@ -25,7 +25,7 @@ pub struct ExtendedCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedWithExtended<M>>,
pub cost_basis: Box<CostBasisWithExtended<M>>,
pub unrealized: Box<UnrealizedWithPeakRegret<M>>,
pub relative: Box<RelativeWithExtended>,
pub relative: Box<RelativeWithExtended<M>>,
}
impl CohortMetricsBase for ExtendedCohortMetrics {
@@ -77,20 +77,12 @@ impl CohortMetricsBase for ExtendedCohortMetrics {
impl ExtendedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
all_supply: &SupplyMetrics,
) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedWithPeakRegret::forced_import(cfg)?;
let realized = RealizedWithExtended::forced_import(cfg)?;
let relative = RelativeWithExtended::forced_import(
cfg,
&unrealized.base,
&supply,
all_supply,
&realized.base,
&unrealized.peak_regret_ext.peak_regret,
);
let relative = RelativeWithExtended::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -110,6 +102,7 @@ impl ExtendedCohortMetrics {
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
@@ -119,7 +112,21 @@ impl ExtendedCohortMetrics {
&self.supply.total.btc.height,
height_to_market_cap,
exit,
)
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized.base,
&self.realized.base,
&self.supply.total.sats.height,
height_to_market_cap,
all_supply_sats,
&self.supply.total.usd.height,
&self.unrealized.peak_regret_ext.peak_regret.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,7 +1,7 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
@@ -25,7 +25,7 @@ pub struct ExtendedAdjustedCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedWithExtendedAdjusted<M>>,
pub cost_basis: Box<CostBasisWithExtended<M>>,
pub unrealized: Box<UnrealizedWithPeakRegret<M>>,
pub relative: Box<RelativeWithExtended>,
pub relative: Box<RelativeWithExtended<M>>,
}
impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
@@ -76,21 +76,12 @@ impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
impl ExtendedAdjustedCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
all_supply: &SupplyMetrics,
up_to_1h: &RealizedBase,
) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedWithPeakRegret::forced_import(cfg)?;
let realized = RealizedWithExtendedAdjusted::forced_import(cfg, up_to_1h)?;
let realized = RealizedWithExtendedAdjusted::forced_import(cfg)?;
let relative = RelativeWithExtended::forced_import(
cfg,
&unrealized.base,
&supply,
all_supply,
&realized.base,
&unrealized.peak_regret_ext.peak_regret,
);
let relative = RelativeWithExtended::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -104,12 +95,16 @@ impl ExtendedAdjustedCohortMetrics {
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2(
@@ -118,8 +113,24 @@ impl ExtendedAdjustedCohortMetrics {
starting_indexes,
&self.supply.total.btc.height,
height_to_market_cap,
up_to_1h_value_created,
up_to_1h_value_destroyed,
exit,
)
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized.base,
&self.realized.base,
&self.supply.total.sats.height,
height_to_market_cap,
all_supply_sats,
&self.supply.total.usd.height,
&self.unrealized.peak_regret_ext.peak_regret.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,7 +1,7 @@
use brk_cohort::Filter;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Height, Version};
use brk_types::{Cents, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Exit, ReadableVec, Rw, StorageMode};
@@ -24,7 +24,7 @@ pub struct PeakRegretCohortMetrics<M: StorageMode = Rw> {
pub realized: Box<RealizedBase<M>>,
pub cost_basis: Box<CostBasisBase<M>>,
pub unrealized: Box<UnrealizedWithPeakRegret<M>>,
pub relative: Box<RelativeWithPeakRegret>,
pub relative: Box<RelativeWithPeakRegret<M>>,
}
impl CohortMetricsBase for PeakRegretCohortMetrics {
@@ -72,20 +72,12 @@ impl CohortMetricsBase for PeakRegretCohortMetrics {
impl PeakRegretCohortMetrics {
pub(crate) fn forced_import(
cfg: &ImportConfig,
all_supply: &SupplyMetrics,
) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedWithPeakRegret::forced_import(cfg)?;
let realized = RealizedBase::forced_import(cfg)?;
let relative = RelativeWithPeakRegret::forced_import(
cfg,
&unrealized.base,
&supply,
all_supply,
&realized,
&unrealized.peak_regret_ext.peak_regret,
);
let relative = RelativeWithPeakRegret::forced_import(cfg)?;
Ok(Self {
filter: cfg.filter.clone(),
@@ -105,6 +97,7 @@ impl PeakRegretCohortMetrics {
prices: &prices::Vecs,
starting_indexes: &ComputeIndexes,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.realized.compute_rest_part2_base(
@@ -114,7 +107,20 @@ impl PeakRegretCohortMetrics {
&self.supply.total.btc.height,
height_to_market_cap,
exit,
)
)?;
self.relative.compute(
starting_indexes.height,
&self.unrealized.base,
&self.realized,
&self.supply.total.sats.height,
height_to_market_cap,
all_supply_sats,
&self.unrealized.peak_regret_ext.peak_regret.height,
exit,
)?;
Ok(())
}
}

View File

@@ -2,7 +2,7 @@ use brk_cohort::{CohortContext, Filter};
use brk_types::Version;
use vecdb::Database;
use crate::{indexes, prices};
use crate::indexes;
/// Configuration for importing metrics.
pub struct ImportConfig<'a> {
@@ -12,7 +12,6 @@ pub struct ImportConfig<'a> {
pub context: CohortContext,
pub version: Version,
pub indexes: &'a indexes::Vecs,
pub prices: &'a prices::Vecs,
}
impl<'a> ImportConfig<'a> {

View File

@@ -6,7 +6,7 @@ use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::{
ComputeIndexes,
distribution::state::CohortState,
internal::{ComputedFromHeightLast, Price, PriceFromHeight},
internal::{ComputedFromHeightLast, Price},
};
use crate::distribution::metrics::ImportConfig;
@@ -24,13 +24,13 @@ pub struct CostBasisBase<M: StorageMode = Rw> {
impl CostBasisBase {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
min: PriceFromHeight::forced_import(
min: Price::forced_import(
cfg.db,
&cfg.name("min_cost_basis"),
cfg.version,
cfg.indexes,
)?,
max: PriceFromHeight::forced_import(
max: Price::forced_import(
cfg.db,
&cfg.name("max_cost_basis"),
cfg.version,
@@ -40,7 +40,7 @@ impl CostBasisBase {
}
pub(crate) fn min_stateful_height_len(&self) -> usize {
self.min.height.len().min(self.max.height.len())
self.min.usd.height.len().min(self.max.usd.height.len())
}
pub(crate) fn truncate_push_minmax(
@@ -48,14 +48,14 @@ impl CostBasisBase {
height: Height,
state: &CohortState,
) -> Result<()> {
self.min.height.truncate_push(
self.min.usd.height.truncate_push(
height,
state
.cost_basis_data_first_key_value()
.map(|(cents, _)| cents.into())
.unwrap_or(Dollars::NAN),
)?;
self.max.height.truncate_push(
self.max.usd.height.truncate_push(
height,
state
.cost_basis_data_last_key_value()
@@ -67,8 +67,8 @@ impl CostBasisBase {
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.min.height as &mut dyn AnyStoredVec,
&mut self.max.height,
&mut self.min.usd.height as &mut dyn AnyStoredVec,
&mut self.max.usd.height,
]
}
@@ -78,14 +78,14 @@ impl CostBasisBase {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.min.height.compute_min_of_others(
self.min.usd.height.compute_min_of_others(
starting_indexes.height,
&others.iter().map(|v| &v.min.height).collect::<Vec<_>>(),
&others.iter().map(|v| &v.min.usd.height).collect::<Vec<_>>(),
exit,
)?;
self.max.height.compute_max_of_others(
self.max.usd.height.compute_max_of_others(
starting_indexes.height,
&others.iter().map(|v| &v.max.height).collect::<Vec<_>>(),
&others.iter().map(|v| &v.max.usd.height).collect::<Vec<_>>(),
exit,
)?;
Ok(())

View File

@@ -100,14 +100,14 @@ impl CostBasisExtended {
.vecs
.iter_mut()
.flatten()
.map(|v| &mut v.height as &mut dyn AnyStoredVec),
.map(|v| &mut v.usd.height as &mut dyn AnyStoredVec),
);
vecs.extend(
self.invested_capital
.vecs
.iter_mut()
.flatten()
.map(|v| &mut v.height as &mut dyn AnyStoredVec),
.map(|v| &mut v.usd.height as &mut dyn AnyStoredVec),
);
vecs.push(&mut self.spot_cost_basis_percentile.height);
vecs.push(&mut self.spot_invested_capital_percentile.height);

View File

@@ -152,13 +152,24 @@ pub trait CohortMetricsBase: Send + Sync {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.supply_mut()
.compute(prices, starting_indexes.height, exit)?;
self.supply_mut()
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.outputs_mut()
.compute_rest(blocks, starting_indexes, exit)?;
self.activity_mut()
.sent
.compute(prices, starting_indexes.height, exit)?;
self.activity_mut()
.compute_rest_part1(blocks, starting_indexes, exit)?;
self.realized_base_mut()
.sent_in_profit
.compute(prices, starting_indexes.height, exit)?;
self.realized_base_mut()
.sent_in_loss
.compute(prices, starting_indexes.height, exit)?;
self.realized_base_mut()
.compute_rest_part1(starting_indexes, exit)?;

View File

@@ -1,26 +1,21 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, StoredF64, Version};
use vecdb::{Exit, Ident, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, StoredF64, Version};
use vecdb::{Exit, Ident, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use crate::{
ComputeIndexes, blocks,
internal::{
ComputedFromHeightLast, DollarsMinus, LazyBinaryFromHeightLast,
LazyFromHeightLast, Ratio64,
},
internal::{ComputedFromHeightLast, LazyFromHeightLast, Ratio64},
};
use crate::distribution::metrics::ImportConfig;
use super::RealizedBase;
/// Adjusted realized metrics (only for adjusted cohorts: all, sth, max_age).
#[derive(Traversable)]
pub struct RealizedAdjusted<M: StorageMode = Rw> {
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub adjusted_value_created: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
pub adjusted_value_destroyed: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
// === Adjusted Value (computed: cohort - up_to_1h) ===
pub adjusted_value_created: ComputedFromHeightLast<Dollars, M>,
pub adjusted_value_destroyed: ComputedFromHeightLast<Dollars, M>,
// === Adjusted Value Created/Destroyed Rolling Sums ===
pub adjusted_value_created_24h: ComputedFromHeightLast<Dollars, M>,
@@ -34,10 +29,10 @@ pub struct RealizedAdjusted<M: StorageMode = Rw> {
// === Adjusted SOPR (rolling window ratios) ===
pub adjusted_sopr: LazyFromHeightLast<StoredF64>,
pub adjusted_sopr_24h: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub adjusted_sopr_7d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub adjusted_sopr_30d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub adjusted_sopr_1y: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub adjusted_sopr_24h: ComputedFromHeightLast<StoredF64, M>,
pub adjusted_sopr_7d: ComputedFromHeightLast<StoredF64, M>,
pub adjusted_sopr_30d: ComputedFromHeightLast<StoredF64, M>,
pub adjusted_sopr_1y: ComputedFromHeightLast<StoredF64, M>,
pub adjusted_sopr_24h_7d_ema: ComputedFromHeightLast<StoredF64, M>,
pub adjusted_sopr_7d_ema: LazyFromHeightLast<StoredF64>,
pub adjusted_sopr_24h_30d_ema: ComputedFromHeightLast<StoredF64, M>,
@@ -45,35 +40,32 @@ pub struct RealizedAdjusted<M: StorageMode = Rw> {
}
impl RealizedAdjusted {
pub(crate) fn forced_import(
cfg: &ImportConfig,
base: &RealizedBase,
up_to_1h: &RealizedBase,
) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
macro_rules! import_rolling {
($name:expr) => {
ComputedFromHeightLast::forced_import(cfg.db, &cfg.name($name), cfg.version + v1, cfg.indexes)?
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name($name),
cfg.version + v1,
cfg.indexes,
)?
};
}
let adjusted_value_created = LazyBinaryFromHeightLast::from_both_binary_block::<
DollarsMinus, Dollars, Dollars, Dollars, Dollars,
>(
let adjusted_value_created = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_value_created"),
cfg.version,
&base.value_created,
&up_to_1h.value_created,
);
let adjusted_value_destroyed = LazyBinaryFromHeightLast::from_both_binary_block::<
DollarsMinus, Dollars, Dollars, Dollars, Dollars,
>(
cfg.indexes,
)?;
let adjusted_value_destroyed = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_value_destroyed"),
cfg.version,
&base.value_destroyed,
&up_to_1h.value_destroyed,
);
cfg.indexes,
)?;
let adjusted_value_created_24h = import_rolling!("adjusted_value_created_24h");
let adjusted_value_created_7d = import_rolling!("adjusted_value_created_7d");
@@ -84,31 +76,50 @@ impl RealizedAdjusted {
let adjusted_value_destroyed_30d = import_rolling!("adjusted_value_destroyed_30d");
let adjusted_value_destroyed_1y = import_rolling!("adjusted_value_destroyed_1y");
let adjusted_sopr_24h = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("adjusted_sopr_24h"), cfg.version + v1, &adjusted_value_created_24h, &adjusted_value_destroyed_24h,
);
let adjusted_sopr_7d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("adjusted_sopr_7d"), cfg.version + v1, &adjusted_value_created_7d, &adjusted_value_destroyed_7d,
);
let adjusted_sopr_30d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("adjusted_sopr_30d"), cfg.version + v1, &adjusted_value_created_30d, &adjusted_value_destroyed_30d,
);
let adjusted_sopr_1y = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("adjusted_sopr_1y"), cfg.version + v1, &adjusted_value_created_1y, &adjusted_value_destroyed_1y,
);
let adjusted_sopr = LazyFromHeightLast::from_binary::<Ident, Dollars, Dollars>(
&cfg.name("adjusted_sopr"), cfg.version + v1, &adjusted_sopr_24h,
let adjusted_sopr_24h = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_sopr_24h"),
cfg.version + v1,
cfg.indexes,
)?;
let adjusted_sopr_7d = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_sopr_7d"),
cfg.version + v1,
cfg.indexes,
)?;
let adjusted_sopr_30d = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_sopr_30d"),
cfg.version + v1,
cfg.indexes,
)?;
let adjusted_sopr_1y = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("adjusted_sopr_1y"),
cfg.version + v1,
cfg.indexes,
)?;
let adjusted_sopr = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("adjusted_sopr"),
cfg.version + v1,
adjusted_sopr_24h.height.read_only_boxed_clone(),
&adjusted_sopr_24h,
);
let adjusted_sopr_24h_7d_ema = import_rolling!("adjusted_sopr_24h_7d_ema");
let adjusted_sopr_7d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("adjusted_sopr_7d_ema"), cfg.version + v1,
adjusted_sopr_24h_7d_ema.height.read_only_boxed_clone(), &adjusted_sopr_24h_7d_ema,
&cfg.name("adjusted_sopr_7d_ema"),
cfg.version + v1,
adjusted_sopr_24h_7d_ema.height.read_only_boxed_clone(),
&adjusted_sopr_24h_7d_ema,
);
let adjusted_sopr_24h_30d_ema = import_rolling!("adjusted_sopr_24h_30d_ema");
let adjusted_sopr_30d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("adjusted_sopr_30d_ema"), cfg.version + v1,
adjusted_sopr_24h_30d_ema.height.read_only_boxed_clone(), &adjusted_sopr_24h_30d_ema,
&cfg.name("adjusted_sopr_30d_ema"),
cfg.version + v1,
adjusted_sopr_24h_30d_ema.height.read_only_boxed_clone(),
&adjusted_sopr_24h_30d_ema,
);
Ok(RealizedAdjusted {
@@ -134,36 +145,137 @@ impl RealizedAdjusted {
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2_adj(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &ComputeIndexes,
base_value_created: &impl ReadableVec<Height, Dollars>,
base_value_destroyed: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
// Compute adjusted_value_created = base.value_created - up_to_1h.value_created
self.adjusted_value_created.height.compute_subtract(
starting_indexes.height,
base_value_created,
up_to_1h_value_created,
exit,
)?;
self.adjusted_value_destroyed.height.compute_subtract(
starting_indexes.height,
base_value_destroyed,
up_to_1h_value_destroyed,
exit,
)?;
// Adjusted value created/destroyed rolling sums
self.adjusted_value_created_24h.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &self.adjusted_value_created.height, exit)?;
self.adjusted_value_created_7d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &self.adjusted_value_created.height, exit)?;
self.adjusted_value_created_30d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &self.adjusted_value_created.height, exit)?;
self.adjusted_value_created_1y.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &self.adjusted_value_created.height, exit)?;
self.adjusted_value_destroyed_24h.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &self.adjusted_value_destroyed.height, exit)?;
self.adjusted_value_destroyed_7d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &self.adjusted_value_destroyed.height, exit)?;
self.adjusted_value_destroyed_30d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &self.adjusted_value_destroyed.height, exit)?;
self.adjusted_value_destroyed_1y.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &self.adjusted_value_destroyed.height, exit)?;
// Adjusted SOPR EMAs
self.adjusted_sopr_24h_7d_ema.height.compute_rolling_average(
self.adjusted_value_created_24h.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_24h_ago,
&self.adjusted_value_created.height,
exit,
)?;
self.adjusted_value_created_7d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.adjusted_sopr.height,
&self.adjusted_value_created.height,
exit,
)?;
self.adjusted_sopr_24h_30d_ema.height.compute_rolling_average(
self.adjusted_value_created_30d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.adjusted_sopr.height,
&self.adjusted_value_created.height,
exit,
)?;
self.adjusted_value_created_1y.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1y_ago,
&self.adjusted_value_created.height,
exit,
)?;
self.adjusted_value_destroyed_24h
.height
.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_24h_ago,
&self.adjusted_value_destroyed.height,
exit,
)?;
self.adjusted_value_destroyed_7d
.height
.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.adjusted_value_destroyed.height,
exit,
)?;
self.adjusted_value_destroyed_30d
.height
.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.adjusted_value_destroyed.height,
exit,
)?;
self.adjusted_value_destroyed_1y
.height
.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1y_ago,
&self.adjusted_value_destroyed.height,
exit,
)?;
// SOPR ratios from rolling sums
self.adjusted_sopr_24h
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.adjusted_value_created_24h.height,
&self.adjusted_value_destroyed_24h.height,
exit,
)?;
self.adjusted_sopr_7d
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.adjusted_value_created_7d.height,
&self.adjusted_value_destroyed_7d.height,
exit,
)?;
self.adjusted_sopr_30d
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.adjusted_value_created_30d.height,
&self.adjusted_value_destroyed_30d.height,
exit,
)?;
self.adjusted_sopr_1y
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.adjusted_value_created_1y.height,
&self.adjusted_value_destroyed_1y.height,
exit,
)?;
// Adjusted SOPR EMAs
self.adjusted_sopr_24h_7d_ema
.height
.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.adjusted_sopr.height,
exit,
)?;
self.adjusted_sopr_24h_30d_ema
.height
.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.adjusted_sopr.height,
exit,
)?;
Ok(())
}

View File

@@ -1,24 +1,21 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Bitcoin, Cents, CentsSats, CentsSquaredSats, Dollars, Height, StoredF32, StoredF64,
Version,
Bitcoin, Cents, CentsSats, CentsSquaredSats, Dollars, Height, StoredF32, StoredF64, Version,
};
use vecdb::{
AnyStoredVec, AnyVec, BytesVec, Exit, WritableVec, Ident, ImportableVec,
ReadableCloneableVec, ReadableVec, Negate, Rw, StorageMode,
AnyStoredVec, AnyVec, BytesVec, Exit, Ident, ImportableVec, Negate, ReadableCloneableVec,
ReadableVec, Rw, StorageMode, WritableVec,
};
use crate::{
ComputeIndexes, blocks,
distribution::state::RealizedState,
internal::{
CentsUnsignedToDollars, ComputedFromHeightCum, ComputedFromHeightLast,
ComputedFromHeightRatio, DollarsPlus,
DollarsSquaredDivide, LazyBinaryFromHeightLast,
LazyBinaryPriceFromHeight, LazyComputedValueFromHeightCum, LazyFromHeightLast,
LazyPriceFromCents, PercentageDollarsF32, Price, PriceFromHeight,
Ratio64, StoredF32Identity, ValueEmaFromHeight,
CentsUnsignedToDollars, ComputedFromHeightCumulative, ComputedFromHeightLast,
ComputedFromHeightRatio, DollarsPlus, LazyComputedValueFromHeightCumulative, LazyFromHeightLast,
PercentageDollarsF32, Price, Ratio64,
StoredF32Identity, ValueEmaFromHeight,
},
prices,
};
@@ -37,12 +34,12 @@ pub struct RealizedBase<M: StorageMode = Rw> {
// === Investor Price ===
pub investor_price_cents: ComputedFromHeightLast<Cents, M>,
pub investor_price: LazyPriceFromCents,
pub investor_price: Price<LazyFromHeightLast<Dollars, Cents>>,
pub investor_price_extra: ComputedFromHeightRatio<M>,
// === Floor/Ceiling Price Bands ===
pub lower_price_band: LazyBinaryPriceFromHeight,
pub upper_price_band: LazyBinaryPriceFromHeight,
pub lower_price_band: Price<ComputedFromHeightLast<Dollars, M>>,
pub upper_price_band: Price<ComputedFromHeightLast<Dollars, M>>,
// === Raw values for aggregation ===
pub cap_raw: M::Stored<BytesVec<Height, CentsSats>>,
@@ -52,21 +49,19 @@ pub struct RealizedBase<M: StorageMode = Rw> {
pub mvrv: LazyFromHeightLast<StoredF32>,
// === Realized Profit/Loss ===
pub realized_profit: ComputedFromHeightCum<Dollars, M>,
pub realized_profit: ComputedFromHeightCumulative<Dollars, M>,
pub realized_profit_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub realized_loss: ComputedFromHeightCum<Dollars, M>,
pub realized_loss: ComputedFromHeightCumulative<Dollars, M>,
pub realized_loss_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub neg_realized_loss: LazyFromHeightLast<Dollars>,
pub net_realized_pnl: ComputedFromHeightCum<Dollars, M>,
pub net_realized_pnl: ComputedFromHeightCumulative<Dollars, M>,
pub net_realized_pnl_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub realized_value: ComputedFromHeightLast<Dollars, M>,
// === Realized vs Realized Cap Ratios (lazy) ===
pub realized_profit_rel_to_realized_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub realized_loss_rel_to_realized_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub net_realized_pnl_rel_to_realized_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
// === Realized vs Realized Cap Ratios ===
pub realized_profit_rel_to_realized_cap: ComputedFromHeightLast<StoredF32, M>,
pub realized_loss_rel_to_realized_cap: ComputedFromHeightLast<StoredF32, M>,
pub net_realized_pnl_rel_to_realized_cap: ComputedFromHeightLast<StoredF32, M>,
// === Total Realized PnL ===
pub total_realized_pnl: LazyFromHeightLast<Dollars>,
@@ -77,9 +72,9 @@ pub struct RealizedBase<M: StorageMode = Rw> {
pub loss_value_created: ComputedFromHeightLast<Dollars, M>,
pub loss_value_destroyed: ComputedFromHeightLast<Dollars, M>,
// === Value Created/Destroyed Totals (lazy) ===
pub value_created: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
pub value_destroyed: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
// === Value Created/Destroyed Totals ===
pub value_created: ComputedFromHeightLast<Dollars, M>,
pub value_destroyed: ComputedFromHeightLast<Dollars, M>,
// === Capitulation/Profit Flow (lazy aliases) ===
pub capitulation_flow: LazyFromHeightLast<Dollars>,
@@ -97,10 +92,10 @@ pub struct RealizedBase<M: StorageMode = Rw> {
// === SOPR (rolling window ratios) ===
pub sopr: LazyFromHeightLast<StoredF64>,
pub sopr_24h: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub sopr_7d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub sopr_30d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub sopr_1y: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub sopr_24h: ComputedFromHeightLast<StoredF64, M>,
pub sopr_7d: ComputedFromHeightLast<StoredF64, M>,
pub sopr_30d: ComputedFromHeightLast<StoredF64, M>,
pub sopr_1y: ComputedFromHeightLast<StoredF64, M>,
pub sopr_24h_7d_ema: ComputedFromHeightLast<StoredF64, M>,
pub sopr_7d_ema: LazyFromHeightLast<StoredF64>,
pub sopr_24h_30d_ema: ComputedFromHeightLast<StoredF64, M>,
@@ -114,10 +109,10 @@ pub struct RealizedBase<M: StorageMode = Rw> {
// === Sell Side Risk (rolling window ratios) ===
pub sell_side_risk_ratio: LazyFromHeightLast<StoredF32>,
pub sell_side_risk_ratio_24h: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub sell_side_risk_ratio_7d: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub sell_side_risk_ratio_30d: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub sell_side_risk_ratio_1y: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub sell_side_risk_ratio_24h: ComputedFromHeightLast<StoredF32, M>,
pub sell_side_risk_ratio_7d: ComputedFromHeightLast<StoredF32, M>,
pub sell_side_risk_ratio_30d: ComputedFromHeightLast<StoredF32, M>,
pub sell_side_risk_ratio_1y: ComputedFromHeightLast<StoredF32, M>,
pub sell_side_risk_ratio_24h_7d_ema: ComputedFromHeightLast<StoredF32, M>,
pub sell_side_risk_ratio_7d_ema: LazyFromHeightLast<StoredF32>,
pub sell_side_risk_ratio_24h_30d_ema: ComputedFromHeightLast<StoredF32, M>,
@@ -125,17 +120,19 @@ pub struct RealizedBase<M: StorageMode = Rw> {
// === Net Realized PnL Deltas ===
pub net_realized_pnl_cumulative_30d_delta: ComputedFromHeightLast<Dollars, M>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedFromHeightLast<StoredF32, M>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedFromHeightLast<StoredF32, M>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
ComputedFromHeightLast<StoredF32, M>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
ComputedFromHeightLast<StoredF32, M>,
// === Peak Regret ===
pub peak_regret: ComputedFromHeightCum<Dollars, M>,
pub peak_regret_rel_to_realized_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub peak_regret: ComputedFromHeightCumulative<Dollars, M>,
pub peak_regret_rel_to_realized_cap: ComputedFromHeightLast<StoredF32, M>,
// === Sent in Profit/Loss ===
pub sent_in_profit: LazyComputedValueFromHeightCum<M>,
pub sent_in_profit: LazyComputedValueFromHeightCumulative<M>,
pub sent_in_profit_14d_ema: ValueEmaFromHeight<M>,
pub sent_in_loss: LazyComputedValueFromHeightCum<M>,
pub sent_in_loss: LazyComputedValueFromHeightCumulative<M>,
pub sent_in_loss_14d_ema: ValueEmaFromHeight<M>,
}
@@ -162,7 +159,7 @@ impl RealizedBase {
&realized_cap_cents,
);
let realized_profit = ComputedFromHeightCum::forced_import(
let realized_profit = ComputedFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("realized_profit"),
cfg.version,
@@ -176,7 +173,7 @@ impl RealizedBase {
cfg.indexes,
)?;
let realized_loss = ComputedFromHeightCum::forced_import(
let realized_loss = ComputedFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("realized_loss"),
cfg.version,
@@ -190,14 +187,14 @@ impl RealizedBase {
cfg.indexes,
)?;
let neg_realized_loss = LazyFromHeightLast::from_computed::<Negate>(
let neg_realized_loss = LazyFromHeightLast::from_height_source::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
realized_loss.height.read_only_boxed_clone(),
&realized_loss,
cfg.indexes,
);
let net_realized_pnl = ComputedFromHeightCum::forced_import(
let net_realized_pnl = ComputedFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
cfg.version,
@@ -211,7 +208,7 @@ impl RealizedBase {
cfg.indexes,
)?;
let peak_regret = ComputedFromHeightCum::forced_import(
let peak_regret = ComputedFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("realized_peak_regret"),
cfg.version + v2,
@@ -232,31 +229,28 @@ impl RealizedBase {
&realized_value,
);
let realized_profit_rel_to_realized_cap =
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
&realized_profit,
&realized_cap,
);
let realized_profit_rel_to_realized_cap = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
cfg.indexes,
)?;
let realized_loss_rel_to_realized_cap =
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
&realized_loss,
&realized_cap,
);
let realized_loss_rel_to_realized_cap = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
cfg.indexes,
)?;
let net_realized_pnl_rel_to_realized_cap =
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
&net_realized_pnl,
&realized_cap,
);
let net_realized_pnl_rel_to_realized_cap = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
cfg.indexes,
)?;
let realized_price = PriceFromHeight::forced_import(
let realized_price = Price::forced_import(
cfg.db,
&cfg.name("realized_price"),
cfg.version + v1,
@@ -270,7 +264,7 @@ impl RealizedBase {
cfg.indexes,
)?;
let investor_price = LazyPriceFromCents::from_computed::<CentsUnsignedToDollars>(
let investor_price = Price::from_computed::<CentsUnsignedToDollars>(
&cfg.name("investor_price"),
cfg.version,
&investor_price_cents,
@@ -279,27 +273,24 @@ impl RealizedBase {
let investor_price_extra = ComputedFromHeightRatio::forced_import_from_lazy(
cfg.db,
&cfg.name("investor_price"),
&investor_price.usd,
cfg.version,
cfg.indexes,
extended,
)?;
let lower_price_band =
LazyBinaryPriceFromHeight::from_price_and_lazy_price::<DollarsSquaredDivide>(
&cfg.name("lower_price_band"),
cfg.version,
&realized_price,
&investor_price,
);
let lower_price_band = Price::forced_import(
cfg.db,
&cfg.name("lower_price_band"),
cfg.version,
cfg.indexes,
)?;
let upper_price_band =
LazyBinaryPriceFromHeight::from_lazy_price_and_price::<DollarsSquaredDivide>(
&cfg.name("upper_price_band"),
cfg.version,
&investor_price,
&realized_price,
);
let upper_price_band = Price::forced_import(
cfg.db,
&cfg.name("upper_price_band"),
cfg.version,
cfg.indexes,
)?;
let cap_raw = BytesVec::forced_import(cfg.db, &cfg.name("cap_raw"), cfg.version)?;
let investor_cap_raw =
@@ -330,18 +321,18 @@ impl RealizedBase {
cfg.indexes,
)?;
let value_created = LazyBinaryFromHeightLast::from_computed_last::<DollarsPlus>(
let value_created = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("value_created"),
cfg.version,
&profit_value_created,
&loss_value_created,
);
let value_destroyed = LazyBinaryFromHeightLast::from_computed_last::<DollarsPlus>(
cfg.indexes,
)?;
let value_destroyed = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("value_destroyed"),
cfg.version,
&profit_value_destroyed,
&loss_value_destroyed,
);
cfg.indexes,
)?;
let capitulation_flow = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("capitulation_flow"),
@@ -359,7 +350,7 @@ impl RealizedBase {
let realized_price_extra = ComputedFromHeightRatio::forced_import(
cfg.db,
&cfg.name("realized_price"),
Some(&realized_price),
Some(&realized_price.usd),
cfg.version + v1,
cfg.indexes,
extended,
@@ -375,7 +366,12 @@ impl RealizedBase {
// === Rolling sum intermediates ===
macro_rules! import_rolling {
($name:expr) => {
ComputedFromHeightLast::forced_import(cfg.db, &cfg.name($name), cfg.version + v1, cfg.indexes)?
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name($name),
cfg.version + v1,
cfg.indexes,
)?
};
}
@@ -393,69 +389,70 @@ impl RealizedBase {
let realized_value_30d = import_rolling!("realized_value_30d");
let realized_value_1y = import_rolling!("realized_value_1y");
// === Rolling window lazy ratios ===
let sopr_24h = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("sopr_24h"), cfg.version + v1, &value_created_24h, &value_destroyed_24h,
);
let sopr_7d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("sopr_7d"), cfg.version + v1, &value_created_7d, &value_destroyed_7d,
);
let sopr_30d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("sopr_30d"), cfg.version + v1, &value_created_30d, &value_destroyed_30d,
);
let sopr_1y = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("sopr_1y"), cfg.version + v1, &value_created_1y, &value_destroyed_1y,
);
let sopr = LazyFromHeightLast::from_binary::<Ident, Dollars, Dollars>(
&cfg.name("sopr"), cfg.version + v1, &sopr_24h,
// === Rolling window stored ratios ===
let sopr_24h = import_rolling!("sopr_24h");
let sopr_7d = import_rolling!("sopr_7d");
let sopr_30d = import_rolling!("sopr_30d");
let sopr_1y = import_rolling!("sopr_1y");
let sopr = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sopr"),
cfg.version + v1,
sopr_24h.height.read_only_boxed_clone(),
&sopr_24h,
);
let sell_side_risk_ratio_24h = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("sell_side_risk_ratio_24h"), cfg.version + v1, &realized_value_24h, &realized_cap,
);
let sell_side_risk_ratio_7d = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("sell_side_risk_ratio_7d"), cfg.version + v1, &realized_value_7d, &realized_cap,
);
let sell_side_risk_ratio_30d = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("sell_side_risk_ratio_30d"), cfg.version + v1, &realized_value_30d, &realized_cap,
);
let sell_side_risk_ratio_1y = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("sell_side_risk_ratio_1y"), cfg.version + v1, &realized_value_1y, &realized_cap,
);
let sell_side_risk_ratio = LazyFromHeightLast::from_binary::<Ident, Dollars, Dollars>(
&cfg.name("sell_side_risk_ratio"), cfg.version + v1, &sell_side_risk_ratio_24h,
let sell_side_risk_ratio_24h = import_rolling!("sell_side_risk_ratio_24h");
let sell_side_risk_ratio_7d = import_rolling!("sell_side_risk_ratio_7d");
let sell_side_risk_ratio_30d = import_rolling!("sell_side_risk_ratio_30d");
let sell_side_risk_ratio_1y = import_rolling!("sell_side_risk_ratio_1y");
let sell_side_risk_ratio = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sell_side_risk_ratio"),
cfg.version + v1,
sell_side_risk_ratio_24h.height.read_only_boxed_clone(),
&sell_side_risk_ratio_24h,
);
// === EMA imports + identity aliases ===
let sopr_24h_7d_ema = import_rolling!("sopr_24h_7d_ema");
let sopr_7d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sopr_7d_ema"), cfg.version + v1,
sopr_24h_7d_ema.height.read_only_boxed_clone(), &sopr_24h_7d_ema,
&cfg.name("sopr_7d_ema"),
cfg.version + v1,
sopr_24h_7d_ema.height.read_only_boxed_clone(),
&sopr_24h_7d_ema,
);
let sopr_24h_30d_ema = import_rolling!("sopr_24h_30d_ema");
let sopr_30d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sopr_30d_ema"), cfg.version + v1,
sopr_24h_30d_ema.height.read_only_boxed_clone(), &sopr_24h_30d_ema,
&cfg.name("sopr_30d_ema"),
cfg.version + v1,
sopr_24h_30d_ema.height.read_only_boxed_clone(),
&sopr_24h_30d_ema,
);
let sell_side_risk_ratio_24h_7d_ema = import_rolling!("sell_side_risk_ratio_24h_7d_ema");
let sell_side_risk_ratio_7d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sell_side_risk_ratio_7d_ema"), cfg.version + v1,
sell_side_risk_ratio_24h_7d_ema.height.read_only_boxed_clone(), &sell_side_risk_ratio_24h_7d_ema,
&cfg.name("sell_side_risk_ratio_7d_ema"),
cfg.version + v1,
sell_side_risk_ratio_24h_7d_ema
.height
.read_only_boxed_clone(),
&sell_side_risk_ratio_24h_7d_ema,
);
let sell_side_risk_ratio_24h_30d_ema = import_rolling!("sell_side_risk_ratio_24h_30d_ema");
let sell_side_risk_ratio_30d_ema = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("sell_side_risk_ratio_30d_ema"), cfg.version + v1,
sell_side_risk_ratio_24h_30d_ema.height.read_only_boxed_clone(), &sell_side_risk_ratio_24h_30d_ema,
&cfg.name("sell_side_risk_ratio_30d_ema"),
cfg.version + v1,
sell_side_risk_ratio_24h_30d_ema
.height
.read_only_boxed_clone(),
&sell_side_risk_ratio_24h_30d_ema,
);
let peak_regret_rel_to_realized_cap =
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("peak_regret_rel_to_realized_cap"),
cfg.version + v1,
&peak_regret,
&realized_cap,
);
let peak_regret_rel_to_realized_cap = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("peak_regret_rel_to_realized_cap"),
cfg.version + v1,
cfg.indexes,
)?;
Ok(Self {
realized_cap_cents,
@@ -548,12 +545,11 @@ impl RealizedBase {
)?,
peak_regret,
peak_regret_rel_to_realized_cap,
sent_in_profit: LazyComputedValueFromHeightCum::forced_import(
sent_in_profit: LazyComputedValueFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("sent_in_profit"),
cfg.version,
cfg.indexes,
cfg.prices,
)?,
sent_in_profit_14d_ema: ValueEmaFromHeight::forced_import(
cfg.db,
@@ -561,12 +557,11 @@ impl RealizedBase {
cfg.version,
cfg.indexes,
)?,
sent_in_loss: LazyComputedValueFromHeightCum::forced_import(
sent_in_loss: LazyComputedValueFromHeightCumulative::forced_import(
cfg.db,
&cfg.name("sent_in_loss"),
cfg.version,
cfg.indexes,
cfg.prices,
)?,
sent_in_loss_14d_ema: ValueEmaFromHeight::forced_import(
cfg.db,
@@ -804,9 +799,9 @@ impl RealizedBase {
exit: &Exit,
) -> Result<()> {
self.realized_profit
.compute_cumulative(starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, exit)?;
self.realized_loss
.compute_cumulative(starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, exit)?;
self.net_realized_pnl
.compute(starting_indexes.height, exit, |vec| {
@@ -827,12 +822,7 @@ impl RealizedBase {
)?;
self.peak_regret
.compute_cumulative(starting_indexes.height, exit)?;
self.sent_in_profit
.compute_cumulative(starting_indexes.height, exit)?;
self.sent_in_loss
.compute_cumulative(starting_indexes.height, exit)?;
.compute_rest(starting_indexes.height, exit)?;
Ok(())
}
@@ -848,7 +838,7 @@ impl RealizedBase {
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.realized_price.height.compute_divide(
self.realized_price.usd.height.compute_divide(
starting_indexes.height,
&self.realized_cap.height,
height_to_supply,
@@ -860,7 +850,12 @@ impl RealizedBase {
prices,
starting_indexes,
exit,
Some(&self.realized_price.height),
Some(&self.realized_price.usd.height),
)?;
self.realized_price_extra.compute_usd_bands(
starting_indexes,
&self.realized_price.usd.height,
exit,
)?;
self.investor_price_extra.compute_rest(
@@ -868,7 +863,12 @@ impl RealizedBase {
prices,
starting_indexes,
exit,
Some(&self.investor_price.height),
Some(&self.investor_price.usd.height),
)?;
self.investor_price_extra.compute_usd_bands(
starting_indexes,
&self.investor_price.usd.height,
exit,
)?;
self.realized_cap_30d_delta.height.compute_rolling_change(
@@ -878,39 +878,160 @@ impl RealizedBase {
exit,
)?;
// Compute value_created/destroyed from stored components
self.value_created
.compute_binary::<Dollars, Dollars, DollarsPlus>(
starting_indexes.height,
&self.profit_value_created.height,
&self.loss_value_created.height,
exit,
)?;
self.value_destroyed
.compute_binary::<Dollars, Dollars, DollarsPlus>(
starting_indexes.height,
&self.profit_value_destroyed.height,
&self.loss_value_destroyed.height,
exit,
)?;
// === Rolling sum intermediates ===
macro_rules! rolling_sum {
($target:expr, $window:expr, $source:expr) => {
$target.height.compute_rolling_sum(
starting_indexes.height, $window, $source, exit,
starting_indexes.height,
$window,
$source,
exit,
)?
};
}
rolling_sum!(self.value_created_24h, &blocks.count.height_24h_ago, &self.value_created.height);
rolling_sum!(self.value_created_7d, &blocks.count.height_1w_ago, &self.value_created.height);
rolling_sum!(self.value_created_30d, &blocks.count.height_1m_ago, &self.value_created.height);
rolling_sum!(self.value_created_1y, &blocks.count.height_1y_ago, &self.value_created.height);
rolling_sum!(self.value_destroyed_24h, &blocks.count.height_24h_ago, &self.value_destroyed.height);
rolling_sum!(self.value_destroyed_7d, &blocks.count.height_1w_ago, &self.value_destroyed.height);
rolling_sum!(self.value_destroyed_30d, &blocks.count.height_1m_ago, &self.value_destroyed.height);
rolling_sum!(self.value_destroyed_1y, &blocks.count.height_1y_ago, &self.value_destroyed.height);
rolling_sum!(
self.value_created_24h,
&blocks.count.height_24h_ago,
&self.value_created.height
);
rolling_sum!(
self.value_created_7d,
&blocks.count.height_1w_ago,
&self.value_created.height
);
rolling_sum!(
self.value_created_30d,
&blocks.count.height_1m_ago,
&self.value_created.height
);
rolling_sum!(
self.value_created_1y,
&blocks.count.height_1y_ago,
&self.value_created.height
);
rolling_sum!(
self.value_destroyed_24h,
&blocks.count.height_24h_ago,
&self.value_destroyed.height
);
rolling_sum!(
self.value_destroyed_7d,
&blocks.count.height_1w_ago,
&self.value_destroyed.height
);
rolling_sum!(
self.value_destroyed_30d,
&blocks.count.height_1m_ago,
&self.value_destroyed.height
);
rolling_sum!(
self.value_destroyed_1y,
&blocks.count.height_1y_ago,
&self.value_destroyed.height
);
// Realized value rolling sums
rolling_sum!(self.realized_value_24h, &blocks.count.height_24h_ago, &self.realized_value.height);
rolling_sum!(self.realized_value_7d, &blocks.count.height_1w_ago, &self.realized_value.height);
rolling_sum!(self.realized_value_30d, &blocks.count.height_1m_ago, &self.realized_value.height);
rolling_sum!(self.realized_value_1y, &blocks.count.height_1y_ago, &self.realized_value.height);
rolling_sum!(
self.realized_value_24h,
&blocks.count.height_24h_ago,
&self.realized_value.height
);
rolling_sum!(
self.realized_value_7d,
&blocks.count.height_1w_ago,
&self.realized_value.height
);
rolling_sum!(
self.realized_value_30d,
&blocks.count.height_1m_ago,
&self.realized_value.height
);
rolling_sum!(
self.realized_value_1y,
&blocks.count.height_1y_ago,
&self.realized_value.height
);
// 7d rolling averages
self.realized_profit_7d_ema
.height
.compute_rolling_average(
// Compute SOPR from rolling sums
self.sopr_24h.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.value_created_24h.height,
&self.value_destroyed_24h.height,
exit,
)?;
self.sopr_7d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.value_created_7d.height,
&self.value_destroyed_7d.height,
exit,
)?;
self.sopr_30d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.value_created_30d.height,
&self.value_destroyed_30d.height,
exit,
)?;
self.sopr_1y.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.value_created_1y.height,
&self.value_destroyed_1y.height,
exit,
)?;
// Compute sell-side risk ratios
self.sell_side_risk_ratio_24h
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.realized_profit.height,
&self.realized_value_24h.height,
&self.realized_cap.height,
exit,
)?;
self.sell_side_risk_ratio_7d
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.realized_value_7d.height,
&self.realized_cap.height,
exit,
)?;
self.sell_side_risk_ratio_30d
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.realized_value_30d.height,
&self.realized_cap.height,
exit,
)?;
self.sell_side_risk_ratio_1y
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.realized_value_1y.height,
&self.realized_cap.height,
exit,
)?;
// 7d rolling averages
self.realized_profit_7d_ema.height.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_1w_ago,
&self.realized_profit.height,
exit,
)?;
self.realized_loss_7d_ema.height.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_1w_ago,
@@ -974,6 +1095,36 @@ impl RealizedBase {
exit,
)?;
// Realized profit/loss/net relative to realized cap
self.realized_profit_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.realized_profit.height,
&self.realized_cap.height,
exit,
)?;
self.realized_loss_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.realized_loss.height,
&self.realized_cap.height,
exit,
)?;
self.net_realized_pnl_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.net_realized_pnl.height,
&self.realized_cap.height,
exit,
)?;
self.peak_regret_rel_to_realized_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
starting_indexes.height,
&self.peak_regret.height,
&self.realized_cap.height,
exit,
)?;
// Net realized PnL cumulative 30d delta
self.net_realized_pnl_cumulative_30d_delta
.height

View File

@@ -6,7 +6,7 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{
ComputeIndexes, blocks,
internal::{
ComputedFromHeightLast, LazyBinaryFromHeightLast, Ratio64,
ComputedFromHeightLast, Ratio64,
},
};
@@ -29,11 +29,11 @@ pub struct RealizedExtended<M: StorageMode = Rw> {
pub realized_loss_30d: ComputedFromHeightLast<Dollars, M>,
pub realized_loss_1y: ComputedFromHeightLast<Dollars, M>,
// === Realized Profit to Loss Ratio (lazy from rolling sums) ===
pub realized_profit_to_loss_ratio_24h: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub realized_profit_to_loss_ratio_7d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub realized_profit_to_loss_ratio_30d: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
pub realized_profit_to_loss_ratio_1y: LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>,
// === Realized Profit to Loss Ratio (from rolling sums) ===
pub realized_profit_to_loss_ratio_24h: ComputedFromHeightLast<StoredF64, M>,
pub realized_profit_to_loss_ratio_7d: ComputedFromHeightLast<StoredF64, M>,
pub realized_profit_to_loss_ratio_30d: ComputedFromHeightLast<StoredF64, M>,
pub realized_profit_to_loss_ratio_1y: ComputedFromHeightLast<StoredF64, M>,
}
impl RealizedExtended {
@@ -46,28 +46,6 @@ impl RealizedExtended {
};
}
let realized_profit_24h = import_rolling!("realized_profit_24h");
let realized_profit_7d = import_rolling!("realized_profit_7d");
let realized_profit_30d = import_rolling!("realized_profit_30d");
let realized_profit_1y = import_rolling!("realized_profit_1y");
let realized_loss_24h = import_rolling!("realized_loss_24h");
let realized_loss_7d = import_rolling!("realized_loss_7d");
let realized_loss_30d = import_rolling!("realized_loss_30d");
let realized_loss_1y = import_rolling!("realized_loss_1y");
let realized_profit_to_loss_ratio_24h = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("realized_profit_to_loss_ratio_24h"), cfg.version + v1, &realized_profit_24h, &realized_loss_24h,
);
let realized_profit_to_loss_ratio_7d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("realized_profit_to_loss_ratio_7d"), cfg.version + v1, &realized_profit_7d, &realized_loss_7d,
);
let realized_profit_to_loss_ratio_30d = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("realized_profit_to_loss_ratio_30d"), cfg.version + v1, &realized_profit_30d, &realized_loss_30d,
);
let realized_profit_to_loss_ratio_1y = LazyBinaryFromHeightLast::from_computed_last::<Ratio64>(
&cfg.name("realized_profit_to_loss_ratio_1y"), cfg.version + v1, &realized_profit_1y, &realized_loss_1y,
);
Ok(RealizedExtended {
realized_cap_rel_to_own_market_cap: ComputedFromHeightLast::forced_import(
cfg.db,
@@ -75,18 +53,18 @@ impl RealizedExtended {
cfg.version,
cfg.indexes,
)?,
realized_profit_24h,
realized_profit_7d,
realized_profit_30d,
realized_profit_1y,
realized_loss_24h,
realized_loss_7d,
realized_loss_30d,
realized_loss_1y,
realized_profit_to_loss_ratio_24h,
realized_profit_to_loss_ratio_7d,
realized_profit_to_loss_ratio_30d,
realized_profit_to_loss_ratio_1y,
realized_profit_24h: import_rolling!("realized_profit_24h"),
realized_profit_7d: import_rolling!("realized_profit_7d"),
realized_profit_30d: import_rolling!("realized_profit_30d"),
realized_profit_1y: import_rolling!("realized_profit_1y"),
realized_loss_24h: import_rolling!("realized_loss_24h"),
realized_loss_7d: import_rolling!("realized_loss_7d"),
realized_loss_30d: import_rolling!("realized_loss_30d"),
realized_loss_1y: import_rolling!("realized_loss_1y"),
realized_profit_to_loss_ratio_24h: import_rolling!("realized_profit_to_loss_ratio_24h"),
realized_profit_to_loss_ratio_7d: import_rolling!("realized_profit_to_loss_ratio_7d"),
realized_profit_to_loss_ratio_30d: import_rolling!("realized_profit_to_loss_ratio_30d"),
realized_profit_to_loss_ratio_1y: import_rolling!("realized_profit_to_loss_ratio_1y"),
})
}
@@ -116,6 +94,20 @@ impl RealizedExtended {
exit,
)?;
// Realized profit to loss ratios
self.realized_profit_to_loss_ratio_24h.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_24h.height, &self.realized_loss_24h.height, exit,
)?;
self.realized_profit_to_loss_ratio_7d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_7d.height, &self.realized_loss_7d.height, exit,
)?;
self.realized_profit_to_loss_ratio_30d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_30d.height, &self.realized_loss_30d.height, exit,
)?;
self.realized_profit_to_loss_ratio_1y.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_1y.height, &self.realized_loss_1y.height, exit,
)?;
Ok(())
}
}

View File

@@ -23,9 +23,9 @@ pub struct RealizedWithAdjusted<M: StorageMode = Rw> {
}
impl RealizedWithAdjusted {
pub(crate) fn forced_import(cfg: &ImportConfig, up_to_1h: &RealizedBase) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let base = RealizedBase::forced_import(cfg)?;
let adjusted = RealizedAdjusted::forced_import(cfg, &base, up_to_1h)?;
let adjusted = RealizedAdjusted::forced_import(cfg)?;
Ok(Self { base, adjusted })
}
@@ -37,6 +37,8 @@ impl RealizedWithAdjusted {
starting_indexes: &ComputeIndexes,
height_to_supply: &impl ReadableVec<Height, Bitcoin>,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute_rest_part2_base(
@@ -51,6 +53,10 @@ impl RealizedWithAdjusted {
self.adjusted.compute_rest_part2_adj(
blocks,
starting_indexes,
&self.base.value_created.height,
&self.base.value_destroyed.height,
up_to_1h_value_created,
up_to_1h_value_destroyed,
exit,
)?;

View File

@@ -25,10 +25,10 @@ pub struct RealizedWithExtendedAdjusted<M: StorageMode = Rw> {
}
impl RealizedWithExtendedAdjusted {
pub(crate) fn forced_import(cfg: &ImportConfig, up_to_1h: &RealizedBase) -> Result<Self> {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let base = RealizedBase::forced_import(cfg)?;
let extended = RealizedExtended::forced_import(cfg)?;
let adjusted = RealizedAdjusted::forced_import(cfg, &base, up_to_1h)?;
let adjusted = RealizedAdjusted::forced_import(cfg)?;
Ok(Self {
base,
extended,
@@ -44,6 +44,8 @@ impl RealizedWithExtendedAdjusted {
starting_indexes: &ComputeIndexes,
height_to_supply: &impl ReadableVec<Height, Bitcoin>,
height_to_market_cap: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_created: &impl ReadableVec<Height, Dollars>,
up_to_1h_value_destroyed: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute_rest_part2_base(
@@ -66,6 +68,10 @@ impl RealizedWithExtendedAdjusted {
self.adjusted.compute_rest_part2_adj(
blocks,
starting_indexes,
&self.base.value_created.height,
&self.base.value_destroyed.height,
up_to_1h_value_created,
up_to_1h_value_destroyed,
exit,
)?;

View File

@@ -1,131 +1,117 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, Sats, StoredF32, StoredF64, Version};
use brk_types::{Dollars, Height, Sats, StoredF32, StoredF64, Version};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{
LazyBinaryComputedFromHeightLast, LazyBinaryFromHeightLast, LazyFromHeightLast,
ComputedFromHeightLast,
NegPercentageDollarsF32, PercentageDollarsF32, PercentageSatsF64,
};
use crate::distribution::metrics::{ImportConfig, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
/// Base relative metrics (always computed when relative is enabled).
/// All fields are non-Optional - market_cap and realized_cap are always
/// available when relative metrics are enabled.
#[derive(Clone, Traversable)]
pub struct RelativeBase {
#[derive(Traversable)]
pub struct RelativeBase<M: StorageMode = Rw> {
// === Supply in Profit/Loss Relative to Own Supply ===
pub supply_in_profit_rel_to_own_supply: LazyBinaryFromHeightLast<StoredF64, Sats, Sats>,
pub supply_in_loss_rel_to_own_supply: LazyBinaryFromHeightLast<StoredF64, Sats, Sats>,
pub supply_in_profit_rel_to_own_supply: ComputedFromHeightLast<StoredF64, M>,
pub supply_in_loss_rel_to_own_supply: ComputedFromHeightLast<StoredF64, M>,
// === Unrealized vs Market Cap ===
pub unrealized_profit_rel_to_market_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub unrealized_loss_rel_to_market_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub neg_unrealized_loss_rel_to_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub net_unrealized_pnl_rel_to_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub nupl: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub unrealized_profit_rel_to_market_cap: ComputedFromHeightLast<StoredF32, M>,
pub unrealized_loss_rel_to_market_cap: ComputedFromHeightLast<StoredF32, M>,
pub neg_unrealized_loss_rel_to_market_cap: ComputedFromHeightLast<StoredF32, M>,
pub net_unrealized_pnl_rel_to_market_cap: ComputedFromHeightLast<StoredF32, M>,
pub nupl: ComputedFromHeightLast<StoredF32, M>,
// === Invested Capital in Profit/Loss as % of Realized Cap ===
pub invested_capital_in_profit_pct: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub invested_capital_in_loss_pct: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub invested_capital_in_profit_pct: ComputedFromHeightLast<StoredF32, M>,
pub invested_capital_in_loss_pct: ComputedFromHeightLast<StoredF32, M>,
}
impl RelativeBase {
/// Import base relative metrics.
///
/// `market_cap` is either `all_supply.total.usd` (for non-"all" cohorts)
/// or `supply.total.usd` (for the "all" cohort itself).
pub(crate) fn forced_import(
cfg: &ImportConfig,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
market_cap: &LazyBinaryComputedFromHeightLast<Dollars, Sats, Dollars>,
realized_cap: &LazyFromHeightLast<Dollars, Cents>,
) -> Self {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let v2 = Version::new(2);
Self {
supply_in_profit_rel_to_own_supply:
LazyBinaryFromHeightLast::from_computed_last::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_own_supply"),
cfg.version + v1,
&unrealized.supply_in_profit.sats,
&supply.total.sats,
),
supply_in_loss_rel_to_own_supply:
LazyBinaryFromHeightLast::from_computed_last::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_own_supply"),
cfg.version + v1,
&unrealized.supply_in_loss.sats,
&supply.total.sats,
),
Ok(Self {
supply_in_profit_rel_to_own_supply: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("supply_in_profit_rel_to_own_supply"), cfg.version + v1, cfg.indexes,
)?,
supply_in_loss_rel_to_own_supply: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("supply_in_loss_rel_to_own_supply"), cfg.version + v1, cfg.indexes,
)?,
unrealized_profit_rel_to_market_cap: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("unrealized_profit_rel_to_market_cap"), cfg.version + v2, cfg.indexes,
)?,
unrealized_loss_rel_to_market_cap: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("unrealized_loss_rel_to_market_cap"), cfg.version + v2, cfg.indexes,
)?,
neg_unrealized_loss_rel_to_market_cap: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("neg_unrealized_loss_rel_to_market_cap"), cfg.version + v2, cfg.indexes,
)?,
net_unrealized_pnl_rel_to_market_cap: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("net_unrealized_pnl_rel_to_market_cap"), cfg.version + v2, cfg.indexes,
)?,
nupl: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("nupl"), cfg.version + v2, cfg.indexes,
)?,
invested_capital_in_profit_pct: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("invested_capital_in_profit_pct"), cfg.version, cfg.indexes,
)?,
invested_capital_in_loss_pct: ComputedFromHeightLast::forced_import(
cfg.db, &cfg.name("invested_capital_in_loss_pct"), cfg.version, cfg.indexes,
)?,
})
}
unrealized_profit_rel_to_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
PercentageDollarsF32, _, _,
>(
&cfg.name("unrealized_profit_rel_to_market_cap"),
cfg.version + v2,
&unrealized.unrealized_profit,
market_cap,
),
unrealized_loss_rel_to_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
PercentageDollarsF32, _, _,
>(
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
&unrealized.unrealized_loss,
market_cap,
),
neg_unrealized_loss_rel_to_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
NegPercentageDollarsF32, _, _,
>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
&unrealized.unrealized_loss,
market_cap,
),
net_unrealized_pnl_rel_to_market_cap:
LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::<
PercentageDollarsF32, _, _, _, _,
>(
&cfg.name("net_unrealized_pnl_rel_to_market_cap"),
cfg.version + v2,
&unrealized.net_unrealized_pnl,
market_cap,
),
nupl:
LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::<
PercentageDollarsF32, _, _, _, _,
>(
&cfg.name("nupl"),
cfg.version + v2,
&unrealized.net_unrealized_pnl,
market_cap,
),
invested_capital_in_profit_pct:
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<
PercentageDollarsF32, _,
>(
&cfg.name("invested_capital_in_profit_pct"),
cfg.version,
&unrealized.invested_capital_in_profit,
realized_cap,
),
invested_capital_in_loss_pct:
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<
PercentageDollarsF32, _,
>(
&cfg.name("invested_capital_in_loss_pct"),
cfg.version,
&unrealized.invested_capital_in_loss,
realized_cap,
),
}
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.supply_in_profit_rel_to_own_supply
.compute_binary::<Sats, Sats, PercentageSatsF64>(
max_from, &unrealized.supply_in_profit.sats.height, supply_total_sats, exit,
)?;
self.supply_in_loss_rel_to_own_supply
.compute_binary::<Sats, Sats, PercentageSatsF64>(
max_from, &unrealized.supply_in_loss.sats.height, supply_total_sats, exit,
)?;
self.unrealized_profit_rel_to_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_profit.height, market_cap, exit,
)?;
self.unrealized_loss_rel_to_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, market_cap, exit,
)?;
self.neg_unrealized_loss_rel_to_market_cap
.compute_binary::<Dollars, Dollars, NegPercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, market_cap, exit,
)?;
self.net_unrealized_pnl_rel_to_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.net_unrealized_pnl.height, market_cap, exit,
)?;
self.nupl
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.net_unrealized_pnl.height, market_cap, exit,
)?;
self.invested_capital_in_profit_pct
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.invested_capital_in_profit.height, &realized.realized_cap.height, exit,
)?;
self.invested_capital_in_loss_pct
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.invested_capital_in_loss.height, &realized.realized_cap.height, exit,
)?;
Ok(())
}
}

View File

@@ -1,71 +1,88 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Sats, StoredF32, Version};
use brk_types::{Dollars, Height, StoredF32};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{
LazyBinaryComputedFromHeightLast, LazyBinaryFromHeightLast,
NegPercentageDollarsF32, PercentageDollarsF32,
ComputedFromHeightLast, NegPercentageDollarsF32, PercentageDollarsF32,
};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own market cap (extended && rel_to_all).
#[derive(Clone, Traversable)]
pub struct RelativeExtendedOwnMarketCap {
#[derive(Traversable)]
pub struct RelativeExtendedOwnMarketCap<M: StorageMode = Rw> {
pub unrealized_profit_rel_to_own_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub unrealized_loss_rel_to_own_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub neg_unrealized_loss_rel_to_own_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub net_unrealized_pnl_rel_to_own_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
}
impl RelativeExtendedOwnMarketCap {
pub(crate) fn forced_import(
cfg: &ImportConfig,
unrealized: &UnrealizedBase,
own_market_cap: &LazyBinaryComputedFromHeightLast<Dollars, Sats, Dollars>,
) -> Self {
let v2 = Version::new(2);
) -> Result<Self> {
let v2 = brk_types::Version::new(2);
Self {
Ok(Self {
unrealized_profit_rel_to_own_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
PercentageDollarsF32, _, _,
>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_profit_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.unrealized_profit,
own_market_cap,
),
cfg.indexes,
)?,
unrealized_loss_rel_to_own_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
PercentageDollarsF32, _, _,
>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.unrealized_loss,
own_market_cap,
),
cfg.indexes,
)?,
neg_unrealized_loss_rel_to_own_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
NegPercentageDollarsF32, _, _,
>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.unrealized_loss,
own_market_cap,
),
cfg.indexes,
)?,
net_unrealized_pnl_rel_to_own_market_cap:
LazyBinaryFromHeightLast::from_binary_block_and_lazy_binary_block_last::<
PercentageDollarsF32, _, _, _, _,
>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.net_unrealized_pnl,
own_market_cap,
),
}
cfg.indexes,
)?,
})
}
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
own_market_cap: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.unrealized_profit_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_profit.height, own_market_cap, exit,
)?;
self.unrealized_loss_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, own_market_cap, exit,
)?;
self.neg_unrealized_loss_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, NegPercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, own_market_cap, exit,
)?;
self.net_unrealized_pnl_rel_to_own_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.net_unrealized_pnl.height, own_market_cap, exit,
)?;
Ok(())
}
}

View File

@@ -1,62 +1,88 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, StoredF32, Version};
use brk_types::{Dollars, Height, StoredF32};
use vecdb::{Exit, Rw, StorageMode};
use crate::internal::{
LazyBinaryFromHeightLast, NegPercentageDollarsF32, PercentageDollarsF32,
ComputedFromHeightLast, NegPercentageDollarsF32, PercentageDollarsF32,
};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own total unrealized PnL (extended only).
#[derive(Clone, Traversable)]
pub struct RelativeExtendedOwnPnl {
#[derive(Traversable)]
pub struct RelativeExtendedOwnPnl<M: StorageMode = Rw> {
pub unrealized_profit_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub unrealized_loss_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
}
impl RelativeExtendedOwnPnl {
pub(crate) fn forced_import(
cfg: &ImportConfig,
unrealized: &UnrealizedBase,
) -> Self {
let v1 = Version::ONE;
let v2 = Version::new(2);
) -> Result<Self> {
let v1 = brk_types::Version::ONE;
let v2 = brk_types::Version::new(2);
Self {
Ok(Self {
unrealized_profit_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast::from_block_last_and_binary_block::<PercentageDollarsF32, _, _>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.unrealized_profit,
&unrealized.total_unrealized_pnl,
),
cfg.indexes,
)?,
unrealized_loss_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast::from_block_last_and_binary_block::<PercentageDollarsF32, _, _>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.unrealized_loss,
&unrealized.total_unrealized_pnl,
),
cfg.indexes,
)?,
neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast::from_block_last_and_binary_block::<NegPercentageDollarsF32, _, _>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.unrealized_loss,
&unrealized.total_unrealized_pnl,
),
cfg.indexes,
)?,
net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
LazyBinaryFromHeightLast::from_both_binary_block::<PercentageDollarsF32, _, _, _, _>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v2,
&unrealized.net_unrealized_pnl,
&unrealized.total_unrealized_pnl,
),
}
cfg.indexes,
)?,
})
}
/// Recompute the four "relative to own total unrealized PnL" series,
/// resuming from `max_from`.
///
/// Each series divides one unrealized PnL height vec by the cohort's own
/// `total_unrealized_pnl` height vec, through the `PercentageDollarsF32`
/// transform (negated variant for the neg-loss series).
///
/// # Arguments
/// * `max_from` - height index to resume computation from.
/// * `unrealized` - source vecs, including the `total_unrealized_pnl` divisor.
/// * `exit` - cooperative-shutdown guard threaded into every computation.
///
/// # Errors
/// Propagates any error from the underlying `compute_binary` calls.
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
exit: &Exit,
) -> Result<()> {
// profit relative to total unrealized PnL
self.unrealized_profit_rel_to_own_total_unrealized_pnl
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_profit.height, &unrealized.total_unrealized_pnl.height, exit,
)?;
// loss relative to total unrealized PnL
self.unrealized_loss_rel_to_own_total_unrealized_pnl
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, &unrealized.total_unrealized_pnl.height, exit,
)?;
// same loss ratio, negated via NegPercentageDollarsF32
self.neg_unrealized_loss_rel_to_own_total_unrealized_pnl
.compute_binary::<Dollars, Dollars, NegPercentageDollarsF32>(
max_from, &unrealized.unrealized_loss.height, &unrealized.total_unrealized_pnl.height, exit,
)?;
// net PnL relative to total unrealized PnL
self.net_unrealized_pnl_rel_to_own_total_unrealized_pnl
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, &unrealized.net_unrealized_pnl.height, &unrealized.total_unrealized_pnl.height, exit,
)?;
Ok(())
}
}

View File

@@ -1,43 +1,58 @@
use brk_types::Dollars;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::ComputedFromHeightLast;
use crate::distribution::metrics::{ImportConfig, RealizedBase, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeBase, RelativeExtendedOwnPnl, RelativePeakRegret};
/// Relative metrics for the "all" cohort (base + own_pnl + peak_regret, NO rel_to_all).
#[derive(Clone, Deref, DerefMut, Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeForAll {
pub struct RelativeForAll<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: RelativeBase,
pub base: RelativeBase<M>,
#[traversable(flatten)]
pub extended_own_pnl: RelativeExtendedOwnPnl,
pub extended_own_pnl: RelativeExtendedOwnPnl<M>,
#[traversable(flatten)]
pub peak_regret: RelativePeakRegret,
pub peak_regret: RelativePeakRegret<M>,
}
impl RelativeForAll {
pub(crate) fn forced_import(
cfg: &ImportConfig,
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
base: RelativeBase::forced_import(cfg)?,
extended_own_pnl: RelativeExtendedOwnPnl::forced_import(cfg)?,
peak_regret: RelativePeakRegret::forced_import(cfg)?,
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
realized_base: &RealizedBase,
peak_regret: &ComputedFromHeightLast<Dollars>,
) -> Self {
// For the "all" cohort, market_cap = own market cap
let market_cap = &supply.total.usd;
Self {
base: RelativeBase::forced_import(
cfg, unrealized, supply, market_cap, &realized_base.realized_cap,
),
extended_own_pnl: RelativeExtendedOwnPnl::forced_import(cfg, unrealized),
peak_regret: RelativePeakRegret::forced_import(cfg, peak_regret, market_cap),
}
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
peak_regret_val: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute(
max_from,
unrealized,
realized,
supply_total_sats,
market_cap,
exit,
)?;
self.extended_own_pnl.compute(max_from, unrealized, exit)?;
self.peak_regret
.compute(max_from, peak_regret_val, market_cap, exit)?;
Ok(())
}
}

View File

@@ -1,36 +1,46 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Sats, StoredF32};
use brk_types::{Dollars, Height, StoredF32};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{
ComputedFromHeightLast, LazyBinaryComputedFromHeightLast, LazyBinaryFromHeightLast,
PercentageDollarsF32,
ComputedFromHeightLast, PercentageDollarsF32,
};
use crate::distribution::metrics::ImportConfig;
/// Peak regret relative metric.
#[derive(Clone, Traversable)]
pub struct RelativePeakRegret {
#[derive(Traversable)]
pub struct RelativePeakRegret<M: StorageMode = Rw> {
pub unrealized_peak_regret_rel_to_market_cap:
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
ComputedFromHeightLast<StoredF32, M>,
}
impl RelativePeakRegret {
pub(crate) fn forced_import(
cfg: &ImportConfig,
peak_regret: &ComputedFromHeightLast<Dollars>,
market_cap: &LazyBinaryComputedFromHeightLast<Dollars, Sats, Dollars>,
) -> Self {
Self {
) -> Result<Self> {
Ok(Self {
unrealized_peak_regret_rel_to_market_cap:
LazyBinaryFromHeightLast::from_block_last_and_lazy_binary_computed_block_last::<
PercentageDollarsF32, _, _,
>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("unrealized_peak_regret_rel_to_market_cap"),
cfg.version,
peak_regret,
market_cap,
),
}
cfg.indexes,
)?,
})
}
/// Recompute `unrealized_peak_regret_rel_to_market_cap` from `max_from`:
/// the peak-regret height vec divided by the supplied market cap through
/// the `PercentageDollarsF32` transform.
///
/// Unlike the sibling `compute` methods, this returns the single
/// `compute_binary` result directly rather than a trailing `Ok(())`.
///
/// # Errors
/// Propagates any error from `compute_binary`.
pub(crate) fn compute(
&mut self,
max_from: Height,
peak_regret: &impl ReadableVec<Height, Dollars>,
market_cap: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.unrealized_peak_regret_rel_to_market_cap
.compute_binary::<Dollars, Dollars, PercentageDollarsF32>(
max_from, peak_regret, market_cap, exit,
)
}
}

View File

@@ -1,53 +1,72 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Sats, StoredF64, Version};
use brk_types::{Height, Sats, StoredF64};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{LazyBinaryFromHeightLast, PercentageSatsF64};
use crate::internal::{ComputedFromHeightLast, PercentageSatsF64};
use crate::distribution::metrics::{ImportConfig, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Relative-to-all metrics (not present for the "all" cohort itself).
#[derive(Clone, Traversable)]
pub struct RelativeToAll {
#[derive(Traversable)]
pub struct RelativeToAll<M: StorageMode = Rw> {
pub supply_rel_to_circulating_supply:
LazyBinaryFromHeightLast<StoredF64, Sats, Sats>,
ComputedFromHeightLast<StoredF64, M>,
pub supply_in_profit_rel_to_circulating_supply:
LazyBinaryFromHeightLast<StoredF64, Sats, Sats>,
ComputedFromHeightLast<StoredF64, M>,
pub supply_in_loss_rel_to_circulating_supply:
LazyBinaryFromHeightLast<StoredF64, Sats, Sats>,
ComputedFromHeightLast<StoredF64, M>,
}
impl RelativeToAll {
pub(crate) fn forced_import(
cfg: &ImportConfig,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
all_supply: &SupplyMetrics,
) -> Self {
let v1 = Version::ONE;
let gs = &all_supply.total.sats;
Self {
) -> Result<Self> {
Ok(Self {
supply_rel_to_circulating_supply:
LazyBinaryFromHeightLast::from_computed_last::<PercentageSatsF64>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("supply_rel_to_circulating_supply"),
cfg.version + v1,
&supply.total.sats,
gs,
),
cfg.version + brk_types::Version::ONE,
cfg.indexes,
)?,
supply_in_profit_rel_to_circulating_supply:
LazyBinaryFromHeightLast::from_computed_last::<PercentageSatsF64>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("supply_in_profit_rel_to_circulating_supply"),
cfg.version + v1,
&unrealized.supply_in_profit.sats,
gs,
),
cfg.version + brk_types::Version::ONE,
cfg.indexes,
)?,
supply_in_loss_rel_to_circulating_supply:
LazyBinaryFromHeightLast::from_computed_last::<PercentageSatsF64>(
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("supply_in_loss_rel_to_circulating_supply"),
cfg.version + v1,
&unrealized.supply_in_loss.sats,
gs,
),
}
cfg.version + brk_types::Version::ONE,
cfg.indexes,
)?,
})
}
/// Recompute the three "relative to circulating supply" series, resuming
/// from `max_from`.
///
/// Each series divides a sats supply vec by `all_supply_sats` (the "all"
/// cohort's circulating supply) through the `PercentageSatsF64` transform.
/// Note the first call takes the cohort total directly as the
/// `supply_total_sats` parameter, while the in-profit / in-loss calls read
/// from `unrealized`'s own `.sats.height` vecs.
///
/// # Errors
/// Propagates any error from the underlying `compute_binary` calls.
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
// cohort total supply relative to circulating supply
self.supply_rel_to_circulating_supply
.compute_binary::<Sats, Sats, PercentageSatsF64>(
max_from, supply_total_sats, all_supply_sats, exit,
)?;
// supply currently in profit relative to circulating supply
self.supply_in_profit_rel_to_circulating_supply
.compute_binary::<Sats, Sats, PercentageSatsF64>(
max_from, &unrealized.supply_in_profit.sats.height, all_supply_sats, exit,
)?;
// supply currently in loss relative to circulating supply
self.supply_in_loss_rel_to_circulating_supply
.compute_binary::<Sats, Sats, PercentageSatsF64>(
max_from, &unrealized.supply_in_loss.sats.height, all_supply_sats, exit,
)?;
Ok(())
}
}

View File

@@ -1,10 +1,10 @@
use brk_types::Dollars;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::ComputedFromHeightLast;
use crate::distribution::metrics::{ImportConfig, RealizedBase, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{
RelativeBase, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl,
@@ -13,44 +13,52 @@ use super::{
/// Full extended relative metrics (base + rel_to_all + own_market_cap + own_pnl + peak_regret).
/// Used by: sth, lth, age_range cohorts.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithExtended {
pub struct RelativeWithExtended<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: RelativeBase,
pub base: RelativeBase<M>,
#[traversable(flatten)]
pub rel_to_all: RelativeToAll,
pub rel_to_all: RelativeToAll<M>,
#[traversable(flatten)]
pub extended_own_market_cap: RelativeExtendedOwnMarketCap,
pub extended_own_market_cap: RelativeExtendedOwnMarketCap<M>,
#[traversable(flatten)]
pub extended_own_pnl: RelativeExtendedOwnPnl,
pub extended_own_pnl: RelativeExtendedOwnPnl<M>,
#[traversable(flatten)]
pub peak_regret: RelativePeakRegret,
pub peak_regret: RelativePeakRegret<M>,
}
impl RelativeWithExtended {
pub(crate) fn forced_import(
cfg: &ImportConfig,
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
base: RelativeBase::forced_import(cfg)?,
rel_to_all: RelativeToAll::forced_import(cfg)?,
extended_own_market_cap: RelativeExtendedOwnMarketCap::forced_import(cfg)?,
extended_own_pnl: RelativeExtendedOwnPnl::forced_import(cfg)?,
peak_regret: RelativePeakRegret::forced_import(cfg)?,
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
all_supply: &SupplyMetrics,
realized_base: &RealizedBase,
peak_regret: &ComputedFromHeightLast<Dollars>,
) -> Self {
let market_cap = &all_supply.total.usd;
let own_market_cap = &supply.total.usd;
Self {
base: RelativeBase::forced_import(
cfg, unrealized, supply, market_cap, &realized_base.realized_cap,
),
rel_to_all: RelativeToAll::forced_import(cfg, unrealized, supply, all_supply),
extended_own_market_cap: RelativeExtendedOwnMarketCap::forced_import(
cfg, unrealized, own_market_cap,
),
extended_own_pnl: RelativeExtendedOwnPnl::forced_import(cfg, unrealized),
peak_regret: RelativePeakRegret::forced_import(cfg, peak_regret, market_cap),
}
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
own_market_cap: &impl ReadableVec<Height, Dollars>,
peak_regret_val: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute(max_from, unrealized, realized, supply_total_sats, market_cap, exit)?;
self.rel_to_all.compute(max_from, unrealized, supply_total_sats, all_supply_sats, exit)?;
self.extended_own_market_cap.compute(max_from, unrealized, own_market_cap, exit)?;
self.extended_own_pnl.compute(max_from, unrealized, exit)?;
self.peak_regret.compute(max_from, peak_regret_val, market_cap, exit)?;
Ok(())
}
}

View File

@@ -1,44 +1,66 @@
use brk_types::Dollars;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::ComputedFromHeightLast;
use crate::distribution::metrics::{ImportConfig, RealizedBase, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeBase, RelativePeakRegret, RelativeToAll};
/// Relative metrics with rel_to_all + peak_regret (no extended).
/// Used by: max_age, min_age cohorts.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithPeakRegret {
pub struct RelativeWithPeakRegret<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: RelativeBase,
pub base: RelativeBase<M>,
#[traversable(flatten)]
pub rel_to_all: RelativeToAll,
pub rel_to_all: RelativeToAll<M>,
#[traversable(flatten)]
pub peak_regret: RelativePeakRegret,
pub peak_regret: RelativePeakRegret<M>,
}
impl RelativeWithPeakRegret {
pub(crate) fn forced_import(
cfg: &ImportConfig,
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
base: RelativeBase::forced_import(cfg)?,
rel_to_all: RelativeToAll::forced_import(cfg)?,
peak_regret: RelativePeakRegret::forced_import(cfg)?,
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
all_supply: &SupplyMetrics,
realized_base: &RealizedBase,
peak_regret: &ComputedFromHeightLast<Dollars>,
) -> Self {
let market_cap = &all_supply.total.usd;
Self {
base: RelativeBase::forced_import(
cfg, unrealized, supply, market_cap, &realized_base.realized_cap,
),
rel_to_all: RelativeToAll::forced_import(cfg, unrealized, supply, all_supply),
peak_regret: RelativePeakRegret::forced_import(cfg, peak_regret, market_cap),
}
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
peak_regret_val: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.base.compute(
max_from,
unrealized,
realized,
supply_total_sats,
market_cap,
exit,
)?;
self.rel_to_all.compute(
max_from,
unrealized,
supply_total_sats,
all_supply_sats,
exit,
)?;
self.peak_regret
.compute(max_from, peak_regret_val, market_cap, exit)?;
Ok(())
}
}

View File

@@ -1,37 +1,60 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, SupplyMetrics, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeBase, RelativeToAll};
/// Relative metrics with rel_to_all (no extended, no peak_regret).
/// Used by: epoch, year, type, amount, address cohorts.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct RelativeWithRelToAll {
pub struct RelativeWithRelToAll<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: RelativeBase,
pub base: RelativeBase<M>,
#[traversable(flatten)]
pub rel_to_all: RelativeToAll,
pub rel_to_all: RelativeToAll<M>,
}
impl RelativeWithRelToAll {
pub(crate) fn forced_import(
cfg: &ImportConfig,
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
base: RelativeBase::forced_import(cfg)?,
rel_to_all: RelativeToAll::forced_import(cfg)?,
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
supply: &SupplyMetrics,
all_supply: &SupplyMetrics,
realized_base: &RealizedBase,
) -> Self {
let market_cap = &all_supply.total.usd;
Self {
base: RelativeBase::forced_import(
cfg, unrealized, supply, market_cap, &realized_base.realized_cap,
),
rel_to_all: RelativeToAll::forced_import(cfg, unrealized, supply, all_supply),
}
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {
self.base.compute(
max_from,
unrealized,
realized,
supply_total_sats,
market_cap,
exit,
)?;
self.rel_to_all.compute(
max_from,
unrealized,
supply_total_sats,
all_supply_sats,
exit,
)?;
Ok(())
}
}

View File

@@ -2,13 +2,13 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Sats, Version};
use crate::{ComputeIndexes, blocks};
use crate::{ComputeIndexes, blocks, prices};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::internal::{
HalfPriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin,
LazyBinaryValueFromHeightLast, ValueChangeFromHeight, ValueFromHeightLast,
HalveDollars, HalveSats, HalveSatsToBitcoin,
LazyValueFromHeightLast, ValueChangeFromHeight, ValueFromHeightLast,
};
use super::ImportConfig;
@@ -17,7 +17,7 @@ use super::ImportConfig;
#[derive(Traversable)]
pub struct SupplyMetrics<M: StorageMode = Rw> {
pub total: ValueFromHeightLast<M>,
pub halved: LazyBinaryValueFromHeightLast,
pub halved: LazyValueFromHeightLast,
/// 30-day change in supply (net position change) - sats, btc, usd
pub _30d_change: ValueChangeFromHeight<M>,
}
@@ -30,15 +30,13 @@ impl SupplyMetrics {
&cfg.name("supply"),
cfg.version,
cfg.indexes,
cfg.prices,
)?;
let supply_halved = LazyBinaryValueFromHeightLast::from_block_source::<
let supply_halved = LazyValueFromHeightLast::from_block_source::<
HalveSats,
HalveSatsToBitcoin,
HalfPriceTimesSats,
HalveDollars,
>(&cfg.name("supply_halved"), &supply, cfg.prices, cfg.version);
>(&cfg.name("supply_halved"), &supply, cfg.version);
let _30d_change = ValueChangeFromHeight::forced_import(
cfg.db,
@@ -67,7 +65,21 @@ impl SupplyMetrics {
/// Returns a parallel iterator over all vecs for parallel writing.
pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![&mut self.total.sats.height as &mut dyn AnyStoredVec].into_par_iter()
vec![
&mut self.total.sats.height as &mut dyn AnyStoredVec,
&mut self.total.usd.height as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
/// Eagerly compute USD height values from sats × price.
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.total.compute(prices, max_from, exit)
}
/// Validate computed versions against base version.

View File

@@ -10,8 +10,7 @@ use crate::{
ComputeIndexes,
distribution::state::UnrealizedState,
internal::{
ComputedFromHeightLast, DollarsMinus, DollarsPlus, LazyBinaryFromHeightLast,
LazyFromHeightLast, ValueFromHeightLast,
ComputedFromHeightLast, LazyFromHeightLast, ValueFromHeightLast,
},
prices,
};
@@ -48,8 +47,8 @@ pub struct UnrealizedBase<M: StorageMode = Rw> {
pub neg_unrealized_loss: LazyFromHeightLast<Dollars>,
// === Net and Total ===
pub net_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
pub total_unrealized_pnl: LazyBinaryFromHeightLast<Dollars>,
pub net_unrealized_pnl: ComputedFromHeightLast<Dollars, M>,
pub total_unrealized_pnl: ComputedFromHeightLast<Dollars, M>,
}
impl UnrealizedBase {
@@ -59,14 +58,12 @@ impl UnrealizedBase {
&cfg.name("supply_in_profit"),
cfg.version,
cfg.indexes,
cfg.prices,
)?;
let supply_in_loss = ValueFromHeightLast::forced_import(
cfg.db,
&cfg.name("supply_in_loss"),
cfg.version,
cfg.indexes,
cfg.prices,
)?;
let unrealized_profit = ComputedFromHeightLast::forced_import(
@@ -142,18 +139,18 @@ impl UnrealizedBase {
&unrealized_loss,
);
let net_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_last::<DollarsMinus>(
let net_unrealized_pnl = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
let total_unrealized_pnl = LazyBinaryFromHeightLast::from_computed_last::<DollarsPlus>(
cfg.indexes,
)?;
let total_unrealized_pnl = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("total_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
cfg.indexes,
)?;
Ok(Self {
supply_in_profit,
@@ -240,7 +237,9 @@ impl UnrealizedBase {
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.supply_in_profit.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.sats.height,
&mut self.supply_in_profit.usd.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.usd.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.height,
&mut self.unrealized_loss.height,
&mut self.invested_capital_in_profit.height,
@@ -419,6 +418,19 @@ impl UnrealizedBase {
exit,
)?;
self.net_unrealized_pnl.height.compute_subtract(
starting_indexes.height,
&self.unrealized_profit.height,
&self.unrealized_loss.height,
exit,
)?;
self.total_unrealized_pnl.height.compute_add(
starting_indexes.height,
&self.unrealized_profit.height,
&self.unrealized_loss.height,
exit,
)?;
Ok(())
}

View File

@@ -9,8 +9,8 @@ use brk_types::{
};
use tracing::{debug, info};
use vecdb::{
AnyVec, BytesVec, Database, Exit, WritableVec, ImportableVec, ReadableCloneableVec,
ReadableVec, Rw, StorageMode, LazyVecFrom1, PAGE_SIZE, Stamp,
AnyVec, BytesVec, Database, Exit, ImportableVec, LazyVecFrom1, PAGE_SIZE, ReadOnlyClone,
ReadableCloneableVec, ReadableVec, Rw, Stamp, StorageMode, WritableVec,
};
use crate::{
@@ -50,12 +50,12 @@ pub struct Vecs<M: StorageMode = Rw> {
pub empty_addr_count: AddrCountsVecs<M>,
pub address_activity: AddressActivityVecs<M>,
/// Total addresses ever seen (addr_count + empty_addr_count) - lazy, global + per-type
pub total_addr_count: TotalAddrCountVecs,
/// Total addresses ever seen (addr_count + empty_addr_count) - stored, global + per-type
pub total_addr_count: TotalAddrCountVecs<M>,
/// New addresses per block (delta of total) - lazy height, stored day1 stats, global + per-type
pub new_addr_count: NewAddrCountVecs<M>,
/// Growth rate (new / addr_count) - lazy ratio with distribution stats, global + per-type
pub growth_rate: GrowthRateVecs,
/// Growth rate (new / addr_count) - stored ratio with distribution stats, global + per-type
pub growth_rate: GrowthRateVecs<M>,
pub fundedaddressindex:
LazyVecFrom1<FundedAddressIndex, FundedAddressIndex, FundedAddressIndex, FundedAddressData>,
@@ -70,7 +70,6 @@ impl Vecs {
parent: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
) -> Result<Self> {
let db_path = parent.join(super::DB_NAME);
let states_path = db_path.join("states");
@@ -81,17 +80,9 @@ impl Vecs {
let version = parent_version + VERSION;
let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, prices, &states_path)?;
let utxo_cohorts = UTXOCohorts::forced_import(&db, version, indexes, &states_path)?;
// Create address cohorts with reference to utxo "all" cohort's supply for global ratios
let address_cohorts = AddressCohorts::forced_import(
&db,
version,
indexes,
prices,
&states_path,
&utxo_cohorts.all.metrics.supply,
)?;
let address_cohorts = AddressCohorts::forced_import(&db, version, indexes, &states_path)?;
// Create address data BytesVecs first so we can also use them for identity mappings
let fundedaddressindex_to_fundedaddressdata = BytesVec::forced_import_with(
@@ -123,21 +114,15 @@ impl Vecs {
let address_activity =
AddressActivityVecs::forced_import(&db, "address_activity", version, indexes)?;
// Lazy total = addr_count + empty_addr_count (global + per-type, with all derived indexes)
let total_addr_count = TotalAddrCountVecs::forced_import(
version,
indexes,
&addr_count,
&empty_addr_count,
)?;
// Stored total = addr_count + empty_addr_count (global + per-type, with all derived indexes)
let total_addr_count = TotalAddrCountVecs::forced_import(&db, version, indexes)?;
// Lazy delta of total (global + per-type)
let new_addr_count =
NewAddrCountVecs::forced_import(&db, version, indexes, &total_addr_count)?;
// Growth rate: new / addr_count (global + per-type)
let growth_rate =
GrowthRateVecs::forced_import(version, indexes, &new_addr_count, &addr_count)?;
let growth_rate = GrowthRateVecs::forced_import(&db, version, indexes)?;
let this = Self {
supply_state: BytesVec::forced_import_with(
@@ -359,14 +344,38 @@ impl Vecs {
self.empty_addr_count
.compute_rest(blocks, starting_indexes, exit)?;
// 6d. Compute new_addr_count cumulative (height is lazy delta)
// 6c. Compute total_addr_count = addr_count + empty_addr_count
self.total_addr_count.compute(
starting_indexes.height,
&self.addr_count,
&self.empty_addr_count,
exit,
)?;
// 6d. Compute new_addr_count cumulative + rolling (height is lazy delta)
let window_starts = blocks.count.window_starts();
self.new_addr_count
.compute_cumulative(starting_indexes, exit)?;
.compute(starting_indexes.height, &window_starts, exit)?;
// 6e. Compute growth_rate = new_addr_count / addr_count
self.growth_rate.compute(
starting_indexes.height,
&window_starts,
&self.new_addr_count,
&self.addr_count,
exit,
)?;
// 7. Compute rest part2 (relative metrics)
let supply_metrics = &self.utxo_cohorts.all.metrics.supply;
let height_to_market_cap = supply_metrics.total.usd.height.clone();
let height_to_market_cap = self
.utxo_cohorts
.all
.metrics
.supply
.total
.usd
.height
.read_only_clone();
aggregates::compute_rest_part2(
&mut self.utxo_cohorts,
@@ -400,5 +409,4 @@ impl Vecs {
.min(Height::from(self.empty_addr_count.min_stateful_height()))
.min(Height::from(self.address_activity.min_stateful_height()))
}
}

View File

@@ -27,7 +27,7 @@ impl Vecs {
self.rolling.compute(
starting_indexes.height,
&window_starts,
self.height.sum_cum.sum.inner(),
self.height.sum_cumulative.sum.inner(),
exit,
)?;

View File

@@ -0,0 +1,15 @@
//! Base generic struct with 2 type parameters — one per rolling window duration.
//!
//! Foundation for tx-derived rolling window types (1h, 24h — actual time-based).
use brk_traversable::Traversable;
/// Holder for one value per rolling time window: a 1-hour slot (`A`) and a
/// 24-hour slot (`B`, defaulting to the same type as `A`).
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct BlockWindows<A, B = A> {
/// 1-hour rolling-window value; exposed as "1h" when traversed.
#[traversable(rename = "1h")]
pub _1h: A,
/// 24-hour rolling-window value; exposed as "24h" when traversed.
#[traversable(rename = "24h")]
pub _24h: B,
}

View File

@@ -314,3 +314,108 @@ where
Ok(())
}
/// Compute distribution stats from windowed ranges of a source vec.
///
/// For each index `i`, reads all source items from groups `window_starts[i]..=i`
/// and computes average, min, max, median, and percentiles across the full window.
///
/// `first_indexes[i]` and `count_indexes[i]` map group `i` to its contiguous run
/// of `source` items; `window_starts[i]` names the first group inside `i`'s
/// window. Computation resumes from the smallest version-validated index shared
/// by all eight output vecs, pushes a zero into every output for empty windows,
/// and writes all outputs under the `exit` lock at the end.
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_aggregations_windowed<I, T, A>(
max_from: I,
source: &impl ReadableVec<A, T>,
first_indexes: &impl ReadableVec<I, A>,
count_indexes: &impl ReadableVec<I, StoredU64>,
window_starts: &impl ReadableVec<I, I>,
exit: &Exit,
min: &mut EagerVec<PcoVec<I, T>>,
max: &mut EagerVec<PcoVec<I, T>>,
average: &mut EagerVec<PcoVec<I, T>>,
median: &mut EagerVec<PcoVec<I, T>>,
pct10: &mut EagerVec<PcoVec<I, T>>,
pct25: &mut EagerVec<PcoVec<I, T>>,
pct75: &mut EagerVec<PcoVec<I, T>>,
pct90: &mut EagerVec<PcoVec<I, T>>,
) -> Result<()>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
A: VecIndex + VecValue + CheckedSub<A>,
{
// A change in any input's version invalidates all eight outputs.
let combined_version =
source.version() + first_indexes.version() + count_indexes.version() + window_starts.version();
// Find a common, validated resume index across every output vec.
let mut idx = max_from;
for vec in [&mut *min, &mut *max, &mut *average, &mut *median, &mut *pct10, &mut *pct25, &mut *pct75, &mut *pct90] {
idx = validate_and_start(vec, combined_version, idx)?;
}
let index = idx;
let start = index.to_usize();
// Batch-load the per-group metadata for `start..fi_len` in one pass each.
let fi_len = first_indexes.len();
let first_indexes_batch: Vec<A> = first_indexes.collect_range_at(start, fi_len);
let count_indexes_batch: Vec<StoredU64> = count_indexes.collect_range_at(start, fi_len);
let window_starts_batch: Vec<I> = window_starts.collect_range_at(start, fi_len);
let zero = T::from(0_usize);
first_indexes_batch
.iter()
.zip(count_indexes_batch.iter())
.zip(window_starts_batch.iter())
.enumerate()
.try_for_each(|(j, ((fi, ci), ws))| -> Result<()> {
// `j` is an offset into the batches; `idx` is the absolute output index.
let idx = start + j;
let window_start_offset = ws.to_usize();
// Last tx index (exclusive) of current block
let count = u64::from(*ci) as usize;
let range_end_usize = fi.to_usize() + count;
// First tx index of the window start block
let range_start_usize = if window_start_offset >= start {
// Window start falls inside the batch we already loaded.
first_indexes_batch[window_start_offset - start].to_usize()
} else {
// Window start precedes the batch: point-read it from storage.
first_indexes
.collect_one_at(window_start_offset)
.unwrap()
.to_usize()
};
let effective_count = range_end_usize.saturating_sub(range_start_usize);
if effective_count == 0 {
// Empty window: emit a zero for every statistic at this index.
for vec in [&mut *min, &mut *max, &mut *average, &mut *median, &mut *pct10, &mut *pct25, &mut *pct75, &mut *pct90] {
vec.truncate_push_at(idx, zero)?;
}
} else {
let mut values: Vec<T> =
source.collect_range_at(range_start_usize, range_end_usize);
// Compute sum before sorting
let len = values.len();
let sum_val = values.iter().copied().fold(T::from(0), |a, b| a + b);
let avg = sum_val / len;
// Sort once, then read extremes and percentiles off the sorted slice.
values.sort_unstable();
max.truncate_push_at(idx, *values.last().unwrap())?;
pct90.truncate_push_at(idx, get_percentile(&values, 0.90))?;
pct75.truncate_push_at(idx, get_percentile(&values, 0.75))?;
median.truncate_push_at(idx, get_percentile(&values, 0.50))?;
pct25.truncate_push_at(idx, get_percentile(&values, 0.25))?;
pct10.truncate_push_at(idx, get_percentile(&values, 0.10))?;
min.truncate_push_at(idx, *values.first().unwrap())?;
average.truncate_push_at(idx, avg)?;
}
Ok(())
})?;
// Persist all outputs while holding the exit lock so shutdown cannot
// interleave with the writes.
let _lock = exit.lock();
for vec in [min, max, average, median, pct10, pct25, pct75, pct90] {
vec.write()?;
}
Ok(())
}

View File

@@ -7,43 +7,44 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12, Minute1, Minute5,
Minute10, Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use crate::{
indexes,
indexes_from,
internal::{ComputedVecValue, Indexes, NumericValue},
ComputeIndexes,
use vecdb::{
Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
};
pub type EagerIndexesInner<T, M> = Indexes<
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute5, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute30, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour4, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour12, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Week1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month6, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<HalvingEpoch, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<DifficultyEpoch, T>>>,
>;
use crate::{
ComputeIndexes, indexes, indexes_from,
internal::{ComputedVecValue, Indexes, NumericValue},
};
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct EagerIndexes<T, M: StorageMode = Rw>(pub EagerIndexesInner<T, M>)
pub struct EagerIndexes<T, M: StorageMode = Rw>(
#[allow(clippy::type_complexity)]
pub Indexes<
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute5, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute30, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour4, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour12, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Week1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month6, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<HalvingEpoch, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<DifficultyEpoch, T>>>,
>,
)
where
T: ComputedVecValue + PartialOrd + JsonSchema;

View File

@@ -5,8 +5,8 @@
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Minute1, Minute10, Minute30, Minute5, Month1,
Month3, Month6, Version, Week1, Year1, Year10, Hour1, Hour4, Hour12,
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour4, Hour12, Minute1, Minute5, Minute10,
Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
@@ -17,29 +17,30 @@ use crate::{
internal::{ComputedVecValue, EagerIndexes, Indexes},
};
pub type LazyEagerIndexesInner<T, S> = Indexes<
LazyVecFrom1<Minute1, T, Minute1, S>,
LazyVecFrom1<Minute5, T, Minute5, S>,
LazyVecFrom1<Minute10, T, Minute10, S>,
LazyVecFrom1<Minute30, T, Minute30, S>,
LazyVecFrom1<Hour1, T, Hour1, S>,
LazyVecFrom1<Hour4, T, Hour4, S>,
LazyVecFrom1<Hour12, T, Hour12, S>,
LazyVecFrom1<Day1, T, Day1, S>,
LazyVecFrom1<Day3, T, Day3, S>,
LazyVecFrom1<Week1, T, Week1, S>,
LazyVecFrom1<Month1, T, Month1, S>,
LazyVecFrom1<Month3, T, Month3, S>,
LazyVecFrom1<Month6, T, Month6, S>,
LazyVecFrom1<Year1, T, Year1, S>,
LazyVecFrom1<Year10, T, Year10, S>,
LazyVecFrom1<HalvingEpoch, T, HalvingEpoch, S>,
LazyVecFrom1<DifficultyEpoch, T, DifficultyEpoch, S>,
>;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyEagerIndexes<T, S>(pub LazyEagerIndexesInner<T, S>)
pub struct LazyEagerIndexes<T, S>(
#[allow(clippy::type_complexity)]
pub Indexes<
LazyVecFrom1<Minute1, T, Minute1, S>,
LazyVecFrom1<Minute5, T, Minute5, S>,
LazyVecFrom1<Minute10, T, Minute10, S>,
LazyVecFrom1<Minute30, T, Minute30, S>,
LazyVecFrom1<Hour1, T, Hour1, S>,
LazyVecFrom1<Hour4, T, Hour4, S>,
LazyVecFrom1<Hour12, T, Hour12, S>,
LazyVecFrom1<Day1, T, Day1, S>,
LazyVecFrom1<Day3, T, Day3, S>,
LazyVecFrom1<Week1, T, Week1, S>,
LazyVecFrom1<Month1, T, Month1, S>,
LazyVecFrom1<Month3, T, Month3, S>,
LazyVecFrom1<Month6, T, Month6, S>,
LazyVecFrom1<Year1, T, Year1, S>,
LazyVecFrom1<Year10, T, Year10, S>,
LazyVecFrom1<HalvingEpoch, T, HalvingEpoch, S>,
LazyVecFrom1<DifficultyEpoch, T, DifficultyEpoch, S>,
>,
)
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S: ComputedVecValue;

View File

@@ -1,3 +1,4 @@
mod block_windows;
mod compute;
mod distribution_stats;
mod eager_indexes;
@@ -8,6 +9,7 @@ mod single;
mod traits;
mod windows;
pub(crate) use block_windows::*;
pub(crate) use compute::*;
pub(crate) use distribution_stats::*;
pub(crate) use eager_indexes::*;

View File

@@ -1,309 +0,0 @@
//! Lazy binary transform from two SumCum sources, producing Last (cumulative) ratios only.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::{
indexes_from,
internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast,
ComputedVecValue, LazyBinaryComputedFromHeightLast, LazyBinaryHeightDerivedLast,
LazyBinaryTransformLast, LazyFromHeightLast, NumericValue,
},
};
/// Lazy binary transform over two height-indexed sources.
///
/// `height` is evaluated on demand from the two sources; `rest` carries the
/// per-period "last" views built from the same pair. `T` is the output value
/// type, `S1T`/`S2T` the two input value types (both default to `T`).
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryFromHeightLast<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height values derived from the two sources.
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Per-period last-value views (boxed to keep this struct small).
    #[deref]
    #[deref_mut]
    pub rest: Box<LazyBinaryHeightDerivedLast<T, S1T, S2T>>,
}

/// Version component folded into every constructor's `version` argument.
const VERSION: Version = Version::ZERO;
/// Helper macro: given two deref-able sources whose `.$p` fields implement
/// `ReadableCloneableVec`, build all 17 period fields of a `LazyBinaryHeightDerivedLast`.
///
/// NOTE: the generated code references a generic `F` — the macro may only be
/// invoked inside a function that has `F: BinaryTransform<..>` in scope.
macro_rules! build_rest {
    ($name:expr, $v:expr, $source1:expr, $source2:expr) => {{
        // Inner macro invoked once per period field by `indexes_from!`,
        // capturing `$name`/`$v` and both sources from this expansion.
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformLast::from_vecs::<F>(
                    $name,
                    $v,
                    $source1.$p.read_only_boxed_clone(),
                    $source2.$p.read_only_boxed_clone(),
                )
            };
        }
        Box::new(LazyBinaryHeightDerivedLast(indexes_from!(period)))
    }};
}
impl<T, S1T, S2T> LazyBinaryFromHeightLast<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Create from two `ComputedFromHeightSumCum` sources.
    ///
    /// The height view pairs the sources' *cumulative* height vecs, so the
    /// transform applies to running totals rather than per-block values.
    pub(crate) fn from_computed_sum_cum<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightSumCum<S1T>,
        source2: &ComputedFromHeightSumCum<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: PartialOrd,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height_cumulative.read_only_boxed_clone(),
                source2.height_cumulative.read_only_boxed_clone(),
            ),
            rest: Box::new(LazyBinaryHeightDerivedLast::from_computed_sum_cum::<F>(
                name, v, source1, source2,
            )),
        }
    }

    /// Create from two `ComputedFromHeightLast` sources, pairing their
    /// per-height vecs directly.
    pub(crate) fn from_computed_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightLast<S1T>,
        source2: &ComputedFromHeightLast<S2T>,
    ) -> Self
    where
        S1T: NumericValue,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: Box::new(LazyBinaryHeightDerivedLast::from_computed_last::<F>(
                name, v, source1, source2,
            )),
        }
    }

    /// Create from a stored `ComputedFromHeightLast` and a `LazyFromHeightLast`
    /// (itself lazily derived from `S2SourceT`).
    pub(crate) fn from_block_last_and_lazy_block_last<F, S2SourceT>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightLast<S1T>,
        source2: &LazyFromHeightLast<S2T, S2SourceT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1T: NumericValue,
        S2SourceT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: Box::new(
                LazyBinaryHeightDerivedLast::from_block_last_and_lazy_block_last::<F, _>(
                    name, v, source1, source2,
                ),
            ),
        }
    }

    /// Mirror of `from_block_last_and_lazy_block_last` with the lazy source first.
    pub(crate) fn from_lazy_block_last_and_block_last<F, S1SourceT>(
        name: &str,
        version: Version,
        source1: &LazyFromHeightLast<S1T, S1SourceT>,
        source2: &ComputedFromHeightLast<S2T>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S2T: NumericValue,
        S1SourceT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: Box::new(
                LazyBinaryHeightDerivedLast::from_lazy_block_last_and_block_last::<F, _>(
                    name, v, source1, source2,
                ),
            ),
        }
    }

    /// Create from a ComputedFromHeightLast and a LazyBinaryFromHeightLast.
    pub(crate) fn from_block_last_and_binary_block<F, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightLast<S1T>,
        source2: &LazyBinaryFromHeightLast<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1T: NumericValue,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            // Period fields come straight from both sources' period vecs.
            rest: build_rest!(name, v, source1, source2),
        }
    }

    /// Create from two LazyBinaryFromHeightLast sources.
    pub(crate) fn from_both_binary_block<F, S1aT, S1bT, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &LazyBinaryFromHeightLast<S1T, S1aT, S1bT>,
        source2: &LazyBinaryFromHeightLast<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1aT: ComputedVecValue + JsonSchema,
        S1bT: ComputedVecValue + JsonSchema,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: build_rest!(name, v, source1, source2),
        }
    }

    /// Create from separate height sources and two `ComputedHeightDerivedLast` structs.
    pub(crate) fn from_height_and_derived_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: ReadableBoxedVec<Height, S1T>,
        height_source2: ReadableBoxedVec<Height, S2T>,
        derived1: &ComputedHeightDerivedLast<S1T>,
        derived2: &ComputedHeightDerivedLast<S2T>,
    ) -> Self
    where
        S1T: NumericValue,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            rest: build_rest!(name, v, derived1, derived2),
        }
    }

    /// Create from a ComputedFromHeightLast and a LazyBinaryComputedFromHeightLast.
    pub(crate) fn from_block_last_and_lazy_binary_computed_block_last<F, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightLast<S1T>,
        source2: &LazyBinaryComputedFromHeightLast<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1T: NumericValue,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: build_rest!(name, v, source1, source2),
        }
    }

    /// Create from two LazyBinaryComputedFromHeightLast sources.
    pub(crate) fn from_both_lazy_binary_computed_block_last<F, S1aT, S1bT, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &LazyBinaryComputedFromHeightLast<S1T, S1aT, S1bT>,
        source2: &LazyBinaryComputedFromHeightLast<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1aT: ComputedVecValue + JsonSchema,
        S1bT: ComputedVecValue + JsonSchema,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: build_rest!(name, v, source1, source2),
        }
    }

    /// Create from a LazyBinaryFromHeightLast and a LazyBinaryComputedFromHeightLast.
    pub(crate) fn from_binary_block_and_lazy_binary_block_last<F, S1aT, S1bT, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &LazyBinaryFromHeightLast<S1T, S1aT, S1bT>,
        source2: &LazyBinaryComputedFromHeightLast<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1aT: ComputedVecValue + JsonSchema,
        S1bT: ComputedVecValue + JsonSchema,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.height.read_only_boxed_clone(),
                source2.height.read_only_boxed_clone(),
            ),
            rest: build_rest!(name, v, source1, source2),
        }
    }
}

View File

@@ -1,38 +1,34 @@
//! ComputedFromHeightCum - stored height + LazyLast + cumulative (from height).
//! ComputedFromHeightCumulative - stored height + LazyAggVec + cumulative (from height).
//!
//! Like ComputedFromHeightCumSum but without RollingWindows.
//! Like ComputedFromHeightCumulativeSum but without RollingWindows.
//! Used for distribution metrics where rolling is optional per cohort.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyAggVec index views.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue},
};
#[derive(Deref, DerefMut, Traversable)]
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCum<T, M: StorageMode = Rw>
pub struct ComputedFromHeightCumulative<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub last: ComputedFromHeightLast<T, M>,
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCum<T>
impl<T> ComputedFromHeightCumulative<T>
where
T: NumericValue + JsonSchema,
{
@@ -44,15 +40,11 @@ where
) -> Result<Self> {
let v = version + VERSION;
let last = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let cumulative = ComputedFromHeightLast::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let cumulative =
ComputedFromHeightLast::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
Ok(Self { last, cumulative })
Ok(Self { height, cumulative })
}
/// Compute height data via closure, then cumulative only (no rolling).
@@ -65,25 +57,18 @@ where
where
T: Default,
{
compute_height(&mut self.last.height)?;
self.cumulative
.height
.compute_cumulative(max_from, &self.last.height, exit)?;
Ok(())
compute_height(&mut self.height)?;
self.compute_rest(max_from, exit)
}
/// Compute cumulative from already-filled height vec.
pub(crate) fn compute_cumulative(
&mut self,
max_from: Height,
exit: &Exit,
) -> Result<()>
pub(crate) fn compute_rest(&mut self, max_from: Height, exit: &Exit) -> Result<()>
where
T: Default,
{
self.cumulative
.height
.compute_cumulative(max_from, &self.last.height, exit)?;
.compute_cumulative(max_from, &self.height, exit)?;
Ok(())
}
}

View File

@@ -1,32 +1,28 @@
//! ComputedFromHeightCumFull - stored height + LazyLast + cumulative (from height) + RollingFull.
//! ComputedFromHeightCumulativeFull - stored height + LazyAggVec + cumulative (from height) + RollingFull.
//!
//! For metrics with stored per-block data, cumulative sums, and rolling windows.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views too.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyAggVec index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue, RollingFull, WindowStarts},
};
#[derive(Deref, DerefMut, Traversable)]
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumFull<T, M: StorageMode = Rw>
pub struct ComputedFromHeightCumulativeFull<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub last: ComputedFromHeightLast<T, M>,
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
@@ -35,7 +31,7 @@ where
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumFull<T>
impl<T> ComputedFromHeightCumulativeFull<T>
where
T: NumericValue + JsonSchema,
{
@@ -47,17 +43,13 @@ where
) -> Result<Self> {
let v = version + VERSION;
let last = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let cumulative = ComputedFromHeightLast::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let cumulative =
ComputedFromHeightLast::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
Ok(Self {
last,
height,
cumulative,
rolling,
})
@@ -75,12 +67,12 @@ where
T: From<f64> + Default + SubAssign + Copy + Ord,
f64: From<T>,
{
compute_height(&mut self.last.height)?;
compute_height(&mut self.height)?;
self.cumulative
.height
.compute_cumulative(max_from, &self.last.height, exit)?;
.compute_cumulative(max_from, &self.height, exit)?;
self.rolling
.compute(max_from, windows, &self.last.height, exit)?;
.compute(max_from, windows, &self.height, exit)?;
Ok(())
}
}

View File

@@ -1,33 +1,29 @@
//! ComputedFromHeightCumSum - stored height + LazyLast + cumulative (from height) + RollingWindows (sum).
//! ComputedFromHeightCumulativeSum - stored height + LazyAggVec + cumulative (from height) + RollingWindows (sum).
//!
//! Like ComputedFromHeightCumFull but with rolling sum only (no distribution).
//! Like ComputedFromHeightCumulativeFull but with rolling sum only (no distribution).
//! Used for count metrics where distribution stats aren't meaningful.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views too.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyAggVec index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue, RollingWindows, WindowStarts},
};
#[derive(Deref, DerefMut, Traversable)]
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumSum<T, M: StorageMode = Rw>
pub struct ComputedFromHeightCumulativeSum<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub last: ComputedFromHeightLast<T, M>,
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[traversable(flatten)]
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
@@ -36,7 +32,7 @@ where
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumSum<T>
impl<T> ComputedFromHeightCumulativeSum<T>
where
T: NumericValue + JsonSchema,
{
@@ -48,17 +44,13 @@ where
) -> Result<Self> {
let v = version + VERSION;
let last = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let cumulative = ComputedFromHeightLast::forced_import(
db,
&format!("{name}_cumulative"),
v,
indexes,
)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let cumulative =
ComputedFromHeightLast::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let rolling = RollingWindows::forced_import(db, name, v, indexes)?;
Ok(Self {
last,
height,
cumulative,
rolling,
})
@@ -75,12 +67,12 @@ where
where
T: Default + SubAssign,
{
compute_height(&mut self.last.height)?;
compute_height(&mut self.height)?;
self.cumulative
.height
.compute_cumulative(max_from, &self.last.height, exit)?;
.compute_cumulative(max_from, &self.height, exit)?;
self.rolling
.compute_rolling_sum(max_from, windows, &self.last.height, exit)?;
.compute_rolling_sum(max_from, windows, &self.height, exit)?;
Ok(())
}
}

View File

@@ -1,5 +1,6 @@
//! ComputedFromHeight using Distribution aggregation (no sum/cumulative).
//!
//! Stored height data + LazyAggVec index views + rolling distribution windows.
//! Use for block-based metrics where sum/cumulative would be misleading
//! (e.g., activity counts that can't be deduplicated across blocks).
@@ -7,25 +8,22 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::indexes;
use crate::internal::{ComputedHeightDerivedDistribution, ComputedVecValue, NumericValue};
use crate::internal::{ComputedVecValue, NumericValue, RollingDistribution, WindowStarts};
#[derive(Deref, DerefMut, Traversable)]
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightDistribution<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
{
#[traversable(rename = "base")]
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
#[deref]
#[deref_mut]
pub rest: Box<ComputedHeightDerivedDistribution<T>>,
#[traversable(flatten)]
pub rolling: RollingDistribution<T, M>,
}
const VERSION: Version = Version::ZERO;
@@ -43,14 +41,26 @@ where
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let rolling = RollingDistribution::forced_import(db, name, v, indexes)?;
let rest = ComputedHeightDerivedDistribution::forced_import(
name,
height.read_only_boxed_clone(),
v,
indexes,
);
Ok(Self { height, rolling })
}
Ok(Self { height, rest: Box::new(rest) })
/// Compute height data via closure, then rolling distribution.
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()>
where
T: Copy + Ord + From<f64> + Default,
f64: From<T>,
{
compute_height(&mut self.height)?;
self.rolling
.compute_distribution(max_from, windows, &self.height, exit)?;
Ok(())
}
}

View File

@@ -1,57 +0,0 @@
//! ComputedFromHeight with full stats aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::indexes;
use crate::internal::{ComputedHeightDerivedFull, ComputedVecValue, NumericValue};
/// Eagerly stored per-height values plus every derived "full stats" view.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightFull<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Stored per-height (per-block) values; exposed as "base" when traversed.
    #[traversable(rename = "base")]
    pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
    /// Derived full-stats aggregations (boxed to keep this struct small).
    #[deref]
    #[deref_mut]
    pub rest: Box<ComputedHeightDerivedFull<T, M>>,
}

/// Version component folded into every constructor's `version` argument.
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the stored height vec under `name`, then hook all
    /// derived full-stats aggregations onto a read-only clone of it.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let base: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let derived = ComputedHeightDerivedFull::forced_import(
            db,
            name,
            base.read_only_boxed_clone(),
            ver,
            indexes,
        )?;
        Ok(Self {
            height: base,
            rest: Box::new(derived),
        })
    }
}

View File

@@ -6,7 +6,10 @@ use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use vecdb::{
BinaryTransform, Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableCloneableVec,
ReadableVec, Rw, StorageMode, VecValue,
};
use crate::indexes;
@@ -41,9 +44,39 @@ where
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let rest =
ComputedHeightDerivedLast::forced_import(name, height.read_only_boxed_clone(), v, indexes);
let rest = ComputedHeightDerivedLast::forced_import(
name,
height.read_only_boxed_clone(),
v,
indexes,
);
Ok(Self { height, rest: Box::new(rest) })
Ok(Self {
height,
rest: Box::new(rest),
})
}
/// Eagerly compute this vec as a binary transform of two sources.
pub(crate) fn compute_binary<S1T, S2T, F>(
&mut self,
max_from: Height,
source1: &impl ReadableVec<Height, S1T>,
source2: &impl ReadableVec<Height, S2T>,
exit: &Exit,
) -> Result<()>
where
S1T: VecValue,
S2T: VecValue,
F: BinaryTransform<S1T, S2T, T>,
{
self.height.compute_transform2(
max_from,
source1,
source2,
|(h, s1, s2, ..)| (h, F::apply(s1, s2)),
exit,
)?;
Ok(())
}
}

View File

@@ -1,61 +0,0 @@
//! LazyBinaryComputedFromHeightDistribution - lazy binary transform with distribution stats.
//!
//! Height-level values are lazy: `transform(source1[h], source2[h])`.
//! Uses Distribution aggregation (no sum/cumulative) - appropriate for ratios.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::{
indexes,
internal::{ComputedHeightDerivedDistribution, ComputedVecValue, NumericValue},
};
/// Version component folded into every constructor's `version` argument.
const VERSION: Version = Version::ZERO;
/// Lazy binary transform at height with distribution stats (no sum/cumulative).
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryComputedFromHeightDistribution<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height values derived from the two sources; "base" when traversed.
    #[traversable(rename = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived distribution-stat aggregations (boxed to keep this struct small).
    #[deref]
    #[deref_mut]
    pub rest: Box<ComputedHeightDerivedDistribution<T>>,
}
impl<T, S1T, S2T> LazyBinaryComputedFromHeightDistribution<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Build the lazy height-level transform of the two sources, then hook the
    /// derived distribution views onto a read-only clone of it.
    pub(crate) fn forced_import<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        let ver = version + VERSION;
        let lazy_height = LazyVecFrom2::transformed::<F>(name, ver, source1, source2);
        let derived = ComputedHeightDerivedDistribution::forced_import(
            name,
            lazy_height.read_only_boxed_clone(),
            ver,
            indexes,
        );
        Self {
            height: lazy_height,
            rest: Box::new(derived),
        }
    }
}

View File

@@ -1,69 +0,0 @@
//! LazyBinaryComputedFromHeightFull - block full with lazy binary transform at height level.
//!
//! Height-level values are lazy: `transform(source1[h], source2[h])`.
//! Cumulative, day1 stats, and difficultyepoch are stored since they
//! require aggregation across heights.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedHeightDerivedFull, ComputedVecValue, NumericValue},
};
/// Version component folded into every constructor's `version` argument.
const VERSION: Version = Version::ZERO;
/// Block full aggregation with lazy binary transform at height + computed derived indexes.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryComputedFromHeightFull<T, S1T = T, S2T = T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height values derived from the two sources; "base" when traversed.
    #[traversable(rename = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Stored derived full-stats aggregations (boxed to keep this struct small).
    #[deref]
    #[deref_mut]
    pub rest: Box<ComputedHeightDerivedFull<T, M>>,
}
impl<T, S1T, S2T> LazyBinaryComputedFromHeightFull<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Build the lazy height-level transform of the two sources, then import
    /// the stored derived aggregations wired to a read-only clone of it.
    pub(crate) fn forced_import<F: BinaryTransform<S1T, S2T, T>>(
        db: &Database,
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let lazy_height = LazyVecFrom2::transformed::<F>(name, ver, source1, source2);
        let derived = ComputedHeightDerivedFull::forced_import(
            db,
            name,
            lazy_height.read_only_boxed_clone(),
            ver,
            indexes,
        )?;
        Ok(Self {
            height: lazy_height,
            rest: Box::new(derived),
        })
    }

    /// Forward cumulative computation to the stored derived aggregations,
    /// feeding them the lazy height values.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.rest.compute_cumulative(starting_indexes, &self.height, exit)
    }
}

View File

@@ -1,58 +0,0 @@
//! LazyBinaryComputedFromHeightLast - block last with lazy binary transform at height level.
//!
//! Height-level value is lazy: `transform(source1[h], source2[h])`.
//! Day1 last is stored since it requires finding the last value within each date
//! (which may span multiple heights with varying prices).
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::{
indexes,
internal::{ComputedHeightDerivedLast, ComputedVecValue, NumericValue},
};
/// Version component folded into every constructor's `version` argument.
const VERSION: Version = Version::ZERO;
/// Block last aggregation with lazy binary transform at height + computed derived indexes.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryComputedFromHeightLast<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height values derived from the two sources.
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Stored derived last-value aggregations (boxed to keep this struct small).
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<ComputedHeightDerivedLast<T>>,
}
impl<T, S1T, S2T> LazyBinaryComputedFromHeightLast<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Build the lazy height-level transform of the two sources, then hook
    /// the derived last-value views onto a read-only clone of it.
    pub(crate) fn forced_import<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        let ver = version + VERSION;
        let lazy_height = LazyVecFrom2::transformed::<F>(name, ver, source1, source2);
        let derived = ComputedHeightDerivedLast::forced_import(
            name,
            lazy_height.read_only_boxed_clone(),
            ver,
            indexes,
        );
        Self {
            height: lazy_height,
            rest: Box::new(derived),
        }
    }
}

View File

@@ -1,71 +0,0 @@
//! LazyBinaryComputedFromHeightSumCum - block sum_cum with lazy binary transform at height level.
//!
//! Height-level sum is lazy: `transform(source1[h], source2[h])`.
//! Cumulative and day1 stats are stored since they require aggregation
//! across heights.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode};
use crate::{
ComputeIndexes,
indexes,
internal::{ComputedHeightDerivedSumCum, ComputedVecValue, NumericValue},
};
const VERSION: Version = Version::ZERO;
/// Block sum_cum aggregation with lazy binary transform at height + computed derived indexes.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryComputedFromHeightSumCum<T, S1T = T, S2T = T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height sum: `transform(source1[h], source2[h])`; exposed as
    /// "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Stored cumulative/derived vecs aggregated from the lazy height sum.
    /// Deref target, so callers reach the derived vecs transparently.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<ComputedHeightDerivedSumCum<T, M>>,
}
impl<T, S1T, S2T> LazyBinaryComputedFromHeightSumCum<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Build the lazy binary height vec and import the stored sum/cum derived
    /// vecs that aggregate it (fallible: the derived import touches `db`).
    pub(crate) fn forced_import<F: BinaryTransform<S1T, S2T, T>>(
        db: &Database,
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let combined = version + VERSION;
        let height = LazyVecFrom2::transformed::<F>(name, combined, source1, source2);
        // Derived vecs read from a read-only clone of the lazy height vec.
        let derived = ComputedHeightDerivedSumCum::forced_import(
            db,
            name,
            height.read_only_boxed_clone(),
            combined,
            indexes,
        )?;
        Ok(Self {
            height,
            rest: Box::new(derived),
        })
    }

    /// Recompute the stored cumulative/derived vecs from the lazy height sum,
    /// resuming at `starting_indexes`.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.rest.derive_from(starting_indexes, &self.height, exit)
    }
}

View File

@@ -1,33 +1,34 @@
//! LazyComputedFromHeightFull - block full with lazy height transform.
//! LazyComputedFromHeightCumulativeFull - block full with lazy height transform + cumulative + rolling.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom1, UnaryTransform, Rw, StorageMode};
use vecdb::{Database, Exit, LazyVecFrom1, ReadableCloneableVec, Rw, StorageMode, UnaryTransform};
use crate::{
ComputeIndexes,
indexes,
internal::{ComputedVecValue, ComputedHeightDerivedFull, NumericValue},
internal::{ComputedHeightDerivedCumulativeFull, ComputedVecValue, NumericValue, WindowStarts},
};
const VERSION: Version = Version::ZERO;
/// Block full aggregation with lazy height transform + computed derived indexes.
/// Block full aggregation with lazy height transform + cumulative + rolling windows.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyComputedFromHeightFull<T, S = T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
T: NumericValue + JsonSchema,
S: ComputedVecValue,
{
#[traversable(rename = "base")]
pub height: LazyVecFrom1<Height, T, Height, S>,
#[deref]
#[deref_mut]
pub rest: Box<ComputedHeightDerivedFull<T, M>>,
pub rest: Box<ComputedHeightDerivedCumulativeFull<T, M>>,
}
impl<T, S> LazyComputedFromHeightFull<T, S>
@@ -46,18 +47,29 @@ where
let height = LazyVecFrom1::transformed::<F>(name, v, source.read_only_boxed_clone());
let rest =
ComputedHeightDerivedFull::forced_import(db, name, height.read_only_boxed_clone(), v, indexes)?;
let rest = ComputedHeightDerivedCumulativeFull::forced_import(
db,
name,
v,
indexes,
)?;
Ok(Self { height, rest: Box::new(rest) })
Ok(Self {
height,
rest: Box::new(rest),
})
}
pub(crate) fn compute_cumulative(
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
) -> Result<()> {
self.rest
.compute_cumulative(starting_indexes, &self.height, exit)
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
f64: From<T>,
{
self.rest.compute(max_from, windows, &self.height, exit)
}
}

View File

@@ -1,75 +0,0 @@
//! LazyComputedFromHeightSumCum - block sum+cumulative with lazy height transform.
//!
//! Use this when you need:
//! - Lazy height (binary transform from two sources)
//! - Stored cumulative and day1 aggregates
//! - Lazy coarser period lookups
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode};
use crate::{indexes, ComputeIndexes};
use crate::internal::{ComputedVecValue, ComputedHeightDerivedSumCum, NumericValue};
/// Block sum+cumulative with lazy binary height transform + computed derived indexes.
///
/// Height is a lazy binary transform (e.g., mask × source, or price × sats).
/// Cumulative and day1 are stored (computed from lazy height).
/// Coarser periods are lazy lookups.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyComputedFromHeightSumCum<T, S1T = T, S2T = T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy per-height sum (binary transform of two sources, supplied by the
    /// caller of `forced_import`); exposed as "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Stored cumulative and day1 aggregates derived from the lazy height sum.
    /// Deref target, so callers reach the derived vecs transparently.
    #[deref]
    #[deref_mut]
    pub rest: Box<ComputedHeightDerivedSumCum<T, M>>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> LazyComputedFromHeightSumCum<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Wrap a caller-built lazy height vec and import the stored sum/cum
    /// derived vecs that aggregate it (fallible: the derived import touches `db`).
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    ) -> Result<Self> {
        let combined = version + VERSION;
        // Derived vecs read from a read-only clone of the lazy height vec.
        let derived = ComputedHeightDerivedSumCum::forced_import(
            db,
            name,
            height.read_only_boxed_clone(),
            combined,
            indexes,
        )?;
        Ok(Self {
            height,
            rest: Box::new(derived),
        })
    }

    /// Recompute the stored cumulative/derived vecs from the lazy height sum,
    /// resuming at `starting_indexes`.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.rest.derive_from(starting_indexes, &self.height, exit)
    }
}

View File

@@ -1,50 +0,0 @@
//! Lazy unary transform from height with Full aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedFromHeightFull, ComputedVecValue, LazyHeightDerivedFull,
NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyFromHeightFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy height-level values: a unary transform of the height source;
    /// exposed as "base" in the traversable tree.
    #[traversable(rename = "base")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Lazy derived indexes (full aggregation) built over the same source.
    /// Deref target, so callers reach the derived vecs transparently.
    #[deref]
    #[deref_mut]
    pub rest: Box<LazyHeightDerivedFull<T, S1T>>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyFromHeightFull<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Build by unary-transforming (`F`) a computed full-aggregation source:
    /// the lazy height vec transforms `height_source`, and the derived vecs
    /// are lazy views over `source.rest`.
    pub(crate) fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: ReadableBoxedVec<Height, S1T>,
        source: &ComputedFromHeightFull<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let combined = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, combined, height_source);
        let derived =
            LazyHeightDerivedFull::from_derived_computed::<F>(name, combined, &source.rest);
        Self {
            height,
            rest: Box::new(derived),
        }
    }
}

View File

@@ -6,10 +6,11 @@ use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedFromHeightLast,
ComputedVecValue, LazyBinaryComputedFromHeightLast, LazyBinaryFromHeightLast,
LazyHeightDerivedLast, NumericValue,
use crate::{
indexes,
internal::{
ComputedFromHeightLast, ComputedVecValue, LazyHeightDerivedLast, NumericValue,
},
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
@@ -48,22 +49,19 @@ where
}
}
pub(crate) fn from_lazy_binary_computed<F, S1aT, S1bT>(
pub(crate) fn from_height_source<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
height_source: ReadableBoxedVec<Height, S1T>,
source: &LazyBinaryComputedFromHeightLast<S1T, S1aT, S1bT>,
indexes: &indexes::Vecs,
) -> Self
where
F: UnaryTransform<S1T, T>,
S1T: NumericValue,
S1aT: ComputedVecValue + JsonSchema,
S1bT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, height_source),
rest: Box::new(LazyHeightDerivedLast::from_derived_computed::<F>(name, v, &source.rest)),
height: LazyVecFrom1::transformed::<F>(name, v, height_source.clone()),
rest: Box::new(LazyHeightDerivedLast::from_height_source::<F>(name, v, height_source, indexes)),
}
}
@@ -84,21 +82,4 @@ where
}
}
/// Create by unary-transforming a LazyBinaryFromHeightLast source.
pub(crate) fn from_binary<F, S1aT, S1bT>(
name: &str,
version: Version,
source: &LazyBinaryFromHeightLast<S1T, S1aT, S1bT>,
) -> Self
where
F: UnaryTransform<S1T, T>,
S1aT: ComputedVecValue + JsonSchema,
S1bT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, source.height.read_only_boxed_clone()),
rest: Box::new(LazyHeightDerivedLast::from_binary::<F, _, _>(name, v, &source.rest)),
}
}
}

View File

@@ -1,66 +0,0 @@
//! Lazy unary transform from height with SumCum aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedSumCum, ComputedVecValue,
LazyHeightDerivedSumCum, NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyFromHeightSumCum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy per-height values: a unary transform of the height source;
    /// exposed as "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Lazy derived sum/cum indexes built over the same source.
    /// Deref target, so callers reach the derived vecs transparently.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<LazyHeightDerivedSumCum<T, S1T>>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyFromHeightSumCum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Build by unary-transforming (`F`) a computed sum/cum source; the lazy
    /// derived vecs view `source.rest`.
    pub(crate) fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: ReadableBoxedVec<Height, S1T>,
        source: &ComputedFromHeightSumCum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let combined = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, combined, height_source);
        let derived =
            LazyHeightDerivedSumCum::from_derived_computed::<F>(name, combined, &source.rest);
        Self {
            height,
            rest: Box::new(derived),
        }
    }

    /// Same as `from_computed`, but the source is already the derived sum/cum
    /// vecs rather than the full computed wrapper.
    pub(crate) fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: ReadableBoxedVec<Height, S1T>,
        source: &ComputedHeightDerivedSumCum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let combined = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, combined, height_source);
        let derived =
            LazyHeightDerivedSumCum::from_derived_computed::<F>(name, combined, source);
        Self {
            height,
            rest: Box::new(derived),
        }
    }
}

View File

@@ -1,54 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom1, LazyVecFrom2, UnaryTransform};
use crate::internal::LazyDerivedValuesHeight;
const VERSION: Version = Version::ZERO;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyFromHeightValue {
    /// Lazy sats-denominated values at height level; exposed as "sats" in the
    /// traversable tree.
    #[traversable(rename = "sats")]
    pub sats: LazyVecFrom1<Height, Sats, Height, Sats>,
    /// Lazy BTC and USD views derived from the same sats source (USD combines
    /// a price source with the sats source). Deref target.
    #[deref]
    #[deref_mut]
    pub rest: LazyDerivedValuesHeight,
}
impl LazyFromHeightValue {
    /// Build the sats/btc/usd lazy views from a sats source and a price source.
    ///
    /// - `SatsTransform`: sats -> sats (the base lazy view)
    /// - `BitcoinTransform`: sats -> bitcoin
    /// - `DollarsTransform`: (price, sats) -> dollars
    ///
    /// The derived vecs are named `{name}_btc` / `{name}_usd`.
    pub(crate) fn from_sources<SatsTransform, BitcoinTransform, DollarsTransform>(
        name: &str,
        sats_source: ReadableBoxedVec<Height, Sats>,
        price_source: ReadableBoxedVec<Height, Dollars>,
        version: Version,
    ) -> Self
    where
        SatsTransform: UnaryTransform<Sats, Sats>,
        BitcoinTransform: UnaryTransform<Sats, Bitcoin>,
        DollarsTransform: BinaryTransform<Dollars, Sats, Dollars>,
    {
        let combined = version + VERSION;
        let btc_name = format!("{name}_btc");
        let usd_name = format!("{name}_usd");
        let sats =
            LazyVecFrom1::transformed::<SatsTransform>(name, combined, sats_source.clone());
        let btc =
            LazyVecFrom1::transformed::<BitcoinTransform>(&btc_name, combined, sats_source.clone());
        // USD is a binary view: price joined with sats at each height.
        let usd = LazyVecFrom2::transformed::<DollarsTransform>(
            &usd_name,
            combined,
            price_source,
            sats_source,
        );
        let rest = LazyDerivedValuesHeight { btc, usd };
        Self { sats, rest }
    }
}

View File

@@ -1,67 +1,41 @@
mod binary_last;
mod constant;
mod cum;
mod cum_rolling_full;
mod cum_rolling_sum;
mod cumulative;
mod cumulative_rolling_full;
mod cumulative_rolling_sum;
mod distribution;
mod full;
mod last;
mod lazy_binary_computed_distribution;
mod lazy_binary_computed_full;
mod lazy_binary_computed_last;
mod lazy_binary_computed_sum_cum;
mod lazy_computed_full;
mod lazy_computed_sum_cum;
mod lazy_full;
mod lazy_last;
mod lazy_sum_cum;
mod lazy_value;
mod percentiles;
mod price;
mod ratio;
mod stddev;
mod stored_value_last;
mod sum_cum;
mod value_change;
mod value_ema;
mod value_full;
mod value_last;
mod value_lazy_binary_last;
mod value_lazy_computed_cum;
mod value_lazy_computed_cumulative;
mod value_lazy_last;
mod value_lazy_sum_cum;
mod value_sum_cum;
mod value_sum_cumulative;
pub use binary_last::*;
pub use constant::*;
pub use cum::*;
pub use cum_rolling_full::*;
pub use cum_rolling_sum::*;
pub use cumulative::*;
pub use cumulative_rolling_full::*;
pub use cumulative_rolling_sum::*;
pub use distribution::*;
pub use full::*;
pub use last::*;
pub use lazy_binary_computed_distribution::*;
pub use lazy_binary_computed_full::*;
pub use lazy_binary_computed_last::*;
pub use lazy_binary_computed_sum_cum::*;
pub use lazy_computed_full::*;
pub use lazy_computed_sum_cum::*;
pub use lazy_full::*;
pub use lazy_last::*;
pub use lazy_sum_cum::*;
pub use lazy_value::*;
pub use percentiles::*;
pub use price::*;
pub use ratio::*;
pub use stddev::*;
pub use stored_value_last::*;
pub use sum_cum::*;
pub use value_change::*;
pub use value_ema::*;
pub use value_full::*;
pub use value_last::*;
pub use value_lazy_binary_last::*;
pub use value_lazy_computed_cum::*;
pub use value_lazy_computed_cumulative::*;
pub use value_lazy_last::*;
pub use value_lazy_sum_cum::*;
pub use value_sum_cum::*;
pub use value_sum_cumulative::*;

View File

@@ -1,12 +1,10 @@
use brk_error::Result;
use brk_traversable::{Traversable, TreeNode};
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{
AnyExportableVec, Database, ReadOnlyClone, Ro, Rw, StorageMode, WritableVec,
};
use vecdb::{AnyExportableVec, Database, ReadOnlyClone, Ro, Rw, StorageMode, WritableVec};
use crate::indexes;
use crate::internal::{ComputedFromHeightLast, Price, PriceFromHeight};
use crate::internal::{ComputedFromHeightLast, Price};
pub const PERCENTILES: [u8; 19] = [
5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95,
@@ -15,7 +13,10 @@ pub const PERCENTILES_LEN: usize = PERCENTILES.len();
/// Compute spot percentile rank by interpolating within percentile bands.
/// Returns a value between 0 and 100 indicating where spot sits in the distribution.
pub(crate) fn compute_spot_percentile_rank(percentile_prices: &[Dollars; PERCENTILES_LEN], spot: Dollars) -> StoredF32 {
pub(crate) fn compute_spot_percentile_rank(
percentile_prices: &[Dollars; PERCENTILES_LEN],
spot: Dollars,
) -> StoredF32 {
if spot.is_nan() || percentile_prices[0].is_nan() {
return StoredF32::NAN;
}
@@ -83,7 +84,8 @@ impl PercentilesVecs {
let vecs = PERCENTILES.map(|p| {
compute.then(|| {
let metric_name = format!("{prefix}_pct{p:02}");
PriceFromHeight::forced_import(db, &metric_name, version + VERSION, indexes).unwrap()
Price::forced_import(db, &metric_name, version + VERSION, indexes)
.unwrap()
})
});
@@ -98,7 +100,7 @@ impl PercentilesVecs {
) -> Result<()> {
for (i, vec) in self.vecs.iter_mut().enumerate() {
if let Some(v) = vec {
v.height.truncate_push(height, percentile_prices[i])?;
v.usd.height.truncate_push(height, percentile_prices[i])?;
}
}
Ok(())
@@ -107,7 +109,7 @@ impl PercentilesVecs {
/// Validate computed versions or reset if mismatched.
pub(crate) fn validate_computed_version_or_reset(&mut self, version: Version) -> Result<()> {
for vec in self.vecs.iter_mut().flatten() {
vec.height.validate_computed_version_or_reset(version)?;
vec.usd.height.validate_computed_version_or_reset(version)?;
}
Ok(())
}
@@ -118,7 +120,10 @@ impl ReadOnlyClone for PercentilesVecs {
fn read_only_clone(&self) -> Self::ReadOnly {
PercentilesVecs {
vecs: self.vecs.each_ref().map(|v| v.as_ref().map(|p| p.read_only_clone())),
vecs: self
.vecs
.each_ref()
.map(|v| v.as_ref().map(|p| p.read_only_clone())),
}
}
}

View File

@@ -5,35 +5,25 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Dollars, SatsFract, Version};
use derive_more::{Deref, DerefMut};
use brk_types::{Dollars, SatsFract, Version};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, Database, ReadableCloneableVec, UnaryTransform};
use vecdb::{Database, ReadableCloneableVec, UnaryTransform};
use super::{ComputedFromHeightLast, LazyBinaryFromHeightLast, LazyFromHeightLast};
use super::{ComputedFromHeightLast, LazyFromHeightLast};
use crate::{
indexes,
internal::{ComputedVecValue, DollarsToSatsFract, NumericValue},
};
/// Generic price metric with both USD and sats representations.
///
/// Derefs to the usd metric, so existing code works unchanged.
/// Access `.sats` for the sats exchange rate version.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct Price<U> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub usd: U,
pub sats: LazyFromHeightLast<SatsFract, Dollars>,
}
// --- PriceFromHeight ---
pub type PriceFromHeight = Price<ComputedFromHeightLast<Dollars>>;
impl Price<ComputedFromHeightLast<Dollars>> {
pub(crate) fn forced_import(
db: &Database,
@@ -42,28 +32,16 @@ impl Price<ComputedFromHeightLast<Dollars>> {
indexes: &indexes::Vecs,
) -> Result<Self> {
let usd = ComputedFromHeightLast::forced_import(db, name, version, indexes)?;
Ok(Self::from_computed(name, version, usd))
}
pub(crate) fn from_computed(
name: &str,
version: Version,
usd: ComputedFromHeightLast<Dollars>,
) -> Self {
let sats = LazyFromHeightLast::from_computed::<DollarsToSatsFract>(
&format!("{name}_sats"),
version,
usd.height.read_only_boxed_clone(),
&usd,
);
Self { usd, sats }
Ok(Self { usd, sats })
}
}
// --- LazyPriceFromHeight ---
pub type LazyPriceFromHeight<ST> = Price<LazyFromHeightLast<Dollars, ST>>;
impl<ST> Price<LazyFromHeightLast<Dollars, ST>>
where
ST: ComputedVecValue + NumericValue + JsonSchema + 'static,
@@ -88,106 +66,3 @@ where
}
}
// --- LazyPriceFromCents ---
pub type LazyPriceFromCents = Price<LazyFromHeightLast<Dollars, Cents>>;
// --- LazyBinaryPriceFromHeight ---
pub type LazyBinaryPriceFromHeight = Price<LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>>;
impl Price<LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>> {
/// Create from a PriceFromHeight (source1) and a LazyPriceFromCents (source2).
pub(crate) fn from_price_and_lazy_price<F: BinaryTransform<Dollars, Dollars, Dollars>>(
name: &str,
version: Version,
source1: &PriceFromHeight,
source2: &LazyPriceFromCents,
) -> Self {
let usd = LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<F, Cents>(
name,
version,
&source1.usd,
&source2.usd,
);
let sats = LazyFromHeightLast::from_binary::<DollarsToSatsFract, _, _>(
&format!("{name}_sats"),
version,
&usd,
);
Self { usd, sats }
}
/// Create from a LazyPriceFromCents (source1) and a PriceFromHeight (source2).
pub(crate) fn from_lazy_price_and_price<F: BinaryTransform<Dollars, Dollars, Dollars>>(
name: &str,
version: Version,
source1: &LazyPriceFromCents,
source2: &PriceFromHeight,
) -> Self {
let usd = LazyBinaryFromHeightLast::from_lazy_block_last_and_block_last::<F, Cents>(
name,
version,
&source1.usd,
&source2.usd,
);
let sats = LazyFromHeightLast::from_binary::<DollarsToSatsFract, _, _>(
&format!("{name}_sats"),
version,
&usd,
);
Self { usd, sats }
}
}
// --- Price bands (for stddev/ratio) ---
impl<S2T> Price<LazyBinaryFromHeightLast<Dollars, Dollars, S2T>>
where
S2T: ComputedVecValue + NumericValue + JsonSchema,
{
/// Create a price band from a computed price and a computed band.
pub(crate) fn from_computed_price_and_band<F: BinaryTransform<Dollars, S2T, Dollars>>(
name: &str,
version: Version,
price: &ComputedFromHeightLast<Dollars>,
band: &ComputedFromHeightLast<S2T>,
) -> Self {
let usd = LazyBinaryFromHeightLast::from_computed_last::<F>(name, version, price, band);
let sats = LazyFromHeightLast::from_binary::<DollarsToSatsFract, _, _>(
&format!("{name}_sats"),
version,
&usd,
);
Self { usd, sats }
}
/// Create a price band from a lazy price and a computed band.
pub(crate) fn from_lazy_price_and_band<F: BinaryTransform<Dollars, S2T, Dollars>, S1T>(
name: &str,
version: Version,
price: &LazyFromHeightLast<Dollars, S1T>,
band: &ComputedFromHeightLast<S2T>,
) -> Self
where
S1T: ComputedVecValue + JsonSchema,
{
let usd = LazyBinaryFromHeightLast::from_lazy_block_last_and_block_last::<F, S1T>(
name, version, price, band,
);
let sats = LazyFromHeightLast::from_binary::<DollarsToSatsFract, _, _>(
&format!("{name}_sats"),
version,
&usd,
);
Self { usd, sats }
}
}

View File

@@ -1,23 +1,19 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, WritableVec, ReadableVec,
PcoVec, Rw, StorageMode, VecIndex,
AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
WritableVec,
};
use crate::{
blocks, indexes, ComputeIndexes,
internal::{
ComputedFromHeightStdDev, ComputedVecValue, LazyBinaryFromHeightLast,
LazyFromHeightLast, Price, PriceTimesRatio, StandardDeviationVecsOptions,
},
ComputeIndexes, blocks, indexes,
internal::{ComputedFromHeightStdDev, Price, StandardDeviationVecsOptions},
prices,
utils::get_percentile,
};
use super::{ComputedFromHeightLast, PriceFromHeight};
use super::ComputedFromHeightLast;
#[derive(Traversable)]
pub struct ComputedFromHeightRatio<M: StorageMode = Rw> {
@@ -32,12 +28,12 @@ pub struct ComputedFromHeightRatio<M: StorageMode = Rw> {
pub ratio_pct5: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct2: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct1: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct99_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct98_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct95_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct5_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct2_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct1_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub ratio_pct99_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct98_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct95_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct5_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct2_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct1_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_sd: Option<ComputedFromHeightStdDev<M>>,
pub ratio_4y_sd: Option<ComputedFromHeightStdDev<M>>,
@@ -74,10 +70,7 @@ impl ComputedFromHeightRatio {
// Only compute price internally when metric_price is None
let price = metric_price
.is_none()
.then(|| PriceFromHeight::forced_import(db, name, v, indexes).unwrap());
// Use provided metric_price, falling back to internally computed price
let effective_price = metric_price.or(price.as_ref().map(|p| &p.usd));
.then(|| Price::forced_import(db, name, v, indexes).unwrap());
macro_rules! import_sd {
($suffix:expr, $days:expr) => {
@@ -88,7 +81,6 @@ impl ComputedFromHeightRatio {
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
effective_price,
)
.unwrap()
};
@@ -103,14 +95,19 @@ impl ComputedFromHeightRatio {
macro_rules! lazy_usd {
($ratio:expr, $suffix:expr) => {
effective_price.zip($ratio.as_ref()).map(|(mp, r)| {
Price::from_computed_price_and_band::<PriceTimesRatio>(
&format!("{name}_{}", $suffix),
v,
mp,
r,
)
})
if !extended {
None
} else {
$ratio.as_ref().map(|_| {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
v,
indexes,
)
.unwrap()
})
}
};
}
@@ -138,10 +135,9 @@ impl ComputedFromHeightRatio {
})
}
pub(crate) fn forced_import_from_lazy<S1T: ComputedVecValue + JsonSchema>(
pub(crate) fn forced_import_from_lazy(
db: &Database,
name: &str,
metric_price: &LazyFromHeightLast<Dollars, S1T>,
version: Version,
indexes: &indexes::Vecs,
extended: bool,
@@ -169,7 +165,6 @@ impl ComputedFromHeightRatio {
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
Some(metric_price),
)
.unwrap()
};
@@ -184,13 +179,9 @@ impl ComputedFromHeightRatio {
macro_rules! lazy_usd {
($ratio:expr, $suffix:expr) => {
$ratio.as_ref().map(|r| {
Price::from_lazy_price_and_band::<PriceTimesRatio, S1T>(
&format!("{name}_{}", $suffix),
v,
metric_price,
r,
)
$ratio.as_ref().map(|_| {
Price::forced_import(db, &format!("{name}_{}", $suffix), v, indexes)
.unwrap()
})
};
}
@@ -398,6 +389,51 @@ impl ComputedFromHeightRatio {
Ok(())
}
/// Compute USD ratio bands: usd_band = metric_price * ratio_percentile
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_field:ident) => {
if let Some(usd) = self.$usd_field.as_mut() {
if let Some(band) = self.$band_field.as_ref() {
usd.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
&band.height,
exit,
)?;
}
}
};
}
compute_band!(ratio_pct99_usd, ratio_pct99);
compute_band!(ratio_pct98_usd, ratio_pct98);
compute_band!(ratio_pct95_usd, ratio_pct95);
compute_band!(ratio_pct5_usd, ratio_pct5);
compute_band!(ratio_pct2_usd, ratio_pct2);
compute_band!(ratio_pct1_usd, ratio_pct1);
// Stddev USD bands
macro_rules! compute_sd_usd {
($($field:ident),*) => {
$(if let Some(sd) = self.$field.as_mut() {
sd.compute_usd_bands(starting_indexes, metric_price, exit)?;
})*
};
}
compute_sd_usd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd);
Ok(())
}
fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec<PcoVec<Height, StoredF32>>> {
macro_rules! collect_vecs {
($($field:ident),*) => {{

View File

@@ -3,18 +3,14 @@ use std::mem;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, WritableVec, ReadableVec,
PcoVec, Rw, StorageMode, VecIndex,
AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
WritableVec,
};
use crate::{blocks, indexes, ComputeIndexes};
use crate::{ComputeIndexes, blocks, indexes};
use crate::internal::{
ComputedFromHeightLast, ComputedVecValue, LazyBinaryFromHeightLast, LazyFromHeightLast,
Price, PriceTimesRatio,
};
use crate::internal::{ComputedFromHeightLast, Price};
#[derive(Default)]
pub struct StandardDeviationVecsOptions {
@@ -67,19 +63,19 @@ pub struct ComputedFromHeightStdDev<M: StorageMode = Rw> {
pub m2_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
pub m3sd: Option<ComputedFromHeightLast<StoredF32, M>>,
pub _0sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p0_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p1sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p1_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p2sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p2_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub p3sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m0_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m1sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m1_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m2sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m2_5sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub m3sd_usd: Option<Price<LazyBinaryFromHeightLast<Dollars, Dollars, StoredF32>>>,
pub _0sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p0_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p1sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p1_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p2sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p2_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub p3sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m0_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m1sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m1_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m2sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m2_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub m3sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
}
impl ComputedFromHeightStdDev {
@@ -91,7 +87,6 @@ impl ComputedFromHeightStdDev {
parent_version: Version,
indexes: &indexes::Vecs,
options: StandardDeviationVecsOptions,
metric_price: Option<&ComputedFromHeightLast<Dollars>>,
) -> Result<Self> {
let version = parent_version + Version::TWO;
@@ -121,23 +116,21 @@ impl ComputedFromHeightStdDev {
let m2_5sd = options.bands().then(|| import!("m2_5sd"));
let m3sd = options.bands().then(|| import!("m3sd"));
// Create USD bands using the metric price (the denominator of the ratio).
// This converts ratio bands back to USD: usd_band = metric_price * ratio_band
// Import USD price band vecs (computed eagerly at compute time)
macro_rules! lazy_usd {
($band:expr, $suffix:expr) => {
if !options.price_bands() {
None
} else if let Some(mp) = metric_price {
$band.as_ref().map(|b| {
Price::from_computed_price_and_band::<PriceTimesRatio>(
} else {
$band.as_ref().map(|_| {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
mp,
b,
indexes,
)
.unwrap()
})
} else {
None
}
};
}
@@ -177,15 +170,13 @@ impl ComputedFromHeightStdDev {
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn forced_import_from_lazy<S1T: ComputedVecValue + JsonSchema>(
pub(crate) fn forced_import_from_lazy(
db: &Database,
name: &str,
days: usize,
parent_version: Version,
indexes: &indexes::Vecs,
options: StandardDeviationVecsOptions,
metric_price: Option<&LazyFromHeightLast<Dollars, S1T>>,
) -> Result<Self> {
let version = parent_version + Version::TWO;
@@ -216,21 +207,21 @@ impl ComputedFromHeightStdDev {
let m3sd = options.bands().then(|| import!("m3sd"));
// For lazy metric price, use from_lazy_block_last_and_block_last.
// PriceTimesRatio: BinaryTransform<Dollars, StoredF32, Dollars>
// source1 = metric_price (Dollars, lazy), source2 = band (StoredF32, computed)
macro_rules! lazy_usd {
($band:expr, $suffix:expr) => {
metric_price
.zip($band.as_ref())
.filter(|_| options.price_bands())
.map(|(mp, b)| {
Price::from_lazy_price_and_band::<PriceTimesRatio, S1T>(
if !options.price_bands() {
None
} else {
$band.as_ref().map(|_| {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
mp,
b,
indexes,
)
.unwrap()
})
}
};
}
@@ -277,29 +268,21 @@ impl ComputedFromHeightStdDev {
// 1. Compute SMA using the appropriate lookback vec (or full-history SMA)
if self.days != usize::MAX {
let window_starts = blocks.count.start_vec(self.days);
self.sma
.as_mut()
.unwrap()
.height
.compute_rolling_average(
starting_indexes.height,
window_starts,
source,
exit,
)?;
self.sma.as_mut().unwrap().height.compute_rolling_average(
starting_indexes.height,
window_starts,
source,
exit,
)?;
} else {
// Full history SMA (days == usize::MAX)
self.sma
.as_mut()
.unwrap()
.height
.compute_sma_(
starting_indexes.height,
source,
self.days,
exit,
None,
)?;
self.sma.as_mut().unwrap().height.compute_sma_(
starting_indexes.height,
source,
self.days,
exit,
None,
)?;
}
let sma_opt: Option<&EagerVec<PcoVec<Height, StoredF32>>> = None;
@@ -407,7 +390,8 @@ impl ComputedFromHeightStdDev {
// This is the population SD of all daily values relative to the current SMA
let sd = if n > 0 {
let nf = n as f64;
let variance = welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64;
let variance =
welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64;
StoredF32::from(variance.max(0.0).sqrt() as f32)
} else {
StoredF32::from(0.0_f32)
@@ -471,6 +455,48 @@ impl ComputedFromHeightStdDev {
Ok(())
}
/// Compute USD price bands: usd_band = metric_price * band_ratio
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_field:ident) => {
if let Some(usd) = self.$usd_field.as_mut() {
if let Some(band) = self.$band_field.as_ref() {
usd.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
&band.height,
exit,
)?;
}
}
};
}
compute_band!(_0sd_usd, sma);
compute_band!(p0_5sd_usd, p0_5sd);
compute_band!(p1sd_usd, p1sd);
compute_band!(p1_5sd_usd, p1_5sd);
compute_band!(p2sd_usd, p2sd);
compute_band!(p2_5sd_usd, p2_5sd);
compute_band!(p3sd_usd, p3sd);
compute_band!(m0_5sd_usd, m0_5sd);
compute_band!(m1sd_usd, m1sd);
compute_band!(m1_5sd_usd, m1_5sd);
compute_band!(m2sd_usd, m2sd);
compute_band!(m2_5sd_usd, m2_5sd);
compute_band!(m3sd_usd, m3sd);
Ok(())
}
fn mut_stateful_computed(
&mut self,
) -> impl Iterator<Item = &mut ComputedFromHeightLast<StoredF32>> {

View File

@@ -1,71 +0,0 @@
//! ComputedFromHeight using SumCum aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode,
};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedHeightDerivedSumCum, ComputedVecValue, NumericValue};
/// Height-indexed stored per-block values plus the derived
/// sum/cumulative vecs computed from them.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightSumCum<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Raw per-height values; exposed under the name "sum" when traversed.
    #[traversable(rename = "sum")]
    pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
    /// Aggregations derived from `height` (see `compute_cumulative`);
    /// `Deref`/`DerefMut` forward to this so callers can use its API directly.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<ComputedHeightDerivedSumCum<T, M>>,
}
const VERSION: Version = Version::ZERO;

impl<T> ComputedFromHeightSumCum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the stored height vec and the derived vecs from `db`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let versioned = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, versioned)?;
        let derived = ComputedHeightDerivedSumCum::forced_import(
            db,
            name,
            height.read_only_boxed_clone(),
            versioned,
            indexes,
        )?;
        Ok(Self {
            height,
            rest: Box::new(derived),
        })
    }

    /// Compute height_cumulative from self.height.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.rest.derive_from(starting_indexes, &self.height, exit)
    }

    /// Fill the height vec via the supplied closure, then refresh the derived vecs.
    pub(crate) fn compute(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: impl FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()> {
        compute(&mut self.height)?;
        self.compute_cumulative(starting_indexes, exit)
    }
}

View File

@@ -1,31 +1,27 @@
//! Value type for Full pattern from Height.
//!
//! Height-level USD stats are lazy: `sats * price`.
//! Cumulative and day1 stats are stored since they require aggregation
//! across heights with varying prices.
//! Height-level USD stats are stored (eagerly computed from sats × price).
//! Uses CumFull: stored base + cumulative + rolling windows.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, ReadableCloneableVec, PcoVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedFromHeightFull, LazyBinaryComputedFromHeightFull, LazyFromHeightFull,
SatsTimesPrice, SatsToBitcoin,
},
indexes,
internal::{ComputedFromHeightCumulativeFull, LazyFromHeightLast, SatsToBitcoin, WindowStarts},
prices,
};
#[derive(Traversable)]
pub struct ValueFromHeightFull<M: StorageMode = Rw> {
pub sats: ComputedFromHeightFull<Sats, M>,
pub btc: LazyFromHeightFull<Bitcoin, Sats>,
pub usd: LazyBinaryComputedFromHeightFull<Dollars, Sats, Dollars, M>,
pub sats: ComputedFromHeightCumulativeFull<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightCumulativeFull<Dollars, M>,
}
const VERSION: Version = Version::ONE; // Bumped for lazy height dollars
const VERSION: Version = Version::TWO; // Bumped for stored height dollars
impl ValueFromHeightFull {
pub(crate) fn forced_import(
@@ -33,44 +29,45 @@ impl ValueFromHeightFull {
name: &str,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightFull::forced_import(db, name, v, indexes)?;
let sats = ComputedFromHeightCumulativeFull::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightFull::from_computed::<SatsToBitcoin>(
let btc = LazyFromHeightLast::from_height_source::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
&sats,
indexes,
);
let usd = LazyBinaryComputedFromHeightFull::forced_import::<SatsTimesPrice>(
db,
&format!("{name}_usd"),
v,
sats.height.read_only_boxed_clone(),
prices.usd.price.read_only_boxed_clone(),
indexes,
)?;
let usd =
ComputedFromHeightCumulativeFull::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self {
sats,
btc,
usd,
})
Ok(Self { sats, btc, usd })
}
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
max_from: Height,
windows: &WindowStarts<'_>,
prices: &prices::Vecs,
exit: &Exit,
mut compute: impl FnMut(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
) -> Result<()> {
compute(&mut self.sats.height)?;
self.sats.rest.compute_cumulative(starting_indexes, &self.sats.height, exit)?;
self.usd.compute_cumulative(starting_indexes, exit)?;
Ok(())
self.sats.compute(max_from, windows, exit, compute_sats)?;
self.usd.compute(max_from, windows, exit, |vec| {
Ok(vec.compute_transform2(
max_from,
&self.sats.height,
&prices.usd.price,
|(h, sats, price, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
exit,
)?)
})
}
}

View File

@@ -1,30 +1,26 @@
//! Value type for Last pattern from Height.
//!
//! Height-level USD value is lazy: `sats * price`.
//! Height-level USD value is stored (eagerly computed from sats × price).
//! Day1 last is stored since it requires finding the last value within each date.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightLast, LazyBinaryComputedFromHeightLast, LazyFromHeightLast,
SatsTimesPrice, SatsToBitcoin,
},
prices,
indexes, prices,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
};
#[derive(Traversable)]
pub struct ValueFromHeightLast<M: StorageMode = Rw> {
pub sats: ComputedFromHeightLast<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: LazyBinaryComputedFromHeightLast<Dollars, Sats, Dollars>,
pub usd: ComputedFromHeightLast<Dollars, M>,
}
const VERSION: Version = Version::ONE; // Bumped for lazy height dollars
const VERSION: Version = Version::TWO; // Bumped for stored height dollars
impl ValueFromHeightLast {
pub(crate) fn forced_import(
@@ -32,7 +28,6 @@ impl ValueFromHeightLast {
name: &str,
version: Version,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
) -> Result<Self> {
let v = version + VERSION;
@@ -45,13 +40,7 @@ impl ValueFromHeightLast {
&sats,
);
let usd = LazyBinaryComputedFromHeightLast::forced_import::<SatsTimesPrice>(
&format!("{name}_usd"),
v,
sats.height.read_only_boxed_clone(),
prices.usd.price.read_only_boxed_clone(),
indexes,
);
let usd = ComputedFromHeightLast::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self {
sats,
@@ -59,4 +48,24 @@ impl ValueFromHeightLast {
usd,
})
}
/// Eagerly compute USD height values: sats[h] * price[h].
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.usd.height.compute_transform2(
max_from,
&self.sats.height,
&prices.usd.price,
|(h, sats, price, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
exit,
)?;
Ok(())
}
}

View File

@@ -1,67 +0,0 @@
//! Lazy binary value wrapper combining height (with price) + all derived last transforms.
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{BinaryTransform, ReadableCloneableVec, UnaryTransform};
use super::LazyFromHeightValue;
use crate::internal::{LazyValueHeightDerivedLast, ValueFromHeightLast};
use crate::prices;
const VERSION: Version = Version::ZERO;

/// Lazy binary value wrapper with height (using price binary transform) + all
/// derived last transforms.
///
/// Use this when the height-level dollars need a binary transform
/// (e.g., price * sats) rather than a unary transform from existing dollars.
///
/// All coarser-than-height periods (minute1 through difficultyepoch) use unary
/// transforms on the pre-computed values from the source.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryValueFromHeightLast {
    /// Height-level lazy value built from the sats source and the price feed.
    #[traversable(flatten)]
    pub height: LazyFromHeightValue,
    /// Lazily derived "last" transforms for all coarser periods;
    /// `Deref`/`DerefMut` forward to this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<LazyValueHeightDerivedLast>,
}
impl LazyBinaryValueFromHeightLast {
    /// Build all lazy vecs from a stored block-level source plus the USD price feed.
    ///
    /// `HeightDollarsTransform` combines price and sats at height level, while
    /// `DateDollarsTransform` maps the source's already-computed dollars for
    /// the coarser periods.
    pub(crate) fn from_block_source<
        SatsTransform,
        BitcoinTransform,
        HeightDollarsTransform,
        DateDollarsTransform,
    >(
        name: &str,
        source: &ValueFromHeightLast,
        prices: &prices::Vecs,
        version: Version,
    ) -> Self
    where
        SatsTransform: UnaryTransform<Sats, Sats>,
        BitcoinTransform: UnaryTransform<Sats, Bitcoin>,
        HeightDollarsTransform: BinaryTransform<Dollars, Sats, Dollars>,
        DateDollarsTransform: UnaryTransform<Dollars, Dollars>,
    {
        let effective = version + VERSION;
        let height = LazyFromHeightValue::from_sources::<
            SatsTransform,
            BitcoinTransform,
            HeightDollarsTransform,
        >(
            name,
            source.sats.height.read_only_boxed_clone(),
            prices.usd.price.read_only_boxed_clone(),
            effective,
        );
        let derived = LazyValueHeightDerivedLast::from_block_source::<
            SatsTransform,
            BitcoinTransform,
            DateDollarsTransform,
        >(name, source, effective);
        Self {
            height,
            rest: Box::new(derived),
        }
    }
}

View File

@@ -1,71 +0,0 @@
//! Value type with stored sats height + cumulative, lazy btc + lazy dollars.
//!
//! Like LazyComputedValueFromHeightSumCum but with Cum (no old period aggregations).
//! - Sats: stored height + cumulative (ComputedFromHeightCum)
//! - BTC: lazy transform from sats (LazyFromHeightLast)
//! - USD: lazy binary (price × sats), LazyLast per index (no stored cumulative)
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightCum, LazyBinaryComputedFromHeightLast, LazyFromHeightLast,
PriceTimesSats, SatsToBitcoin,
},
prices,
};
/// Value wrapper with stored sats height + cumulative, lazy btc + lazy usd.
#[derive(Traversable)]
pub struct LazyComputedValueFromHeightCum<M: StorageMode = Rw> {
    /// Stored per-height sats plus stored cumulative.
    pub sats: ComputedFromHeightCum<Sats, M>,
    /// BTC derived lazily from sats (no storage).
    pub btc: LazyFromHeightLast<Bitcoin, Sats>,
    /// USD derived lazily as price × sats (no stored cumulative).
    pub usd: LazyBinaryComputedFromHeightLast<Dollars, Dollars, Sats>,
}
const VERSION: Version = Version::ZERO;

impl LazyComputedValueFromHeightCum {
    /// Import (or create) the stored sats vecs and wire up the lazy btc/usd views.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let sats = ComputedFromHeightCum::forced_import(db, name, ver, indexes)?;
        let btc_name = format!("{name}_btc");
        let btc = LazyFromHeightLast::from_computed::<SatsToBitcoin>(
            &btc_name,
            ver,
            sats.height.read_only_boxed_clone(),
            &sats,
        );
        let usd_name = format!("{name}_usd");
        let usd = LazyBinaryComputedFromHeightLast::forced_import::<PriceTimesSats>(
            &usd_name,
            ver,
            prices.usd.price.read_only_boxed_clone(),
            sats.height.read_only_boxed_clone(),
            indexes,
        );
        Ok(Self { sats, btc, usd })
    }

    /// Compute cumulative from already-filled sats height vec.
    pub(crate) fn compute_cumulative(
        &mut self,
        max_from: Height,
        exit: &Exit,
    ) -> Result<()> {
        self.sats.compute_cumulative(max_from, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,72 @@
//! Value type with stored sats height + cumulative, stored usd, lazy btc.
//!
//! - Sats: stored height + cumulative (ComputedFromHeightCumulative)
//! - BTC: lazy transform from sats (LazyFromHeightLast)
//! - USD: stored (eagerly computed from price × sats)
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightCumulative, ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
prices,
};
/// Value wrapper with stored sats height + cumulative, lazy btc + stored usd.
#[derive(Traversable)]
pub struct LazyComputedValueFromHeightCumulative<M: StorageMode = Rw> {
    /// Stored per-height sats plus stored cumulative.
    pub sats: ComputedFromHeightCumulative<Sats, M>,
    /// BTC derived lazily from sats (no storage).
    pub btc: LazyFromHeightLast<Bitcoin, Sats>,
    /// USD stored; eagerly filled as price × sats by `compute`.
    pub usd: ComputedFromHeightLast<Dollars, M>,
}
const VERSION: Version = Version::ONE; // Bumped for stored height dollars

impl LazyComputedValueFromHeightCumulative {
    /// Import (or create) the stored sats + usd vecs and wire up the lazy btc view.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let sats = ComputedFromHeightCumulative::forced_import(db, name, ver, indexes)?;
        let btc_name = format!("{name}_btc");
        let btc = LazyFromHeightLast::from_height_source::<SatsToBitcoin>(
            &btc_name,
            ver,
            sats.height.read_only_boxed_clone(),
            indexes,
        );
        let usd_name = format!("{name}_usd");
        let usd = ComputedFromHeightLast::forced_import(db, &usd_name, ver, indexes)?;
        Ok(Self { sats, btc, usd })
    }

    /// Compute cumulative + USD from already-filled sats height vec.
    pub(crate) fn compute(
        &mut self,
        prices: &prices::Vecs,
        max_from: Height,
        exit: &Exit,
    ) -> Result<()> {
        self.sats.compute_rest(max_from, exit)?;
        // usd[h] = price[h] * (sats[h] expressed in whole coins)
        self.usd.height.compute_transform2(
            max_from,
            &prices.usd.price,
            &self.sats.height,
            |(h, price, sats, ..)| {
                let coins = *sats as f64 / 100_000_000.0;
                (h, Dollars::from(*price * coins))
            },
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -1,13 +1,11 @@
//! Lazy value wrapper for ValueFromHeightLast - all transforms are lazy.
use brk_traversable::Traversable;
use brk_types::{Dollars, Sats, Version};
use brk_types::{Bitcoin, Dollars, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::UnaryTransform;
use crate::internal::{
LazyValueHeight, LazyValueHeightDerivedLast, SatsToBitcoin, ValueFromHeightLast,
};
use crate::internal::{LazyValueHeight, LazyValueHeightDerivedLast, ValueFromHeightLast};
const VERSION: Version = Version::ZERO;
@@ -24,22 +22,23 @@ pub struct LazyValueFromHeightLast {
}
impl LazyValueFromHeightLast {
pub(crate) fn from_block_source<SatsTransform, DollarsTransform>(
pub(crate) fn from_block_source<SatsTransform, BitcoinTransform, DollarsTransform>(
name: &str,
source: &ValueFromHeightLast,
version: Version,
) -> Self
where
SatsTransform: UnaryTransform<Sats, Sats>,
BitcoinTransform: UnaryTransform<Sats, Bitcoin>,
DollarsTransform: UnaryTransform<Dollars, Dollars>,
{
let v = version + VERSION;
let height =
LazyValueHeight::from_block_source::<SatsTransform, DollarsTransform>(name, source, v);
LazyValueHeight::from_block_source::<SatsTransform, BitcoinTransform, DollarsTransform>(name, source, v);
let rest =
LazyValueHeightDerivedLast::from_block_source::<SatsTransform, SatsToBitcoin, DollarsTransform>(
LazyValueHeightDerivedLast::from_block_source::<SatsTransform, BitcoinTransform, DollarsTransform>(
name, source, v,
);

View File

@@ -1,101 +0,0 @@
//! Value type with lazy binary height + stored derived SumCum.
//!
//! Use this when the height-level sats is a lazy binary transform (e.g., mask × source).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use schemars::JsonSchema;
use vecdb::{
BinaryTransform, Database, Exit, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, Rw,
StorageMode,
};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, LazyComputedFromHeightSumCum, LazyFromHeightSumCum, PriceTimesSats,
SatsToBitcoin,
},
prices,
};
/// Value wrapper with lazy binary height + stored derived SumCum.
///
/// Sats height is a lazy binary transform (e.g., mask × source).
/// Dollars height is also lazy (price × sats).
/// Cumulative and day1 are stored.
#[derive(Traversable)]
pub struct LazyValueFromHeightSumCum<S1T, S2T, M: StorageMode = Rw>
where
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Lazy sats height (binary transform of the two sources) + stored derived vecs.
    pub sats: LazyComputedFromHeightSumCum<Sats, S1T, S2T, M>,
    /// BTC derived lazily from sats.
    pub btc: LazyFromHeightSumCum<Bitcoin, Sats>,
    /// Lazy usd height (price × sats) + stored derived vecs.
    pub usd: LazyComputedFromHeightSumCum<Dollars, Dollars, Sats, M>,
}
const VERSION: Version = Version::ZERO;

impl<S1T, S2T> LazyValueFromHeightSumCum<S1T, S2T>
where
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Import (or create) all vecs.
    ///
    /// `F` combines the two height sources into the lazy sats height vec;
    /// usd is a further lazy transform (price × sats) on top of it.
    pub(crate) fn forced_import<F>(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        prices: &prices::Vecs,
    ) -> Result<Self>
    where
        F: BinaryTransform<S1T, S2T, Sats>,
    {
        let ver = version + VERSION;
        let sats_height = LazyVecFrom2::transformed::<F>(name, ver, source1, source2);
        let sats = LazyComputedFromHeightSumCum::forced_import(db, name, ver, indexes, sats_height)?;
        let btc_name = format!("{name}_btc");
        let btc = LazyFromHeightSumCum::from_derived::<SatsToBitcoin>(
            &btc_name,
            ver,
            sats.height.read_only_boxed_clone(),
            &sats.rest,
        );
        // Build the usd name once; it is needed for both the lazy height vec
        // and the stored derived vecs.
        let usd_name = format!("{name}_usd");
        let usd_height = LazyVecFrom2::transformed::<PriceTimesSats>(
            &usd_name,
            ver,
            prices.usd.price.read_only_boxed_clone(),
            sats.height.read_only_boxed_clone(),
        );
        let usd =
            LazyComputedFromHeightSumCum::forced_import(db, &usd_name, ver, indexes, usd_height)?;
        Ok(Self { sats, btc, usd })
    }

    /// Refresh the stored cumulative vecs of both sats and usd.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.sats.compute_cumulative(starting_indexes, exit)?;
        self.usd.compute_cumulative(starting_indexes, exit)?;
        Ok(())
    }
}

View File

@@ -1,78 +0,0 @@
//! Value type for SumCum pattern from Height.
//!
//! Height-level USD sum is lazy: `sats * price`.
//! Cumulative and day1 stats are stored since they require aggregation
//! across heights with varying prices.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, ReadableCloneableVec, PcoVec, Rw, StorageMode};
use crate::{
ComputeIndexes,
indexes,
internal::{
ComputedFromHeightSumCum, LazyBinaryComputedFromHeightSumCum, LazyFromHeightSumCum,
SatsTimesPrice, SatsToBitcoin,
},
prices,
};
/// SumCum value wrapper: stored sats, lazy btc, lazy-height usd with stored aggregations.
#[derive(Traversable)]
pub struct ValueFromHeightSumCum<M: StorageMode = Rw> {
    /// Stored per-height sats plus stored cumulative/day1 aggregations.
    pub sats: ComputedFromHeightSumCum<Sats, M>,
    /// BTC derived lazily from sats.
    pub btc: LazyFromHeightSumCum<Bitcoin, Sats>,
    /// USD: lazy height level (sats × price) with stored cumulative/day1.
    pub usd: LazyBinaryComputedFromHeightSumCum<Dollars, Sats, Dollars, M>,
}
const VERSION: Version = Version::ONE; // Bumped for lazy height dollars

impl ValueFromHeightSumCum {
    /// Import (or create) the stored sats vecs and wire up the lazy btc/usd views.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let sats = ComputedFromHeightSumCum::forced_import(db, name, ver, indexes)?;
        let btc_name = format!("{name}_btc");
        let btc = LazyFromHeightSumCum::from_computed::<SatsToBitcoin>(
            &btc_name,
            ver,
            sats.height.read_only_boxed_clone(),
            &sats,
        );
        let usd_name = format!("{name}_usd");
        let usd = LazyBinaryComputedFromHeightSumCum::forced_import::<SatsTimesPrice>(
            db,
            &usd_name,
            ver,
            sats.height.read_only_boxed_clone(),
            prices.usd.price.read_only_boxed_clone(),
            indexes,
        )?;
        Ok(Self { sats, btc, usd })
    }

    /// Fill the sats height vec via the supplied closure, then refresh the
    /// stored cumulative vecs of both sats and usd.
    pub(crate) fn compute(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: impl FnMut(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
    ) -> Result<()> {
        compute(&mut self.sats.height)?;
        self.sats.compute_cumulative(starting_indexes, exit)?;
        self.usd.compute_cumulative(starting_indexes, exit)?;
        Ok(())
    }
}

Some files were not shown because too many files have changed in this diff Show More