global: snapshot

This commit is contained in:
nym21
2026-03-04 14:02:00 +01:00
parent 730e8bb4d4
commit 891f0dad9e
60 changed files with 300 additions and 643 deletions

View File

@@ -26,7 +26,7 @@ impl Vecs {
indexes,
),
as_hash: ComputedFromHeight::forced_import(db, "difficulty_as_hash", version, indexes)?,
adjustment: PercentFromHeight::forced_import_bps32(db, "difficulty_adjustment", version, indexes)?,
adjustment: PercentFromHeight::forced_import(db, "difficulty_adjustment", version, indexes)?,
epoch: ComputedFromHeight::forced_import(db, "difficulty_epoch", version, indexes)?,
blocks_before_next_adjustment: ComputedFromHeight::forced_import(
db,

View File

@@ -22,7 +22,7 @@ impl Vecs {
)?;
let fullness =
PercentFromHeightDistribution::forced_import_bp16(db, "block_fullness", version, indexes)?;
PercentFromHeightDistribution::forced_import(db, "block_fullness", version, indexes)?;
Ok(Self { weight, fullness })
}

View File

@@ -11,7 +11,7 @@ use crate::{
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
cointime_adj_inflation_rate: PercentFromHeight::forced_import_bps32(
cointime_adj_inflation_rate: PercentFromHeight::forced_import(
db,
"cointime_adj_inflation_rate",
version,

View File

@@ -7,7 +7,7 @@ use vecdb::{Database, PAGE_SIZE};
use super::{
ActivityVecs, AdjustedVecs, CapVecs, DB_NAME, PricingVecs, ReserveRiskVecs, SupplyVecs,
VERSION, ValueVecs, Vecs,
ValueVecs, Vecs,
};
use crate::indexes;
@@ -20,7 +20,7 @@ impl Vecs {
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let version = parent_version + VERSION;
let version = parent_version;
let v1 = version + Version::ONE;
let activity = ActivityVecs::forced_import(&db, version, indexes)?;
let supply = SupplyVecs::forced_import(&db, v1, indexes)?;

View File

@@ -10,7 +10,6 @@ mod compute;
mod import;
use brk_traversable::Traversable;
use brk_types::Version;
use vecdb::{Database, Rw, StorageMode};
pub use activity::Vecs as ActivityVecs;
@@ -22,7 +21,6 @@ pub use supply::Vecs as SupplyVecs;
pub use value::Vecs as ValueVecs;
pub const DB_NAME: &str = "cointime";
const VERSION: Version = Version::ZERO;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {

View File

@@ -27,7 +27,7 @@ impl GrowthRateVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let all = PercentFromHeightDistribution::forced_import_bp16(
let all = PercentFromHeightDistribution::forced_import(
db,
"growth_rate",
version,
@@ -35,7 +35,7 @@ impl GrowthRateVecs {
)?;
let by_addresstype = ByAddressType::new_with_name(|name| {
PercentFromHeightDistribution::forced_import_bp16(
PercentFromHeightDistribution::forced_import(
db,
&format!("{name}_growth_rate"),
version,

View File

@@ -19,8 +19,6 @@ use crate::distribution::metrics::{BasicCohortMetrics, CohortMetricsBase, Import
use super::super::traits::{CohortVecs, DynCohortVecs};
const VERSION: Version = Version::ZERO;
/// Address cohort with metrics and optional runtime state.
#[derive(Traversable)]
pub struct AddressCohortVecs<M: StorageMode = Rw> {
@@ -70,13 +68,13 @@ impl AddressCohortVecs {
addr_count: ComputedFromHeight::forced_import(
db,
&cfg.name("addr_count"),
version + VERSION,
version,
indexes,
)?,
addr_count_change_1m: ComputedFromHeight::forced_import(
db,
&cfg.name("addr_count_change_1m"),
version + VERSION,
version,
indexes,
)?,
})

View File

@@ -67,8 +67,8 @@ pub(crate) fn process_blocks(
let height_to_first_txoutindex = &indexer.vecs.outputs.first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.inputs.first_txinindex;
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.full.sum_cumulative.sum.0;
let height_to_input_count = &inputs.count.full.sum_cumulative.sum.0;
let height_to_output_count = &outputs.count.total_count.full.sum;
let height_to_input_count = &inputs.count.full.sum;
let txindex_to_output_count = &indexes.txindex.output_count;
let txindex_to_input_count = &indexes.txindex.input_count;

View File

@@ -69,7 +69,7 @@ impl<'a> ImportConfig<'a> {
suffix: &str,
offset: Version,
) -> Result<PercentFromHeight<BasisPoints16>> {
PercentFromHeight::forced_import_bp16(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
}
pub(crate) fn import_percent_bps16(
@@ -77,7 +77,7 @@ impl<'a> ImportConfig<'a> {
suffix: &str,
offset: Version,
) -> Result<PercentFromHeight<BasisPointsSigned16>> {
PercentFromHeight::forced_import_bps16(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentFromHeight::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
}
// --- Value types ---
@@ -121,7 +121,7 @@ impl<'a> ImportConfig<'a> {
}
pub(crate) fn import_percent_rolling_bp16(&self, suffix: &str, offset: Version) -> Result<PercentRollingWindows<BasisPoints16>> {
PercentRollingWindows::forced_import_bp16(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentRollingWindows::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
}
pub(crate) fn import_emas_1w_1m<T: NumericValue + JsonSchema>(&self, suffix: &str, offset: Version) -> Result<RollingEmas1w1m<T>> {
@@ -129,7 +129,7 @@ impl<'a> ImportConfig<'a> {
}
pub(crate) fn import_percent_emas_1w_1m_bp16(&self, suffix: &str, offset: Version) -> Result<PercentRollingEmas1w1m<BasisPoints16>> {
PercentRollingEmas1w1m::forced_import_bp16(self.db, &self.name(suffix), self.version + offset, self.indexes)
PercentRollingEmas1w1m::forced_import(self.db, &self.name(suffix), self.version + offset, self.indexes)
}
pub(crate) fn import_emas_2w(&self, suffix: &str, offset: Version) -> Result<RollingEmas2w> {

View File

@@ -53,7 +53,6 @@ pub use week1::Vecs as Week1Vecs;
pub use year1::Vecs as Year1Vecs;
pub use year10::Vecs as Year10Vecs;
const VERSION: Version = Version::ZERO;
pub const DB_NAME: &str = "indexes";
#[derive(Traversable)]
@@ -90,7 +89,7 @@ impl Vecs {
let db = Database::open(&parent.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
let version = parent_version + VERSION;
let version = parent_version;
let this = Self {
address: AddressVecs::forced_import(version, indexer),

View File

@@ -1,37 +1,37 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{
Database, Exit, ReadableVec, Ro, Rw, VecIndex, VecValue, Version,
Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Ro, Rw, StorageMode, StoredVec,
VecIndex, VecValue, Version,
};
use crate::internal::{
AverageVec, ComputedVecValue, DistributionStats, MaxVec, MedianVec, MinVec, Pct10Vec,
Pct25Vec, Pct75Vec, Pct90Vec,
};
use crate::internal::{ComputedVecValue, DistributionStats};
/// Distribution stats (average + min + max + percentiles) — concrete vec type alias.
pub type Distribution<I, T, M = Rw> = DistributionStats<
AverageVec<I, T, M>,
MinVec<I, T, M>,
MaxVec<I, T, M>,
Pct10Vec<I, T, M>,
Pct25Vec<I, T, M>,
MedianVec<I, T, M>,
Pct75Vec<I, T, M>,
Pct90Vec<I, T, M>,
>;
#[derive(Traversable)]
pub struct Distribution<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw> {
pub average: M::Stored<EagerVec<PcoVec<I, T>>>,
pub min: M::Stored<EagerVec<PcoVec<I, T>>>,
pub max: M::Stored<EagerVec<PcoVec<I, T>>>,
pub pct10: M::Stored<EagerVec<PcoVec<I, T>>>,
pub pct25: M::Stored<EagerVec<PcoVec<I, T>>>,
pub median: M::Stored<EagerVec<PcoVec<I, T>>>,
pub pct75: M::Stored<EagerVec<PcoVec<I, T>>>,
pub pct90: M::Stored<EagerVec<PcoVec<I, T>>>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
let s = DistributionStats::<()>::SUFFIXES;
Ok(Self {
average: AverageVec::forced_import(db, name, version)?,
min: MinVec::forced_import(db, name, version)?,
max: MaxVec::forced_import(db, name, version)?,
pct10: Pct10Vec::forced_import(db, name, version)?,
pct25: Pct25Vec::forced_import(db, name, version)?,
median: MedianVec::forced_import(db, name, version)?,
pct75: Pct75Vec::forced_import(db, name, version)?,
pct90: Pct90Vec::forced_import(db, name, version)?,
average: EagerVec::forced_import(db, &format!("{name}_{}", s[0]), version)?,
min: EagerVec::forced_import(db, &format!("{name}_{}", s[1]), version)?,
max: EagerVec::forced_import(db, &format!("{name}_{}", s[2]), version)?,
pct10: EagerVec::forced_import(db, &format!("{name}_{}", s[3]), version)?,
pct25: EagerVec::forced_import(db, &format!("{name}_{}", s[4]), version)?,
median: EagerVec::forced_import(db, &format!("{name}_{}", s[5]), version)?,
pct75: EagerVec::forced_import(db, &format!("{name}_{}", s[6]), version)?,
pct90: EagerVec::forced_import(db, &format!("{name}_{}", s[7]), version)?,
})
}
@@ -59,16 +59,16 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
skip_count,
None, // first
None, // last
Some(&mut self.min.0),
Some(&mut self.max.0),
Some(&mut self.average.0),
Some(&mut self.min),
Some(&mut self.max),
Some(&mut self.average),
None, // sum
None, // cumulative
Some(&mut self.median.0),
Some(&mut self.pct10.0),
Some(&mut self.pct25.0),
Some(&mut self.pct75.0),
Some(&mut self.pct90.0),
Some(&mut self.median),
Some(&mut self.pct10),
Some(&mut self.pct25),
Some(&mut self.pct75),
Some(&mut self.pct90),
)
}
@@ -94,27 +94,27 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
count_indexes,
n_blocks,
exit,
&mut self.min.0,
&mut self.max.0,
&mut self.average.0,
&mut self.median.0,
&mut self.pct10.0,
&mut self.pct25.0,
&mut self.pct75.0,
&mut self.pct90.0,
&mut self.min,
&mut self.max,
&mut self.average,
&mut self.median,
&mut self.pct10,
&mut self.pct25,
&mut self.pct75,
&mut self.pct90,
)
}
pub fn read_only_clone(&self) -> Distribution<I, T, Ro> {
DistributionStats {
average: self.average.read_only_clone(),
min: self.min.read_only_clone(),
max: self.max.read_only_clone(),
pct10: self.pct10.read_only_clone(),
pct25: self.pct25.read_only_clone(),
median: self.median.read_only_clone(),
pct75: self.pct75.read_only_clone(),
pct90: self.pct90.read_only_clone(),
Distribution {
average: StoredVec::read_only_clone(&self.average),
min: StoredVec::read_only_clone(&self.min),
max: StoredVec::read_only_clone(&self.max),
pct10: StoredVec::read_only_clone(&self.pct10),
pct25: StoredVec::read_only_clone(&self.pct25),
median: StoredVec::read_only_clone(&self.median),
pct75: StoredVec::read_only_clone(&self.pct75),
pct90: StoredVec::read_only_clone(&self.pct90),
}
}
}

View File

@@ -1,27 +1,30 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Ro, Rw, StorageMode, VecIndex, VecValue, Version};
use vecdb::{
Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Ro, Rw, StorageMode, StoredVec,
VecIndex, VecValue, Version,
};
use crate::internal::ComputedVecValue;
use super::{Distribution, SumCumulative};
use super::Distribution;
/// Full stats aggregate: distribution + sum_cumulative
/// Matches the common full_stats() pattern: average + minmax + percentiles + sum + cumulative
/// Full stats aggregate: distribution + sum + cumulative
#[derive(Traversable)]
pub struct Full<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw> {
#[traversable(flatten)]
pub distribution: Distribution<I, T, M>,
#[traversable(flatten)]
pub sum_cumulative: SumCumulative<I, T, M>,
pub sum: M::Stored<EagerVec<PcoVec<I, T>>>,
pub cumulative: M::Stored<EagerVec<PcoVec<I, T>>>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Full<I, T> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
Ok(Self {
distribution: Distribution::forced_import(db, name, version)?,
sum_cumulative: SumCumulative::forced_import(db, name, version)?,
sum: EagerVec::forced_import(db, &format!("{name}_sum"), version)?,
cumulative: EagerVec::forced_import(db, &format!("{name}_cumulative"), version)?,
})
}
@@ -49,23 +52,24 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Full<I, T> {
skip_count,
None, // first
None, // last
Some(&mut self.distribution.min.0),
Some(&mut self.distribution.max.0),
Some(&mut self.distribution.average.0),
Some(&mut self.sum_cumulative.sum.0),
Some(&mut self.sum_cumulative.cumulative.0),
Some(&mut self.distribution.median.0),
Some(&mut self.distribution.pct10.0),
Some(&mut self.distribution.pct25.0),
Some(&mut self.distribution.pct75.0),
Some(&mut self.distribution.pct90.0),
Some(&mut self.distribution.min),
Some(&mut self.distribution.max),
Some(&mut self.distribution.average),
Some(&mut self.sum),
Some(&mut self.cumulative),
Some(&mut self.distribution.median),
Some(&mut self.distribution.pct10),
Some(&mut self.distribution.pct25),
Some(&mut self.distribution.pct75),
Some(&mut self.distribution.pct90),
)
}
pub fn read_only_clone(&self) -> Full<I, T, Ro> {
Full {
distribution: self.distribution.read_only_clone(),
sum_cumulative: self.sum_cumulative.read_only_clone(),
sum: StoredVec::read_only_clone(&self.sum),
cumulative: StoredVec::read_only_clone(&self.cumulative),
}
}
}

View File

@@ -1,7 +1,5 @@
mod distribution;
mod full;
mod sum_cumulative;
pub use distribution::*;
pub use full::*;
pub use sum_cumulative::*;

View File

@@ -1,31 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, Ro, Rw, StorageMode, VecIndex, Version};
use crate::internal::{ComputedVecValue, CumulativeVec, SumVec};
/// Sum + Cumulative (12% of usage)
#[derive(Traversable)]
pub struct SumCumulative<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw> {
#[traversable(flatten)]
pub sum: SumVec<I, T, M>,
#[traversable(flatten)]
pub cumulative: CumulativeVec<I, T, M>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> SumCumulative<I, T> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
Ok(Self {
sum: SumVec::forced_import(db, name, version)?,
cumulative: CumulativeVec::forced_import(db, name, version)?,
})
}
pub fn read_only_clone(&self) -> SumCumulative<I, T, Ro> {
SumCumulative {
sum: self.sum.read_only_clone(),
cumulative: self.cumulative.read_only_clone(),
}
}
}

View File

@@ -26,8 +26,6 @@ where
pub rolling: RollingFull<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedHeightDerivedFull<T>
where
T: NumericValue + JsonSchema,
@@ -38,11 +36,9 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let cumulative =
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
Ok(Self {
cumulative,

View File

@@ -52,8 +52,6 @@ where
}
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedHeightDerived<T>
where
T: NumericValue + JsonSchema,
@@ -64,13 +62,11 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazyAggVec::sparse_from_first_index(
name,
v,
version,
height_source.clone(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
@@ -112,7 +108,7 @@ where
($idx:ident) => {
LazyAggVec::new(
name,
v,
version,
height_source.clone(),
indexes.$idx.identity.read_only_boxed_clone(),
for_each_range_from_coarser,

View File

@@ -81,8 +81,6 @@ where
T: VecValue + PartialOrd + JsonSchema,
S1T: VecValue;
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyHeightDerived<T, S1T>
where
T: VecValue + PartialOrd + JsonSchema + 'static,
@@ -121,13 +119,11 @@ where
where
S1T: NumericValue,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyTransformLast::from_boxed::<MapOption<F>>(
name,
v,
version,
source.$p.read_only_boxed_clone(),
)
};
@@ -135,7 +131,7 @@ where
macro_rules! epoch {
($p:ident) => {
LazyTransformLast::from_boxed::<F>(name, v, source.$p.read_only_boxed_clone())
LazyTransformLast::from_boxed::<F>(name, version, source.$p.read_only_boxed_clone())
};
}
@@ -151,13 +147,11 @@ where
F: UnaryTransform<S1T, T>,
S2T: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyTransformLast::from_boxed::<MapOption<F>>(
name,
v,
version,
source.$p.read_only_boxed_clone(),
)
};
@@ -165,7 +159,7 @@ where
macro_rules! epoch {
($p:ident) => {
LazyTransformLast::from_boxed::<F>(name, v, source.$p.read_only_boxed_clone())
LazyTransformLast::from_boxed::<F>(name, version, source.$p.read_only_boxed_clone())
};
}

View File

@@ -6,8 +6,6 @@ use vecdb::UnaryTransform;
use crate::internal::{LazyHeightDerived, ValueFromHeight};
const VERSION: Version = Version::ZERO;
#[derive(Clone, Traversable)]
pub struct LazyValueHeightDerived {
pub sats: LazyHeightDerived<Sats, Sats>,
@@ -28,29 +26,27 @@ impl LazyValueHeightDerived {
CentsTransform: UnaryTransform<Cents, Cents>,
DollarsTransform: UnaryTransform<Dollars, Dollars>,
{
let v = version + VERSION;
let sats = LazyHeightDerived::from_derived_computed::<SatsTransform>(
name,
v,
version,
&source.sats.rest,
);
let btc = LazyHeightDerived::from_derived_computed::<BitcoinTransform>(
&format!("{name}_btc"),
v,
version,
&source.sats.rest,
);
let cents = LazyHeightDerived::from_derived_computed::<CentsTransform>(
&format!("{name}_cents"),
v,
version,
&source.cents.rest,
);
let usd = LazyHeightDerived::from_lazy::<DollarsTransform, Cents>(
&format!("{name}_usd"),
v,
version,
&source.usd.rest,
);

View File

@@ -1,19 +1,15 @@
//! Base generic struct with 8 type parameters — one per distribution statistic.
//!
//! Foundation for all distribution-style types (average, min, max, percentiles).
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
pub struct DistributionStats<A, B = A, C = A, D = A, E = A, F = A, G = A, H = A> {
pub struct DistributionStats<A> {
pub average: A,
pub min: B,
pub max: C,
pub pct10: D,
pub pct25: E,
pub median: F,
pub pct75: G,
pub pct90: H,
pub min: A,
pub max: A,
pub pct10: A,
pub pct25: A,
pub median: A,
pub pct75: A,
pub pct90: A,
}
impl<A> DistributionStats<A> {
@@ -58,5 +54,4 @@ impl<A> DistributionStats<A> {
.min(f(&self.pct75))
.min(f(&self.pct90))
}
}

View File

@@ -47,18 +47,14 @@ pub struct EagerIndexes<T, M: StorageMode = Rw>(
where
T: ComputedVecValue + PartialOrd + JsonSchema;
const VERSION: Version = Version::ZERO;
impl<T> EagerIndexes<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
ImportableVec::forced_import(db, name, v)?
ImportableVec::forced_import(db, name, version)?
};
}

View File

@@ -1,13 +1,11 @@
//! Generic EMA window containers.
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
pub struct Emas1w1m<A, B = A> {
pub struct Emas1w1m<A> {
#[traversable(rename = "1w")]
pub _1w: A,
#[traversable(rename = "1m")]
pub _1m: B,
pub _1m: A,
}
impl<A> Emas1w1m<A> {
@@ -30,29 +28,3 @@ impl<A> Emas1w1m<A> {
[&mut self._1w, &mut self._1m]
}
}
#[derive(Clone, Traversable)]
pub struct Emas2w<A> {
#[traversable(rename = "2w")]
pub _2w: A,
}
impl<A> Emas2w<A> {
pub const SUFFIXES: [&'static str; 1] = ["ema_2w"];
pub fn try_from_fn<E>(
mut f: impl FnMut(&str) -> std::result::Result<A, E>,
) -> std::result::Result<Self, E> {
Ok(Self {
_2w: f(Self::SUFFIXES[0])?,
})
}
pub fn as_array(&self) -> [&A; 1] {
[&self._2w]
}
pub fn as_mut_array(&mut self) -> [&mut A; 1] {
[&mut self._2w]
}
}

View File

@@ -26,8 +26,6 @@ where
pub rolling: RollingFull<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightAggregated<T>
where
T: NumericValue + JsonSchema,
@@ -38,10 +36,8 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height = Full::forced_import(db, name, v)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
let height = Full::forced_import(db, name, version)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
Ok(Self {
full: height,
@@ -65,7 +61,7 @@ where
self.rolling.compute(
max_from,
windows,
self.full.sum_cumulative.sum.inner(),
&self.full.sum,
exit,
)?;
Ok(())

View File

@@ -28,8 +28,6 @@ where
pub rest: Box<ComputedHeightDerived<T>>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeight<T>
where
T: NumericValue + JsonSchema,
@@ -40,14 +38,12 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let rest = ComputedHeightDerived::forced_import(
name,
height.read_only_boxed_clone(),
v,
version,
indexes,
);

View File

@@ -73,8 +73,6 @@ impl RollingFullSlot {
#[traversable(transparent)]
pub struct RollingFullByUnit<M: StorageMode = Rw>(pub Windows<RollingFullSlot<M>>);
const VERSION: Version = Version::ZERO;
impl RollingFullByUnit {
pub(crate) fn forced_import(
db: &Database,
@@ -82,9 +80,8 @@ impl RollingFullByUnit {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(Windows::try_from_fn(|suffix| {
RollingFullSlot::forced_import(db, &format!("{name}_{suffix}"), v, indexes)
RollingFullSlot::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})?))
}

View File

@@ -16,8 +16,6 @@ use crate::{
#[traversable(transparent)]
pub struct RollingSumByUnit<M: StorageMode = Rw>(pub Windows<ByUnit<M>>);
const VERSION: Version = Version::ZERO;
impl RollingSumByUnit {
pub(crate) fn forced_import(
db: &Database,
@@ -25,8 +23,7 @@ impl RollingSumByUnit {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(Windows::<ByUnit>::forced_import(db, &format!("{name}_sum"), v, indexes)?))
Ok(Self(Windows::<ByUnit>::forced_import(db, &format!("{name}_sum"), version, indexes)?))
}
pub(crate) fn compute_rolling_sum(

View File

@@ -24,8 +24,6 @@ where
pub cumulative: ComputedFromHeight<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumulative<T>
where
T: NumericValue + JsonSchema,
@@ -36,11 +34,9 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let cumulative =
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
Ok(Self { height, cumulative })
}

View File

@@ -27,8 +27,6 @@ where
pub sum: RollingWindows<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumulativeSum<T>
where
T: NumericValue + JsonSchema,
@@ -39,12 +37,10 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let cumulative =
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let rolling = RollingWindows::forced_import(db, name, v, indexes)?;
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let rolling = RollingWindows::forced_import(db, name, version, indexes)?;
Ok(Self {
height,

View File

@@ -25,8 +25,6 @@ where
pub rolling: RollingDistribution<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightDistribution<T>
where
T: NumericValue + JsonSchema,
@@ -37,10 +35,8 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let rolling = RollingDistribution::forced_import(db, name, v, indexes)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let rolling = RollingDistribution::forced_import(db, name, version, indexes)?;
Ok(Self { height, rolling })
}

View File

@@ -27,8 +27,6 @@ where
pub rolling: RollingFull<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightFull<T>
where
T: NumericValue + JsonSchema,
@@ -39,12 +37,10 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let cumulative =
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
ComputedFromHeight::forced_import(db, &format!("{name}_cumulative"), version, indexes)?;
let rolling = RollingFull::forced_import(db, name, version, indexes)?;
Ok(Self {
height,

View File

@@ -24,8 +24,6 @@ where
pub rest: Box<LazyHeightDerived<T, S1T>>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyFromHeight<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
@@ -40,10 +38,9 @@ where
where
S1T: NumericValue,
{
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, height_source),
rest: Box::new(LazyHeightDerived::from_computed::<F>(name, v, source)),
height: LazyVecFrom1::transformed::<F>(name, version, height_source),
rest: Box::new(LazyHeightDerived::from_computed::<F>(name, version, source)),
}
}
@@ -56,12 +53,11 @@ where
where
S1T: NumericValue,
{
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, height_source.clone()),
height: LazyVecFrom1::transformed::<F>(name, version, height_source.clone()),
rest: Box::new(LazyHeightDerived::from_height_source::<F>(
name,
v,
version,
height_source,
indexes,
)),
@@ -78,12 +74,11 @@ where
F: UnaryTransform<S1T, T>,
S2T: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, source.height.read_only_boxed_clone()),
height: LazyVecFrom1::transformed::<F>(name, version, source.height.read_only_boxed_clone()),
rest: Box::new(LazyHeightDerived::from_lazy::<F, S2T>(
name,
v,
version,
&source.rest,
)),
}

View File

@@ -1,17 +1,11 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
BasisPoints16, BasisPointsSigned16, BasisPointsSigned32, Height, StoredF32, Version,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode, UnaryTransform, VecValue};
use brk_types::{Height, StoredF32, Version};
use vecdb::{BinaryTransform, Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode, VecValue};
use crate::{
indexes,
internal::{
Bp16ToFloat, Bp16ToPercent, Bps16ToFloat, Bps16ToPercent, Bps32ToFloat, Bps32ToPercent,
NumericValue,
},
internal::BpsType,
traits::ComputeDrawdown,
};
@@ -21,44 +15,32 @@ use super::{ComputedFromHeight, LazyFromHeight};
///
/// Stores integer basis points on disk (Pco-compressed),
/// exposes two lazy StoredF32 views:
/// - `ratio`: bps ÷ 10000 (e.g., 4523 bps 0.4523)
/// - `percent`: bps ÷ 100 (e.g., 4523 bps 45.23%)
///
/// Use for dominance, adoption, RSI, and other percentage-valued metrics.
/// - `ratio`: bps / 10000 (e.g., 4523 bps -> 0.4523)
/// - `percent`: bps / 100 (e.g., 4523 bps -> 45.23%)
#[derive(Traversable)]
pub struct PercentFromHeight<B, M: StorageMode = Rw>
where
B: NumericValue + JsonSchema,
{
pub struct PercentFromHeight<B: BpsType, M: StorageMode = Rw> {
pub bps: ComputedFromHeight<B, M>,
pub ratio: LazyFromHeight<StoredF32, B>,
pub percent: LazyFromHeight<StoredF32, B>,
}
impl<B> PercentFromHeight<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn forced_import<RatioTransform, PercentTransform>(
impl<B: BpsType> PercentFromHeight<B> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self>
where
RatioTransform: UnaryTransform<B, StoredF32>,
PercentTransform: UnaryTransform<B, StoredF32>,
{
) -> Result<Self> {
let bps = ComputedFromHeight::forced_import(db, &format!("{name}_bps"), version, indexes)?;
let ratio = LazyFromHeight::from_computed::<RatioTransform>(
let ratio = LazyFromHeight::from_computed::<B::ToRatio>(
&format!("{name}_ratio"),
version,
bps.height.read_only_boxed_clone(),
&bps,
);
let percent = LazyFromHeight::from_computed::<PercentTransform>(
let percent = LazyFromHeight::from_computed::<B::ToPercent>(
name,
version,
bps.height.read_only_boxed_clone(),
@@ -68,45 +50,6 @@ where
Ok(Self { bps, ratio, percent })
}
}
impl PercentFromHeight<BasisPoints16> {
pub(crate) fn forced_import_bp16(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bp16ToFloat, Bp16ToPercent>(db, name, version, indexes)
}
}
impl PercentFromHeight<BasisPointsSigned16> {
pub(crate) fn forced_import_bps16(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bps16ToFloat, Bps16ToPercent>(db, name, version, indexes)
}
}
impl PercentFromHeight<BasisPointsSigned32> {
pub(crate) fn forced_import_bps32(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bps32ToFloat, Bps32ToPercent>(db, name, version, indexes)
}
}
impl<B> PercentFromHeight<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn compute_binary<S1T, S2T, F>(
&mut self,
max_from: Height,

View File

@@ -1,48 +1,37 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Height, StoredF32, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode, UnaryTransform};
use brk_types::{Height, StoredF32, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::{indexes, internal::{Bp16ToFloat, Bp16ToPercent, NumericValue, WindowStarts}};
use crate::{indexes, internal::{BpsType, WindowStarts}};
use super::{ComputedFromHeightDistribution, LazyFromHeight};
/// Like PercentFromHeight but with rolling distribution stats on the bps data.
#[derive(Traversable)]
pub struct PercentFromHeightDistribution<B, M: StorageMode = Rw>
where
B: NumericValue + JsonSchema,
{
pub struct PercentFromHeightDistribution<B: BpsType, M: StorageMode = Rw> {
pub bps: ComputedFromHeightDistribution<B, M>,
pub ratio: LazyFromHeight<StoredF32, B>,
pub percent: LazyFromHeight<StoredF32, B>,
}
impl<B> PercentFromHeightDistribution<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn forced_import<RatioTransform, PercentTransform>(
impl<B: BpsType> PercentFromHeightDistribution<B> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self>
where
RatioTransform: UnaryTransform<B, StoredF32>,
PercentTransform: UnaryTransform<B, StoredF32>,
{
) -> Result<Self> {
let bps = ComputedFromHeightDistribution::forced_import(db, &format!("{name}_bps"), version, indexes)?;
let ratio = LazyFromHeight::from_height_source::<RatioTransform>(
let ratio = LazyFromHeight::from_height_source::<B::ToRatio>(
&format!("{name}_ratio"),
version,
bps.height.read_only_boxed_clone(),
indexes,
);
let percent = LazyFromHeight::from_height_source::<PercentTransform>(
let percent = LazyFromHeight::from_height_source::<B::ToPercent>(
name,
version,
bps.height.read_only_boxed_clone(),
@@ -52,23 +41,6 @@ where
Ok(Self { bps, ratio, percent })
}
}
/// Convenience constructor for the unsigned 16-bit basis-point flavor.
///
/// Delegates to the generic `forced_import`, fixing the two float-view
/// transforms to the `BasisPoints16` pair.
// NOTE(review): assumes Bp16ToFloat produces the ratio view and
// Bp16ToPercent the percent view — confirm against their definitions.
impl PercentFromHeightDistribution<BasisPoints16> {
pub(crate) fn forced_import_bp16(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bp16ToFloat, Bp16ToPercent>(db, name, version, indexes)
}
}
impl<B> PercentFromHeightDistribution<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn compute(
&mut self,
max_from: Height,

View File

@@ -12,8 +12,6 @@ use crate::{
},
};
const VERSION: Version = Version::ZERO;
/// Change values indexed by height - sats (stored), btc (lazy), cents (stored), usd (lazy).
#[derive(Traversable)]
pub struct ValueFromHeightChange<M: StorageMode = Rw> {
@@ -30,13 +28,11 @@ impl ValueFromHeightChange {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeight::forced_import(db, name, v, indexes)?;
let sats = ComputedFromHeight::forced_import(db, name, version, indexes)?;
let btc = LazyFromHeight::from_computed::<SatsSignedToBitcoin>(
&format!("{name}_btc"),
v,
version,
sats.height.read_only_boxed_clone(),
&sats,
);
@@ -44,13 +40,13 @@ impl ValueFromHeightChange {
let cents = ComputedFromHeight::forced_import(
db,
&format!("{name}_cents"),
v,
version,
indexes,
)?;
let usd = LazyFromHeight::from_computed::<CentsSignedToDollars>(
&format!("{name}_usd"),
v,
version,
cents.height.read_only_boxed_clone(),
&cents,
);

View File

@@ -7,8 +7,6 @@ use vecdb::UnaryTransform;
use crate::internal::{LazyValue, LazyValueHeightDerived, ValueFromHeight};
const VERSION: Version = Version::ZERO;
/// Lazy value wrapper with height + all derived last transforms from ValueFromHeight.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
@@ -33,14 +31,12 @@ impl LazyValueFromHeight {
CentsTransform: UnaryTransform<Cents, Cents>,
DollarsTransform: UnaryTransform<Dollars, Dollars>,
{
let v = version + VERSION;
let height =
LazyValue::from_block_source::<SatsTransform, BitcoinTransform, CentsTransform, DollarsTransform>(name, source, v);
LazyValue::from_block_source::<SatsTransform, BitcoinTransform, CentsTransform, DollarsTransform>(name, source, version);
let rest =
LazyValueHeightDerived::from_block_source::<SatsTransform, BitcoinTransform, CentsTransform, DollarsTransform>(
name, source, v,
name, source, version,
);
Self { height, rest: Box::new(rest) }

View File

@@ -25,8 +25,6 @@ pub struct ValueFromHeightRolling<M: StorageMode = Rw> {
pub rolling: ValueFromHeightWindows<M>,
}
const VERSION: Version = Version::ZERO;
impl ValueFromHeightRolling {
pub(crate) fn forced_import(
db: &Database,
@@ -34,10 +32,9 @@ impl ValueFromHeightRolling {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self {
value: Value::forced_import(db, name, v)?,
rolling: ValueFromHeightWindows::forced_import(db, name, v, indexes)?,
value: Value::forced_import(db, name, version)?,
rolling: ValueFromHeightWindows::forced_import(db, name, version, indexes)?,
})
}

View File

@@ -6,8 +6,6 @@ use vecdb::{ReadableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex};
use crate::internal::ValueFromHeight;
const VERSION: Version = Version::ZERO;
/// Fully lazy value type at height level.
///
/// All fields are lazy transforms from existing sources - no storage.
@@ -31,26 +29,24 @@ impl LazyValue<Height> {
CentsTransform: UnaryTransform<Cents, Cents>,
DollarsTransform: UnaryTransform<Dollars, Dollars>,
{
let v = version + VERSION;
let sats =
LazyVecFrom1::transformed::<SatsTransform>(name, v, source.sats.height.read_only_boxed_clone());
LazyVecFrom1::transformed::<SatsTransform>(name, version, source.sats.height.read_only_boxed_clone());
let btc = LazyVecFrom1::transformed::<BitcoinTransform>(
&format!("{name}_btc"),
v,
version,
source.sats.height.read_only_boxed_clone(),
);
let cents = LazyVecFrom1::transformed::<CentsTransform>(
&format!("{name}_cents"),
v,
version,
source.cents.height.read_only_boxed_clone(),
);
let usd = LazyVecFrom1::transformed::<DollarsTransform>(
&format!("{name}_usd"),
v,
version,
source.usd.height.read_only_boxed_clone(),
);

View File

@@ -10,7 +10,6 @@ mod indexes;
mod lazy_eager_indexes;
mod lazy_value;
mod rolling;
mod stat_vec;
mod traits;
mod transform;
mod value;
@@ -28,7 +27,6 @@ pub(crate) use indexes::*;
pub(crate) use lazy_eager_indexes::*;
pub(crate) use lazy_value::*;
pub(crate) use rolling::*;
pub(crate) use stat_vec::*;
pub(crate) use traits::*;
pub use transform::*;
pub(crate) use value::*;

View File

@@ -24,8 +24,6 @@ pub struct RollingDistribution<T, M: StorageMode = Rw>(pub DistributionStats<Rol
where
T: ComputedVecValue + PartialOrd + JsonSchema;
const VERSION: Version = Version::ZERO;
impl<T> RollingDistribution<T>
where
T: NumericValue + JsonSchema,
@@ -36,9 +34,8 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(DistributionStats::try_from_fn(|suffix| {
RollingWindows::forced_import(db, &format!("{name}_{suffix}"), v, indexes)
RollingWindows::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})?))
}

View File

@@ -1,67 +1,37 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Height, StoredF32, Version};
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode, UnaryTransform};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{Bp16ToFloat, Bp16ToPercent, Emas1w1m, NumericValue, PercentFromHeight},
internal::{BpsType, Emas1w1m, PercentFromHeight},
};
const VERSION: Version = Version::ZERO;
/// 2 EMA vecs (1w, 1m) sourced from 24h rolling window,
/// each storing basis points with lazy ratio and percent float views.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct PercentRollingEmas1w1m<B, M: StorageMode = Rw>(pub Emas1w1m<PercentFromHeight<B, M>>)
where
B: NumericValue + JsonSchema;
pub struct PercentRollingEmas1w1m<B: BpsType, M: StorageMode = Rw>(pub Emas1w1m<PercentFromHeight<B, M>>);
impl<B> PercentRollingEmas1w1m<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn forced_import<RatioTransform, PercentTransform>(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self>
where
RatioTransform: UnaryTransform<B, StoredF32>,
PercentTransform: UnaryTransform<B, StoredF32>,
{
let v = version + VERSION;
Ok(Self(Emas1w1m::try_from_fn(|suffix| {
PercentFromHeight::forced_import::<RatioTransform, PercentTransform>(
db,
&format!("{name}_{suffix}"),
v,
indexes,
)
})?))
}
}
impl PercentRollingEmas1w1m<BasisPoints16> {
pub(crate) fn forced_import_bp16(
impl<B: BpsType> PercentRollingEmas1w1m<B> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bp16ToFloat, Bp16ToPercent>(db, name, version, indexes)
Ok(Self(Emas1w1m::try_from_fn(|suffix| {
PercentFromHeight::forced_import(
db,
&format!("{name}_{suffix}"),
version,
indexes,
)
})?))
}
}
impl<B> PercentRollingEmas1w1m<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn compute_from_24h(
&mut self,
max_from: Height,

View File

@@ -27,8 +27,6 @@ where
pub distribution: RollingDistribution<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> RollingFull<T>
where
T: NumericValue + JsonSchema,
@@ -39,10 +37,9 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self {
sum: RollingWindows::forced_import(db, &format!("{name}_sum"), v, indexes)?,
distribution: RollingDistribution::forced_import(db, name, v, indexes)?,
sum: RollingWindows::forced_import(db, &format!("{name}_sum"), version, indexes)?,
distribution: RollingDistribution::forced_import(db, name, version, indexes)?,
})
}

View File

@@ -1,58 +1,34 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, StoredF32, Version};
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Rw, StorageMode, UnaryTransform};
use vecdb::{Database, Rw, StorageMode};
use crate::{
indexes,
internal::{Bp16ToFloat, Bp16ToPercent, NumericValue, PercentFromHeight, Windows},
internal::{BpsType, PercentFromHeight, Windows},
};
const VERSION: Version = Version::ZERO;
/// 4 rolling window vecs (24h, 1w, 1m, 1y), each storing basis points
/// with lazy ratio and percent float views.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct PercentRollingWindows<B, M: StorageMode = Rw>(pub Windows<PercentFromHeight<B, M>>)
where
B: NumericValue + JsonSchema;
pub struct PercentRollingWindows<B: BpsType, M: StorageMode = Rw>(pub Windows<PercentFromHeight<B, M>>);
impl<B> PercentRollingWindows<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn forced_import<RatioTransform, PercentTransform>(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self>
where
RatioTransform: UnaryTransform<B, StoredF32>,
PercentTransform: UnaryTransform<B, StoredF32>,
{
let v = version + VERSION;
Ok(Self(Windows::try_from_fn(|suffix| {
PercentFromHeight::forced_import::<RatioTransform, PercentTransform>(
db,
&format!("{name}_{suffix}"),
v,
indexes,
)
})?))
}
}
impl PercentRollingWindows<BasisPoints16> {
pub(crate) fn forced_import_bp16(
impl<B: BpsType> PercentRollingWindows<B> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Self::forced_import::<Bp16ToFloat, Bp16ToPercent>(db, name, version, indexes)
Ok(Self(Windows::try_from_fn(|suffix| {
PercentFromHeight::forced_import(
db,
&format!("{name}_{suffix}"),
version,
indexes,
)
})?))
}
}

View File

@@ -17,8 +17,6 @@ use crate::{
internal::{ValueFromHeight, WindowStarts, Windows},
};
const VERSION: Version = Version::ZERO;
/// Value rolling windows — window-first, currency-last.
///
/// Each window contains `ValueFromHeight` (sats + btc lazy + usd).
@@ -35,9 +33,8 @@ impl ValueFromHeightWindows {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(Windows::try_from_fn(|suffix| {
ValueFromHeight::forced_import(db, &format!("{name}_{suffix}"), v, indexes)
ValueFromHeight::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})?))
}

View File

@@ -28,8 +28,6 @@ pub struct RollingWindows<T, M: StorageMode = Rw>(pub Windows<ComputedFromHeight
where
T: ComputedVecValue + PartialOrd + JsonSchema;
const VERSION: Version = Version::ZERO;
impl<T> RollingWindows<T>
where
T: NumericValue + JsonSchema,
@@ -40,9 +38,8 @@ where
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(Windows::try_from_fn(|suffix| {
ComputedFromHeight::forced_import(db, &format!("{name}_{suffix}"), v, indexes)
ComputedFromHeight::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
})?))
}

View File

@@ -1,103 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
Database, EagerVec, ImportableVec, PcoVec, Ro, Rw, StorageMode, StoredVec, VecIndex, Version,
};
use crate::internal::ComputedVecValue;
// Generates a newtype stat-vec wrapper around `EagerVec<PcoVec<I, T>>`.
//
// Arguments:
// - `$name`:   identifier of the struct to declare
// - `$suffix`: appended to the db name as `{name}_{suffix}` on import
// - `$doc`:    doc string attached to the generated struct
//
// Each generated type exposes:
// - `forced_import`: open/create the backing stored vec in `db`
// - `read_only_clone`: an `Ro` storage-mode view over the same data
macro_rules! define_stat_vec {
($name:ident, $suffix:literal, $doc:literal) => {
#[doc = $doc]
#[derive(Deref, DerefMut, Traversable)]
pub struct $name<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw>(
pub M::Stored<EagerVec<PcoVec<I, T>>>,
);
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> $name<I, T> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
) -> Result<Self> {
// Backing vec is named by appending the stat suffix, e.g. "fee_average".
Ok(Self(EagerVec::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
)?))
}
pub fn read_only_clone(&self) -> $name<I, T, Ro> {
$name(StoredVec::read_only_clone(&self.0))
}
}
};
}
// Same expansion as `define_stat_vec`, with one difference: the generated
// struct carries `#[traversable(transparent)]`, so it is flattened rather
// than nested when traversed.
macro_rules! define_stat_vec_transparent {
($name:ident, $suffix:literal, $doc:literal) => {
#[doc = $doc]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct $name<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw>(
pub M::Stored<EagerVec<PcoVec<I, T>>>,
);
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> $name<I, T> {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
) -> Result<Self> {
// Backing vec name: `{name}_{suffix}`.
Ok(Self(EagerVec::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
)?))
}
pub fn read_only_clone(&self) -> $name<I, T, Ro> {
$name(StoredVec::read_only_clone(&self.0))
}
}
};
}
// One stored vec per aggregation statistic. All are plain stat vecs except
// `CumulativeVec`, which is traversable-transparent (flattened on traversal).
define_stat_vec!(AverageVec, "average", "Average value in an aggregation period");
define_stat_vec!(MinVec, "min", "Minimum value in an aggregation period");
define_stat_vec!(MaxVec, "max", "Maximum value in an aggregation period");
define_stat_vec!(Pct10Vec, "pct10", "10th percentile in an aggregation period");
define_stat_vec!(Pct25Vec, "pct25", "25th percentile in an aggregation period");
define_stat_vec!(MedianVec, "median", "Median (50th percentile) in an aggregation period");
define_stat_vec!(Pct75Vec, "pct75", "75th percentile in an aggregation period");
define_stat_vec!(Pct90Vec, "pct90", "90th percentile in an aggregation period");
define_stat_vec_transparent!(CumulativeVec, "cumulative", "Cumulative sum across aggregation periods");
/// Sum of values in an aggregation period
// Hand-written (not macro-generated) because it additionally exposes
// `inner()`; otherwise mirrors `define_stat_vec_transparent`.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct SumVec<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw>(
pub M::Stored<EagerVec<PcoVec<I, T>>>,
);
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> SumVec<I, T> {
// Opens/creates the backing vec named `{name}_sum`.
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
Ok(Self(EagerVec::forced_import(
db,
&format!("{name}_sum"),
version,
)?))
}
// Direct borrow of the wrapped eager vec, bypassing the Deref layer.
#[inline]
pub(crate) fn inner(&self) -> &EagerVec<PcoVec<I, T>> {
&self.0
}
// Read-only (`Ro` storage mode) view over the same stored data.
pub fn read_only_clone(&self) -> SumVec<I, T, Ro> {
SumVec(StoredVec::read_only_clone(&self.0))
}
}

View File

@@ -1,9 +1,13 @@
//! Internal traits for computed vec values.
use std::ops::{Add, AddAssign, Div};
use brk_types::{BasisPoints16, BasisPointsSigned16, BasisPointsSigned32, StoredF32};
use schemars::JsonSchema;
use serde::Serialize;
use vecdb::{Formattable, PcoVecValue};
use vecdb::{Formattable, PcoVecValue, UnaryTransform};
use crate::internal::{
Bp16ToFloat, Bp16ToPercent, Bps16ToFloat, Bps16ToPercent, Bps32ToFloat, Bps32ToPercent,
};
pub trait ComputedVecValue
where
@@ -32,3 +36,24 @@ impl<T> ComputedVecValue for T where
pub trait NumericValue: ComputedVecValue + From<f64> + Into<f64> {}
impl<T> NumericValue for T where T: ComputedVecValue + From<f64> + Into<f64> {}
/// Trait that associates a basis-point type with its transforms to ratio and percent.
// Lets callers write `forced_import` once, with the two float-view
// transforms selected by the stored bps type instead of passed explicitly.
pub trait BpsType: NumericValue + JsonSchema {
/// Transform from the stored bps value to its ratio float view.
type ToRatio: UnaryTransform<Self, StoredF32>;
/// Transform from the stored bps value to its percent float view.
type ToPercent: UnaryTransform<Self, StoredF32>;
}
// Unsigned 16-bit basis points.
impl BpsType for BasisPoints16 {
type ToRatio = Bp16ToFloat;
type ToPercent = Bp16ToPercent;
}
// Signed 16-bit basis points.
impl BpsType for BasisPointsSigned16 {
type ToRatio = Bps16ToFloat;
type ToPercent = Bps16ToPercent;
}
// Signed 32-bit basis points.
impl BpsType for BasisPointsSigned32 {
type ToRatio = Bps32ToFloat;
type ToPercent = Bps32ToPercent;
}

View File

@@ -1,19 +1,15 @@
//! Base generic struct with 4 type parameters — one per rolling window duration.
//!
//! Foundation for all rolling window types (24h, 1w, 1m, 1y).
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
pub struct Windows<A, B = A, C = A, D = A> {
pub struct Windows<A> {
#[traversable(rename = "24h")]
pub _24h: A,
#[traversable(rename = "1w")]
pub _1w: B,
pub _1w: A,
#[traversable(rename = "1m")]
pub _1m: C,
pub _1m: A,
#[traversable(rename = "1y")]
pub _1y: D,
pub _1y: A,
}
impl<A> Windows<A> {

View File

@@ -40,7 +40,7 @@ impl Vecs {
);
let price_drawdown =
PercentFromHeight::forced_import_bps16(db, "price_drawdown", v, indexes)?;
PercentFromHeight::forced_import(db, "price_drawdown", v, indexes)?;
Ok(Self {
price_ath,

View File

@@ -23,7 +23,7 @@ impl Vecs {
})?;
let period_return = ByDcaPeriod::try_new(|name, _days| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("dca_return_{name}"),
version,
@@ -32,7 +32,7 @@ impl Vecs {
})?;
let period_cagr = ByDcaCagr::try_new(|name, _days| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("dca_cagr_{name}"),
version,
@@ -45,7 +45,7 @@ impl Vecs {
})?;
let period_lump_sum_return = ByDcaPeriod::try_new(|name, _days| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("lump_sum_return_{name}"),
version,
@@ -62,7 +62,7 @@ impl Vecs {
})?;
let class_return = ByDcaClass::try_new(|name, _year, _day1| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("dca_return_{name}"),
version,

View File

@@ -33,7 +33,7 @@ impl RsiChain {
macro_rules! percent_import {
($name:expr) => {
PercentFromHeight::forced_import_bp16(
PercentFromHeight::forced_import(
db,
&format!("rsi_{}_{}", $name, tf),
version,
@@ -45,7 +45,7 @@ impl RsiChain {
let average_gain = import!("average_gain");
let average_loss = import!("average_loss");
let rsi = PercentFromHeight::forced_import_bp16(
let rsi = PercentFromHeight::forced_import(
db,
&format!("rsi_{tf}"),
version,
@@ -127,9 +127,9 @@ impl Vecs {
let rsi = Windows::try_from_fn(|tf| RsiChain::forced_import(db, tf, v, indexes))?;
let macd = Windows::try_from_fn(|tf| MacdChain::forced_import(db, tf, v, indexes))?;
let stoch_k = PercentFromHeight::forced_import_bp16(db, "stoch_k", v, indexes)?;
let stoch_d = PercentFromHeight::forced_import_bp16(db, "stoch_d", v, indexes)?;
let gini = PercentFromHeight::forced_import_bp16(db, "gini", v, indexes)?;
let stoch_k = PercentFromHeight::forced_import(db, "stoch_k", v, indexes)?;
let stoch_d = PercentFromHeight::forced_import(db, "stoch_d", v, indexes)?;
let gini = PercentFromHeight::forced_import(db, "gini", v, indexes)?;
let pi_cycle = ComputedFromHeightRatio::forced_import_raw(db, "pi_cycle", v, indexes)?;

View File

@@ -24,7 +24,7 @@ impl Vecs {
price_true_range_sum_2w: ComputedFromHeight::forced_import(
db, "price_true_range_sum_2w", version + v1, indexes,
)?,
price_choppiness_index_2w: PercentFromHeight::forced_import_bp16(
price_choppiness_index_2w: PercentFromHeight::forced_import(
db, "price_choppiness_index_2w", version + v1, indexes,
)?,
})

View File

@@ -19,7 +19,7 @@ impl Vecs {
let v1 = Version::ONE;
let price_return = ByLookbackPeriod::try_new(|name, _days| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("price_return_{name}"),
version,
@@ -29,7 +29,7 @@ impl Vecs {
// CAGR (computed, 2y+ only)
let price_cagr = ByDcaCagr::try_new(|name, _days| {
PercentFromHeight::forced_import_bps32(
PercentFromHeight::forced_import(
db,
&format!("price_cagr_{name}"),
version,

View File

@@ -52,7 +52,7 @@ impl Vecs {
version,
indexes,
)?,
hash_rate_drawdown: PercentFromHeight::forced_import_bps16(
hash_rate_drawdown: PercentFromHeight::forced_import(
db,
"hash_rate_drawdown",
version,
@@ -82,7 +82,7 @@ impl Vecs {
version + v4,
indexes,
)?,
hash_price_rebound: PercentFromHeight::forced_import_bps32(
hash_price_rebound: PercentFromHeight::forced_import(
db,
"hash_price_rebound",
version + v4,
@@ -112,7 +112,7 @@ impl Vecs {
version + v4,
indexes,
)?,
hash_value_rebound: PercentFromHeight::forced_import_bps32(
hash_value_rebound: PercentFromHeight::forced_import(
db,
"hash_value_rebound",
version + v4,

View File

@@ -27,25 +27,25 @@ impl Vecs {
version,
indexes,
)?,
fee_dominance: PercentFromHeight::forced_import_bp16(
fee_dominance: PercentFromHeight::forced_import(
db,
"fee_dominance",
version,
indexes,
)?,
fee_dominance_rolling: PercentRollingWindows::forced_import_bp16(
fee_dominance_rolling: PercentRollingWindows::forced_import(
db,
"fee_dominance",
version,
indexes,
)?,
subsidy_dominance: PercentFromHeight::forced_import_bp16(
subsidy_dominance: PercentFromHeight::forced_import(
db,
"subsidy_dominance",
version,
indexes,
)?,
subsidy_dominance_rolling: PercentRollingWindows::forced_import_bp16(
subsidy_dominance_rolling: PercentRollingWindows::forced_import(
db,
"subsidy_dominance",
version,

View File

@@ -33,8 +33,8 @@ impl Vecs {
self.utxo_count.height.compute_transform3(
starting_indexes.height,
&*self.total_count.full.sum_cumulative.cumulative,
&*inputs_count.full.sum_cumulative.cumulative,
&self.total_count.full.cumulative,
&inputs_count.full.cumulative,
&scripts_count.opreturn.cumulative.height,
|(h, output_count, input_count, opreturn_count, ..)| {
let block_count = u64::from(h + 1_usize);

View File

@@ -58,9 +58,9 @@ impl Vecs {
ValueFromHeightCumulativeSum::forced_import(db, &suffix("coinbase"), version, indexes)?;
let dominance =
PercentFromHeight::forced_import_bp16(db, &suffix("dominance"), version, indexes)?;
PercentFromHeight::forced_import(db, &suffix("dominance"), version, indexes)?;
let dominance_rolling =
PercentRollingWindows::forced_import_bp16(db, &suffix("dominance"), version, indexes)?;
PercentRollingWindows::forced_import(db, &suffix("dominance"), version, indexes)?;
Ok(Self {
dominance,

View File

@@ -1,16 +1,16 @@
use std::path::Path;
use std::{fs, path::Path};
use brk_error::Result;
use brk_indexer::Indexer;
use brk_reader::Reader;
use brk_reader::{Reader, XOR_LEN, XORBytes};
use brk_traversable::Traversable;
use brk_types::{BlkPosition, Height, Indexes, TxIndex, Version};
use tracing::info;
use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, WritableVec, ImportableVec, PAGE_SIZE, PcoVec,
ReadableVec, Rw, StorageMode, VecIndex,
AnyStoredVec, AnyVec, Database, Exit, ImportableVec, PAGE_SIZE, PcoVec, ReadableVec, Rw,
StorageMode, VecIndex, WritableVec,
};
pub const DB_NAME: &str = "positions";
#[derive(Traversable)]
@@ -58,6 +58,29 @@ impl Vecs {
Ok(())
}
/// Compare the reader's current XOR key against the copy cached in
/// `xor.dat` inside this db's directory.
///
/// - Cached key matches: nothing to do, return early without rewriting.
/// - Cached key differs: reset `block_position` and `tx_position`
///   (presumably because stored positions depend on the on-disk
///   obfuscation key — NOTE(review): confirm), then persist the new key.
/// - No cache, or the file is unreadable / wrong length: treat as first
///   run and just persist the current key.
fn check_xor_bytes(&mut self, reader: &Reader) -> Result<()> {
let xor_path = self.db.path().join("xor.dat");
let current = reader.xor_bytes();
// Any read failure or size mismatch collapses to `None` ("no cache").
let cached = fs::read(&xor_path)
.ok()
.and_then(|b| <[u8; XOR_LEN]>::try_from(b).ok())
.map(XORBytes::from);
match cached {
// Key unchanged — positions remain valid; skip the rewrite below.
Some(c) if c == current => return Ok(()),
Some(_) => {
info!("XOR bytes changed, resetting positions...");
self.block_position.reset()?;
self.tx_position.reset()?;
}
None => {}
}
// Record the current key for the next run (changed or first-run paths).
fs::write(&xor_path, *current)?;
Ok(())
}
fn compute_(
&mut self,
indexer: &Indexer,
@@ -65,6 +88,8 @@ impl Vecs {
parser: &Reader,
exit: &Exit,
) -> Result<()> {
self.check_xor_bytes(parser)?;
// Validate computed versions against dependencies
let dep_version = indexer.vecs.transactions.first_txindex.version()
+ indexer.vecs.transactions.height.version();

View File

@@ -24,13 +24,13 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
taproot: PercentFromHeight::forced_import_bp16(
taproot: PercentFromHeight::forced_import(
db,
"taproot_adoption",
version,
indexes,
)?,
segwit: PercentFromHeight::forced_import_bp16(
segwit: PercentFromHeight::forced_import(
db,
"segwit_adoption",
version,
@@ -49,14 +49,14 @@ impl Vecs {
self.taproot.compute_binary::<_, _, RatioU64Bp16>(
starting_indexes.height,
&count.p2tr.height,
&outputs_count.total_count.full.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum,
exit,
)?;
self.segwit.compute_binary::<_, _, RatioU64Bp16>(
starting_indexes.height,
&count.segwit.height,
&outputs_count.total_count.full.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum,
exit,
)?;

View File

@@ -42,7 +42,7 @@ impl Vecs {
// Inflation rate
let inflation_rate =
PercentFromHeight::forced_import_bps32(&db, "inflation_rate", version, indexes)?;
PercentFromHeight::forced_import(&db, "inflation_rate", version, indexes)?;
// Velocity
let velocity = super::velocity::Vecs::forced_import(&db, version, indexes)?;
@@ -55,13 +55,13 @@ impl Vecs {
);
// Growth rates
let market_cap_growth_rate = PercentFromHeight::forced_import_bps32(
let market_cap_growth_rate = PercentFromHeight::forced_import(
&db,
"market_cap_growth_rate",
version + Version::ONE,
indexes,
)?;
let realized_cap_growth_rate = PercentFromHeight::forced_import_bps32(
let realized_cap_growth_rate = PercentFromHeight::forced_import(
&db,
"realized_cap_growth_rate",
version + Version::ONE,

View File

@@ -75,13 +75,13 @@ impl Vecs {
)?;
self.inputs_per_sec.height.compute_binary::<_, Timestamp, PerSec>(
starting_indexes.height,
&inputs_count.full.sum_cumulative.sum.0,
&inputs_count.full.sum,
&blocks.interval.height,
exit,
)?;
self.outputs_per_sec.height.compute_binary::<_, Timestamp, PerSec>(
starting_indexes.height,
&outputs_count.total_count.full.sum_cumulative.sum.0,
&outputs_count.total_count.full.sum,
&blocks.interval.height,
exit,
)?;

View File

@@ -4,9 +4,15 @@ use derive_more::Deref;
pub const XOR_LEN: usize = 8;
#[derive(Debug, Clone, Copy, Deref)]
#[derive(Debug, Clone, Copy, Deref, PartialEq, Eq)]
pub struct XORBytes([u8; XOR_LEN]);
impl From<[u8; XOR_LEN]> for XORBytes {
fn from(value: [u8; XOR_LEN]) -> Self {
Self(value)
}
}
impl From<&Path> for XORBytes {
#[inline]
fn from(value: &Path) -> Self {