computer: indexes + rolling

This commit is contained in:
nym21
2026-02-24 17:07:35 +01:00
parent cefc8cfd42
commit f74115c6e2
160 changed files with 2604 additions and 4739 deletions

95
Cargo.lock generated
View File

@@ -336,15 +336,6 @@ version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
[[package]]
name = "block2"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5"
dependencies = [
"objc2",
]
[[package]]
name = "brk"
version = "0.1.9"
@@ -826,17 +817,11 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.43"
version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0"
dependencies = [
"iana-time-zone",
"js-sys",
@@ -1078,17 +1063,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "ctrlc"
version = "3.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0b1fab2ae45819af2d0731d60f2afe17227ebb1a1538a236da84c93e9a60162"
dependencies = [
"dispatch2",
"nix",
"windows-sys 0.61.2",
]
[[package]]
name = "cty"
version = "0.2.2"
@@ -1153,18 +1127,6 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "dispatch2"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
dependencies = [
"bitflags 2.11.0",
"block2",
"libc",
"objc2",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
@@ -1935,9 +1897,9 @@ checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07"
[[package]]
name = "js-sys"
version = "0.3.88"
version = "0.3.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7e709f3e3d22866f9c25b3aff01af289b18422cc8b4262fb19103ee80fe513d"
checksum = "f4eacb0641a310445a4c513f2a5e23e19952e269c6a38887254d5f837a305506"
dependencies = [
"once_cell",
"wasm-bindgen",
@@ -2170,18 +2132,6 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "nix"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "225e7cfe711e0ba79a68baeddb2982723e4235247aefce1482f2f16c27865b66"
dependencies = [
"bitflags 2.11.0",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "nom"
version = "7.1.3"
@@ -2218,21 +2168,6 @@ dependencies = [
"url",
]
[[package]]
name = "objc2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05"
dependencies = [
"objc2-encode",
]
[[package]]
name = "objc2-encode"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33"
[[package]]
name = "object"
version = "0.37.3"
@@ -3351,7 +3286,7 @@ checksum = "8f54a172d0620933a27a4360d3db3e2ae0dd6cceae9730751a036bbf182c4b23"
name = "vecdb"
version = "0.6.8"
dependencies = [
"ctrlc",
"libc",
"log",
"lz4_flex 0.12.0",
"parking_lot",
@@ -3416,9 +3351,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
version = "0.2.111"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec1adf1535672f5b7824f817792b1afd731d7e843d2d04ec8f27e8cb51edd8ac"
checksum = "05d7d0fce354c88b7982aec4400b3e7fcf723c32737cef571bd165f7613557ee"
dependencies = [
"cfg-if",
"once_cell",
@@ -3429,9 +3364,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.111"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19e638317c08b21663aed4d2b9a2091450548954695ff4efa75bff5fa546b3b1"
checksum = "55839b71ba921e4f75b674cb16f843f4b1f3b26ddfcb3454de1cf65cc021ec0f"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3439,9 +3374,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.111"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c64760850114d03d5f65457e96fc988f11f01d38fbaa51b254e4ab5809102af"
checksum = "caf2e969c2d60ff52e7e98b7392ff1588bffdd1ccd4769eba27222fd3d621571"
dependencies = [
"bumpalo",
"proc-macro2",
@@ -3452,9 +3387,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.111"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60eecd4fe26177cfa3339eb00b4a36445889ba3ad37080c2429879718e20ca41"
checksum = "0861f0dcdf46ea819407495634953cdcc8a8c7215ab799a7a7ce366be71c7b30"
dependencies = [
"unicode-ident",
]
@@ -3495,9 +3430,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.88"
version = "0.3.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d6bb20ed2d9572df8584f6dc81d68a41a625cadc6f15999d649a70ce7e3597a"
checksum = "10053fbf9a374174094915bbce141e87a6bf32ecd9a002980db4b638405e8962"
dependencies = [
"js-sys",
"wasm-bindgen",

View File

@@ -14,11 +14,19 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.interval.compute(indexer, starting_indexes, exit)?;
self.time.timestamp.compute_first(
starting_indexes,
&indexer.vecs.blocks.timestamp,
indexes,
exit,
)?;
self.count
.compute(indexer, &self.time, starting_indexes, exit)?;
self.size.compute(indexer, starting_indexes, exit)?;
self.weight.compute(indexer, starting_indexes, exit)?;
self.interval
.compute(indexer, &self.count, starting_indexes, exit)?;
self.size.compute(indexer, &self.count, starting_indexes, exit)?;
self.weight
.compute(indexer, &self.count, starting_indexes, exit)?;
self.difficulty
.compute(indexer, indexes, starting_indexes, exit)?;
self.halving.compute(indexes, starting_indexes, exit)?;

View File

@@ -158,28 +158,16 @@ impl Vecs {
)?;
// Compute rolling window block counts
self.block_count_24h_sum.height.compute_transform(
let ws = crate::internal::WindowStarts {
_24h: &self.height_24h_ago,
_7d: &self.height_1w_ago,
_30d: &self.height_1m_ago,
_1y: &self.height_1y_ago,
};
self.block_count_sum.compute_rolling_sum(
starting_indexes.height,
&self.height_24h_ago,
|(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)),
exit,
)?;
self.block_count_1w_sum.height.compute_transform(
starting_indexes.height,
&self.height_1w_ago,
|(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)),
exit,
)?;
self.block_count_1m_sum.height.compute_transform(
starting_indexes.height,
&self.height_1m_ago,
|(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)),
exit,
)?;
self.block_count_1y_sum.height.compute_transform(
starting_indexes.height,
&self.height_1y_ago,
|(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)),
&ws,
&self.block_count.height,
exit,
)?;

View File

@@ -5,7 +5,7 @@ use vecdb::{Database, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{BlockCountTarget, ComputedFromHeightLast, ComputedFromHeightSumCum, ConstantVecs},
internal::{BlockCountTarget, ComputedFromHeightSumCum, ConstantVecs, RollingWindows},
};
impl Vecs {
@@ -52,27 +52,9 @@ impl Vecs {
height_6y_ago: ImportableVec::forced_import(db, "height_6y_ago", version)?,
height_8y_ago: ImportableVec::forced_import(db, "height_8y_ago", version)?,
height_10y_ago: ImportableVec::forced_import(db, "height_10y_ago", version)?,
block_count_24h_sum: ComputedFromHeightLast::forced_import(
block_count_sum: RollingWindows::forced_import(
db,
"block_count_24h_sum",
version,
indexes,
)?,
block_count_1w_sum: ComputedFromHeightLast::forced_import(
db,
"block_count_1w_sum",
version,
indexes,
)?,
block_count_1m_sum: ComputedFromHeightLast::forced_import(
db,
"block_count_1m_sum",
version,
indexes,
)?,
block_count_1y_sum: ComputedFromHeightLast::forced_import(
db,
"block_count_1y_sum",
"block_count_sum",
version,
indexes,
)?,

View File

@@ -2,7 +2,7 @@ use brk_traversable::Traversable;
use brk_types::{Height, StoredU32, StoredU64};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSumCum, ConstantVecs};
use crate::internal::{ComputedFromHeightSumCum, ConstantVecs, RollingWindows, WindowStarts};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
@@ -40,13 +40,20 @@ pub struct Vecs<M: StorageMode = Rw> {
pub height_8y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_10y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
// Rolling window block counts
pub block_count_24h_sum: ComputedFromHeightLast<StoredU32, M>,
pub block_count_1w_sum: ComputedFromHeightLast<StoredU32, M>,
pub block_count_1m_sum: ComputedFromHeightLast<StoredU32, M>,
pub block_count_1y_sum: ComputedFromHeightLast<StoredU32, M>,
pub block_count_sum: RollingWindows<StoredU32, M>,
}
impl Vecs {
/// Get the standard 4 rolling window start heights (24h, 7d, 30d, 1y).
pub fn window_starts(&self) -> WindowStarts<'_> {
WindowStarts {
_24h: &self.height_24h_ago,
_7d: &self.height_1w_ago,
_30d: &self.height_1m_ago,
_1y: &self.height_1y_ago,
}
}
pub fn start_vec(&self, days: usize) -> &EagerVec<PcoVec<Height, Height>> {
match days {
1 => &self.height_24h_ago,

View File

@@ -6,7 +6,7 @@ use vecdb::{Database, ReadableCloneableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, ComputedFromHeightSum, ComputedHeightDerivedLast},
internal::{ComputedFromHeightLast, ComputedHeightDerivedLast},
};
impl Vecs {
@@ -26,7 +26,7 @@ impl Vecs {
indexes,
),
as_hash: ComputedFromHeightLast::forced_import(db, "difficulty_as_hash", version, indexes)?,
adjustment: ComputedFromHeightSum::forced_import(db, "difficulty_adjustment", version, indexes)?,
adjustment: ComputedFromHeightLast::forced_import(db, "difficulty_adjustment", version, indexes)?,
epoch: ComputedFromHeightLast::forced_import(db, "difficulty_epoch", version, indexes)?,
blocks_before_next_adjustment: ComputedFromHeightLast::forced_import(
db,

View File

@@ -2,7 +2,7 @@ use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, StoredF32, StoredF64, StoredU32};
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSum, ComputedHeightDerivedLast};
use crate::internal::{ComputedFromHeightLast, ComputedHeightDerivedLast};
/// Difficulty metrics: raw difficulty, derived stats, adjustment, and countdown
#[derive(Traversable)]
@@ -10,7 +10,7 @@ pub struct Vecs<M: StorageMode = Rw> {
/// Raw difficulty with day1/period stats - merges with indexer's raw
pub raw: ComputedHeightDerivedLast<StoredF64>,
pub as_hash: ComputedFromHeightLast<StoredF32, M>,
pub adjustment: ComputedFromHeightSum<StoredF32, M>,
pub adjustment: ComputedFromHeightLast<StoredF32, M>,
pub epoch: ComputedFromHeightLast<DifficultyEpoch, M>,
pub blocks_before_next_adjustment: ComputedFromHeightLast<StoredU32, M>,
pub days_before_next_adjustment: ComputedFromHeightLast<StoredF32, M>,

View File

@@ -29,7 +29,7 @@ impl Vecs {
let interval = IntervalVecs::forced_import(&db, version, indexes)?;
let size = SizeVecs::forced_import(&db, version, indexer, indexes)?;
let weight = WeightVecs::forced_import(&db, version, indexer, indexes)?;
let time = TimeVecs::forced_import(&db, version, indexer, indexes)?;
let time = TimeVecs::forced_import(&db, version)?;
let difficulty = DifficultyVecs::forced_import(&db, version, indexer, indexes)?;
let halving = HalvingVecs::forced_import(&db, version, indexes)?;

View File

@@ -4,12 +4,13 @@ use brk_types::{CheckedSub, Timestamp};
use vecdb::{Exit, ReadableVec};
use super::Vecs;
use crate::ComputeIndexes;
use crate::{blocks, ComputeIndexes};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
count_vecs: &blocks::CountVecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -31,6 +32,15 @@ impl Vecs {
},
exit,
)?;
let window_starts = count_vecs.window_starts();
self.interval_rolling.compute_distribution(
starting_indexes.height,
&window_starts,
&self.interval.height,
exit,
)?;
Ok(())
}
}

View File

@@ -3,7 +3,10 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedFromHeightDistribution};
use crate::{
indexes,
internal::{ComputedFromHeightLast, RollingDistribution},
};
impl Vecs {
pub(crate) fn forced_import(
@@ -11,13 +14,15 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let interval = ComputedFromHeightDistribution::forced_import(
db,
"block_interval",
version,
indexes,
)?;
let interval =
ComputedFromHeightLast::forced_import(db, "block_interval", version, indexes)?;
Ok(Self { interval })
let interval_rolling =
RollingDistribution::forced_import(db, "block_interval", version, indexes)?;
Ok(Self {
interval,
interval_rolling,
})
}
}

View File

@@ -2,10 +2,11 @@ use brk_traversable::Traversable;
use brk_types::Timestamp;
use vecdb::{Rw, StorageMode};
use crate::internal::ComputedFromHeightDistribution;
use crate::internal::{ComputedFromHeightLast, RollingDistribution};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(flatten)]
pub interval: ComputedFromHeightDistribution<Timestamp, M>,
pub interval: ComputedFromHeightLast<Timestamp, M>,
pub interval_rolling: RollingDistribution<Timestamp, M>,
}

View File

@@ -1,21 +1,43 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::StoredU64;
use vecdb::Exit;
use super::Vecs;
use crate::ComputeIndexes;
use crate::{blocks, ComputeIndexes};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
count_vecs: &blocks::CountVecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.size
.compute_cumulative(starting_indexes, &indexer.vecs.blocks.total_size, exit)?;
let window_starts = count_vecs.window_starts();
self.vbytes.compute_cumulative(starting_indexes, exit)?;
// vbytes = floor(weight / 4) via to_vbytes_floor(), stored at height level
// NOTE(review): BIP 141 vsize is ceil(weight / 4) — confirm floor here is intentional
self.vbytes.compute(
starting_indexes.height,
&window_starts,
exit,
|height| {
Ok(height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|(h, weight, ..)| (h, StoredU64::from(weight.to_vbytes_floor())),
exit,
)?)
},
)?;
// size from indexer total_size
self.size.compute(
starting_indexes.height,
&window_starts,
&indexer.vecs.blocks.total_size,
exit,
)?;
Ok(())
}

View File

@@ -4,7 +4,10 @@ use brk_types::Version;
use vecdb::{Database, ReadableCloneableVec};
use super::Vecs;
use crate::{indexes, internal::{ComputedHeightDerivedFull, LazyComputedFromHeightFull, WeightToVbytes}};
use crate::{
indexes,
internal::{ComputedFromHeightCumFull, ComputedHeightDerivedCumFull},
};
impl Vecs {
pub(crate) fn forced_import(
@@ -14,14 +17,13 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
vbytes: LazyComputedFromHeightFull::forced_import::<WeightToVbytes>(
vbytes: ComputedFromHeightCumFull::forced_import(
db,
"block_vbytes",
version,
&indexer.vecs.blocks.weight,
indexes,
)?,
size: ComputedHeightDerivedFull::forced_import(
size: ComputedHeightDerivedCumFull::forced_import(
db,
"block_size",
indexer.vecs.blocks.total_size.read_only_boxed_clone(),

View File

@@ -1,11 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{StoredU64, Weight};
use brk_types::StoredU64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedHeightDerivedFull, LazyComputedFromHeightFull};
use crate::internal::{ComputedFromHeightCumFull, ComputedHeightDerivedCumFull};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub vbytes: LazyComputedFromHeightFull<StoredU64, Weight, M>,
pub size: ComputedHeightDerivedFull<StoredU64, M>,
pub vbytes: ComputedFromHeightCumFull<StoredU64, M>,
pub size: ComputedHeightDerivedCumFull<StoredU64, M>,
}

View File

@@ -1,18 +1,12 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Date, Height, Version};
use vecdb::{Database, EagerVec, ImportableVec, ReadableCloneableVec, LazyVecFrom1};
use vecdb::{Database, EagerVec, ImportableVec, LazyVecFrom1, ReadableCloneableVec};
use super::Vecs;
use crate::{indexes, internal::ComputedHeightDerivedFirst};
use crate::internal::EagerIndexes;
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
pub(crate) fn forced_import(db: &Database, version: Version) -> Result<Self> {
let timestamp_monotonic =
EagerVec::forced_import(db, "timestamp_monotonic", version)?;
@@ -24,12 +18,7 @@ impl Vecs {
|_height: Height, timestamp| Date::from(timestamp),
),
timestamp_monotonic,
timestamp: ComputedHeightDerivedFirst::forced_import(
"timestamp",
indexer.vecs.blocks.timestamp.read_only_boxed_clone(),
version,
indexes,
),
timestamp: EagerIndexes::forced_import(db, "timestamp", version)?,
})
}
}

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::{Date, Height, Timestamp};
use vecdb::{EagerVec, LazyVecFrom1, PcoVec, Rw, StorageMode};
use crate::internal::ComputedHeightDerivedFirst;
use crate::internal::EagerIndexes;
/// Timestamp and date metrics for blocks
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub date: LazyVecFrom1<Height, Date, Height, Timestamp>,
pub timestamp_monotonic: M::Stored<EagerVec<PcoVec<Height, Timestamp>>>,
pub timestamp: ComputedHeightDerivedFirst<Timestamp>,
pub timestamp: EagerIndexes<Timestamp, M>,
}

View File

@@ -1,19 +1,41 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::StoredF32;
use vecdb::Exit;
use super::Vecs;
use crate::ComputeIndexes;
use crate::{blocks, ComputeIndexes};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
count_vecs: &blocks::CountVecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.weight
.compute_cumulative(starting_indexes, &indexer.vecs.blocks.weight, exit)?;
let window_starts = count_vecs.window_starts();
self.weight.compute(
starting_indexes.height,
&window_starts,
&indexer.vecs.blocks.weight,
exit,
)?;
self.fullness.height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|(h, weight, ..)| (h, StoredF32::from(weight.fullness())),
exit,
)?;
self.fullness_rolling.compute_distribution(
starting_indexes.height,
&window_starts,
&self.fullness.height,
exit,
)?;
Ok(())
}

View File

@@ -6,7 +6,7 @@ use vecdb::{Database, ReadableCloneableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedHeightDerivedFull, LazyFromHeightTransformDistribution, WeightToFullness},
internal::{ComputedFromHeightLast, ComputedHeightDerivedCumFull, RollingDistribution},
};
impl Vecs {
@@ -16,7 +16,7 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let weight = ComputedHeightDerivedFull::forced_import(
let weight = ComputedHeightDerivedCumFull::forced_import(
db,
"block_weight",
indexer.vecs.blocks.weight.read_only_boxed_clone(),
@@ -24,13 +24,16 @@ impl Vecs {
indexes,
)?;
let fullness = LazyFromHeightTransformDistribution::from_derived::<WeightToFullness>(
"block_fullness",
version,
indexer.vecs.blocks.weight.read_only_boxed_clone(),
&weight,
);
let fullness =
ComputedFromHeightLast::forced_import(db, "block_fullness", version, indexes)?;
Ok(Self { weight, fullness })
let fullness_rolling =
RollingDistribution::forced_import(db, "block_fullness", version, indexes)?;
Ok(Self {
weight,
fullness,
fullness_rolling,
})
}
}

View File

@@ -2,10 +2,13 @@ use brk_traversable::Traversable;
use brk_types::{StoredF32, Weight};
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedHeightDerivedFull, LazyFromHeightTransformDistribution};
use crate::internal::{
ComputedFromHeightLast, ComputedHeightDerivedCumFull, RollingDistribution,
};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub weight: ComputedHeightDerivedFull<Weight, M>,
pub fullness: LazyFromHeightTransformDistribution<StoredF32, Weight>,
pub weight: ComputedHeightDerivedCumFull<Weight, M>,
pub fullness: ComputedFromHeightLast<StoredF32, M>,
pub fullness_rolling: RollingDistribution<StoredF32, M>,
}

View File

@@ -3,15 +3,18 @@ use brk_types::{Bitcoin, CheckedSub, StoredF64};
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, distribution};
use crate::{ComputeIndexes, blocks, distribution};
impl Vecs {
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
blocks: &blocks::Vecs,
distribution: &distribution::Vecs,
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.count.window_starts();
let circulating_supply = &distribution
.utxo_cohorts
.all
@@ -22,7 +25,7 @@ impl Vecs {
.height;
self.coinblocks_created
.compute(starting_indexes, exit, |vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
circulating_supply,
@@ -40,7 +43,7 @@ impl Vecs {
.coinblocks_destroyed;
self.coinblocks_stored
.compute(starting_indexes, exit, |vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform2(
starting_indexes.height,
&self.coinblocks_created.height,
@@ -53,8 +56,8 @@ impl Vecs {
self.liveliness.height.compute_divide(
starting_indexes.height,
&*coinblocks_destroyed.height_cumulative,
&*self.coinblocks_created.height_cumulative,
&coinblocks_destroyed.cumulative.height,
&self.coinblocks_created.cumulative.height,
exit,
)?;

View File

@@ -5,19 +5,19 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, ComputedFromHeightSumCum},
internal::{ComputedFromHeightCumSum, ComputedFromHeightLast},
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
coinblocks_created: ComputedFromHeightSumCum::forced_import(
coinblocks_created: ComputedFromHeightCumSum::forced_import(
db,
"coinblocks_created",
version,
indexes,
)?,
coinblocks_stored: ComputedFromHeightSumCum::forced_import(
coinblocks_stored: ComputedFromHeightCumSum::forced_import(
db,
"coinblocks_stored",
version,

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, ComputedFromHeightSumCum};
use crate::internal::{ComputedFromHeightCumSum, ComputedFromHeightLast};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub coinblocks_created: ComputedFromHeightSumCum<StoredF64, M>,
pub coinblocks_stored: ComputedFromHeightSumCum<StoredF64, M>,
pub coinblocks_created: ComputedFromHeightCumSum<StoredF64, M>,
pub coinblocks_stored: ComputedFromHeightCumSum<StoredF64, M>,
pub liveliness: ComputedFromHeightLast<StoredF64, M>,
pub vaultedness: ComputedFromHeightLast<StoredF64, M>,
pub activity_to_vaultedness_ratio: ComputedFromHeightLast<StoredF64, M>,

View File

@@ -65,9 +65,9 @@ impl Vecs {
// cointime_cap = (cointime_value_destroyed_cumulative * circulating_supply) / coinblocks_stored_cumulative
self.cointime_cap.height.compute_transform3(
starting_indexes.height,
&value.cointime_value_destroyed.height_cumulative.0,
&value.cointime_value_destroyed.cumulative.height,
circulating_supply,
&activity.coinblocks_stored.height_cumulative.0,
&activity.coinblocks_stored.cumulative.height,
|(i, destroyed, supply, stored, ..)| {
let destroyed: f64 = *destroyed;
let supply: f64 = supply.into();

View File

@@ -18,7 +18,7 @@ impl Vecs {
) -> Result<()> {
// Activity computes first (liveliness, vaultedness, etc.)
self.activity
.compute(starting_indexes, distribution, exit)?;
.compute(starting_indexes, blocks, distribution, exit)?;
// Supply computes next (depends on activity)
self.supply.compute(
@@ -36,6 +36,7 @@ impl Vecs {
self.value.compute(
starting_indexes,
prices,
blocks,
distribution,
&self.activity,
exit,

View File

@@ -4,17 +4,20 @@ use vecdb::Exit;
use super::super::activity;
use super::Vecs;
use crate::{ComputeIndexes, distribution, prices};
use crate::{ComputeIndexes, blocks, distribution, prices};
impl Vecs {
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
prices: &prices::Vecs,
blocks: &blocks::Vecs,
distribution: &distribution::Vecs,
activity: &activity::Vecs,
exit: &Exit,
) -> Result<()> {
let window_starts = blocks.count.window_starts();
let coinblocks_destroyed = &distribution
.utxo_cohorts
.all
@@ -39,7 +42,7 @@ impl Vecs {
.height;
self.cointime_value_destroyed
.compute(starting_indexes, exit,|vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.usd.price,
@@ -50,7 +53,7 @@ impl Vecs {
})?;
self.cointime_value_created
.compute(starting_indexes, exit,|vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.usd.price,
@@ -61,7 +64,7 @@ impl Vecs {
})?;
self.cointime_value_stored
.compute(starting_indexes, exit,|vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&prices.usd.price,
@@ -75,7 +78,7 @@ impl Vecs {
// Supply-adjusted to account for growing supply over time
// This is a key input for Reserve Risk / HODL Bank calculation
self.vocdd
.compute(starting_indexes, exit,|vec| {
.compute(starting_indexes.height, &window_starts, exit, |vec| {
vec.compute_transform3(
starting_indexes.height,
&prices.usd.price,

View File

@@ -3,30 +3,30 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::ComputedFromHeightSumCum};
use crate::{indexes, internal::ComputedFromHeightCumSum};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
cointime_value_destroyed: ComputedFromHeightSumCum::forced_import(
cointime_value_destroyed: ComputedFromHeightCumSum::forced_import(
db,
"cointime_value_destroyed",
version,
indexes,
)?,
cointime_value_created: ComputedFromHeightSumCum::forced_import(
cointime_value_created: ComputedFromHeightCumSum::forced_import(
db,
"cointime_value_created",
version,
indexes,
)?,
cointime_value_stored: ComputedFromHeightSumCum::forced_import(
cointime_value_stored: ComputedFromHeightCumSum::forced_import(
db,
"cointime_value_stored",
version,
indexes,
)?,
vocdd: ComputedFromHeightSumCum::forced_import(
vocdd: ComputedFromHeightCumSum::forced_import(
db,
"vocdd",
version + Version::ONE,

View File

@@ -2,12 +2,12 @@ use brk_traversable::Traversable;
use brk_types::StoredF64;
use vecdb::{Rw, StorageMode};
use crate::internal::ComputedFromHeightSumCum;
use crate::internal::ComputedFromHeightCumSum;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub cointime_value_destroyed: ComputedFromHeightSumCum<StoredF64, M>,
pub cointime_value_created: ComputedFromHeightSumCum<StoredF64, M>,
pub cointime_value_stored: ComputedFromHeightSumCum<StoredF64, M>,
pub vocdd: ComputedFromHeightSumCum<StoredF64, M>,
pub cointime_value_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
pub cointime_value_created: ComputedFromHeightCumSum<StoredF64, M>,
pub cointime_value_stored: ComputedFromHeightCumSum<StoredF64, M>,
pub vocdd: ComputedFromHeightCumSum<StoredF64, M>,
}

View File

@@ -67,7 +67,7 @@ pub(crate) fn process_blocks(
// From transactions and inputs/outputs (via .height or .height.sum_cum.sum patterns):
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.height.sum_cum.sum.0;
let height_to_output_count = &outputs.count.total_count.sum_cum.sum.0;
let height_to_input_count = &inputs.count.height.sum_cum.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.timestamp_monotonic;
@@ -114,8 +114,11 @@ pub(crate) fn process_blocks(
debug!("building txindex_to_height RangeMap");
let mut txindex_to_height: RangeMap<TxIndex, Height> = {
let first_txindex_len = indexer.vecs.transactions.first_txindex.len();
let all_first_txindexes: Vec<TxIndex> =
indexer.vecs.transactions.first_txindex.collect_range_at(0, first_txindex_len);
let all_first_txindexes: Vec<TxIndex> = indexer
.vecs
.transactions
.first_txindex
.collect_range_at(0, first_txindex_len);
let mut map = RangeMap::with_capacity(first_txindex_len);
for first_txindex in all_first_txindexes {
map.push(first_txindex);
@@ -129,14 +132,46 @@ pub(crate) fn process_blocks(
let mut txin_iters = TxInReaders::new(indexer, inputs, &mut txindex_to_height);
// Pre-collect first address indexes per type for the block range
let first_p2a_vec = indexer.vecs.addresses.first_p2aaddressindex.collect_range_at(start_usize, end_usize);
let first_p2pk33_vec = indexer.vecs.addresses.first_p2pk33addressindex.collect_range_at(start_usize, end_usize);
let first_p2pk65_vec = indexer.vecs.addresses.first_p2pk65addressindex.collect_range_at(start_usize, end_usize);
let first_p2pkh_vec = indexer.vecs.addresses.first_p2pkhaddressindex.collect_range_at(start_usize, end_usize);
let first_p2sh_vec = indexer.vecs.addresses.first_p2shaddressindex.collect_range_at(start_usize, end_usize);
let first_p2tr_vec = indexer.vecs.addresses.first_p2traddressindex.collect_range_at(start_usize, end_usize);
let first_p2wpkh_vec = indexer.vecs.addresses.first_p2wpkhaddressindex.collect_range_at(start_usize, end_usize);
let first_p2wsh_vec = indexer.vecs.addresses.first_p2wshaddressindex.collect_range_at(start_usize, end_usize);
let first_p2a_vec = indexer
.vecs
.addresses
.first_p2aaddressindex
.collect_range_at(start_usize, end_usize);
let first_p2pk33_vec = indexer
.vecs
.addresses
.first_p2pk33addressindex
.collect_range_at(start_usize, end_usize);
let first_p2pk65_vec = indexer
.vecs
.addresses
.first_p2pk65addressindex
.collect_range_at(start_usize, end_usize);
let first_p2pkh_vec = indexer
.vecs
.addresses
.first_p2pkhaddressindex
.collect_range_at(start_usize, end_usize);
let first_p2sh_vec = indexer
.vecs
.addresses
.first_p2shaddressindex
.collect_range_at(start_usize, end_usize);
let first_p2tr_vec = indexer
.vecs
.addresses
.first_p2traddressindex
.collect_range_at(start_usize, end_usize);
let first_p2wpkh_vec = indexer
.vecs
.addresses
.first_p2wpkhaddressindex
.collect_range_at(start_usize, end_usize);
let first_p2wsh_vec = indexer
.vecs
.addresses
.first_p2wshaddressindex
.collect_range_at(start_usize, end_usize);
// Track running totals - recover from previous height if resuming
debug!("recovering addr_counts from height {}", starting_height);
@@ -423,7 +458,7 @@ pub(crate) fn process_blocks(
)?;
// Compute unrealized peak regret by age range (once per day)
if let Some(day1) = day1_opt {
if day1_opt.is_some() {
vecs.utxo_cohorts.compute_and_push_peak_regret(
chain_state,
height,

View File

@@ -6,7 +6,7 @@ use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, ImportableVec, PcoVec, Rw, Sto
use crate::{
ComputeIndexes, blocks,
internal::{ComputedFromHeightSumCum, LazyComputedValueFromHeightSumCum, ValueEmaFromHeight},
internal::{ComputedFromHeightCumSum, LazyComputedValueFromHeightCum, ValueEmaFromHeight},
};
use super::ImportConfig;
@@ -15,7 +15,7 @@ use super::ImportConfig;
#[derive(Traversable)]
pub struct ActivityMetrics<M: StorageMode = Rw> {
/// Total satoshis sent at each height + derived indexes
pub sent: LazyComputedValueFromHeightSumCum<M>,
pub sent: LazyComputedValueFromHeightCum<M>,
/// 14-day EMA of sent supply (sats, btc, usd)
pub sent_14d_ema: ValueEmaFromHeight<M>,
@@ -27,17 +27,17 @@ pub struct ActivityMetrics<M: StorageMode = Rw> {
pub satdays_destroyed: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
/// Coin-blocks destroyed (in BTC rather than sats)
pub coinblocks_destroyed: ComputedFromHeightSumCum<StoredF64, M>,
pub coinblocks_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
/// Coin-days destroyed (in BTC rather than sats)
pub coindays_destroyed: ComputedFromHeightSumCum<StoredF64, M>,
pub coindays_destroyed: ComputedFromHeightCumSum<StoredF64, M>,
}
impl ActivityMetrics {
/// Import activity metrics from database.
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
Ok(Self {
sent: LazyComputedValueFromHeightSumCum::forced_import(
sent: LazyComputedValueFromHeightCum::forced_import(
cfg.db,
&cfg.name("sent"),
cfg.version,
@@ -64,14 +64,14 @@ impl ActivityMetrics {
cfg.version,
)?,
coinblocks_destroyed: ComputedFromHeightSumCum::forced_import(
coinblocks_destroyed: ComputedFromHeightCumSum::forced_import(
cfg.db,
&cfg.name("coinblocks_destroyed"),
cfg.version,
cfg.indexes,
)?,
coindays_destroyed: ComputedFromHeightSumCum::forced_import(
coindays_destroyed: ComputedFromHeightCumSum::forced_import(
cfg.db,
&cfg.name("coindays_destroyed"),
cfg.version,
@@ -163,7 +163,9 @@ impl ActivityMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.sent.compute_cumulative(starting_indexes, exit)?;
let window_starts = blocks.count.window_starts();
self.sent.compute_cumulative(starting_indexes.height, exit)?;
// 14-day rolling average of sent (sats and dollars)
self.sent_14d_ema.compute_rolling_average(
@@ -174,7 +176,7 @@ impl ActivityMetrics {
exit,
)?;
self.coinblocks_destroyed.compute(starting_indexes, exit, |v| {
self.coinblocks_destroyed.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satblocks_destroyed,
@@ -184,7 +186,7 @@ impl ActivityMetrics {
Ok(())
})?;
self.coindays_destroyed.compute(starting_indexes, exit, |v| {
self.coindays_destroyed.compute(starting_indexes.height, &window_starts, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.satdays_destroyed,

View File

@@ -14,12 +14,11 @@ use crate::{
ComputeIndexes, blocks,
distribution::state::RealizedState,
internal::{
CentsUnsignedToDollars, ComputedFromHeightLast, ComputedFromHeightRatio,
ComputedFromHeightSum, ComputedFromHeightSumCum, DollarsMinus, DollarsPlus,
DollarsSquaredDivide, LazyBinaryFromHeightLast, LazyBinaryFromHeightSum,
LazyBinaryFromHeightSumCum, LazyBinaryPriceFromHeight,
LazyComputedValueFromHeightSumCum, LazyFromHeightLast, LazyFromHeightSum,
LazyFromHeightSumCum, LazyPriceFromCents, PercentageDollarsF32, Price, PriceFromHeight,
CentsUnsignedToDollars, ComputedFromHeightCum, ComputedFromHeightLast,
ComputedFromHeightRatio, DollarsMinus, DollarsPlus,
DollarsSquaredDivide, LazyBinaryFromHeightLast,
LazyBinaryPriceFromHeight, LazyComputedValueFromHeightCum, LazyFromHeightLast,
LazyPriceFromCents, PercentageDollarsF32, Price, PriceFromHeight,
Ratio64, StoredF32Identity, ValueEmaFromHeight,
},
prices,
@@ -58,24 +57,24 @@ pub struct RealizedMetrics<M: StorageMode = Rw> {
pub mvrv: LazyFromHeightLast<StoredF32>,
// === Realized Profit/Loss ===
pub realized_profit: ComputedFromHeightSumCum<Dollars, M>,
pub realized_profit: ComputedFromHeightCum<Dollars, M>,
pub realized_profit_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub realized_loss: ComputedFromHeightSumCum<Dollars, M>,
pub realized_loss: ComputedFromHeightCum<Dollars, M>,
pub realized_loss_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub neg_realized_loss: LazyFromHeightSumCum<Dollars>,
pub net_realized_pnl: ComputedFromHeightSumCum<Dollars, M>,
pub neg_realized_loss: LazyFromHeightLast<Dollars>,
pub net_realized_pnl: ComputedFromHeightCum<Dollars, M>,
pub net_realized_pnl_7d_ema: ComputedFromHeightLast<Dollars, M>,
pub realized_value: ComputedFromHeightSum<Dollars, M>,
pub realized_value: ComputedFromHeightLast<Dollars, M>,
// === Realized vs Realized Cap Ratios (lazy) ===
pub realized_profit_rel_to_realized_cap:
LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
pub realized_loss_rel_to_realized_cap: LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub realized_loss_rel_to_realized_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
pub net_realized_pnl_rel_to_realized_cap:
LazyBinaryFromHeightSumCum<StoredF32, Dollars, Dollars>,
LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
// === Total Realized PnL ===
pub total_realized_pnl: LazyFromHeightSum<Dollars>,
pub total_realized_pnl: LazyFromHeightLast<Dollars>,
// === Realized Profit/Loss Rolling Sums ===
pub realized_profit_24h: Option<ComputedFromHeightLast<Dollars, M>>,
@@ -94,22 +93,22 @@ pub struct RealizedMetrics<M: StorageMode = Rw> {
pub realized_profit_to_loss_ratio_1y: Option<LazyBinaryFromHeightLast<StoredF64, Dollars, Dollars>>,
// === Value Created/Destroyed Splits (stored) ===
pub profit_value_created: ComputedFromHeightSum<Dollars, M>,
pub profit_value_destroyed: ComputedFromHeightSum<Dollars, M>,
pub loss_value_created: ComputedFromHeightSum<Dollars, M>,
pub loss_value_destroyed: ComputedFromHeightSum<Dollars, M>,
pub profit_value_created: ComputedFromHeightLast<Dollars, M>,
pub profit_value_destroyed: ComputedFromHeightLast<Dollars, M>,
pub loss_value_created: ComputedFromHeightLast<Dollars, M>,
pub loss_value_destroyed: ComputedFromHeightLast<Dollars, M>,
// === Value Created/Destroyed Totals (lazy: profit + loss) ===
pub value_created: LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>,
pub value_destroyed: LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>,
pub value_created: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
pub value_destroyed: LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>,
// === Capitulation/Profit Flow (lazy aliases) ===
pub capitulation_flow: LazyFromHeightSum<Dollars>,
pub profit_flow: LazyFromHeightSum<Dollars>,
pub capitulation_flow: LazyFromHeightLast<Dollars>,
pub profit_flow: LazyFromHeightLast<Dollars>,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub adjusted_value_created: Option<LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>>,
pub adjusted_value_destroyed: Option<LazyBinaryFromHeightSum<Dollars, Dollars, Dollars>>,
pub adjusted_value_created: Option<LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>>,
pub adjusted_value_destroyed: Option<LazyBinaryFromHeightLast<Dollars, Dollars, Dollars>>,
// === Value Created/Destroyed Rolling Sums ===
pub value_created_24h: ComputedFromHeightLast<Dollars, M>,
@@ -179,17 +178,17 @@ pub struct RealizedMetrics<M: StorageMode = Rw> {
/// Realized peak regret: Σ((peak - sell_price) × sats)
/// where peak = max price during holding period.
/// "How much more could have been made by selling at peak instead"
pub peak_regret: ComputedFromHeightSumCum<Dollars, M>,
pub peak_regret: ComputedFromHeightCum<Dollars, M>,
/// Peak regret as % of realized cap
pub peak_regret_rel_to_realized_cap: LazyBinaryFromHeightSum<StoredF32, Dollars, Dollars>,
pub peak_regret_rel_to_realized_cap: LazyBinaryFromHeightLast<StoredF32, Dollars, Dollars>,
// === Sent in Profit/Loss ===
/// Sats sent in profit (sats/btc/usd)
pub sent_in_profit: LazyComputedValueFromHeightSumCum<M>,
pub sent_in_profit: LazyComputedValueFromHeightCum<M>,
/// 14-day EMA of sent in profit (sats, btc, usd)
pub sent_in_profit_14d_ema: ValueEmaFromHeight<M>,
/// Sats sent in loss (sats/btc/usd)
pub sent_in_loss: LazyComputedValueFromHeightSumCum<M>,
pub sent_in_loss: LazyComputedValueFromHeightCum<M>,
/// 14-day EMA of sent in loss (sats, btc, usd)
pub sent_in_loss_14d_ema: ValueEmaFromHeight<M>,
}
@@ -218,7 +217,7 @@ impl RealizedMetrics {
&realized_cap_cents,
);
let realized_profit = ComputedFromHeightSumCum::forced_import(
let realized_profit = ComputedFromHeightCum::forced_import(
cfg.db,
&cfg.name("realized_profit"),
cfg.version,
@@ -232,7 +231,7 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let realized_loss = ComputedFromHeightSumCum::forced_import(
let realized_loss = ComputedFromHeightCum::forced_import(
cfg.db,
&cfg.name("realized_loss"),
cfg.version,
@@ -246,14 +245,14 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let neg_realized_loss = LazyFromHeightSumCum::from_computed::<Negate>(
let neg_realized_loss = LazyFromHeightLast::from_computed::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
realized_loss.height.read_only_boxed_clone(),
&realized_loss,
);
let net_realized_pnl = ComputedFromHeightSumCum::forced_import(
let net_realized_pnl = ComputedFromHeightCum::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
cfg.version,
@@ -267,7 +266,7 @@ impl RealizedMetrics {
cfg.indexes,
)?;
let peak_regret = ComputedFromHeightSumCum::forced_import(
let peak_regret = ComputedFromHeightCum::forced_import(
cfg.db,
&cfg.name("realized_peak_regret"),
cfg.version + v2,
@@ -275,7 +274,7 @@ impl RealizedMetrics {
)?;
// realized_value is the source for total_realized_pnl (they're identical)
let realized_value = ComputedFromHeightSum::forced_import(
let realized_value = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("realized_value"),
cfg.version,
@@ -283,7 +282,7 @@ impl RealizedMetrics {
)?;
// total_realized_pnl is a lazy alias to realized_value
let total_realized_pnl = LazyFromHeightSum::from_computed::<Ident>(
let total_realized_pnl = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("total_realized_pnl"),
cfg.version + v1,
realized_value.height.read_only_boxed_clone(),
@@ -292,31 +291,25 @@ impl RealizedMetrics {
// Construct lazy ratio vecs
let realized_profit_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
realized_profit.height.read_only_boxed_clone(),
realized_cap.height.read_only_boxed_clone(),
&realized_profit,
&realized_cap,
);
let realized_loss_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
realized_loss.height.read_only_boxed_clone(),
realized_cap.height.read_only_boxed_clone(),
&realized_loss,
&realized_cap,
);
let net_realized_pnl_rel_to_realized_cap =
LazyBinaryFromHeightSumCum::from_computed_lazy_last::<PercentageDollarsF32, _>(
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
net_realized_pnl.height.read_only_boxed_clone(),
realized_cap.height.read_only_boxed_clone(),
&net_realized_pnl,
&realized_cap,
);
@@ -375,28 +368,28 @@ impl RealizedMetrics {
BytesVec::forced_import(cfg.db, &cfg.name("investor_cap_raw"), cfg.version)?;
// Import the 4 splits (stored)
let profit_value_created = ComputedFromHeightSum::forced_import(
let profit_value_created = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("profit_value_created"),
cfg.version,
cfg.indexes,
)?;
let profit_value_destroyed = ComputedFromHeightSum::forced_import(
let profit_value_destroyed = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("profit_value_destroyed"),
cfg.version,
cfg.indexes,
)?;
let loss_value_created = ComputedFromHeightSum::forced_import(
let loss_value_created = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("loss_value_created"),
cfg.version,
cfg.indexes,
)?;
let loss_value_destroyed = ComputedFromHeightSum::forced_import(
let loss_value_destroyed = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("loss_value_destroyed"),
cfg.version,
@@ -404,14 +397,14 @@ impl RealizedMetrics {
)?;
// Create lazy totals (profit + loss)
let value_created = LazyBinaryFromHeightSum::from_computed::<DollarsPlus>(
let value_created = LazyBinaryFromHeightLast::from_computed_last::<DollarsPlus>(
&cfg.name("value_created"),
cfg.version,
&profit_value_created,
&loss_value_created,
);
let value_destroyed = LazyBinaryFromHeightSum::from_computed::<DollarsPlus>(
let value_destroyed = LazyBinaryFromHeightLast::from_computed_last::<DollarsPlus>(
&cfg.name("value_destroyed"),
cfg.version,
&profit_value_destroyed,
@@ -419,14 +412,14 @@ impl RealizedMetrics {
);
// Create lazy aliases
let capitulation_flow = LazyFromHeightSum::from_computed::<Ident>(
let capitulation_flow = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("capitulation_flow"),
cfg.version,
loss_value_destroyed.height.read_only_boxed_clone(),
&loss_value_destroyed,
);
let profit_flow = LazyFromHeightSum::from_computed::<Ident>(
let profit_flow = LazyFromHeightLast::from_computed::<Ident>(
&cfg.name("profit_flow"),
cfg.version,
profit_value_destroyed.height.read_only_boxed_clone(),
@@ -437,7 +430,7 @@ impl RealizedMetrics {
let adjusted_value_created =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyBinaryFromHeightSum::from_binary::<
LazyBinaryFromHeightLast::from_both_binary_block::<
DollarsMinus,
Dollars,
Dollars,
@@ -453,7 +446,7 @@ impl RealizedMetrics {
let adjusted_value_destroyed =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
LazyBinaryFromHeightSum::from_binary::<
LazyBinaryFromHeightLast::from_both_binary_block::<
DollarsMinus,
Dollars,
Dollars,
@@ -626,17 +619,13 @@ impl RealizedMetrics {
sell_side_risk_ratio_24h_30d_ema.height.read_only_boxed_clone(), &sell_side_risk_ratio_24h_30d_ema,
);
let peak_regret_rel_to_realized_cap = LazyBinaryFromHeightSum::from_sumcum_lazy_last::<
PercentageDollarsF32,
_,
>(
&cfg.name("peak_regret_rel_to_realized_cap"),
cfg.version + v1,
peak_regret.height.read_only_boxed_clone(),
realized_cap.height.read_only_boxed_clone(),
&peak_regret,
&realized_cap,
);
let peak_regret_rel_to_realized_cap =
LazyBinaryFromHeightLast::from_block_last_and_lazy_block_last::<PercentageDollarsF32, _>(
&cfg.name("peak_regret_rel_to_realized_cap"),
cfg.version + v1,
&peak_regret,
&realized_cap,
);
Ok(Self {
// === Realized Cap ===
@@ -814,7 +803,7 @@ impl RealizedMetrics {
peak_regret_rel_to_realized_cap,
// === Sent in Profit/Loss ===
sent_in_profit: LazyComputedValueFromHeightSumCum::forced_import(
sent_in_profit: LazyComputedValueFromHeightCum::forced_import(
cfg.db,
&cfg.name("sent_in_profit"),
cfg.version,
@@ -827,7 +816,7 @@ impl RealizedMetrics {
cfg.version,
cfg.indexes,
)?,
sent_in_loss: LazyComputedValueFromHeightSumCum::forced_import(
sent_in_loss: LazyComputedValueFromHeightCum::forced_import(
cfg.db,
&cfg.name("sent_in_loss"),
cfg.version,
@@ -1099,15 +1088,15 @@ impl RealizedMetrics {
// realized_cap_cents: ComputedFromHeightLast - day1 is lazy, nothing to compute
// investor_price_cents: ComputedFromHeightLast - day1 is lazy, nothing to compute
// realized_profit/loss: ComputedFromHeightSumCum - compute cumulative from height
// realized_profit/loss: ComputedFromHeightCum - compute cumulative from height
self.realized_profit
.compute_cumulative(starting_indexes, exit)?;
.compute_cumulative(starting_indexes.height, exit)?;
self.realized_loss
.compute_cumulative(starting_indexes, exit)?;
.compute_cumulative(starting_indexes.height, exit)?;
// net_realized_pnl = profit - loss
self.net_realized_pnl
.compute(starting_indexes, exit, |vec| {
.compute(starting_indexes.height, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
&self.realized_profit.height,
@@ -1120,7 +1109,7 @@ impl RealizedMetrics {
// realized_value = profit + loss
// Note: total_realized_pnl is a lazy alias to realized_value since both
// compute profit + loss with sum aggregation, making them identical.
// ComputedFromHeightSum: day1 is lazy, just compute the height vec directly
// ComputedFromHeightLast: day1 is lazy, just compute the height vec directly
self.realized_value.height.compute_add(
starting_indexes.height,
&self.realized_profit.height,
@@ -1130,17 +1119,17 @@ impl RealizedMetrics {
// Compute derived aggregations for the 4 splits
// (value_created, value_destroyed, capitulation_flow, profit_flow are derived lazily)
// ComputedFromHeightSum: day1 is lazy, nothing to compute
// ComputedFromHeightLast: day1 is lazy, nothing to compute
// ATH regret: ComputedFromHeightSumCum - compute cumulative from height
// ATH regret: ComputedFromHeightCum - compute cumulative from height
self.peak_regret
.compute_cumulative(starting_indexes, exit)?;
.compute_cumulative(starting_indexes.height, exit)?;
// Volume at profit/loss: LazyComputedValueFromHeightSumCum - compute cumulative
// Volume at profit/loss: LazyComputedValueFromHeightCum - compute cumulative
self.sent_in_profit
.compute_cumulative(starting_indexes, exit)?;
.compute_cumulative(starting_indexes.height, exit)?;
self.sent_in_loss
.compute_cumulative(starting_indexes, exit)?;
.compute_cumulative(starting_indexes.height, exit)?;
Ok(())
}
@@ -1378,7 +1367,7 @@ impl RealizedMetrics {
.compute_rolling_change(
starting_indexes.height,
&blocks.count.height_1m_ago,
&*self.net_realized_pnl.rest.height_cumulative,
&self.net_realized_pnl.cumulative.height,
exit,
)?;

View File

@@ -3,20 +3,21 @@ use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{blocks, indexes, ComputeIndexes};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
blocks: &blocks::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.spent
.compute(&self.db, indexer, starting_indexes, exit)?;
self.count
.compute(indexer, indexes, starting_indexes, exit)?;
.compute(indexer, indexes, blocks, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;

View File

@@ -3,23 +3,34 @@ use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, indexes};
use crate::{blocks, indexes, ComputeIndexes};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
blocks: &blocks::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.derive_from(
indexer,
indexes,
starting_indexes,
self.height.compute_with_skip(
starting_indexes.height,
&indexes.txindex.input_count,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
0,
)?;
let window_starts = blocks.count.window_starts();
self.rolling.compute(
starting_indexes.height,
&window_starts,
self.height.sum_cum.sum.inner(),
exit,
)?;
Ok(())
}
}

View File

@@ -3,15 +3,16 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{indexes, internal::TxDerivedFull};
use crate::{
indexes,
internal::{Full, RollingFull},
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self(TxDerivedFull::forced_import(
db,
"input_count",
version,
indexes,
)?))
Ok(Self {
height: Full::forced_import(db, "input_count", version)?,
rolling: RollingFull::forced_import(db, "input_count", version, indexes)?,
})
}
}

View File

@@ -1,9 +1,11 @@
use brk_traversable::Traversable;
use brk_types::StoredU64;
use derive_more::{Deref, DerefMut};
use brk_types::{Height, StoredU64};
use vecdb::{Rw, StorageMode};
use crate::internal::TxDerivedFull;
use crate::internal::{Full, RollingFull};
#[derive(Deref, DerefMut, Traversable)]
pub struct Vecs<M: StorageMode = Rw>(pub TxDerivedFull<StoredU64, M>);
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub height: Full<Height, StoredU64, M>,
pub rolling: RollingFull<StoredU64, M>,
}

View File

@@ -0,0 +1,18 @@
//! Base generic struct with 8 type parameters — one per distribution statistic.
//!
//! Foundation for all distribution-style types (average, min, max, percentiles).
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DistributionStats<A, B = A, C = A, D = A, E = A, F = A, G = A, H = A> {
/// Arithmetic mean of the distribution.
pub average: A,
/// Smallest observed value.
pub min: B,
/// Largest observed value.
pub max: C,
/// 10th percentile.
pub p10: D,
/// 25th percentile (first quartile).
pub p25: E,
/// 50th percentile (median).
pub median: F,
/// 75th percentile (third quartile).
pub p75: G,
/// 90th percentile.
pub p90: H,
}

View File

@@ -0,0 +1,218 @@
//! EagerIndexes - newtype on Indexes with EagerVec<PcoVec<I, T>> per field.
//!
//! Used for data eagerly computed and stored per period during indexing,
//! such as timestamp (first value per period) and OHLC (first/min/max per period).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use crate::{
indexes,
indexes_from,
internal::{ComputedVecValue, Indexes, NumericValue},
ComputeIndexes,
};
/// Stored (eager) per-period vecs: one `EagerVec<PcoVec<Index, T>>` per
/// time/epoch index, each wrapped in the storage-mode marker `M`.
pub type EagerIndexesInner<T, M> = Indexes<
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute5, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Minute30, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour4, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Hour12, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Day3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Week1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month3, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Month6, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year1, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<Year10, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<HalvingEpoch, T>>>,
<M as StorageMode>::Stored<EagerVec<PcoVec<DifficultyEpoch, T>>>,
>;
/// Newtype over [`EagerIndexesInner`]: eagerly computed, stored per-period
/// data, generic over the stored value type `T` and storage mode `M`.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct EagerIndexes<T, M: StorageMode = Rw>(pub EagerIndexesInner<T, M>)
where
T: ComputedVecValue + PartialOrd + JsonSchema;
// Local version component folded into the caller-supplied version on import.
const VERSION: Version = Version::ZERO;
impl<T> EagerIndexes<T>
where
T: NumericValue + JsonSchema,
{
/// Import (or create) one eagerly-stored vec per period/epoch index.
/// Each vec is named `{name}_{period}` and versioned `version + VERSION`.
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
// One stored vec per period, e.g. "{name}_day1".
ImportableVec::forced_import(db, &format!("{name}_{}", stringify!($idx)), v)?
};
}
Ok(Self(indexes_from!(period)))
}
/// Compute "first value per period" — for each period, looks up `source[first_height[period]]`.
///
/// A missing height falls back to `T::from(0)` — presumably a period with no
/// source data yet; TODO confirm a zero default is intended for all `T`.
pub(crate) fn compute_first(
&mut self,
starting_indexes: &ComputeIndexes,
height_source: &impl ReadableVec<Height, T>,
indexes: &indexes::Vecs,
exit: &Exit,
) -> Result<()> {
macro_rules! period {
($field:ident) => {
self.0.$field.compute_transform(
starting_indexes.$field,
&indexes.$field.first_height,
|(idx, first_h, _)| {
// Value at the period's first height; zero when absent.
let v = height_source
.collect_one(first_h)
.unwrap_or_else(|| T::from(0_usize));
(idx, v)
},
exit,
)?;
};
}
period!(minute1);
period!(minute5);
period!(minute10);
period!(minute30);
period!(hour1);
period!(hour4);
period!(hour12);
period!(day1);
period!(day3);
period!(week1);
period!(month1);
period!(month3);
period!(month6);
period!(year1);
period!(year10);
period!(halvingepoch);
period!(difficultyepoch);
Ok(())
}
/// Compute "max value per period" — for each period, finds `max(source[first_height[period]..first_height[period+1]])`.
///
/// An empty range falls back to `T::from(0)` — TODO confirm a zero default
/// is intended rather than skipping the period.
pub(crate) fn compute_max(
&mut self,
starting_indexes: &ComputeIndexes,
height_source: &impl ReadableVec<Height, T>,
indexes: &indexes::Vecs,
exit: &Exit,
) -> Result<()> {
// Source length caps the exclusive end bound of the last (open) period.
let src_len = height_source.len();
macro_rules! period {
($field:ident) => {{
let fh = &indexes.$field.first_height;
self.0.$field.compute_transform(
starting_indexes.$field,
fh,
|(idx, first_h, _)| {
// Exclusive end = first height of the next period, or the
// source length when this is the last period so far.
let end_h = Height::from(
fh.collect_one_at(idx.to_usize() + 1)
.map(|h: Height| h.to_usize())
.unwrap_or(src_len),
);
let v = height_source
.max(first_h, end_h)
.unwrap_or_else(|| T::from(0_usize));
(idx, v)
},
exit,
)?;
}};
}
period!(minute1);
period!(minute5);
period!(minute10);
period!(minute30);
period!(hour1);
period!(hour4);
period!(hour12);
period!(day1);
period!(day3);
period!(week1);
period!(month1);
period!(month3);
period!(month6);
period!(year1);
period!(year10);
period!(halvingepoch);
period!(difficultyepoch);
Ok(())
}
/// Compute "min value per period" — for each period, finds `min(source[first_height[period]..first_height[period+1]])`.
///
/// Mirror of `compute_max` with `min` as the aggregator; same zero fallback
/// for empty ranges (TODO confirm the default, as above).
pub(crate) fn compute_min(
&mut self,
starting_indexes: &ComputeIndexes,
height_source: &impl ReadableVec<Height, T>,
indexes: &indexes::Vecs,
exit: &Exit,
) -> Result<()> {
// Source length caps the exclusive end bound of the last (open) period.
let src_len = height_source.len();
macro_rules! period {
($field:ident) => {{
let fh = &indexes.$field.first_height;
self.0.$field.compute_transform(
starting_indexes.$field,
fh,
|(idx, first_h, _)| {
// Exclusive end = first height of the next period, or the
// source length when this is the last period so far.
let end_h = Height::from(
fh.collect_one_at(idx.to_usize() + 1)
.map(|h: Height| h.to_usize())
.unwrap_or(src_len),
);
let v = height_source
.min(first_h, end_h)
.unwrap_or_else(|| T::from(0_usize));
(idx, v)
},
exit,
)?;
}};
}
period!(minute1);
period!(minute5);
period!(minute10);
period!(minute30);
period!(hour1);
period!(hour4);
period!(hour12);
period!(day1);
period!(day3);
period!(week1);
period!(month1);
period!(month3);
period!(month6);
period!(year1);
period!(year10);
period!(halvingepoch);
period!(difficultyepoch);
Ok(())
}
}

View File

@@ -0,0 +1,91 @@
//! Base generic struct with 17 type parameters — one per time period/epoch index.
//!
//! Foundation for all per-index types. Replaces the repetitive 17-field pattern
//! found throughout height_derived types.
use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct Indexes<M1, M5, M10, M30, H1, H4, H12, D1, D3, W1, Mo1, Mo3, Mo6, Y1, Y10, HE, DE> {
// Sub-hour periods.
pub minute1: M1,
pub minute5: M5,
pub minute10: M10,
pub minute30: M30,
// Intra-day periods.
pub hour1: H1,
pub hour4: H4,
pub hour12: H12,
// Calendar periods (day and above).
pub day1: D1,
pub day3: D3,
pub week1: W1,
pub month1: Mo1,
pub month3: Mo3,
pub month6: Mo6,
pub year1: Y1,
pub year10: Y10,
// Protocol epochs.
pub halvingepoch: HE,
pub difficultyepoch: DE,
}
/// Helper macro to construct an `Indexes` by applying a macro to each field.
///
/// Usage:
/// ```ignore
/// indexes_from!(period, epoch)
/// ```
/// where `period!($field)` and `epoch!($field)` are locally-defined macros.
/// The first macro builds the 15 time-period fields, the second the two
/// epoch fields; the one-argument form uses the same macro for both.
#[macro_export]
macro_rules! indexes_from {
($period:ident, $epoch:ident) => {
$crate::internal::Indexes {
minute1: $period!(minute1),
minute5: $period!(minute5),
minute10: $period!(minute10),
minute30: $period!(minute30),
hour1: $period!(hour1),
hour4: $period!(hour4),
hour12: $period!(hour12),
day1: $period!(day1),
day3: $period!(day3),
week1: $period!(week1),
month1: $period!(month1),
month3: $period!(month3),
month6: $period!(month6),
year1: $period!(year1),
year10: $period!(year10),
// Epoch fields use the (possibly different) epoch macro.
halvingepoch: $epoch!(halvingepoch),
difficultyepoch: $epoch!(difficultyepoch),
}
};
// Variant where period and epoch use the same macro
($m:ident) => {
$crate::indexes_from!($m, $m)
};
}
/// Helper macro to apply a function/macro to each field of an `Indexes` value.
///
/// Usage: `indexes_map!(src, |field| expr)` — binds each field of `src` to
/// `field` in turn and builds a new `Indexes` from the per-field results.
#[macro_export]
macro_rules! indexes_map {
($indexes:expr, |$field:ident| $body:expr) => {{
// Evaluate the source expression exactly once before destructuring.
let src = $indexes;
$crate::internal::Indexes {
minute1: { let $field = src.minute1; $body },
minute5: { let $field = src.minute5; $body },
minute10: { let $field = src.minute10; $body },
minute30: { let $field = src.minute30; $body },
hour1: { let $field = src.hour1; $body },
hour4: { let $field = src.hour4; $body },
hour12: { let $field = src.hour12; $body },
day1: { let $field = src.day1; $body },
day3: { let $field = src.day3; $body },
week1: { let $field = src.week1; $body },
month1: { let $field = src.month1; $body },
month3: { let $field = src.month3; $body },
month6: { let $field = src.month6; $body },
year1: { let $field = src.year1; $body },
year10: { let $field = src.year10; $body },
halvingepoch: { let $field = src.halvingepoch; $body },
difficultyepoch: { let $field = src.difficultyepoch; $body },
}
}};
}

View File

@@ -0,0 +1,70 @@
//! LazyEagerIndexes - lazy per-period transform of EagerIndexes.
//!
//! Used for lazy currency transforms (e.g., cents→dollars, cents→sats)
//! of eagerly computed per-period data like OHLC.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Minute1, Minute10, Minute30, Minute5, Month1,
Month3, Month6, Version, Week1, Year1, Year10, Hour1, Hour4, Hour12,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{LazyVecFrom1, ReadableCloneableVec, UnaryTransform};
use crate::{
indexes_from,
internal::{ComputedVecValue, EagerIndexes, Indexes},
};
/// Lazy per-period views: one `LazyVecFrom1` per time/epoch index, each
/// transforming a source vec of `S` into values of `T` on demand.
pub type LazyEagerIndexesInner<T, S> = Indexes<
LazyVecFrom1<Minute1, T, Minute1, S>,
LazyVecFrom1<Minute5, T, Minute5, S>,
LazyVecFrom1<Minute10, T, Minute10, S>,
LazyVecFrom1<Minute30, T, Minute30, S>,
LazyVecFrom1<Hour1, T, Hour1, S>,
LazyVecFrom1<Hour4, T, Hour4, S>,
LazyVecFrom1<Hour12, T, Hour12, S>,
LazyVecFrom1<Day1, T, Day1, S>,
LazyVecFrom1<Day3, T, Day3, S>,
LazyVecFrom1<Week1, T, Week1, S>,
LazyVecFrom1<Month1, T, Month1, S>,
LazyVecFrom1<Month3, T, Month3, S>,
LazyVecFrom1<Month6, T, Month6, S>,
LazyVecFrom1<Year1, T, Year1, S>,
LazyVecFrom1<Year10, T, Year10, S>,
LazyVecFrom1<HalvingEpoch, T, HalvingEpoch, S>,
LazyVecFrom1<DifficultyEpoch, T, DifficultyEpoch, S>,
>;
/// Newtype over [`LazyEagerIndexesInner`]: a lazy, element-wise transform of
/// an [`EagerIndexes`] source (`S` values mapped to `T` on read).
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyEagerIndexes<T, S>(pub LazyEagerIndexesInner<T, S>)
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S: ComputedVecValue;
impl<T, S> LazyEagerIndexes<T, S>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S: ComputedVecValue + PartialOrd + JsonSchema,
{
/// Create lazy per-period transforms from an EagerIndexes source.
///
/// Each period vec wraps a read-only clone of the corresponding eager
/// source vec and applies `Transform` element-wise when read.
pub(crate) fn from_eager_indexes<Transform: UnaryTransform<S, T>>(
name: &str,
version: Version,
source: &EagerIndexes<S>,
) -> Self {
macro_rules! period {
($idx:ident) => {
LazyVecFrom1::transformed::<Transform>(
// Same "{name}_{period}" naming scheme as the eager source.
&format!("{name}_{}", stringify!($idx)),
version,
source.$idx.read_only_boxed_clone(),
)
};
}
Self(indexes_from!(period))
}
}

View File

@@ -1,9 +1,19 @@
// Internal building blocks for the computed-vec types.
mod compute;
mod distribution_stats;
mod eager_indexes;
mod indexes;
mod lazy_eager_indexes;
mod multi;
mod single;
mod traits;
mod windows;
// Flatten all submodules for crate-internal consumption.
pub(crate) use compute::*;
pub(crate) use distribution_stats::*;
pub(crate) use eager_indexes::*;
pub(crate) use indexes::*;
pub(crate) use lazy_eager_indexes::*;
pub(crate) use multi::*;
pub(crate) use single::*;
pub(crate) use traits::*;
pub(crate) use windows::*;

View File

@@ -1,54 +0,0 @@
//! Lazy binary transform from Full sources.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2};
use crate::internal::{
ComputedFromHeightFull, ComputedVecValue, TxDerivedFull, LazyBinaryHeightDerivedSumCum,
NumericValue,
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryFromHeightFull<T, S1T = T, S2T = T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
#[traversable(rename = "base")]
pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
#[deref]
#[deref_mut]
pub rest: Box<LazyBinaryHeightDerivedSumCum<T, S1T, S2T>>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> LazyBinaryFromHeightFull<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: NumericValue + JsonSchema,
S2T: NumericValue + JsonSchema,
{
pub(crate) fn from_height_and_txindex<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
height_source1: ReadableBoxedVec<Height, S1T>,
height_source2: ReadableBoxedVec<Height, S2T>,
source1: &ComputedFromHeightFull<S1T>,
source2: &TxDerivedFull<S2T>,
) -> Self {
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
rest: Box::new(LazyBinaryHeightDerivedSumCum::from_full_sources::<F>(
name, v, &source1.rest, source2,
)),
}
}
}

View File

@@ -6,11 +6,13 @@ use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast, ComputedVecValue,
LazyBinaryComputedFromHeightLast, LazyBinaryComputedFromHeightSum,
LazyBinaryHeightDerivedLast, LazyBinaryTransformLast,
LazyFromHeightLast, NumericValue,
use crate::{
indexes_from,
internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedHeightDerivedLast,
ComputedVecValue, LazyBinaryComputedFromHeightLast, LazyBinaryHeightDerivedLast,
LazyBinaryTransformLast, LazyFromHeightLast, NumericValue,
},
};
#[derive(Clone, Deref, DerefMut, Traversable)]
@@ -43,25 +45,7 @@ macro_rules! build_rest {
)
};
}
Box::new(LazyBinaryHeightDerivedLast {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
})
Box::new(LazyBinaryHeightDerivedLast(indexes_from!(period)))
}};
}
@@ -269,12 +253,12 @@ where
}
}
/// Create from a LazyBinaryComputedFromHeightLast and a LazyBinaryComputedFromHeightSum.
pub(crate) fn from_lazy_binary_block_last_and_lazy_binary_sum<F, S1aT, S1bT, S2aT, S2bT>(
/// Create from two LazyBinaryComputedFromHeightLast sources.
pub(crate) fn from_both_lazy_binary_computed_block_last<F, S1aT, S1bT, S2aT, S2bT>(
name: &str,
version: Version,
source1: &LazyBinaryComputedFromHeightLast<S1T, S1aT, S1bT>,
source2: &LazyBinaryComputedFromHeightSum<S2T, S2aT, S2bT>,
source2: &LazyBinaryComputedFromHeightLast<S2T, S2aT, S2bT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,

View File

@@ -1,114 +0,0 @@
//! Lazy binary transform from two Sum-only sources with height level.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedFromHeightSum, ComputedFromHeightSumCum, ComputedVecValue,
LazyBinaryHeightDerivedSum, LazyFromHeightLast, NumericValue,
};
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Lazy binary transform over two Sum-only sources, with a height-level vec.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryFromHeightSum<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy height-level values; exposed as "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived sum views; `Deref` target.
    #[deref]
    #[deref_mut]
    pub rest: Box<LazyBinaryHeightDerivedSum<T, S1T, S2T>>,
}
impl<T, S1T, S2T> LazyBinaryFromHeightSum<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: NumericValue + JsonSchema,
S2T: NumericValue + JsonSchema,
{
pub(crate) fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedFromHeightSum<S1T>,
source2: &ComputedFromHeightSum<S2T>,
) -> Self {
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(
name,
v,
source1.height.read_only_boxed_clone(),
source2.height.read_only_boxed_clone(),
),
rest: Box::new(LazyBinaryHeightDerivedSum::from_derived::<F>(name, v, &source1.rest, &source2.rest)),
}
}
/// Create from two LazyBinaryFromHeightSum sources.
pub(crate) fn from_binary<F, S1aT, S1bT, S2aT, S2bT>(
name: &str,
version: Version,
source1: &LazyBinaryFromHeightSum<S1T, S1aT, S1bT>,
source2: &LazyBinaryFromHeightSum<S2T, S2aT, S2bT>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1aT: ComputedVecValue + JsonSchema,
S1bT: ComputedVecValue + JsonSchema,
S2aT: ComputedVecValue + JsonSchema,
S2bT: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(
name,
v,
source1.height.read_only_boxed_clone(),
source2.height.read_only_boxed_clone(),
),
rest: Box::new(LazyBinaryHeightDerivedSum::from_binary::<F, _, _, _, _>(
name,
v,
&source1.rest,
&source2.rest,
)),
}
}
/// Create from a SumCum source (using only sum) and a LazyLast source.
/// Produces sum-only output (no cumulative).
pub(crate) fn from_sumcum_lazy_last<F, S2ST>(
name: &str,
version: Version,
height_source1: ReadableBoxedVec<Height, S1T>,
height_source2: ReadableBoxedVec<Height, S2T>,
source1: &ComputedFromHeightSumCum<S1T>,
source2: &LazyFromHeightLast<S2T, S2ST>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S2ST: ComputedVecValue + JsonSchema,
{
let v = version + VERSION;
Self {
height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
rest: Box::new(LazyBinaryHeightDerivedSum::from_sumcum_lazy_last::<F, S2ST>(
name,
v,
source1,
source2,
)),
}
}
}

View File

@@ -1,98 +0,0 @@
//! Lazy binary transform from two SumCum sources.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedSumCum,
ComputedVecValue, LazyBinaryHeightDerivedSumCum, LazyFromHeightLast, NumericValue,
};
/// Lazy binary transform over two SumCum sources: both the height-level sum
/// and its cumulative companion are evaluated on demand.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryFromHeightSumCum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy height-level sum; exposed as "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Lazy cumulative counterpart, combined from the sources' cumulative vecs.
    #[traversable(rename = "cumulative")]
    pub height_cumulative: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived sum/cumulative views; `Deref` target.
    #[deref]
    #[deref_mut]
    pub rest: Box<LazyBinaryHeightDerivedSumCum<T, S1T, S2T>>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> LazyBinaryFromHeightSumCum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Builds from two derived SumCum sources; the cumulative view combines
    /// the two stored cumulative vecs with the same transform `F`.
    pub(crate) fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: ReadableBoxedVec<Height, S1T>,
        height_source2: ReadableBoxedVec<Height, S2T>,
        source1: &ComputedHeightDerivedSumCum<S1T>,
        source2: &ComputedHeightDerivedSumCum<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: PartialOrd,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom2::transformed::<F>(name, ver, height_source1, height_source2);
        let height_cumulative = LazyVecFrom2::transformed::<F>(
            &format!("{name}_cumulative"),
            ver,
            source1.height_cumulative.read_only_boxed_clone(),
            source2.height_cumulative.read_only_boxed_clone(),
        );
        let rest = LazyBinaryHeightDerivedSumCum::from_computed_sum_raw::<F>(
            name, ver, source1, source2,
        );
        Self {
            height,
            height_cumulative,
            rest: Box::new(rest),
        }
    }

    /// Builds from a stored SumCum source and a lazy Last source.
    ///
    /// NOTE(review): the lazy side exposes no cumulative vec, so the
    /// cumulative output pairs source1's cumulative with source2's plain
    /// height vec — confirm this matches the intended semantics.
    pub(crate) fn from_computed_lazy_last<F, S2ST>(
        name: &str,
        version: Version,
        height_source1: ReadableBoxedVec<Height, S1T>,
        height_source2: ReadableBoxedVec<Height, S2T>,
        source1: &ComputedFromHeightSumCum<S1T>,
        source2: &LazyFromHeightLast<S2T, S2ST>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1T: PartialOrd,
        S2T: NumericValue,
        S2ST: ComputedVecValue + JsonSchema,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom2::transformed::<F>(name, ver, height_source1, height_source2);
        let height_cumulative = LazyVecFrom2::transformed::<F>(
            &format!("{name}_cumulative"),
            ver,
            source1.height_cumulative.read_only_boxed_clone(),
            source2.height.read_only_boxed_clone(),
        );
        let rest = LazyBinaryHeightDerivedSumCum::from_computed_lazy_last::<F, S2ST>(
            name, ver, source1, source2,
        );
        Self {
            height,
            height_cumulative,
            rest: Box::new(rest),
        }
    }
}

View File

@@ -0,0 +1,89 @@
//! ComputedFromHeightCum - stored height + LazyLast + cumulative (from height).
//!
//! Like ComputedFromHeightCumSum but without RollingWindows.
//! Used for distribution metrics where rolling is optional per cohort.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue},
};
/// Stored per-block values plus a stored running total, both with index views.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCum<T, M: StorageMode = Rw>
where
    T: NumericValue + JsonSchema,
{
    /// Per-block values; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub last: ComputedFromHeightLast<T, M>,
    /// Running total of `last`, imported under "{name}_cumulative".
    #[traversable(flatten)]
    pub cumulative: ComputedFromHeightLast<T, M>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports (or creates) the per-block vec and its `_cumulative`
    /// companion from `db`, both under `version + VERSION`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let last = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
        let cumulative =
            ComputedFromHeightLast::forced_import(db, &format!("{name}_cumulative"), v, indexes)?;
        Ok(Self { last, cumulative })
    }

    /// Compute height data via closure, then cumulative only (no rolling).
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        exit: &Exit,
        compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()>
    where
        T: Default,
    {
        compute_height(&mut self.last.height)?;
        // Delegate so both entry points share one cumulative path and
        // cannot drift apart (previously duplicated inline).
        self.compute_cumulative(max_from, exit)
    }

    /// Compute cumulative from already-filled height vec.
    pub(crate) fn compute_cumulative(&mut self, max_from: Height, exit: &Exit) -> Result<()>
    where
        T: Default,
    {
        self.cumulative
            .height
            .compute_cumulative(max_from, &self.last.height, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,86 @@
//! ComputedFromHeightCumFull - stored height + LazyLast + cumulative (from height) + RollingFull.
//!
//! For metrics with stored per-block data, cumulative sums, and rolling windows.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue, RollingFull, WindowStarts},
};
/// Stored per-block values, running total, and rolling-window aggregates.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumFull<T, M: StorageMode = Rw>
where
    T: NumericValue + JsonSchema,
{
    /// Per-block values; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub last: ComputedFromHeightLast<T, M>,
    /// Running total of `last`, imported under "{name}_cumulative".
    #[traversable(flatten)]
    pub cumulative: ComputedFromHeightLast<T, M>,
    /// Rolling-window aggregates (full variant, incl. distribution stats).
    #[traversable(flatten)]
    pub rolling: RollingFull<T, M>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the per-block vec, its `_cumulative` companion, and the
    /// rolling-window vecs from `db`, all under the same version.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let cumulative_name = format!("{name}_cumulative");
        Ok(Self {
            last: ComputedFromHeightLast::forced_import(db, name, ver, indexes)?,
            cumulative: ComputedFromHeightLast::forced_import(db, &cumulative_name, ver, indexes)?,
            rolling: RollingFull::forced_import(db, name, ver, indexes)?,
        })
    }

    /// Compute height data via closure, then cumulative + rolling.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
        compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()>
    where
        T: From<f64> + Default + SubAssign + Copy + Ord,
        f64: From<T>,
    {
        // Fill the raw per-block values first; everything else derives from them.
        compute_height(&mut self.last.height)?;
        self.cumulative
            .height
            .compute_cumulative(max_from, &self.last.height, exit)?;
        self.rolling
            .compute(max_from, windows, &self.last.height, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,86 @@
//! ComputedFromHeightCumSum - stored height + LazyLast + cumulative (from height) + RollingWindows (sum).
//!
//! Like ComputedFromHeightCumFull but with rolling sum only (no distribution).
//! Used for count metrics where distribution stats aren't meaningful.
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, NumericValue, RollingWindows, WindowStarts},
};
/// Stored per-block values, running total, and rolling sum windows
/// (no distribution stats).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightCumSum<T, M: StorageMode = Rw>
where
    T: NumericValue + JsonSchema,
{
    /// Per-block values; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub last: ComputedFromHeightLast<T, M>,
    /// Running total of `last`, imported under "{name}_cumulative".
    #[traversable(flatten)]
    pub cumulative: ComputedFromHeightLast<T, M>,
    /// Rolling-window sums.
    #[traversable(flatten)]
    pub rolling: RollingWindows<T, M>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightCumSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the per-block vec, its `_cumulative` companion, and the
    /// rolling-sum windows from `db`, all under the same version.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let cumulative_name = format!("{name}_cumulative");
        Ok(Self {
            last: ComputedFromHeightLast::forced_import(db, name, ver, indexes)?,
            cumulative: ComputedFromHeightLast::forced_import(db, &cumulative_name, ver, indexes)?,
            rolling: RollingWindows::forced_import(db, name, ver, indexes)?,
        })
    }

    /// Compute height data via closure, then cumulative + rolling sum.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
        compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()>
    where
        T: Default + SubAssign,
    {
        // Fill the raw per-block values first; everything else derives from them.
        compute_height(&mut self.last.height)?;
        self.cumulative
            .height
            .compute_cumulative(max_from, &self.last.height, exit)?;
        self.rolling
            .compute_rolling_sum(max_from, windows, &self.last.height, exit)?;
        Ok(())
    }
}

View File

@@ -6,11 +6,9 @@ use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode,
};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::{ComputeIndexes, indexes};
use crate::indexes;
use crate::internal::{ComputedHeightDerivedFull, ComputedVecValue, NumericValue};
@@ -51,17 +49,9 @@ where
indexes,
)?;
Ok(Self { height, rest: Box::new(rest) })
}
pub(crate) fn compute(
&mut self,
starting_indexes: &ComputeIndexes,
exit: &Exit,
mut compute: impl FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
) -> Result<()> {
compute(&mut self.height)?;
self.rest.compute_cumulative(starting_indexes, &self.height, exit)?;
Ok(())
Ok(Self {
height,
rest: Box::new(rest),
})
}
}

View File

@@ -1,58 +0,0 @@
//! LazyBinaryComputedFromHeightSum - block sum with lazy binary transform at height level.
//!
//! Height-level sum is lazy: `transform(source1[h], source2[h])`.
//! Day1 stats are stored since they require aggregation across heights.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2};
use crate::{
indexes,
internal::{ComputedHeightDerivedSum, ComputedVecValue, NumericValue},
};
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Block sum aggregation with lazy binary transform at height + computed derived indexes.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryComputedFromHeightSum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Lazy height-level sum; exposed as "sum" in the traversable tree.
    #[traversable(rename = "sum")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Stored derived indexes reading from the lazy height vec; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<ComputedHeightDerivedSum<T>>,
}
impl<T, S1T, S2T> LazyBinaryComputedFromHeightSum<T, S1T, S2T>
where
    T: NumericValue + JsonSchema,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Wires the lazy height transform, then imports the derived indexes
    /// that read from it.
    pub(crate) fn forced_import<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<Height, S1T>,
        source2: ReadableBoxedVec<Height, S2T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        let ver = version + VERSION;
        let height = LazyVecFrom2::transformed::<F>(name, ver, source1, source2);
        let rest = ComputedHeightDerivedSum::forced_import(
            name,
            height.read_only_boxed_clone(),
            ver,
            indexes,
        );
        Self {
            height,
            rest: Box::new(rest),
        }
    }
}

View File

@@ -1,49 +0,0 @@
//! Lazy unary transform from height with Sum aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedFromHeightSum, ComputedVecValue, LazyHeightDerivedSum, NumericValue,
};
/// Lazy unary transform from a height source, with Sum aggregation views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyFromHeightSum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy height-level values: `T = F(S1T)` per height.
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Derived sum views; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<LazyHeightDerivedSum<T, S1T>>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyFromHeightSum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds a lazy unary view over a stored Sum source.
    pub(crate) fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: ReadableBoxedVec<Height, S1T>,
        source: &ComputedFromHeightSum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        let rest = LazyHeightDerivedSum::from_derived_computed::<F>(name, ver, &source.rest);
        Self {
            height,
            rest: Box::new(rest),
        }
    }
}

View File

@@ -1,50 +0,0 @@
//! Lazy unary transform from height with Distribution aggregation.
//! Like LazyFromHeightFull but without sum/cumulative (for ratio/percentage metrics).
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedHeightDerivedFull, ComputedVecValue, LazyHeightDerivedDistribution, NumericValue,
};
/// Lazy unary transform with Distribution aggregation (no sum/cumulative);
/// suited to ratio/percentage metrics.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyFromHeightTransformDistribution<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Lazy height-level values; exposed as "base" in the traversable tree.
    #[traversable(rename = "base")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Derived distribution views; `Deref` target.
    #[deref]
    #[deref_mut]
    pub rest: Box<LazyHeightDerivedDistribution<T, S1T>>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyFromHeightTransformDistribution<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds a lazy unary view over a derived Full source.
    pub(crate) fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: ReadableBoxedVec<Height, S1T>,
        source: &ComputedHeightDerivedFull<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        let rest = LazyHeightDerivedDistribution::from_derived_computed::<F>(name, ver, source);
        Self {
            height,
            rest: Box::new(rest),
        }
    }
}

View File

@@ -1,79 +1,67 @@
mod binary_full;
mod binary_last;
mod binary_sum;
mod binary_sum_cum;
mod constant;
mod cum;
mod cum_rolling_full;
mod cum_rolling_sum;
mod distribution;
mod full;
mod last;
mod lazy_binary_computed_distribution;
mod lazy_binary_computed_full;
mod lazy_binary_computed_last;
mod lazy_binary_computed_sum;
mod lazy_binary_computed_sum_cum;
mod lazy_computed_full;
mod lazy_computed_sum_cum;
mod lazy_full;
mod lazy_last;
mod lazy_sum;
mod lazy_sum_cum;
mod lazy_transform_distribution;
mod lazy_value;
mod percentiles;
mod price;
mod ratio;
mod stddev;
mod stored_value_last;
mod sum;
mod sum_cum;
mod value_binary;
mod value_change;
mod value_ema;
mod value_full;
mod value_last;
mod value_lazy_binary_last;
mod value_lazy_computed_sum_cum;
mod value_lazy_computed_cum;
mod value_lazy_last;
mod value_lazy_sum_cum;
mod value_sum;
mod value_sum_cum;
pub use binary_full::*;
pub use binary_last::*;
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use constant::*;
pub use cum::*;
pub use cum_rolling_full::*;
pub use cum_rolling_sum::*;
pub use distribution::*;
pub use full::*;
pub use last::*;
pub use lazy_binary_computed_distribution::*;
pub use lazy_binary_computed_full::*;
pub use lazy_binary_computed_last::*;
pub use lazy_binary_computed_sum::*;
pub use lazy_binary_computed_sum_cum::*;
pub use lazy_computed_full::*;
pub use lazy_computed_sum_cum::*;
pub use lazy_full::*;
pub use lazy_last::*;
pub use lazy_sum::*;
pub use lazy_sum_cum::*;
pub use lazy_transform_distribution::*;
pub use lazy_value::*;
pub use percentiles::*;
pub use price::*;
pub use ratio::*;
pub use stddev::*;
pub use stored_value_last::*;
pub use sum::*;
pub use sum_cum::*;
pub use value_binary::*;
pub use value_change::*;
pub use value_ema::*;
pub use value_full::*;
pub use value_last::*;
pub use value_lazy_binary_last::*;
pub use value_lazy_computed_sum_cum::*;
pub use value_lazy_computed_cum::*;
pub use value_lazy_last::*;
pub use value_lazy_sum_cum::*;
pub use value_sum::*;
pub use value_sum_cum::*;

View File

@@ -1,49 +0,0 @@
//! ComputedFromHeight using Sum-only aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, ImportableVec, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::indexes;
use crate::internal::{ComputedHeightDerivedSum, ComputedVecValue, NumericValue};
/// Stored per-height vec with Sum-only derived aggregation views.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightSum<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Stored height-level values (primary source of truth).
    pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
    /// Derived sum views reading from `height`; `Deref` target.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: Box<ComputedHeightDerivedSum<T>>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the stored height vec, then wires the derived sum indexes
    /// that read from it.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let rest = ComputedHeightDerivedSum::forced_import(
            name,
            height.read_only_boxed_clone(),
            ver,
            indexes,
        );
        Ok(Self {
            height,
            rest: Box::new(rest),
        })
    }
}

View File

@@ -1,64 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableCloneableVec};
use crate::internal::{ComputedVecValue, LazyBinaryFromHeightSumCum, LazyValueFromHeightSumCum};
/// Lazy value vecs computed from two ValueFromHeightSumCum sources via binary transforms.
/// Used for computing coinbase = subsidy + fee.
#[derive(Clone, Traversable)]
pub struct ValueBinaryFromHeight {
    /// Combined sats values from the two sats sources.
    pub sats: LazyBinaryFromHeightSumCum<Sats, Sats, Sats>,
    /// Bitcoin view derived from the same two sats sources.
    pub btc: LazyBinaryFromHeightSumCum<Bitcoin, Sats, Sats>,
    /// Dollar view combined from the two usd sources.
    pub usd: LazyBinaryFromHeightSumCum<Dollars, Dollars, Dollars>,
}
impl ValueBinaryFromHeight {
    /// Combines two lazy value sources into sats/btc/usd outputs.
    ///
    /// Each denomination gets its own transform: `SatsF` and `DollarsF`
    /// combine like-typed sources, while `BitcoinF` produces bitcoin from
    /// the two sats sources.
    pub(crate) fn from_lazy<SatsF, BitcoinF, DollarsF, S1T, S2T>(
        name: &str,
        version: Version,
        source1: &LazyValueFromHeightSumCum<S1T, S2T>,
        source2: &LazyValueFromHeightSumCum<S1T, S2T>,
    ) -> Self
    where
        SatsF: BinaryTransform<Sats, Sats, Sats>,
        BitcoinF: BinaryTransform<Sats, Sats, Bitcoin>,
        DollarsF: BinaryTransform<Dollars, Dollars, Dollars>,
        S1T: ComputedVecValue + JsonSchema,
        S2T: ComputedVecValue + JsonSchema,
    {
        Self {
            sats: LazyBinaryFromHeightSumCum::from_derived::<SatsF>(
                name,
                version,
                source1.sats.height.read_only_boxed_clone(),
                source2.sats.height.read_only_boxed_clone(),
                &source1.sats.rest,
                &source2.sats.rest,
            ),
            // btc reads the same sats sources; the transform handles conversion.
            btc: LazyBinaryFromHeightSumCum::from_derived::<BitcoinF>(
                &format!("{name}_btc"),
                version,
                source1.sats.height.read_only_boxed_clone(),
                source2.sats.height.read_only_boxed_clone(),
                &source1.sats.rest,
                &source2.sats.rest,
            ),
            usd: LazyBinaryFromHeightSumCum::from_derived::<DollarsF>(
                &format!("{name}_usd"),
                version,
                source1.usd.height.read_only_boxed_clone(),
                source2.usd.height.read_only_boxed_clone(),
                &source1.usd.rest,
                &source2.usd.rest,
            ),
        }
    }
}

View File

@@ -0,0 +1,71 @@
//! Value type with stored sats height + cumulative, lazy btc + lazy dollars.
//!
//! Like LazyComputedValueFromHeightSumCum but with Cum (no old period aggregations).
//! - Sats: stored height + cumulative (ComputedFromHeightCum)
//! - BTC: lazy transform from sats (LazyFromHeightLast)
//! - USD: lazy binary (price × sats), LazyLast per index (no stored cumulative)
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightCum, LazyBinaryComputedFromHeightLast, LazyFromHeightLast,
PriceTimesSats, SatsToBitcoin,
},
prices,
};
/// Value wrapper with stored sats height + cumulative, lazy btc + lazy usd.
#[derive(Traversable)]
pub struct LazyComputedValueFromHeightCum<M: StorageMode = Rw> {
    /// Stored sats per block plus stored running total.
    pub sats: ComputedFromHeightCum<Sats, M>,
    /// Lazy bitcoin view transformed from the stored sats (SatsToBitcoin).
    pub btc: LazyFromHeightLast<Bitcoin, Sats>,
    /// Lazy usd view combining price and sats (PriceTimesSats).
    pub usd: LazyBinaryComputedFromHeightLast<Dollars, Dollars, Sats>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl LazyComputedValueFromHeightCum {
    /// Imports the stored sats vecs and wires the lazy btc/usd views on top.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let sats = ComputedFromHeightCum::forced_import(db, name, ver, indexes)?;
        // Lazy bitcoin view derived from the stored sats height vec.
        let btc = LazyFromHeightLast::from_computed::<SatsToBitcoin>(
            &format!("{name}_btc"),
            ver,
            sats.height.read_only_boxed_clone(),
            &sats,
        );
        // Lazy usd view: price combined with sats per height.
        let usd = LazyBinaryComputedFromHeightLast::forced_import::<PriceTimesSats>(
            &format!("{name}_usd"),
            ver,
            prices.usd.price.read_only_boxed_clone(),
            sats.height.read_only_boxed_clone(),
            indexes,
        );
        Ok(Self { sats, btc, usd })
    }

    /// Compute cumulative from already-filled sats height vec.
    pub(crate) fn compute_cumulative(&mut self, max_from: Height, exit: &Exit) -> Result<()> {
        self.sats.compute_cumulative(max_from, exit)
    }
}

View File

@@ -1,88 +0,0 @@
//! Value type with stored height + lazy dollars for SumCum pattern.
//!
//! Use this when:
//! - Sats height is stored (primary source of truth)
//! - Sats indexes are derived from height
//! - Bitcoin is lazy (transform from sats)
//! - Dollars height is lazy (price × sats), with stored indexes
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, LazyVecFrom2, Rw, StorageMode};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedFromHeightSumCum, LazyComputedFromHeightSumCum, LazyFromHeightSumCum,
PriceTimesSats, SatsToBitcoin,
},
prices,
};
/// Value wrapper with stored sats height + lazy dollars.
///
/// Sats height is stored (computed directly or from stateful loop).
/// Dollars height is lazy (price × sats).
/// Cumulative and day1 aggregates are stored for both.
#[derive(Traversable)]
pub struct LazyComputedValueFromHeightSumCum<M: StorageMode = Rw> {
    /// Stored sats per block with sum + cumulative views.
    pub sats: ComputedFromHeightSumCum<Sats, M>,
    /// Lazy bitcoin view transformed from the stored sats (SatsToBitcoin).
    pub btc: LazyFromHeightSumCum<Bitcoin, Sats>,
    /// Lazy usd height vec (price × sats) with stored derived indexes.
    pub usd: LazyComputedFromHeightSumCum<Dollars, Dollars, Sats, M>,
}
/// Version contribution of this wrapper, added to the caller-supplied version.
const VERSION: Version = Version::ZERO;
impl LazyComputedValueFromHeightSumCum {
    /// Imports the stored sats vecs, the lazy btc view, and the
    /// lazy-height / stored-index usd vecs.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let usd_name = format!("{name}_usd");
        let sats = ComputedFromHeightSumCum::forced_import(db, name, ver, indexes)?;
        let btc = LazyFromHeightSumCum::from_computed::<SatsToBitcoin>(
            &format!("{name}_btc"),
            ver,
            sats.height.read_only_boxed_clone(),
            &sats,
        );
        // Lazy usd height vec (price × sats) feeding the stored usd indexes.
        let usd_height = LazyVecFrom2::transformed::<PriceTimesSats>(
            &usd_name,
            ver,
            prices.usd.price.read_only_boxed_clone(),
            sats.height.read_only_boxed_clone(),
        );
        let usd =
            LazyComputedFromHeightSumCum::forced_import(db, &usd_name, ver, indexes, usd_height)?;
        Ok(Self { sats, btc, usd })
    }

    /// Compute cumulative from already-computed height.
    pub(crate) fn compute_cumulative(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.sats.compute_cumulative(starting_indexes, exit)?;
        self.usd.compute_cumulative(starting_indexes, exit)
    }
}

View File

@@ -1,62 +0,0 @@
//! Value type for Sum pattern from Height.
//!
//! Height-level USD value is lazy: `sats * price`.
//! Day1 sum is stored since it requires aggregation across heights with varying prices.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightSum, LazyBinaryComputedFromHeightSum, LazyFromHeightSum, SatsTimesPrice,
SatsToBitcoin,
},
prices,
};
/// Value wrapper for the Sum pattern: stored sats, lazy btc and usd views.
#[derive(Traversable)]
pub struct ValueFromHeightSum<M: StorageMode = Rw> {
    /// Stored sats per block with derived sum views.
    pub sats: ComputedFromHeightSum<Sats, M>,
    /// Lazy bitcoin view transformed from the stored sats (SatsToBitcoin).
    pub btc: LazyFromHeightSum<Bitcoin, Sats>,
    /// Lazy usd height values (sats × price) with stored derived indexes.
    pub usd: LazyBinaryComputedFromHeightSum<Dollars, Sats, Dollars>,
}
const VERSION: Version = Version::ONE; // Bumped for lazy height dollars
impl ValueFromHeightSum {
    /// Imports the stored sats sum vecs and wires lazy btc/usd views on top.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let sats = ComputedFromHeightSum::forced_import(db, name, ver, indexes)?;
        let btc = LazyFromHeightSum::from_computed::<SatsToBitcoin>(
            &format!("{name}_btc"),
            ver,
            sats.height.read_only_boxed_clone(),
            &sats,
        );
        // Lazy usd: sats combined with price per height; derived indexes stored.
        let usd = LazyBinaryComputedFromHeightSum::forced_import::<SatsTimesPrice>(
            &format!("{name}_usd"),
            ver,
            sats.height.read_only_boxed_clone(),
            prices.usd.price.read_only_boxed_clone(),
            indexes,
        );
        Ok(Self { sats, btc, usd })
    }
}

View File

@@ -1,7 +1,3 @@
mod lazy_distribution;
mod value_dollars;
mod value_full;
pub use lazy_distribution::*;
pub use value_dollars::*;
pub use value_full::*;

View File

@@ -1,204 +0,0 @@
//! Dollars from TxIndex with lazy height stats and stored day1.
//!
//! Height-level USD stats (min/max/avg/sum/percentiles) are lazy: `sats_stat * price`.
//! Height cumulative and day1 stats are stored since they require aggregation
//! across heights with varying prices.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Bitcoin, Day1, Day3, DifficultyEpoch, Dollars, HalvingEpoch, Height, Hour1, Hour4, Hour12,
Minute1, Minute5, Minute10, Minute30, Month1, Month3, Month6, Sats, TxIndex, Version, Week1,
Year1, Year10,
};
use vecdb::{
Database, Exit, LazyVecFrom3, ReadableBoxedVec, ReadableCloneableVec, Rw, StorageMode,
};
use crate::{
ComputeIndexes, indexes,
internal::{CumulativeVec, Full, LazyBinaryTransformFull, LazyFull, SatsTimesPrice},
};
/// Lazy dollars at TxIndex: `sats * price[height]`
pub type LazyDollarsTxIndex =
LazyVecFrom3<TxIndex, Dollars, TxIndex, Sats, TxIndex, Height, Height, Dollars>;
/// Lazy dollars height stats: `sats_height_stat * price`
pub type LazyDollarsHeightFull = LazyBinaryTransformFull<Height, Dollars, Sats, Dollars>;
/// Dollars derived from a per-transaction Sats source: lazy txindex/height
/// views plus a stored height-level cumulative.
///
/// Height-level stats (except cumulative) are lazy: `sats * price[height]`.
/// Cumulative at height level is stored since it requires summing historical values.
/// Every time-period and epoch field below is a lazy `LazyFull` view built
/// over the height stats and the stored cumulative (see `forced_import`).
#[derive(Traversable)]
#[traversable(merge)]
pub struct ValueDollarsFromTxFull<M: StorageMode = Rw> {
    /// Lazy per-transaction dollars; excluded from traversal output.
    #[traversable(skip)]
    pub txindex: LazyDollarsTxIndex,
    /// Lazy height-level stats: `sats_stat * price`.
    #[traversable(flatten)]
    pub height: LazyDollarsHeightFull,
    /// Stored running total at height level, exposed as "cumulative".
    #[traversable(rename = "cumulative")]
    pub height_cumulative: CumulativeVec<Height, Dollars, M>,
    pub minute1: LazyFull<Minute1, Dollars, Height, Height>,
    pub minute5: LazyFull<Minute5, Dollars, Height, Height>,
    pub minute10: LazyFull<Minute10, Dollars, Height, Height>,
    pub minute30: LazyFull<Minute30, Dollars, Height, Height>,
    pub hour1: LazyFull<Hour1, Dollars, Height, Height>,
    pub hour4: LazyFull<Hour4, Dollars, Height, Height>,
    pub hour12: LazyFull<Hour12, Dollars, Height, Height>,
    pub day1: LazyFull<Day1, Dollars, Height, Height>,
    pub day3: LazyFull<Day3, Dollars, Height, Height>,
    pub week1: LazyFull<Week1, Dollars, Height, Height>,
    pub month1: LazyFull<Month1, Dollars, Height, Height>,
    pub month3: LazyFull<Month3, Dollars, Height, Height>,
    pub month6: LazyFull<Month6, Dollars, Height, Height>,
    pub year1: LazyFull<Year1, Dollars, Height, Height>,
    pub year10: LazyFull<Year10, Dollars, Height, Height>,
    pub halvingepoch: LazyFull<HalvingEpoch, Dollars, Height, HalvingEpoch>,
    pub difficultyepoch: LazyFull<DifficultyEpoch, Dollars, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ONE; // Bumped for lazy height change

impl ValueDollarsFromTxFull {
    /// Wire up all lazy views and import the stored cumulative vec.
    ///
    /// `sats_height` supplies the height-level Sats stats and `height_to_price`
    /// the per-height price; `sats_txindex` + `txindex_to_height` feed the lazy
    /// per-transaction view. Only `height_cumulative` touches `db`.
    #[allow(clippy::too_many_arguments)]
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        sats_height: &Full<Height, Sats>,
        height_to_price: ReadableBoxedVec<Height, Dollars>,
        sats_txindex: ReadableBoxedVec<TxIndex, Sats>,
        txindex_to_height: ReadableBoxedVec<TxIndex, Height>,
    ) -> Result<Self> {
        let v = version + VERSION;
        // Lazy per-transaction dollars: price[height] * btc(sats).
        let txindex = create_lazy_txindex(
            name,
            v,
            sats_txindex,
            txindex_to_height,
            height_to_price.clone(),
        );
        // Lazy height stats: sats_stat * price
        let height = LazyBinaryTransformFull::from_full_and_source::<SatsTimesPrice>(
            name,
            v,
            sats_height,
            height_to_price.clone(),
        );
        // Stored cumulative - must be computed by summing historical sum*price
        let height_cumulative = CumulativeVec::forced_import(db, name, v)?;
        // Time-period view anchored on each period's first height.
        macro_rules! period {
            ($idx:ident) => {
                LazyFull::from_height_source(
                    name,
                    v,
                    height.boxed_sum(),
                    height_cumulative.read_only_boxed_clone(),
                    indexes.$idx.first_height.read_only_boxed_clone(),
                )
            };
        }
        // Epoch view aggregated from the individual height stats.
        // NOTE(review): `height.boxed_average()` is passed twice (3rd and 8th
        // arguments); confirm against `from_stats_aggregate`'s parameter list
        // that the second occurrence is intentional and not meant to be a
        // different stat.
        macro_rules! epoch {
            ($idx:ident) => {
                LazyFull::from_stats_aggregate(
                    name,
                    v,
                    height.boxed_average(),
                    height.boxed_min(),
                    height.boxed_max(),
                    height.boxed_sum(),
                    height_cumulative.read_only_boxed_clone(),
                    height.boxed_average(),
                    indexes.$idx.identity.read_only_boxed_clone(),
                )
            };
        }
        let minute1 = period!(minute1);
        let minute5 = period!(minute5);
        let minute10 = period!(minute10);
        let minute30 = period!(minute30);
        let hour1 = period!(hour1);
        let hour4 = period!(hour4);
        let hour12 = period!(hour12);
        let day1 = period!(day1);
        let day3 = period!(day3);
        let week1 = period!(week1);
        let month1 = period!(month1);
        let month3 = period!(month3);
        let month6 = period!(month6);
        let year1 = period!(year1);
        let year10 = period!(year10);
        let halvingepoch = epoch!(halvingepoch);
        let difficultyepoch = epoch!(difficultyepoch);
        Ok(Self {
            txindex,
            height,
            height_cumulative,
            minute1,
            minute5,
            minute10,
            minute30,
            hour1,
            hour4,
            hour12,
            day1,
            day3,
            week1,
            month1,
            month3,
            month6,
            year1,
            year10,
            halvingepoch,
            difficultyepoch,
        })
    }

    /// Compute the stored cumulative field from the lazy height sums.
    ///
    /// Only `height_cumulative` is filled here; everything else is lazy.
    /// This iterates heights only (not all transactions per block), which is
    /// why it is much faster than the previous eager approach.
    pub(crate) fn derive_from(
        &mut self,
        _indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        // Compute height cumulative by summing lazy height.sum values
        self.height_cumulative.0.compute_cumulative(
            starting_indexes.height,
            &self.height.sum,
            exit,
        )?;
        Ok(())
    }
}
/// Build the lazy per-transaction dollars view: `price[height] * btc(sats)`.
///
/// The closure receives `(index, sats, height, price)`; only the sats amount
/// and the block's closing price are used to produce the dollar value.
fn create_lazy_txindex(
    name: &str,
    version: Version,
    sats_txindex: ReadableBoxedVec<TxIndex, Sats>,
    txindex_to_height: ReadableBoxedVec<TxIndex, Height>,
    height_to_price: ReadableBoxedVec<Height, Dollars>,
) -> LazyDollarsTxIndex {
    let vec_name = format!("{name}_txindex");
    LazyVecFrom3::init(
        &vec_name,
        version,
        sats_txindex,
        txindex_to_height,
        height_to_price,
        |_i, amount, _h, price| price * Bitcoin::from(amount),
    )
}

View File

@@ -1,63 +0,0 @@
//! ValueFromTxFull - eager txindex Sats source + ValueTxDerivedFull (sats/bitcoin/dollars).
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{Sats, TxIndex, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::{ComputeIndexes, indexes, internal::ValueTxDerivedFull, prices};
const VERSION: Version = Version::ZERO;

/// Eager per-transaction Sats source plus derived sats/bitcoin/dollars views.
///
/// `base` is the stored TxIndex -> Sats vec; `indexes` (the deref target)
/// holds everything derived from it.
#[derive(Deref, DerefMut, Traversable)]
pub struct ValueFromTxFull<M: StorageMode = Rw> {
    /// Stored source vec, exposed in traversal under the name "txindex".
    #[traversable(rename = "txindex")]
    pub base: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
    /// Derived views; `Deref`/`DerefMut` forward here.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub indexes: ValueTxDerivedFull<M>,
}
impl ValueFromTxFull {
    /// Import the stored per-transaction Sats vec, then build the derived
    /// sats/bitcoin/dollars views on top of it.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        indexer: &Indexer,
        prices: &prices::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let txindex = EagerVec::forced_import(db, name, v)?;
        let derived =
            ValueTxDerivedFull::forced_import(db, name, v, indexes, indexer, prices, &txindex)?;
        Ok(Self {
            base: txindex,
            indexes: derived,
        })
    }

    /// Derive from source, skipping first N transactions per block from all calculations.
    ///
    /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats.
    pub(crate) fn derive_from_with_skip(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        skip_count: usize,
    ) -> Result<()> {
        // Forward to the derived views, feeding them the stored base vec.
        self.indexes.derive_from_with_skip(
            indexer,
            indexes,
            starting_indexes,
            &self.base,
            exit,
            skip_count,
        )
    }
}

View File

@@ -1,44 +1,53 @@
//! Lazy binary transform for derived block with Last aggregation only.
//!
//! Newtype on `Indexes` with `LazyBinaryTransformLast` per field.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30,
Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableCloneableVec};
use crate::internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedVecValue,
LazyBinaryTransformLast, LazyFromHeightLast, NumericValue,
use crate::{
indexes_from,
internal::{
ComputedFromHeightLast, ComputedFromHeightSumCum, ComputedVecValue, Indexes,
LazyBinaryTransformLast, LazyFromHeightLast, NumericValue,
},
};
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryHeightDerivedLast<T, S1T = T, S2T = T>
pub type LazyBinaryHeightDerivedLastInner<T, S1T, S2T> = Indexes<
LazyBinaryTransformLast<Minute1, T, S1T, S2T>,
LazyBinaryTransformLast<Minute5, T, S1T, S2T>,
LazyBinaryTransformLast<Minute10, T, S1T, S2T>,
LazyBinaryTransformLast<Minute30, T, S1T, S2T>,
LazyBinaryTransformLast<Hour1, T, S1T, S2T>,
LazyBinaryTransformLast<Hour4, T, S1T, S2T>,
LazyBinaryTransformLast<Hour12, T, S1T, S2T>,
LazyBinaryTransformLast<Day1, T, S1T, S2T>,
LazyBinaryTransformLast<Day3, T, S1T, S2T>,
LazyBinaryTransformLast<Week1, T, S1T, S2T>,
LazyBinaryTransformLast<Month1, T, S1T, S2T>,
LazyBinaryTransformLast<Month3, T, S1T, S2T>,
LazyBinaryTransformLast<Month6, T, S1T, S2T>,
LazyBinaryTransformLast<Year1, T, S1T, S2T>,
LazyBinaryTransformLast<Year10, T, S1T, S2T>,
LazyBinaryTransformLast<HalvingEpoch, T, S1T, S2T>,
LazyBinaryTransformLast<DifficultyEpoch, T, S1T, S2T>,
>;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyBinaryHeightDerivedLast<T, S1T = T, S2T = T>(
pub LazyBinaryHeightDerivedLastInner<T, S1T, S2T>,
)
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
pub minute1: LazyBinaryTransformLast<Minute1, T, S1T, S2T>,
pub minute5: LazyBinaryTransformLast<Minute5, T, S1T, S2T>,
pub minute10: LazyBinaryTransformLast<Minute10, T, S1T, S2T>,
pub minute30: LazyBinaryTransformLast<Minute30, T, S1T, S2T>,
pub hour1: LazyBinaryTransformLast<Hour1, T, S1T, S2T>,
pub hour4: LazyBinaryTransformLast<Hour4, T, S1T, S2T>,
pub hour12: LazyBinaryTransformLast<Hour12, T, S1T, S2T>,
pub day1: LazyBinaryTransformLast<Day1, T, S1T, S2T>,
pub day3: LazyBinaryTransformLast<Day3, T, S1T, S2T>,
pub week1: LazyBinaryTransformLast<Week1, T, S1T, S2T>,
pub month1: LazyBinaryTransformLast<Month1, T, S1T, S2T>,
pub month3: LazyBinaryTransformLast<Month3, T, S1T, S2T>,
pub month6: LazyBinaryTransformLast<Month6, T, S1T, S2T>,
pub year1: LazyBinaryTransformLast<Year1, T, S1T, S2T>,
pub year10: LazyBinaryTransformLast<Year10, T, S1T, S2T>,
pub halvingepoch: LazyBinaryTransformLast<HalvingEpoch, T, S1T, S2T>,
pub difficultyepoch: LazyBinaryTransformLast<DifficultyEpoch, T, S1T, S2T>,
}
S2T: ComputedVecValue;
const VERSION: Version = Version::ZERO;
@@ -71,25 +80,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
pub(crate) fn from_computed_last<F: BinaryTransform<S1T, S2T, T>>(
@@ -115,25 +106,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
pub(crate) fn from_lazy_block_last_and_block_last<F, S1SourceT>(
@@ -160,25 +133,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
pub(crate) fn from_block_last_and_lazy_block_last<F, S2SourceT>(
@@ -205,24 +160,6 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
}

View File

@@ -1,183 +0,0 @@
//! Lazy aggregated binary transform for Sum-only pattern across all time periods.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30,
Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableCloneableVec};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedSum, ComputedVecValue, LazyBinaryTransformSum,
LazyFromHeightLast, NumericValue,
};
const VERSION: Version = Version::ZERO;

/// Lazy `Sum` aggregation of a binary transform `F(S1T, S2T) -> T`,
/// materialized as one `LazyBinaryTransformSum` per time period and epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryHeightDerivedSum<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    pub minute1: LazyBinaryTransformSum<Minute1, T, S1T, S2T>,
    pub minute5: LazyBinaryTransformSum<Minute5, T, S1T, S2T>,
    pub minute10: LazyBinaryTransformSum<Minute10, T, S1T, S2T>,
    pub minute30: LazyBinaryTransformSum<Minute30, T, S1T, S2T>,
    pub hour1: LazyBinaryTransformSum<Hour1, T, S1T, S2T>,
    pub hour4: LazyBinaryTransformSum<Hour4, T, S1T, S2T>,
    pub hour12: LazyBinaryTransformSum<Hour12, T, S1T, S2T>,
    pub day1: LazyBinaryTransformSum<Day1, T, S1T, S2T>,
    pub day3: LazyBinaryTransformSum<Day3, T, S1T, S2T>,
    pub week1: LazyBinaryTransformSum<Week1, T, S1T, S2T>,
    pub month1: LazyBinaryTransformSum<Month1, T, S1T, S2T>,
    pub month3: LazyBinaryTransformSum<Month3, T, S1T, S2T>,
    pub month6: LazyBinaryTransformSum<Month6, T, S1T, S2T>,
    pub year1: LazyBinaryTransformSum<Year1, T, S1T, S2T>,
    pub year10: LazyBinaryTransformSum<Year10, T, S1T, S2T>,
    pub halvingepoch: LazyBinaryTransformSum<HalvingEpoch, T, S1T, S2T>,
    pub difficultyepoch: LazyBinaryTransformSum<DifficultyEpoch, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyBinaryHeightDerivedSum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: NumericValue + JsonSchema,
    S2T: NumericValue + JsonSchema,
{
    /// Create from two `ComputedHeightDerivedSum` sources, pairing the same
    /// period of both sources under transform `F`.
    pub(crate) fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedHeightDerivedSum<S1T>,
        source2: &ComputedHeightDerivedSum<S2T>,
    ) -> Self {
        let v = version + VERSION;
        // One lazy transform per period/epoch.
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSum::from_boxed::<F>(
                    name,
                    v,
                    source1.$p.read_only_boxed_clone(),
                    source2.$p.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }

    /// Create from two LazyBinaryHeightDerivedSum sources.
    ///
    /// The source element types (`S1aT`/`S1bT`, `S2aT`/`S2bT`) are independent
    /// of this type's own transform inputs.
    pub(crate) fn from_binary<F, S1aT, S1bT, S2aT, S2bT>(
        name: &str,
        version: Version,
        source1: &LazyBinaryHeightDerivedSum<S1T, S1aT, S1bT>,
        source2: &LazyBinaryHeightDerivedSum<S2T, S2aT, S2bT>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1aT: ComputedVecValue + JsonSchema,
        S1bT: ComputedVecValue + JsonSchema,
        S2aT: ComputedVecValue + JsonSchema,
        S2bT: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSum::from_boxed::<F>(
                    name,
                    v,
                    source1.$p.read_only_boxed_clone(),
                    source2.$p.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }

    /// Create from a SumCum source (using only sum) and a LazyLast source.
    pub(crate) fn from_sumcum_lazy_last<F, S2ST>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightSumCum<S1T>,
        source2: &LazyFromHeightLast<S2T, S2ST>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S2ST: ComputedVecValue + JsonSchema,
    {
        let v = version + VERSION;
        // Pairs source1's per-period `sum` vec with source2's last value.
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSum::from_boxed::<F>(
                    name,
                    v,
                    source1.$p.sum.read_only_boxed_clone(),
                    source2.$p.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }
}

View File

@@ -1,184 +0,0 @@
//! Lazy aggregated SumCum - binary transform version.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour4, Hour12, Minute1, Minute5, Minute10,
Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableCloneableVec};
use crate::internal::{
ComputedFromHeightSumCum, ComputedHeightDerivedFull, ComputedHeightDerivedSumCum,
ComputedVecValue, LazyBinaryTransformSumCum, LazyFromHeightLast, NumericValue, TxDerivedFull,
};
const VERSION: Version = Version::ZERO;

/// Lazy `Sum` + cumulative aggregation of a binary transform
/// `F(S1T, S2T) -> T`, one `LazyBinaryTransformSumCum` per period and epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyBinaryHeightDerivedSumCum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    pub minute1: LazyBinaryTransformSumCum<Minute1, T, S1T, S2T>,
    pub minute5: LazyBinaryTransformSumCum<Minute5, T, S1T, S2T>,
    pub minute10: LazyBinaryTransformSumCum<Minute10, T, S1T, S2T>,
    pub minute30: LazyBinaryTransformSumCum<Minute30, T, S1T, S2T>,
    pub hour1: LazyBinaryTransformSumCum<Hour1, T, S1T, S2T>,
    pub hour4: LazyBinaryTransformSumCum<Hour4, T, S1T, S2T>,
    pub hour12: LazyBinaryTransformSumCum<Hour12, T, S1T, S2T>,
    pub day1: LazyBinaryTransformSumCum<Day1, T, S1T, S2T>,
    pub day3: LazyBinaryTransformSumCum<Day3, T, S1T, S2T>,
    pub week1: LazyBinaryTransformSumCum<Week1, T, S1T, S2T>,
    pub month1: LazyBinaryTransformSumCum<Month1, T, S1T, S2T>,
    pub month3: LazyBinaryTransformSumCum<Month3, T, S1T, S2T>,
    pub month6: LazyBinaryTransformSumCum<Month6, T, S1T, S2T>,
    pub year1: LazyBinaryTransformSumCum<Year1, T, S1T, S2T>,
    pub year10: LazyBinaryTransformSumCum<Year10, T, S1T, S2T>,
    pub halvingepoch: LazyBinaryTransformSumCum<HalvingEpoch, T, S1T, S2T>,
    pub difficultyepoch: LazyBinaryTransformSumCum<DifficultyEpoch, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyBinaryHeightDerivedSumCum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Create from two ComputedHeightDerivedSumCum sources.
    pub(crate) fn from_computed_sum_raw<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedHeightDerivedSumCum<S1T>,
        source2: &ComputedHeightDerivedSumCum<S2T>,
    ) -> Self {
        let v = version + VERSION;
        // Each period pairs both sources' `sum` and `cumulative` vecs.
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSumCum::from_sources_sum_raw::<F>(
                    name,
                    v,
                    source1.$p.sum.read_only_boxed_clone(),
                    source2.$p.sum.read_only_boxed_clone(),
                    source1.$p.cumulative.read_only_boxed_clone(),
                    source2.$p.cumulative.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }

    /// Create from ComputedHeightDerivedFull + TxDerivedFull sources.
    pub(crate) fn from_full_sources<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedHeightDerivedFull<S1T>,
        source2: &TxDerivedFull<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: PartialOrd,
    {
        let v = version + VERSION;
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
                    name, v, &source1.$p, &source2.$p,
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }

    // --- Methods accepting SumCum + LazyLast sources ---

    /// Create from a SumCum source (sum + cumulative) and a lazy last-value source.
    pub(crate) fn from_computed_lazy_last<F, S2ST>(
        name: &str,
        version: Version,
        source1: &ComputedFromHeightSumCum<S1T>,
        source2: &LazyFromHeightLast<S2T, S2ST>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1T: PartialOrd,
        S2T: NumericValue,
        S2ST: ComputedVecValue + schemars::JsonSchema,
    {
        let v = version + VERSION;
        macro_rules! period {
            ($p:ident) => {
                LazyBinaryTransformSumCum::from_sources_last_sum_raw::<F>(
                    name,
                    v,
                    source1.$p.sum.read_only_boxed_clone(),
                    source1.$p.cumulative.read_only_boxed_clone(),
                    source2.$p.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }
}

View File

@@ -0,0 +1,83 @@
//! ComputedHeightDerivedCumFull - LazyLast index views + cumulative (from height) + RollingFull.
//!
//! For metrics derived from indexer sources (no stored height vec).
//! Cumulative gets its own ComputedFromHeightLast so it has LazyLast index views too.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableBoxedVec, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightLast, ComputedHeightDerivedLast, NumericValue, RollingFull, WindowStarts,
},
};
/// Lazy last-value index views plus stored cumulative and rolling aggregates,
/// for metrics derived from indexer sources (no stored height vec).
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedCumFull<T, M: StorageMode = Rw>
where
    T: NumericValue + JsonSchema,
{
    /// Lazy last-value views per time period/epoch.
    #[traversable(flatten)]
    pub last: ComputedHeightDerivedLast<T>,
    /// Stored height-level cumulative, with its own lazy index views.
    #[traversable(flatten)]
    pub cumulative: ComputedFromHeightLast<T, M>,
    /// Stored rolling aggregates, filled in `compute` over `WindowStarts`.
    #[traversable(flatten)]
    pub rolling: RollingFull<T, M>,
}
const VERSION: Version = Version::ZERO;

impl<T> ComputedHeightDerivedCumFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Build the lazy `last` views and import the stored cumulative and
    /// rolling vecs from `db`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        height_source: ReadableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let last = ComputedHeightDerivedLast::forced_import(name, height_source, v, indexes);
        // Cumulative is stored under "<name>_cumulative" and gets its own
        // lazy index views via ComputedFromHeightLast.
        let cumulative = ComputedFromHeightLast::forced_import(
            db,
            &format!("{name}_cumulative"),
            v,
            indexes,
        )?;
        let rolling = RollingFull::forced_import(db, name, v, indexes)?;
        Ok(Self {
            last,
            cumulative,
            rolling,
        })
    }

    /// Fill the stored parts — the cumulative height vec and the rolling
    /// windows — from `height_source`, starting at `max_from`.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        height_source: &impl ReadableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()>
    where
        T: From<f64> + Default + SubAssign + Copy + Ord,
        f64: From<T>,
    {
        self.cumulative
            .height
            .compute_cumulative(max_from, height_source, exit)?;
        self.rolling.compute(max_from, windows, height_source, exit)?;
        Ok(())
    }
}

View File

@@ -1,97 +0,0 @@
//! ComputedHeightDerivedFirst - lazy time periods + epochs (first value).
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazyFirst, NumericValue},
};
/// Lazy first-value views over a height-indexed source, one `LazyFirst` per
/// time period and epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub minute1: LazyFirst<Minute1, T, Height, Height>,
    pub minute5: LazyFirst<Minute5, T, Height, Height>,
    pub minute10: LazyFirst<Minute10, T, Height, Height>,
    pub minute30: LazyFirst<Minute30, T, Height, Height>,
    pub hour1: LazyFirst<Hour1, T, Height, Height>,
    pub hour4: LazyFirst<Hour4, T, Height, Height>,
    pub hour12: LazyFirst<Hour12, T, Height, Height>,
    pub day1: LazyFirst<Day1, T, Height, Height>,
    pub day3: LazyFirst<Day3, T, Height, Height>,
    pub week1: LazyFirst<Week1, T, Height, Height>,
    pub month1: LazyFirst<Month1, T, Height, Height>,
    pub month3: LazyFirst<Month3, T, Height, Height>,
    pub month6: LazyFirst<Month6, T, Height, Height>,
    pub year1: LazyFirst<Year1, T, Height, Height>,
    pub year10: LazyFirst<Year10, T, Height, Height>,
    pub halvingepoch: LazyFirst<HalvingEpoch, T, Height, HalvingEpoch>,
    pub difficultyepoch: LazyFirst<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;

impl<T> ComputedHeightDerivedFirst<T>
where
    T: NumericValue + JsonSchema,
{
    /// Build the lazy first-value view for every period/epoch from a
    /// height-indexed source; no storage is touched.
    pub(crate) fn forced_import(
        name: &str,
        height_source: ReadableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Self {
        let v = version + VERSION;
        // Time periods: take the value at each period's first height.
        macro_rules! period {
            ($idx:ident) => {
                LazyFirst::from_height_source(
                    name,
                    v,
                    height_source.clone(),
                    indexes.$idx.first_height.read_only_boxed_clone(),
                )
            };
        }
        // Epochs: map through the epoch's identity index instead.
        macro_rules! epoch {
            ($idx:ident) => {
                LazyFirst::from_source(
                    name,
                    v,
                    height_source.clone(),
                    indexes.$idx.identity.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: epoch!(halvingepoch),
            difficultyepoch: epoch!(difficultyepoch),
        }
    }
}

View File

@@ -1,42 +1,48 @@
//! ComputedHeightDerivedLast - lazy time periods + epochs (last value).
//!
//! Newtype on `Indexes` with `LazyLast` per field.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazyLast, NumericValue, SparseLast},
indexes_from,
internal::{ComputedVecValue, Indexes, LazyLast, NumericValue},
};
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedLast<T>
/// All 17 time-period/epoch `LazyLast` vecs, packed as a newtype on `Indexes`.
pub type ComputedHeightDerivedLastInner<T> = Indexes<
LazyLast<Minute1, T, Height, Height>,
LazyLast<Minute5, T, Height, Height>,
LazyLast<Minute10, T, Height, Height>,
LazyLast<Minute30, T, Height, Height>,
LazyLast<Hour1, T, Height, Height>,
LazyLast<Hour4, T, Height, Height>,
LazyLast<Hour12, T, Height, Height>,
LazyLast<Day1, T, Height, Height>,
LazyLast<Day3, T, Height, Height>,
LazyLast<Week1, T, Height, Height>,
LazyLast<Month1, T, Height, Height>,
LazyLast<Month3, T, Height, Height>,
LazyLast<Month6, T, Height, Height>,
LazyLast<Year1, T, Height, Height>,
LazyLast<Year10, T, Height, Height>,
LazyLast<HalvingEpoch, T, Height, HalvingEpoch>,
LazyLast<DifficultyEpoch, T, Height, DifficultyEpoch>,
>;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct ComputedHeightDerivedLast<T>(pub ComputedHeightDerivedLastInner<T>)
where
T: ComputedVecValue + PartialOrd + JsonSchema,
{
pub minute1: LazyLast<Minute1, T, Height, Height>,
pub minute5: LazyLast<Minute5, T, Height, Height>,
pub minute10: LazyLast<Minute10, T, Height, Height>,
pub minute30: LazyLast<Minute30, T, Height, Height>,
pub hour1: LazyLast<Hour1, T, Height, Height>,
pub hour4: LazyLast<Hour4, T, Height, Height>,
pub hour12: LazyLast<Hour12, T, Height, Height>,
pub day1: LazyLast<Day1, T, Height, Height>,
pub day3: LazyLast<Day3, T, Height, Height>,
pub week1: LazyLast<Week1, T, Height, Height>,
pub month1: LazyLast<Month1, T, Height, Height>,
pub month3: LazyLast<Month3, T, Height, Height>,
pub month6: LazyLast<Month6, T, Height, Height>,
pub year1: LazyLast<Year1, T, Height, Height>,
pub year10: LazyLast<Year10, T, Height, Height>,
pub halvingepoch: LazyLast<HalvingEpoch, T, Height, HalvingEpoch>,
pub difficultyepoch: LazyLast<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
T: ComputedVecValue + PartialOrd + JsonSchema;
const VERSION: Version = Version::ZERO;
@@ -74,24 +80,6 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
}
Self(indexes_from!(period, epoch))
}
}

View File

@@ -1,96 +0,0 @@
//! Lazy aggregated Distribution for block-level sources.
//! Like LazyHeightDerivedFull but without sum/cumulative (for ratio/percentage metrics).
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30,
Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedHeightDerivedFull, ComputedVecValue, LazyTransformDistribution, NumericValue,
};
/// Lazy distribution views (avg/min/max/percentiles — no sum/cumulative) for
/// block-level sources; used for ratio/percentage metrics.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyHeightDerivedDistribution<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub minute1: LazyTransformDistribution<Minute1, T, S1T>,
    pub minute5: LazyTransformDistribution<Minute5, T, S1T>,
    pub minute10: LazyTransformDistribution<Minute10, T, S1T>,
    pub minute30: LazyTransformDistribution<Minute30, T, S1T>,
    pub hour1: LazyTransformDistribution<Hour1, T, S1T>,
    pub hour4: LazyTransformDistribution<Hour4, T, S1T>,
    pub hour12: LazyTransformDistribution<Hour12, T, S1T>,
    pub day1: LazyTransformDistribution<Day1, T, S1T>,
    pub day3: LazyTransformDistribution<Day3, T, S1T>,
    pub week1: LazyTransformDistribution<Week1, T, S1T>,
    pub month1: LazyTransformDistribution<Month1, T, S1T>,
    pub month3: LazyTransformDistribution<Month3, T, S1T>,
    pub month6: LazyTransformDistribution<Month6, T, S1T>,
    pub year1: LazyTransformDistribution<Year1, T, S1T>,
    pub year10: LazyTransformDistribution<Year10, T, S1T>,
    pub halvingepoch: LazyTransformDistribution<HalvingEpoch, T, S1T>,
    pub difficultyepoch: LazyTransformDistribution<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyHeightDerivedDistribution<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Build lazy distribution views by applying `F` to each distribution
    /// stat (average, min, max, pct10/25/median/75/90) of `source`, period
    /// by period.
    pub(crate) fn from_derived_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &ComputedHeightDerivedFull<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let v = version + VERSION;
        macro_rules! period {
            ($p:ident) => {
                LazyTransformDistribution::from_boxed::<F>(
                    name,
                    v,
                    source.$p.average.read_only_boxed_clone(),
                    source.$p.min.read_only_boxed_clone(),
                    source.$p.max.read_only_boxed_clone(),
                    source.$p.percentiles.pct10.read_only_boxed_clone(),
                    source.$p.percentiles.pct25.read_only_boxed_clone(),
                    source.$p.percentiles.median.read_only_boxed_clone(),
                    source.$p.percentiles.pct75.read_only_boxed_clone(),
                    source.$p.percentiles.pct90.read_only_boxed_clone(),
                )
            };
        }
        Self {
            minute1: period!(minute1),
            minute5: period!(minute5),
            minute10: period!(minute10),
            minute30: period!(minute30),
            hour1: period!(hour1),
            hour4: period!(hour4),
            hour12: period!(hour12),
            day1: period!(day1),
            day3: period!(day3),
            week1: period!(week1),
            month1: period!(month1),
            month3: period!(month3),
            month6: period!(month6),
            year1: period!(year1),
            year10: period!(year10),
            halvingepoch: period!(halvingepoch),
            difficultyepoch: period!(difficultyepoch),
        }
    }
}

View File

@@ -1,43 +1,50 @@
//! Lazy aggregated Last for block-level sources.
//!
//! Newtype on `Indexes` with `LazyTransformLast` per field.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30,
Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedFromHeightLast, ComputedHeightDerivedLast, ComputedVecValue,
LazyBinaryHeightDerivedLast, LazyTransformLast, NumericValue,
use crate::{
indexes_from,
internal::{
ComputedFromHeightLast, ComputedHeightDerivedLast, ComputedVecValue, Indexes,
LazyBinaryHeightDerivedLast, LazyTransformLast, NumericValue,
},
};
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyHeightDerivedLast<T, S1T = T>
pub type LazyHeightDerivedLastInner<T, S1T> = Indexes<
LazyTransformLast<Minute1, T, S1T>,
LazyTransformLast<Minute5, T, S1T>,
LazyTransformLast<Minute10, T, S1T>,
LazyTransformLast<Minute30, T, S1T>,
LazyTransformLast<Hour1, T, S1T>,
LazyTransformLast<Hour4, T, S1T>,
LazyTransformLast<Hour12, T, S1T>,
LazyTransformLast<Day1, T, S1T>,
LazyTransformLast<Day3, T, S1T>,
LazyTransformLast<Week1, T, S1T>,
LazyTransformLast<Month1, T, S1T>,
LazyTransformLast<Month3, T, S1T>,
LazyTransformLast<Month6, T, S1T>,
LazyTransformLast<Year1, T, S1T>,
LazyTransformLast<Year10, T, S1T>,
LazyTransformLast<HalvingEpoch, T, S1T>,
LazyTransformLast<DifficultyEpoch, T, S1T>,
>;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct LazyHeightDerivedLast<T, S1T = T>(pub LazyHeightDerivedLastInner<T, S1T>)
where
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
{
pub minute1: LazyTransformLast<Minute1, T, S1T>,
pub minute5: LazyTransformLast<Minute5, T, S1T>,
pub minute10: LazyTransformLast<Minute10, T, S1T>,
pub minute30: LazyTransformLast<Minute30, T, S1T>,
pub hour1: LazyTransformLast<Hour1, T, S1T>,
pub hour4: LazyTransformLast<Hour4, T, S1T>,
pub hour12: LazyTransformLast<Hour12, T, S1T>,
pub day1: LazyTransformLast<Day1, T, S1T>,
pub day3: LazyTransformLast<Day3, T, S1T>,
pub week1: LazyTransformLast<Week1, T, S1T>,
pub month1: LazyTransformLast<Month1, T, S1T>,
pub month3: LazyTransformLast<Month3, T, S1T>,
pub month6: LazyTransformLast<Month6, T, S1T>,
pub year1: LazyTransformLast<Year1, T, S1T>,
pub year10: LazyTransformLast<Year10, T, S1T>,
pub halvingepoch: LazyTransformLast<HalvingEpoch, T, S1T>,
pub difficultyepoch: LazyTransformLast<DifficultyEpoch, T, S1T>,
}
S1T: ComputedVecValue;
const VERSION: Version = Version::ZERO;
@@ -62,25 +69,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
pub(crate) fn from_derived_computed<F: UnaryTransform<S1T, T>>(
@@ -99,25 +88,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
/// Create by unary-transforming a LazyHeightDerivedLast source.
@@ -138,25 +109,7 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
/// Create by unary-transforming a LazyBinaryHeightDerivedLast source.
@@ -178,25 +131,6 @@ where
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
Self(indexes_from!(period))
}
}

View File

@@ -1,84 +0,0 @@
//! Lazy aggregated Sum for block-level sources.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Hour1, Hour12, Hour4, Minute1, Minute10, Minute30,
Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedHeightDerivedSum, ComputedVecValue, LazyTransformSum, NumericValue,
};
/// Lazily transformed Sum aggregates for every time period and epoch,
/// derived from block-level sources (see the module doc).
///
/// Each field wraps the corresponding period vector of a
/// `ComputedHeightDerivedSum` behind a `LazyTransformSum` mapping
/// source values `S1T` to output values `T`.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyHeightDerivedSum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    // Calendar periods, finest to coarsest.
    pub minute1: LazyTransformSum<Minute1, T, S1T>,
    pub minute5: LazyTransformSum<Minute5, T, S1T>,
    pub minute10: LazyTransformSum<Minute10, T, S1T>,
    pub minute30: LazyTransformSum<Minute30, T, S1T>,
    pub hour1: LazyTransformSum<Hour1, T, S1T>,
    pub hour4: LazyTransformSum<Hour4, T, S1T>,
    pub hour12: LazyTransformSum<Hour12, T, S1T>,
    pub day1: LazyTransformSum<Day1, T, S1T>,
    pub day3: LazyTransformSum<Day3, T, S1T>,
    pub week1: LazyTransformSum<Week1, T, S1T>,
    pub month1: LazyTransformSum<Month1, T, S1T>,
    pub month3: LazyTransformSum<Month3, T, S1T>,
    pub month6: LazyTransformSum<Month6, T, S1T>,
    pub year1: LazyTransformSum<Year1, T, S1T>,
    pub year10: LazyTransformSum<Year10, T, S1T>,
    // Protocol epochs.
    pub halvingepoch: LazyTransformSum<HalvingEpoch, T, S1T>,
    pub difficultyepoch: LazyTransformSum<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyHeightDerivedSum<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub(crate) fn from_derived_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &ComputedHeightDerivedSum<S1T>,
) -> Self
where
S1T: NumericValue,
{
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyTransformSum::from_boxed::<F>(name, v, source.$p.read_only_boxed_clone())
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
}
}

View File

@@ -1,97 +0,0 @@
//! ComputedHeightDerivedMax - lazy time periods + epochs (max value).
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazyMax, NumericValue},
};
/// Lazy per-period and per-epoch maximum over a height-level source.
///
/// Calendar period fields map through `Height`; the two epoch fields use
/// the epoch type itself as the final type parameter of `LazyMax`.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Calendar periods, finest to coarsest.
    pub minute1: LazyMax<Minute1, T, Height, Height>,
    pub minute5: LazyMax<Minute5, T, Height, Height>,
    pub minute10: LazyMax<Minute10, T, Height, Height>,
    pub minute30: LazyMax<Minute30, T, Height, Height>,
    pub hour1: LazyMax<Hour1, T, Height, Height>,
    pub hour4: LazyMax<Hour4, T, Height, Height>,
    pub hour12: LazyMax<Hour12, T, Height, Height>,
    pub day1: LazyMax<Day1, T, Height, Height>,
    pub day3: LazyMax<Day3, T, Height, Height>,
    pub week1: LazyMax<Week1, T, Height, Height>,
    pub month1: LazyMax<Month1, T, Height, Height>,
    pub month3: LazyMax<Month3, T, Height, Height>,
    pub month6: LazyMax<Month6, T, Height, Height>,
    pub year1: LazyMax<Year1, T, Height, Height>,
    pub year10: LazyMax<Year10, T, Height, Height>,
    // Protocol epochs.
    pub halvingepoch: LazyMax<HalvingEpoch, T, Height, HalvingEpoch>,
    pub difficultyepoch: LazyMax<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedHeightDerivedMax<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
name: &str,
height_source: ReadableBoxedVec<Height, T>,
version: Version,
indexes: &indexes::Vecs,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazyMax::from_height_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
};
}
macro_rules! epoch {
($idx:ident) => {
LazyMax::from_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.identity.read_only_boxed_clone(),
)
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
}
}
}

View File

@@ -1,97 +0,0 @@
//! ComputedHeightDerivedMin - lazy time periods + epochs (min value).
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazyMin, NumericValue},
};
/// Lazy per-period and per-epoch minimum over a height-level source.
///
/// Mirrors `ComputedHeightDerivedMax`: calendar periods map through
/// `Height`, while epoch fields use the epoch type as the final
/// type parameter of `LazyMin`.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Calendar periods, finest to coarsest.
    pub minute1: LazyMin<Minute1, T, Height, Height>,
    pub minute5: LazyMin<Minute5, T, Height, Height>,
    pub minute10: LazyMin<Minute10, T, Height, Height>,
    pub minute30: LazyMin<Minute30, T, Height, Height>,
    pub hour1: LazyMin<Hour1, T, Height, Height>,
    pub hour4: LazyMin<Hour4, T, Height, Height>,
    pub hour12: LazyMin<Hour12, T, Height, Height>,
    pub day1: LazyMin<Day1, T, Height, Height>,
    pub day3: LazyMin<Day3, T, Height, Height>,
    pub week1: LazyMin<Week1, T, Height, Height>,
    pub month1: LazyMin<Month1, T, Height, Height>,
    pub month3: LazyMin<Month3, T, Height, Height>,
    pub month6: LazyMin<Month6, T, Height, Height>,
    pub year1: LazyMin<Year1, T, Height, Height>,
    pub year10: LazyMin<Year10, T, Height, Height>,
    // Protocol epochs.
    pub halvingepoch: LazyMin<HalvingEpoch, T, Height, HalvingEpoch>,
    pub difficultyepoch: LazyMin<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedHeightDerivedMin<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
name: &str,
height_source: ReadableBoxedVec<Height, T>,
version: Version,
indexes: &indexes::Vecs,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazyMin::from_height_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
};
}
macro_rules! epoch {
($idx:ident) => {
LazyMin::from_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.identity.read_only_boxed_clone(),
)
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
}
}
}

View File

@@ -1,39 +1,21 @@
mod binary_last;
mod binary_sum;
mod binary_sum_cum;
mod cum_full;
mod distribution;
mod first;
mod full;
mod last;
mod lazy_distribution;
mod lazy_full;
mod lazy_last;
mod lazy_sum;
mod lazy_sum_cum;
mod max;
mod min;
mod ohlc;
mod split_ohlc;
mod sum;
mod sum_cum;
mod value_lazy_last;
pub use binary_last::*;
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use cum_full::*;
pub use distribution::*;
pub use first::*;
pub use full::*;
pub use last::*;
pub use lazy_distribution::*;
pub use lazy_full::*;
pub use lazy_last::*;
pub use lazy_sum::*;
pub use lazy_sum_cum::*;
pub use max::*;
pub use min::*;
pub use ohlc::*;
pub use split_ohlc::*;
pub use sum::*;
pub use sum_cum::*;
pub use value_lazy_last::*;

View File

@@ -1,91 +0,0 @@
//! Lazy OHLC period groupings derived from height-level data.
//!
//! Each period's OHLC is computed lazily in a single pass over the source range:
//! open = first, high = max, low = min, close = last.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazyOHLC, OHLCRecord},
};
/// Lazy bundled OHLC vecs for all periods, derived from height-level data.
///
/// Each field is a `LazyOHLC` over the same `Height`-indexed source of
/// `OHLC::Inner` values; per the module doc, open/high/low/close are
/// computed in a single pass over the source range.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedOHLC<OHLC>
where
    OHLC: OHLCRecord + 'static,
{
    // Calendar periods, finest to coarsest.
    pub minute1: LazyOHLC<Minute1, OHLC, Height, OHLC::Inner, Height>,
    pub minute5: LazyOHLC<Minute5, OHLC, Height, OHLC::Inner, Height>,
    pub minute10: LazyOHLC<Minute10, OHLC, Height, OHLC::Inner, Height>,
    pub minute30: LazyOHLC<Minute30, OHLC, Height, OHLC::Inner, Height>,
    pub hour1: LazyOHLC<Hour1, OHLC, Height, OHLC::Inner, Height>,
    pub hour4: LazyOHLC<Hour4, OHLC, Height, OHLC::Inner, Height>,
    pub hour12: LazyOHLC<Hour12, OHLC, Height, OHLC::Inner, Height>,
    pub day1: LazyOHLC<Day1, OHLC, Height, OHLC::Inner, Height>,
    pub day3: LazyOHLC<Day3, OHLC, Height, OHLC::Inner, Height>,
    pub week1: LazyOHLC<Week1, OHLC, Height, OHLC::Inner, Height>,
    pub month1: LazyOHLC<Month1, OHLC, Height, OHLC::Inner, Height>,
    pub month3: LazyOHLC<Month3, OHLC, Height, OHLC::Inner, Height>,
    pub month6: LazyOHLC<Month6, OHLC, Height, OHLC::Inner, Height>,
    pub year1: LazyOHLC<Year1, OHLC, Height, OHLC::Inner, Height>,
    pub year10: LazyOHLC<Year10, OHLC, Height, OHLC::Inner, Height>,
    // Epochs use the same height-based construction as calendar periods.
    pub halvingepoch: LazyOHLC<HalvingEpoch, OHLC, Height, OHLC::Inner, Height>,
    pub difficultyepoch: LazyOHLC<DifficultyEpoch, OHLC, Height, OHLC::Inner, Height>,
}
const VERSION: Version = Version::ZERO;
impl<OHLC> ComputedHeightDerivedOHLC<OHLC>
where
OHLC: OHLCRecord + 'static,
OHLC::Inner: ComputedVecValue + JsonSchema + 'static,
{
pub(crate) fn forced_import(
name: &str,
version: Version,
indexes: &indexes::Vecs,
height_source: ReadableBoxedVec<Height, OHLC::Inner>,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazyOHLC::from_height_source(
name,
v,
height_source.clone(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
}
}

View File

@@ -1,49 +0,0 @@
//! OHLC split into separate First/Last/Max/Min period groupings derived from height-level data.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::ReadableBoxedVec;
use crate::{
indexes,
internal::{
ComputedHeightDerivedFirst, ComputedHeightDerivedLast, ComputedHeightDerivedMax,
ComputedHeightDerivedMin, ComputedVecValue, NumericValue,
},
};
/// Split OHLC vecs for all periods, derived from height data.
///
/// Instead of one bundled record per period, each OHLC component gets
/// its own per-period grouping: open = first, high = max, low = min,
/// close = last.
#[derive(Clone, Traversable)]
pub struct ComputedHeightDerivedSplitOHLC<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub open: ComputedHeightDerivedFirst<T>,
    pub high: ComputedHeightDerivedMax<T>,
    pub low: ComputedHeightDerivedMin<T>,
    pub close: ComputedHeightDerivedLast<T>,
}
/// Version component contributed by this wrapper itself.
const VERSION: Version = Version::ZERO;

impl<T> ComputedHeightDerivedSplitOHLC<T>
where
    T: NumericValue + JsonSchema,
{
    /// Builds the four OHLC components over the same height source,
    /// registered as `{name}_open`, `{name}_high`, `{name}_low` and
    /// `{name}_close`.
    pub(crate) fn forced_import(
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        height_source: ReadableBoxedVec<Height, T>,
    ) -> Self {
        let combined = version + VERSION;
        let suffixed = |suffix: &str| format!("{name}_{suffix}");
        Self {
            open: ComputedHeightDerivedFirst::forced_import(
                &suffixed("open"),
                height_source.clone(),
                combined,
                indexes,
            ),
            high: ComputedHeightDerivedMax::forced_import(
                &suffixed("high"),
                height_source.clone(),
                combined,
                indexes,
            ),
            low: ComputedHeightDerivedMin::forced_import(
                &suffixed("low"),
                height_source.clone(),
                combined,
                indexes,
            ),
            // Last component takes ownership of the source; no clone needed.
            close: ComputedHeightDerivedLast::forced_import(
                &suffixed("close"),
                height_source,
                combined,
                indexes,
            ),
        }
    }
}

View File

@@ -1,97 +0,0 @@
//! ComputedHeightDerivedSum - lazy time periods + epochs.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec};
use crate::{
indexes,
internal::{ComputedVecValue, LazySum, NumericValue},
};
/// Lazy per-period and per-epoch sum over a height-level source.
///
/// Calendar periods map through `Height`; the epoch fields use the epoch
/// type itself as the final type parameter of `LazySum`.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedHeightDerivedSum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Calendar periods, finest to coarsest.
    pub minute1: LazySum<Minute1, T, Height, Height>,
    pub minute5: LazySum<Minute5, T, Height, Height>,
    pub minute10: LazySum<Minute10, T, Height, Height>,
    pub minute30: LazySum<Minute30, T, Height, Height>,
    pub hour1: LazySum<Hour1, T, Height, Height>,
    pub hour4: LazySum<Hour4, T, Height, Height>,
    pub hour12: LazySum<Hour12, T, Height, Height>,
    pub day1: LazySum<Day1, T, Height, Height>,
    pub day3: LazySum<Day3, T, Height, Height>,
    pub week1: LazySum<Week1, T, Height, Height>,
    pub month1: LazySum<Month1, T, Height, Height>,
    pub month3: LazySum<Month3, T, Height, Height>,
    pub month6: LazySum<Month6, T, Height, Height>,
    pub year1: LazySum<Year1, T, Height, Height>,
    pub year10: LazySum<Year10, T, Height, Height>,
    // Protocol epochs.
    pub halvingepoch: LazySum<HalvingEpoch, T, Height, HalvingEpoch>,
    pub difficultyepoch: LazySum<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedHeightDerivedSum<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
name: &str,
height_source: ReadableBoxedVec<Height, T>,
version: Version,
indexes: &indexes::Vecs,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazySum::from_height_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
};
}
macro_rules! epoch {
($idx:ident) => {
LazySum::from_source_raw(
name,
v,
height_source.clone(),
indexes.$idx.identity.read_only_boxed_clone(),
)
};
}
Self {
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: epoch!(halvingepoch),
difficultyepoch: epoch!(difficultyepoch),
}
}
}

View File

@@ -1,163 +0,0 @@
//! TxDerivedFull - aggregates from TxIndex to height Full + lazy time periods + epochs.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, TxIndex, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use crate::{
indexes, ComputeIndexes,
internal::{ComputedVecValue, Full, LazyFull, NumericValue},
};
/// Aggregates from TxIndex to height/time periods with full stats.
///
/// Only `height` is materialized storage (`Full` under storage mode `M`);
/// every period and epoch field is a `LazyFull` view derived from the
/// height-level stats.
#[derive(Traversable)]
#[traversable(merge)]
pub struct TxDerivedFull<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Stored height-level full statistics; everything below derives from it.
    pub height: Full<Height, T, M>,
    // Calendar periods, finest to coarsest.
    pub minute1: LazyFull<Minute1, T, Height, Height>,
    pub minute5: LazyFull<Minute5, T, Height, Height>,
    pub minute10: LazyFull<Minute10, T, Height, Height>,
    pub minute30: LazyFull<Minute30, T, Height, Height>,
    pub hour1: LazyFull<Hour1, T, Height, Height>,
    pub hour4: LazyFull<Hour4, T, Height, Height>,
    pub hour12: LazyFull<Hour12, T, Height, Height>,
    pub day1: LazyFull<Day1, T, Height, Height>,
    pub day3: LazyFull<Day3, T, Height, Height>,
    pub week1: LazyFull<Week1, T, Height, Height>,
    pub month1: LazyFull<Month1, T, Height, Height>,
    pub month3: LazyFull<Month3, T, Height, Height>,
    pub month6: LazyFull<Month6, T, Height, Height>,
    pub year1: LazyFull<Year1, T, Height, Height>,
    pub year10: LazyFull<Year10, T, Height, Height>,
    // Protocol epochs.
    pub halvingepoch: LazyFull<HalvingEpoch, T, Height, HalvingEpoch>,
    pub difficultyepoch: LazyFull<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ONE;
impl<T> TxDerivedFull<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let height = Full::forced_import(db, name, version + VERSION)?;
let v = version + VERSION;
macro_rules! period {
($idx:ident) => {
LazyFull::from_height_source(
name,
v,
height.boxed_sum(),
height.boxed_cumulative(),
indexes.$idx.first_height.read_only_boxed_clone(),
)
};
}
macro_rules! epoch {
($idx:ident) => {
LazyFull::from_stats_aggregate(
name,
v,
height.boxed_average(),
height.boxed_min(),
height.boxed_max(),
height.boxed_sum(),
height.boxed_cumulative(),
height.boxed_average(),
indexes.$idx.identity.read_only_boxed_clone(),
)
};
}
let minute1 = period!(minute1);
let minute5 = period!(minute5);
let minute10 = period!(minute10);
let minute30 = period!(minute30);
let hour1 = period!(hour1);
let hour4 = period!(hour4);
let hour12 = period!(hour12);
let day1 = period!(day1);
let day3 = period!(day3);
let week1 = period!(week1);
let month1 = period!(month1);
let month3 = period!(month3);
let month6 = period!(month6);
let year1 = period!(year1);
let year10 = period!(year10);
let halvingepoch = epoch!(halvingepoch);
let difficultyepoch = epoch!(difficultyepoch);
Ok(Self {
height,
minute1,
minute5,
minute10,
minute30,
hour1,
hour4,
hour12,
day1,
day3,
week1,
month1,
month3,
month6,
year1,
year10,
halvingepoch,
difficultyepoch,
})
}
pub(crate) fn derive_from(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
txindex_source: &impl ReadableVec<TxIndex, T>,
exit: &Exit,
) -> Result<()> {
self.derive_from_with_skip(indexer, indexes, starting_indexes, txindex_source, exit, 0)
}
/// Derive from source, skipping first N transactions per block from all calculations.
///
/// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats.
pub(crate) fn derive_from_with_skip(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
txindex_source: &impl ReadableVec<TxIndex, T>,
exit: &Exit,
skip_count: usize,
) -> Result<()> {
self.height.compute_with_skip(
starting_indexes.height,
txindex_source,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
skip_count,
)?;
Ok(())
}
}

View File

@@ -1,94 +0,0 @@
//! Lazy transform of TxDerivedFull.
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour12, Hour4, Minute1, Minute10,
Minute30, Minute5, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use schemars::JsonSchema;
use vecdb::{ReadableCloneableVec, UnaryTransform};
use crate::internal::{ComputedVecValue, TxDerivedFull, LazyTransformFull};
/// Lazy transform of a `TxDerivedFull` (see module doc).
///
/// Every field — height level plus all periods and epochs — is a
/// `LazyTransformFull` mapping source values `S1T` to output values `T`.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyTxDerivedFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    // Height level.
    pub height: LazyTransformFull<Height, T, S1T>,
    // Calendar periods, finest to coarsest.
    pub minute1: LazyTransformFull<Minute1, T, S1T>,
    pub minute5: LazyTransformFull<Minute5, T, S1T>,
    pub minute10: LazyTransformFull<Minute10, T, S1T>,
    pub minute30: LazyTransformFull<Minute30, T, S1T>,
    pub hour1: LazyTransformFull<Hour1, T, S1T>,
    pub hour4: LazyTransformFull<Hour4, T, S1T>,
    pub hour12: LazyTransformFull<Hour12, T, S1T>,
    pub day1: LazyTransformFull<Day1, T, S1T>,
    pub day3: LazyTransformFull<Day3, T, S1T>,
    pub week1: LazyTransformFull<Week1, T, S1T>,
    pub month1: LazyTransformFull<Month1, T, S1T>,
    pub month3: LazyTransformFull<Month3, T, S1T>,
    pub month6: LazyTransformFull<Month6, T, S1T>,
    pub year1: LazyTransformFull<Year1, T, S1T>,
    pub year10: LazyTransformFull<Year10, T, S1T>,
    // Protocol epochs.
    pub halvingepoch: LazyTransformFull<HalvingEpoch, T, S1T>,
    pub difficultyepoch: LazyTransformFull<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyTxDerivedFull<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub(crate) fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &TxDerivedFull<S1T>,
) -> Self {
let v = version + VERSION;
macro_rules! period {
($p:ident) => {
LazyTransformFull::from_boxed::<F>(
name,
v,
source.$p.average.read_only_boxed_clone(),
source.$p.min.read_only_boxed_clone(),
source.$p.max.read_only_boxed_clone(),
source.$p.percentiles.pct10.read_only_boxed_clone(),
source.$p.percentiles.pct25.read_only_boxed_clone(),
source.$p.percentiles.median.read_only_boxed_clone(),
source.$p.percentiles.pct75.read_only_boxed_clone(),
source.$p.percentiles.pct90.read_only_boxed_clone(),
source.$p.sum.read_only_boxed_clone(),
source.$p.cumulative.read_only_boxed_clone(),
)
};
}
Self {
height: LazyTransformFull::from_stats_aggregate::<F>(name, v, &source.height),
minute1: period!(minute1),
minute5: period!(minute5),
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halvingepoch: period!(halvingepoch),
difficultyepoch: period!(difficultyepoch),
}
}
}

View File

@@ -1,9 +1,3 @@
mod distribution;
mod full;
mod lazy_full;
mod value_full;
pub use distribution::*;
pub use full::*;
pub use lazy_full::*;
pub use value_full::*;

View File

@@ -1,84 +0,0 @@
//! Value type for Full pattern from TxIndex.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Sats, TxIndex, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use crate::{
ComputeIndexes, indexes,
internal::{LazyTxDerivedFull, SatsToBitcoin, TxDerivedFull, ValueDollarsFromTxFull},
prices,
};
/// Value (sats/btc/usd) triple for the Full pattern from TxIndex.
///
/// `sats` is the stored aggregate; `btc` is a lazy sats→bitcoin transform
/// of it; `usd` is its own stored dollar-denominated aggregate.
#[derive(Traversable)]
pub struct ValueTxDerivedFull<M: StorageMode = Rw> {
    pub sats: TxDerivedFull<Sats, M>,
    pub btc: LazyTxDerivedFull<Bitcoin, Sats>,
    pub usd: ValueDollarsFromTxFull<M>,
}
// Version component contributed by this wrapper itself.
const VERSION: Version = Version::ZERO;
impl ValueTxDerivedFull {
    /// Imports the sats aggregate, derives the lazy BTC view from it
    /// (registered as `{name}_btc`), and imports the USD aggregate
    /// (registered as `{name}_usd`) wired to the price feed.
    ///
    /// # Errors
    /// Propagates failures from importing the sats or USD storage.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        indexer: &Indexer,
        prices: &prices::Vecs,
        sats_txindex: &impl ReadableCloneableVec<TxIndex, Sats>,
    ) -> Result<Self> {
        let v = version + VERSION;
        let sats = TxDerivedFull::forced_import(db, name, v, indexes)?;
        // BTC is a pure lazy transform of the sats aggregate.
        let btc =
            LazyTxDerivedFull::from_computed::<SatsToBitcoin>(&format!("{name}_btc"), v, &sats);
        let usd = ValueDollarsFromTxFull::forced_import(
            db,
            &format!("{name}_usd"),
            v,
            indexes,
            &sats.height,
            prices.usd.price.read_only_boxed_clone(),
            sats_txindex.read_only_boxed_clone(),
            indexer.vecs.transactions.height.read_only_boxed_clone(),
        )?;
        Ok(Self {
            sats,
            btc,
            usd,
        })
    }
    /// Derive from source, skipping first N transactions per block from all calculations.
    ///
    /// Use `skip_count: 1` to exclude coinbase transactions from fee/feerate stats.
    /// Recomputes `sats` (and `usd` from the same starting indexes); the
    /// lazy `btc` view needs no recomputation.
    pub(crate) fn derive_from_with_skip(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        txindex_source: &impl ReadableVec<TxIndex, Sats>,
        exit: &Exit,
        skip_count: usize,
    ) -> Result<()> {
        self.sats.derive_from_with_skip(
            indexer,
            indexes,
            starting_indexes,
            txindex_source,
            exit,
            skip_count,
        )?;
        self.usd.derive_from(indexes, starting_indexes, exit)?;
        Ok(())
    }
}

View File

@@ -68,14 +68,6 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
self.min_max_average.boxed_average()
}
pub(crate) fn boxed_min(&self) -> ReadableBoxedVec<I, T> {
self.min_max_average.boxed_min()
}
pub(crate) fn boxed_max(&self) -> ReadableBoxedVec<I, T> {
self.min_max_average.boxed_max()
}
pub fn read_only_clone(&self) -> Distribution<I, T, Ro> {
Distribution {
min_max_average: self.min_max_average.read_only_clone(),

View File

@@ -65,27 +65,10 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Full<I, T> {
)
}
// Boxed accessors
pub(crate) fn boxed_average(&self) -> ReadableBoxedVec<I, T> {
self.distribution.boxed_average()
}
pub(crate) fn boxed_min(&self) -> ReadableBoxedVec<I, T> {
self.distribution.boxed_min()
}
pub(crate) fn boxed_max(&self) -> ReadableBoxedVec<I, T> {
self.distribution.boxed_max()
}
pub(crate) fn boxed_sum(&self) -> ReadableBoxedVec<I, T> {
self.sum_cum.sum.0.read_only_boxed_clone()
}
pub(crate) fn boxed_cumulative(&self) -> ReadableBoxedVec<I, T> {
self.sum_cum.cumulative.0.read_only_boxed_clone()
}
pub fn read_only_clone(&self) -> Full<I, T, Ro> {
Full {
distribution: self.distribution.read_only_clone(),

View File

@@ -1,7 +1,9 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, StorageMode, VecIndex, Version};
use vecdb::{
Database, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, StorageMode, VecIndex, Version,
};
use crate::internal::{AverageVec, ComputedVecValue};
@@ -28,14 +30,6 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> MinMaxAverage<I, T> {
self.average.0.read_only_boxed_clone()
}
pub(crate) fn boxed_min(&self) -> ReadableBoxedVec<I, T> {
self.minmax.min.0.read_only_boxed_clone()
}
pub(crate) fn boxed_max(&self) -> ReadableBoxedVec<I, T> {
self.minmax.max.0.read_only_boxed_clone()
}
pub fn read_only_clone(&self) -> MinMaxAverage<I, T, Ro> {
MinMaxAverage {
average: self.average.read_only_clone(),

View File

@@ -1,9 +1,7 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{
Database, ReadableBoxedVec, ReadableCloneableVec, Ro, Rw, StorageMode, VecIndex, Version,
};
use vecdb::{Database, Ro, Rw, StorageMode, VecIndex, Version};
use crate::internal::{ComputedVecValue, MedianVec, Pct10Vec, Pct25Vec, Pct75Vec, Pct90Vec};
@@ -28,27 +26,6 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Percentiles<I, T> {
})
}
// Boxed accessors
pub(crate) fn boxed_pct10(&self) -> ReadableBoxedVec<I, T> {
self.pct10.0.read_only_boxed_clone()
}
pub(crate) fn boxed_pct25(&self) -> ReadableBoxedVec<I, T> {
self.pct25.0.read_only_boxed_clone()
}
pub(crate) fn boxed_median(&self) -> ReadableBoxedVec<I, T> {
self.median.0.read_only_boxed_clone()
}
pub(crate) fn boxed_pct75(&self) -> ReadableBoxedVec<I, T> {
self.pct75.0.read_only_boxed_clone()
}
pub(crate) fn boxed_pct90(&self) -> ReadableBoxedVec<I, T> {
self.pct90.0.read_only_boxed_clone()
}
pub fn read_only_clone(&self) -> Percentiles<I, T, Ro> {
Percentiles {
pct10: self.pct10.read_only_clone(),

View File

@@ -1,128 +0,0 @@
//! Lazy first-value aggregation.
use std::sync::Arc;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Cursor, FromCoarserIndex, ReadableBoxedVec, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
// Version tag mixed into the vec's reported version at construction time.
const VERSION: Version = Version::ZERO;
// Monomorphised callback that streams the first value of each coarser-index
// period in `[from, to)` into the sink closure.
type ForEachRangeFn<S1I, T, I, S2T> =
fn(usize, usize, &ReadableBoxedVec<S1I, T>, &ReadableBoxedVec<I, S2T>, &mut dyn FnMut(T));
/// Lazy "first value per period" aggregation over a finer-grained source vec.
///
/// `source` holds values at the finer index `S1I`; `mapping` has one entry per
/// coarser index `I` and bounds this vec's length (in the height-based
/// constructor it also locates each period's first source position). Nothing
/// is materialised: values are produced on demand through `for_each_range`.
pub struct LazyFirst<I, T, S1I, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1I: VecIndex,
S2T: VecValue,
{
// Display/export name of this derived vec.
name: Arc<str>,
version: Version,
// Finer-grained value series the "first" values are drawn from.
source: ReadableBoxedVec<S1I, T>,
// One entry per coarser index.
mapping: ReadableBoxedVec<I, S2T>,
// Strategy picked by the constructor (index-derived vs height-mapped).
for_each_range: ForEachRangeFn<S1I, T, I, S2T>,
}
// Generates the shared trait boilerplate (presumably Clone/AnyVec/ReadableVec
// etc. — defined elsewhere in the crate).
impl_lazy_agg!(LazyFirst);
impl<I, T, S1I, S2T> LazyFirst<I, T, S1I, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1I: VecIndex + 'static + FromCoarserIndex<I>,
S2T: VecValue,
{
/// Build a `LazyFirst` whose period boundaries come purely from the index
/// conversion `S1I: FromCoarserIndex<I>`.
///
/// `len_source` is only consulted for its length (one entry per coarser
/// index); the first source position of period `i` is
/// `S1I::min_from(I::from(i))`.
pub(crate) fn from_source(
name: &str,
version: Version,
source: ReadableBoxedVec<S1I, T>,
len_source: ReadableBoxedVec<I, S2T>,
) -> Self {
// Streams the first value of each period in `[from, to)` into `f`.
fn for_each_range<
I: VecIndex,
T: VecValue,
S1I: VecIndex + FromCoarserIndex<I>,
S2T: VecValue,
>(
from: usize,
to: usize,
source: &ReadableBoxedVec<S1I, T>,
mapping: &ReadableBoxedVec<I, S2T>,
f: &mut dyn FnMut(T),
) {
let mapping_len = mapping.len();
// Sequential cursor: targets are ascending, so forward skips are cheap.
let mut cursor = Cursor::from_dyn(&**source);
for i in from..to {
// Clamp to the mapping's length — periods past it don't exist yet.
if i >= mapping_len {
break;
}
// First finer-grained position belonging to coarser index `i`.
let target = S1I::min_from(I::from(i));
if cursor.position() <= target {
// Target is at or ahead of the cursor: skip forward, read next.
cursor.advance(target - cursor.position());
if let Some(v) = cursor.next() {
f(v);
}
} else if let Some(v) = source.collect_one_at(target) {
// Cursor already passed `target`: fall back to a point lookup
// instead of rewinding.
f(v);
}
}
}
Self {
name: Arc::from(name),
version: version + VERSION,
source,
mapping: len_source,
for_each_range: for_each_range::<I, T, S1I, S2T>,
}
}
}
impl<I, T> LazyFirst<I, T, Height, Height>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
{
/// Build a `LazyFirst` over height-indexed data, where `first_height[i]`
/// gives the first block height belonging to coarser period `i`.
pub(crate) fn from_height_source(
name: &str,
version: Version,
source: ReadableBoxedVec<Height, T>,
first_height: ReadableBoxedVec<I, Height>,
) -> Self {
// Streams `source[first_height[i]]` for each period `i` in `[from, to)`.
fn for_each_range<I: VecIndex, T: VecValue>(
from: usize,
to: usize,
source: &ReadableBoxedVec<Height, T>,
mapping: &ReadableBoxedVec<I, Height>,
f: &mut dyn FnMut(T),
) {
// One first-height per requested period, clamped to the mapping length.
let heights = mapping.collect_range_dyn(from, to.min(mapping.len()));
let mut cursor = Cursor::from_dyn(&**source);
for idx in 0..(to - from) {
// Periods past the mapping end simply produce no value.
let Some(&first_h) = heights.get(idx) else {
continue;
};
let target = first_h.to_usize();
if cursor.position() <= target {
// Forward move: skip ahead and read sequentially.
cursor.advance(target - cursor.position());
if let Some(v) = cursor.next() {
f(v);
}
} else if let Some(v) = source.collect_one_at(target) {
// Backward target: point lookup instead of rewinding the cursor.
f(v);
}
}
}
Self {
name: Arc::from(name),
version: version + VERSION,
source,
mapping: first_height,
for_each_range: for_each_range::<I, T>,
}
}
}

View File

@@ -46,15 +46,6 @@ where
Self::from_source_inner(&format!("{name}_max"), version, source, len_source)
}
pub(crate) fn from_source_raw(
name: &str,
version: Version,
source: ReadableBoxedVec<S1I, T>,
len_source: ReadableBoxedVec<I, S2T>,
) -> Self {
Self::from_source_inner(name, version, source, len_source)
}
fn from_source_inner(
name: &str,
version: Version,
@@ -117,15 +108,6 @@ where
Self::from_height_source_inner(&format!("{name}_max"), version, source, first_height)
}
pub(crate) fn from_height_source_raw(
name: &str,
version: Version,
source: ReadableBoxedVec<Height, T>,
first_height: ReadableBoxedVec<I, Height>,
) -> Self {
Self::from_height_source_inner(name, version, source, first_height)
}
fn from_height_source_inner(
name: &str,
version: Version,

View File

@@ -46,15 +46,6 @@ where
Self::from_source_inner(&format!("{name}_min"), version, source, len_source)
}
pub(crate) fn from_source_raw(
name: &str,
version: Version,
source: ReadableBoxedVec<S1I, T>,
len_source: ReadableBoxedVec<I, S2T>,
) -> Self {
Self::from_source_inner(name, version, source, len_source)
}
fn from_source_inner(
name: &str,
version: Version,
@@ -117,15 +108,6 @@ where
Self::from_height_source_inner(&format!("{name}_min"), version, source, first_height)
}
pub(crate) fn from_height_source_raw(
name: &str,
version: Version,
source: ReadableBoxedVec<Height, T>,
first_height: ReadableBoxedVec<I, Height>,
) -> Self {
Self::from_height_source_inner(name, version, source, first_height)
}
fn from_height_source_inner(
name: &str,
version: Version,

View File

@@ -181,12 +181,10 @@ macro_rules! impl_lazy_agg {
mod average;
mod cumulative;
mod distribution;
mod first;
mod full;
mod last;
mod max;
mod min;
mod ohlc;
mod percentile;
mod percentiles;
mod sparse_last;
@@ -196,12 +194,10 @@ mod sum_cum;
pub use average::*;
pub use cumulative::*;
pub use distribution::*;
pub use first::*;
pub use full::*;
pub use last::*;
pub use max::*;
pub use min::*;
pub use ohlc::*;
pub use percentile::*;
pub use percentiles::*;
pub use sparse_last::*;

View File

@@ -1,370 +0,0 @@
//! Lazy OHLC aggregation — single-pass first/max/min/last from height-level data.
use std::sync::Arc;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use serde::Serialize;
use vecdb::{Cursor, Formattable, ReadableBoxedVec, VecIndex, VecValue};
use brk_types::{Cents, Close, Dollars, High, Low, OHLCCents, OHLCDollars, OHLCSats, Open, Sats};
use crate::internal::ComputedVecValue;
/// Trait for OHLC bundle types that can be constructed from / decomposed into
/// their open/high/low/close components.
pub trait OHLCRecord: VecValue + Formattable + Serialize + JsonSchema {
/// Scalar type of the four components (e.g. `Cents` for `OHLCCents`).
type Inner: ComputedVecValue + JsonSchema + Copy;
/// First value of the period.
fn ohlc_open(&self) -> Self::Inner;
/// Maximum value of the period.
fn ohlc_high(&self) -> Self::Inner;
/// Minimum value of the period.
fn ohlc_low(&self) -> Self::Inner;
/// Last value of the period.
fn ohlc_close(&self) -> Self::Inner;
/// Assemble a record from the four components.
fn from_parts(
open: Self::Inner,
high: Self::Inner,
low: Self::Inner,
close: Self::Inner,
) -> Self;
}
impl OHLCRecord for OHLCCents {
type Inner = Cents;
fn ohlc_open(&self) -> Cents {
*self.open
}
fn ohlc_high(&self) -> Cents {
*self.high
}
fn ohlc_low(&self) -> Cents {
*self.low
}
fn ohlc_close(&self) -> Cents {
*self.close
}
fn from_parts(open: Cents, high: Cents, low: Cents, close: Cents) -> Self {
Self {
open: Open::new(open),
high: High::new(high),
low: Low::new(low),
close: Close::new(close),
}
}
}
impl OHLCRecord for OHLCDollars {
type Inner = Dollars;
fn ohlc_open(&self) -> Dollars {
*self.open
}
fn ohlc_high(&self) -> Dollars {
*self.high
}
fn ohlc_low(&self) -> Dollars {
*self.low
}
fn ohlc_close(&self) -> Dollars {
*self.close
}
fn from_parts(open: Dollars, high: Dollars, low: Dollars, close: Dollars) -> Self {
Self {
open: Open::new(open),
high: High::new(high),
low: Low::new(low),
close: Close::new(close),
}
}
}
impl OHLCRecord for OHLCSats {
type Inner = Sats;
fn ohlc_open(&self) -> Sats {
*self.open
}
fn ohlc_high(&self) -> Sats {
*self.high
}
fn ohlc_low(&self) -> Sats {
*self.low
}
fn ohlc_close(&self) -> Sats {
*self.close
}
fn from_parts(open: Sats, high: Sats, low: Sats, close: Sats) -> Self {
Self {
open: Open::new(open),
high: High::new(high),
low: Low::new(low),
close: Close::new(close),
}
}
}
// Version tag mixed into the derived vec's reported version.
const VERSION: Version = Version::ZERO;
// Monomorphised callback that computes one OHLC record per coarser period in
// `[from, to)` and feeds it to the sink closure.
type ForEachRangeFn<S1I, ST, I, S2T, OHLC> =
fn(usize, usize, &ReadableBoxedVec<S1I, ST>, &ReadableBoxedVec<I, S2T>, &mut dyn FnMut(OHLC));
/// Lazy OHLC aggregation vec. For each coarser period, computes open (first),
/// high (max), low (min), close (last) in a single pass over the source range.
pub struct LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex,
OHLC: OHLCRecord,
S1I: VecIndex,
ST: VecValue,
S2T: VecValue,
{
// Export name; the constructor appends an `_ohlc` suffix.
name: Arc<str>,
version: Version,
// Finer-grained value series the OHLC records are derived from.
source: ReadableBoxedVec<S1I, ST>,
// One entry per coarser period; also defines this vec's length.
mapping: ReadableBoxedVec<I, S2T>,
for_each_range: ForEachRangeFn<S1I, ST, I, S2T, OHLC>,
}
// --- From height source (Day1, DifficultyEpoch) ---
impl<I, OHLC, T> LazyOHLC<I, OHLC, Height, T, Height>
where
I: VecIndex,
OHLC: OHLCRecord<Inner = T> + 'static,
T: ComputedVecValue + JsonSchema + 'static,
{
/// Build a lazy OHLC vec over height-indexed data, where `first_height[i]`
/// is the first block height of coarser period `i`.
pub(crate) fn from_height_source(
name: &str,
version: Version,
source: ReadableBoxedVec<Height, T>,
first_height: ReadableBoxedVec<I, Height>,
) -> Self {
// Computes OHLC for each period in `[from, to)` in one forward pass.
fn for_each_range<
I: VecIndex,
OHLC: OHLCRecord<Inner = T>,
T: ComputedVecValue + JsonSchema,
>(
from: usize,
to: usize,
source: &ReadableBoxedVec<Height, T>,
mapping: &ReadableBoxedVec<I, Height>,
f: &mut dyn FnMut(OHLC),
) {
// Fetch one extra first-height so each period knows where the next starts.
let map_end = (to + 1).min(mapping.len());
let heights = mapping.collect_range_dyn(from, map_end);
let source_len = source.len();
// No periods in range: nothing to emit.
let Some(&first_h) = heights.first() else {
return;
};
// Single sequential cursor, positioned at the first period's start.
let mut cursor = Cursor::from_dyn(&**source);
cursor.advance(first_h.to_usize());
for idx in 0..(to - from) {
let Some(&cur_h) = heights.get(idx) else {
continue;
};
let first = cur_h.to_usize();
// A period ends where the next begins (or at the end of the source).
let next_first = heights
.get(idx + 1)
.map(|h| h.to_usize())
.unwrap_or(source_len);
let count = next_first.saturating_sub(first);
// Empty period (no heights yet): emit nothing.
if count == 0 {
continue;
}
if let Some(first_val) = cursor.next() {
// open = first value; fold the remaining `count - 1` values,
// tracking high (max), low (min) and close (last seen).
let (high, low, close) = cursor.fold(
count - 1,
(first_val, first_val, first_val),
|(hi, lo, _), v| {
(if v > hi { v } else { hi }, if v < lo { v } else { lo }, v)
},
);
f(OHLC::from_parts(first_val, high, low, close));
}
}
}
Self {
name: Arc::from(format!("{name}_ohlc")),
version: version + VERSION,
source,
mapping: first_height,
for_each_range: for_each_range::<I, OHLC, T>,
}
}
}
// --- Trait implementations ---
impl<I, OHLC, S1I, ST, S2T> Clone for LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex,
OHLC: OHLCRecord,
S1I: VecIndex,
ST: VecValue,
S2T: VecValue,
{
fn clone(&self) -> Self {
Self {
name: self.name.clone(),
version: self.version,
source: self.source.clone(),
mapping: self.mapping.clone(),
for_each_range: self.for_each_range,
}
}
}
impl<I, OHLC, S1I, ST, S2T> vecdb::AnyVec for LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex,
OHLC: OHLCRecord,
S1I: VecIndex,
ST: VecValue,
S2T: VecValue,
{
// Combine our own version with both inputs' versions so any upstream
// change is reflected downstream.
fn version(&self) -> Version {
self.version + self.source.version() + self.mapping.version()
}
fn name(&self) -> &str {
&self.name
}
fn index_type_to_string(&self) -> &'static str {
I::to_string()
}
// One OHLC record per coarser period, so length follows the mapping.
fn len(&self) -> usize {
self.mapping.len()
}
#[inline]
fn value_type_to_size_of(&self) -> usize {
size_of::<OHLC>()
}
#[inline]
fn value_type_to_string(&self) -> &'static str {
vecdb::short_type_name::<OHLC>()
}
// Lazy vec: values are computed on read, so there are no storage regions.
#[inline]
fn region_names(&self) -> Vec<String> {
vec![]
}
}
// Declares the index/value type pair used by the generic vec machinery.
impl<I, OHLC, S1I, ST, S2T> vecdb::TypedVec for LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex,
OHLC: OHLCRecord,
S1I: VecIndex,
ST: VecValue,
S2T: VecValue,
{
type I = I;
type T = OHLC;
}
impl<I, OHLC, S1I, ST, S2T> vecdb::ReadableVec<I, OHLC> for LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex,
OHLC: OHLCRecord,
S1I: VecIndex,
ST: VecValue,
S2T: VecValue,
{
// Append the records for `[from, to)` (clamped to this vec's length) to `buf`.
fn read_into_at(&self, from: usize, to: usize, buf: &mut Vec<OHLC>) {
let to = to.min(self.mapping.len());
if from >= to {
return;
}
// One record per period: reserve once up front.
buf.reserve(to - from);
(self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| buf.push(v));
}
// Stream the records for `[from, to)` (clamped) into `f`.
fn for_each_range_dyn_at(&self, from: usize, to: usize, f: &mut dyn FnMut(OHLC)) {
let to = to.min(self.mapping.len());
if from >= to {
return;
}
(self.for_each_range)(from, to, &self.source, &self.mapping, f);
}
// Fold the records for `[from, to)` (clamped) into an accumulator.
#[inline]
fn fold_range_at<B, F: FnMut(B, OHLC) -> B>(
&self,
from: usize,
to: usize,
init: B,
mut f: F,
) -> B
where
Self: Sized,
{
let to = to.min(self.mapping.len());
if from >= to {
return init;
}
// `for_each_range` only exposes a callback, so thread the accumulator
// through an Option to move it in and out of the FnMut closure.
let mut acc = Some(init);
(self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| {
acc = Some(f(acc.take().unwrap(), v));
});
acc.unwrap()
}
// Fallible fold: the first `Err` produced by `f` is kept and returned;
// remaining callback invocations are ignored (the underlying iteration
// cannot be aborted early through the `dyn FnMut` interface).
#[inline]
fn try_fold_range_at<B, E, F: FnMut(B, OHLC) -> std::result::Result<B, E>>(
&self,
from: usize,
to: usize,
init: B,
mut f: F,
) -> std::result::Result<B, E>
where
Self: Sized,
{
let to = to.min(self.mapping.len());
if from >= to {
return Ok(init);
}
let mut acc: Option<std::result::Result<B, E>> = Some(Ok(init));
(self.for_each_range)(from, to, &self.source, &self.mapping, &mut |v| {
// BUGFIX: the previous `if let Some(Ok(a)) = acc.take()` dropped the
// value whenever it was `Some(Err(_))`, leaving `acc = None` and
// making the final `unwrap()` panic if `f` errored before the last
// element. Restore non-Ok states instead of discarding them.
acc = match acc.take() {
Some(Ok(a)) => Some(f(a, v)),
other => other,
};
});
acc.unwrap()
}
// Compute just the single period `[index, index + 1)`.
#[inline]
fn collect_one_at(&self, index: usize) -> Option<OHLC> {
if index >= self.mapping.len() {
return None;
}
let mut result = None;
(self.for_each_range)(index, index + 1, &self.source, &self.mapping, &mut |v| {
result = Some(v)
});
result
}
}
impl<I, OHLC, S1I, ST, S2T> Traversable for LazyOHLC<I, OHLC, S1I, ST, S2T>
where
I: VecIndex + 'static,
OHLC: OHLCRecord + 'static,
S1I: VecIndex + 'static,
ST: VecValue,
S2T: VecValue,
{
// A lazy OHLC vec exports exactly itself (it wraps no child vecs).
fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn vecdb::AnyExportableVec> {
std::iter::once(self as &dyn vecdb::AnyExportableVec)
}
// Describe this vec as a single metric leaf in the catalog tree.
fn to_tree_node(&self) -> brk_types::TreeNode {
use vecdb::AnyVec;
let index_str = I::to_string();
// Index strings without a known `Index` mapping yield an empty list.
let index = brk_types::Index::try_from(index_str).ok();
let indexes = index.into_iter().collect();
let leaf = brk_types::MetricLeaf::new(
self.name().to_string(),
self.value_type_to_string().to_string(),
indexes,
);
// Attach the OHLC record's JSON schema for API consumers; schema
// serialization failure degrades to an empty value rather than erroring.
let schema = schemars::SchemaGenerator::default().into_root_schema_for::<OHLC>();
let schema_json = serde_json::to_value(schema).unwrap_or_default();
brk_types::TreeNode::Leaf(brk_types::MetricLeafWithSchema::new(leaf, schema_json))
}
}

View File

@@ -1,101 +0,0 @@
//! Lazy binary transform for Full (without cumulative).
//!
//! Used for USD conversion where `usd = sats * price[height]`.
//! Cumulative cannot be lazy because `cum_usd ≠ cum_sats * price` -
//! it must be computed by summing historical `sum * price` values.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom2, VecIndex};
use crate::internal::{ComputedVecValue, Full};
use super::LazyBinaryPercentiles;
/// Lazy binary transform for Full stats (excluding cumulative).
///
/// For USD conversion: each stat is computed as `sats_stat * price`.
/// Cumulative is excluded because it requires summing historical values.
#[derive(Clone, Traversable)]
pub struct LazyBinaryTransformFull<I, T, S1T = T, S2T = T>
where
I: VecIndex,
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
// Each stat is an independent lazy vec combining source1's stat with source2.
pub average: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub min: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub max: LazyVecFrom2<I, T, I, S1T, I, S2T>,
// Flattened so the five percentile vecs appear alongside the other stats.
#[traversable(flatten)]
pub percentiles: LazyBinaryPercentiles<I, T, S1T, S2T>,
pub sum: LazyVecFrom2<I, T, I, S1T, I, S2T>,
}
impl<I, T, S1T, S2T> LazyBinaryTransformFull<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Create from Full source and a second source (e.g., price).
///
/// The transform F is applied as `F(source1_stat, source2)` for each stat.
pub(crate) fn from_full_and_source<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &Full<I, S1T>,
source2: ReadableBoxedVec<I, S2T>,
) -> Self {
// `source2` handles are cheap clones; the last stat takes ownership.
Self {
average: LazyVecFrom2::transformed::<F>(
&format!("{name}_average"),
version,
source1.boxed_average(),
source2.clone(),
),
min: LazyVecFrom2::transformed::<F>(
&format!("{name}_min"),
version,
source1.boxed_min(),
source2.clone(),
),
max: LazyVecFrom2::transformed::<F>(
&format!("{name}_max"),
version,
source1.boxed_max(),
source2.clone(),
),
percentiles: LazyBinaryPercentiles::from_percentiles::<F>(
name,
version,
&source1.distribution.percentiles,
source2.clone(),
),
sum: LazyVecFrom2::transformed::<F>(
&format!("{name}_sum"),
version,
source1.boxed_sum(),
source2,
),
}
}
// Boxed read-only handles for composing further derived vecs.
pub(crate) fn boxed_average(&self) -> ReadableBoxedVec<I, T> {
self.average.read_only_boxed_clone()
}
pub(crate) fn boxed_min(&self) -> ReadableBoxedVec<I, T> {
self.min.read_only_boxed_clone()
}
pub(crate) fn boxed_max(&self) -> ReadableBoxedVec<I, T> {
self.max.read_only_boxed_clone()
}
pub(crate) fn boxed_sum(&self) -> ReadableBoxedVec<I, T> {
self.sum.read_only_boxed_clone()
}
}

View File

@@ -1,71 +0,0 @@
//! Lazy binary transform for Percentiles.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2, VecIndex};
use crate::internal::{ComputedVecValue, Percentiles};
// Lazy binary transform applied stat-by-stat to a `Percentiles` source
// (pct10/pct25/median/pct75/pct90), e.g. `sats_pct * price`.
#[derive(Clone, Traversable)]
pub struct LazyBinaryPercentiles<I, T, S1T = T, S2T = T>
where
I: VecIndex,
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
pub pct10: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub pct25: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub median: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub pct75: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub pct90: LazyVecFrom2<I, T, I, S1T, I, S2T>,
}
impl<I, T, S1T, S2T> LazyBinaryPercentiles<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Build five lazy vecs, each combining one percentile of `source` with
/// `source2` via the binary transform `F` (named `{name}_{stat}`).
pub(crate) fn from_percentiles<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source: &Percentiles<I, S1T>,
source2: ReadableBoxedVec<I, S2T>,
) -> Self {
// `source2` handles are cheap clones; the last stat takes ownership.
Self {
pct10: LazyVecFrom2::transformed::<F>(
&format!("{name}_pct10"),
version,
source.boxed_pct10(),
source2.clone(),
),
pct25: LazyVecFrom2::transformed::<F>(
&format!("{name}_pct25"),
version,
source.boxed_pct25(),
source2.clone(),
),
median: LazyVecFrom2::transformed::<F>(
&format!("{name}_median"),
version,
source.boxed_median(),
source2.clone(),
),
pct75: LazyVecFrom2::transformed::<F>(
&format!("{name}_pct75"),
version,
source.boxed_pct75(),
source2.clone(),
),
pct90: LazyVecFrom2::transformed::<F>(
&format!("{name}_pct90"),
version,
source.boxed_pct90(),
source2,
),
}
}
}

View File

@@ -1,40 +0,0 @@
//! Lazy binary transform for Sum-only aggregation at a single index level.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, ReadableBoxedVec, LazyVecFrom2, VecIndex};
use crate::internal::ComputedVecValue;
// Version tag mixed into the wrapped vec's version.
const VERSION: Version = Version::ZERO;

/// Sum-only lazy binary aggregation: a single `LazyVecFrom2` exposed under a
/// `sum` wrapper in the metric tree.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "sum")]
pub struct LazyBinaryTransformSum<I, T, S1T, S2T>(pub LazyVecFrom2<I, T, I, S1T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue;

impl<I, T, S1T, S2T> LazyBinaryTransformSum<I, T, S1T, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Combine two boxed sources through the binary transform `F`, tagging
    /// the result with `version + VERSION`. The `name` is used verbatim
    /// (no `_sum` suffix is appended).
    pub(crate) fn from_boxed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: ReadableBoxedVec<I, S1T>,
        source2: ReadableBoxedVec<I, S2T>,
    ) -> Self {
        let inner =
            LazyVecFrom2::transformed::<F>(name, version + VERSION, source1, source2);
        Self(inner)
    }
}

View File

@@ -1,98 +0,0 @@
//! Lazy binary transform for SumCum.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{
BinaryTransform, LazyVecFrom2, ReadableBoxedVec, ReadableCloneableVec, VecIndex, VecValue,
};
use crate::internal::{ComputedVecValue, LazyFull};
// Lazy binary transform producing both `sum` and `cumulative` stats, each as
// an independent `LazyVecFrom2` over its own pair of sources.
#[derive(Clone, Traversable)]
pub struct LazyBinaryTransformSumCum<I, T, S1T = T, S2T = T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
pub sum: LazyVecFrom2<I, T, I, S1T, I, S2T>,
pub cumulative: LazyVecFrom2<I, T, I, S1T, I, S2T>,
}
impl<I, T, S1T, S2T> LazyBinaryTransformSumCum<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Create from sources without adding _sum suffix.
pub(crate) fn from_sources_sum_raw<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
sum_source1: ReadableBoxedVec<I, S1T>,
sum_source2: ReadableBoxedVec<I, S2T>,
cum_source1: ReadableBoxedVec<I, S1T>,
cum_source2: ReadableBoxedVec<I, S2T>,
) -> Self {
Self {
// `name` used verbatim for the sum; only cumulative gets a suffix.
sum: LazyVecFrom2::transformed::<F>(name, version, sum_source1, sum_source2),
cumulative: LazyVecFrom2::transformed::<F>(
&format!("{name}_cumulative"),
version,
cum_source1,
cum_source2,
),
}
}
/// Combine the sum/cumulative vecs of two `LazyFull` aggregates pairwise
/// through `F` (e.g. ratio of two aggregated series).
pub(crate) fn from_lazy_stats_aggregate<F, S1I, S1L, S2I, S2L>(
name: &str,
version: Version,
source1: &LazyFull<I, S1T, S1I, S1L>,
source2: &LazyFull<I, S2T, S2I, S2L>,
) -> Self
where
F: BinaryTransform<S1T, S2T, T>,
S1I: VecIndex + 'static,
S1L: VecValue,
S2I: VecIndex + 'static,
S2L: VecValue,
{
Self {
sum: LazyVecFrom2::transformed::<F>(
&format!("{name}_sum"),
version,
source1.sum.read_only_boxed_clone(),
source2.sum.read_only_boxed_clone(),
),
cumulative: LazyVecFrom2::transformed::<F>(
&format!("{name}_cumulative"),
version,
source1.cumulative.read_only_boxed_clone(),
source2.cumulative.read_only_boxed_clone(),
),
}
}
/// Create from boxed SumCum + Last sources without adding _sum suffix.
pub(crate) fn from_sources_last_sum_raw<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
sum_source1: ReadableBoxedVec<I, S1T>,
cum_source1: ReadableBoxedVec<I, S1T>,
last_source: ReadableBoxedVec<I, S2T>,
) -> Self {
Self {
// The same `last_source` (e.g. a last-price vec) feeds both stats.
sum: LazyVecFrom2::transformed::<F>(name, version, sum_source1, last_source.clone()),
cumulative: LazyVecFrom2::transformed::<F>(
&format!("{name}_cumulative"),
version,
cum_source1,
last_source,
),
}
}
}

View File

@@ -1,56 +0,0 @@
//! Lazy unary transform for Distribution metrics.
//! Has average, min, max, and percentiles - but no sum/cumulative.
//! Use for ratio/percentage metrics where aggregation doesn't make sense.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex};
use crate::internal::ComputedVecValue;
use super::LazyPercentiles;
/// Distribution stats: average, min, max, percentiles.
/// Excludes sum and cumulative (meaningless for ratios/percentages).
#[derive(Clone, Traversable)]
pub struct LazyTransformDistribution<I, T, S1T = T>
where
I: VecIndex,
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue,
{
// Each stat is a unary lazy transform of the corresponding source stat.
pub average: LazyVecFrom1<I, T, I, S1T>,
pub min: LazyVecFrom1<I, T, I, S1T>,
pub max: LazyVecFrom1<I, T, I, S1T>,
// Flattened so the five percentile vecs sit alongside the other stats.
#[traversable(flatten)]
pub percentiles: LazyPercentiles<I, T, S1T>,
}
impl<I, T, S1T> LazyTransformDistribution<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Build each stat as the unary transform `F` of the matching boxed
/// source stat, named `{name}_{stat}`.
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_boxed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
average: ReadableBoxedVec<I, S1T>,
min: ReadableBoxedVec<I, S1T>,
max: ReadableBoxedVec<I, S1T>,
pct10: ReadableBoxedVec<I, S1T>,
pct25: ReadableBoxedVec<I, S1T>,
median: ReadableBoxedVec<I, S1T>,
pct75: ReadableBoxedVec<I, S1T>,
pct90: ReadableBoxedVec<I, S1T>,
) -> Self {
Self {
average: LazyVecFrom1::transformed::<F>(&format!("{name}_average"), version, average),
min: LazyVecFrom1::transformed::<F>(&format!("{name}_min"), version, min),
max: LazyVecFrom1::transformed::<F>(&format!("{name}_max"), version, max),
// Percentile naming/suffixing is delegated to LazyPercentiles.
percentiles: LazyPercentiles::from_boxed::<F>(name, version, pct10, pct25, median, pct75, pct90),
}
}
}

View File

@@ -3,9 +3,9 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex};
use vecdb::{LazyVecFrom1, ReadableBoxedVec, UnaryTransform, VecIndex};
use crate::internal::{ComputedVecValue, Full};
use crate::internal::ComputedVecValue;
use super::LazyPercentiles;
@@ -31,27 +31,6 @@ where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub(crate) fn from_stats_aggregate<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &Full<I, S1T>,
) -> Self {
Self::from_boxed::<F>(
name,
version,
source.boxed_average(),
source.boxed_min(),
source.boxed_max(),
source.distribution.percentiles.boxed_pct10(),
source.distribution.percentiles.boxed_pct25(),
source.distribution.percentiles.boxed_median(),
source.distribution.percentiles.boxed_pct75(),
source.distribution.percentiles.boxed_pct90(),
source.boxed_sum(),
source.boxed_cumulative(),
)
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_boxed<F: UnaryTransform<S1T, T>>(
name: &str,
@@ -71,9 +50,15 @@ where
average: LazyVecFrom1::transformed::<F>(&format!("{name}_average"), version, average),
min: LazyVecFrom1::transformed::<F>(&format!("{name}_min"), version, min),
max: LazyVecFrom1::transformed::<F>(&format!("{name}_max"), version, max),
percentiles: LazyPercentiles::from_boxed::<F>(name, version, pct10, pct25, median, pct75, pct90),
percentiles: LazyPercentiles::from_boxed::<F>(
name, version, pct10, pct25, median, pct75, pct90,
),
sum: LazyVecFrom1::transformed::<F>(&format!("{name}_sum"), version, sum),
cumulative: LazyVecFrom1::transformed::<F>(&format!("{name}_cumulative"), version, cumulative),
cumulative: LazyVecFrom1::transformed::<F>(
&format!("{name}_cumulative"),
version,
cumulative,
),
}
}
}

View File

@@ -1,23 +1,11 @@
mod binary_full;
mod binary_last;
mod binary_percentiles;
mod binary_sum;
mod binary_sum_cum;
mod distribution;
mod full;
mod last;
mod percentiles;
mod sum;
mod sum_cum;
pub use binary_full::*;
pub use binary_last::*;
pub use binary_percentiles::*;
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use distribution::*;
pub use full::*;
pub use last::*;
pub use percentiles::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -1,32 +0,0 @@
//! Lazy unary transform for Sum-only.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, LazyVecFrom1, UnaryTransform, VecIndex};
use crate::internal::ComputedVecValue;
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "sum")]
pub struct LazyTransformSum<I, T, S1T = T>(pub LazyVecFrom1<I, T, I, S1T>)
where
I: VecIndex,
T: ComputedVecValue + PartialOrd + JsonSchema,
S1T: ComputedVecValue;
impl<I, T, S1T> LazyTransformSum<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub(crate) fn from_boxed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
sum_source: ReadableBoxedVec<I, S1T>,
) -> Self {
Self(LazyVecFrom1::transformed::<F>(name, version, sum_source))
}
}

View File

@@ -4,14 +4,14 @@ mod group;
mod height;
mod lazy;
mod lazy_transform;
mod rolling;
mod transform;
mod tx;
mod vec;
pub use group::*;
pub use height::*;
pub use lazy::*;
pub use lazy_transform::*;
pub use rolling::*;
pub use transform::*;
pub use tx::*;
pub use vec::*;

View File

@@ -0,0 +1,131 @@
//! RollingDistribution - 8 distribution stats, each a RollingWindows.
//!
//! Computes average, min, max, p10, p25, median, p75, p90 rolling windows
//! from a single source vec.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedVecValue, DistributionStats, NumericValue, RollingWindows, WindowStarts},
traits::compute_rolling_percentiles_from_starts,
};
/// 8 distribution stats × 4 windows = 32 stored height vecs, each with 17 index views.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingDistribution<T, M: StorageMode = Rw>(
// One `RollingWindows` (24h/7d/30d/1y) per distribution stat.
pub DistributionStats<RollingWindows<T, M>>,
)
where
T: ComputedVecValue + PartialOrd + JsonSchema;
// Version tag mixed into every stored vec's version.
const VERSION: Version = Version::ZERO;
impl<T> RollingDistribution<T>
where
T: NumericValue + JsonSchema,
{
/// Create (or re-open) all 32 stored vecs, one `RollingWindows` per stat,
/// each named `{name}_{stat}`.
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self(DistributionStats {
average: RollingWindows::forced_import(db, &format!("{name}_average"), v, indexes)?,
min: RollingWindows::forced_import(db, &format!("{name}_min"), v, indexes)?,
max: RollingWindows::forced_import(db, &format!("{name}_max"), v, indexes)?,
p10: RollingWindows::forced_import(db, &format!("{name}_p10"), v, indexes)?,
p25: RollingWindows::forced_import(db, &format!("{name}_p25"), v, indexes)?,
median: RollingWindows::forced_import(db, &format!("{name}_median"), v, indexes)?,
p75: RollingWindows::forced_import(db, &format!("{name}_p75"), v, indexes)?,
p90: RollingWindows::forced_import(db, &format!("{name}_p90"), v, indexes)?,
}))
}
/// Compute all 8 distribution stats across all 4 windows from a single source.
///
/// - average: running sum / count (O(n) per window)
/// - min/max: deque-based (O(n) amortized per window)
/// - p10/p25/median/p75/p90: single-pass sorted vec per window
pub(crate) fn compute_distribution(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
source: &impl ReadableVec<Height, T>,
exit: &Exit,
) -> Result<()>
where
T: Copy + Ord + From<f64> + Default,
f64: From<T>,
{
// Average: O(n) per window using running sum
self.0
.average
.compute_rolling_average(max_from, windows, source, exit)?;
// Min/Max: O(n) amortized per window using deques
self.0
.min
.compute_rolling_min(max_from, windows, source, exit)?;
self.0
.max
.compute_rolling_max(max_from, windows, source, exit)?;
// Percentiles + median: single-pass per window using sorted vec.
// One call per window: each call fills the five percentile height vecs
// for that window in a single shared pass over `source`.
compute_rolling_percentiles_from_starts(
max_from,
windows._24h,
source,
&mut self.0.p10._24h.height,
&mut self.0.p25._24h.height,
&mut self.0.median._24h.height,
&mut self.0.p75._24h.height,
&mut self.0.p90._24h.height,
exit,
)?;
compute_rolling_percentiles_from_starts(
max_from,
windows._7d,
source,
&mut self.0.p10._7d.height,
&mut self.0.p25._7d.height,
&mut self.0.median._7d.height,
&mut self.0.p75._7d.height,
&mut self.0.p90._7d.height,
exit,
)?;
compute_rolling_percentiles_from_starts(
max_from,
windows._30d,
source,
&mut self.0.p10._30d.height,
&mut self.0.p25._30d.height,
&mut self.0.median._30d.height,
&mut self.0.p75._30d.height,
&mut self.0.p90._30d.height,
exit,
)?;
compute_rolling_percentiles_from_starts(
max_from,
windows._1y,
source,
&mut self.0.p10._1y.height,
&mut self.0.p25._1y.height,
&mut self.0.median._1y.height,
&mut self.0.p75._1y.height,
&mut self.0.p90._1y.height,
exit,
)?;
Ok(())
}
}

View File

@@ -0,0 +1,69 @@
//! RollingFull - Sum + Distribution per rolling window.
//!
//! 36 stored height vecs per metric (4 sum + 32 distribution), each with 17 index views.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedVecValue, NumericValue, RollingDistribution, RollingWindows, WindowStarts},
};
/// Sum (4 windows) + Distribution (8 stats × 4 windows) = 36 stored height vecs.
#[derive(Traversable)]
#[traversable(merge)]
pub struct RollingFull<T, M: StorageMode = Rw>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
{
// Rolling sums, one vec per window (24h/7d/30d/1y).
#[traversable(flatten)]
pub sum: RollingWindows<T, M>,
// The 8 rolling distribution stats (average/min/max/percentiles).
#[traversable(flatten)]
pub distribution: RollingDistribution<T, M>,
}
// Version tag mixed into every stored vec's version.
const VERSION: Version = Version::ZERO;
impl<T> RollingFull<T>
where
T: NumericValue + JsonSchema,
{
/// Create (or re-open) the sum windows (`{name}_sum_*`) plus the
/// distribution stats (which add their own stat suffixes to `name`).
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
Ok(Self {
sum: RollingWindows::forced_import(db, &format!("{name}_sum"), v, indexes)?,
distribution: RollingDistribution::forced_import(db, name, v, indexes)?,
})
}
/// Compute rolling sum + all 8 distribution stats across all 4 windows.
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
source: &impl ReadableVec<Height, T>,
exit: &Exit,
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
f64: From<T>,
{
// Sum first, then the distribution stats; both read the same source.
self.sum
.compute_rolling_sum(max_from, windows, source, exit)?;
self.distribution
.compute_distribution(max_from, windows, source, exit)?;
Ok(())
}
}

View File

@@ -0,0 +1,9 @@
//! Rolling-window internals: distribution stats, full (sum + distribution),
//! stored-value windows, and the shared window containers.
mod distribution;
mod full;
mod value_windows;
mod windows;
pub use distribution::*;
pub use full::*;
pub use value_windows::*;
pub use windows::*;

View File

@@ -0,0 +1,89 @@
//! StoredValueRollingWindows - window-first ordering.
//!
//! Access pattern: `coinbase_sum._24h.sats.height`
//! Each window (24h, 7d, 30d, 1y) contains sats (stored) + btc (lazy) + usd (stored).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use brk_types::{Dollars, Sats};
use crate::{
indexes,
internal::{StoredValueFromHeightLast, WindowStarts, Windows},
};
/// Local schema version for this type; combined with the caller-supplied
/// version in `forced_import` (`version + VERSION`).
const VERSION: Version = Version::ZERO;
/// Stored value rolling windows — window-first, currency-last.
///
/// Each window contains `StoredValueFromHeightLast` (sats + btc lazy + usd).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct StoredValueRollingWindows<M: StorageMode = Rw>(
    /// One `StoredValueFromHeightLast` per window (24h / 7d / 30d / 1y).
    pub Windows<StoredValueFromHeightLast<M>>,
);
impl StoredValueRollingWindows {
    /// Open (or create) the per-window stored-value vecs under `name`.
    ///
    /// Each window's vecs are stored with the window label appended:
    /// `{name}_24h`, `{name}_7d`, `{name}_30d`, `{name}_1y`.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        // One helper instead of four near-identical call sites; only the
        // window suffix varies between windows.
        let import = |suffix: &str| {
            StoredValueFromHeightLast::forced_import(db, &format!("{name}_{suffix}"), v, indexes)
        };
        Ok(Self(Windows {
            _24h: import("24h")?,
            _7d: import("7d")?,
            _30d: import("30d")?,
            _1y: import("1y")?,
        }))
    }

    /// Compute the rolling sum for every window starting at `max_from`,
    /// reading sats and USD values from the height-indexed sources.
    ///
    /// Windows are processed in order (24h, 7d, 30d, 1y); the first error
    /// aborts the remaining windows.
    pub(crate) fn compute_rolling_sum(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        sats_source: &impl ReadableVec<Height, Sats>,
        usd_source: &impl ReadableVec<Height, Dollars>,
        exit: &Exit,
    ) -> Result<()> {
        self.0
            ._24h
            .compute_rolling_sum(max_from, windows._24h, sats_source, usd_source, exit)?;
        self.0
            ._7d
            .compute_rolling_sum(max_from, windows._7d, sats_source, usd_source, exit)?;
        self.0
            ._30d
            .compute_rolling_sum(max_from, windows._30d, sats_source, usd_source, exit)?;
        self.0
            ._1y
            .compute_rolling_sum(max_from, windows._1y, sats_source, usd_source, exit)?;
        Ok(())
    }
}

Some files were not shown because too many files have changed in this diff Show More