global: snapshot

This commit is contained in:
nym21
2026-03-26 15:57:22 +01:00
parent 6d3307c0df
commit 18bb4186a8
72 changed files with 2013 additions and 1150 deletions

View File

@@ -17,12 +17,10 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
// Sequential: time → lookback (dependency chain)
self.time
.timestamp
.compute(indexer, indexes, starting_indexes, exit)?;
self.lookback
.compute(&self.time, starting_indexes, exit)?;
self.db.sync_bg_tasks()?;
// lookback depends on indexes.timestamp.monotonic
self.lookback.compute(indexes, starting_indexes, exit)?;
// Parallel: remaining sub-modules are independent of each other.
// size depends on lookback (already computed above).
@@ -52,8 +50,11 @@ impl Vecs {
Ok(())
})?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -10,7 +10,7 @@ use crate::{
};
use super::{
CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, LookbackVecs, SizeVecs, TimeVecs, Vecs,
CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, LookbackVecs, SizeVecs, Vecs,
WeightVecs,
};
@@ -30,7 +30,6 @@ impl Vecs {
let interval = IntervalVecs::forced_import(&db, version, indexes, cached_starts)?;
let size = SizeVecs::forced_import(&db, version, indexes, cached_starts)?;
let weight = WeightVecs::forced_import(&db, version, indexes, cached_starts, &size)?;
let time = TimeVecs::forced_import(&db, version, indexes)?;
let difficulty = DifficultyVecs::forced_import(&db, version, indexer, indexes)?;
let halving = HalvingVecs::forced_import(&db, version, indexes)?;
@@ -41,7 +40,6 @@ impl Vecs {
interval,
size,
weight,
time,
difficulty,
halving,
};

View File

@@ -3,9 +3,10 @@ use brk_traversable::Traversable;
use brk_types::{Height, Indexes, Timestamp, Version};
use vecdb::{AnyVec, CachedVec, Cursor, Database, EagerVec, Exit, ImportableVec, PcoVec, ReadableVec, Rw, StorageMode, VecIndex};
use crate::internal::{CachedWindowStarts, Windows, WindowStarts};
use super::time;
use crate::{
indexes,
internal::{CachedWindowStarts, Windows, WindowStarts},
};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
@@ -178,71 +179,71 @@ impl Vecs {
pub(crate) fn compute(
&mut self,
time: &time::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.compute_rolling_start_hours(time, starting_indexes, exit, 1, |s| {
self.compute_rolling_start_hours(indexes, starting_indexes, exit, 1, |s| {
&mut s._1h
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1, |s| &mut s._24h)?;
self.compute_rolling_start(time, starting_indexes, exit, 3, |s| &mut s._3d)?;
self.compute_rolling_start(time, starting_indexes, exit, 7, |s| &mut s._1w)?;
self.compute_rolling_start(time, starting_indexes, exit, 8, |s| &mut s._8d)?;
self.compute_rolling_start(time, starting_indexes, exit, 9, |s| &mut s._9d)?;
self.compute_rolling_start(time, starting_indexes, exit, 12, |s| &mut s._12d)?;
self.compute_rolling_start(time, starting_indexes, exit, 13, |s| &mut s._13d)?;
self.compute_rolling_start(time, starting_indexes, exit, 14, |s| &mut s._2w)?;
self.compute_rolling_start(time, starting_indexes, exit, 21, |s| &mut s._21d)?;
self.compute_rolling_start(time, starting_indexes, exit, 26, |s| &mut s._26d)?;
self.compute_rolling_start(time, starting_indexes, exit, 30, |s| &mut s._1m)?;
self.compute_rolling_start(time, starting_indexes, exit, 34, |s| &mut s._34d)?;
self.compute_rolling_start(time, starting_indexes, exit, 55, |s| &mut s._55d)?;
self.compute_rolling_start(time, starting_indexes, exit, 60, |s| &mut s._2m)?;
self.compute_rolling_start(time, starting_indexes, exit, 63, |s| &mut s._9w)?;
self.compute_rolling_start(time, starting_indexes, exit, 84, |s| &mut s._12w)?;
self.compute_rolling_start(time, starting_indexes, exit, 89, |s| &mut s._89d)?;
self.compute_rolling_start(time, starting_indexes, exit, 90, |s| &mut s._3m)?;
self.compute_rolling_start(time, starting_indexes, exit, 98, |s| &mut s._14w)?;
self.compute_rolling_start(time, starting_indexes, exit, 111, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 1, |s| &mut s._24h)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 3, |s| &mut s._3d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 7, |s| &mut s._1w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 8, |s| &mut s._8d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 9, |s| &mut s._9d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 12, |s| &mut s._12d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 13, |s| &mut s._13d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 14, |s| &mut s._2w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 21, |s| &mut s._21d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 26, |s| &mut s._26d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 30, |s| &mut s._1m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 34, |s| &mut s._34d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 55, |s| &mut s._55d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 60, |s| &mut s._2m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 63, |s| &mut s._9w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 84, |s| &mut s._12w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 89, |s| &mut s._89d)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 90, |s| &mut s._3m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 98, |s| &mut s._14w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 111, |s| {
&mut s._111d
})?;
self.compute_rolling_start(time, starting_indexes, exit, 144, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 144, |s| {
&mut s._144d
})?;
self.compute_rolling_start(time, starting_indexes, exit, 180, |s| &mut s._6m)?;
self.compute_rolling_start(time, starting_indexes, exit, 182, |s| &mut s._26w)?;
self.compute_rolling_start(time, starting_indexes, exit, 200, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 180, |s| &mut s._6m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 182, |s| &mut s._26w)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 200, |s| {
&mut s._200d
})?;
self.compute_rolling_start(time, starting_indexes, exit, 270, |s| &mut s._9m)?;
self.compute_rolling_start(time, starting_indexes, exit, 350, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 270, |s| &mut s._9m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 350, |s| {
&mut s._350d
})?;
self.compute_rolling_start(time, starting_indexes, exit, 360, |s| &mut s._12m)?;
self.compute_rolling_start(time, starting_indexes, exit, 365, |s| &mut s._1y)?;
self.compute_rolling_start(time, starting_indexes, exit, 420, |s| &mut s._14m)?;
self.compute_rolling_start(time, starting_indexes, exit, 730, |s| &mut s._2y)?;
self.compute_rolling_start(time, starting_indexes, exit, 780, |s| &mut s._26m)?;
self.compute_rolling_start(time, starting_indexes, exit, 1095, |s| &mut s._3y)?;
self.compute_rolling_start(time, starting_indexes, exit, 1400, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 360, |s| &mut s._12m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 365, |s| &mut s._1y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 420, |s| &mut s._14m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 730, |s| &mut s._2y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 780, |s| &mut s._26m)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 1095, |s| &mut s._3y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 1400, |s| {
&mut s._200w
})?;
self.compute_rolling_start(time, starting_indexes, exit, 1460, |s| &mut s._4y)?;
self.compute_rolling_start(time, starting_indexes, exit, 1825, |s| &mut s._5y)?;
self.compute_rolling_start(time, starting_indexes, exit, 2190, |s| &mut s._6y)?;
self.compute_rolling_start(time, starting_indexes, exit, 2920, |s| &mut s._8y)?;
self.compute_rolling_start(time, starting_indexes, exit, 3285, |s| &mut s._9y)?;
self.compute_rolling_start(time, starting_indexes, exit, 3650, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 1460, |s| &mut s._4y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 1825, |s| &mut s._5y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 2190, |s| &mut s._6y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 2920, |s| &mut s._8y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 3285, |s| &mut s._9y)?;
self.compute_rolling_start(indexes, starting_indexes, exit, 3650, |s| {
&mut s._10y
})?;
self.compute_rolling_start(time, starting_indexes, exit, 4380, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 4380, |s| {
&mut s._12y
})?;
self.compute_rolling_start(time, starting_indexes, exit, 5110, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 5110, |s| {
&mut s._14y
})?;
self.compute_rolling_start(time, starting_indexes, exit, 9490, |s| {
self.compute_rolling_start(indexes, starting_indexes, exit, 9490, |s| {
&mut s._26y
})?;
@@ -251,7 +252,7 @@ impl Vecs {
fn compute_rolling_start<F>(
&mut self,
time: &time::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
days: usize,
@@ -260,14 +261,14 @@ impl Vecs {
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
self.compute_rolling_start_inner(time, starting_indexes, exit, get_field, |t, prev_ts| {
self.compute_rolling_start_inner(indexes, starting_indexes, exit, get_field, |t, prev_ts| {
t.difference_in_days_between(prev_ts) >= days
})
}
fn compute_rolling_start_hours<F>(
&mut self,
time: &time::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
hours: usize,
@@ -276,14 +277,14 @@ impl Vecs {
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
self.compute_rolling_start_inner(time, starting_indexes, exit, get_field, |t, prev_ts| {
self.compute_rolling_start_inner(indexes, starting_indexes, exit, get_field, |t, prev_ts| {
t.difference_in_hours_between(prev_ts) >= hours
})
}
fn compute_rolling_start_inner<F, D>(
&mut self,
time: &time::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
get_field: F,
@@ -300,12 +301,12 @@ impl Vecs {
} else {
Height::ZERO
};
let mut cursor = Cursor::new(&time.timestamp_monotonic);
let mut cursor = Cursor::new(&indexes.timestamp.monotonic);
cursor.advance(prev.to_usize());
let mut prev_ts = cursor.next().unwrap();
Ok(field.compute_transform(
starting_indexes.height,
&time.timestamp_monotonic,
&indexes.timestamp.monotonic,
|(h, t, ..)| {
while expired(t, prev_ts) {
prev.increment();

View File

@@ -4,7 +4,6 @@ pub mod halving;
pub mod interval;
pub mod lookback;
pub mod size;
pub mod time;
pub mod weight;
mod compute;
@@ -19,7 +18,6 @@ pub use halving::Vecs as HalvingVecs;
pub use interval::Vecs as IntervalVecs;
pub use lookback::Vecs as LookbackVecs;
pub use size::Vecs as SizeVecs;
pub use time::Vecs as TimeVecs;
pub use weight::Vecs as WeightVecs;
pub const DB_NAME: &str = "blocks";
@@ -37,7 +35,7 @@ pub(crate) const ONE_TERA_HASH: f64 = 1_000_000_000_000.0;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(skip)]
pub(crate) db: Database,
pub db: Database,
pub count: CountVecs<M>,
pub lookback: LookbackVecs<M>,
@@ -46,7 +44,6 @@ pub struct Vecs<M: StorageMode = Rw> {
pub size: SizeVecs<M>,
#[traversable(flatten)]
pub weight: WeightVecs<M>,
pub time: TimeVecs<M>,
pub difficulty: DifficultyVecs<M>,
pub halving: HalvingVecs<M>,
}

View File

@@ -1,34 +0,0 @@
use brk_error::Result;
use brk_indexer::Indexer;
use vecdb::{Exit, ReadableVec};
use super::Vecs;
impl Vecs {
/// Build the monotonic timestamp series from the raw indexer block timestamps.
///
/// Raw block timestamps can decrease from one height to the next, so each
/// stored value is clamped to `max(previous, current)`, making the series
/// non-decreasing. When resuming (`starting_height > 0`), the running maximum
/// is seeded from the value already stored at the previous height.
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
starting_height: brk_types::Height,
exit: &Exit,
) -> Result<()> {
// Running maximum; None until seeded by the first block or the resume point.
let mut prev_timestamp_monotonic = None;
self.timestamp_monotonic.compute_transform(
starting_height,
&indexer.vecs.blocks.timestamp,
|(h, timestamp, this)| {
// On resume, seed from the already-stored value at the prior height.
if prev_timestamp_monotonic.is_none()
&& let Some(prev_h) = h.decremented()
{
prev_timestamp_monotonic.replace(this.collect_one(prev_h).unwrap());
}
// Clamp so the stored series never decreases.
let timestamp_monotonic =
prev_timestamp_monotonic.map_or(timestamp, |prev_d| prev_d.max(timestamp));
prev_timestamp_monotonic.replace(timestamp_monotonic);
(h, timestamp_monotonic)
},
exit,
)?;
Ok(())
}
}

View File

@@ -1,66 +0,0 @@
use brk_error::Result;
use brk_types::{Date, Height, Version};
use vecdb::{Database, EagerVec, ImportableVec, LazyVecFrom1, ReadableCloneableVec};
use super::{TimestampIndexes, Vecs};
use crate::indexes;
impl Vecs {
/// Import (or create) this module's vectors from the database.
///
/// `timestamp_monotonic` is an eager, stored vector; `date` is a lazy view
/// derived from it (`Date::from(timestamp)`) and needs no storage of its own.
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let timestamp_monotonic = EagerVec::forced_import(db, "timestamp_monotonic", version)?;
Ok(Self {
// Lazy height→date mapping backed by the monotonic timestamp vector.
date: LazyVecFrom1::init(
"date",
version,
timestamp_monotonic.read_only_boxed_clone(),
|_height: Height, timestamp| Date::from(timestamp),
),
timestamp_monotonic,
timestamp: TimestampIndexes::forced_import(db, version, indexes)?,
})
}
}
impl TimestampIndexes {
/// Build one timestamp vector per resolution.
///
/// Time-based periods are lazy views keyed off each period's `first_height`
/// vector (`idx.to_timestamp()` is pure); the halving/epoch periods are
/// eager vectors stored in the database and filled in later.
fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
// Lazy period: timestamp computed purely from the index itself.
macro_rules! period {
($field:ident) => {
LazyVecFrom1::init(
"timestamp",
version,
indexes.$field.first_height.read_only_boxed_clone(),
|idx, _: Height| idx.to_timestamp(),
)
};
}
// Eager epoch-based period: stored vector, populated by `compute`.
macro_rules! epoch {
($field:ident) => {
ImportableVec::forced_import(db, "timestamp", version)?
};
}
Ok(Self(crate::internal::PerResolution {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halving: epoch!(halving),
// NOTE(review): `epoch!` ignores its `$field` argument, so `difficulty`
// here is documentation only — both eager vecs share the name "timestamp".
epoch: epoch!(difficulty),
}))
}
}

View File

@@ -1,5 +0,0 @@
mod compute;
mod import;
mod vecs;
pub use vecs::{TimestampIndexes, Vecs};

View File

@@ -1,80 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Date, Day1, Day3, Epoch, Halving, Height, Hour1, Hour4, Hour12, Indexes,
Minute10, Minute30, Month1, Month3, Month6, Timestamp, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use vecdb::{EagerVec, Exit, LazyVecFrom1, PcoVec, ReadableVec, Rw, StorageMode};
use crate::{indexes, internal::PerResolution};
/// Time-related vectors keyed by block height.
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
// Lazy height→date view derived from `timestamp_monotonic`.
pub date: LazyVecFrom1<Height, Date, Height, Timestamp>,
// Stored, non-decreasing timestamp per block height.
pub timestamp_monotonic: M::Stored<EagerVec<PcoVec<Height, Timestamp>>>,
// Per-resolution timestamp lookups (see `TimestampIndexes`).
pub timestamp: TimestampIndexes<M>,
}
/// Per-period timestamp indexes.
///
/// Time-based periods (minute10–year10) are lazy: `idx.to_timestamp()` is a pure
/// function of the index, so no storage or decompression is needed.
/// Epoch-based periods (halving, difficulty) are eager: their timestamps
/// come from block data via `compute_indirect_sequential`.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct TimestampIndexes<M: StorageMode = Rw>(
// One timestamp vector per resolution: the thirteen time-based periods
// (minute10 through year10) are lazy views; the final two slots
// (halving, epoch) are eager stored vectors.
#[allow(clippy::type_complexity)]
pub PerResolution<
LazyVecFrom1<Minute10, Timestamp, Minute10, Height>,
LazyVecFrom1<Minute30, Timestamp, Minute30, Height>,
LazyVecFrom1<Hour1, Timestamp, Hour1, Height>,
LazyVecFrom1<Hour4, Timestamp, Hour4, Height>,
LazyVecFrom1<Hour12, Timestamp, Hour12, Height>,
LazyVecFrom1<Day1, Timestamp, Day1, Height>,
LazyVecFrom1<Day3, Timestamp, Day3, Height>,
LazyVecFrom1<Week1, Timestamp, Week1, Height>,
LazyVecFrom1<Month1, Timestamp, Month1, Height>,
LazyVecFrom1<Month3, Timestamp, Month3, Height>,
LazyVecFrom1<Month6, Timestamp, Month6, Height>,
LazyVecFrom1<Year1, Timestamp, Year1, Height>,
LazyVecFrom1<Year10, Timestamp, Year10, Height>,
M::Stored<EagerVec<PcoVec<Halving, Timestamp>>>,
M::Stored<EagerVec<PcoVec<Epoch, Timestamp>>>,
>,
);
impl TimestampIndexes {
/// Compute epoch timestamps via indirect lookup from block timestamps.
/// Time-based periods are lazy (idx.to_timestamp()) and need no compute.
pub(crate) fn compute(
&mut self,
indexer: &brk_indexer::Indexer,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
// Resume point: look up the halving/epoch index of the block just before
// the starting height (defaults cover the genesis / from-scratch case).
let prev_height = starting_indexes.height.decremented().unwrap_or_default();
self.halving.compute_indirect_sequential(
indexes
.height
.halving
.collect_one(prev_height)
.unwrap_or_default(),
&indexes.halving.first_height,
&indexer.vecs.blocks.timestamp,
exit,
)?;
self.epoch.compute_indirect_sequential(
indexes
.height
.epoch
.collect_one(prev_height)
.unwrap_or_default(),
&indexes.epoch.first_height,
&indexer.vecs.blocks.timestamp,
exit,
)?;
Ok(())
}
}

View File

@@ -17,6 +17,8 @@ impl Vecs {
distribution: &distribution::Vecs,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// Activity computes first (liveliness, vaultedness, etc.)
self.activity
.compute(starting_indexes, distribution, exit)?;
@@ -80,8 +82,11 @@ impl Vecs {
r3?;
r4?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -227,6 +227,8 @@ impl Vecs {
starting_indexes: &mut Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// 1. Find minimum height we have data for across stateful vecs
let current_height = Height::from(self.supply_state.len());
let min_stateful = self.min_stateful_len();
@@ -300,7 +302,7 @@ impl Vecs {
.cents
.height
.len()
.min(blocks.time.timestamp_monotonic.len());
.min(indexes.timestamp.monotonic.len());
let cache_current_len = self.caches.prices.len();
if cache_target_len < cache_current_len {
self.caches.prices.truncate(cache_target_len);
@@ -312,9 +314,9 @@ impl Vecs {
.cents
.height
.collect_range_at(cache_current_len, cache_target_len);
let new_timestamps = blocks
.time
.timestamp_monotonic
let new_timestamps = indexes
.timestamp
.monotonic
.collect_range_at(cache_current_len, cache_target_len);
self.caches.prices.extend(new_prices);
self.caches.timestamps.extend(new_timestamps);
@@ -499,8 +501,11 @@ impl Vecs {
self.addr_cohorts
.compute_rest_part2(prices, starting_indexes, &all_utxo_count, exit)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}

View File

@@ -13,6 +13,7 @@ mod minute30;
mod month1;
mod month3;
mod month6;
pub mod timestamp;
mod tx_index;
mod txin_index;
mod txout_index;
@@ -31,13 +32,11 @@ use brk_types::{
};
use vecdb::{CachedVec, Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
blocks,
internal::db_utils::{finalize_db, open_db},
};
use crate::internal::db_utils::{finalize_db, open_db};
pub use cached_mappings::CachedMappings;
pub use addr::Vecs as AddrVecs;
pub use timestamp::Timestamps;
pub use cached_mappings::CachedMappings;
pub use day1::Vecs as Day1Vecs;
pub use day3::Vecs as Day3Vecs;
pub use epoch::Vecs as EpochVecs;
@@ -85,6 +84,7 @@ pub struct Vecs<M: StorageMode = Rw> {
pub tx_index: TxIndexVecs<M>,
pub txin_index: TxInIndexVecs,
pub txout_index: TxOutIndexVecs,
pub timestamp: Timestamps<M>,
}
impl Vecs {
@@ -136,6 +136,11 @@ impl Vecs {
epoch_identity: CachedVec::new(&epoch.identity),
};
let timestamp = Timestamps::forced_import_from_locals(
&db, version, &minute10, &minute30, &hour1, &hour4, &hour12, &day1, &day3, &week1,
&month1, &month3, &month6, &year1, &year10,
)?;
let this = Self {
cached_mappings,
addr,
@@ -158,6 +163,7 @@ impl Vecs {
tx_index,
txin_index,
txout_index,
timestamp,
db,
};
@@ -168,36 +174,24 @@ impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
blocks: &mut blocks::Vecs,
starting_indexes: Indexes,
exit: &Exit,
) -> Result<Indexes> {
blocks
.time
.compute(indexer, starting_indexes.height, exit)?;
let indexes = self.compute_(indexer, &blocks.time, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;
Ok(indexes)
}
self.db.sync_bg_tasks()?;
// timestamp_monotonic must be computed first — other mappings read it
self.timestamp
.compute_monotonic(indexer, starting_indexes.height, exit)?;
fn compute_(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: Indexes,
exit: &Exit,
) -> Result<Indexes> {
self.compute_tx_indexes(indexer, &starting_indexes, exit)?;
self.compute_height_indexes(indexer, &starting_indexes, exit)?;
let prev_height = starting_indexes.height.decremented().unwrap_or_default();
self.compute_timestamp_mappings(blocks_time, &starting_indexes, exit)?;
self.compute_timestamp_mappings(&starting_indexes, exit)?;
let starting_day1 = self.compute_calendar_mappings(
indexer,
blocks_time,
&starting_indexes,
prev_height,
exit,
@@ -205,13 +199,26 @@ impl Vecs {
self.compute_period_vecs(
indexer,
blocks_time,
&starting_indexes,
prev_height,
starting_day1,
exit,
)?;
self.timestamp.compute_per_resolution(
indexer,
&self.height,
&self.halving,
&self.epoch,
&starting_indexes,
exit,
)?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(starting_indexes)
}
@@ -266,7 +273,6 @@ impl Vecs {
fn compute_timestamp_mappings(
&mut self,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
@@ -274,7 +280,7 @@ impl Vecs {
($field:ident, $period:ty) => {
self.height.$field.compute_transform(
starting_indexes.height,
&blocks_time.timestamp_monotonic,
&self.timestamp.monotonic,
|(h, ts, _)| (h, <$period>::from_timestamp(ts)),
exit,
)?;
@@ -294,7 +300,6 @@ impl Vecs {
fn compute_calendar_mappings(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
exit: &Exit,
@@ -307,8 +312,8 @@ impl Vecs {
self.height.day1.compute_transform(
starting_indexes.height,
&blocks_time.date,
|(h, d, ..)| (h, Day1::try_from(d).unwrap()),
&self.timestamp.monotonic,
|(h, ts, ..)| (h, Day1::try_from(Date::from(ts)).unwrap()),
exit,
)?;
@@ -318,7 +323,7 @@ impl Vecs {
starting_day1
};
self.compute_epoch(indexer, blocks_time, starting_indexes, prev_height, exit)?;
self.compute_epoch(indexer, starting_indexes, prev_height, exit)?;
self.height.week1.compute_transform(
starting_indexes.height,
@@ -363,7 +368,6 @@ impl Vecs {
fn compute_epoch(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
exit: &Exit,
@@ -389,14 +393,12 @@ impl Vecs {
&self.epoch.first_height,
exit,
)?;
self.epoch
.height_count
.compute_count_from_indexes(
starting_difficulty,
&self.epoch.first_height,
&blocks_time.date,
exit,
)?;
self.epoch.height_count.compute_count_from_indexes(
starting_difficulty,
&self.epoch.first_height,
&self.timestamp.monotonic,
exit,
)?;
let starting_halving = self
.height
@@ -426,7 +428,6 @@ impl Vecs {
fn compute_period_vecs(
&mut self,
indexer: &Indexer,
blocks_time: &blocks::time::Vecs,
starting_indexes: &Indexes,
prev_height: Height,
starting_day1: Day1,
@@ -478,7 +479,7 @@ impl Vecs {
exit,
)?;
let date = &blocks_time.date;
let ts = &self.timestamp.monotonic;
macro_rules! dated_period {
($period:ident) => {{
@@ -500,7 +501,7 @@ impl Vecs {
self.$period.date.compute_transform(
start,
&self.$period.first_height,
|(idx, first_h, _)| (idx, date.collect_one(first_h).unwrap()),
|(idx, first_h, _)| (idx, Date::from(ts.collect_one(first_h).unwrap())),
exit,
)?;
}};

View File

@@ -0,0 +1,150 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, Epoch, Halving, Height, Hour1, Hour4, Hour12, Indexes, Minute10, Minute30, Month1,
Month3, Month6, Timestamp, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use vecdb::{
Database, EagerVec, Exit, ImportableVec, LazyVecFrom1, PcoVec, ReadableCloneableVec,
ReadableVec, Rw, StorageMode, Version,
};
use crate::internal::PerResolution;
/// Timestamps: monotonic height→timestamp + per-period timestamp lookups.
///
/// Time-based periods (minute10–year10) are lazy: `idx.to_timestamp()` is a pure
/// function of the index, so no storage or decompression is needed.
/// Epoch-based periods (halving, difficulty) are eager: their timestamps
/// come from block data via `compute_indirect_sequential`.
#[derive(Deref, DerefMut, Traversable)]
pub struct Timestamps<M: StorageMode = Rw> {
// Stored, non-decreasing timestamp per block height.
pub monotonic: M::Stored<EagerVec<PcoVec<Height, Timestamp>>>,
#[deref]
#[deref_mut]
#[traversable(flatten)]
#[allow(clippy::type_complexity)]
// Per-resolution lookups: thirteen lazy time-based views followed by the
// two eager epoch-based vectors (halving, epoch).
pub resolutions: PerResolution<
LazyVecFrom1<Minute10, Timestamp, Minute10, Height>,
LazyVecFrom1<Minute30, Timestamp, Minute30, Height>,
LazyVecFrom1<Hour1, Timestamp, Hour1, Height>,
LazyVecFrom1<Hour4, Timestamp, Hour4, Height>,
LazyVecFrom1<Hour12, Timestamp, Hour12, Height>,
LazyVecFrom1<Day1, Timestamp, Day1, Height>,
LazyVecFrom1<Day3, Timestamp, Day3, Height>,
LazyVecFrom1<Week1, Timestamp, Week1, Height>,
LazyVecFrom1<Month1, Timestamp, Month1, Height>,
LazyVecFrom1<Month3, Timestamp, Month3, Height>,
LazyVecFrom1<Month6, Timestamp, Month6, Height>,
LazyVecFrom1<Year1, Timestamp, Year1, Height>,
LazyVecFrom1<Year10, Timestamp, Year10, Height>,
M::Stored<EagerVec<PcoVec<Halving, Timestamp>>>,
M::Stored<EagerVec<PcoVec<Epoch, Timestamp>>>,
>,
}
impl Timestamps {
/// Import (or create) all timestamp vectors, wiring each lazy per-period view
/// to its sibling module's `first_height` vector.
///
/// The halving/epoch slots are eager stored vectors, later populated by
/// `compute_per_resolution`.
#[allow(clippy::too_many_arguments)]
pub(crate) fn forced_import_from_locals(
db: &Database,
version: Version,
minute10: &super::Minute10Vecs,
minute30: &super::Minute30Vecs,
hour1: &super::Hour1Vecs,
hour4: &super::Hour4Vecs,
hour12: &super::Hour12Vecs,
day1: &super::Day1Vecs,
day3: &super::Day3Vecs,
week1: &super::Week1Vecs,
month1: &super::Month1Vecs,
month3: &super::Month3Vecs,
month6: &super::Month6Vecs,
year1: &super::Year1Vecs,
year10: &super::Year10Vecs,
) -> Result<Self> {
let monotonic = EagerVec::forced_import(db, "timestamp_monotonic", version)?;
// Lazy period: timestamp computed purely from the index itself, keyed off
// the period's `first_height` vector.
macro_rules! period {
($field:ident) => {
LazyVecFrom1::init(
"timestamp",
version,
$field.first_height.read_only_boxed_clone(),
|idx, _: Height| idx.to_timestamp(),
)
};
}
Ok(Self {
monotonic,
resolutions: PerResolution {
minute10: period!(minute10),
minute30: period!(minute30),
hour1: period!(hour1),
hour4: period!(hour4),
hour12: period!(hour12),
day1: period!(day1),
day3: period!(day3),
week1: period!(week1),
month1: period!(month1),
month3: period!(month3),
month6: period!(month6),
year1: period!(year1),
year10: period!(year10),
halving: ImportableVec::forced_import(db, "timestamp", version)?,
epoch: ImportableVec::forced_import(db, "timestamp", version)?,
},
})
}
/// Build the monotonic timestamp series from raw indexer block timestamps.
///
/// Each value is clamped to `max(previous, current)` so the series never
/// decreases; on resume the running maximum is seeded from the value already
/// stored at the previous height.
pub(crate) fn compute_monotonic(
&mut self,
indexer: &brk_indexer::Indexer,
starting_height: Height,
exit: &Exit,
) -> Result<()> {
// Running maximum; None until seeded by the first block or the resume point.
let mut prev = None;
self.monotonic.compute_transform(
starting_height,
&indexer.vecs.blocks.timestamp,
|(h, timestamp, this)| {
// On resume, seed from the already-stored value at the prior height.
if prev.is_none()
&& let Some(prev_h) = h.decremented()
{
prev.replace(this.collect_one(prev_h).unwrap());
}
let monotonic = prev.map_or(timestamp, |p| p.max(timestamp));
prev.replace(monotonic);
(h, monotonic)
},
exit,
)?;
Ok(())
}
/// Fill the eager halving/epoch timestamp vectors via indirect lookup from
/// block timestamps. Time-based periods are lazy and need no compute.
pub(crate) fn compute_per_resolution(
&mut self,
indexer: &brk_indexer::Indexer,
height: &super::HeightVecs,
halving_vecs: &super::HalvingVecs,
epoch_vecs: &super::EpochVecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
// Resume point: halving/epoch index of the block just before the start.
let prev_height = starting_indexes.height.decremented().unwrap_or_default();
self.resolutions.halving.compute_indirect_sequential(
height.halving.collect_one(prev_height).unwrap_or_default(),
&halving_vecs.first_height,
&indexer.vecs.blocks.timestamp,
exit,
)?;
self.resolutions.epoch.compute_indirect_sequential(
height.epoch.collect_one(prev_height).unwrap_or_default(),
&epoch_vecs.first_height,
&indexer.vecs.blocks.timestamp,
exit,
)?;
Ok(())
}
}

View File

@@ -16,6 +16,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// Puell Multiple: daily_subsidy_usd / sma_365d_subsidy_usd
self.puell_multiple
.bps
@@ -199,8 +201,11 @@ impl Vecs {
exit,
)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -3,7 +3,7 @@ use std::path::Path;
use brk_error::Result;
use brk_types::Version;
use super::Vecs;
use super::{Vecs, thermometer::Thermometer};
use crate::{
indexes,
internal::{PerBlock, PercentPerBlock, RatioPerBlock, db_utils::{finalize_db, open_db}},
@@ -38,6 +38,8 @@ impl Vecs {
let seller_exhaustion =
PerBlock::forced_import(&db, "seller_exhaustion", v, indexes)?;
let thermometer = Thermometer::forced_import(&db, v, indexes)?;
let this = Self {
db,
puell_multiple,
@@ -50,6 +52,7 @@ impl Vecs {
dormancy,
stock_to_flow,
seller_exhaustion,
thermometer,
};
finalize_db(&this.db, &this)?;
Ok(this)

View File

@@ -1,6 +1,7 @@
mod compute;
mod gini;
mod import;
pub mod thermometer;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,254 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, Height, Indexes, StoredI8, Version};
use vecdb::{AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, WritableVec};
use crate::{
cointime,
distribution,
indexes,
internal::{PerBlock, Price, RatioPerBlockPercentiles},
prices,
};
/// Composite "thermometer": percentile price bands aggregated across several
/// pricing models, plus derived zone/score indicators.
#[derive(Traversable)]
pub struct Thermometer<M: StorageMode = Rw> {
// Lower envelope bands (aggregated as the max across models in `compute`).
pub pct0_5: Price<PerBlock<Cents, M>>,
pub pct1: Price<PerBlock<Cents, M>>,
pub pct2: Price<PerBlock<Cents, M>>,
pub pct5: Price<PerBlock<Cents, M>>,
// Upper envelope bands (aggregated as the min across models in `compute`).
pub pct95: Price<PerBlock<Cents, M>>,
pub pct98: Price<PerBlock<Cents, M>>,
pub pct99: Price<PerBlock<Cents, M>>,
pub pct99_5: Price<PerBlock<Cents, M>>,
// Spot position within the aggregated envelope bands (-4 to +4).
pub zone: PerBlock<StoredI8, M>,
// Per-model band crossings summed across the 10 models (-40 to +40).
pub score: PerBlock<StoredI8, M>,
}
const VERSION: Version = Version::new(2);
impl Thermometer {
/// Import (or create) all on-disk thermometer vectors.
///
/// NOTE(review): the name suffixes are zero-padded inconsistently
/// ("pct01"/"pct02"/"pct05" vs "pct0_5"/"pct95"/"pct99_5") — presumably
/// deliberate for lexicographic ordering; confirm before renaming, since
/// these strings are on-disk identifiers.
pub(crate) fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
// Combine the caller's version with this module's local VERSION.
let v = version + VERSION;
Ok(Self {
pct0_5: Price::forced_import(db, "thermometer_pct0_5", v, indexes)?,
pct1: Price::forced_import(db, "thermometer_pct01", v, indexes)?,
pct2: Price::forced_import(db, "thermometer_pct02", v, indexes)?,
pct5: Price::forced_import(db, "thermometer_pct05", v, indexes)?,
pct95: Price::forced_import(db, "thermometer_pct95", v, indexes)?,
pct98: Price::forced_import(db, "thermometer_pct98", v, indexes)?,
pct99: Price::forced_import(db, "thermometer_pct99", v, indexes)?,
pct99_5: Price::forced_import(db, "thermometer_pct99_5", v, indexes)?,
zone: PerBlock::forced_import(db, "thermometer_zone", v, indexes)?,
score: PerBlock::forced_import(db, "thermometer_score", v, indexes)?,
})
}
/// Compute the aggregated envelope bands, then the zone and score series.
///
/// Ten percentile models are combined: for each lower percentile the maximum
/// across models is kept (tightest lower bound), for each upper percentile
/// the minimum (tightest upper bound).
pub(crate) fn compute(
&mut self,
distribution: &distribution::Vecs,
cointime: &cointime::Vecs,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let realized = &distribution.utxo_cohorts.all.metrics.realized;
let ct = &cointime.prices;
let sth_realized = &distribution.utxo_cohorts.sth.metrics.realized;
let lth_realized = &distribution.utxo_cohorts.lth.metrics.realized;
// The ten source models whose percentile bands get aggregated.
let models: [&RatioPerBlockPercentiles; 10] = [
&realized.price_ratio_percentiles,
&realized.investor.price.percentiles,
&sth_realized.price_ratio_percentiles,
&sth_realized.investor.price.percentiles,
&lth_realized.price_ratio_percentiles,
&lth_realized.investor.price.percentiles,
&ct.vaulted.percentiles,
&ct.active.percentiles,
&ct.true_market_mean.percentiles,
&ct.cointime.percentiles,
];
// Collect the same percentile's height vector from every model.
macro_rules! sources {
($pct:ident) => {
models.each_ref().map(|m| &m.$pct.price.cents.height)
};
}
// Lower percentiles: max across all models (tightest lower bound)
self.pct0_5.cents.height.compute_max_of_others(starting_indexes.height, &sources!(pct0_5), exit)?;
self.pct1.cents.height.compute_max_of_others(starting_indexes.height, &sources!(pct1), exit)?;
self.pct2.cents.height.compute_max_of_others(starting_indexes.height, &sources!(pct2), exit)?;
self.pct5.cents.height.compute_max_of_others(starting_indexes.height, &sources!(pct5), exit)?;
// Upper percentiles: min across all models (tightest upper bound)
self.pct95.cents.height.compute_min_of_others(starting_indexes.height, &sources!(pct95), exit)?;
self.pct98.cents.height.compute_min_of_others(starting_indexes.height, &sources!(pct98), exit)?;
self.pct99.cents.height.compute_min_of_others(starting_indexes.height, &sources!(pct99), exit)?;
self.pct99_5.cents.height.compute_min_of_others(starting_indexes.height, &sources!(pct99_5), exit)?;
let spot = &prices.spot.cents.height;
// Zone: spot vs own envelope bands (-4 to +4)
self.compute_zone(spot, starting_indexes, exit)?;
// Temperature: per-model band crossings (-40 to +40)
self.compute_score(&models, spot, starting_indexes, exit)?;
Ok(())
}
/// Derives the `zone` oscillator: spot price versus the composite envelope.
///
/// Per block, each of the 4 lower bands the spot price sits below contributes
/// -1 and each of the 4 upper bands it sits above contributes +1, yielding a
/// value in [-4, 4].
fn compute_zone(
    &mut self,
    spot: &EagerVec<PcoVec<Height, Cents>>,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // Envelope bands in ascending order: 4 lower bounds, then 4 upper bounds.
    let envelope: [&_; 8] = [
        &self.pct0_5.cents.height,
        &self.pct1.cents.height,
        &self.pct2.cents.height,
        &self.pct5.cents.height,
        &self.pct95.cents.height,
        &self.pct98.cents.height,
        &self.pct99.cents.height,
        &self.pct99_5.cents.height,
    ];
    let dep_version: Version =
        envelope.iter().map(|band| band.version()).sum::<Version>() + spot.version();
    self.zone.height.validate_computed_version_or_reset(dep_version)?;
    self.zone.height.truncate_if_needed(starting_indexes.height)?;
    self.zone.height.repeat_until_complete(exit, |vec| {
        let start = vec.len();
        // Only rows available in every source vec can be computed.
        let available = envelope
            .iter()
            .map(|band| band.len())
            .min()
            .unwrap()
            .min(spot.len());
        let stop = vec.batch_end(available);
        if start >= stop {
            return Ok(());
        }
        let spot_rows = spot.collect_range_at(start, stop);
        let band_rows: [Vec<Cents>; 8] =
            envelope.each_ref().map(|band| band.collect_range_at(start, stop));
        for (row, &price) in spot_rows.iter().enumerate() {
            // -1 per lower band spot is under, +1 per upper band it is over.
            let below = band_rows[..4].iter().filter(|band| price < band[row]).count() as i8;
            let above = band_rows[4..].iter().filter(|band| price > band[row]).count() as i8;
            vec.push(StoredI8::new(above - below));
        }
        Ok(())
    })?;
    Ok(())
}
/// Derives the `score` oscillator: band crossings summed across all 10 models.
///
/// Each model contributes -1 per lower band (pct0_5..pct5) the spot price is
/// below and +1 per upper band (pct95..pct99_5) it is above, so the total is
/// in [-40, 40] (10 models x +/-4). Fits comfortably in an `i8`.
fn compute_score(
    &mut self,
    models: &[&RatioPerBlockPercentiles; 10],
    spot: &EagerVec<PcoVec<Height, Cents>>,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    // The 8 height-indexed price band vecs of one model, in ascending order:
    // 4 lower bounds, then 4 upper bounds. Single source of truth for the
    // band list — previously this list was duplicated verbatim for
    // versioning, length computation, and data collection.
    macro_rules! bands_of {
        ($p:expr) => {
            [
                &$p.pct0_5.price.cents.height,
                &$p.pct1.price.cents.height,
                &$p.pct2.price.cents.height,
                &$p.pct5.price.cents.height,
                &$p.pct95.price.cents.height,
                &$p.pct98.price.cents.height,
                &$p.pct99.price.cents.height,
                &$p.pct99_5.price.cents.height,
            ]
        };
    }
    let dep_version: Version = models
        .iter()
        .map(|p| bands_of!(p).iter().map(|v| v.version()).sum::<Version>())
        .sum::<Version>()
        + spot.version();
    self.score.height.validate_computed_version_or_reset(dep_version)?;
    self.score.height.truncate_if_needed(starting_indexes.height)?;
    self.score.height.repeat_until_complete(exit, |vec| {
        let skip = vec.len();
        // Only rows available in every band of every model (and spot).
        let source_end = models
            .iter()
            .flat_map(|p| bands_of!(p).map(|v| v.len()))
            .min()
            .unwrap()
            .min(spot.len());
        let end = vec.batch_end(source_end);
        if skip >= end {
            return Ok(());
        }
        let spot_batch = spot.collect_range_at(skip, end);
        let bands: Vec<[Vec<Cents>; 8]> = models
            .iter()
            .map(|p| bands_of!(p).map(|v| v.collect_range_at(skip, end)))
            .collect();
        for j in 0..(end - skip) {
            let price = spot_batch[j];
            let mut total: i8 = 0;
            for model in &bands {
                // -1 per lower band below, +1 per upper band above.
                if price < model[3][j] { total -= 1; }
                if price < model[2][j] { total -= 1; }
                if price < model[1][j] { total -= 1; }
                if price < model[0][j] { total -= 1; }
                if price > model[4][j] { total += 1; }
                if price > model[5][j] { total += 1; }
                if price > model[6][j] { total += 1; }
                if price > model[7][j] { total += 1; }
            }
            vec.push(StoredI8::new(total));
        }
        Ok(())
    })?;
    Ok(())
}
}

View File

@@ -2,6 +2,7 @@ use brk_traversable::Traversable;
use brk_types::{BasisPoints16, BasisPoints32, StoredF32};
use vecdb::{Database, Rw, StorageMode};
use super::thermometer::Thermometer;
use crate::internal::{PerBlock, PercentPerBlock, RatioPerBlock};
#[derive(Traversable)]
@@ -24,4 +25,5 @@ pub struct Vecs<M: StorageMode = Rw> {
pub dormancy: DormancyVecs<M>,
pub stock_to_flow: PerBlock<StoredF32, M>,
pub seller_exhaustion: PerBlock<StoredF32, M>,
pub thermometer: Thermometer<M>,
}

View File

@@ -15,13 +15,18 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.spent
.compute(indexer, starting_indexes, exit)?;
self.count
.compute(indexer, indexes, blocks, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -71,20 +71,20 @@ impl ExpandingPercentiles {
self.tree.add(Self::to_bucket(value), &1);
}
/// Compute 6 percentiles in one call via kth. O(6 × log N) but with
/// shared tree traversal across all 6 targets for better cache locality.
/// Compute 8 percentiles in one call via kth. O(8 × log N) but with
/// shared tree traversal across all 8 targets for better cache locality.
/// Quantiles q must be sorted ascending in (0, 1). Output is in BPS.
pub fn quantiles(&self, qs: &[f64; 6], out: &mut [u32; 6]) {
pub fn quantiles(&self, qs: &[f64; 8], out: &mut [u32; 8]) {
if self.count == 0 {
out.iter_mut().for_each(|o| *o = 0);
return;
}
let mut targets = [0u32; 6];
let mut targets = [0u32; 8];
for (i, &q) in qs.iter().enumerate() {
let k = ((q * self.count as f64).ceil() as u32).clamp(1, self.count);
targets[i] = k - 1; // 0-indexed
}
let mut buckets = [0usize; 6];
let mut buckets = [0usize; 8];
self.tree.kth(&targets, &|n: &u32| *n, &mut buckets);
for (i, bucket) in buckets.iter().enumerate() {
out[i] = *bucket as u32 * BUCKET_BPS as u32;
@@ -97,8 +97,8 @@ mod tests {
use super::*;
fn quantile(ep: &ExpandingPercentiles, q: f64) -> u32 {
let mut out = [0u32; 6];
ep.quantiles(&[q, q, q, q, q, q], &mut out);
let mut out = [0u32; 8];
ep.quantiles(&[q, q, q, q, q, q, q, q], &mut out);
out[0]
}

View File

@@ -22,18 +22,20 @@ pub struct RatioBand<M: StorageMode = Rw> {
#[derive(Traversable)]
pub struct RatioPerBlockPercentiles<M: StorageMode = Rw> {
pub pct99_5: RatioBand<M>,
pub pct99: RatioBand<M>,
pub pct98: RatioBand<M>,
pub pct95: RatioBand<M>,
pub pct5: RatioBand<M>,
pub pct2: RatioBand<M>,
pub pct1: RatioBand<M>,
pub pct0_5: RatioBand<M>,
#[traversable(skip)]
expanding_pct: ExpandingPercentiles,
}
const VERSION: Version = Version::new(5);
const VERSION: Version = Version::new(6);
/// First height included in ratio percentile computation (first halving).
/// Earlier blocks lack meaningful market data and pollute the distribution.
@@ -70,12 +72,14 @@ impl RatioPerBlockPercentiles {
}
Ok(Self {
pct99_5: import_band!("pct99_5"),
pct99: import_band!("pct99"),
pct98: import_band!("pct98"),
pct95: import_band!("pct95"),
pct5: import_band!("pct5"),
pct2: import_band!("pct2"),
pct1: import_band!("pct1"),
pct0_5: import_band!("pct0_5"),
expanding_pct: ExpandingPercentiles::default(),
})
}
@@ -114,16 +118,18 @@ impl RatioPerBlockPercentiles {
}
let new_ratios = ratio_source.collect_range_at(start, ratio_len);
let mut pct_vecs: [&mut EagerVec<PcoVec<Height, BasisPoints32>>; 6] = [
let mut pct_vecs: [&mut EagerVec<PcoVec<Height, BasisPoints32>>; 8] = [
&mut self.pct0_5.ratio.bps.height,
&mut self.pct1.ratio.bps.height,
&mut self.pct2.ratio.bps.height,
&mut self.pct5.ratio.bps.height,
&mut self.pct95.ratio.bps.height,
&mut self.pct98.ratio.bps.height,
&mut self.pct99.ratio.bps.height,
&mut self.pct99_5.ratio.bps.height,
];
const PCTS: [f64; 6] = [0.01, 0.02, 0.05, 0.95, 0.98, 0.99];
let mut out = [0u32; 6];
const PCTS: [f64; 8] = [0.005, 0.01, 0.02, 0.05, 0.95, 0.98, 0.99, 0.995];
let mut out = [0u32; 8];
for vec in pct_vecs.iter_mut() {
vec.truncate_if_needed_at(start)?;
@@ -160,12 +166,14 @@ impl RatioPerBlockPercentiles {
};
}
compute_band!(pct99_5);
compute_band!(pct99);
compute_band!(pct98);
compute_band!(pct95);
compute_band!(pct5);
compute_band!(pct2);
compute_band!(pct1);
compute_band!(pct0_5);
Ok(())
}
@@ -174,12 +182,14 @@ impl RatioPerBlockPercentiles {
&mut self,
) -> impl Iterator<Item = &mut EagerVec<PcoVec<Height, BasisPoints32>>> {
[
&mut self.pct0_5.ratio.bps.height,
&mut self.pct1.ratio.bps.height,
&mut self.pct2.ratio.bps.height,
&mut self.pct5.ratio.bps.height,
&mut self.pct95.ratio.bps.height,
&mut self.pct98.ratio.bps.height,
&mut self.pct99.ratio.bps.height,
&mut self.pct99_5.ratio.bps.height,
]
.into_iter()
}

View File

@@ -1,5 +1,7 @@
use brk_traversable::Traversable;
use crate::market::lookback::ByLookbackPeriod;
/// DCA period identifiers with their day counts
pub const DCA_PERIOD_DAYS: ByDcaPeriod<u32> = ByDcaPeriod {
_1w: 7,
@@ -173,6 +175,26 @@ impl<T> ByDcaPeriod<T> {
}
}
impl<T> ByDcaPeriod<&T> {
/// Get the DCA-matching subset from lookback (excludes 24h)
pub(crate) fn from_lookback(lookback: &ByLookbackPeriod<T>) -> ByDcaPeriod<&T> {
ByDcaPeriod {
_1w: &lookback._1w,
_1m: &lookback._1m,
_3m: &lookback._3m,
_6m: &lookback._6m,
_1y: &lookback._1y,
_2y: &lookback._2y,
_3y: &lookback._3y,
_4y: &lookback._4y,
_5y: &lookback._5y,
_6y: &lookback._6y,
_8y: &lookback._8y,
_10y: &lookback._10y,
}
}
}
/// Generic wrapper for DCA CAGR data (periods >= 2 years)
#[derive(Clone, Default, Traversable)]
pub struct ByDcaCagr<T> {

View File

@@ -2,8 +2,8 @@ use brk_error::Result;
use brk_types::{BasisPointsSigned32, Bitcoin, Cents, Date, Day1, Dollars, Indexes, Sats};
use vecdb::{AnyVec, Exit, ReadableOptionVec, ReadableVec, VecIndex};
use super::Vecs;
use crate::{blocks, indexes, internal::RatioDiffCentsBps32, market::lookback, prices};
use super::{ByDcaPeriod, Vecs};
use crate::{blocks, indexes, internal::RatioDiffCentsBps32, market, prices};
const DCA_AMOUNT: Dollars = Dollars::mint(100.0);
@@ -13,10 +13,12 @@ impl Vecs {
indexes: &indexes::Vecs,
prices: &prices::Vecs,
blocks: &blocks::Vecs,
lookback: &lookback::Vecs,
lookback: &market::lookback::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
let h2d = &indexes.height.day1;
let close = &prices.split.close.usd.day1;
@@ -71,17 +73,12 @@ impl Vecs {
self.period.cost_basis.zip_mut_with_days(&self.period.stack)
{
let days = days as usize;
let start = average_price.cents.height.len().min(starting_height);
let stack_data = stack
.sats
.height
.collect_range_at(start, stack.sats.height.len());
average_price.cents.height.compute_transform(
average_price.cents.height.compute_transform2(
starting_indexes.height,
h2d,
|(h, di, _)| {
&stack.sats.height,
|(h, di, stack_sats, ..)| {
let di_usize = di.to_usize();
let stack_sats = stack_data[h.to_usize() - start];
let avg = if di_usize > first_price_di {
let num_days = days.min(di_usize + 1 - first_price_di);
Cents::from(DCA_AMOUNT * num_days / Bitcoin::from(stack_sats))
@@ -125,21 +122,16 @@ impl Vecs {
}
// Lump sum by period - stack
let lookback_dca = lookback.price_past.as_dca_period();
let lookback_dca = ByDcaPeriod::from_lookback(&lookback.price_past);
for (stack, lookback_price, days) in
self.period.lump_sum_stack.zip_mut_with_days(&lookback_dca)
{
let total_invested = DCA_AMOUNT * days as usize;
let ls_start = stack.sats.height.len().min(starting_height);
let lookback_data = lookback_price
.cents
.height
.collect_range_at(ls_start, lookback_price.cents.height.len());
stack.sats.height.compute_transform(
stack.sats.height.compute_transform2(
starting_indexes.height,
h2d,
|(h, _di, _)| {
let lp = lookback_data[h.to_usize() - ls_start];
&lookback_price.cents.height,
|(h, _di, lp, ..)| {
let sats = if lp == Cents::ZERO {
Sats::ZERO
} else {
@@ -238,20 +230,15 @@ impl Vecs {
.zip(start_days)
{
let from_usize = from.to_usize();
let cls_start = average_price.cents.height.len().min(starting_height);
let stack_data = stack
.sats
.height
.collect_range_at(cls_start, stack.sats.height.len());
average_price.cents.height.compute_transform(
average_price.cents.height.compute_transform2(
starting_indexes.height,
h2d,
|(h, di, _)| {
&stack.sats.height,
|(h, di, stack_sats, ..)| {
let di_usize = di.to_usize();
if di_usize < from_usize {
return (h, Cents::ZERO);
}
let stack_sats = stack_data[h.to_usize() - cls_start];
let num_days = di_usize + 1 - from_usize;
let avg = Cents::from(DCA_AMOUNT * num_days / Bitcoin::from(stack_sats));
(h, avg)
@@ -275,6 +262,11 @@ impl Vecs {
)?;
}
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -1,43 +1,50 @@
use std::path::Path;
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, ImportableVec};
use vecdb::ImportableVec;
use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod, Vecs};
use super::vecs::{ClassVecs, PeriodVecs};
use crate::{
indexes,
internal::{AmountPerBlock, PercentPerBlock, Price},
internal::{
db_utils::{finalize_db, open_db},
AmountPerBlock, PercentPerBlock, Price,
},
};
impl Vecs {
pub(crate) fn forced_import(
db: &Database,
version: Version,
parent_path: &Path,
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = open_db(parent_path, super::DB_NAME, 50_000)?;
let version = parent_version;
let stack = ByDcaPeriod::try_new(|name, _days| {
AmountPerBlock::forced_import(db, &format!("dca_stack_{name}"), version, indexes)
AmountPerBlock::forced_import(&db, &format!("dca_stack_{name}"), version, indexes)
})?;
let cost_basis = ByDcaPeriod::try_new(|name, _days| {
Price::forced_import(db, &format!("dca_cost_basis_{name}"), version, indexes)
Price::forced_import(&db, &format!("dca_cost_basis_{name}"), version, indexes)
})?;
let r#return = ByDcaPeriod::try_new(|name, _days| {
PercentPerBlock::forced_import(db, &format!("dca_return_{name}"), version, indexes)
PercentPerBlock::forced_import(&db, &format!("dca_return_{name}"), version, indexes)
})?;
let cagr = ByDcaCagr::try_new(|name, _days| {
PercentPerBlock::forced_import(db, &format!("dca_cagr_{name}"), version, indexes)
PercentPerBlock::forced_import(&db, &format!("dca_cagr_{name}"), version, indexes)
})?;
let lump_sum_stack = ByDcaPeriod::try_new(|name, _days| {
AmountPerBlock::forced_import(db, &format!("lump_sum_stack_{name}"), version, indexes)
AmountPerBlock::forced_import(&db, &format!("lump_sum_stack_{name}"), version, indexes)
})?;
let lump_sum_return = ByDcaPeriod::try_new(|name, _days| {
PercentPerBlock::forced_import(
db,
&db,
&format!("lump_sum_return_{name}"),
version,
indexes,
@@ -45,19 +52,19 @@ impl Vecs {
})?;
let class_stack = ByDcaClass::try_new(|name, _year, _day1| {
AmountPerBlock::forced_import(db, &format!("dca_stack_{name}"), version, indexes)
AmountPerBlock::forced_import(&db, &format!("dca_stack_{name}"), version, indexes)
})?;
let class_cost_basis = ByDcaClass::try_new(|name, _year, _day1| {
Price::forced_import(db, &format!("dca_cost_basis_{name}"), version, indexes)
Price::forced_import(&db, &format!("dca_cost_basis_{name}"), version, indexes)
})?;
let class_return = ByDcaClass::try_new(|name, _year, _day1| {
PercentPerBlock::forced_import(db, &format!("dca_return_{name}"), version, indexes)
PercentPerBlock::forced_import(&db, &format!("dca_return_{name}"), version, indexes)
})?;
Ok(Self {
sats_per_day: ImportableVec::forced_import(db, "dca_sats_per_day", version)?,
let this = Self {
sats_per_day: ImportableVec::forced_import(&db, "dca_sats_per_day", version)?,
period: PeriodVecs {
stack,
cost_basis,
@@ -71,6 +78,9 @@ impl Vecs {
cost_basis: class_cost_basis,
r#return: class_return,
},
})
db,
};
finalize_db(&this.db, &this)?;
Ok(this)
}
}

View File

@@ -7,3 +7,5 @@ mod vecs;
pub use by_class::*;
pub use by_period::*;
pub use vecs::Vecs;
pub const DB_NAME: &str = "investing";

View File

@@ -1,6 +1,6 @@
use brk_traversable::Traversable;
use brk_types::{BasisPointsSigned32, Cents, Height, Sats};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use vecdb::{Database, EagerVec, PcoVec, Rw, StorageMode};
use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod};
use crate::internal::{AmountPerBlock, PerBlock, PercentPerBlock, Price};
@@ -24,6 +24,8 @@ pub struct ClassVecs<M: StorageMode = Rw> {
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(skip)]
pub(crate) db: Database,
pub sats_per_day: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
pub period: PeriodVecs<M>,
pub class: ClassVecs<M>,

View File

@@ -18,6 +18,7 @@ mod indicators;
pub mod indexes;
mod inputs;
mod internal;
mod investing;
mod market;
mod mining;
mod outputs;
@@ -39,6 +40,7 @@ pub struct Computer<M: StorageMode = Rw> {
pub constants: Box<constants::Vecs>,
pub indexes: Box<indexes::Vecs<M>>,
pub indicators: Box<indicators::Vecs<M>>,
pub investing: Box<investing::Vecs<M>>,
pub market: Box<market::Vecs<M>>,
pub pools: Box<pools::Vecs<M>>,
pub prices: Box<prices::Vecs<M>>,
@@ -180,8 +182,8 @@ impl Computer {
// Market, indicators, and distribution are independent; import in parallel.
// Supply depends on distribution so it runs after.
let (distribution, market, indicators) =
timed("Imported distribution/market/indicators", || {
let (distribution, market, indicators, investing) =
timed("Imported distribution/market/indicators/investing", || {
thread::scope(|s| -> Result<_> {
let market_handle = big_thread().spawn_scoped(s, || -> Result<_> {
Ok(Box::new(market::Vecs::forced_import(
@@ -199,6 +201,14 @@ impl Computer {
)?))
})?;
let investing_handle = big_thread().spawn_scoped(s, || -> Result<_> {
Ok(Box::new(investing::Vecs::forced_import(
&computed_path,
VERSION,
&indexes,
)?))
})?;
let distribution = Box::new(distribution::Vecs::forced_import(
&computed_path,
VERSION,
@@ -208,7 +218,8 @@ impl Computer {
let market = market_handle.join().unwrap()?;
let indicators = indicators_handle.join().unwrap()?;
Ok((distribution, market, indicators))
let investing = investing_handle.join().unwrap()?;
Ok((distribution, market, indicators, investing))
})
})?;
@@ -232,6 +243,7 @@ impl Computer {
scripts,
constants,
indicators,
investing,
market,
distribution,
supply,
@@ -260,6 +272,7 @@ impl Computer {
cointime::DB_NAME,
indicators::DB_NAME,
indexes::DB_NAME,
investing::DB_NAME,
market::DB_NAME,
pools::DB_NAME,
prices::DB_NAME,
@@ -305,7 +318,7 @@ impl Computer {
let mut starting_indexes = timed("Computed indexes", || {
self.indexes
.compute(indexer, &mut self.blocks, starting_indexes, exit)
.compute(indexer, starting_indexes, exit)
})?;
thread::scope(|scope| -> Result<()> {
@@ -339,8 +352,8 @@ impl Computer {
let market = scope.spawn(|| {
timed("Computed market", || {
self.market.compute(
&self.indexes,
&self.prices,
&self.indexes,
&self.blocks,
&starting_indexes,
exit,
@@ -422,6 +435,19 @@ impl Computer {
})
});
let investing = scope.spawn(|| {
timed("Computed investing", || {
self.investing.compute(
&self.indexes,
&self.prices,
&self.blocks,
&self.market.lookback,
&starting_indexes_clone,
exit,
)
})
});
timed("Computed distribution", || {
self.distribution.compute(
indexer,
@@ -437,6 +463,7 @@ impl Computer {
})?;
pools.join().unwrap()?;
investing.join().unwrap()?;
Ok(())
})?;
@@ -485,6 +512,14 @@ impl Computer {
Ok(())
})?;
self.indicators.thermometer.compute(
&self.distribution,
&self.cointime,
&self.prices,
&starting_indexes,
exit,
)?;
info!("Total compute time: {:?}", compute_start.elapsed());
Ok(())
}
@@ -517,7 +552,7 @@ macro_rules! impl_iter_named {
}
impl_iter_named!(blocks, mining, transactions, scripts, positions, cointime,
constants, indicators, indexes, market, pools, prices, distribution, supply, inputs, outputs);
constants, indicators, indexes, investing, market, pools, prices, distribution, supply, inputs, outputs);
fn timed<T>(label: &str, f: impl FnOnce() -> T) -> T {
let start = Instant::now();

View File

@@ -3,13 +3,13 @@ use brk_types::{Indexes, StoredF32, Timestamp};
use vecdb::{Exit, ReadableVec, VecIndex};
use super::Vecs;
use crate::{blocks, prices};
use crate::{indexes, prices};
impl Vecs {
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
blocks: &blocks::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
@@ -24,7 +24,7 @@ impl Vecs {
starting_indexes.height,
&self.high.cents.height,
&prices.spot.cents.height,
&blocks.time.timestamp_monotonic,
&indexes.timestamp.monotonic,
|(i, ath, price, ts, slf)| {
if ath_ts.is_none() {
let idx = i.to_usize();

View File

@@ -9,17 +9,19 @@ use super::Vecs;
impl Vecs {
pub(crate) fn compute(
&mut self,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
indexes: &indexes::Vecs,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// Phase 1: Independent sub-modules in parallel
let (r1, r2) = rayon::join(
|| {
rayon::join(
|| self.ath.compute(prices, blocks, starting_indexes, exit),
|| self.ath.compute(prices, indexes, starting_indexes, exit),
|| self.lookback.compute(blocks, prices, starting_indexes, exit),
)
},
@@ -39,24 +41,8 @@ impl Vecs {
r2.1?;
// Phase 2: Depend on lookback
let (r3, r4) = rayon::join(
|| {
self.returns
.compute(prices, blocks, &self.lookback, starting_indexes, exit)
},
|| {
self.dca.compute(
indexes,
prices,
blocks,
&self.lookback,
starting_indexes,
exit,
)
},
);
r3?;
r4?;
self.returns
.compute(prices, blocks, &self.lookback, starting_indexes, exit)?;
// Phase 3: Depends on returns, moving_average
self.technical.compute(
@@ -68,8 +54,11 @@ impl Vecs {
exit,
)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -9,7 +9,7 @@ use crate::{
};
use super::{
AthVecs, DcaVecs, TechnicalVecs, LookbackVecs, MovingAverageVecs, RangeVecs, ReturnsVecs,
AthVecs, TechnicalVecs, LookbackVecs, MovingAverageVecs, RangeVecs, ReturnsVecs,
Vecs, VolatilityVecs,
};
@@ -28,7 +28,6 @@ impl Vecs {
let volatility = VolatilityVecs::forced_import(version, &returns)?;
let range = RangeVecs::forced_import(&db, version, indexes)?;
let moving_average = MovingAverageVecs::forced_import(&db, version, indexes)?;
let dca = DcaVecs::forced_import(&db, version, indexes)?;
let technical = TechnicalVecs::forced_import(&db, version, indexes)?;
let this = Self {
@@ -39,7 +38,6 @@ impl Vecs {
volatility,
range,
moving_average,
dca,
technical,
};
finalize_db(&this.db, &this)?;

View File

@@ -1,7 +1,5 @@
use brk_traversable::Traversable;
use crate::market::dca::ByDcaPeriod;
/// Lookback period days (includes 24h, unlike DCA)
pub const LOOKBACK_PERIOD_DAYS: ByLookbackPeriod<u32> = ByLookbackPeriod {
_24h: 1,
@@ -117,22 +115,4 @@ impl<T> ByLookbackPeriod<T> {
]
.into_iter()
}
/// Get the DCA-matching subset (excludes 24h)
pub(crate) fn as_dca_period(&self) -> ByDcaPeriod<&T> {
ByDcaPeriod {
_1w: &self._1w,
_1m: &self._1m,
_3m: &self._3m,
_6m: &self._6m,
_1y: &self._1y,
_2y: &self._2y,
_3y: &self._3y,
_4y: &self._4y,
_5y: &self._5y,
_6y: &self._6y,
_8y: &self._8y,
_10y: &self._10y,
}
}
}

View File

@@ -1,6 +1,5 @@
pub mod ath;
mod compute;
pub mod dca;
mod import;
pub mod technical;
pub mod lookback;
@@ -13,7 +12,6 @@ use brk_traversable::Traversable;
use vecdb::{Database, Rw, StorageMode};
pub use ath::Vecs as AthVecs;
pub use dca::Vecs as DcaVecs;
pub use technical::Vecs as TechnicalVecs;
pub use lookback::Vecs as LookbackVecs;
pub use moving_average::Vecs as MovingAverageVecs;
@@ -32,6 +30,5 @@ pub struct Vecs<M: StorageMode = Rw> {
pub volatility: VolatilityVecs,
pub range: RangeVecs<M>,
pub moving_average: MovingAverageVecs<M>,
pub dca: DcaVecs<M>,
pub technical: TechnicalVecs<M>,
}

View File

@@ -3,7 +3,7 @@ use brk_types::{BasisPointsSigned32, Dollars, Indexes};
use vecdb::Exit;
use super::Vecs;
use crate::{blocks, internal::RatioDiffDollarsBps32, market::lookback, prices};
use crate::{blocks, internal::RatioDiffDollarsBps32, investing::ByDcaPeriod, market::lookback, prices};
impl Vecs {
pub(crate) fn compute(
@@ -29,7 +29,7 @@ impl Vecs {
}
// CAGR computed from returns at height level (2y+ periods only)
let price_return_dca = self.periods.as_dca_period();
let price_return_dca = ByDcaPeriod::from_lookback(&self.periods);
for (cagr, returns, days) in self.cagr.zip_mut_with_period(&price_return_dca) {
let years = days as f64 / 365.0;
cagr.bps.height.compute_transform(

View File

@@ -7,7 +7,7 @@ use super::Vecs;
use crate::{
indexes,
internal::{StdDevPerBlock, PercentPerBlock, Windows},
market::dca::ByDcaCagr,
investing::ByDcaCagr,
};
impl Vecs {

View File

@@ -4,7 +4,8 @@ use vecdb::{Rw, StorageMode};
use crate::{
internal::{PercentPerBlock, StdDevPerBlock, Windows},
market::{dca::ByDcaCagr, lookback::ByLookbackPeriod},
investing::ByDcaCagr,
market::lookback::ByLookbackPeriod,
};
#[derive(Traversable)]

View File

@@ -18,6 +18,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// Block rewards (coinbase, subsidy, fee_dominance, etc.)
self.rewards.compute(
indexer,
@@ -39,8 +41,11 @@ impl Vecs {
exit,
)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -18,6 +18,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.count.compute(
indexer,
indexes,
@@ -27,10 +29,13 @@ impl Vecs {
starting_indexes,
exit,
)?;
let _lock = self
let lock = self
.spent
.compute(indexer, inputs, starting_indexes, exit)?;
self.db.compact()?;
self.db.run_bg(move |db| {
let _lock = lock;
db.compact()
});
Ok(())
}
}

View File

@@ -10,13 +10,13 @@ use crate::inputs;
const HEIGHT_BATCH: u32 = 10_000;
impl Vecs {
pub(crate) fn compute<'a>(
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
inputs: &inputs::Vecs,
starting_indexes: &Indexes,
exit: &'a Exit,
) -> Result<ExitGuard<'a>> {
exit: &Exit,
) -> Result<ExitGuard> {
let target_height = indexer.vecs.blocks.blockhash.len();
if target_height == 0 {
return Ok(exit.lock());

View File

@@ -86,6 +86,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.compute_pool(indexer, indexes, starting_indexes, exit)?;
self.major.par_iter_mut().try_for_each(|(_, vecs)| {
@@ -103,8 +105,11 @@ impl Vecs {
vecs.compute(starting_indexes, &self.pool, blocks, exit)
})?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}

View File

@@ -45,9 +45,14 @@ impl Vecs {
reader: &Reader,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.compute_(indexer, starting_indexes, reader, exit)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}

View File

@@ -18,6 +18,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.compute_prices(indexer, starting_indexes, exit)?;
self.split.open.cents.compute_first(
starting_indexes,
@@ -47,8 +49,11 @@ impl Vecs {
exit,
)?;
let _lock = exit.lock();
self.db().compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}

View File

@@ -25,7 +25,7 @@ pub const DB_NAME: &str = "prices";
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(skip)]
pub(crate) db: Database,
pub db: Database,
pub split: SplitByUnit<M>,
pub ohlc: OhlcByUnit<M>,
@@ -183,7 +183,4 @@ impl Vecs {
})
}
pub(crate) fn db(&self) -> &Database {
&self.db
}
}

View File

@@ -15,13 +15,18 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
self.count.compute(indexer, starting_indexes, exit)?;
self.value
.compute(indexer, prices, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}

View File

@@ -21,6 +21,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
// 1. Compute burned/unspendable supply
self.burned.compute(
scripts,
@@ -76,8 +78,11 @@ impl Vecs {
)?;
}
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}

View File

@@ -20,6 +20,8 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.db.sync_bg_tasks()?;
let (r1, (r2, r3)) = rayon::join(
|| {
self.count
@@ -57,8 +59,11 @@ impl Vecs {
exit,
)?;
let _lock = exit.lock();
self.db.compact()?;
let exit = exit.clone();
self.db.run_bg(move |db| {
let _lock = exit.lock();
db.compact()
});
Ok(())
}
}