global: snapshot

This commit is contained in:
nym21
2026-03-02 13:34:45 +01:00
parent 7cb1bfa667
commit 4d97cec869
57 changed files with 1724 additions and 2011 deletions

View File

@@ -72,97 +72,93 @@ impl Vecs {
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 55, |s| {
&mut s.height_55d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2 * 30, |s| {
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 60, |s| {
&mut s.height_2m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 63, |s| {
&mut s.height_9w_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 84, |s| {
&mut s.height_12w_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 89, |s| {
&mut s.height_89d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3 * 30, |s| {
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 90, |s| {
&mut s.height_3m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 98, |s| {
&mut s.height_14w_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 111, |s| {
&mut s.height_111d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 144, |s| {
&mut s.height_144d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 6 * 30, |s| {
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 180, |s| {
&mut s.height_6m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 182, |s| {
&mut s.height_26w_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 200, |s| {
&mut s.height_200d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 270, |s| {
&mut s.height_9m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 350, |s| {
&mut s.height_350d_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 360, |s| {
&mut s.height_12m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 365, |s| {
&mut s.height_1y_ago
})?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
2 * 365,
|s| &mut s.height_2y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
200 * 7,
|s| &mut s.height_200w_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
3 * 365,
|s| &mut s.height_3y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
4 * 365,
|s| &mut s.height_4y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
5 * 365,
|s| &mut s.height_5y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
6 * 365,
|s| &mut s.height_6y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
8 * 365,
|s| &mut s.height_8y_ago,
)?;
self.compute_rolling_start(
&monotonic_data,
time,
starting_indexes,
exit,
10 * 365,
|s| &mut s.height_10y_ago,
)?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 420, |s| {
&mut s.height_14m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 730, |s| {
&mut s.height_2y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 780, |s| {
&mut s.height_26m_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1095, |s| {
&mut s.height_3y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1400, |s| {
&mut s.height_200w_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1460, |s| {
&mut s.height_4y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 1825, |s| {
&mut s.height_5y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2190, |s| {
&mut s.height_6y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 2920, |s| {
&mut s.height_8y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3285, |s| {
&mut s.height_9y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 3650, |s| {
&mut s.height_10y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 4380, |s| {
&mut s.height_12y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 5110, |s| {
&mut s.height_14y_ago
})?;
self.compute_rolling_start(&monotonic_data, time, starting_indexes, exit, 9490, |s| {
&mut s.height_26y_ago
})?;
// Compute rolling window block counts (both block_count's own rolling + separate block_count_sum)
let ws = crate::internal::WindowStarts {

View File

@@ -37,22 +37,34 @@ impl Vecs {
height_34d_ago: ImportableVec::forced_import(db, "height_34d_ago", version)?,
height_55d_ago: ImportableVec::forced_import(db, "height_55d_ago", version)?,
height_2m_ago: ImportableVec::forced_import(db, "height_2m_ago", version)?,
height_9w_ago: ImportableVec::forced_import(db, "height_9w_ago", version)?,
height_12w_ago: ImportableVec::forced_import(db, "height_12w_ago", version)?,
height_89d_ago: ImportableVec::forced_import(db, "height_89d_ago", version)?,
height_3m_ago: ImportableVec::forced_import(db, "height_3m_ago", version)?,
height_14w_ago: ImportableVec::forced_import(db, "height_14w_ago", version)?,
height_111d_ago: ImportableVec::forced_import(db, "height_111d_ago", version)?,
height_144d_ago: ImportableVec::forced_import(db, "height_144d_ago", version)?,
height_3m_ago: ImportableVec::forced_import(db, "height_3m_ago", version)?,
height_6m_ago: ImportableVec::forced_import(db, "height_6m_ago", version)?,
height_26w_ago: ImportableVec::forced_import(db, "height_26w_ago", version)?,
height_200d_ago: ImportableVec::forced_import(db, "height_200d_ago", version)?,
height_9m_ago: ImportableVec::forced_import(db, "height_9m_ago", version)?,
height_350d_ago: ImportableVec::forced_import(db, "height_350d_ago", version)?,
height_12m_ago: ImportableVec::forced_import(db, "height_12m_ago", version)?,
height_1y_ago: ImportableVec::forced_import(db, "height_1y_ago", version)?,
height_14m_ago: ImportableVec::forced_import(db, "height_14m_ago", version)?,
height_2y_ago: ImportableVec::forced_import(db, "height_2y_ago", version)?,
height_200w_ago: ImportableVec::forced_import(db, "height_200w_ago", version)?,
height_26m_ago: ImportableVec::forced_import(db, "height_26m_ago", version)?,
height_3y_ago: ImportableVec::forced_import(db, "height_3y_ago", version)?,
height_200w_ago: ImportableVec::forced_import(db, "height_200w_ago", version)?,
height_4y_ago: ImportableVec::forced_import(db, "height_4y_ago", version)?,
height_5y_ago: ImportableVec::forced_import(db, "height_5y_ago", version)?,
height_6y_ago: ImportableVec::forced_import(db, "height_6y_ago", version)?,
height_8y_ago: ImportableVec::forced_import(db, "height_8y_ago", version)?,
height_9y_ago: ImportableVec::forced_import(db, "height_9y_ago", version)?,
height_10y_ago: ImportableVec::forced_import(db, "height_10y_ago", version)?,
height_12y_ago: ImportableVec::forced_import(db, "height_12y_ago", version)?,
height_14y_ago: ImportableVec::forced_import(db, "height_14y_ago", version)?,
height_26y_ago: ImportableVec::forced_import(db, "height_26y_ago", version)?,
block_count_sum: RollingWindows::forced_import(
db,
"block_count_sum",

View File

@@ -12,37 +12,50 @@ pub struct Vecs<M: StorageMode = Rw> {
pub block_count: ComputedFromHeightCumulativeSum<StoredU32, M>,
pub block_count_sum: RollingWindows<StoredU32, M>,
// Window starts sorted by duration
pub height_1h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_24h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_24h_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1d
pub height_3d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 7d
pub height_8d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_9d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_12d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_13d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 14d
pub height_21d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_26d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 30d
pub height_34d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_55d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 60d
pub height_9w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 63d
pub height_12w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 84d
pub height_89d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_3m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 90d
pub height_14w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 98d
pub height_111d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_144d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_3m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_6m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_6m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 180d
pub height_26w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 182d
pub height_200d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_9m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 270d
pub height_350d_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_1y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_2y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_200w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_3y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_4y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_5y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_6y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_8y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_10y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>,
pub height_12m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 360d
pub height_1y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 365d
pub height_14m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 420d
pub height_2y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 730d
pub height_26m_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 780d
pub height_3y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1095d
pub height_200w_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1400d
pub height_4y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1460d
pub height_5y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 1825d
pub height_6y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2190d
pub height_8y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 2920d
pub height_9y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3285d
pub height_10y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 3650d
pub height_12y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 4380d
pub height_14y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 5110d
pub height_26y_ago: M::Stored<EagerVec<PcoVec<Height, Height>>>, // 9490d
}
impl Vecs {
@@ -72,22 +85,34 @@ impl Vecs {
34 => &self.height_34d_ago,
55 => &self.height_55d_ago,
60 => &self.height_2m_ago,
63 => &self.height_9w_ago,
84 => &self.height_12w_ago,
89 => &self.height_89d_ago,
90 => &self.height_3m_ago,
98 => &self.height_14w_ago,
111 => &self.height_111d_ago,
144 => &self.height_144d_ago,
180 => &self.height_6m_ago,
182 => &self.height_26w_ago,
200 => &self.height_200d_ago,
270 => &self.height_9m_ago,
350 => &self.height_350d_ago,
360 => &self.height_12m_ago,
365 => &self.height_1y_ago,
420 => &self.height_14m_ago,
730 => &self.height_2y_ago,
780 => &self.height_26m_ago,
1095 => &self.height_3y_ago,
1400 => &self.height_200w_ago,
1460 => &self.height_4y_ago,
1825 => &self.height_5y_ago,
2190 => &self.height_6y_ago,
2920 => &self.height_8y_ago,
3285 => &self.height_9y_ago,
3650 => &self.height_10y_ago,
4380 => &self.height_12y_ago,
5110 => &self.height_14y_ago,
9490 => &self.height_26y_ago,
_ => panic!("No start vec for {days} days"),
}
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{StoredF32, StoredU32};
use brk_types::{StoredF64, StoredU32};
use vecdb::Exit;
use super::super::TARGET_BLOCKS_PER_DAY_F32;
@@ -22,7 +22,7 @@ impl Vecs {
self.as_hash.height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.difficulty,
|(i, v, ..)| (i, StoredF32::from(*v * multiplier)),
|(i, v, ..)| (i, StoredF64::from(*v * multiplier)),
exit,
)?;

View File

@@ -8,7 +8,7 @@ use crate::internal::{ComputedFromHeight, ComputedHeightDerived};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub raw: ComputedHeightDerived<StoredF64>,
pub as_hash: ComputedFromHeight<StoredF32, M>,
pub as_hash: ComputedFromHeight<StoredF64, M>,
pub adjustment: ComputedFromHeight<StoredF32, M>,
pub epoch: ComputedFromHeight<DifficultyEpoch, M>,
pub blocks_before_next_adjustment: ComputedFromHeight<StoredU32, M>,

View File

@@ -48,6 +48,7 @@ pub(crate) fn process_blocks(
starting_height: Height,
last_height: Height,
chain_state: &mut Vec<BlockState>,
txindex_to_height: &mut RangeMap<TxIndex, Height>,
exit: &Exit,
) -> Result<()> {
// Create computation context with pre-computed vectors for thread-safe access
@@ -110,26 +111,28 @@ pub(crate) fn process_blocks(
let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data);
debug!("VecsReaders created");
// Build txindex -> height lookup map for efficient prev_height computation
debug!("building txindex_to_height RangeMap");
let mut txindex_to_height: RangeMap<TxIndex, Height> = {
let first_txindex_len = indexer.vecs.transactions.first_txindex.len();
let all_first_txindexes: Vec<TxIndex> = indexer
// Extend txindex_to_height RangeMap with new entries (incremental, O(new_blocks))
let target_len = indexer.vecs.transactions.first_txindex.len();
let current_len = txindex_to_height.len();
if current_len < target_len {
debug!("extending txindex_to_height RangeMap from {} to {}", current_len, target_len);
let new_entries: Vec<TxIndex> = indexer
.vecs
.transactions
.first_txindex
.collect_range_at(0, first_txindex_len);
let mut map = RangeMap::with_capacity(first_txindex_len);
for first_txindex in all_first_txindexes {
map.push(first_txindex);
.collect_range_at(current_len, target_len);
for first_txindex in new_entries {
txindex_to_height.push(first_txindex);
}
map
};
debug!("txindex_to_height RangeMap built");
} else if current_len > target_len {
debug!("truncating txindex_to_height RangeMap from {} to {}", current_len, target_len);
txindex_to_height.truncate(target_len);
}
debug!("txindex_to_height RangeMap ready ({} entries)", txindex_to_height.len());
// Create reusable iterators and buffers for per-block reads
let mut txout_iters = TxOutReaders::new(indexer);
let mut txin_iters = TxInReaders::new(indexer, inputs, &mut txindex_to_height);
let mut txin_iters = TxInReaders::new(indexer, inputs, txindex_to_height);
let mut txout_to_txindex_buf = IndexToTxIndexBuf::new();
let mut txin_to_txindex_buf = IndexToTxIndexBuf::new();

View File

@@ -12,7 +12,7 @@ const CACHE_SIZE: usize = 8;
///
/// Includes an LRU cache of recently accessed ranges to avoid binary search
/// when there's locality in access patterns.
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct RangeMap<I, V> {
/// Sorted vec of first_index values. Position in vec = value.
first_indexes: Vec<I>,
@@ -44,6 +44,17 @@ impl<I: Ord + Copy + Default, V: From<usize> + Copy + Default> RangeMap<I, V> {
}
}
/// Number of ranges stored.
pub(crate) fn len(&self) -> usize {
self.first_indexes.len()
}
/// Truncate to `new_len` ranges and clear the cache.
pub(crate) fn truncate(&mut self, new_len: usize) {
self.first_indexes.truncate(new_len);
self.cache_len = 0;
}
/// Push a new first_index. Value is implicitly the current length.
/// Must be called in order (first_index must be >= all previous).
#[inline]

View File

@@ -5,7 +5,7 @@ use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
Day1, EmptyAddressData, EmptyAddressIndex, FundedAddressData, FundedAddressIndex, Height,
SupplyState, Version,
SupplyState, TxIndex, Version,
};
use tracing::{debug, info};
use vecdb::{
@@ -23,7 +23,7 @@ use crate::{
};
use super::{
AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, UTXOCohorts,
AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, RangeMap, UTXOCohorts,
address::{
AddrCountsVecs, AddressActivityVecs, GrowthRateVecs, NewAddrCountVecs, TotalAddrCountVecs,
},
@@ -61,6 +61,14 @@ pub struct Vecs<M: StorageMode = Rw> {
LazyVecFrom1<FundedAddressIndex, FundedAddressIndex, FundedAddressIndex, FundedAddressData>,
pub emptyaddressindex:
LazyVecFrom1<EmptyAddressIndex, EmptyAddressIndex, EmptyAddressIndex, EmptyAddressData>,
/// In-memory block state for UTXO processing. Persisted via supply_state.
/// Kept across compute() calls to avoid O(n) rebuild on resume.
#[traversable(skip)]
chain_state: Vec<BlockState>,
/// In-memory txindex→height reverse lookup. Kept across compute() calls.
#[traversable(skip)]
txindex_to_height: RangeMap<TxIndex, Height>,
}
const SAVED_STAMPED_CHANGES: u16 = 10;
@@ -148,6 +156,9 @@ impl Vecs {
fundedaddressindex,
emptyaddressindex,
chain_state: Vec::new(),
txindex_to_height: RangeMap::default(),
db,
states_path,
};
@@ -230,8 +241,12 @@ impl Vecs {
debug!("recovered_height={}", recovered_height);
// Fresh start: reset all state
let (starting_height, mut chain_state) = if recovered_height.is_zero() {
// Take chain_state and txindex_to_height out of self to avoid borrow conflicts
let mut chain_state = std::mem::take(&mut self.chain_state);
let mut txindex_to_height = std::mem::take(&mut self.txindex_to_height);
// Recover or reuse chain_state
let starting_height = if recovered_height.is_zero() {
self.supply_state.reset()?;
self.addr_count.reset_height()?;
self.empty_addr_count.reset_height()?;
@@ -243,11 +258,18 @@ impl Vecs {
&mut self.address_cohorts,
)?;
chain_state.clear();
txindex_to_height.truncate(0);
info!("State recovery: fresh start");
(Height::ZERO, vec![])
Height::ZERO
} else if chain_state.len() == usize::from(recovered_height) {
// Normal resume: chain_state already matches, reuse as-is
debug!("reusing in-memory chain_state ({} entries)", chain_state.len());
recovered_height
} else {
// Recover chain_state from stored values
debug!("recovering chain_state from stored values");
// Rollback or first run after restart: rebuild from supply_state
debug!("rebuilding chain_state from stored values");
let height_to_timestamp = &blocks.time.timestamp_monotonic;
let height_to_price = &prices.price.cents.height;
@@ -257,7 +279,7 @@ impl Vecs {
debug!("building supply_state vec for {} heights", recovered_height);
let supply_state_data: Vec<_> = self.supply_state.collect_range_at(0, end);
let chain_state = supply_state_data
chain_state = supply_state_data
.into_iter()
.enumerate()
.map(|(h, supply)| BlockState {
@@ -266,9 +288,12 @@ impl Vecs {
timestamp: timestamp_data[h],
})
.collect();
debug!("chain_state vec built");
debug!("chain_state rebuilt");
(recovered_height, chain_state)
// Truncate RangeMap to match (entries are immutable, safe to keep)
txindex_to_height.truncate(end);
recovered_height
};
// Update starting_indexes if we need to recompute from an earlier point
@@ -316,10 +341,15 @@ impl Vecs {
starting_height,
last_height,
&mut chain_state,
&mut txindex_to_height,
exit,
)?;
}
// Put chain_state and txindex_to_height back
self.chain_state = chain_state;
self.txindex_to_height = txindex_to_height;
// 5. Compute aggregates (overlapping cohorts from separate cohorts)
aggregates::compute_overlapping(
&mut self.utxo_cohorts,

View File

@@ -0,0 +1,46 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{StoredF32, Version};
use schemars::JsonSchema;
use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode, UnaryTransform};
use crate::indexes;
use super::{ComputedFromHeight, LazyFromHeight};
use crate::internal::NumericValue;
/// Basis-point storage with lazy float view.
///
/// Stores integer basis points on disk (Pco-compressed),
/// exposes a lazy StoredF32 view (bps / 100).
#[derive(Traversable)]
pub struct BpsFromHeight<B, M: StorageMode = Rw>
where
B: NumericValue + JsonSchema,
{
pub bps: ComputedFromHeight<B, M>,
pub float: LazyFromHeight<StoredF32, B>,
}
impl<B> BpsFromHeight<B>
where
B: NumericValue + JsonSchema,
{
pub(crate) fn forced_import<F: UnaryTransform<B, StoredF32>>(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let bps = ComputedFromHeight::forced_import(db, name, version, indexes)?;
let float = LazyFromHeight::from_computed::<F>(
&format!("{name}_float"),
version,
bps.height.read_only_boxed_clone(),
&bps,
);
Ok(Self { bps, float })
}
}

View File

@@ -1,5 +1,6 @@
mod aggregated;
mod base;
mod bps;
mod by_unit;
mod constant;
mod cumulative;
@@ -16,6 +17,7 @@ mod value;
pub use aggregated::*;
pub use base::*;
pub use bps::*;
pub use by_unit::*;
pub use constant::*;
pub use cumulative::*;

View File

@@ -7,7 +7,6 @@ use crate::{
ComputeIndexes, blocks, indexes,
internal::{ComputedFromHeightStdDevExtended, Price},
};
use brk_types::get_percentile;
use super::super::ComputedFromHeight;
@@ -34,7 +33,7 @@ pub struct ComputedFromHeightRatioExtension<M: StorageMode = Rw> {
pub ratio_1y_sd: ComputedFromHeightStdDevExtended<M>,
}
const VERSION: Version = Version::TWO;
const VERSION: Version = Version::new(3);
impl ComputedFromHeightRatioExtension {
pub(crate) fn forced_import(
@@ -119,7 +118,8 @@ impl ComputedFromHeightRatioExtension {
exit,
)?;
// Percentiles: insert into sorted array on day boundaries
// Percentiles via order-statistic Fenwick tree with coordinate compression.
// O(n log n) total vs O(n²) for the naive sorted-insert approach.
let ratio_version = ratio_source.version();
self.mut_ratio_vecs()
.try_for_each(|v| -> Result<()> {
@@ -135,68 +135,85 @@ impl ComputedFromHeightRatioExtension {
.min(starting_indexes.height);
let start = starting_height.to_usize();
let day_start = &blocks.count.height_24h_ago;
let ratio_len = ratio_source.len();
// Collect sorted history up to starting point (one per day boundary)
let mut sorted = {
let ratio_data = ratio_source.collect_range_at(0, start);
let day_start_hist = day_start.collect_range_at(0, start);
let mut sorted: Vec<StoredF32> = Vec::new();
let mut last_day_start = Height::from(0_usize);
for (h, ratio) in ratio_data.into_iter().enumerate() {
let cur_day_start = day_start_hist[h];
if h == 0 || cur_day_start != last_day_start {
sorted.push(ratio);
last_day_start = cur_day_start;
if ratio_len > start {
let all_ratios = ratio_source.collect_range_at(0, ratio_len);
// Coordinate compression: unique sorted values → integer ranks
let coords = {
let mut c = all_ratios.clone();
c.sort_unstable();
c.dedup();
c
};
let m = coords.len();
// Build Fenwick tree (BIT) from elements [0, start) in O(m)
let mut bit = vec![0u32; m + 1]; // 1-indexed
for &v in &all_ratios[..start] {
bit[coords.binary_search(&v).unwrap() + 1] += 1;
}
for i in 1..=m {
let j = i + (i & i.wrapping_neg());
if j <= m {
bit[j] += bit[i];
}
}
sorted.sort_unstable();
sorted
};
let pct1_vec = &mut self.ratio_pct1.height;
let pct2_vec = &mut self.ratio_pct2.height;
let pct5_vec = &mut self.ratio_pct5.height;
let pct95_vec = &mut self.ratio_pct95.height;
let pct98_vec = &mut self.ratio_pct98.height;
let pct99_vec = &mut self.ratio_pct99.height;
// Highest power of 2 <= m (for binary-lifting kth query)
let log2 = {
let mut b = 1usize;
while b <= m {
b <<= 1;
}
b >> 1
};
let ratio_len = ratio_source.len();
let ratio_data = ratio_source.collect_range_at(start, ratio_len);
let mut last_day_start = if start > 0 {
day_start
.collect_one_at(start - 1)
.unwrap_or(Height::from(0_usize))
} else {
Height::from(0_usize)
};
// Find rank of k-th smallest element (k is 1-indexed) in O(log m)
let kth = |bit: &[u32], mut k: u32| -> usize {
let mut pos = 0;
let mut b = log2;
while b > 0 {
let next = pos + b;
if next <= m && bit[next] < k {
k -= bit[next];
pos = next;
}
b >>= 1;
}
pos
};
let day_start_data = day_start.collect_range_at(start, ratio_len);
let mut pct_vecs: [&mut EagerVec<PcoVec<Height, StoredF32>>; 6] = [
&mut self.ratio_pct1.height,
&mut self.ratio_pct2.height,
&mut self.ratio_pct5.height,
&mut self.ratio_pct95.height,
&mut self.ratio_pct98.height,
&mut self.ratio_pct99.height,
];
const PCTS: [f64; 6] = [0.01, 0.02, 0.05, 0.95, 0.98, 0.99];
for (offset, ratio) in ratio_data.into_iter().enumerate() {
let index = start + offset;
let mut count = start;
for (offset, &ratio) in all_ratios[start..].iter().enumerate() {
count += 1;
let cur_day_start = day_start_data[offset];
if index == 0 || cur_day_start != last_day_start {
let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p);
sorted.insert(pos, ratio);
last_day_start = cur_day_start;
}
// Insert into Fenwick tree: O(log m)
let mut i = coords.binary_search(&ratio).unwrap() + 1;
while i <= m {
bit[i] += 1;
i += i & i.wrapping_neg();
}
if sorted.is_empty() {
pct1_vec.truncate_push_at(index, StoredF32::NAN)?;
pct2_vec.truncate_push_at(index, StoredF32::NAN)?;
pct5_vec.truncate_push_at(index, StoredF32::NAN)?;
pct95_vec.truncate_push_at(index, StoredF32::NAN)?;
pct98_vec.truncate_push_at(index, StoredF32::NAN)?;
pct99_vec.truncate_push_at(index, StoredF32::NAN)?;
} else {
pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?;
pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?;
pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?;
pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?;
pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?;
pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?;
// Nearest-rank percentile: one kth query each
let idx = start + offset;
let cf = count as f64;
for (vec, &pct) in pct_vecs.iter_mut().zip(PCTS.iter()) {
let k = (cf * pct).ceil().max(1.0) as u32;
let val = coords[kth(&bit, k)];
vec.truncate_push_at(idx, val)?;
}
}
}

View File

@@ -5,7 +5,7 @@ pub use extended::*;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{ComputeIndexes, blocks, indexes};
@@ -51,118 +51,23 @@ impl ComputedFromHeightStdDev {
exit: &Exit,
source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
// 1. Compute SMA using the appropriate lookback vec (or full-history SMA)
if self.days != usize::MAX {
let window_starts = blocks.count.start_vec(self.days);
self.sma.height.compute_rolling_average(
starting_indexes.height,
window_starts,
source,
exit,
)?;
} else {
// Full history SMA (days == usize::MAX)
self.sma.height.compute_sma_(
starting_indexes.height,
source,
self.days,
exit,
None,
)?;
}
let window_starts = blocks.count.start_vec(self.days);
// Split borrows: sd is mutated, sma is read
compute_sd(
&mut self.sd,
blocks,
starting_indexes,
exit,
&self.sma.height,
self.sma.height.compute_rolling_average(
starting_indexes.height,
window_starts,
source,
)
exit,
)?;
self.sd.height.compute_rolling_sd(
starting_indexes.height,
window_starts,
source,
&self.sma.height,
exit,
)?;
Ok(())
}
}
fn compute_sd(
sd: &mut ComputedFromHeight<StoredF32>,
blocks: &blocks::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
sma: &impl ReadableVec<Height, StoredF32>,
source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
let source_version = source.version();
sd.height
.validate_computed_version_or_reset(source_version)?;
let starting_height = Height::from(sd.height.len()).min(starting_indexes.height);
let day_start = &blocks.count.height_24h_ago;
let start = starting_height.to_usize();
let mut n: usize = 0;
let mut welford_sum: f64 = 0.0;
let mut welford_sum_sq: f64 = 0.0;
if start > 0 {
let day_start_hist = day_start.collect_range_at(0, start);
let source_hist = source.collect_range_at(0, start);
let mut last_ds = Height::from(0_usize);
for h in 0..start {
let cur_ds = day_start_hist[h];
if h == 0 || cur_ds != last_ds {
let val = *source_hist[h] as f64;
n += 1;
welford_sum += val;
welford_sum_sq += val * val;
last_ds = cur_ds;
}
}
}
let source_len = source.len();
let source_data = source.collect_range_at(start, source_len);
let sma_data = sma.collect_range_at(start, sma.len());
let mut last_day_start = if start > 0 {
day_start
.collect_one_at(start - 1)
.unwrap_or(Height::from(0_usize))
} else {
Height::from(0_usize)
};
let day_start_data = day_start.collect_range_at(start, source_len);
for (offset, ratio) in source_data.into_iter().enumerate() {
let index = start + offset;
let cur_day_start = day_start_data[offset];
if index == 0 || cur_day_start != last_day_start {
let val = *ratio as f64;
n += 1;
welford_sum += val;
welford_sum_sq += val * val;
last_day_start = cur_day_start;
}
let average = sma_data[offset];
let avg_f64 = *average as f64;
let sd_val = if n > 0 {
let nf = n as f64;
let variance =
welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64;
StoredF32::from(variance.max(0.0).sqrt() as f32)
} else {
StoredF32::from(0.0_f32)
};
sd.height.truncate_push_at(index, sd_val)?;
}
{
let _lock = exit.lock();
sd.height.flush()?;
}
Ok(())
}

View File

@@ -0,0 +1,11 @@
use brk_types::{BasisPoints16, StoredF32};
use vecdb::UnaryTransform;
pub struct Bp16ToFloat;
impl UnaryTransform<BasisPoints16, StoredF32> for Bp16ToFloat {
#[inline(always)]
fn apply(bp: BasisPoints16) -> StoredF32 {
StoredF32::from(bp.to_f32())
}
}

View File

@@ -0,0 +1,11 @@
use brk_types::{BasisPoints32, StoredF32};
use vecdb::UnaryTransform;
pub struct Bp32ToFloat;
impl UnaryTransform<BasisPoints32, StoredF32> for Bp32ToFloat {
#[inline(always)]
fn apply(bp: BasisPoints32) -> StoredF32 {
StoredF32::from(bp.to_f32())
}
}

View File

@@ -0,0 +1,11 @@
use brk_types::{BasisPointsSigned16, StoredF32};
use vecdb::UnaryTransform;
pub struct Bps16ToFloat;
impl UnaryTransform<BasisPointsSigned16, StoredF32> for Bps16ToFloat {
#[inline(always)]
fn apply(bp: BasisPointsSigned16) -> StoredF32 {
StoredF32::from(bp.to_f32())
}
}

View File

@@ -0,0 +1,11 @@
use brk_types::{BasisPointsSigned32, StoredF32};
use vecdb::UnaryTransform;
pub struct Bps32ToFloat;
impl UnaryTransform<BasisPointsSigned32, StoredF32> for Bps32ToFloat {
#[inline(always)]
fn apply(bp: BasisPointsSigned32) -> StoredF32 {
StoredF32::from(bp.to_f32())
}
}

View File

@@ -1,3 +1,7 @@
mod bp16_to_float;
mod bp32_to_float;
mod bps16_to_float;
mod bps32_to_float;
mod block_count_target;
mod cents_halve;
mod cents_identity;
@@ -42,6 +46,10 @@ mod volatility_sqrt30;
mod volatility_sqrt365;
mod volatility_sqrt7;
pub use bp16_to_float::*;
pub use bp32_to_float::*;
pub use bps16_to_float::*;
pub use bps32_to_float::*;
pub use block_count_target::*;
pub use cents_halve::*;
pub use cents_identity::*;

View File

@@ -1,14 +1,15 @@
use brk_error::Result;
use brk_types::StoredU16;
use brk_types::{Day1, StoredU16};
use vecdb::{Exit, ReadableVec, VecIndex};
use super::Vecs;
use crate::{ComputeIndexes, prices, traits::ComputeDrawdown};
use crate::{ComputeIndexes, indexes, prices, traits::ComputeDrawdown};
impl Vecs {
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -18,27 +19,29 @@ impl Vecs {
exit,
)?;
let mut prev = None;
self.days_since_price_ath.height.compute_transform2(
let mut ath_day: Option<Day1> = None;
self.days_since_price_ath.height.compute_transform3(
starting_indexes.height,
&self.price_ath.cents.height,
&prices.price.cents.height,
|(i, ath, price, slf)| {
if prev.is_none() {
let i = i.to_usize();
prev.replace(if i > 0 {
slf.collect_one_at(i - 1).unwrap()
&indexes.height.day1,
|(i, ath, price, day, slf)| {
if ath_day.is_none() {
let idx = i.to_usize();
ath_day = Some(if idx > 0 {
let prev_days_since = slf.collect_one_at(idx - 1).unwrap();
Day1::from(day.to_usize().saturating_sub(usize::from(prev_days_since)))
} else {
StoredU16::default()
day
});
}
let days = if price == ath {
StoredU16::default()
if price == ath {
ath_day = Some(day);
(i, StoredU16::default())
} else {
prev.unwrap() + StoredU16::new(1)
};
prev.replace(days);
(i, days)
let days_since = (day.to_usize() - ath_day.unwrap().to_usize()) as u16;
(i, StoredU16::from(days_since))
}
},
exit,
)?;

View File

@@ -19,7 +19,7 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
// ATH metrics (independent)
self.ath.compute(prices, starting_indexes, exit)?;
self.ath.compute(prices, indexes, starting_indexes, exit)?;
// Lookback metrics (independent)
self.lookback
@@ -27,7 +27,7 @@ impl Vecs {
// Returns metrics (depends on lookback)
self.returns
.compute(indexes, prices, blocks, &self.lookback, starting_indexes, exit)?;
.compute(prices, blocks, &self.lookback, starting_indexes, exit)?;
// Volatility (depends on returns)
self.volatility
@@ -39,7 +39,7 @@ impl Vecs {
// Moving average metrics (independent)
self.moving_average
.compute(blocks, prices, indexes, starting_indexes, exit)?;
.compute(blocks, prices, starting_indexes, exit)?;
// DCA metrics (depends on lookback for lump sum comparison)
self.dca

View File

@@ -1,8 +1,8 @@
use brk_error::Result;
use brk_types::{Bitcoin, Cents, Day1, Date, Dollars, Height, Sats, StoredF32, StoredU32};
use vecdb::{AnyVec, EagerVec, Exit, ReadableOptionVec, ReadableVec, PcoVec, PcoVecValue, VecIndex};
use brk_types::{Bitcoin, Cents, Date, Day1, Dollars, Sats, StoredF32};
use vecdb::{AnyVec, Exit, ReadableOptionVec, ReadableVec, VecIndex};
use super::{ByDcaClass, ByDcaPeriod, Vecs};
use super::Vecs;
use crate::{
ComputeIndexes, blocks, indexes,
internal::{ComputedFromHeight, PercentageDiffCents},
@@ -103,36 +103,20 @@ impl Vecs {
)?;
}
// DCA by period - CAGR (computed from returns)
// DCA by period - CAGR (computed from returns at height level)
for (cagr, returns, days) in self.period_cagr.zip_mut_with_period(&self.period_returns) {
let years = days as f32 / 365.0;
let returns_data: Vec<StoredF32> = returns.day1.collect_or_default();
cagr.height.compute_transform(
starting_indexes.height,
h2d,
|(h, di, _)| {
let v = returns_data.get(di.to_usize())
.map(|r| ((**r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0)
.unwrap_or(0.0);
&returns.height,
|(h, r, ..)| {
let v = ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0;
(h, StoredF32::from(v))
},
exit,
)?;
}
// DCA by period - profitability
compute_period_rolling(
&mut self.period_days_in_profit,
&mut self.period_days_in_loss,
&mut self.period_min_return,
&mut self.period_max_return,
&self.period_returns,
blocks,
h2d,
starting_indexes,
exit,
)?;
// Lump sum by period - stack
let lookback_dca = lookback.price_ago.as_dca_period();
for (stack, lookback_price, days) in
@@ -171,19 +155,6 @@ impl Vecs {
)?;
}
// Lump sum by period - profitability
compute_period_rolling(
&mut self.period_lump_sum_days_in_profit,
&mut self.period_lump_sum_days_in_loss,
&mut self.period_lump_sum_min_return,
&mut self.period_lump_sum_max_return,
&self.period_lump_sum_returns,
blocks,
h2d,
starting_indexes,
exit,
)?;
// DCA by year class - stack (cumulative sum from class start date)
let start_days = super::ByDcaClass::<()>::start_days();
for (stack, day1) in self.class_stack.iter_mut().zip(start_days) {
@@ -265,7 +236,6 @@ impl Vecs {
.iter_mut()
.zip(self.class_average_price.iter())
{
returns.compute_binary::<Cents, Cents, PercentageDiffCents>(
starting_indexes.height,
&prices.price.cents.height,
@@ -274,18 +244,6 @@ impl Vecs {
)?;
}
// DCA by year class - profitability
compute_class_cumulative(
&mut self.class_days_in_profit,
&mut self.class_days_in_loss,
&mut self.class_min_return,
&mut self.class_max_return,
&self.class_returns,
h2d,
starting_indexes,
exit,
)?;
Ok(())
}
}
@@ -297,217 +255,3 @@ fn sats_from_dca(price: Dollars) -> Sats {
Sats::from(Bitcoin::from(DCA_AMOUNT / price))
}
}
#[allow(clippy::too_many_arguments)]
fn compute_period_rolling(
days_in_profit: &mut ByDcaPeriod<ComputedFromHeight<StoredU32>>,
days_in_loss: &mut ByDcaPeriod<ComputedFromHeight<StoredU32>>,
min_return: &mut ByDcaPeriod<ComputedFromHeight<StoredF32>>,
max_return: &mut ByDcaPeriod<ComputedFromHeight<StoredF32>>,
returns: &ByDcaPeriod<ComputedFromHeight<StoredF32>>,
blocks: &blocks::Vecs,
h2d: &EagerVec<PcoVec<Height, Day1>>,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
for ((((dip, dil), minr), maxr), (ret, days)) in days_in_profit
.iter_mut()
.zip(days_in_loss.iter_mut())
.zip(min_return.iter_mut())
.zip(max_return.iter_mut())
.zip(returns.iter_with_days())
{
let window_starts = blocks.count.start_vec(days as usize);
let returns_data: Vec<StoredF32> = ret.day1.collect_or_default();
compute_rolling(
&mut dip.height, h2d, &returns_data, window_starts, starting_indexes.height, exit,
|buf| StoredU32::from(buf.iter().copied().filter(|r| **r > 0.0).count()),
)?;
compute_rolling(
&mut dil.height, h2d, &returns_data, window_starts, starting_indexes.height, exit,
|buf| StoredU32::from(buf.iter().copied().filter(|r| **r < 0.0).count()),
)?;
compute_rolling(
&mut minr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit,
|buf| {
buf.iter()
.copied()
.reduce(|a, b| if *b < *a { b } else { a })
.unwrap_or_default()
},
)?;
compute_rolling(
&mut maxr.height, h2d, &returns_data, window_starts, starting_indexes.height, exit,
|buf| {
buf.iter()
.copied()
.reduce(|a, b| if *b > *a { b } else { a })
.unwrap_or_default()
},
)?;
}
Ok(())
}
#[allow(clippy::too_many_arguments)]
fn compute_class_cumulative(
days_in_profit: &mut ByDcaClass<ComputedFromHeight<StoredU32>>,
days_in_loss: &mut ByDcaClass<ComputedFromHeight<StoredU32>>,
min_return: &mut ByDcaClass<ComputedFromHeight<StoredF32>>,
max_return: &mut ByDcaClass<ComputedFromHeight<StoredF32>>,
returns: &ByDcaClass<ComputedFromHeight<StoredF32>>,
h2d: &EagerVec<PcoVec<Height, Day1>>,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let start_days = ByDcaClass::<()>::start_days();
for (((((dip, dil), minr), maxr), ret), from) in days_in_profit
.iter_mut()
.zip(days_in_loss.iter_mut())
.zip(min_return.iter_mut())
.zip(max_return.iter_mut())
.zip(returns.iter())
.zip(start_days)
{
compute_cumulative(
&mut dip.height, h2d, &ret.day1, from, starting_indexes.height, exit,
StoredU32::ZERO,
|prev, ret| if *ret > 0.0 { prev + StoredU32::ONE } else { prev },
)?;
compute_cumulative(
&mut dil.height, h2d, &ret.day1, from, starting_indexes.height, exit,
StoredU32::ZERO,
|prev, ret| if *ret < 0.0 { prev + StoredU32::ONE } else { prev },
)?;
compute_cumulative(
&mut minr.height, h2d, &ret.day1, from, starting_indexes.height, exit,
StoredF32::from(f32::MAX),
|prev, ret| if *ret < *prev { ret } else { prev },
)?;
compute_cumulative(
&mut maxr.height, h2d, &ret.day1, from, starting_indexes.height, exit,
StoredF32::from(f32::MIN),
|prev, ret| if *ret > *prev { ret } else { prev },
)?;
}
Ok(())
}
/// Compute a rolling day-window metric at height level using _start vecs.
#[allow(clippy::too_many_arguments)]
fn compute_rolling<T: PcoVecValue + Default>(
output: &mut EagerVec<PcoVec<Height, T>>,
h2d: &EagerVec<PcoVec<Height, Day1>>,
returns_data: &[StoredF32],
window_starts: &EagerVec<PcoVec<Height, Height>>,
starting_height: Height,
exit: &Exit,
mut aggregate: impl FnMut(&[StoredF32]) -> T,
) -> Result<()> {
// Cursor + cache avoids per-height PcoVec page decompression for the
// h2d lookback read. Window-start heights are non-decreasing so the
// cursor only moves forward; the cache handles repeated values.
let mut h2d_cursor = h2d.cursor();
let mut last_ws = Height::ZERO;
let mut last_ws_di = Day1::default();
output.compute_transform2(
starting_height,
h2d,
window_starts,
|(h, di, window_start, ..)| {
let window_start_di = if window_start == last_ws {
last_ws_di
} else {
let target = window_start.to_usize();
let ws_di = if target >= h2d_cursor.position() {
h2d_cursor.advance(target - h2d_cursor.position());
h2d_cursor.next().unwrap_or_default()
} else {
// Cursor past target (batch boundary); rare fallback
h2d.collect_one(window_start).unwrap_or_default()
};
last_ws = window_start;
last_ws_di = ws_di;
ws_di
};
let start = window_start_di.to_usize();
let end = di.to_usize() + 1;
if start >= end {
return (h, T::default());
}
(h, aggregate(&returns_data[start..end]))
},
exit,
)?;
Ok(())
}
/// Compute a cumulative metric at height level starting from a fixed date.
#[allow(clippy::too_many_arguments)]
fn compute_cumulative<T: PcoVecValue + Default>(
output: &mut EagerVec<PcoVec<Height, T>>,
h2d: &EagerVec<PcoVec<Height, Day1>>,
returns: &impl ReadableOptionVec<Day1, StoredF32>,
from_day1: Day1,
starting_height: Height,
exit: &Exit,
initial: T,
mut accumulate: impl FnMut(T, StoredF32) -> T,
) -> Result<()> {
let mut last_di: Option<Day1> = None;
let sh = starting_height.to_usize();
let mut prev_value = if sh > 0 {
output.collect_one_at(sh - 1).unwrap_or_default()
} else {
T::default()
};
output.compute_transform(
starting_height,
h2d,
|(h, di, _)| {
let hi = h.to_usize();
if last_di.is_none() && hi > 0 {
last_di = Some(h2d.collect_one_at(hi - 1).unwrap());
}
if di < from_day1 {
last_di = Some(di);
prev_value = T::default();
return (h, T::default());
}
let prev_di = last_di;
last_di = Some(di);
let same_day = prev_di.is_some_and(|prev| prev == di);
let result = if same_day {
prev_value
} else {
let prev = if hi > 0 && prev_di.is_some_and(|pd| pd >= from_day1) {
prev_value
} else {
initial
};
let ret = returns.collect_one_flat(di).unwrap_or_default();
accumulate(prev, ret)
};
prev_value = result;
(h, result)
},
exit,
)?;
Ok(())
}

View File

@@ -5,10 +5,7 @@ use vecdb::{Database, ImportableVec};
use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod, Vecs};
use crate::{
indexes,
internal::{
ComputedFromHeight, Price,
ValueFromHeight,
},
internal::{ComputedFromHeight, Price, ValueFromHeight},
};
impl Vecs {
@@ -17,88 +14,26 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
// DCA by period - stack (KISS)
let period_stack = ByDcaPeriod::try_new(|name, _days| {
ValueFromHeight::forced_import(
db,
&format!("{name}_dca_stack"),
version,
indexes,
)
ValueFromHeight::forced_import(db, &format!("{name}_dca_stack"), version, indexes)
})?;
// DCA by period - average price
let period_average_price = ByDcaPeriod::try_new(|name, _days| {
Price::forced_import(
db,
&format!("{name}_dca_average_price"),
version,
indexes,
)
Price::forced_import(db, &format!("{name}_dca_average_price"), version, indexes)
})?;
let period_returns = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_dca_returns"),
version,
indexes,
)
ComputedFromHeight::forced_import(db, &format!("{name}_dca_returns"), version, indexes)
})?;
// DCA by period - CAGR
let period_cagr = ByDcaCagr::try_new(|name, _days| {
ComputedFromHeight::forced_import(db, &format!("{name}_dca_cagr"), version, indexes)
})?;
// DCA by period - profitability
let period_days_in_profit = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_dca_days_in_profit"),
version + Version::ONE,
indexes,
)
})?;
let period_days_in_loss = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_dca_days_in_loss"),
version + Version::ONE,
indexes,
)
})?;
let period_min_return = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_dca_min_return"),
version,
indexes,
)
})?;
let period_max_return = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_dca_max_return"),
version,
indexes,
)
})?;
// Lump sum by period - stack (KISS)
let period_lump_sum_stack = ByDcaPeriod::try_new(|name, _days| {
ValueFromHeight::forced_import(
db,
&format!("{name}_lump_sum_stack"),
version,
indexes,
)
ValueFromHeight::forced_import(db, &format!("{name}_lump_sum_stack"), version, indexes)
})?;
// Lump sum by period - returns
let period_lump_sum_returns = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
@@ -108,92 +43,16 @@ impl Vecs {
)
})?;
// Lump sum by period - profitability
let period_lump_sum_days_in_profit = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_lump_sum_days_in_profit"),
version + Version::ONE,
indexes,
)
})?;
let period_lump_sum_days_in_loss = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_lump_sum_days_in_loss"),
version + Version::ONE,
indexes,
)
})?;
let period_lump_sum_min_return = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_lump_sum_min_return"),
version,
indexes,
)
})?;
let period_lump_sum_max_return = ByDcaPeriod::try_new(|name, _days| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_lump_sum_max_return"),
version,
indexes,
)
})?;
// DCA by year class - stack (KISS)
let class_stack = ByDcaClass::try_new(|name, _year, _day1| {
ValueFromHeight::forced_import(
db,
&format!("{name}_stack"),
version,
indexes,
)
ValueFromHeight::forced_import(db, &format!("{name}_stack"), version, indexes)
})?;
// DCA by year class - average price
let class_average_price = ByDcaClass::try_new(|name, _year, _day1| {
Price::forced_import(db, &format!("{name}_average_price"), version, indexes)
})?;
let class_returns = ByDcaClass::try_new(|name, _year, _day1| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_returns"),
version,
indexes,
)
})?;
// DCA by year class - profitability
let class_days_in_profit = ByDcaClass::try_new(|name, _year, _day1| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_days_in_profit"),
version,
indexes,
)
})?;
let class_days_in_loss = ByDcaClass::try_new(|name, _year, _day1| {
ComputedFromHeight::forced_import(
db,
&format!("{name}_days_in_loss"),
version,
indexes,
)
})?;
let class_min_return = ByDcaClass::try_new(|name, _year, _day1| {
ComputedFromHeight::forced_import(db, &format!("{name}_min_return"), version, indexes)
})?;
let class_max_return = ByDcaClass::try_new(|name, _year, _day1| {
ComputedFromHeight::forced_import(db, &format!("{name}_max_return"), version, indexes)
ComputedFromHeight::forced_import(db, &format!("{name}_returns"), version, indexes)
})?;
Ok(Self {
@@ -202,23 +61,11 @@ impl Vecs {
period_average_price,
period_returns,
period_cagr,
period_days_in_profit,
period_days_in_loss,
period_min_return,
period_max_return,
period_lump_sum_stack,
period_lump_sum_returns,
period_lump_sum_days_in_profit,
period_lump_sum_days_in_loss,
period_lump_sum_min_return,
period_lump_sum_max_return,
class_stack,
class_average_price,
class_returns,
class_days_in_profit,
class_days_in_loss,
class_min_return,
class_max_return,
})
}
}

View File

@@ -1,11 +1,9 @@
use brk_traversable::Traversable;
use brk_types::{Cents, Height, Sats, StoredF32, StoredU32};
use brk_types::{Cents, Height, Sats, StoredF32};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use super::{ByDcaCagr, ByDcaClass, ByDcaPeriod};
use crate::internal::{
ComputedFromHeight, Price, ValueFromHeight,
};
use crate::internal::{ComputedFromHeight, Price, ValueFromHeight};
/// Dollar-cost averaging metrics by time period and year class
#[derive(Traversable)]
@@ -14,36 +12,18 @@ pub struct Vecs<M: StorageMode = Rw> {
/// Computed once, reused by all period rolling sums.
pub dca_sats_per_day: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
// DCA by period - KISS types
// DCA by period
pub period_stack: ByDcaPeriod<ValueFromHeight<M>>,
pub period_average_price: ByDcaPeriod<Price<ComputedFromHeight<Cents, M>>>,
pub period_returns: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
pub period_cagr: ByDcaCagr<ComputedFromHeight<StoredF32, M>>,
// DCA by period - profitability
pub period_days_in_profit: ByDcaPeriod<ComputedFromHeight<StoredU32, M>>,
pub period_days_in_loss: ByDcaPeriod<ComputedFromHeight<StoredU32, M>>,
pub period_min_return: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
pub period_max_return: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
// Lump sum by period (for comparison with DCA) - KISS types
// Lump sum by period (for comparison with DCA)
pub period_lump_sum_stack: ByDcaPeriod<ValueFromHeight<M>>,
pub period_lump_sum_returns: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
// Lump sum by period - profitability
pub period_lump_sum_days_in_profit: ByDcaPeriod<ComputedFromHeight<StoredU32, M>>,
pub period_lump_sum_days_in_loss: ByDcaPeriod<ComputedFromHeight<StoredU32, M>>,
pub period_lump_sum_min_return: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
pub period_lump_sum_max_return: ByDcaPeriod<ComputedFromHeight<StoredF32, M>>,
// DCA by year class - KISS types
// DCA by year class
pub class_stack: ByDcaClass<ValueFromHeight<M>>,
pub class_average_price: ByDcaClass<Price<ComputedFromHeight<Cents, M>>>,
pub class_returns: ByDcaClass<ComputedFromHeight<StoredF32, M>>,
// DCA by year class - profitability
pub class_days_in_profit: ByDcaClass<ComputedFromHeight<StoredU32, M>>,
pub class_days_in_loss: ByDcaClass<ComputedFromHeight<StoredU32, M>>,
pub class_min_return: ByDcaClass<ComputedFromHeight<StoredF32, M>>,
pub class_max_return: ByDcaClass<ComputedFromHeight<StoredF32, M>>,
}

View File

@@ -9,6 +9,16 @@ use crate::{
mining, prices, transactions,
};
/// Number of days represented by one bar of timeframe `tf`.
///
/// Supported timeframes: `"1d"` (1), `"1w"` (7), `"1m"` (30), `"1y"` (365).
/// Any other string is a programming error and panics via `unreachable!`.
fn tf_multiplier(tf: &str) -> usize {
    const TIMEFRAME_DAYS: [(&str, usize); 4] = [("1d", 1), ("1w", 7), ("1m", 30), ("1y", 365)];
    TIMEFRAME_DAYS
        .iter()
        .find_map(|&(name, days)| (name == tf).then_some(days))
        .unwrap_or_else(|| unreachable!())
}
impl Vecs {
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute(
@@ -60,18 +70,22 @@ impl Vecs {
)?;
}
// Pre-collect Height→Day1 mapping
let h2d: Vec<Day1> = indexes.height.day1.collect();
let total_heights = h2d.len();
// RSI per timeframe
for (tf, rsi_chain) in self.rsi.iter_mut() {
let m = tf_multiplier(tf);
let returns_source = match tf {
"1d" => &returns.price_returns._24h.height,
"1w" => &returns.price_returns._1w.height,
"1m" => &returns.price_returns._1m.height,
"1y" => &returns.price_returns._1y.height,
_ => unreachable!(),
};
super::rsi::compute(
rsi_chain,
tf,
returns,
&h2d,
total_heights,
blocks,
returns_source,
14 * m,
3 * m,
starting_indexes,
exit,
)?;
@@ -79,18 +93,22 @@ impl Vecs {
// MACD per timeframe
for (tf, macd_chain) in self.macd.iter_mut() {
let m = tf_multiplier(tf);
super::macd::compute(
macd_chain,
tf,
blocks,
prices,
&h2d,
total_heights,
12 * m,
26 * m,
9 * m,
starting_indexes,
exit,
)?;
}
// Gini (daily only, expanded to Height)
// Gini (daily, expanded to Height)
let h2d: Vec<Day1> = indexes.height.day1.collect();
let total_heights = h2d.len();
super::gini::compute(
&mut self.gini,
distribution,

View File

@@ -1,5 +1,5 @@
use brk_error::Result;
use brk_types::{Day1, Sats, StoredF32, StoredU64, Version};
use brk_types::{Day1, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableOptionVec, VecIndex, WritableVec};
use crate::{ComputeIndexes, distribution, internal::ComputedFromHeight};
@@ -44,35 +44,39 @@ pub(super) fn compute(
return Ok(());
}
// Pre-collect all daily data
let supply_data: Vec<Vec<Sats>> = supply_vecs
let num_days = supply_vecs
.iter()
.map(|v| v.collect_or_default())
.collect();
let count_data: Vec<Vec<StoredU64>> = count_vecs
.iter()
.map(|v| v.collect_or_default())
.collect();
let num_days = supply_data.first().map_or(0, |v| v.len());
.map(|v| v.len())
.min()
.unwrap_or(0)
.min(count_vecs.iter().map(|v| v.len()).min().unwrap_or(0));
// Compute gini per day in-memory
let mut gini_daily = Vec::with_capacity(num_days);
let mut buckets: Vec<(u64, u64)> = Vec::with_capacity(supply_data.len());
for di in 0..num_days {
// Only compute gini for new days (each day is independent)
let start_day = if start_height > 0 {
h2d[start_height].to_usize()
} else {
0
};
let mut gini_new: Vec<f32> = Vec::with_capacity(num_days.saturating_sub(start_day));
let mut buckets: Vec<(u64, u64)> = Vec::with_capacity(supply_vecs.len());
for di in start_day..num_days {
buckets.clear();
buckets.extend(supply_data.iter().zip(count_data.iter()).map(|(s, c)| {
let count: u64 = c[di].into();
let supply: u64 = s[di].into();
(count, supply)
}));
gini_daily.push(gini_from_lorenz(&buckets));
let day = Day1::from(di);
for (sv, cv) in supply_vecs.iter().zip(count_vecs.iter()) {
let supply: u64 = sv.collect_one_flat(day).unwrap_or_default().into();
let count: u64 = cv.collect_one_flat(day).unwrap_or_default().into();
buckets.push((count, supply));
}
gini_new.push(gini_from_lorenz(&buckets));
}
// Expand to Height
(start_height..total_heights).for_each(|h| {
let di = h2d[h].to_usize();
let val = if di < gini_daily.len() {
StoredF32::from(gini_daily[di])
let offset = di.saturating_sub(start_day);
let val = if offset < gini_new.len() {
StoredF32::from(gini_new[offset])
} else {
StoredF32::NAN
};
@@ -92,7 +96,7 @@ fn gini_from_lorenz(buckets: &[(u64, u64)]) -> f32 {
let total_supply: u64 = buckets.iter().map(|(_, s)| s).sum();
if total_count == 0 || total_supply == 0 {
return 0.0;
return f32::NAN;
}
let (mut cumulative_count, mut cumulative_supply, mut area) = (0u64, 0u64, 0.0f64);

View File

@@ -81,6 +81,18 @@ impl MacdChain {
)?;
Ok(Self {
ema_fast: ComputedFromHeight::forced_import(
db,
&format!("macd_ema_fast_{tf}"),
version,
indexes,
)?,
ema_slow: ComputedFromHeight::forced_import(
db,
&format!("macd_ema_slow_{tf}"),
version,
indexes,
)?,
line,
signal,
histogram,

View File

@@ -1,103 +1,61 @@
use brk_error::Result;
use brk_types::{Day1, StoredF32};
use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, WritableVec};
use vecdb::Exit;
use super::{MacdChain, smoothing::compute_ema, timeframe::{collect_closes, date_to_period}};
use crate::{ComputeIndexes, prices};
use super::MacdChain;
use crate::{ComputeIndexes, blocks, prices};
#[allow(clippy::too_many_arguments)]
pub(super) fn compute(
chain: &mut MacdChain,
tf: &str,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
h2d: &[Day1],
total_heights: usize,
fast_days: usize,
slow_days: usize,
signal_days: usize,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let source_version = prices.price.usd.height.version();
let close = &prices.price.usd.height;
let ws_fast = blocks.count.start_vec(fast_days);
let ws_slow = blocks.count.start_vec(slow_days);
let ws_signal = blocks.count.start_vec(signal_days);
chain
.line
.height
.validate_computed_version_or_reset(source_version)?;
chain
.signal
.height
.validate_computed_version_or_reset(source_version)?;
chain.line.height.truncate_if_needed_at(
chain
.line
.height
.len()
.min(starting_indexes.height.to_usize()),
)?;
chain.signal.height.truncate_if_needed_at(
chain
.signal
.height
.len()
.min(starting_indexes.height.to_usize()),
chain.ema_fast.height.compute_rolling_ema(
starting_indexes.height,
ws_fast,
close,
exit,
)?;
chain
.histogram
.height
.validate_computed_version_or_reset(source_version)?;
chain.histogram.height.truncate_if_needed_at(
chain
.histogram
.height
.len()
.min(starting_indexes.height.to_usize()),
chain.ema_slow.height.compute_rolling_ema(
starting_indexes.height,
ws_slow,
close,
exit,
)?;
let start_height = chain.line.height.len();
if start_height >= total_heights {
return Ok(());
}
// MACD line = ema_fast - ema_slow
chain.line.height.compute_subtract(
starting_indexes.height,
&chain.ema_fast.height,
&chain.ema_slow.height,
exit,
)?;
// Collect close prices at timeframe level
let closes = collect_closes(tf, prices);
let closes_f32: Vec<f32> = closes.iter().map(|d| **d as f32).collect();
// Signal = EMA of MACD line
chain.signal.height.compute_rolling_ema(
starting_indexes.height,
ws_signal,
&chain.line.height,
exit,
)?;
// Compute MACD in-memory
let ema12 = compute_ema(&closes_f32, 12);
let ema26 = compute_ema(&closes_f32, 26);
let macd_line: Vec<f32> = ema12.iter().zip(ema26.iter()).map(|(a, b)| a - b).collect();
let macd_signal = compute_ema(&macd_line, 9);
let macd_histogram: Vec<f32> = macd_line.iter().zip(macd_signal.iter()).map(|(a, b)| a - b).collect();
// Expand to Height
(start_height..total_heights).for_each(|h| {
let pi = date_to_period(tf, h2d[h]);
chain.line.height.push(if pi < macd_line.len() {
StoredF32::from(macd_line[pi])
} else {
StoredF32::NAN
});
chain.signal.height.push(if pi < macd_signal.len() {
StoredF32::from(macd_signal[pi])
} else {
StoredF32::NAN
});
chain.histogram.height.push(if pi < macd_histogram.len() {
StoredF32::from(macd_histogram[pi])
} else {
StoredF32::NAN
});
});
{
let _lock = exit.lock();
chain.line.height.write()?;
chain.signal.height.write()?;
chain.histogram.height.write()?;
}
// Histogram = line - signal
chain.histogram.height.compute_subtract(
starting_indexes.height,
&chain.line.height,
&chain.signal.height,
exit,
)?;
Ok(())
}

View File

@@ -3,8 +3,6 @@ mod gini;
mod import;
mod macd;
mod rsi;
mod smoothing;
mod timeframe;
mod vecs;
pub use vecs::{ByIndicatorTimeframe, MacdChain, RsiChain, Vecs};

View File

@@ -1,126 +1,115 @@
use brk_error::Result;
use brk_types::{Day1, StoredF32};
use vecdb::{AnyStoredVec, AnyVec, Exit, VecIndex, WritableVec};
use brk_types::{Height, StoredF32};
use vecdb::{Exit, ReadableVec};
use super::{
RsiChain,
smoothing::{compute_rma, compute_rolling_max, compute_rolling_min, compute_sma},
timeframe::{collect_returns, date_to_period},
};
use crate::{ComputeIndexes, market::returns::Vecs as ReturnsVecs};
use super::RsiChain;
use crate::{ComputeIndexes, blocks};
#[allow(clippy::too_many_arguments)]
pub(super) fn compute(
chain: &mut RsiChain,
tf: &str,
returns: &ReturnsVecs,
h2d: &[Day1],
total_heights: usize,
blocks: &blocks::Vecs,
returns_source: &impl ReadableVec<Height, StoredF32>,
rma_days: usize,
stoch_sma_days: usize,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let source_version = returns.price_returns._24h.height.version();
let ws_rma = blocks.count.start_vec(rma_days);
let ws_sma = blocks.count.start_vec(stoch_sma_days);
let vecs = [
&mut chain.gains.height,
&mut chain.losses.height,
&mut chain.average_gain.height,
&mut chain.average_loss.height,
&mut chain.rsi.height,
&mut chain.rsi_min.height,
&mut chain.rsi_max.height,
&mut chain.stoch_rsi.height,
&mut chain.stoch_rsi_k.height,
&mut chain.stoch_rsi_d.height,
];
// Gains = max(return, 0)
chain.gains.height.compute_transform(
starting_indexes.height,
returns_source,
|(h, r, ..)| (h, StoredF32::from((*r).max(0.0))),
exit,
)?;
for v in vecs {
v.validate_computed_version_or_reset(source_version)?;
v.truncate_if_needed_at(v.len().min(starting_indexes.height.to_usize()))?;
}
// Losses = max(-return, 0)
chain.losses.height.compute_transform(
starting_indexes.height,
returns_source,
|(h, r, ..)| (h, StoredF32::from((-*r).max(0.0))),
exit,
)?;
let start_height = chain.gains.height.len();
if start_height >= total_heights {
return Ok(());
}
// Average gain = RMA of gains
chain.average_gain.height.compute_rolling_rma(
starting_indexes.height,
ws_rma,
&chain.gains.height,
exit,
)?;
// Collect returns at the appropriate timeframe level
let period_returns = collect_returns(tf, returns);
// Average loss = RMA of losses
chain.average_loss.height.compute_rolling_rma(
starting_indexes.height,
ws_rma,
&chain.losses.height,
exit,
)?;
// Compute in-memory
let gains: Vec<f32> = period_returns.iter().map(|r| r.max(0.0)).collect();
let losses: Vec<f32> = period_returns.iter().map(|r| (-r).max(0.0)).collect();
let avg_gain = compute_rma(&gains, 14);
let avg_loss = compute_rma(&losses, 14);
// RSI = 100 * avg_gain / (avg_gain + avg_loss)
chain.rsi.height.compute_transform2(
starting_indexes.height,
&chain.average_gain.height,
&chain.average_loss.height,
|(h, g, l, ..)| {
let sum = *g + *l;
let rsi = if sum == 0.0 { 50.0 } else { 100.0 * *g / sum };
(h, StoredF32::from(rsi))
},
exit,
)?;
let rsi: Vec<f32> = avg_gain
.iter()
.zip(avg_loss.iter())
.map(|(g, l)| {
let sum = g + l;
if sum == 0.0 { 50.0 } else { 100.0 * g / sum }
})
.collect();
// Rolling min/max of RSI over rma_days window
chain.rsi_min.height.compute_rolling_min_from_starts(
starting_indexes.height,
ws_rma,
&chain.rsi.height,
exit,
)?;
let rsi_min = compute_rolling_min(&rsi, 14);
let rsi_max = compute_rolling_max(&rsi, 14);
chain.rsi_max.height.compute_rolling_max_from_starts(
starting_indexes.height,
ws_rma,
&chain.rsi.height,
exit,
)?;
let stoch_rsi: Vec<f32> = rsi
.iter()
.zip(rsi_min.iter())
.zip(rsi_max.iter())
.map(|((r, mn), mx)| {
let range = mx - mn;
if range == 0.0 {
f32::NAN
// StochRSI = (rsi - rsi_min) / (rsi_max - rsi_min) * 100
chain.stoch_rsi.height.compute_transform3(
starting_indexes.height,
&chain.rsi.height,
&chain.rsi_min.height,
&chain.rsi_max.height,
|(h, r, mn, mx, ..)| {
let range = *mx - *mn;
let stoch = if range == 0.0 {
StoredF32::NAN
} else {
(r - mn) / range * 100.0
}
})
.collect();
StoredF32::from((*r - *mn) / range * 100.0)
};
(h, stoch)
},
exit,
)?;
let stoch_rsi_k = compute_sma(&stoch_rsi, 3);
let stoch_rsi_d = compute_sma(&stoch_rsi_k, 3);
// StochRSI K = SMA of StochRSI
chain.stoch_rsi_k.height.compute_rolling_average(
starting_indexes.height,
ws_sma,
&chain.stoch_rsi.height,
exit,
)?;
// Expand to Height
macro_rules! expand {
($target:expr, $buffer:expr) => {
for h in start_height..total_heights {
let pi = date_to_period(tf, h2d[h]);
let val = if pi < $buffer.len() {
StoredF32::from($buffer[pi])
} else {
StoredF32::NAN
};
$target.push(val);
}
};
}
expand!(chain.gains.height, gains);
expand!(chain.losses.height, losses);
expand!(chain.average_gain.height, avg_gain);
expand!(chain.average_loss.height, avg_loss);
expand!(chain.rsi.height, rsi);
expand!(chain.rsi_min.height, rsi_min);
expand!(chain.rsi_max.height, rsi_max);
expand!(chain.stoch_rsi.height, stoch_rsi);
expand!(chain.stoch_rsi_k.height, stoch_rsi_k);
expand!(chain.stoch_rsi_d.height, stoch_rsi_d);
{
let _lock = exit.lock();
chain.gains.height.write()?;
chain.losses.height.write()?;
chain.average_gain.height.write()?;
chain.average_loss.height.write()?;
chain.rsi.height.write()?;
chain.rsi_min.height.write()?;
chain.rsi_max.height.write()?;
chain.stoch_rsi.height.write()?;
chain.stoch_rsi_k.height.write()?;
chain.stoch_rsi_d.height.write()?;
}
// StochRSI D = SMA of K
chain.stoch_rsi_d.height.compute_rolling_average(
starting_indexes.height,
ws_sma,
&chain.stoch_rsi_k.height,
exit,
)?;
Ok(())
}

View File

@@ -1,89 +0,0 @@
use std::collections::VecDeque;
/// Wilder-style running moving average (RMA) with smoothing factor `1 / period`.
///
/// Warmup: for the first `period` samples the output is the expanding mean of
/// everything seen so far; afterwards each sample is blended into the previous
/// output with weight `k`. `period` must be >= 1.
pub(super) fn compute_rma(source: &[f32], period: usize) -> Vec<f32> {
    let k = 1.0 / period as f32;
    let mut out = Vec::with_capacity(source.len());
    let mut warmup_sum = 0.0f32;
    for (i, &sample) in source.iter().enumerate() {
        let smoothed = if i < period {
            // Expanding average until a full period of data is available.
            warmup_sum += sample;
            warmup_sum / (i + 1) as f32
        } else {
            // Exponential blend against the previous output value.
            sample * k + out[i - 1] * (1.0 - k)
        };
        out.push(smoothed);
    }
    out
}
/// Exponential moving average (EMA) with the standard factor `2 / (period + 1)`.
///
/// Warmup: for the first `period` samples the output is the expanding mean of
/// everything seen so far; afterwards each sample is blended into the previous
/// output with weight `k`.
pub(super) fn compute_ema(source: &[f32], period: usize) -> Vec<f32> {
    let k = 2.0 / (period as f32 + 1.0);
    let mut out = Vec::with_capacity(source.len());
    let mut warmup_sum = 0.0f32;
    for (i, &sample) in source.iter().enumerate() {
        let smoothed = if i < period {
            // Expanding average until a full period of data is available.
            warmup_sum += sample;
            warmup_sum / (i + 1) as f32
        } else {
            // Exponential blend against the previous output value.
            sample * k + out[i - 1] * (1.0 - k)
        };
        out.push(smoothed);
    }
    out
}
/// Simple moving average over a trailing `window` of samples.
///
/// Before a full window has accumulated, the mean of all samples seen so far
/// is emitted. Uses a running sum, so the whole pass is O(n). `window` must
/// be >= 1.
pub(super) fn compute_sma(source: &[f32], window: usize) -> Vec<f32> {
    let mut out = Vec::with_capacity(source.len());
    let mut running = 0.0f32;
    for (i, &sample) in source.iter().enumerate() {
        running += sample;
        if i >= window {
            // Drop the sample that just slid out of the trailing window.
            running -= source[i - window];
        }
        let denom = (i + 1).min(window);
        out.push(running / denom as f32);
    }
    out
}
/// Rolling minimum over a trailing `window`, using a monotonic deque.
///
/// The deque stores `(index, value)` candidates with values increasing from
/// front to back, so the front is always the minimum of the current window.
/// Each element enters and leaves the deque at most once — O(n) overall.
/// `window` must be >= 1.
pub(super) fn compute_rolling_min(source: &[f32], window: usize) -> Vec<f32> {
    let mut out = Vec::with_capacity(source.len());
    let mut candidates: VecDeque<(usize, f32)> = VecDeque::new();
    for (i, &val) in source.iter().enumerate() {
        // A candidate >= the new value can never be the window minimum again.
        while candidates.back().is_some_and(|&(_, v)| v >= val) {
            candidates.pop_back();
        }
        candidates.push_back((i, val));
        // Evict the front once its index falls out of the trailing window.
        while candidates.front().is_some_and(|&(idx, _)| idx + window <= i) {
            candidates.pop_front();
        }
        out.push(candidates.front().unwrap().1);
    }
    out
}
/// Rolling maximum of `source` over a trailing `window`, using a monotonic
/// deque so the whole pass is O(n). Before the window fills, the maximum of
/// the available prefix is emitted.
pub(super) fn compute_rolling_max(source: &[f32], window: usize) -> Vec<f32> {
    let mut out = Vec::with_capacity(source.len());
    // Candidates (index, value) kept strictly decreasing by value;
    // the front is always the current window's maximum.
    let mut candidates: VecDeque<(usize, f32)> = VecDeque::new();
    for (i, &v) in source.iter().enumerate() {
        // Anything <= the incoming value can never be the maximum again.
        while matches!(candidates.back(), Some(&(_, tail)) if tail <= v) {
            candidates.pop_back();
        }
        candidates.push_back((i, v));
        // Retire the front once its index falls out of the window.
        if candidates.front().unwrap().0 + window <= i {
            candidates.pop_front();
        }
        out.push(candidates.front().unwrap().1);
    }
    out
}

View File

@@ -1,36 +0,0 @@
use brk_types::{Day1, Dollars, Month1, StoredF32, Week1, Year1};
use vecdb::{ReadableOptionVec, VecIndex};
use crate::{market::returns::Vecs as ReturnsVecs, prices};
/// Collects the stored return series for timeframe `tf` ("1d", "1w", "1m"
/// or "1y") and strips the `StoredF32` wrapper down to plain `f32`s.
/// Any other tag is a programmer error.
pub(super) fn collect_returns(tf: &str, returns: &ReturnsVecs) -> Vec<f32> {
    let series: Vec<StoredF32> = match tf {
        "1d" => returns.price_returns._24h.day1.collect_or_default(),
        "1w" => returns.price_returns._1w.week1.collect_or_default(),
        "1m" => returns.price_returns._1m.month1.collect_or_default(),
        "1y" => returns.price_returns._1y.year1.collect_or_default(),
        _ => unreachable!(),
    };
    series.iter().map(|v| **v).collect()
}
/// Returns the stored USD close series for timeframe `tf` ("1d", "1w",
/// "1m" or "1y"). Any other tag is a programmer error.
pub(super) fn collect_closes(tf: &str, prices: &prices::Vecs) -> Vec<Dollars> {
    // All four arms read the same USD close group; hoist it once.
    let close = &prices.split.close.usd;
    match tf {
        "1d" => close.day1.collect_or_default(),
        "1w" => close.week1.collect_or_default(),
        "1m" => close.month1.collect_or_default(),
        "1y" => close.year1.collect_or_default(),
        _ => unreachable!(),
    }
}
/// Maps a day index to the index of its containing period for timeframe
/// `tf`: "1d" is the identity, while "1w"/"1m"/"1y" convert through the
/// matching calendar type. Any other tag is a programmer error.
#[inline]
pub(super) fn date_to_period(tf: &str, di: Day1) -> usize {
    match tf {
        "1y" => Year1::from(Month1::from(di)).to_usize(),
        "1m" => Month1::from(di).to_usize(),
        "1w" => Week1::from(di).to_usize(),
        "1d" => di.to_usize(),
        _ => unreachable!(),
    }
}

View File

@@ -51,6 +51,8 @@ pub struct RsiChain<M: StorageMode = Rw> {
#[derive(Traversable)]
pub struct MacdChain<M: StorageMode = Rw> {
pub ema_fast: ComputedFromHeight<StoredF32, M>,
pub ema_slow: ComputedFromHeight<StoredF32, M>,
pub line: ComputedFromHeight<StoredF32, M>,
pub signal: ComputedFromHeight<StoredF32, M>,
pub histogram: ComputedFromHeight<StoredF32, M>,

View File

@@ -1,6 +1,5 @@
use brk_error::Result;
use brk_types::Cents;
use vecdb::{Exit, ReadableVec, VecIndex};
use vecdb::Exit;
use super::Vecs;
use crate::{blocks, ComputeIndexes, prices};
@@ -13,17 +12,14 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let close_data: Vec<Cents> = prices.price.cents.height.collect();
let price = &prices.price.cents.height;
for (price_ago, days) in self.price_ago.iter_mut_with_days() {
let window_starts = blocks.count.start_vec(days as usize);
price_ago.cents.height.compute_transform(
price_ago.cents.height.compute_lookback(
starting_indexes.height,
window_starts,
|(h, start_h, _)| {
let val = close_data[start_h.to_usize()];
(h, val)
},
price,
exit,
)?;
}

View File

@@ -1,16 +1,14 @@
use brk_error::Result;
use brk_types::Cents;
use vecdb::{Exit, ReadableOptionVec, VecIndex};
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, blocks, indexes, prices};
use crate::{ComputeIndexes, blocks, prices};
impl Vecs {
pub(crate) fn compute(
&mut self,
blocks: &blocks::Vecs,
prices: &prices::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -41,9 +39,6 @@ impl Vecs {
})?;
}
let h2d = &indexes.height.day1;
let closes: Vec<Cents> = prices.split.close.cents.day1.collect_or_default();
for (ema, period) in [
(&mut self.price_1w_ema, 7),
(&mut self.price_8d_ema, 8),
@@ -62,18 +57,9 @@ impl Vecs {
(&mut self.price_200w_ema, 200 * 7),
(&mut self.price_4y_ema, 4 * 365),
] {
let k = 2.0f64 / (period as f64 + 1.0);
// Compute date-level EMA, then expand to height level
let date_ema = compute_date_ema(&closes, k);
let window_starts = blocks.count.start_vec(period);
ema.compute_all(blocks, prices, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
h2d,
|(h, date, ..)| (h, Cents::from(date_ema[date.to_usize()])),
exit,
)?;
v.compute_rolling_ema(starting_indexes.height, window_starts, close, exit)?;
Ok(())
})?;
}
@@ -81,18 +67,3 @@ impl Vecs {
Ok(())
}
}
/// Computes a per-date EMA over `closes` with smoothing factor `k`.
/// The first close seeds the average; every later value is blended as
/// `close * k + prev * (1 - k)`.
fn compute_date_ema(closes: &[Cents], k: f64) -> Vec<f64> {
    let mut prev: Option<f64> = None;
    closes
        .iter()
        .map(|close| {
            let c = f64::from(*close);
            let next = match prev {
                // First sample seeds the EMA directly.
                None => c,
                Some(p) => c * k + p * (1.0 - k),
            };
            prev = Some(next);
            next
        })
        .collect()
}

View File

@@ -3,10 +3,7 @@ use brk_types::StoredF32;
use vecdb::{Exit, ReadableVec, VecIndex};
use super::Vecs;
use crate::{
blocks, ComputeIndexes, prices,
traits::{ComputeRollingMaxFromStarts, ComputeRollingMinFromStarts},
};
use crate::{blocks, ComputeIndexes, prices};
impl Vecs {
pub(crate) fn compute(

View File

@@ -1,14 +1,13 @@
use brk_error::Result;
use brk_types::{Dollars, StoredF32};
use vecdb::{Exit, ReadableOptionVec};
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, blocks, indexes, internal::PercentageDiffDollars, market::lookback, prices};
use crate::{ComputeIndexes, blocks, internal::PercentageDiffDollars, market::lookback, prices};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexes: &indexes::Vecs,
prices: &prices::Vecs,
blocks: &blocks::Vecs,
lookback: &lookback::Vecs,
@@ -29,27 +28,16 @@ impl Vecs {
)?;
}
// CAGR computed from returns (2y+ periods only)
let h2d = &indexes.height.day1;
// CAGR computed from returns at height level (2y+ periods only)
let price_returns_dca = self.price_returns.as_dca_period();
for (cagr, returns, days) in self.cagr.zip_mut_with_period(&price_returns_dca) {
let years = days as f32 / 365.0;
let mut cached_di = None;
let mut cached_val = StoredF32::from(0.0);
cagr.height.compute_transform(
starting_indexes.height,
h2d,
|(h, di, _)| {
if cached_di != Some(di) {
cached_di = Some(di);
cached_val = StoredF32::from(
returns.day1
.collect_one_flat(di)
.map(|r| ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0)
.unwrap_or(0.0)
);
}
(h, cached_val)
&returns.height,
|(h, r, ..)| {
let v = ((*r / 100.0 + 1.0).powf(1.0 / years) - 1.0) * 100.0;
(h, StoredF32::from(v))
},
exit,
)?;

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_types::{Height, StoredF32};
use vecdb::Exit;
use vecdb::{EagerVec, Exit, PcoVec, ReadableVec};
use super::super::returns;
use super::Vecs;
@@ -13,73 +13,39 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
// Sharpe ratios: returns / volatility
self.sharpe_1w.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1w.height,
&self.price_1w_volatility.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
self.sharpe_1m.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1m.height,
&self.price_1m_volatility.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
self.sharpe_1y.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1y.height,
&self.price_1y_volatility.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
for (out, ret, vol) in [
(&mut self.sharpe_1w, &returns.price_returns._1w.height, &self.price_1w_volatility.height),
(&mut self.sharpe_1m, &returns.price_returns._1m.height, &self.price_1m_volatility.height),
(&mut self.sharpe_1y, &returns.price_returns._1y.height, &self.price_1y_volatility.height),
] {
compute_ratio(&mut out.height, starting_indexes_height, ret, vol, exit)?;
}
// Sortino ratios: returns / downside volatility
self.sortino_1w.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1w.height,
&returns.downside_1w_sd.sd.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
self.sortino_1m.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1m.height,
&returns.downside_1m_sd.sd.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
self.sortino_1y.height.compute_transform2(
starting_indexes_height,
&returns.price_returns._1y.height,
&returns.downside_1y_sd.sd.height,
|(h, ret, vol, ..)| {
let ratio = if *vol == 0.0 { 0.0 } else { *ret / *vol };
(h, StoredF32::from(ratio))
},
exit,
)?;
compute_ratio(&mut self.sortino_1w.height, starting_indexes_height, &returns.price_returns._1w.height, &returns.downside_1w_sd.sd.height, exit)?;
compute_ratio(&mut self.sortino_1m.height, starting_indexes_height, &returns.price_returns._1m.height, &returns.downside_1m_sd.sd.height, exit)?;
compute_ratio(&mut self.sortino_1y.height, starting_indexes_height, &returns.price_returns._1y.height, &returns.downside_1y_sd.sd.height, exit)?;
Ok(())
}
}
/// Writes `ret / vol` into `out` at height granularity, emitting 0 instead
/// of an infinite/NaN ratio whenever the volatility denominator is zero.
fn compute_ratio(
    out: &mut EagerVec<PcoVec<Height, StoredF32>>,
    starting_indexes_height: Height,
    ret: &impl ReadableVec<Height, StoredF32>,
    vol: &impl ReadableVec<Height, StoredF32>,
    exit: &Exit,
) -> Result<()> {
    out.compute_transform2(
        starting_indexes_height,
        ret,
        vol,
        |(height, numerator, denominator, ..)| {
            // Guard the zero denominator explicitly; f32 division by zero
            // would otherwise yield +/-inf or NaN.
            let value = if *denominator == 0.0 {
                0.0
            } else {
                *numerator / *denominator
            };
            (height, StoredF32::from(value))
        },
        exit,
    )?;
    Ok(())
}

View File

@@ -9,9 +9,9 @@ use crate::internal::ComputedFromHeight;
pub struct Vecs<M: StorageMode = Rw> {
pub hash_rate: ComputedFromHeight<StoredF64, M>,
pub hash_rate_1w_sma: ComputedFromHeight<StoredF64, M>,
pub hash_rate_1m_sma: ComputedFromHeight<StoredF32, M>,
pub hash_rate_2m_sma: ComputedFromHeight<StoredF32, M>,
pub hash_rate_1y_sma: ComputedFromHeight<StoredF32, M>,
pub hash_rate_1m_sma: ComputedFromHeight<StoredF64, M>,
pub hash_rate_2m_sma: ComputedFromHeight<StoredF64, M>,
pub hash_rate_1y_sma: ComputedFromHeight<StoredF64, M>,
pub hash_rate_ath: ComputedFromHeight<StoredF64, M>,
pub hash_rate_drawdown: ComputedFromHeight<StoredF32, M>,
pub hash_price_ths: ComputedFromHeight<StoredF32, M>,

View File

@@ -7,155 +7,6 @@ use vecdb::{
use crate::internal::sliding_window::SlidingWindowSorted;
/// Unified rolling extremum (min or max) from window starts.
///
/// `should_replace` determines whether to evict the deque back:
/// - For min: `|back, new| *back >= *new`
/// - For max: `|back, new| *back <= *new`
pub fn compute_rolling_extremum_from_starts<I, T, A>(
    out: &mut EagerVec<PcoVec<I, T>>,
    max_from: I,
    window_starts: &impl ReadableVec<I, I>,
    values: &impl ReadableVec<I, A>,
    should_replace: fn(&A, &A) -> bool,
    exit: &Exit,
) -> Result<()>
where
    I: VecIndex,
    T: PcoVecValue + From<A>,
    A: VecValue + Ord,
{
    // Invalidate any previously computed output if either input's version
    // changed, truncating back to `max_from`.
    out.validate_and_truncate(window_starts.version() + values.version(), max_from)?;
    out.repeat_until_complete(exit, |this| {
        // `skip` = number of entries already computed; only push past them.
        let skip = this.len();
        // Monotonic deque of (index, value); its front is the extremum of
        // the current window.
        let mut deque: std::collections::VecDeque<(usize, A)> =
            std::collections::VecDeque::new();
        // When resuming, replay from the window start of the last computed
        // entry so the deque is rebuilt with every value still in scope.
        let start_offset = if skip > 0 {
            window_starts.collect_one_at(skip - 1).unwrap().to_usize()
        } else {
            0
        };
        let end = window_starts.len().min(values.len());
        let starts_batch = window_starts.collect_range_at(start_offset, end);
        let values_batch = values.collect_range_at(start_offset, end);
        for (j, (start, value)) in starts_batch.into_iter().zip(values_batch).enumerate() {
            let i = start_offset + j;
            let start_usize = start.to_usize();
            // Drop front entries whose index fell before the window start.
            while let Some(&(idx, _)) = deque.front() {
                if idx < start_usize {
                    deque.pop_front();
                } else {
                    break;
                }
            }
            // Drop back entries dominated by the incoming value
            // (`>=` for rolling min, `<=` for rolling max).
            while let Some((_, back)) = deque.back() {
                if should_replace(back, &value) {
                    deque.pop_back();
                } else {
                    break;
                }
            }
            deque.push_back((i, value));
            if i >= skip {
                // The deque front is the window extremum for index `i`.
                let extremum = deque.front().unwrap().1.clone();
                this.checked_push_at(i, T::from(extremum))?;
                if this.batch_limit_reached() {
                    // End this batch; `repeat_until_complete` resumes later.
                    break;
                }
            }
        }
        Ok(())
    })?;
    Ok(())
}
/// Extension trait: rolling minimum over windows whose left edges are
/// supplied per index by a `window_starts` vec.
pub trait ComputeRollingMinFromStarts<I: VecIndex, T> {
    /// Computes the rolling minimum of `values` into `self`.
    ///
    /// * `max_from` — truncation bound passed to input validation, so stale
    ///   output from this index onward is recomputed.
    /// * `window_starts` — inclusive window start index for each output index.
    /// * `values` — source series; `A` must be `Ord` and convertible to `T`.
    /// * `exit` — cooperative-shutdown handle threaded through the compute.
    fn compute_rolling_min_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Ord,
        T: From<A>;
}
impl<I, T> ComputeRollingMinFromStarts<I, T> for EagerVec<PcoVec<I, T>>
where
    I: VecIndex,
    T: PcoVecValue,
{
    /// Rolling minimum: delegates to the generic extremum kernel with an
    /// eviction rule that discards any queued candidate not smaller than
    /// the incoming value.
    fn compute_rolling_min_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Ord,
        T: From<A>,
    {
        // A queued candidate loses to a new value that is no larger.
        let evict_for_min = |back: &A, new: &A| back >= new;
        compute_rolling_extremum_from_starts(
            self,
            max_from,
            window_starts,
            values,
            evict_for_min,
            exit,
        )
    }
}
/// Extension trait: rolling maximum over windows whose left edges are
/// supplied per index by a `window_starts` vec.
pub trait ComputeRollingMaxFromStarts<I: VecIndex, T> {
    /// Computes the rolling maximum of `values` into `self`.
    ///
    /// * `max_from` — truncation bound passed to input validation, so stale
    ///   output from this index onward is recomputed.
    /// * `window_starts` — inclusive window start index for each output index.
    /// * `values` — source series; `A` must be `Ord` and convertible to `T`.
    /// * `exit` — cooperative-shutdown handle threaded through the compute.
    fn compute_rolling_max_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Ord,
        T: From<A>;
}
impl<I, T> ComputeRollingMaxFromStarts<I, T> for EagerVec<PcoVec<I, T>>
where
    I: VecIndex,
    T: PcoVecValue,
{
    /// Rolling maximum: delegates to the generic extremum kernel with an
    /// eviction rule that discards any queued candidate not larger than
    /// the incoming value.
    fn compute_rolling_max_from_starts<A>(
        &mut self,
        max_from: I,
        window_starts: &impl ReadableVec<I, I>,
        values: &impl ReadableVec<I, A>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecValue + Ord,
        T: From<A>,
    {
        // A queued candidate loses to a new value that is no smaller.
        let evict_for_max = |back: &A, new: &A| back <= new;
        compute_rolling_extremum_from_starts(
            self,
            max_from,
            window_starts,
            values,
            evict_for_max,
            exit,
        )
    }
}
pub trait ComputeRollingMedianFromStarts<I: VecIndex, T> {
fn compute_rolling_median_from_starts<A>(
&mut self,