global: MASSIVE snapshot

This commit is contained in:
nym21
2026-01-02 19:08:20 +01:00
parent ac6175688d
commit 3e9b1cc2b2
462 changed files with 34975 additions and 20072 deletions

View File

@@ -0,0 +1,60 @@
use brk_error::Result;
use brk_indexer::Indexer;
use vecdb::Exit;
use crate::{ComputeIndexes, indexes, price, transactions};
use super::Vecs;
impl Vecs {
    /// Computes every block-metric group for heights/dates at or after
    /// `starting_indexes`, then compacts the backing database.
    ///
    /// `price` is forwarded to `rewards` and enables USD-denominated outputs
    /// when `Some`; `exit` lets the individual sub-computations stop early on
    /// shutdown.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        transactions: &transactions::Vecs,
        starting_indexes: &ComputeIndexes,
        price: Option<&price::Vecs>,
        exit: &Exit,
    ) -> Result<()> {
        // Core block metrics
        // NOTE(review): `count.compute` reads `self.time.height_to_timestamp_fixed`
        // but `time.compute` only runs further down — this is only sound if that
        // vec is populated lazily / upstream of `time.compute`. TODO confirm
        // the ordering is intended.
        self.count
            .compute(indexer, indexes, &self.time, starting_indexes, exit)?;
        self.interval.compute(indexes, starting_indexes, exit)?;
        self.size
            .compute(indexer, indexes, starting_indexes, exit)?;
        self.weight
            .compute(indexer, indexes, starting_indexes, exit)?;
        // Time metrics (timestamps)
        self.time.compute(indexes, starting_indexes, exit)?;
        // Epoch metrics
        self.difficulty.compute(indexes, starting_indexes, exit)?;
        self.halving.compute(indexes, starting_indexes, exit)?;
        // Rewards depends on count and transactions fees
        self.rewards.compute(
            indexer,
            indexes,
            &self.count,
            &transactions.fees,
            starting_indexes,
            price,
            exit,
        )?;
        // Mining depends on count and rewards
        self.mining.compute(
            indexer,
            indexes,
            &self.count,
            &self.rewards,
            starting_indexes,
            exit,
        )?;
        // Hold the exit lock while compacting so a shutdown can't interrupt it.
        let _lock = exit.lock();
        self.db.compact()?;
        Ok(())
    }
}

View File

@@ -0,0 +1,85 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Height, StoredU32};
use vecdb::{Exit, TypedVecIterator};
use super::super::time;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Computes block-count vectors: a trailing-24h count per height, a
    /// per-height count (constant 1, aggregated per index), and 1w/1m/1y
    /// rolling sums over the daily counts.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        time: &time::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        let mut height_to_timestamp_fixed_iter =
            time.height_to_timestamp_fixed.into_iter();
        // `prev` is the lowest height still inside the 24h window of the
        // current block; it only moves forward, keeping the pass linear.
        // NOTE(review): `prev` restarts at genesis on every call, so the first
        // processed height pays one forward scan — presumably acceptable;
        // confirm if compute is called frequently with high starting heights.
        let mut prev = Height::ZERO;
        self.height_to_24h_block_count.compute_transform(
            starting_indexes.height,
            &time.height_to_timestamp_fixed,
            |(h, t, ..)| {
                // Advance `prev` until its timestamp is within the same
                // day-window as `t` (difference_in_days_between == 0).
                while t.difference_in_days_between(height_to_timestamp_fixed_iter.get_unwrap(prev))
                    > 0
                {
                    prev.increment();
                    // `prev` can never pass `h`: the day-difference of a
                    // timestamp with itself is 0.
                    if prev > h {
                        unreachable!()
                    }
                }
                (h, StoredU32::from(*h + 1 - *prev))
            },
            exit,
        )?;
        self.indexes_to_block_count
            .compute_all(indexes, starting_indexes, exit, |v| {
                // Each height counts as one block; `height_to_weight` is used
                // here only as a source of valid height indexes.
                v.compute_range(
                    starting_indexes.height,
                    &indexer.vecs.block.height_to_weight,
                    |h| (h, StoredU32::from(1_u32)),
                    exit,
                )?;
                Ok(())
            })?;
        // Rolling sums of the daily block counts over 7 / 30 / 365 days.
        self.indexes_to_1w_block_count
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_block_count.dateindex.unwrap_sum(),
                    7,
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_1m_block_count
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_block_count.dateindex.unwrap_sum(),
                    30,
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_1y_block_count
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sum(
                    starting_indexes.dateindex,
                    self.indexes_to_block_count.dateindex.unwrap_sum(),
                    365,
                    exit,
                )?;
                Ok(())
            })?;
        Ok(())
    }
}

View File

@@ -0,0 +1,108 @@
use brk_error::Result;
use brk_types::{StoredU64, Version};
use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1};
use super::Vecs;
use crate::{
blocks::{
TARGET_BLOCKS_PER_DAY, TARGET_BLOCKS_PER_DECADE, TARGET_BLOCKS_PER_MONTH,
TARGET_BLOCKS_PER_QUARTER, TARGET_BLOCKS_PER_SEMESTER, TARGET_BLOCKS_PER_WEEK,
TARGET_BLOCKS_PER_YEAR,
},
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens or creates every vector backing the block-count metrics.
    ///
    /// The seven `*_to_block_count_target` vecs are lazy: each mirrors the
    /// matching time-index vec and always yields the constant ideal block
    /// count for its period. The remaining vecs are persisted in `db`.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Every vec in this group shares the same, un-bumped version.
        let vers = version + Version::ZERO;
        let build_last = || VecBuilderOptions::default().add_last();
        let build_sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative();
        Ok(Self {
            dateindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.dateindex_to_dateindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)),
            ),
            weekindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.weekindex_to_weekindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)),
            ),
            monthindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.monthindex_to_monthindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)),
            ),
            quarterindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)),
            ),
            semesterindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)),
            ),
            yearindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.yearindex_to_yearindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)),
            ),
            decadeindex_to_block_count_target: LazyVecFrom1::init(
                "block_count_target",
                vers,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
                |_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)),
            ),
            height_to_24h_block_count: EagerVec::forced_import(db, "24h_block_count", vers)?,
            indexes_to_block_count: ComputedVecsFromHeight::forced_import(
                db,
                "block_count",
                Source::Compute,
                vers,
                indexes,
                build_sum_cum(),
            )?,
            indexes_to_1w_block_count: ComputedVecsFromDateIndex::forced_import(
                db,
                "1w_block_count",
                Source::Compute,
                vers,
                indexes,
                build_last(),
            )?,
            indexes_to_1m_block_count: ComputedVecsFromDateIndex::forced_import(
                db,
                "1m_block_count",
                Source::Compute,
                vers,
                indexes,
                build_last(),
            )?,
            indexes_to_1y_block_count: ComputedVecsFromDateIndex::forced_import(
                db,
                "1y_block_count",
                Source::Compute,
                vers,
                indexes,
                build_last(),
            )?,
        })
    }
}

View File

@@ -0,0 +1,5 @@
mod compute;
mod import;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,24 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex,
StoredU32, StoredU64, WeekIndex, YearIndex,
};
use vecdb::LazyVecFrom1;
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
/// Block-count vectors: lazy constant per-period targets plus computed counts.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Lazy "target" vecs: the constant ideal block count for each period,
    // derived from the matching time-index vec (see `forced_import`).
    pub dateindex_to_block_count_target: LazyVecFrom1<DateIndex, StoredU64, DateIndex, DateIndex>,
    pub weekindex_to_block_count_target: LazyVecFrom1<WeekIndex, StoredU64, WeekIndex, WeekIndex>,
    pub monthindex_to_block_count_target: LazyVecFrom1<MonthIndex, StoredU64, MonthIndex, MonthIndex>,
    pub quarterindex_to_block_count_target: LazyVecFrom1<QuarterIndex, StoredU64, QuarterIndex, QuarterIndex>,
    pub semesterindex_to_block_count_target: LazyVecFrom1<SemesterIndex, StoredU64, SemesterIndex, SemesterIndex>,
    pub yearindex_to_block_count_target: LazyVecFrom1<YearIndex, StoredU64, YearIndex, YearIndex>,
    pub decadeindex_to_block_count_target: LazyVecFrom1<DecadeIndex, StoredU64, DecadeIndex, DecadeIndex>,
    // Trailing-24h block count keyed by height (filled in `compute`).
    pub height_to_24h_block_count: vecdb::EagerVec<vecdb::PcoVec<brk_types::Height, StoredU32>>,
    // Per-block count of 1, aggregated (sum/cumulative) across indexes.
    pub indexes_to_block_count: ComputedVecsFromHeight<StoredU32>,
    // Rolling 7/30/365-day sums of the daily block count.
    pub indexes_to_1w_block_count: ComputedVecsFromDateIndex<StoredU32>,
    pub indexes_to_1m_block_count: ComputedVecsFromDateIndex<StoredU32>,
    pub indexes_to_1y_block_count: ComputedVecsFromDateIndex<StoredU32>,
}

View File

@@ -0,0 +1,63 @@
use brk_error::Result;
use brk_types::StoredU32;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use super::super::TARGET_BLOCKS_PER_DAY_F32;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Computes the per-date difficulty epoch and the two "time until next
    /// difficulty adjustment" countdown vectors.
    pub fn compute(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        let mut height_to_difficultyepoch_iter =
            indexes.block.height_to_difficultyepoch.into_iter();
        self.indexes_to_difficultyepoch
            .compute_all(starting_indexes, exit, |vec| {
                let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
                vec.compute_transform(
                    starting_indexes.dateindex,
                    &indexes.time.dateindex_to_first_height,
                    |(di, height, ..)| {
                        // A date's epoch is the epoch of its LAST block:
                        // first height + (blocks that day - 1).
                        (
                            di,
                            height_to_difficultyepoch_iter
                                .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)),
                        )
                    },
                    exit,
                )?;
                Ok(())
            })?;
        // Blocks remaining before the next difficulty adjustment, per height.
        self.indexes_to_blocks_before_next_difficulty_adjustment
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform(
                    starting_indexes.height,
                    &indexes.block.height_to_height,
                    |(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())),
                    exit,
                )?;
                Ok(())
            })?;
        // Same countdown expressed in days, assuming the ideal 144 blocks/day.
        self.indexes_to_days_before_next_difficulty_adjustment
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform(
                    starting_indexes.height,
                    self.indexes_to_blocks_before_next_difficulty_adjustment
                        .height
                        .as_ref()
                        .unwrap(),
                    |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
                    exit,
                )?;
                Ok(())
            })?;
        Ok(())
    }
}

View File

@@ -0,0 +1,46 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens or creates the difficulty-epoch vectors.
    pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
        // The epoch vec keeps the base version; the two countdown vecs carry
        // a +2 bump.
        let base = version + Version::ZERO;
        let bumped = version + Version::TWO;
        let build_last = || VecBuilderOptions::default().add_last();
        Ok(Self {
            indexes_to_difficultyepoch: ComputedVecsFromDateIndex::forced_import(
                db,
                "difficultyepoch",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_blocks_before_next_difficulty_adjustment:
                ComputedVecsFromHeight::forced_import(
                    db,
                    "blocks_before_next_difficulty_adjustment",
                    Source::Compute,
                    bumped,
                    indexes,
                    build_last(),
                )?,
            indexes_to_days_before_next_difficulty_adjustment:
                ComputedVecsFromHeight::forced_import(
                    db,
                    "days_before_next_difficulty_adjustment",
                    Source::Compute,
                    bumped,
                    indexes,
                    build_last(),
                )?,
        })
    }
}

View File

@@ -0,0 +1,5 @@
mod compute;
mod import;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,12 @@
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, StoredF32, StoredU32};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
/// Difficulty epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Difficulty epoch of each date's last block.
    pub indexes_to_difficultyepoch: ComputedVecsFromDateIndex<DifficultyEpoch>,
    // Blocks remaining until the next difficulty adjustment, per height.
    pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedVecsFromHeight<StoredU32>,
    // Same countdown in days at the ideal 144 blocks/day cadence.
    pub indexes_to_days_before_next_difficulty_adjustment: ComputedVecsFromHeight<StoredF32>,
}

View File

@@ -0,0 +1,70 @@
use brk_error::Result;
use brk_types::StoredU32;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use super::super::TARGET_BLOCKS_PER_DAY_F32;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Computes the per-date halving epoch and the two "time until next
    /// halving" countdown vectors.
    pub fn compute(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        let mut height_to_halvingepoch_iter = indexes.block.height_to_halvingepoch.into_iter();
        self.indexes_to_halvingepoch
            .compute_all(starting_indexes, exit, |vec| {
                let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
                vec.compute_transform(
                    starting_indexes.dateindex,
                    &indexes.time.dateindex_to_first_height,
                    |(di, height, ..)| {
                        // A date's epoch is the epoch of its LAST block:
                        // first height + (blocks that day - 1).
                        (
                            di,
                            height_to_halvingepoch_iter
                                .get_unwrap(height + (*height_count_iter.get_unwrap(di) - 1)),
                        )
                    },
                    exit,
                )?;
                Ok(())
            })?;
        // Blocks remaining before the next halving, per height.
        self.indexes_to_blocks_before_next_halving.compute_all(
            indexes,
            starting_indexes,
            exit,
            |v| {
                v.compute_transform(
                    starting_indexes.height,
                    &indexes.block.height_to_height,
                    |(h, ..)| (h, StoredU32::from(h.left_before_next_halving())),
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Same countdown expressed in days, assuming the ideal 144 blocks/day.
        self.indexes_to_days_before_next_halving.compute_all(
            indexes,
            starting_indexes,
            exit,
            |v| {
                v.compute_transform(
                    starting_indexes.height,
                    self.indexes_to_blocks_before_next_halving
                        .height
                        .as_ref()
                        .unwrap(),
                    |(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
                    exit,
                )?;
                Ok(())
            },
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,44 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens or creates the halving-epoch vectors.
    pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
        // The epoch vec keeps the base version; the two countdown vecs carry
        // a +2 bump.
        let base = version + Version::ZERO;
        let bumped = version + Version::TWO;
        let build_last = || VecBuilderOptions::default().add_last();
        Ok(Self {
            indexes_to_halvingepoch: ComputedVecsFromDateIndex::forced_import(
                db,
                "halvingepoch",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_blocks_before_next_halving: ComputedVecsFromHeight::forced_import(
                db,
                "blocks_before_next_halving",
                Source::Compute,
                bumped,
                indexes,
                build_last(),
            )?,
            indexes_to_days_before_next_halving: ComputedVecsFromHeight::forced_import(
                db,
                "days_before_next_halving",
                Source::Compute,
                bumped,
                indexes,
                build_last(),
            )?,
        })
    }
}

View File

@@ -0,0 +1,5 @@
mod compute;
mod import;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,12 @@
use brk_traversable::Traversable;
use brk_types::{HalvingEpoch, StoredF32, StoredU32};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
/// Halving epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Halving epoch of each date's last block.
    pub indexes_to_halvingepoch: ComputedVecsFromDateIndex<HalvingEpoch>,
    // Blocks remaining until the next halving, per height.
    pub indexes_to_blocks_before_next_halving: ComputedVecsFromHeight<StoredU32>,
    // Same countdown in days at the ideal 144 blocks/day cadence.
    pub indexes_to_days_before_next_halving: ComputedVecsFromHeight<StoredF32>,
}

View File

@@ -0,0 +1,62 @@
use std::path::Path;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::Version;
use vecdb::{Database, PAGE_SIZE};
use crate::{indexes, price};
use super::{
CountVecs, DifficultyVecs, HalvingVecs, IntervalVecs, MiningVecs,
RewardsVecs, SizeVecs, TimeVecs, Vecs, WeightVecs,
};
impl Vecs {
    /// Opens the `blocks` database and force-imports every metric group.
    ///
    /// After construction, regions no longer produced by any group are
    /// dropped via `retain_regions` and the database is compacted.
    pub fn forced_import(
        parent_path: &Path,
        parent_version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
    ) -> Result<Self> {
        let db = Database::open(&parent_path.join(super::DB_NAME))?;
        // NOTE(review): PAGE_SIZE * 50_000_000 is an enormous minimum length —
        // confirm this pre-allocation size is intended.
        db.set_min_len(PAGE_SIZE * 50_000_000)?;
        let version = parent_version + Version::ZERO;
        // USD-denominated reward metrics are only materialized when a price
        // source is available.
        let compute_dollars = price.is_some();
        let count = CountVecs::forced_import(&db, version, indexes)?;
        let interval = IntervalVecs::forced_import(&db, version, indexer, indexes)?;
        let size = SizeVecs::forced_import(&db, version, indexer, indexes)?;
        let weight = WeightVecs::forced_import(&db, version, indexer, indexes)?;
        let time = TimeVecs::forced_import(&db, version, indexer, indexes)?;
        let mining = MiningVecs::forced_import(&db, version, indexer, indexes)?;
        let rewards = RewardsVecs::forced_import(&db, version, indexes, compute_dollars)?;
        let difficulty = DifficultyVecs::forced_import(&db, version, indexes)?;
        let halving = HalvingVecs::forced_import(&db, version, indexes)?;
        let this = Self {
            db,
            count,
            interval,
            size,
            weight,
            time,
            mining,
            rewards,
            difficulty,
            halving,
        };
        // Drop any on-disk region no exportable vec claims anymore (e.g. after
        // a rename), then reclaim the space.
        this.db.retain_regions(
            this.iter_any_exportable()
                .flat_map(|v| v.region_names())
                .collect(),
        )?;
        this.db.compact()?;
        Ok(this)
    }
}

View File

@@ -0,0 +1,23 @@
use brk_error::Result;
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, indexes};
impl Vecs {
    /// Computes the aggregated block-interval vectors.
    ///
    /// `height_to_interval` itself is lazy (derived from indexer timestamps),
    /// so only the per-index aggregations need materializing here.
    pub fn compute(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.indexes_to_block_interval.compute_rest(
            indexes,
            starting_indexes,
            exit,
            Some(&self.height_to_interval),
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,56 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{CheckedSub, Height, Timestamp, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens or creates the block-interval vectors.
    ///
    /// `height_to_interval` is lazy: each block's interval is derived on
    /// demand from the indexer's timestamp vec.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let vers = version + Version::ZERO;
        let build_stats = || {
            VecBuilderOptions::default()
                .add_average()
                .add_minmax()
                .add_percentiles()
        };
        let height_to_interval = LazyVecFrom1::init(
            "interval",
            vers,
            indexer.vecs.block.height_to_timestamp.boxed_clone(),
            |height: Height, timestamp_iter| {
                let timestamp = timestamp_iter.get(height)?;
                // Gap to the parent block. Genesis (no parent), a missing
                // parent timestamp, or a non-monotonic pair all clamp to zero.
                let interval = height
                    .decremented()
                    .and_then(|prev_h| timestamp_iter.get(prev_h))
                    .map_or(Timestamp::ZERO, |prev_t| {
                        timestamp.checked_sub(prev_t).unwrap_or(Timestamp::ZERO)
                    });
                Some(interval)
            },
        );
        Ok(Self {
            indexes_to_block_interval: ComputedVecsFromHeight::forced_import(
                db,
                "block_interval",
                Source::Vec(height_to_interval.boxed_clone()),
                vers,
                indexes,
                build_stats(),
            )?,
            height_to_interval,
        })
    }
}

View File

@@ -0,0 +1,5 @@
mod compute;
mod import;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{Height, Timestamp};
use vecdb::LazyVecFrom1;
use crate::internal::ComputedVecsFromHeight;
/// Block-interval vectors: lazy per-height interval plus aggregations.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Seconds between a block and its parent (0 for genesis / clock skew),
    // derived lazily from indexer timestamps.
    pub height_to_interval: LazyVecFrom1<Height, Timestamp, Height, Timestamp>,
    // Average / min-max / percentile aggregations of the interval per index.
    pub indexes_to_block_interval: ComputedVecsFromHeight<Timestamp>,
}

View File

@@ -0,0 +1,252 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{StoredF32, StoredF64};
use vecdb::Exit;
use super::Vecs;
use super::super::{count, rewards, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64};
use crate::{
indexes,
utils::OptionExt,
ComputeIndexes,
};
impl Vecs {
    /// Computes difficulty, hash-rate, hash-price and hash-value metrics.
    ///
    /// Depends on `count_vecs.height_to_24h_block_count` and the 24h coinbase
    /// sums from `rewards_vecs`, both of which must already be computed.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        count_vecs: &count::Vecs,
        rewards_vecs: &rewards::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        // Raw difficulty comes straight from the indexer.
        self.indexes_to_difficulty.compute_rest(
            indexes,
            starting_indexes,
            exit,
            Some(&indexer.vecs.block.height_to_difficulty),
        )?;
        // Difficulty expressed as hashes/second at the ideal 600s block time:
        // difficulty * 2^32 / 600.
        self.indexes_to_difficulty_as_hash
            .compute_all(indexes, starting_indexes, exit, |v| {
                let multiplier = 2.0_f64.powi(32) / 600.0;
                v.compute_transform(
                    starting_indexes.height,
                    &indexer.vecs.block.height_to_difficulty,
                    |(i, v, ..)| (i, StoredF32::from(*v * multiplier)),
                    exit,
                )?;
                Ok(())
            })?;
        // Estimated network hash rate: scale difficulty-as-hash by the ratio
        // of observed 24h blocks to the ideal 144.
        self.indexes_to_hash_rate
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform2(
                    starting_indexes.height,
                    &count_vecs.height_to_24h_block_count,
                    self.indexes_to_difficulty_as_hash.height.u(),
                    |(i, block_count_sum, difficulty_as_hash, ..)| {
                        (
                            i,
                            StoredF64::from(
                                (f64::from(block_count_sum) / TARGET_BLOCKS_PER_DAY_F64)
                                    * f64::from(difficulty_as_hash),
                            ),
                        )
                    },
                    exit,
                )?;
                Ok(())
            })?;
        // Smoothed hash rate over 7 / 30 / 60 / 365-day windows.
        self.indexes_to_hash_rate_1w_sma
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sma(
                    starting_indexes.dateindex,
                    self.indexes_to_hash_rate.dateindex.unwrap_last(),
                    7,
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_rate_1m_sma
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sma(
                    starting_indexes.dateindex,
                    self.indexes_to_hash_rate.dateindex.unwrap_last(),
                    30,
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_rate_2m_sma
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sma(
                    starting_indexes.dateindex,
                    self.indexes_to_hash_rate.dateindex.unwrap_last(),
                    2 * 30,
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_rate_1y_sma
            .compute_all(starting_indexes, exit, |v| {
                v.compute_sma(
                    starting_indexes.dateindex,
                    self.indexes_to_hash_rate.dateindex.unwrap_last(),
                    365,
                    exit,
                )?;
                Ok(())
            })?;
        // Percentage change of difficulty vs the previous value.
        self.indexes_to_difficulty_adjustment.compute_all(
            indexes,
            starting_indexes,
            exit,
            |v| {
                v.compute_percentage_change(
                    starting_indexes.height,
                    &indexer.vecs.block.height_to_difficulty,
                    1,
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Hash price: 24h USD coinbase per TH/s of hash rate (NaN when the
        // hash rate is zero).
        self.indexes_to_hash_price_ths
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform2(
                    starting_indexes.height,
                    &rewards_vecs.height_to_24h_coinbase_usd_sum,
                    self.indexes_to_hash_rate.height.u(),
                    |(i, coinbase_sum, hashrate, ..)| {
                        let hashrate_ths = *hashrate / ONE_TERA_HASH;
                        let price = if hashrate_ths == 0.0 {
                            StoredF32::NAN
                        } else {
                            (*coinbase_sum / hashrate_ths).into()
                        };
                        (i, price)
                    },
                    exit,
                )?;
                Ok(())
            })?;
        // PH/s variant: 1 PH = 1000 TH.
        self.indexes_to_hash_price_phs
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform(
                    starting_indexes.height,
                    self.indexes_to_hash_price_ths.height.u(),
                    |(i, price, ..)| (i, (*price * 1000.0).into()),
                    exit,
                )?;
                Ok(())
            })?;
        // Hash value: same ratio but with the 24h coinbase in native units
        // (sats) instead of USD.
        self.indexes_to_hash_value_ths
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform2(
                    starting_indexes.height,
                    &rewards_vecs.height_to_24h_coinbase_sum,
                    self.indexes_to_hash_rate.height.u(),
                    |(i, coinbase_sum, hashrate, ..)| {
                        let hashrate_ths = *hashrate / ONE_TERA_HASH;
                        let value = if hashrate_ths == 0.0 {
                            StoredF32::NAN
                        } else {
                            StoredF32::from(*coinbase_sum as f64 / hashrate_ths)
                        };
                        (i, value)
                    },
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_value_phs
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_transform(
                    starting_indexes.height,
                    self.indexes_to_hash_value_ths.height.u(),
                    |(i, value, ..)| (i, (*value * 1000.0).into()),
                    exit,
                )?;
                Ok(())
            })?;
        // Running all-time lows of the four price/value series.
        self.indexes_to_hash_price_ths_min
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_all_time_low_(
                    starting_indexes.height,
                    self.indexes_to_hash_price_ths.height.u(),
                    exit,
                    true,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_price_phs_min
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_all_time_low_(
                    starting_indexes.height,
                    self.indexes_to_hash_price_phs.height.u(),
                    exit,
                    true,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_value_ths_min
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_all_time_low_(
                    starting_indexes.height,
                    self.indexes_to_hash_value_ths.height.u(),
                    exit,
                    true,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_value_phs_min
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_all_time_low_(
                    starting_indexes.height,
                    self.indexes_to_hash_value_phs.height.u(),
                    exit,
                    true,
                )?;
                Ok(())
            })?;
        // Rebound: percentage distance of the PH/s series from its all-time low.
        self.indexes_to_hash_price_rebound
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_percentage_difference(
                    starting_indexes.height,
                    self.indexes_to_hash_price_phs.height.u(),
                    self.indexes_to_hash_price_phs_min.height.u(),
                    exit,
                )?;
                Ok(())
            })?;
        self.indexes_to_hash_value_rebound
            .compute_all(indexes, starting_indexes, exit, |v| {
                v.compute_percentage_difference(
                    starting_indexes.height,
                    self.indexes_to_hash_value_phs.height.u(),
                    self.indexes_to_hash_value_phs_min.height.u(),
                    exit,
                )?;
                Ok(())
            })?;
        Ok(())
    }
}

View File

@@ -0,0 +1,173 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::Version;
use vecdb::{Database, IterableCloneableVec};
use super::Vecs;
use crate::{
internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source, VecBuilderOptions},
indexes,
};
impl Vecs {
    /// Opens or creates every vector backing the mining metrics.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Three version tiers are in use: the base version, a +4 bump for the
        // hash price/value family, and a +5 bump for the hash rate itself.
        let base = version + Version::ZERO;
        let vers4 = version + Version::new(4);
        let vers5 = version + Version::new(5);
        let build_last = || VecBuilderOptions::default().add_last();
        let build_sum = || VecBuilderOptions::default().add_sum();
        Ok(Self {
            indexes_to_hash_rate: ComputedVecsFromHeight::forced_import(
                db,
                "hash_rate",
                Source::Compute,
                vers5,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_rate_1w_sma: ComputedVecsFromDateIndex::forced_import(
                db,
                "hash_rate_1w_sma",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_rate_1m_sma: ComputedVecsFromDateIndex::forced_import(
                db,
                "hash_rate_1m_sma",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_rate_2m_sma: ComputedVecsFromDateIndex::forced_import(
                db,
                "hash_rate_2m_sma",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_rate_1y_sma: ComputedVecsFromDateIndex::forced_import(
                db,
                "hash_rate_1y_sma",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_price_ths: ComputedVecsFromHeight::forced_import(
                db,
                "hash_price_ths",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_price_ths_min: ComputedVecsFromHeight::forced_import(
                db,
                "hash_price_ths_min",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_price_phs: ComputedVecsFromHeight::forced_import(
                db,
                "hash_price_phs",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_price_phs_min: ComputedVecsFromHeight::forced_import(
                db,
                "hash_price_phs_min",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_price_rebound: ComputedVecsFromHeight::forced_import(
                db,
                "hash_price_rebound",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_value_ths: ComputedVecsFromHeight::forced_import(
                db,
                "hash_value_ths",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_value_ths_min: ComputedVecsFromHeight::forced_import(
                db,
                "hash_value_ths_min",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_value_phs: ComputedVecsFromHeight::forced_import(
                db,
                "hash_value_phs",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_value_phs_min: ComputedVecsFromHeight::forced_import(
                db,
                "hash_value_phs_min",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            indexes_to_hash_value_rebound: ComputedVecsFromHeight::forced_import(
                db,
                "hash_value_rebound",
                Source::Compute,
                vers4,
                indexes,
                build_last(),
            )?,
            // Difficulty is sourced directly from the indexer's vec.
            indexes_to_difficulty: ComputedVecsFromHeight::forced_import(
                db,
                "difficulty",
                Source::Vec(indexer.vecs.block.height_to_difficulty.boxed_clone()),
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_difficulty_as_hash: ComputedVecsFromHeight::forced_import(
                db,
                "difficulty_as_hash",
                Source::Compute,
                base,
                indexes,
                build_last(),
            )?,
            indexes_to_difficulty_adjustment: ComputedVecsFromHeight::forced_import(
                db,
                "difficulty_adjustment",
                Source::Compute,
                base,
                indexes,
                build_sum(),
            )?,
        })
    }
}

View File

@@ -0,0 +1,5 @@
mod compute;
mod import;
mod vecs;
pub use vecs::Vecs;

View File

@@ -0,0 +1,27 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, StoredF64};
use crate::internal::{ComputedVecsFromDateIndex, ComputedVecsFromHeight};
/// Mining-related metrics: hash rate, hash price, hash value, difficulty
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Estimated network hash rate (H/s) per height/index.
    pub indexes_to_hash_rate: ComputedVecsFromHeight<StoredF64>,
    // Smoothed hash rate. NOTE(review): the 1w SMA stores F64 while the
    // 1m/2m/1y SMAs store F32 — confirm the asymmetry is intentional.
    pub indexes_to_hash_rate_1w_sma: ComputedVecsFromDateIndex<StoredF64>,
    pub indexes_to_hash_rate_1m_sma: ComputedVecsFromDateIndex<StoredF32>,
    pub indexes_to_hash_rate_2m_sma: ComputedVecsFromDateIndex<StoredF32>,
    pub indexes_to_hash_rate_1y_sma: ComputedVecsFromDateIndex<StoredF32>,
    // 24h USD coinbase per TH/s (and PH/s), with running all-time lows.
    pub indexes_to_hash_price_ths: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_price_ths_min: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_price_phs: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_price_phs_min: ComputedVecsFromHeight<StoredF32>,
    // Percentage distance of hash price (PH/s) from its all-time low.
    pub indexes_to_hash_price_rebound: ComputedVecsFromHeight<StoredF32>,
    // Same family but with the coinbase in native units instead of USD.
    pub indexes_to_hash_value_ths: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_value_ths_min: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_value_phs: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_value_phs_min: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_hash_value_rebound: ComputedVecsFromHeight<StoredF32>,
    // Raw difficulty, difficulty as H/s, and its percentage change.
    pub indexes_to_difficulty: ComputedVecsFromHeight<StoredF64>,
    pub indexes_to_difficulty_as_hash: ComputedVecsFromHeight<StoredF32>,
    pub indexes_to_difficulty_adjustment: ComputedVecsFromHeight<StoredF32>,
}

View File

@@ -0,0 +1,54 @@
pub mod count;
pub mod difficulty;
pub mod halving;
pub mod interval;
pub mod mining;
pub mod rewards;
pub mod size;
pub mod time;
pub mod weight;
mod compute;
mod import;
use brk_traversable::Traversable;
use vecdb::Database;
pub use count::Vecs as CountVecs;
pub use difficulty::Vecs as DifficultyVecs;
pub use halving::Vecs as HalvingVecs;
pub use interval::Vecs as IntervalVecs;
pub use mining::Vecs as MiningVecs;
pub use rewards::Vecs as RewardsVecs;
pub use size::Vecs as SizeVecs;
pub use time::Vecs as TimeVecs;
pub use weight::Vecs as WeightVecs;
pub const DB_NAME: &str = "blocks";
// Ideal cadence: one block every 10 minutes → 144 per day. The chain below
// uses 30-day months, so the "year" target is 360 days of blocks (and the
// decade 3600) — NOTE(review): confirm the mismatch with the 365-day rolling
// windows used by the compute passes is intentional.
pub(crate) const TARGET_BLOCKS_PER_DAY_F64: f64 = 144.0;
pub(crate) const TARGET_BLOCKS_PER_DAY_F32: f32 = 144.0;
pub(crate) const TARGET_BLOCKS_PER_DAY: u64 = 144;
pub(crate) const TARGET_BLOCKS_PER_WEEK: u64 = 7 * TARGET_BLOCKS_PER_DAY;
pub(crate) const TARGET_BLOCKS_PER_MONTH: u64 = 30 * TARGET_BLOCKS_PER_DAY;
pub(crate) const TARGET_BLOCKS_PER_QUARTER: u64 = 3 * TARGET_BLOCKS_PER_MONTH;
pub(crate) const TARGET_BLOCKS_PER_SEMESTER: u64 = 2 * TARGET_BLOCKS_PER_QUARTER;
pub(crate) const TARGET_BLOCKS_PER_YEAR: u64 = 2 * TARGET_BLOCKS_PER_SEMESTER;
pub(crate) const TARGET_BLOCKS_PER_DECADE: u64 = 10 * TARGET_BLOCKS_PER_YEAR;
// 1 TH in hashes; used to express hash price/value per TH/s.
pub(crate) const ONE_TERA_HASH: f64 = 1_000_000_000_000.0;
/// All block-level metric groups plus the database that backs them.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Backing database; excluded from traversal (not an exportable vec).
    #[traversable(skip)]
    pub(crate) db: Database,
    pub count: CountVecs,
    pub interval: IntervalVecs,
    pub size: SizeVecs,
    pub weight: WeightVecs,
    pub time: TimeVecs,
    pub mining: MiningVecs,
    pub rewards: RewardsVecs,
    pub difficulty: DifficultyVecs,
    pub halving: HalvingVecs,
}

View File

@@ -0,0 +1,193 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{CheckedSub, Dollars, HalvingEpoch, Height, Sats, StoredF32, TxOutIndex};
use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex};
use super::Vecs;
use crate::{
transactions,
ComputeIndexes,
indexes, price,
utils::OptionExt,
};
use super::super::count;
impl Vecs {
    /// Computes coinbase, subsidy, unclaimed-reward and dominance metrics.
    ///
    /// Depends on `count_vecs.height_to_24h_block_count` and the fee vecs in
    /// `transactions_fees`, both of which must already be computed. USD
    /// variants are only produced when the coinbase dollar vecs exist.
    #[allow(clippy::too_many_arguments)]
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        count_vecs: &count::Vecs,
        transactions_fees: &transactions::FeesVecs,
        starting_indexes: &ComputeIndexes,
        price: Option<&price::Vecs>,
        exit: &Exit,
    ) -> Result<()> {
        // Coinbase per block: sum the output values of each block's first
        // transaction (the coinbase tx).
        self.indexes_to_coinbase
            .compute_all(indexes, price, starting_indexes, exit, |vec| {
                let mut txindex_to_first_txoutindex_iter =
                    indexer.vecs.tx.txindex_to_first_txoutindex.iter()?;
                let mut txindex_to_output_count_iter =
                    indexes.transaction.txindex_to_output_count.iter();
                let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?;
                vec.compute_transform(
                    starting_indexes.height,
                    &indexer.vecs.tx.height_to_first_txindex,
                    |(height, txindex, ..)| {
                        let first_txoutindex = txindex_to_first_txoutindex_iter
                            .get_unwrap(txindex)
                            .to_usize();
                        let output_count = txindex_to_output_count_iter.get_unwrap(txindex);
                        let mut sats = Sats::ZERO;
                        (first_txoutindex..first_txoutindex + usize::from(output_count)).for_each(
                            |txoutindex| {
                                sats += txoutindex_to_value_iter
                                    .get_unwrap(TxOutIndex::from(txoutindex));
                            },
                        );
                        (height, sats)
                    },
                    exit,
                )?;
                Ok(())
            })?;
        let mut height_to_coinbase_iter = self
            .indexes_to_coinbase
            .sats
            .height
            .as_ref()
            .unwrap()
            .into_iter();
        // Trailing-24h coinbase sum: the window [h - (count - 1), h] mirrors
        // the 24h block count (count >= 1 since it includes block h itself).
        self.height_to_24h_coinbase_sum.compute_transform(
            starting_indexes.height,
            &count_vecs.height_to_24h_block_count,
            |(h, count, ..)| {
                let range = *h - (*count - 1)..=*h;
                let sum = range
                    .map(Height::from)
                    .map(|h| height_to_coinbase_iter.get_unwrap(h))
                    .sum::<Sats>();
                (h, sum)
            },
            exit,
        )?;
        // Release the borrow on `self.indexes_to_coinbase` before reborrowing
        // its dollar variant below.
        drop(height_to_coinbase_iter);
        // USD variant, only when the dollar coinbase vecs were imported.
        if let Some(mut height_to_coinbase_iter) = self
            .indexes_to_coinbase
            .dollars
            .as_ref()
            .map(|c| c.height.u().into_iter())
        {
            self.height_to_24h_coinbase_usd_sum.compute_transform(
                starting_indexes.height,
                &count_vecs.height_to_24h_block_count,
                |(h, count, ..)| {
                    let range = *h - (*count - 1)..=*h;
                    let sum = range
                        .map(Height::from)
                        .map(|h| height_to_coinbase_iter.get_unwrap(h))
                        .sum::<Dollars>();
                    (h, sum)
                },
                exit,
            )?;
        }
        // Subsidy = coinbase - fees. An underflow would mean inconsistent
        // inputs, so it dumps the offending values and panics on purpose.
        self.indexes_to_subsidy
            .compute_all(indexes, price, starting_indexes, exit, |vec| {
                vec.compute_transform2(
                    starting_indexes.height,
                    self.indexes_to_coinbase.sats.height.u(),
                    transactions_fees.indexes_to_fee.sats.height.unwrap_sum(),
                    |(height, coinbase, fees, ..)| {
                        (
                            height,
                            coinbase.checked_sub(fees).unwrap_or_else(|| {
                                dbg!(height, coinbase, fees);
                                panic!()
                            }),
                        )
                    },
                    exit,
                )?;
                Ok(())
            })?;
        // Unclaimed rewards: the expected epoch subsidy (50 BTC halved per
        // epoch) minus what the miner actually claimed. The unwrap relies on
        // claimed subsidy never exceeding the consensus maximum.
        self.indexes_to_unclaimed_rewards.compute_all(
            indexes,
            price,
            starting_indexes,
            exit,
            |vec| {
                vec.compute_transform(
                    starting_indexes.height,
                    self.indexes_to_subsidy.sats.height.u(),
                    |(height, subsidy, ..)| {
                        let halving = HalvingEpoch::from(height);
                        let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32);
                        (height, expected.checked_sub(subsidy).unwrap())
                    },
                    exit,
                )?;
                Ok(())
            },
        )?;
        // Daily fee share of the coinbase, in percent (NaN when coinbase is 0).
        self.dateindex_to_fee_dominance.compute_transform2(
            starting_indexes.dateindex,
            transactions_fees.indexes_to_fee.sats.dateindex.unwrap_sum(),
            self.indexes_to_coinbase.sats.dateindex.unwrap_sum(),
            |(i, fee, coinbase, ..)| {
                let coinbase_f64 = u64::from(coinbase) as f64;
                let dominance = if coinbase_f64 == 0.0 {
                    StoredF32::NAN
                } else {
                    StoredF32::from(u64::from(fee) as f64 / coinbase_f64 * 100.0)
                };
                (i, dominance)
            },
            exit,
        )?;
        // Daily subsidy share of the coinbase, in percent.
        self.dateindex_to_subsidy_dominance.compute_transform2(
            starting_indexes.dateindex,
            self.indexes_to_subsidy.sats.dateindex.unwrap_sum(),
            self.indexes_to_coinbase.sats.dateindex.unwrap_sum(),
            |(i, subsidy, coinbase, ..)| {
                let coinbase_f64 = u64::from(coinbase) as f64;
                let dominance = if coinbase_f64 == 0.0 {
                    StoredF32::NAN
                } else {
                    StoredF32::from(u64::from(subsidy) as f64 / coinbase_f64 * 100.0)
                };
                (i, dominance)
            },
            exit,
        )?;
        // NOTE(review): this field is named `subsidy_usd_1y_sma` but the SMA
        // input is the COINBASE dollar sum (subsidy + fees) — confirm whether
        // the source or the name is the intended one.
        if let Some(sma) = self.indexes_to_subsidy_usd_1y_sma.as_mut() {
            let date_to_coinbase_usd_sum = self
                .indexes_to_coinbase
                .dollars
                .as_ref()
                .unwrap()
                .dateindex
                .unwrap_sum();
            sma.compute_all(starting_indexes, exit, |v| {
                v.compute_sma(
                    starting_indexes.dateindex,
                    date_to_coinbase_usd_sum,
                    365,
                    exit,
                )?;
                Ok(())
            })?;
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,89 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, Source, VecBuilderOptions},
indexes,
};
impl Vecs {
    /// Opens (creating if needed) every rewards-related vec inside `db`.
    ///
    /// # Arguments
    /// * `version` - base version; each vec is stamped `version + Version::ZERO`.
    /// * `compute_dollars` - when `true`, the USD-denominated companion vecs
    ///   (including the 1y subsidy SMA) are imported as well; otherwise they
    ///   are simply absent.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
        compute_dollars: bool,
    ) -> Result<Self> {
        let ver = version + Version::ZERO;

        let height_to_24h_coinbase_sum =
            EagerVec::forced_import(db, "24h_coinbase_sum", ver)?;
        let height_to_24h_coinbase_usd_sum =
            EagerVec::forced_import(db, "24h_coinbase_usd_sum", ver)?;

        // Coinbase carries the full statistics bundle.
        let indexes_to_coinbase = ComputedValueVecsFromHeight::forced_import(
            db,
            "coinbase",
            Source::Compute,
            ver,
            VecBuilderOptions::default()
                .add_sum()
                .add_cumulative()
                .add_percentiles()
                .add_minmax()
                .add_average(),
            compute_dollars,
            indexes,
        )?;

        // Subsidy gets the same statistics as coinbase.
        let indexes_to_subsidy = ComputedValueVecsFromHeight::forced_import(
            db,
            "subsidy",
            Source::Compute,
            ver,
            VecBuilderOptions::default()
                .add_percentiles()
                .add_sum()
                .add_cumulative()
                .add_minmax()
                .add_average(),
            compute_dollars,
            indexes,
        )?;

        // Unclaimed rewards only need running totals.
        let indexes_to_unclaimed_rewards = ComputedValueVecsFromHeight::forced_import(
            db,
            "unclaimed_rewards",
            Source::Compute,
            ver,
            VecBuilderOptions::default().add_sum().add_cumulative(),
            compute_dollars,
            indexes,
        )?;

        let dateindex_to_fee_dominance =
            EagerVec::forced_import(db, "fee_dominance", ver)?;
        let dateindex_to_subsidy_dominance =
            EagerVec::forced_import(db, "subsidy_dominance", ver)?;

        // The USD subsidy SMA only exists when dollar metrics are enabled.
        let indexes_to_subsidy_usd_1y_sma = if compute_dollars {
            Some(ComputedVecsFromDateIndex::forced_import(
                db,
                "subsidy_usd_1y_sma",
                Source::Compute,
                ver,
                indexes,
                VecBuilderOptions::default().add_last(),
            )?)
        } else {
            None
        };

        Ok(Self {
            height_to_24h_coinbase_sum,
            height_to_24h_coinbase_usd_sum,
            indexes_to_coinbase,
            indexes_to_subsidy,
            indexes_to_unclaimed_rewards,
            dateindex_to_fee_dominance,
            dateindex_to_subsidy_dominance,
            indexes_to_subsidy_usd_1y_sma,
        })
    }
}

View File

@@ -0,0 +1,5 @@
//! Rewards metrics: coinbase, subsidy, unclaimed rewards, fee/subsidy dominance.

mod compute; // metric computation passes
mod import;  // vec construction / forced DB import
mod vecs;    // the `Vecs` container type

pub use vecs::Vecs;

View File

@@ -0,0 +1,18 @@
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32};
use vecdb::{EagerVec, PcoVec};
use crate::internal::{ComputedValueVecsFromHeight, ComputedVecsFromDateIndex};
/// Coinbase/subsidy/rewards metrics
#[derive(Clone, Traversable)]
pub struct Vecs {
    /// Trailing sum of coinbase sats over the blocks of the last 24h window.
    pub height_to_24h_coinbase_sum: EagerVec<PcoVec<Height, Sats>>,
    /// USD counterpart of the trailing 24h coinbase sum.
    pub height_to_24h_coinbase_usd_sum: EagerVec<PcoVec<Height, Dollars>>,
    /// Per-height coinbase value plus derived per-index aggregates.
    pub indexes_to_coinbase: ComputedValueVecsFromHeight,
    /// Block subsidy (coinbase minus fees) plus derived aggregates.
    pub indexes_to_subsidy: ComputedValueVecsFromHeight,
    /// Expected epoch subsidy minus actual subsidy, per block.
    pub indexes_to_unclaimed_rewards: ComputedValueVecsFromHeight,
    /// Daily fees as a percentage of coinbase (NaN when coinbase is zero).
    pub dateindex_to_fee_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
    /// Daily subsidy as a percentage of coinbase (NaN when coinbase is zero).
    pub dateindex_to_subsidy_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
    /// 365-day SMA of the daily issuance in USD; present only when dollar
    /// metrics are enabled. NOTE(review): the compute pass feeds this from the
    /// daily *coinbase* USD sum despite the "subsidy" name — confirm intended.
    pub indexes_to_subsidy_usd_1y_sma: Option<ComputedVecsFromDateIndex<Dollars>>,
}

View File

@@ -0,0 +1,32 @@
use brk_error::Result;
use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Computes the aggregated block-size metrics.
    ///
    /// Sources per-height values from the indexer's `height_to_total_size`
    /// for sizes, and from the lazy `height_to_vbytes` view for vbytes,
    /// then delegates aggregation to `compute_rest`.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        // Raw byte size straight from the indexer.
        self.indexes_to_block_size.compute_rest(
            indexes,
            starting_indexes,
            exit,
            Some(&indexer.vecs.block.height_to_total_size),
        )?;
        // vbytes come from this struct's own lazy weight-derived vec.
        self.indexes_to_block_vbytes.compute_rest(
            indexes,
            starting_indexes,
            exit,
            Some(&self.height_to_vbytes),
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,60 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Height, StoredU64, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1, VecIndex};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens (creating if needed) the block-size vecs inside `db`.
    ///
    /// `height_to_vbytes` is lazy: it derives virtual bytes on the fly from
    /// the indexer's per-height weight vec instead of persisting anything.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + Version::ZERO;

        // Both aggregates below carry the full statistics bundle.
        let all_stats = || {
            VecBuilderOptions::default()
                .add_average()
                .add_minmax()
                .add_percentiles()
                .add_sum()
                .add_cumulative()
        };

        // vbytes derived lazily from stored weights via `to_vbytes_floor`.
        let height_to_vbytes = LazyVecFrom1::init(
            "vbytes",
            ver,
            indexer.vecs.block.height_to_weight.boxed_clone(),
            |height: Height, weight_iter| {
                weight_iter
                    .get_at(height.to_usize())
                    .map(|weight| StoredU64::from(weight.to_vbytes_floor()))
            },
        );

        let indexes_to_block_size = ComputedVecsFromHeight::forced_import(
            db,
            "block_size",
            Source::Vec(indexer.vecs.block.height_to_total_size.boxed_clone()),
            ver,
            indexes,
            all_stats(),
        )?;

        let indexes_to_block_vbytes = ComputedVecsFromHeight::forced_import(
            db,
            "block_vbytes",
            Source::Vec(height_to_vbytes.boxed_clone()),
            ver,
            indexes,
            all_stats(),
        )?;

        Ok(Self {
            indexes_to_block_size,
            indexes_to_block_vbytes,
            height_to_vbytes,
        })
    }
}

View File

@@ -0,0 +1,5 @@
//! Block size metrics: total byte size and virtual bytes.

mod compute; // metric computation passes
mod import;  // vec construction / forced DB import
mod vecs;    // the `Vecs` container type

pub use vecs::Vecs;

View File

@@ -0,0 +1,12 @@
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64, Weight};
use vecdb::LazyVecFrom1;
use crate::internal::ComputedVecsFromHeight;
/// Block size metrics (bytes and virtual bytes)
#[derive(Clone, Traversable)]
pub struct Vecs {
    /// Lazy per-height virtual bytes, derived from the indexer's block
    /// weight vec via `to_vbytes_floor` — nothing persisted.
    pub height_to_vbytes: LazyVecFrom1<Height, StoredU64, Height, Weight>,
    /// Per-height total block size plus derived per-index aggregates.
    pub indexes_to_block_size: ComputedVecsFromHeight<StoredU64>,
    /// Per-height block vbytes plus derived per-index aggregates.
    pub indexes_to_block_vbytes: ComputedVecsFromHeight<StoredU64>,
}

View File

@@ -0,0 +1,62 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::Timestamp;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Compute height-to-time fields early, before indexes are computed.
    /// These are needed by indexes::block to compute height_to_dateindex.
    ///
    /// "Fixed" timestamps are a running maximum over heights: each block's
    /// timestamp is clamped up to the previous fixed value, so the resulting
    /// series never decreases even when raw block timestamps go backwards.
    pub fn compute_early(
        &mut self,
        indexer: &Indexer,
        starting_height: brk_types::Height,
        exit: &Exit,
    ) -> Result<()> {
        // Running maximum; `None` until seeded from the previous height (or
        // from the first processed block when starting at height 0).
        let mut prev_timestamp_fixed = None;
        self.height_to_timestamp_fixed.compute_transform(
            starting_height,
            &indexer.vecs.block.height_to_timestamp,
            |(h, timestamp, height_to_timestamp_fixed_iter)| {
                // Lazily seed the running max from the already-stored fixed
                // timestamp of the block below the starting height, if any.
                if prev_timestamp_fixed.is_none()
                    && let Some(prev_h) = h.decremented()
                {
                    prev_timestamp_fixed.replace(
                        height_to_timestamp_fixed_iter
                            .into_iter()
                            .get_unwrap(prev_h),
                    );
                }
                // Never let the fixed timestamp move backwards.
                let timestamp_fixed =
                    prev_timestamp_fixed.map_or(timestamp, |prev_d| prev_d.max(timestamp));
                prev_timestamp_fixed.replace(timestamp_fixed);
                (h, timestamp_fixed)
            },
            exit,
        )?;
        Ok(())
    }

    /// Computes the date-indexed timestamp vecs from `dateindex_to_date`.
    pub fn compute(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.timeindexes_to_timestamp
            .compute_all(starting_indexes, exit, |vec| {
                vec.compute_transform(
                    starting_indexes.dateindex,
                    &indexes.time.dateindex_to_date,
                    // Each date maps straight to its timestamp representation.
                    |(di, d, ..)| (di, Timestamp::from(d)),
                    exit,
                )?;
                Ok(())
            })?;
        Ok(())
    }
}

View File

@@ -0,0 +1,61 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Date, DifficultyEpoch, Height, Version};
use vecdb::{
Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, VecIndex,
};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromDateIndex, Source, VecBuilderOptions},
};
impl Vecs {
    /// Opens (creating if needed) the time/date vecs inside `db`.
    ///
    /// Only `height_to_timestamp_fixed` and `timeindexes_to_timestamp` are
    /// persisted; the remaining vecs are lazy views over stored data.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + Version::ZERO;

        // Eager vec holding the "fixed" (non-decreasing) per-block timestamps.
        let height_to_timestamp_fixed =
            EagerVec::forced_import(db, "timestamp_fixed", ver)?;

        // Block date derived lazily from the raw indexer timestamps.
        let height_to_date = LazyVecFrom1::init(
            "date",
            ver,
            indexer.vecs.block.height_to_timestamp.boxed_clone(),
            |height: Height, timestamp_iter| {
                timestamp_iter.get_at(height.to_usize()).map(Date::from)
            },
        );

        // Same derivation, but over the fixed timestamps.
        let height_to_date_fixed = LazyVecFrom1::init(
            "date_fixed",
            ver,
            height_to_timestamp_fixed.boxed_clone(),
            |height: Height, timestamp_iter| timestamp_iter.get(height).map(Date::from),
        );

        // Timestamp of the first block of each difficulty epoch.
        let difficultyepoch_to_timestamp = LazyVecFrom2::init(
            "timestamp",
            ver,
            indexes.block.difficultyepoch_to_first_height.boxed_clone(),
            indexer.vecs.block.height_to_timestamp.boxed_clone(),
            |di: DifficultyEpoch, first_height_iter, timestamp_iter| {
                first_height_iter
                    .get(di)
                    .and_then(|h: Height| timestamp_iter.get(h))
            },
        );

        let timeindexes_to_timestamp = ComputedVecsFromDateIndex::forced_import(
            db,
            "timestamp",
            Source::Compute,
            ver,
            indexes,
            VecBuilderOptions::default().add_first(),
        )?;

        Ok(Self {
            height_to_date,
            height_to_date_fixed,
            height_to_timestamp_fixed,
            difficultyepoch_to_timestamp,
            timeindexes_to_timestamp,
        })
    }
}

View File

@@ -0,0 +1,5 @@
//! Block time metrics: timestamps (raw and fixed) and derived dates.

mod compute; // metric computation passes
mod import;  // vec construction / forced DB import
mod vecs;    // the `Vecs` container type

pub use vecs::Vecs;

View File

@@ -0,0 +1,16 @@
use brk_traversable::Traversable;
use brk_types::{Date, DifficultyEpoch, Height, Timestamp};
use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec};
use crate::internal::ComputedVecsFromDateIndex;
/// Timestamp and date metrics for blocks
#[derive(Clone, Traversable)]
pub struct Vecs {
    /// Lazy: block date derived from the raw block timestamp.
    pub height_to_date: LazyVecFrom1<Height, Date, Height, Timestamp>,
    /// Lazy: block date derived from the fixed (non-decreasing) timestamp.
    pub height_to_date_fixed: LazyVecFrom1<Height, Date, Height, Timestamp>,
    /// Persisted running-max timestamps: never decreases with height even
    /// when raw block timestamps go backwards.
    pub height_to_timestamp_fixed: EagerVec<PcoVec<Height, Timestamp>>,
    /// Lazy: timestamp of the first block of each difficulty epoch.
    pub difficultyepoch_to_timestamp:
        LazyVecFrom2<DifficultyEpoch, Timestamp, DifficultyEpoch, Height, Height, Timestamp>,
    /// Date-indexed timestamps derived from `dateindex_to_date`.
    pub timeindexes_to_timestamp: ComputedVecsFromDateIndex<Timestamp>,
}

View File

@@ -0,0 +1,25 @@
use brk_error::Result;
use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
impl Vecs {
    /// Computes the aggregated block-weight metrics.
    ///
    /// Delegates to `compute_rest`, sourcing per-height values from the
    /// indexer's `height_to_weight` vec. The fullness vecs are lazy views
    /// over these weights, so nothing else needs computing here.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.indexes_to_block_weight.compute_rest(
            indexes,
            starting_indexes,
            exit,
            Some(&indexer.vecs.block.height_to_weight),
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,50 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::Version;
use vecdb::{Database, IterableCloneableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedVecsFromHeight, LazyVecsFromHeight, Source, VecBuilderOptions, WeightToFullness},
};
impl Vecs {
    /// Opens (creating if needed) the block-weight vecs inside `db`.
    ///
    /// Weight is persisted with full statistics; fullness is a lazy
    /// projection of weight and stores nothing of its own.
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + Version::ZERO;

        // Per-height block weight with the full statistics bundle.
        let indexes_to_block_weight = ComputedVecsFromHeight::forced_import(
            db,
            "block_weight",
            Source::Vec(indexer.vecs.block.height_to_weight.boxed_clone()),
            ver,
            indexes,
            VecBuilderOptions::default()
                .add_average()
                .add_minmax()
                .add_percentiles()
                .add_sum()
                .add_cumulative(),
        )?;

        // Fullness derives lazily from weight via `WeightToFullness`.
        let indexes_to_block_fullness = LazyVecsFromHeight::from_computed::<WeightToFullness>(
            "block_fullness",
            ver,
            indexer.vecs.block.height_to_weight.boxed_clone(),
            &indexes_to_block_weight,
        );

        Ok(Self {
            indexes_to_block_weight,
            indexes_to_block_fullness,
        })
    }
}

View File

@@ -0,0 +1,5 @@
//! Block weight metrics: weight and derived block fullness.

mod compute; // metric computation passes
mod import;  // vec construction / forced DB import
mod vecs;    // the `Vecs` container type

pub use vecs::Vecs;

View File

@@ -0,0 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, Weight};
use crate::internal::{ComputedVecsFromHeight, LazyVecsFromHeight};
/// Block weight and fullness metrics
#[derive(Clone, Traversable)]
pub struct Vecs {
    /// Per-height block weight plus derived per-index aggregates.
    pub indexes_to_block_weight: ComputedVecsFromHeight<Weight>,
    /// Block fullness as percentage of max block weight (0-100%)
    /// Lazy projection of weight via `WeightToFullness` — nothing persisted.
    pub indexes_to_block_fullness: LazyVecsFromHeight<StoredF32, Weight>,
}