server: snapshot

This commit is contained in:
nym21
2025-12-15 16:32:45 +01:00
parent 882a3525af
commit 825a4a77c0
100 changed files with 2677 additions and 3438 deletions

View File

@@ -0,0 +1,43 @@
use brk_error::Result;
use brk_types::{BlockFeeRatesEntry, FeeRatePercentiles, TimePeriod};
use vecdb::{IterableVec, VecIndex};
use super::dateindex_iter::DateIndexIter;
use crate::Query;
impl Query {
    /// Fee-rate percentiles per sampled dateindex over the requested period.
    ///
    /// NOTE(review): missing percentile values fall back to their default
    /// (presumably zero) instead of dropping the entry — confirm this is
    /// intended, since `block_fees`/`block_rewards` skip days with no data.
    pub fn block_fee_rates(&self, time_period: TimePeriod) -> Result<Vec<BlockFeeRatesEntry>> {
        let computer = self.computer();
        let tip = self.height().to_usize();
        let start = tip.saturating_sub(time_period.block_count());
        let iter = DateIndexIter::new(computer, start, tip);
        let vecs = &computer.chain.indexes_to_fee_rate.dateindex;
        let mut min_iter = vecs.unwrap_min().iter();
        let mut p10_iter = vecs.unwrap_pct10().iter();
        let mut p25_iter = vecs.unwrap_pct25().iter();
        let mut p50_iter = vecs.unwrap_median().iter();
        let mut p75_iter = vecs.unwrap_pct75().iter();
        let mut p90_iter = vecs.unwrap_pct90().iter();
        let mut max_iter = vecs.unwrap_max().iter();
        Ok(iter.collect(|di, timestamp, avg_height| {
            let percentiles = FeeRatePercentiles::new(
                min_iter.get(di).unwrap_or_default(),
                p10_iter.get(di).unwrap_or_default(),
                p25_iter.get(di).unwrap_or_default(),
                p50_iter.get(di).unwrap_or_default(),
                p75_iter.get(di).unwrap_or_default(),
                p90_iter.get(di).unwrap_or_default(),
                max_iter.get(di).unwrap_or_default(),
            );
            Some(BlockFeeRatesEntry {
                avg_height,
                timestamp,
                percentiles,
            })
        }))
    }
}

View File

@@ -0,0 +1,34 @@
use brk_error::Result;
use brk_types::{BlockFeesEntry, TimePeriod};
use vecdb::{IterableVec, VecIndex};
use super::dateindex_iter::DateIndexIter;
use crate::Query;
impl Query {
    /// Average block fees (in sats) per sampled dateindex over the period.
    /// Days with no fee data are skipped.
    pub fn block_fees(&self, time_period: TimePeriod) -> Result<Vec<BlockFeesEntry>> {
        let computer = self.computer();
        let tip = self.height().to_usize();
        let start = tip.saturating_sub(time_period.block_count());
        let iter = DateIndexIter::new(computer, start, tip);
        let mut avg_fee_iter = computer
            .chain
            .indexes_to_fee
            .sats
            .dateindex
            .unwrap_average()
            .iter();
        Ok(iter.collect(|di, timestamp, avg_height| {
            avg_fee_iter.get(di).map(|avg_fees| BlockFeesEntry {
                avg_height,
                timestamp,
                avg_fees,
            })
        }))
    }
}

View File

@@ -0,0 +1,35 @@
use brk_error::Result;
use brk_types::{BlockRewardsEntry, TimePeriod};
use vecdb::{IterableVec, VecIndex};
use super::dateindex_iter::DateIndexIter;
use crate::Query;
impl Query {
    /// Average block rewards per sampled dateindex over the period.
    /// Coinbase = subsidy + fees; days with no data are skipped.
    pub fn block_rewards(&self, time_period: TimePeriod) -> Result<Vec<BlockRewardsEntry>> {
        let computer = self.computer();
        let tip = self.height().to_usize();
        let start = tip.saturating_sub(time_period.block_count());
        let iter = DateIndexIter::new(computer, start, tip);
        let mut avg_reward_iter = computer
            .chain
            .indexes_to_coinbase
            .sats
            .dateindex
            .unwrap_average()
            .iter();
        Ok(iter.collect(|di, ts, h| {
            avg_reward_iter.get(di).map(|reward| BlockRewardsEntry {
                avg_height: h.into(),
                timestamp: *ts,
                avg_rewards: *reward,
            })
        }))
    }
}

View File

@@ -0,0 +1,61 @@
use brk_error::Result;
use brk_types::{BlockSizeEntry, BlockSizesWeights, BlockWeightEntry, TimePeriod};
use vecdb::{IterableVec, VecIndex};
use super::dateindex_iter::DateIndexIter;
use crate::Query;
impl Query {
    /// Average block sizes and weights per sampled dateindex over the period.
    ///
    /// One pass gathers both metrics per sampled day; each output series then
    /// keeps only the points for which its own metric was available.
    pub fn block_sizes_weights(&self, time_period: TimePeriod) -> Result<BlockSizesWeights> {
        let computer = self.computer();
        let tip = self.height().to_usize();
        let start = tip.saturating_sub(time_period.block_count());
        let iter = DateIndexIter::new(computer, start, tip);
        let mut size_iter = computer
            .chain
            .indexes_to_block_size
            .dateindex
            .unwrap_average()
            .iter();
        let mut weight_iter = computer
            .chain
            .indexes_to_block_weight
            .dateindex
            .unwrap_average()
            .iter();
        // (height, timestamp, avg size?, avg weight?) for each sampled day.
        let samples: Vec<_> = iter.collect(|di, ts, h| {
            let size = size_iter.get(di).map(|s| *s);
            let weight = weight_iter.get(di).map(|w| *w);
            Some((h.into(), *ts, size, weight))
        });
        let sizes = samples
            .iter()
            .filter_map(|(h, ts, size, _)| {
                size.map(|avg_size| BlockSizeEntry {
                    avg_height: *h,
                    timestamp: *ts,
                    avg_size,
                })
            })
            .collect();
        let weights = samples
            .iter()
            .filter_map(|(h, ts, _, weight)| {
                weight.map(|avg_weight| BlockWeightEntry {
                    avg_height: *h,
                    timestamp: *ts,
                    avg_weight,
                })
            })
            .collect();
        Ok(BlockSizesWeights { sizes, weights })
    }
}

View File

@@ -0,0 +1,71 @@
use brk_computer::Computer;
use brk_types::{DateIndex, Height, Timestamp};
use vecdb::{GenericStoredVec, IterableVec, VecIndex};
/// Helper for iterating over dateindex ranges with sampling.
///
/// Maps a block-height range onto its dateindex range, then walks that range
/// with a stride chosen so at most ~200 points are produced.
pub struct DateIndexIter<'a> {
    computer: &'a Computer,
    start_di: DateIndex,
    end_di: DateIndex,
    step: usize,
}

impl<'a> DateIndexIter<'a> {
    /// Build a sampler spanning the dateindexes of `start_height..=end_height`.
    /// Heights that cannot be resolved fall back to the default dateindex.
    pub fn new(computer: &'a Computer, start_height: usize, end_height: usize) -> Self {
        let start_di = computer
            .indexes
            .height_to_dateindex
            .read_once(Height::from(start_height))
            .unwrap_or_default();
        let end_di = computer
            .indexes
            .height_to_dateindex
            .read_once(Height::from(end_height))
            .unwrap_or_default();
        // Cap output at roughly 200 samples.
        let total = end_di.to_usize().saturating_sub(start_di.to_usize()) + 1;
        Self {
            computer,
            start_di,
            end_di,
            step: (total / 200).max(1),
        }
    }

    /// Iterate and collect entries using the provided transform function.
    ///
    /// For each sampled dateindex, the day's first timestamp and first height
    /// are looked up; days missing either value, or for which `transform`
    /// returns `None`, are skipped.
    pub fn collect<T, F>(&self, mut transform: F) -> Vec<T>
    where
        F: FnMut(DateIndex, Timestamp, Height) -> Option<T>,
    {
        let span = self
            .end_di
            .to_usize()
            .saturating_sub(self.start_di.to_usize())
            + 1;
        let mut timestamp_iter = self
            .computer
            .chain
            .timeindexes_to_timestamp
            .dateindex_extra
            .unwrap_first()
            .iter();
        let mut height_iter = self.computer.indexes.dateindex_to_first_height.iter();
        let mut out = Vec::with_capacity(span / self.step + 1);
        for raw in (self.start_di.to_usize()..=self.end_di.to_usize()).step_by(self.step) {
            let di = DateIndex::from(raw);
            if let (Some(ts), Some(h)) = (timestamp_iter.get(di), height_iter.get(di))
                && let Some(entry) = transform(di, ts, h)
            {
                out.push(entry);
            }
        }
        out
    }
}

View File

@@ -0,0 +1,121 @@
use std::time::{SystemTime, UNIX_EPOCH};
use brk_error::Result;
use brk_types::{DifficultyAdjustment, DifficultyEpoch, Height};
use vecdb::GenericStoredVec;
use crate::Query;
/// Blocks per difficulty epoch (2 weeks target)
const BLOCKS_PER_EPOCH: u32 = 2016;
/// Target block time in seconds (10 minutes)
const TARGET_BLOCK_TIME: u64 = 600;

impl Query {
    /// Difficulty-adjustment status for the current epoch.
    ///
    /// Reports epoch progress, the estimated difficulty change and retarget
    /// date extrapolated from the average block time observed so far in the
    /// epoch, and the percentage change applied at the previous retarget.
    ///
    /// # Errors
    /// Fails when any underlying epoch/timestamp/difficulty lookup fails.
    pub fn difficulty_adjustment(&self) -> Result<DifficultyAdjustment> {
        let indexer = self.indexer();
        let computer = self.computer();
        let current_height = self.height();
        let current_height_u32: u32 = current_height.into();
        // Current difficulty epoch and the height at which it began.
        let current_epoch = computer
            .indexes
            .height_to_difficultyepoch
            .read_once(current_height)?;
        let current_epoch_usize: usize = current_epoch.into();
        let epoch_start_height = computer
            .indexes
            .difficultyepoch_to_first_height
            .read_once(current_epoch)?;
        let epoch_start_u32: u32 = epoch_start_height.into();
        // Epoch progress.
        let next_retarget_height = epoch_start_u32 + BLOCKS_PER_EPOCH;
        let blocks_into_epoch = current_height_u32 - epoch_start_u32;
        let remaining_blocks = next_retarget_height - current_height_u32;
        let progress_percent = (blocks_into_epoch as f64 / BLOCKS_PER_EPOCH as f64) * 100.0;
        // Timestamps at the epoch start and at the chain tip.
        let epoch_start_timestamp = computer
            .chain
            .difficultyepoch_to_timestamp
            .read_once(current_epoch)?;
        let current_timestamp = indexer
            .vecs
            .block
            .height_to_timestamp
            .read_once(current_height)?;
        // Average block time in the current epoch. Block timestamps are not
        // monotonic, so the epoch-start timestamp can exceed the tip's;
        // saturate instead of underflowing (panic in debug builds, absurd
        // elapsed time in release builds).
        let elapsed_time = (*current_timestamp).saturating_sub(*epoch_start_timestamp) as u64;
        let time_avg = if blocks_into_epoch > 0 {
            elapsed_time / blocks_into_epoch as u64
        } else {
            TARGET_BLOCK_TIME
        };
        // Estimate remaining time and retarget date from the average so far.
        let remaining_time = remaining_blocks as u64 * time_avg;
        // Fall back to the tip's timestamp if the system clock predates the
        // Unix epoch.
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(*current_timestamp as u64);
        let estimated_retarget_date = now + remaining_time;
        // Expected vs actual elapsed time -> estimated difficulty change.
        let expected_time = blocks_into_epoch as u64 * TARGET_BLOCK_TIME;
        let difficulty_change = if elapsed_time > 0 && blocks_into_epoch > 0 {
            ((expected_time as f64 / elapsed_time as f64) - 1.0) * 100.0
        } else {
            0.0
        };
        // Difference between expected and actual elapsed time.
        let time_offset = expected_time as i64 - elapsed_time as i64;
        // Previous retarget: compare the difficulty at this epoch's first
        // block against the previous epoch's first block.
        let previous_retarget = if current_epoch_usize > 0 {
            let prev_epoch = DifficultyEpoch::from(current_epoch_usize - 1);
            let prev_epoch_start = computer
                .indexes
                .difficultyepoch_to_first_height
                .read_once(prev_epoch)?;
            let prev_difficulty = indexer
                .vecs
                .block
                .height_to_difficulty
                .read_once(prev_epoch_start)?;
            let curr_difficulty = indexer
                .vecs
                .block
                .height_to_difficulty
                .read_once(epoch_start_height)?;
            // Guard divide-by-zero on a missing/zero stored difficulty.
            if *prev_difficulty > 0.0 {
                ((*curr_difficulty / *prev_difficulty) - 1.0) * 100.0
            } else {
                0.0
            }
        } else {
            0.0
        };
        Ok(DifficultyAdjustment {
            progress_percent,
            difficulty_change,
            estimated_retarget_date,
            remaining_blocks,
            remaining_time,
            previous_retarget,
            next_retarget_height: Height::from(next_retarget_height),
            time_avg,
            // No separate adjusted average is computed yet; mirrors time_avg.
            adjusted_time_avg: time_avg,
            time_offset,
        })
    }
}

View File

@@ -0,0 +1,26 @@
use brk_error::Result;
use brk_types::{DifficultyAdjustmentEntry, TimePeriod};
use vecdb::VecIndex;
use super::epochs::iter_difficulty_epochs;
use crate::Query;
impl Query {
    /// Difficulty adjustments within `time_period` (all epochs when `None`),
    /// returned newest first.
    pub fn difficulty_adjustments(
        &self,
        time_period: Option<TimePeriod>,
    ) -> Result<Vec<DifficultyAdjustmentEntry>> {
        let end = self.height().to_usize();
        let start = time_period.map_or(0, |tp| end.saturating_sub(tp.block_count()));
        let mut entries = iter_difficulty_epochs(self.computer(), start, end);
        // Reverse chronological order (newest first).
        entries.reverse();
        Ok(entries)
    }
}

View File

@@ -0,0 +1,63 @@
use brk_computer::Computer;
use brk_types::{DifficultyAdjustmentEntry, DifficultyEpoch, Height};
use vecdb::{GenericStoredVec, IterableVec, VecIndex};
/// Iterate over difficulty epochs within a height range.
///
/// Emits one entry per epoch whose first block height is at or above
/// `start_height`, carrying the epoch's timestamp, first height, difficulty,
/// and percentage change versus the previous epoch (0.0 when no prior
/// difficulty is known).
pub fn iter_difficulty_epochs(
    computer: &Computer,
    start_height: usize,
    end_height: usize,
) -> Vec<DifficultyAdjustmentEntry> {
    let start_epoch = computer
        .indexes
        .height_to_difficultyepoch
        .read_once(Height::from(start_height))
        .unwrap_or_default();
    let end_epoch = computer
        .indexes
        .height_to_difficultyepoch
        .read_once(Height::from(end_height))
        .unwrap_or_default();
    let mut epoch_to_height_iter = computer.indexes.difficultyepoch_to_first_height.iter();
    let mut epoch_to_timestamp_iter = computer.chain.difficultyepoch_to_timestamp.iter();
    let mut epoch_to_difficulty_iter = computer
        .chain
        .indexes_to_difficulty
        .difficultyepoch
        .unwrap_last()
        .iter();
    // saturating_sub: if either epoch lookup above fell back to its default,
    // start could exceed end and the subtraction would otherwise underflow.
    let mut results =
        Vec::with_capacity(end_epoch.to_usize().saturating_sub(start_epoch.to_usize()) + 1);
    let mut prev_difficulty: Option<f64> = None;
    for epoch_usize in start_epoch.to_usize()..=end_epoch.to_usize() {
        let epoch = DifficultyEpoch::from(epoch_usize);
        let epoch_height = epoch_to_height_iter.get(epoch).unwrap_or_default();
        // Skip epochs before our start height but keep tracking difficulty so
        // the first emitted epoch still gets a correct change_percent.
        if epoch_height.to_usize() < start_height {
            prev_difficulty = epoch_to_difficulty_iter.get(epoch).map(|d| *d);
            continue;
        }
        let epoch_timestamp = epoch_to_timestamp_iter.get(epoch).unwrap_or_default();
        let epoch_difficulty = *epoch_to_difficulty_iter.get(epoch).unwrap_or_default();
        let change_percent = match prev_difficulty {
            Some(prev) if prev > 0.0 => ((epoch_difficulty / prev) - 1.0) * 100.0,
            _ => 0.0,
        };
        results.push(DifficultyAdjustmentEntry {
            timestamp: epoch_timestamp,
            height: epoch_height,
            difficulty: epoch_difficulty,
            change_percent,
        });
        prev_difficulty = Some(epoch_difficulty);
    }
    results
}

View File

@@ -0,0 +1,100 @@
use brk_error::Result;
use brk_types::{DateIndex, DifficultyEntry, HashrateEntry, HashrateSummary, Height, TimePeriod};
use vecdb::{GenericStoredVec, IterableVec, VecIndex};
use super::epochs::iter_difficulty_epochs;
use crate::Query;
impl Query {
    /// Hashrate history (sampled to at most ~200 points) and difficulty
    /// epochs over the requested period, plus the current hashrate and
    /// difficulty at the chain tip.
    pub fn hashrate(&self, time_period: Option<TimePeriod>) -> Result<HashrateSummary> {
        let indexer = self.indexer();
        let computer = self.computer();
        let current_height = self.height();
        // Difficulty at the chain tip.
        let current_difficulty = *indexer
            .vecs
            .block
            .height_to_difficulty
            .read_once(current_height)?;
        // Hashrate at the tip, via the tip's dateindex.
        let current_dateindex = computer
            .indexes
            .height_to_dateindex
            .read_once(current_height)?;
        let current_hashrate = *computer
            .chain
            .indexes_to_hash_rate
            .dateindex
            .unwrap_last()
            .read_once(current_dateindex)? as u128;
        // Height range implied by the time period (whole chain when None).
        let end = current_height.to_usize();
        let start = time_period.map_or(0, |tp| end.saturating_sub(tp.block_count()));
        let start_dateindex = computer
            .indexes
            .height_to_dateindex
            .read_once(Height::from(start))?;
        let end_dateindex = current_dateindex;
        // Sample at regular intervals so we emit at most ~200 data points.
        let total_days = end_dateindex
            .to_usize()
            .saturating_sub(start_dateindex.to_usize())
            + 1;
        let step = (total_days / 200).max(1);
        let mut hashrate_iter = computer
            .chain
            .indexes_to_hash_rate
            .dateindex
            .unwrap_last()
            .iter();
        let mut timestamp_iter = computer
            .chain
            .timeindexes_to_timestamp
            .dateindex_extra
            .unwrap_first()
            .iter();
        let mut hashrates = Vec::with_capacity(total_days / step + 1);
        for raw in (start_dateindex.to_usize()..=end_dateindex.to_usize()).step_by(step) {
            let dateindex = DateIndex::from(raw);
            if let (Some(hr), Some(timestamp)) =
                (hashrate_iter.get(dateindex), timestamp_iter.get(dateindex))
            {
                hashrates.push(HashrateEntry {
                    timestamp,
                    avg_hashrate: (*hr) as u128,
                });
            }
        }
        // Difficulty epochs falling inside the same period.
        let difficulty: Vec<DifficultyEntry> = iter_difficulty_epochs(computer, start, end)
            .into_iter()
            .map(|e| DifficultyEntry {
                timestamp: e.timestamp,
                difficulty: e.difficulty,
                height: e.height,
            })
            .collect();
        Ok(HashrateSummary {
            hashrates,
            difficulty,
            current_hashrate,
            current_difficulty,
        })
    }
}

View File

@@ -0,0 +1,11 @@
// Mining/statistics query endpoints, one module per endpoint.
mod block_fee_rates;
mod block_fees;
mod block_rewards;
mod block_sizes;
// Shared sampling helper over dateindex ranges.
mod dateindex_iter;
mod difficulty;
mod difficulty_adjustments;
// Shared difficulty-epoch iteration helper.
mod epochs;
mod hashrate;
mod pools;
mod reward_stats;

View File

@@ -0,0 +1,171 @@
use brk_error::{Error, Result};
use brk_types::{
Height, PoolBlockCounts, PoolBlockShares, PoolDetail, PoolDetailInfo, PoolInfo, PoolSlug,
PoolStats, PoolsSummary, TimePeriod, pools,
};
use vecdb::{AnyVec, IterableVec, VecIndex};
use crate::Query;
impl Query {
    /// Per-pool block counts and shares over `time_period`.
    ///
    /// Each pool's count over the window is derived from its cumulative
    /// blocks-mined vector: cumulative(end) - cumulative(start - 1). Pools
    /// with no blocks in the period are omitted; results are ranked by block
    /// count, descending.
    pub fn mining_pools(&self, time_period: TimePeriod) -> Result<PoolsSummary> {
        let computer = self.computer();
        let current_height = self.height();
        let end = current_height.to_usize();
        // No blocks indexed yet
        if computer.pools.height_to_pool.len() == 0 {
            return Ok(PoolsSummary {
                pools: vec![],
                block_count: 0,
                last_estimated_hashrate: 0,
            });
        }
        // Calculate start height based on time period
        let start = end.saturating_sub(time_period.block_count());
        let pools = pools();
        let mut pool_data: Vec<(&'static brk_types::Pool, u32)> = Vec::new();
        // For each pool, get cumulative count at end and start, subtract to get range count
        for (pool_id, pool_vecs) in &computer.pools.vecs {
            let mut cumulative = pool_vecs
                .indexes_to_blocks_mined
                .height_extra
                .unwrap_cumulative()
                .iter();
            let count_at_end: u32 = *cumulative.get(current_height).unwrap_or_default();
            // Cumulative count *before* the window start; zero when the
            // window begins at genesis (no height start - 1 to read).
            let count_at_start: u32 = if start == 0 {
                0
            } else {
                *cumulative.get(Height::from(start - 1)).unwrap_or_default()
            };
            let block_count = count_at_end.saturating_sub(count_at_start);
            // Only include pools that mined at least one block in the period
            if block_count > 0 {
                pool_data.push((pools.get(*pool_id), block_count));
            }
        }
        // Sort by block count descending
        pool_data.sort_by(|a, b| b.1.cmp(&a.1));
        let total_blocks: u32 = pool_data.iter().map(|(_, count)| count).sum();
        // Build stats with ranks; rank is 1-based position after the sort,
        // share is this pool's fraction of all attributed blocks in range.
        let pool_stats: Vec<PoolStats> = pool_data
            .into_iter()
            .enumerate()
            .map(|(idx, (pool, block_count))| {
                let share = if total_blocks > 0 {
                    block_count as f64 / total_blocks as f64
                } else {
                    0.0
                };
                PoolStats::new(pool, block_count, (idx + 1) as u32, share)
            })
            .collect();
        // TODO: Calculate actual hashrate from difficulty
        let last_estimated_hashrate = 0u128;
        Ok(PoolsSummary {
            pools: pool_stats,
            block_count: total_blocks,
            last_estimated_hashrate,
        })
    }

    /// Static info for every known pool (no chain data involved).
    pub fn all_pools(&self) -> Vec<PoolInfo> {
        pools().iter().map(PoolInfo::from).collect()
    }

    /// Detail view for a single pool: block counts and network shares over
    /// all time, the last 144 blocks (~24h), and the last 1008 blocks (~1w).
    ///
    /// Hashrate fields are placeholders for now (see TODO below).
    ///
    /// # Errors
    /// Returns an error if no indexed data exists for `slug`.
    pub fn pool_detail(&self, slug: PoolSlug) -> Result<PoolDetail> {
        let computer = self.computer();
        let current_height = self.height();
        let end = current_height.to_usize();
        let pools_list = pools();
        let pool = pools_list.get(slug);
        // Get pool vecs for this specific pool
        let pool_vecs = computer
            .pools
            .vecs
            .get(&slug)
            .ok_or_else(|| Error::Str("Pool data not found"))?;
        let mut cumulative = pool_vecs
            .indexes_to_blocks_mined
            .height_extra
            .unwrap_cumulative()
            .iter();
        // Get total blocks (all time)
        let total_all: u32 = *cumulative.get(current_height).unwrap_or_default();
        // Get blocks for 24h (144 blocks): total minus cumulative count
        // before the window start.
        let start_24h = end.saturating_sub(144);
        let count_before_24h: u32 = if start_24h == 0 {
            0
        } else {
            *cumulative
                .get(Height::from(start_24h - 1))
                .unwrap_or_default()
        };
        let total_24h = total_all.saturating_sub(count_before_24h);
        // Get blocks for 1w (1008 blocks), same scheme as above.
        let start_1w = end.saturating_sub(1008);
        let count_before_1w: u32 = if start_1w == 0 {
            0
        } else {
            *cumulative
                .get(Height::from(start_1w - 1))
                .unwrap_or_default()
        };
        let total_1w = total_all.saturating_sub(count_before_1w);
        // Total network blocks per window, for share calculation. Heights are
        // 0-based, hence the + 1 for the inclusive count.
        let network_blocks_all = (end + 1) as u32;
        let network_blocks_24h = (end - start_24h + 1) as u32;
        let network_blocks_1w = (end - start_1w + 1) as u32;
        let share_all = if network_blocks_all > 0 {
            total_all as f64 / network_blocks_all as f64
        } else {
            0.0
        };
        let share_24h = if network_blocks_24h > 0 {
            total_24h as f64 / network_blocks_24h as f64
        } else {
            0.0
        };
        let share_1w = if network_blocks_1w > 0 {
            total_1w as f64 / network_blocks_1w as f64
        } else {
            0.0
        };
        Ok(PoolDetail {
            pool: PoolDetailInfo::from(pool),
            block_count: PoolBlockCounts {
                all: total_all,
                day: total_24h,
                week: total_1w,
            },
            block_share: PoolBlockShares {
                all: share_all,
                day: share_24h,
                week: share_1w,
            },
            estimated_hashrate: 0, // TODO: Calculate from share and network hashrate
            reported_hashrate: None,
        })
    }
}

View File

@@ -0,0 +1,66 @@
use brk_error::Result;
use brk_types::{Height, RewardStats, Sats};
use vecdb::{IterableVec, VecIndex};
use crate::Query;
impl Query {
    /// Aggregate coinbase rewards, total fees, and transaction counts over
    /// the last `block_count` blocks, ending at the chain tip (inclusive).
    ///
    /// A `block_count` of 0 is treated like 1 (the tip block alone); the
    /// start of the window is clamped at genesis.
    pub fn reward_stats(&self, block_count: usize) -> Result<RewardStats> {
        let computer = self.computer();
        let current_height = self.height();
        let end_block = current_height;
        // block_count.saturating_sub(1) guards block_count == 0, which would
        // otherwise underflow (panic in debug, wrap in release); the outer
        // saturating_sub clamps the start of the window at height 0.
        let start_block = Height::from(
            current_height
                .to_usize()
                .saturating_sub(block_count.saturating_sub(1)),
        );
        let mut coinbase_iter = computer
            .chain
            .indexes_to_coinbase
            .sats
            .height
            .as_ref()
            .unwrap()
            .iter();
        let mut fee_iter = computer
            .chain
            .indexes_to_fee
            .sats
            .height
            .unwrap_sum()
            .iter();
        let mut tx_count_iter = computer
            .chain
            .indexes_to_tx_count
            .height
            .as_ref()
            .unwrap()
            .iter();
        let mut total_reward = Sats::ZERO;
        let mut total_fee = Sats::ZERO;
        let mut total_tx: u64 = 0;
        // Walk every height in the window, summing per-block values; heights
        // with no stored data are skipped rather than treated as zero.
        for height in start_block.to_usize()..=end_block.to_usize() {
            let h = Height::from(height);
            if let Some(coinbase) = coinbase_iter.get(h) {
                total_reward += coinbase;
            }
            if let Some(fee) = fee_iter.get(h) {
                total_fee += fee;
            }
            if let Some(tx_count) = tx_count_iter.get(h) {
                total_tx += *tx_count;
            }
        }
        Ok(RewardStats {
            start_block,
            end_block,
            total_reward,
            total_fee,
            total_tx,
        })
    }
}