global: snapshot

This commit is contained in:
nym21
2026-03-10 11:22:17 +01:00
parent 64ef63a056
commit 5ede3dc416
40 changed files with 408 additions and 259 deletions

View File

@@ -0,0 +1,78 @@
use brk_error::Result;
use brk_types::{Dollars, Indexes};
use vecdb::Exit;
use super::{gini, Vecs};
use crate::{distribution, internal::RatioDollarsBp32, mining, transactions};
impl Vecs {
    /// Derives every indicator series from the already-computed mining,
    /// distribution and transaction vectors, then flushes and compacts the
    /// backing database.
    ///
    /// Computed here: Puell Multiple, Gini coefficient, RHODL Ratio and NVT.
    pub(crate) fn compute(
        &mut self,
        mining: &mining::Vecs,
        distribution: &distribution::Vecs,
        transactions: &transactions::Vecs,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        // Puell Multiple: daily subsidy (USD) divided by its 365-day SMA.
        let daily_subsidy_usd = &mining.rewards.subsidy.base.usd.height;
        let subsidy_sma_1y_usd = &mining.rewards.subsidy_sma_1y.usd.height;
        self.puell_multiple
            .bps
            .compute_binary::<Dollars, Dollars, RatioDollarsBp32>(
                starting_indexes.height,
                daily_subsidy_usd,
                subsidy_sma_1y_usd,
                exit,
            )?;

        // Gini coefficient (inequality of the UTXO amount distribution).
        gini::compute(&mut self.gini, distribution, starting_indexes, exit)?;

        // RHODL Ratio: realized cap of the 1d-1w cohort over the 1y-2y cohort.
        let young_realized_cap = &distribution
            .utxo_cohorts
            .age_range
            ._1d_to_1w
            .metrics
            .realized
            .cap
            .usd
            .height;
        let old_realized_cap = &distribution
            .utxo_cohorts
            .age_range
            ._1y_to_2y
            .metrics
            .realized
            .cap
            .usd
            .height;
        self.rhodl_ratio
            .bps
            .compute_binary::<Dollars, Dollars, RatioDollarsBp32>(
                starting_indexes.height,
                young_realized_cap,
                old_realized_cap,
                exit,
            )?;

        // NVT: market cap over 24h on-chain transfer volume (both USD).
        let market_cap_usd = &distribution
            .utxo_cohorts
            .all
            .metrics
            .supply
            .total
            .usd
            .height;
        let volume_24h_usd = &transactions.volume.sent_sum.rolling._24h.usd.height;
        self.nvt
            .bps
            .compute_binary::<Dollars, Dollars, RatioDollarsBp32>(
                starting_indexes.height,
                market_cap_usd,
                volume_24h_usd,
                exit,
            )?;

        // Persist: take the exit lock so we are not interrupted mid-compaction.
        let _lock = exit.lock();
        self.db.compact()?;
        Ok(())
    }
}

View File

@@ -0,0 +1,107 @@
use brk_error::Result;
use brk_types::{BasisPoints16, Indexes, Sats, StoredU64, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableVec, VecIndex, WritableVec};
use crate::{distribution, internal::PercentPerBlock};
/// Computes the per-height Gini coefficient of the UTXO amount distribution
/// and appends it to `gini.bps.height`, resuming from the vector's current
/// length (bounded by `starting_indexes.height`).
///
/// Each amount-range cohort contributes one Lorenz bucket per height:
/// (utxo count, total supply in sats).
pub(super) fn compute(
    gini: &mut PercentPerBlock<BasisPoints16>,
    distribution: &distribution::Vecs,
    starting_indexes: &Indexes,
    exit: &Exit,
) -> Result<()> {
    let cohorts = &distribution.utxo_cohorts.amount_range;
    let supplies: Vec<&_> = cohorts
        .iter()
        .map(|cohort| &cohort.metrics.supply.total.sats.height)
        .collect();
    let counts: Vec<&_> = cohorts
        .iter()
        .map(|cohort| &cohort.metrics.outputs.utxo_count.height)
        .collect();

    // Nothing to compute without cohorts (or if the inputs somehow mismatch).
    if supplies.is_empty() || supplies.len() != counts.len() {
        return Ok(());
    }

    // Accumulate every source version so the output resets when any input changes.
    let source_version = supplies
        .iter()
        .fold(Version::ZERO, |acc, vec| acc + vec.version())
        + counts
            .iter()
            .fold(Version::ZERO, |acc, vec| acc + vec.version());
    gini.bps
        .height
        .validate_computed_version_or_reset(source_version)?;
    gini.bps.height.truncate_if_needed_at(
        gini.bps
            .height
            .len()
            .min(starting_indexes.height.to_usize()),
    )?;

    // Only process heights present in *every* input vector.
    let total_heights = supplies
        .iter()
        .map(|vec| vec.len())
        .min()
        .unwrap_or(0)
        .min(counts.iter().map(|vec| vec.len()).min().unwrap_or(0));
    let start_height = gini.bps.height.len();
    if start_height >= total_heights {
        return Ok(());
    }

    // One batched read per vector for the whole range [start_height, total_heights),
    // then iterate height by height over the in-memory rows.
    let cohort_count = supplies.len();
    let supply_rows: Vec<Vec<Sats>> = supplies
        .iter()
        .map(|vec| vec.collect_range_at(start_height, total_heights))
        .collect();
    let count_rows: Vec<Vec<StoredU64>> = counts
        .iter()
        .map(|vec| vec.collect_range_at(start_height, total_heights))
        .collect();

    // Reused Lorenz-bucket buffer: (utxo count, supply) per cohort.
    let mut lorenz_buckets: Vec<(u64, u64)> = Vec::with_capacity(cohort_count);
    for offset in 0..total_heights - start_height {
        lorenz_buckets.clear();
        lorenz_buckets.extend(count_rows.iter().zip(supply_rows.iter()).map(
            |(cohort_counts, cohort_supplies)| {
                let count: u64 = cohort_counts[offset].into();
                let supply: u64 = cohort_supplies[offset].into();
                (count, supply)
            },
        ));
        gini.bps.height.push(gini_from_lorenz(&lorenz_buckets));
    }

    // Write under the exit lock so shutdown cannot interrupt the flush.
    {
        let _lock = exit.lock();
        gini.bps.height.write()?;
    }
    Ok(())
}
/// Computes the Gini coefficient from Lorenz buckets of `(count, supply)`
/// pairs via trapezoidal integration: G = 1 - 2 * area under the Lorenz curve.
///
/// NOTE(review): the trapezoid formula assumes `buckets` is ordered by
/// increasing supply-per-holder (as amount-range cohorts are) — confirm at
/// the call site if cohort ordering ever changes.
fn gini_from_lorenz(buckets: &[(u64, u64)]) -> BasisPoints16 {
    let population: u64 = buckets.iter().map(|&(count, _)| count).sum();
    let wealth: u64 = buckets.iter().map(|&(_, supply)| supply).sum();
    // Degenerate distribution: nothing held or nobody holding → zero inequality.
    if population == 0 || wealth == 0 {
        return BasisPoints16::ZERO;
    }

    let population_f = population as f64;
    let wealth_f = wealth as f64;
    let mut seen_count = 0u64;
    let mut seen_supply = 0u64;
    let mut area_under_curve = 0.0f64;
    for &(count, supply) in buckets {
        // Trapezoid between the previous and the current Lorenz point.
        let x0 = seen_count as f64 / population_f;
        let y0 = seen_supply as f64 / wealth_f;
        seen_count += count;
        seen_supply += supply;
        let x1 = seen_count as f64 / population_f;
        let y1 = seen_supply as f64 / wealth_f;
        area_under_curve += (x1 - x0) * (y0 + y1) / 2.0;
    }
    BasisPoints16::from(1.0 - 2.0 * area_under_curve)
}

View File

@@ -0,0 +1,38 @@
use std::path::Path;
use brk_error::Result;
use brk_types::Version;
use super::Vecs;
use crate::{
indexes,
internal::{finalize_db, open_db, PercentPerBlock, RatioPerBlock},
};
const VERSION: Version = Version::new(1);
impl Vecs {
    /// Opens (or creates) the indicators database under `parent_path` and
    /// imports every indicator vector, versioned as `parent_version + VERSION`.
    ///
    /// # Errors
    /// Propagates any failure from opening the database, importing a vector,
    /// or finalizing the database.
    pub(crate) fn forced_import(
        parent_path: &Path,
        parent_version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let db = open_db(parent_path, super::DB_NAME, 100_000)?;
        let version = parent_version + VERSION;

        let vecs = Self {
            puell_multiple: RatioPerBlock::forced_import_raw(
                &db,
                "puell_multiple",
                version,
                indexes,
            )?,
            nvt: RatioPerBlock::forced_import_raw(&db, "nvt", version, indexes)?,
            gini: PercentPerBlock::forced_import(&db, "gini", version, indexes)?,
            rhodl_ratio: RatioPerBlock::forced_import_raw(&db, "rhodl_ratio", version, indexes)?,
            db,
        };
        finalize_db(&vecs.db, &vecs)?;
        Ok(vecs)
    }
}

View File

@@ -0,0 +1,8 @@
//! On-chain indicator vectors: Puell Multiple, NVT, Gini coefficient and
//! RHODL Ratio (see `compute` for the formulas).

mod compute;
mod gini;
mod import;
mod vecs;
pub use vecs::Vecs;
// Name of the database backing this module's vectors.
pub const DB_NAME: &str = "indicators";

View File

@@ -0,0 +1,15 @@
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, BasisPoints32};
use vecdb::{Database, Rw, StorageMode};
use crate::internal::{PercentPerBlock, RatioPerBlock};
/// All indicator vectors stored in the `indicators` database.
///
/// `M` selects the storage mode (read-write by default).
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
    // Backing database handle; excluded from traversal.
    #[traversable(skip)]
    pub(crate) db: Database,
    /// Puell Multiple: daily subsidy USD / 365d SMA, in 32-bit basis points.
    pub puell_multiple: RatioPerBlock<BasisPoints32, M>,
    /// NVT: market cap / 24h transfer volume, in 32-bit basis points.
    pub nvt: RatioPerBlock<BasisPoints32, M>,
    /// Gini coefficient of the UTXO distribution, in 16-bit basis points.
    pub gini: PercentPerBlock<BasisPoints16, M>,
    /// RHODL Ratio: 1d-1w realized cap / 1y-2y realized cap, in 32-bit basis points.
    pub rhodl_ratio: RatioPerBlock<BasisPoints32, M>,
}