global: snapshot

This commit is contained in:
nym21
2026-03-07 13:00:10 +01:00
parent bf07570848
commit 1011825949
87 changed files with 1304 additions and 1201 deletions

View File

@@ -1397,6 +1397,44 @@ impl RatioPattern {
}
}
/// Pattern struct for repeated tree structure.
///
/// Groups the unrealized-P&L family of metrics for one accumulated metric
/// name: gross/net P&L, unrealized profit/loss (and its negated form),
/// invested capital split by profit vs. loss (with raw cents-sats
/// variants), investor cap raw series, and supply in profit/loss.
pub struct GrossInvestedInvestorNegNetSupplyUnrealizedPattern {
    pub gross_pnl: CentsUsdPattern,
    pub invested_capital_in_loss: CentsUsdPattern,
    // Raw variants carry the unconverted cents-sats series.
    pub invested_capital_in_loss_raw: MetricPattern18<CentsSats>,
    pub invested_capital_in_profit: CentsUsdPattern,
    pub invested_capital_in_profit_raw: MetricPattern18<CentsSats>,
    pub investor_cap_in_loss_raw: MetricPattern18<CentsSquaredSats>,
    pub investor_cap_in_profit_raw: MetricPattern18<CentsSquaredSats>,
    // Negated loss is dollar-denominated, unlike its CentsUsd siblings.
    pub neg_unrealized_loss: MetricPattern1<Dollars>,
    pub net_unrealized_pnl: CentsUsdPattern,
    pub supply_in_loss: BtcCentsSatsUsdPattern,
    pub supply_in_profit: BtcCentsSatsUsdPattern,
    pub unrealized_loss: CentsUsdPattern,
    pub unrealized_profit: CentsUsdPattern,
}
impl GrossInvestedInvestorNegNetSupplyUnrealizedPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
gross_pnl: CentsUsdPattern::new(client.clone(), _m(&acc, "unrealized_gross_pnl")),
invested_capital_in_loss: CentsUsdPattern::new(client.clone(), _m(&acc, "invested_capital_in_loss")),
invested_capital_in_loss_raw: MetricPattern18::new(client.clone(), _m(&acc, "invested_capital_in_loss_raw")),
invested_capital_in_profit: CentsUsdPattern::new(client.clone(), _m(&acc, "invested_capital_in_profit")),
invested_capital_in_profit_raw: MetricPattern18::new(client.clone(), _m(&acc, "invested_capital_in_profit_raw")),
investor_cap_in_loss_raw: MetricPattern18::new(client.clone(), _m(&acc, "investor_cap_in_loss_raw")),
investor_cap_in_profit_raw: MetricPattern18::new(client.clone(), _m(&acc, "investor_cap_in_profit_raw")),
neg_unrealized_loss: MetricPattern1::new(client.clone(), _m(&acc, "neg_unrealized_loss")),
net_unrealized_pnl: CentsUsdPattern::new(client.clone(), _m(&acc, "net_unrealized_pnl")),
supply_in_loss: BtcCentsSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_loss")),
supply_in_profit: BtcCentsSatsUsdPattern::new(client.clone(), _m(&acc, "supply_in_profit")),
unrealized_loss: CentsUsdPattern::new(client.clone(), _m(&acc, "unrealized_loss")),
unrealized_profit: CentsUsdPattern::new(client.clone(), _m(&acc, "unrealized_profit")),
}
}
}
/// Pattern struct for repeated tree structure.
pub struct _10y1m1w1y2y3m3y4y5y6m6y8yPattern2 {
pub _10y: BpsPercentRatioPattern,
@@ -1639,30 +1677,30 @@ impl AverageGainsLossesRsiStochPattern {
/// Pattern struct for repeated tree structure.
pub struct AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern {
pub all: ChangeCountPattern,
pub p2a: ChangeCountPattern,
pub p2pk33: ChangeCountPattern,
pub p2pk65: ChangeCountPattern,
pub p2pkh: ChangeCountPattern,
pub p2sh: ChangeCountPattern,
pub p2tr: ChangeCountPattern,
pub p2wpkh: ChangeCountPattern,
pub p2wsh: ChangeCountPattern,
pub all: MetricPattern1<StoredU64>,
pub p2a: MetricPattern1<StoredU64>,
pub p2pk33: MetricPattern1<StoredU64>,
pub p2pk65: MetricPattern1<StoredU64>,
pub p2pkh: MetricPattern1<StoredU64>,
pub p2sh: MetricPattern1<StoredU64>,
pub p2tr: MetricPattern1<StoredU64>,
pub p2wpkh: MetricPattern1<StoredU64>,
pub p2wsh: MetricPattern1<StoredU64>,
}
impl AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
all: ChangeCountPattern::new(client.clone(), acc.clone()),
p2a: ChangeCountPattern::new(client.clone(), _p("p2a", &acc)),
p2pk33: ChangeCountPattern::new(client.clone(), _p("p2pk33", &acc)),
p2pk65: ChangeCountPattern::new(client.clone(), _p("p2pk65", &acc)),
p2pkh: ChangeCountPattern::new(client.clone(), _p("p2pkh", &acc)),
p2sh: ChangeCountPattern::new(client.clone(), _p("p2sh", &acc)),
p2tr: ChangeCountPattern::new(client.clone(), _p("p2tr", &acc)),
p2wpkh: ChangeCountPattern::new(client.clone(), _p("p2wpkh", &acc)),
p2wsh: ChangeCountPattern::new(client.clone(), _p("p2wsh", &acc)),
all: MetricPattern1::new(client.clone(), acc.clone()),
p2a: MetricPattern1::new(client.clone(), _p("p2a", &acc)),
p2pk33: MetricPattern1::new(client.clone(), _p("p2pk33", &acc)),
p2pk65: MetricPattern1::new(client.clone(), _p("p2pk65", &acc)),
p2pkh: MetricPattern1::new(client.clone(), _p("p2pkh", &acc)),
p2sh: MetricPattern1::new(client.clone(), _p("p2sh", &acc)),
p2tr: MetricPattern1::new(client.clone(), _p("p2tr", &acc)),
p2wpkh: MetricPattern1::new(client.clone(), _p("p2wpkh", &acc)),
p2wsh: MetricPattern1::new(client.clone(), _p("p2wsh", &acc)),
}
}
}
@@ -1697,36 +1735,6 @@ impl AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 {
}
}
/// Pattern struct for repeated tree structure.
///
/// Distribution-style node in basis points: a raw per-height series
/// (`height`, stored at the accumulated metric name itself) plus
/// windowed variants for average, max, median, min and the
/// 10/25/75/90 percentiles.
pub struct AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern {
    pub average: _1m1w1y24hPattern<BasisPoints16>,
    // Unsuffixed series: constructed with `acc` directly (see `new`).
    pub height: MetricPattern18<BasisPoints16>,
    pub max: _1m1w1y24hPattern<BasisPoints16>,
    pub median: _1m1w1y24hPattern<BasisPoints16>,
    pub min: _1m1w1y24hPattern<BasisPoints16>,
    pub pct10: _1m1w1y24hPattern<BasisPoints16>,
    pub pct25: _1m1w1y24hPattern<BasisPoints16>,
    pub pct75: _1m1w1y24hPattern<BasisPoints16>,
    pub pct90: _1m1w1y24hPattern<BasisPoints16>,
}
impl AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern {
    /// Create a new pattern node with accumulated metric name.
    ///
    /// `height` keeps the accumulated name as-is; the windowed children
    /// append a statistic suffix. Note the percentile fields use the
    /// short `p10`/`p25`/`p75`/`p90` suffixes, not `pct…`.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        let average = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "average"));
        let height = MetricPattern18::new(Arc::clone(&client), acc.clone());
        let max = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "max"));
        let median = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "median"));
        let min = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "min"));
        let pct10 = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "p10"));
        let pct25 = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "p25"));
        let pct75 = _1m1w1y24hPattern::new(Arc::clone(&client), _m(&acc, "p75"));
        let pct90 = _1m1w1y24hPattern::new(client, _m(&acc, "p90"));
        Self {
            average,
            height,
            max,
            median,
            min,
            pct10,
            pct25,
            pct75,
            pct90,
        }
    }
}
/// Pattern struct for repeated tree structure.
pub struct AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern {
pub average: _1m1w1y24hPattern<StoredU64>,
@@ -1875,7 +1883,7 @@ pub struct ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern {
pub realized: MvrvNegNetRealizedSentSoprValuePattern,
pub relative: InvestedNetNuplSupplyUnrealizedPattern,
pub supply: ChangeHalvedTotalPattern,
pub unrealized: GreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern,
pub unrealized: GrossInvestedInvestorNegNetSupplyUnrealizedPattern,
}
impl ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern {
@@ -1888,7 +1896,7 @@ impl ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern {
realized: MvrvNegNetRealizedSentSoprValuePattern::new(client.clone(), acc.clone()),
relative: InvestedNetNuplSupplyUnrealizedPattern::new(client.clone(), acc.clone()),
supply: ChangeHalvedTotalPattern::new(client.clone(), _m(&acc, "supply")),
unrealized: GreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern::new(client.clone(), acc.clone()),
unrealized: GrossInvestedInvestorNegNetSupplyUnrealizedPattern::new(client.clone(), acc.clone()),
}
}
}
@@ -2277,24 +2285,6 @@ impl BaseCumulativeSumPattern {
}
}
/// Pattern struct for repeated tree structure.
///
/// Ratio-style node: a basis-points distribution child (suffix `bps`),
/// a percent series stored at the accumulated name itself, and a
/// `ratio` series.
pub struct BpsPercentRatioPattern3 {
    pub bps: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern,
    // Unsuffixed series: constructed with `acc` directly (see `new`).
    pub percent: MetricPattern1<StoredF32>,
    pub ratio: MetricPattern1<StoredF32>,
}
impl BpsPercentRatioPattern3 {
    /// Create a new pattern node with accumulated metric name.
    ///
    /// `percent` lives at the accumulated name itself; `bps` and `ratio`
    /// add their own suffixes via `_m`.
    pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
        let bps = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern::new(
            Arc::clone(&client),
            _m(&acc, "bps"),
        );
        let percent = MetricPattern1::new(Arc::clone(&client), acc.clone());
        let ratio = MetricPattern1::new(client, _m(&acc, "ratio"));
        Self { bps, percent, ratio }
    }
}
/// Pattern struct for repeated tree structure.
pub struct BpsPercentRatioPattern {
pub bps: MetricPattern1<BasisPoints16>,
@@ -2518,17 +2508,17 @@ impl CentsUsdPattern {
}
/// Pattern struct for repeated tree structure.
pub struct ChangeCountPattern {
pub change_1m: MetricPattern1<StoredF64>,
pub count: MetricPattern1<StoredU64>,
pub struct ChangeRatePattern {
pub change: _1m1w1y24hPattern<StoredU64>,
pub rate: _1m1w1y24hPattern2,
}
impl ChangeCountPattern {
impl ChangeRatePattern {
/// Create a new pattern node with accumulated metric name.
pub fn new(client: Arc<BrkClientBase>, acc: String) -> Self {
Self {
change_1m: MetricPattern1::new(client.clone(), _m(&acc, "change_1m")),
count: MetricPattern1::new(client.clone(), acc.clone()),
change: _1m1w1y24hPattern::new(client.clone(), _m(&acc, "change")),
rate: _1m1w1y24hPattern2::new(client.clone(), _m(&acc, "rate")),
}
}
}
@@ -5409,9 +5399,9 @@ pub struct MetricsTree_Distribution {
pub addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern,
pub empty_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern,
pub address_activity: MetricsTree_Distribution_AddressActivity,
pub total_addr_count: MetricsTree_Distribution_TotalAddrCount,
pub total_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern,
pub new_addr_count: MetricsTree_Distribution_NewAddrCount,
pub growth_rate: MetricsTree_Distribution_GrowthRate,
pub delta: MetricsTree_Distribution_Delta,
pub fundedaddressindex: MetricPattern34<FundedAddressIndex>,
pub emptyaddressindex: MetricPattern35<EmptyAddressIndex>,
}
@@ -5427,9 +5417,9 @@ impl MetricsTree_Distribution {
addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern::new(client.clone(), "addr_count".to_string()),
empty_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern::new(client.clone(), "empty_addr_count".to_string()),
address_activity: MetricsTree_Distribution_AddressActivity::new(client.clone(), format!("{base_path}_address_activity")),
total_addr_count: MetricsTree_Distribution_TotalAddrCount::new(client.clone(), format!("{base_path}_total_addr_count")),
total_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern::new(client.clone(), "total_addr_count".to_string()),
new_addr_count: MetricsTree_Distribution_NewAddrCount::new(client.clone(), format!("{base_path}_new_addr_count")),
growth_rate: MetricsTree_Distribution_GrowthRate::new(client.clone(), format!("{base_path}_growth_rate")),
delta: MetricsTree_Distribution_Delta::new(client.clone(), format!("{base_path}_delta")),
fundedaddressindex: MetricPattern34::new(client.clone(), "fundedaddressindex".to_string()),
emptyaddressindex: MetricPattern35::new(client.clone(), "emptyaddressindex".to_string()),
}
@@ -6194,35 +6184,6 @@ impl MetricsTree_Distribution_AddressActivity {
}
}
/// Metrics tree node.
///
/// Total address count, overall (`all`) and per address type. The
/// underlying metric names are fixed strings of the form
/// `total_addr_count` / `<type>_total_addr_count` (see `new`).
pub struct MetricsTree_Distribution_TotalAddrCount {
    pub all: MetricPattern1<StoredU64>,
    pub p2pk65: MetricPattern1<StoredU64>,
    pub p2pk33: MetricPattern1<StoredU64>,
    pub p2pkh: MetricPattern1<StoredU64>,
    pub p2sh: MetricPattern1<StoredU64>,
    pub p2wpkh: MetricPattern1<StoredU64>,
    pub p2wsh: MetricPattern1<StoredU64>,
    pub p2tr: MetricPattern1<StoredU64>,
    pub p2a: MetricPattern1<StoredU64>,
}
impl MetricsTree_Distribution_TotalAddrCount {
    /// Build the node. All metric names are fixed strings, so
    /// `base_path` is accepted only for signature consistency with
    /// sibling tree nodes and is otherwise unused.
    pub fn new(client: Arc<BrkClientBase>, base_path: String) -> Self {
        let _ = base_path;
        // Small helper so each field line only states its metric name.
        let metric = |name: &str| MetricPattern1::new(Arc::clone(&client), name.to_string());
        Self {
            all: metric("total_addr_count"),
            p2pk65: metric("p2pk65_total_addr_count"),
            p2pk33: metric("p2pk33_total_addr_count"),
            p2pkh: metric("p2pkh_total_addr_count"),
            p2sh: metric("p2sh_total_addr_count"),
            p2wpkh: metric("p2wpkh_total_addr_count"),
            p2wsh: metric("p2wsh_total_addr_count"),
            p2tr: metric("p2tr_total_addr_count"),
            p2a: metric("p2a_total_addr_count"),
        }
    }
}
/// Metrics tree node.
pub struct MetricsTree_Distribution_NewAddrCount {
pub all: AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern,
@@ -6253,30 +6214,30 @@ impl MetricsTree_Distribution_NewAddrCount {
}
/// Metrics tree node.
pub struct MetricsTree_Distribution_GrowthRate {
pub all: BpsPercentRatioPattern3,
pub p2pk65: BpsPercentRatioPattern3,
pub p2pk33: BpsPercentRatioPattern3,
pub p2pkh: BpsPercentRatioPattern3,
pub p2sh: BpsPercentRatioPattern3,
pub p2wpkh: BpsPercentRatioPattern3,
pub p2wsh: BpsPercentRatioPattern3,
pub p2tr: BpsPercentRatioPattern3,
pub p2a: BpsPercentRatioPattern3,
pub struct MetricsTree_Distribution_Delta {
pub all: ChangeRatePattern,
pub p2pk65: ChangeRatePattern,
pub p2pk33: ChangeRatePattern,
pub p2pkh: ChangeRatePattern,
pub p2sh: ChangeRatePattern,
pub p2wpkh: ChangeRatePattern,
pub p2wsh: ChangeRatePattern,
pub p2tr: ChangeRatePattern,
pub p2a: ChangeRatePattern,
}
impl MetricsTree_Distribution_GrowthRate {
impl MetricsTree_Distribution_Delta {
pub fn new(client: Arc<BrkClientBase>, base_path: String) -> Self {
Self {
all: BpsPercentRatioPattern3::new(client.clone(), "growth_rate".to_string()),
p2pk65: BpsPercentRatioPattern3::new(client.clone(), "p2pk65_growth_rate".to_string()),
p2pk33: BpsPercentRatioPattern3::new(client.clone(), "p2pk33_growth_rate".to_string()),
p2pkh: BpsPercentRatioPattern3::new(client.clone(), "p2pkh_growth_rate".to_string()),
p2sh: BpsPercentRatioPattern3::new(client.clone(), "p2sh_growth_rate".to_string()),
p2wpkh: BpsPercentRatioPattern3::new(client.clone(), "p2wpkh_growth_rate".to_string()),
p2wsh: BpsPercentRatioPattern3::new(client.clone(), "p2wsh_growth_rate".to_string()),
p2tr: BpsPercentRatioPattern3::new(client.clone(), "p2tr_growth_rate".to_string()),
p2a: BpsPercentRatioPattern3::new(client.clone(), "p2a_growth_rate".to_string()),
all: ChangeRatePattern::new(client.clone(), "addr_count".to_string()),
p2pk65: ChangeRatePattern::new(client.clone(), "p2pk65_addr_count".to_string()),
p2pk33: ChangeRatePattern::new(client.clone(), "p2pk33_addr_count".to_string()),
p2pkh: ChangeRatePattern::new(client.clone(), "p2pkh_addr_count".to_string()),
p2sh: ChangeRatePattern::new(client.clone(), "p2sh_addr_count".to_string()),
p2wpkh: ChangeRatePattern::new(client.clone(), "p2wpkh_addr_count".to_string()),
p2wsh: ChangeRatePattern::new(client.clone(), "p2wsh_addr_count".to_string()),
p2tr: ChangeRatePattern::new(client.clone(), "p2tr_addr_count".to_string()),
p2a: ChangeRatePattern::new(client.clone(), "p2a_addr_count".to_string()),
}
}
}

View File

@@ -1,7 +1,7 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Indexes, StoredF64, StoredU64, Version};
use brk_types::{Height, Indexes, StoredU64, Version};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{
@@ -9,15 +9,12 @@ use vecdb::{
WritableVec,
};
use crate::{blocks, indexes, internal::ComputedFromHeight};
use crate::{indexes, internal::ComputedFromHeight};
/// Address count with 1m change metric for a single type.
#[derive(Traversable)]
pub struct AddrCountVecs<M: StorageMode = Rw> {
#[traversable(flatten)]
pub count: ComputedFromHeight<StoredU64, M>,
pub change_1m: ComputedFromHeight<StoredF64, M>,
}
#[derive(Deref, DerefMut, Traversable)]
pub struct AddrCountVecs<M: StorageMode = Rw>(
#[traversable(flatten)] pub ComputedFromHeight<StoredU64, M>,
);
impl AddrCountVecs {
pub(crate) fn forced_import(
@@ -26,31 +23,9 @@ impl AddrCountVecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
Ok(Self {
count: ComputedFromHeight::forced_import(db, name, version, indexes)?,
change_1m: ComputedFromHeight::forced_import(
db,
&format!("{name}_change_1m"),
version,
indexes,
)?,
})
}
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.change_1m.height.compute_rolling_change(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.count.height,
exit,
)?;
Ok(())
Ok(Self(ComputedFromHeight::forced_import(
db, name, version, indexes,
)?))
}
}
@@ -72,56 +47,48 @@ impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount {
Self(ByAddressType {
p2pk65: groups
.p2pk65
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2pk33: groups
.p2pk33
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2pkh: groups
.p2pkh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2sh: groups
.p2sh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2wpkh: groups
.p2wpkh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2wsh: groups
.p2wsh
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2tr: groups
.p2tr
.count
.height
.collect_one(prev_height)
.unwrap()
.into(),
p2a: groups
.p2a
.count
.height
.collect_one(prev_height)
.unwrap()
@@ -133,7 +100,7 @@ impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount {
}
}
/// Address count per address type, with height + derived indexes + 1m change.
/// Address count per address type, with height + derived indexes.
#[derive(Deref, DerefMut, Traversable)]
pub struct AddressTypeToAddrCountVecs<M: StorageMode = Rw>(ByAddressType<AddrCountVecs<M>>);
@@ -159,7 +126,7 @@ impl AddressTypeToAddrCountVecs {
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.0.values().map(|v| v.count.height.len()).min().unwrap()
self.0.values().map(|v| v.height.len()).min().unwrap()
}
pub(crate) fn par_iter_height_mut(
@@ -167,7 +134,7 @@ impl AddressTypeToAddrCountVecs {
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
self.0
.par_values_mut()
.map(|v| &mut v.count.height as &mut dyn AnyStoredVec)
.map(|v| &mut v.height as &mut dyn AnyStoredVec)
}
pub(crate) fn truncate_push_height(
@@ -176,7 +143,7 @@ impl AddressTypeToAddrCountVecs {
addr_counts: &AddressTypeToAddressCount,
) -> Result<()> {
for (vecs, &count) in self.0.values_mut().zip(addr_counts.values()) {
vecs.count.height.truncate_push(height, count.into())?;
vecs.height.truncate_push(height, count.into())?;
}
Ok(())
}
@@ -184,25 +151,13 @@ impl AddressTypeToAddrCountVecs {
pub(crate) fn reset_height(&mut self) -> Result<()> {
use vecdb::WritableVec;
for v in self.0.values_mut() {
v.count.height.reset()?;
}
Ok(())
}
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
for v in self.0.values_mut() {
v.compute_rest(blocks, starting_indexes, exit)?;
v.height.reset()?;
}
Ok(())
}
pub(crate) fn by_height(&self) -> Vec<&EagerVec<PcoVec<Height, StoredU64>>> {
self.0.values().map(|v| &v.count.height).collect()
self.0.values().map(|v| &v.height).collect()
}
}
@@ -227,22 +182,18 @@ impl AddrCountsVecs {
}
pub(crate) fn min_stateful_height(&self) -> usize {
self.all
.count
.height
.len()
.min(self.by_addresstype.min_stateful_height())
self.all.height.len().min(self.by_addresstype.min_stateful_height())
}
pub(crate) fn par_iter_height_mut(
&mut self,
) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
rayon::iter::once(&mut self.all.count.height as &mut dyn AnyStoredVec)
rayon::iter::once(&mut self.all.height as &mut dyn AnyStoredVec)
.chain(self.by_addresstype.par_iter_height_mut())
}
pub(crate) fn reset_height(&mut self) -> Result<()> {
self.all.count.height.reset()?;
self.all.height.reset()?;
self.by_addresstype.reset_height()?;
Ok(())
}
@@ -253,7 +204,7 @@ impl AddrCountsVecs {
total: u64,
addr_counts: &AddressTypeToAddressCount,
) -> Result<()> {
self.all.count.height.truncate_push(height, total.into())?;
self.all.height.truncate_push(height, total.into())?;
self.by_addresstype
.truncate_push_height(height, addr_counts)?;
Ok(())
@@ -261,26 +212,13 @@ impl AddrCountsVecs {
pub(crate) fn compute_rest(
&mut self,
blocks: &blocks::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.by_addresstype
.compute_rest(blocks, starting_indexes, exit)?;
let sources = self.by_addresstype.by_height();
self.all
.count
.height
.compute_sum_of_others(starting_indexes.height, &sources, exit)?;
self.all.change_1m.height.compute_rolling_change(
starting_indexes.height,
&blocks.count.height_1m_ago,
&self.all.count.height,
exit,
)?;
Ok(())
}
}

View File

@@ -0,0 +1,61 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredI64, StoredU64, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{WindowStarts, RollingDelta},
};
use super::AddrCountsVecs;
/// Rolling deltas over the address-count series: one aggregate
/// (`addr_count`) plus one per address type (`<type>_addr_count`).
#[derive(Traversable)]
pub struct DeltaVecs<M: StorageMode = Rw> {
    // Delta over the aggregate address count.
    pub all: RollingDelta<StoredU64, StoredI64, M>,
    // Per-address-type deltas, flattened into the traversable tree.
    #[traversable(flatten)]
    pub by_addresstype: ByAddressType<RollingDelta<StoredU64, StoredI64, M>>,
}
impl DeltaVecs {
    /// Import (or create) the delta vectors: one for the aggregate
    /// `addr_count` series and one per address type. The stored version
    /// is bumped by one relative to the caller's.
    pub(crate) fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let version = version + Version::ONE;
        Ok(Self {
            all: RollingDelta::forced_import(db, "addr_count", version, indexes)?,
            by_addresstype: ByAddressType::new_with_name(|name| {
                RollingDelta::forced_import(db, &format!("{name}_addr_count"), version, indexes)
            })?,
        })
    }

    /// Compute deltas from the current address-count vectors: the
    /// aggregate first, then each address type paired with its source
    /// series in lockstep.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        addr_count: &AddrCountsVecs,
        exit: &Exit,
    ) -> Result<()> {
        self.all
            .compute(max_from, windows, &addr_count.all.height, exit)?;
        self.by_addresstype
            .iter_mut()
            .zip(addr_count.by_addresstype.iter())
            .try_for_each(|((_, delta), (_, source))| {
                delta.compute(max_from, windows, &source.height, exit)
            })
    }
}

View File

@@ -1,99 +0,0 @@
use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Height, StoredU64, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{PercentFromHeightDistribution, WindowStarts},
};
use super::{AddrCountsVecs, NewAddrCountVecs};
/// Growth rate: new_addr_count / addr_count (global + per-type)
///
/// Stored as basis points; a zero denominator maps to 0 rather than
/// dividing by zero (see `compute_ratio`).
#[derive(Traversable)]
pub struct GrowthRateVecs<M: StorageMode = Rw> {
    // Global growth-rate series (`growth_rate`).
    pub all: PercentFromHeightDistribution<BasisPoints16, M>,
    // One series per address type (`<type>_growth_rate`).
    #[traversable(flatten)]
    pub by_addresstype: ByAddressType<PercentFromHeightDistribution<BasisPoints16, M>>,
}
impl GrowthRateVecs {
    /// Import (or create) the growth-rate percent vectors for the
    /// global aggregate and for each address type.
    pub(crate) fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            all: PercentFromHeightDistribution::forced_import(db, "growth_rate", version, indexes)?,
            by_addresstype: ByAddressType::new_with_name(|name| {
                PercentFromHeightDistribution::forced_import(
                    db,
                    &format!("{name}_growth_rate"),
                    version,
                    indexes,
                )
            })?,
        })
    }

    /// Compute each growth-rate series as `new_addr_count / addr_count`
    /// (via `compute_ratio`): the global pair first, then every address
    /// type zipped with its matching numerator/denominator sources.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        new_addr_count: &NewAddrCountVecs,
        addr_count: &AddrCountsVecs,
        exit: &Exit,
    ) -> Result<()> {
        self.all.compute(max_from, windows, exit, |target| {
            compute_ratio(
                target,
                max_from,
                &new_addr_count.all.height,
                &addr_count.all.count.height,
                exit,
            )
        })?;
        // Pair each per-type target with its (numerator, denominator) sources.
        let sources = new_addr_count
            .by_addresstype
            .iter()
            .zip(addr_count.by_addresstype.iter());
        for ((_, rate), ((_, created), (_, existing))) in self.by_addresstype.iter_mut().zip(sources)
        {
            rate.compute(max_from, windows, exit, |target| {
                compute_ratio(target, max_from, &created.height, &existing.count.height, exit)
            })?;
        }
        Ok(())
    }
}
/// Per-height ratio of `numerator` to `denominator`, written into
/// `target` as basis points. A zero denominator yields 0 instead of a
/// division by zero.
fn compute_ratio(
    target: &mut EagerVec<PcoVec<Height, BasisPoints16>>,
    max_from: Height,
    numerator: &impl ReadableVec<Height, StoredU64>,
    denominator: &impl ReadableVec<Height, StoredU64>,
    exit: &Exit,
) -> Result<()> {
    target.compute_transform2(
        max_from,
        numerator,
        denominator,
        |(h, num, den, ..)| {
            let den = *den as f64;
            // Guard the empty-denominator case; otherwise a plain quotient.
            let ratio = if den == 0.0 { 0.0 } else { *num as f64 / den };
            (h, BasisPoints16::from(ratio))
        },
        exit,
    )
}

View File

@@ -1,7 +1,7 @@
mod activity;
mod address_count;
mod data;
mod growth_rate;
mod delta;
mod indexes;
mod new_addr_count;
mod total_addr_count;
@@ -10,7 +10,7 @@ mod type_map;
pub use activity::{AddressActivityVecs, AddressTypeToActivityCounts};
pub use address_count::{AddrCountsVecs, AddressTypeToAddressCount};
pub use data::AddressesDataVecs;
pub use growth_rate::GrowthRateVecs;
pub use delta::DeltaVecs;
pub use indexes::AnyAddressIndexesVecs;
pub use new_addr_count::NewAddrCountVecs;
pub use total_addr_count::TotalAddrCountVecs;

View File

@@ -50,8 +50,8 @@ impl TotalAddrCountVecs {
) -> Result<()> {
self.all.height.compute_add(
max_from,
&addr_count.all.count.height,
&empty_addr_count.all.count.height,
&addr_count.all.height,
&empty_addr_count.all.height,
exit,
)?;
@@ -63,7 +63,7 @@ impl TotalAddrCountVecs {
) {
total
.height
.compute_add(max_from, &addr.count.height, &empty.count.height, exit)?;
.compute_add(max_from, &addr.height, &empty.height, exit)?;
}
Ok(())

View File

@@ -378,46 +378,6 @@ impl UTXOCohorts<Rw> {
.try_for_each(|v| v.compute_rest_part1(blocks, prices, starting_indexes, exit))?;
}
// 2. Compute net_sentiment.height for aggregate cohorts (weighted average).
// Separate cohorts already computed net_sentiment in step 1 (inside compute_rest_part1).
// Note: min_age, max_age, epoch, class are Core tier — no net_sentiment.
// Note: ge_amount, lt_amount, amount_range are Minimal tier — no net_sentiment.
{
let Self {
all,
sth,
lth,
age_range,
..
} = self;
let ar = &*age_range;
let si = starting_indexes;
let tasks: Vec<Box<dyn FnOnce() -> Result<()> + Send + '_>> = vec![
Box::new(|| {
let sources = filter_sources_from(ar.iter(), None);
all.metrics
.compute_net_sentiment_from_others(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(sth.metrics.filter()));
sth.metrics
.compute_net_sentiment_from_others(si, &sources, exit)
}),
Box::new(|| {
let sources = filter_sources_from(ar.iter(), Some(lth.metrics.filter()));
lth.metrics
.compute_net_sentiment_from_others(si, &sources, exit)
}),
];
tasks
.into_par_iter()
.map(|f| f())
.collect::<Result<Vec<_>>>()?;
}
Ok(())
}

View File

@@ -4,7 +4,7 @@ use brk_types::{Bitcoin, Height, Indexes, Sats, StoredF64, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, AnyVec, Exit, Rw, StorageMode, WritableVec};
use crate::internal::ComputedFromHeightCumulativeSum;
use crate::internal::{ComputedFromHeightCumulative, ComputedFromHeightCumulativeSum};
use crate::{blocks, distribution::metrics::ImportConfig, prices};
@@ -17,7 +17,7 @@ pub struct ActivityFull<M: StorageMode = Rw> {
#[traversable(flatten)]
pub base: ActivityBase<M>,
pub coinblocks_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
pub coinblocks_destroyed: ComputedFromHeightCumulative<StoredF64, M>,
pub coindays_destroyed: ComputedFromHeightCumulativeSum<StoredF64, M>,
}
@@ -94,11 +94,10 @@ impl ActivityFull {
self.base
.compute_rest_part1(blocks, prices, starting_indexes, exit)?;
let window_starts = blocks.count.window_starts();
self.coinblocks_destroyed
.compute_rest(starting_indexes.height, &window_starts, exit)?;
.compute_rest(starting_indexes.height, exit)?;
let window_starts = blocks.count.window_starts();
self.coindays_destroyed
.compute_rest(starting_indexes.height, &window_starts, exit)?;

View File

@@ -35,6 +35,7 @@ pub struct AllCohortMetrics<M: StorageMode = Rw> {
impl CohortMetricsBase for AllCohortMetrics {
type RealizedVecs = RealizedFull;
type UnrealizedVecs = UnrealizedFull;
type CostBasisVecs = CostBasisWithExtended;
impl_cohort_accessors!();

View File

@@ -8,7 +8,7 @@ use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityFull, CohortMetricsBase, CostBasisBase, ImportConfig, OutputsMetrics, RealizedBase,
RelativeWithRelToAll, SupplyMetrics, UnrealizedFull,
RelativeWithRelToAll, SupplyMetrics, UnrealizedBase,
};
/// Basic cohort metrics: no extensions, with relative (rel_to_all).
@@ -22,12 +22,13 @@ pub struct BasicCohortMetrics<M: StorageMode = Rw> {
pub activity: Box<ActivityFull<M>>,
pub realized: Box<RealizedBase<M>>,
pub cost_basis: Box<CostBasisBase<M>>,
pub unrealized: Box<UnrealizedFull<M>>,
pub unrealized: Box<UnrealizedBase<M>>,
pub relative: Box<RelativeWithRelToAll<M>>,
}
impl CohortMetricsBase for BasicCohortMetrics {
type RealizedVecs = RealizedBase;
type UnrealizedVecs = UnrealizedBase;
type CostBasisVecs = CostBasisBase;
impl_cohort_accessors!();
@@ -47,7 +48,7 @@ impl CohortMetricsBase for BasicCohortMetrics {
impl BasicCohortMetrics {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let supply = SupplyMetrics::forced_import(cfg)?;
let unrealized = UnrealizedFull::forced_import(cfg)?;
let unrealized = UnrealizedBase::forced_import(cfg)?;
let realized = RealizedBase::forced_import(cfg)?;
let relative = RelativeWithRelToAll::forced_import(cfg)?;

View File

@@ -8,7 +8,7 @@ use crate::{blocks, prices};
use crate::distribution::metrics::{
ActivityBase, CohortMetricsBase, RealizedBase, ImportConfig, OutputsMetrics,
RelativeBaseWithRelToAll, SupplyMetrics, UnrealizedBase,
RelativeBaseWithRelToAll, SupplyMetrics, UnrealizedCore,
};
#[derive(Traversable)]
@@ -19,7 +19,7 @@ pub struct CoreCohortMetrics<M: StorageMode = Rw> {
pub outputs: Box<OutputsMetrics<M>>,
pub activity: Box<ActivityBase<M>>,
pub realized: Box<RealizedBase<M>>,
pub unrealized: Box<UnrealizedBase<M>>,
pub unrealized: Box<UnrealizedCore<M>>,
pub relative: Box<RelativeBaseWithRelToAll<M>>,
}
@@ -31,7 +31,7 @@ impl CoreCohortMetrics {
outputs: Box::new(OutputsMetrics::forced_import(cfg)?),
activity: Box::new(ActivityBase::forced_import(cfg)?),
realized: Box::new(RealizedBase::forced_import(cfg)?),
unrealized: Box::new(UnrealizedBase::forced_import(cfg)?),
unrealized: Box::new(UnrealizedCore::forced_import(cfg)?),
relative: Box::new(RelativeBaseWithRelToAll::forced_import(cfg)?),
})
}
@@ -90,7 +90,7 @@ impl CoreCohortMetrics {
)?;
self.unrealized.compute_from_stateful(
starting_indexes,
&others.iter().map(|v| &v.unrealized_full().base).collect::<Vec<_>>(),
&others.iter().map(|v| &v.unrealized_base().core).collect::<Vec<_>>(),
exit,
)?;

View File

@@ -33,6 +33,7 @@ pub struct ExtendedCohortMetrics<M: StorageMode = Rw> {
impl CohortMetricsBase for ExtendedCohortMetrics {
type RealizedVecs = RealizedFull;
type UnrealizedVecs = UnrealizedFull;
type CostBasisVecs = CostBasisWithExtended;
impl_cohort_accessors!();

View File

@@ -8,6 +8,7 @@ use crate::{blocks, prices};
use crate::distribution::metrics::{
CohortMetricsBase, CostBasisWithExtended, ImportConfig, RealizedAdjusted, RealizedFull,
UnrealizedFull,
};
use super::ExtendedCohortMetrics;
@@ -27,6 +28,7 @@ pub struct ExtendedAdjustedCohortMetrics<M: StorageMode = Rw> {
impl CohortMetricsBase for ExtendedAdjustedCohortMetrics {
type RealizedVecs = RealizedFull;
type UnrealizedVecs = UnrealizedFull;
type CostBasisVecs = CostBasisWithExtended;
impl_cohort_accessors!();

View File

@@ -27,8 +27,8 @@ macro_rules! impl_cohort_accessors {
fn activity_mut(&mut self) -> &mut $crate::distribution::metrics::ActivityFull { &mut self.activity }
fn realized(&self) -> &Self::RealizedVecs { &self.realized }
fn realized_mut(&mut self) -> &mut Self::RealizedVecs { &mut self.realized }
fn unrealized_full(&self) -> &$crate::distribution::metrics::UnrealizedFull { &self.unrealized }
fn unrealized_full_mut(&mut self) -> &mut $crate::distribution::metrics::UnrealizedFull { &mut self.unrealized }
fn unrealized(&self) -> &Self::UnrealizedVecs { &self.unrealized }
fn unrealized_mut(&mut self) -> &mut Self::UnrealizedVecs { &mut self.unrealized }
fn cost_basis(&self) -> &Self::CostBasisVecs { &self.cost_basis }
fn cost_basis_mut(&mut self) -> &mut Self::CostBasisVecs { &mut self.cost_basis }
};
@@ -58,7 +58,7 @@ pub use relative::{
RelativeBaseWithRelToAll, RelativeForAll, RelativeWithExtended, RelativeWithRelToAll,
};
pub use supply::SupplyMetrics;
pub use unrealized::{UnrealizedBase, UnrealizedFull};
pub use unrealized::{UnrealizedBase, UnrealizedCore, UnrealizedFull, UnrealizedLike};
use brk_cohort::Filter;
use brk_error::Result;
@@ -92,6 +92,7 @@ impl<M: StorageMode> CohortMetricsState for AllCohortMetrics<M> {
pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send + Sync {
type RealizedVecs: RealizedLike;
type UnrealizedVecs: UnrealizedLike;
type CostBasisVecs: CostBasisLike;
fn filter(&self) -> &Filter;
@@ -103,8 +104,8 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
fn activity_mut(&mut self) -> &mut ActivityFull;
fn realized(&self) -> &Self::RealizedVecs;
fn realized_mut(&mut self) -> &mut Self::RealizedVecs;
fn unrealized_full(&self) -> &UnrealizedFull;
fn unrealized_full_mut(&mut self) -> &mut UnrealizedFull;
fn unrealized(&self) -> &Self::UnrealizedVecs;
fn unrealized_mut(&mut self) -> &mut Self::UnrealizedVecs;
fn cost_basis(&self) -> &Self::CostBasisVecs;
fn cost_basis_mut(&mut self) -> &mut Self::CostBasisVecs;
@@ -112,6 +113,10 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
fn realized_base(&self) -> &RealizedBase { self.realized().as_base() }
fn realized_base_mut(&mut self) -> &mut RealizedBase { self.realized_mut().as_base_mut() }
/// Convenience: access unrealized as `&UnrealizedBase` (via `UnrealizedLike::as_base`).
fn unrealized_base(&self) -> &UnrealizedBase { self.unrealized().as_base() }
fn unrealized_base_mut(&mut self) -> &mut UnrealizedBase { self.unrealized_mut().as_base_mut() }
/// Convenience: access cost basis as `&CostBasisBase` (via `CostBasisLike::as_base`).
fn cost_basis_base(&self) -> &CostBasisBase { self.cost_basis().as_base() }
fn cost_basis_base_mut(&mut self) -> &mut CostBasisBase { self.cost_basis_mut().as_base_mut() }
@@ -134,7 +139,7 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
self.cost_basis_base_mut()
.truncate_push_minmax(height, state)?;
let unrealized_state = state.compute_unrealized_state(height_price);
self.unrealized_full_mut()
self.unrealized_mut()
.truncate_push(height, &unrealized_state)?;
Ok(())
}
@@ -160,7 +165,7 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
.min(self.outputs().min_len())
.min(self.activity().min_len())
.min(self.realized().min_stateful_height_len())
.min(self.unrealized_full().min_stateful_height_len())
.min(self.unrealized().min_stateful_height_len())
.min(self.cost_basis_base().min_stateful_height_len())
}
@@ -180,31 +185,6 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
Ok(())
}
/// Compute net_sentiment.height as capital-weighted average of component cohorts.
fn compute_net_sentiment_from_others<T: CohortMetricsBase>(
&mut self,
starting_indexes: &Indexes,
others: &[&T],
exit: &Exit,
) -> Result<()> {
let weights: Vec<_> = others
.iter()
.map(|o| &o.realized_base().realized_cap.height)
.collect();
let values: Vec<_> = others
.iter()
.map(|o| &o.unrealized_full().net_sentiment.cents.height)
.collect();
self.unrealized_full_mut()
.net_sentiment
.cents
.height
.compute_weighted_average_of_others(starting_indexes.height, &weights, &values, exit)?;
Ok(())
}
/// First phase of computed metrics (indexes from height).
fn compute_rest_part1(
&mut self,
@@ -234,19 +214,18 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
self.realized_mut()
.compute_rest_part1(starting_indexes, exit)?;
self.unrealized_full_mut()
self.unrealized_mut()
.compute_rest(prices, starting_indexes, exit)?;
Ok(())
}
/// Compute net_sentiment.height for separate cohorts (greed - pain).
fn compute_net_sentiment_height(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.unrealized_full_mut()
self.unrealized_mut()
.compute_net_sentiment_height(starting_indexes, exit)?;
Ok(())
}
@@ -278,9 +257,9 @@ pub trait CohortMetricsBase: CohortMetricsState<Realized = RealizedState> + Send
&others.iter().map(|v| v.realized_base()).collect::<Vec<_>>(),
exit,
)?;
self.unrealized_full_mut().compute_from_stateful(
self.unrealized_base_mut().compute_from_stateful(
starting_indexes,
&others.iter().map(|v| v.unrealized_full()).collect::<Vec<_>>(),
&others.iter().map(|v| v.unrealized_base()).collect::<Vec<_>>(),
exit,
)?;
self.cost_basis_base_mut().compute_from_stateful(

View File

@@ -10,9 +10,8 @@ use crate::{
blocks,
distribution::state::RealizedOps,
internal::{
ComputedFromHeight, ComputedFromHeightCumulative,
LazyFromHeight, NegCentsUnsignedToDollars, RatioCents64,
RollingWindows, ValueFromHeightCumulative,
ByUnit, ComputedFromHeight, ComputedFromHeightCumulative, LazyFromHeight,
NegCentsUnsignedToDollars, RatioCents64, RollingWindows, SatsToCents,
},
prices,
};

View File

@@ -5,7 +5,7 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{PercentFromHeight, RatioSatsBp16};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, UnrealizedCore};
/// Relative metrics for the Complete tier.
#[derive(Traversable)]
@@ -29,7 +29,7 @@ impl RelativeBase {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
unrealized: &UnrealizedCore,
supply_total_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,
) -> Result<()> {

View File

@@ -5,7 +5,7 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::internal::{PercentFromHeight, RatioDollarsBp16, RatioDollarsBp32, RatioDollarsBps32};
use crate::distribution::metrics::{ImportConfig, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own market cap (extended && rel_to_all).
#[derive(Traversable)]
@@ -32,7 +32,7 @@ impl RelativeExtendedOwnMarketCap {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
own_market_cap: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {

View File

@@ -5,7 +5,7 @@ use vecdb::{Exit, Rw, StorageMode};
use crate::internal::{PercentFromHeight, RatioDollarsBp16, RatioDollarsBps32};
use crate::distribution::metrics::{ImportConfig, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
/// Extended relative metrics for own total unrealized PnL (extended only).
#[derive(Traversable)]
@@ -32,7 +32,7 @@ impl RelativeExtendedOwnPnl {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
exit: &Exit,
) -> Result<()> {
self.unrealized_profit_rel_to_own_gross_pnl

View File

@@ -4,7 +4,7 @@ use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeFull, RelativeExtendedOwnPnl};
@@ -30,7 +30,7 @@ impl RelativeForAll {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,

View File

@@ -10,7 +10,7 @@ use crate::internal::{
Bps32ToFloat, LazyFromHeight, PercentFromHeight, RatioDollarsBp16, RatioDollarsBps32,
};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::RelativeBase;
@@ -73,7 +73,7 @@ impl RelativeFull {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,
@@ -81,7 +81,7 @@ impl RelativeFull {
) -> Result<()> {
self.base.compute(
max_from,
&unrealized.base,
&unrealized.core,
supply_total_sats,
exit,
)?;

View File

@@ -4,7 +4,7 @@ use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeFull, RelativeExtendedOwnMarketCap, RelativeExtendedOwnPnl, RelativeToAll};
@@ -38,7 +38,7 @@ impl RelativeWithExtended {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,

View File

@@ -4,7 +4,7 @@ use brk_types::{Dollars, Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedFull};
use crate::distribution::metrics::{ImportConfig, RealizedBase, UnrealizedBase};
use super::{RelativeFull, RelativeToAll};
@@ -32,7 +32,7 @@ impl RelativeWithRelToAll {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedFull,
unrealized: &UnrealizedBase,
realized: &RealizedBase,
supply_total_sats: &impl ReadableVec<Height, Sats>,
market_cap: &impl ReadableVec<Height, Dollars>,

View File

@@ -4,7 +4,7 @@ use brk_types::{Height, Sats};
use derive_more::{Deref, DerefMut};
use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::distribution::metrics::{ImportConfig, UnrealizedBase};
use crate::distribution::metrics::{ImportConfig, UnrealizedCore};
use super::{RelativeBase, RelativeToAll};
@@ -31,7 +31,7 @@ impl RelativeBaseWithRelToAll {
pub(crate) fn compute(
&mut self,
max_from: Height,
unrealized: &UnrealizedBase,
unrealized: &UnrealizedCore,
supply_total_sats: &impl ReadableVec<Height, Sats>,
all_supply_sats: &impl ReadableVec<Height, Sats>,
exit: &Exit,

View File

@@ -1,73 +1,75 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, CentsSigned, Height, Indexes, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableCloneableVec, Rw, StorageMode, WritableVec};
use brk_types::{Cents, CentsSats, CentsSquaredSats, Height, Indexes, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, AnyVec, BytesVec, Exit, ReadableVec, Rw, StorageMode, WritableVec};
use crate::{
distribution::state::UnrealizedState,
internal::{
CentsSubtractToCentsSigned, FiatFromHeight, LazyFromHeight, NegCentsUnsignedToDollars,
ValueFromHeight,
},
internal::FiatFromHeight,
prices,
};
use brk_types::Dollars;
use crate::distribution::metrics::ImportConfig;
/// Unrealized metrics for the Complete tier (~6 fields).
///
/// Excludes source-only fields (invested_capital, raw BytesVecs)
/// and extended-only fields (pain_index, greed_index, net_sentiment).
#[derive(Traversable)]
use super::UnrealizedCore;
#[derive(Deref, DerefMut, Traversable)]
pub struct UnrealizedBase<M: StorageMode = Rw> {
pub supply_in_profit: ValueFromHeight<M>,
pub supply_in_loss: ValueFromHeight<M>,
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub core: UnrealizedCore<M>,
pub unrealized_profit: FiatFromHeight<Cents, M>,
pub unrealized_loss: FiatFromHeight<Cents, M>,
pub gross_pnl: FiatFromHeight<Cents, M>,
pub neg_unrealized_loss: LazyFromHeight<Dollars, Cents>,
pub invested_capital_in_profit: FiatFromHeight<Cents, M>,
pub invested_capital_in_loss: FiatFromHeight<Cents, M>,
pub net_unrealized_pnl: FiatFromHeight<CentsSigned, M>,
pub invested_capital_in_profit_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub invested_capital_in_loss_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub investor_cap_in_profit_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
pub investor_cap_in_loss_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
}
impl UnrealizedBase {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v0 = Version::ZERO;
let supply_in_profit = cfg.import("supply_in_profit", v0)?;
let supply_in_loss = cfg.import("supply_in_loss", v0)?;
let unrealized_profit = cfg.import("unrealized_profit", v0)?;
let unrealized_loss: FiatFromHeight<Cents> = cfg.import("unrealized_loss", v0)?;
let core = UnrealizedCore::forced_import(cfg)?;
let neg_unrealized_loss = LazyFromHeight::from_computed::<NegCentsUnsignedToDollars>(
&cfg.name("neg_unrealized_loss"),
cfg.version,
unrealized_loss.cents.height.read_only_boxed_clone(),
&unrealized_loss.cents,
);
let gross_pnl = cfg.import("unrealized_gross_pnl", v0)?;
let net_unrealized_pnl = cfg.import("net_unrealized_pnl", v0)?;
let invested_capital_in_profit = cfg.import("invested_capital_in_profit", v0)?;
let invested_capital_in_loss = cfg.import("invested_capital_in_loss", v0)?;
let invested_capital_in_profit_raw =
cfg.import("invested_capital_in_profit_raw", v0)?;
let invested_capital_in_loss_raw = cfg.import("invested_capital_in_loss_raw", v0)?;
let investor_cap_in_profit_raw = cfg.import("investor_cap_in_profit_raw", v0)?;
let investor_cap_in_loss_raw = cfg.import("investor_cap_in_loss_raw", v0)?;
Ok(Self {
supply_in_profit,
supply_in_loss,
unrealized_profit,
unrealized_loss,
neg_unrealized_loss,
net_unrealized_pnl,
core,
gross_pnl,
invested_capital_in_profit,
invested_capital_in_loss,
invested_capital_in_profit_raw,
invested_capital_in_loss_raw,
investor_cap_in_profit_raw,
investor_cap_in_loss_raw,
})
}
pub(crate) fn min_stateful_height_len(&self) -> usize {
self.supply_in_profit
.sats
.height
.len()
.min(self.supply_in_loss.sats.height.len())
.min(self.unrealized_profit.cents.height.len())
.min(self.unrealized_loss.cents.height.len())
self.core
.min_stateful_height_len()
.min(self.invested_capital_in_profit.cents.height.len())
.min(self.invested_capital_in_loss.cents.height.len())
.min(self.invested_capital_in_profit_raw.len())
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len())
}
pub(crate) fn truncate_push(
@@ -75,35 +77,46 @@ impl UnrealizedBase {
height: Height,
height_state: &UnrealizedState,
) -> Result<()> {
self.supply_in_profit
.sats
.height
.truncate_push(height, height_state.supply_in_profit)?;
self.supply_in_loss
.sats
.height
.truncate_push(height, height_state.supply_in_loss)?;
self.unrealized_profit
self.core.truncate_push(height, height_state)?;
self.invested_capital_in_profit
.cents
.height
.truncate_push(height, height_state.unrealized_profit)?;
self.unrealized_loss
.truncate_push(height, height_state.invested_capital_in_profit)?;
self.invested_capital_in_loss
.cents
.height
.truncate_push(height, height_state.unrealized_loss)?;
.truncate_push(height, height_state.invested_capital_in_loss)?;
self.invested_capital_in_profit_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_profit_raw),
)?;
self.invested_capital_in_loss_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_loss_raw),
)?;
self.investor_cap_in_profit_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_profit_raw),
)?;
self.investor_cap_in_loss_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_loss_raw),
)?;
Ok(())
}
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.supply_in_profit.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.base.cents.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.cents.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.cents.height,
&mut self.unrealized_loss.cents.height,
]
let mut vecs = self.core.collect_vecs_mut();
vecs.push(&mut self.invested_capital_in_profit.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_loss.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_profit_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_loss_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.investor_cap_in_profit_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.investor_cap_in_loss_raw as &mut dyn AnyStoredVec);
vecs
}
pub(crate) fn compute_from_stateful(
@@ -112,29 +125,89 @@ impl UnrealizedBase {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
sum_others!(self, starting_indexes, others, exit; supply_in_profit.sats.height);
sum_others!(self, starting_indexes, others, exit; supply_in_loss.sats.height);
sum_others!(self, starting_indexes, others, exit; unrealized_profit.cents.height);
sum_others!(self, starting_indexes, others, exit; unrealized_loss.cents.height);
let core_refs: Vec<&UnrealizedCore> =
others.iter().map(|o| &o.core).collect();
self.core
.compute_from_stateful(starting_indexes, &core_refs, exit)?;
sum_others!(self, starting_indexes, others, exit; invested_capital_in_profit.cents.height);
sum_others!(self, starting_indexes, others, exit; invested_capital_in_loss.cents.height);
let start = self
.invested_capital_in_profit_raw
.len()
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len());
let end = others
.iter()
.map(|o| o.invested_capital_in_profit_raw.len())
.min()
.unwrap_or(0);
let invested_profit_ranges: Vec<Vec<CentsSats>> = others
.iter()
.map(|o| {
o.invested_capital_in_profit_raw
.collect_range_at(start, end)
})
.collect();
let invested_loss_ranges: Vec<Vec<CentsSats>> = others
.iter()
.map(|o| o.invested_capital_in_loss_raw.collect_range_at(start, end))
.collect();
let investor_profit_ranges: Vec<Vec<CentsSquaredSats>> = others
.iter()
.map(|o| o.investor_cap_in_profit_raw.collect_range_at(start, end))
.collect();
let investor_loss_ranges: Vec<Vec<CentsSquaredSats>> = others
.iter()
.map(|o| o.investor_cap_in_loss_raw.collect_range_at(start, end))
.collect();
for i in start..end {
let height = Height::from(i);
let local_i = i - start;
let mut sum_invested_profit = CentsSats::ZERO;
let mut sum_invested_loss = CentsSats::ZERO;
let mut sum_investor_profit = CentsSquaredSats::ZERO;
let mut sum_investor_loss = CentsSquaredSats::ZERO;
for idx in 0..others.len() {
sum_invested_profit += invested_profit_ranges[idx][local_i];
sum_invested_loss += invested_loss_ranges[idx][local_i];
sum_investor_profit += investor_profit_ranges[idx][local_i];
sum_investor_loss += investor_loss_ranges[idx][local_i];
}
self.invested_capital_in_profit_raw
.truncate_push(height, sum_invested_profit)?;
self.invested_capital_in_loss_raw
.truncate_push(height, sum_invested_loss)?;
self.investor_cap_in_profit_raw
.truncate_push(height, sum_investor_profit)?;
self.investor_cap_in_loss_raw
.truncate_push(height, sum_investor_loss)?;
}
Ok(())
}
/// Compute derived metrics from stored values.
pub(crate) fn compute_rest(
&mut self,
_prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.net_unrealized_pnl
.cents
.height
.compute_binary::<Cents, Cents, CentsSubtractToCentsSigned>(
starting_indexes.height,
&self.unrealized_profit.cents.height,
&self.unrealized_loss.cents.height,
exit,
)?;
self.core.compute_rest(starting_indexes, exit)?;
self.gross_pnl.cents.height.compute_add(
starting_indexes.height,
&self.core.unrealized_profit.cents.height,
&self.core.unrealized_loss.cents.height,
exit,
)?;
Ok(())
}

View File

@@ -0,0 +1,137 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, CentsSigned, Height, Indexes, Version};
use vecdb::{AnyStoredVec, AnyVec, Exit, ReadableCloneableVec, Rw, StorageMode, WritableVec};
use crate::{
distribution::state::UnrealizedState,
internal::{
CentsSubtractToCentsSigned, FiatFromHeight, LazyFromHeight, NegCentsUnsignedToDollars,
ValueFromHeight,
},
};
use brk_types::Dollars;
use crate::distribution::metrics::ImportConfig;
#[derive(Traversable)]
pub struct UnrealizedCore<M: StorageMode = Rw> {
pub supply_in_profit: ValueFromHeight<M>,
pub supply_in_loss: ValueFromHeight<M>,
pub unrealized_profit: FiatFromHeight<Cents, M>,
pub unrealized_loss: FiatFromHeight<Cents, M>,
pub neg_unrealized_loss: LazyFromHeight<Dollars, Cents>,
pub net_unrealized_pnl: FiatFromHeight<CentsSigned, M>,
}
impl UnrealizedCore {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v0 = Version::ZERO;
let supply_in_profit = cfg.import("supply_in_profit", v0)?;
let supply_in_loss = cfg.import("supply_in_loss", v0)?;
let unrealized_profit = cfg.import("unrealized_profit", v0)?;
let unrealized_loss: FiatFromHeight<Cents> = cfg.import("unrealized_loss", v0)?;
let neg_unrealized_loss = LazyFromHeight::from_computed::<NegCentsUnsignedToDollars>(
&cfg.name("neg_unrealized_loss"),
cfg.version,
unrealized_loss.cents.height.read_only_boxed_clone(),
&unrealized_loss.cents,
);
let net_unrealized_pnl = cfg.import("net_unrealized_pnl", v0)?;
Ok(Self {
supply_in_profit,
supply_in_loss,
unrealized_profit,
unrealized_loss,
neg_unrealized_loss,
net_unrealized_pnl,
})
}
pub(crate) fn min_stateful_height_len(&self) -> usize {
self.supply_in_profit
.sats
.height
.len()
.min(self.supply_in_loss.sats.height.len())
.min(self.unrealized_profit.cents.height.len())
.min(self.unrealized_loss.cents.height.len())
}
pub(crate) fn truncate_push(
&mut self,
height: Height,
height_state: &UnrealizedState,
) -> Result<()> {
self.supply_in_profit
.sats
.height
.truncate_push(height, height_state.supply_in_profit)?;
self.supply_in_loss
.sats
.height
.truncate_push(height, height_state.supply_in_loss)?;
self.unrealized_profit
.cents
.height
.truncate_push(height, height_state.unrealized_profit)?;
self.unrealized_loss
.cents
.height
.truncate_push(height, height_state.unrealized_loss)?;
Ok(())
}
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.supply_in_profit.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.base.cents.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.cents.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.cents.height,
&mut self.unrealized_loss.cents.height,
]
}
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
others: &[&Self],
exit: &Exit,
) -> Result<()> {
sum_others!(self, starting_indexes, others, exit; supply_in_profit.sats.height);
sum_others!(self, starting_indexes, others, exit; supply_in_loss.sats.height);
sum_others!(self, starting_indexes, others, exit; unrealized_profit.cents.height);
sum_others!(self, starting_indexes, others, exit; unrealized_loss.cents.height);
Ok(())
}
/// Compute derived metrics from stored values.
pub(crate) fn compute_rest(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.net_unrealized_pnl
.cents
.height
.compute_binary::<Cents, Cents, CentsSubtractToCentsSigned>(
starting_indexes.height,
&self.unrealized_profit.cents.height,
&self.unrealized_loss.cents.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,42 +1,23 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Cents, CentsSats, CentsSigned, CentsSquaredSats, Height, Indexes, Version};
use brk_types::{Cents, CentsSigned, Indexes, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{AnyStoredVec, AnyVec, BytesVec, Exit, ReadableVec, Rw, StorageMode, WritableVec};
use vecdb::{Exit, Rw, StorageMode};
use crate::{
distribution::state::UnrealizedState,
internal::{CentsSubtractToCentsSigned, FiatFromHeight},
prices,
};
use crate::internal::{CentsSubtractToCentsSigned, FiatFromHeight};
use crate::prices;
use crate::distribution::metrics::ImportConfig;
use super::UnrealizedBase;
/// Full unrealized metrics (Source/Extended tier).
///
/// Contains all Complete-tier fields (via Deref to UnrealizedBase) plus:
/// - Source-only: invested_capital, raw BytesVecs
/// - Extended-only: pain_index, greed_index, net_sentiment
#[derive(Deref, DerefMut, Traversable)]
pub struct UnrealizedFull<M: StorageMode = Rw> {
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub base: UnrealizedBase<M>,
pub inner: UnrealizedBase<M>,
pub gross_pnl: FiatFromHeight<Cents, M>,
pub invested_capital_in_profit: FiatFromHeight<Cents, M>,
pub invested_capital_in_loss: FiatFromHeight<Cents, M>,
pub invested_capital_in_profit_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub invested_capital_in_loss_raw: M::Stored<BytesVec<Height, CentsSats>>,
pub investor_cap_in_profit_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
pub investor_cap_in_loss_raw: M::Stored<BytesVec<Height, CentsSquaredSats>>,
// --- Extended-only fields ---
pub pain_index: FiatFromHeight<Cents, M>,
pub greed_index: FiatFromHeight<Cents, M>,
pub net_sentiment: FiatFromHeight<CentsSigned, M>,
@@ -44,198 +25,32 @@ pub struct UnrealizedFull<M: StorageMode = Rw> {
impl UnrealizedFull {
pub(crate) fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v0 = Version::ZERO;
let inner = UnrealizedBase::forced_import(cfg)?;
let base = UnrealizedBase::forced_import(cfg)?;
let gross_pnl = cfg.import("unrealized_gross_pnl", v0)?;
let invested_capital_in_profit = cfg.import("invested_capital_in_profit", v0)?;
let invested_capital_in_loss = cfg.import("invested_capital_in_loss", v0)?;
let invested_capital_in_profit_raw =
cfg.import("invested_capital_in_profit_raw", v0)?;
let invested_capital_in_loss_raw = cfg.import("invested_capital_in_loss_raw", v0)?;
let investor_cap_in_profit_raw = cfg.import("investor_cap_in_profit_raw", v0)?;
let investor_cap_in_loss_raw = cfg.import("investor_cap_in_loss_raw", v0)?;
let pain_index = cfg.import("pain_index", v0)?;
let greed_index = cfg.import("greed_index", v0)?;
let pain_index = cfg.import("pain_index", Version::ZERO)?;
let greed_index = cfg.import("greed_index", Version::ZERO)?;
let net_sentiment = cfg.import("net_sentiment", Version::ONE)?;
Ok(Self {
base,
gross_pnl,
invested_capital_in_profit,
invested_capital_in_loss,
invested_capital_in_profit_raw,
invested_capital_in_loss_raw,
investor_cap_in_profit_raw,
investor_cap_in_loss_raw,
inner,
pain_index,
greed_index,
net_sentiment,
})
}
pub(crate) fn min_stateful_height_len(&self) -> usize {
self.base
.min_stateful_height_len()
.min(self.invested_capital_in_profit.cents.height.len())
.min(self.invested_capital_in_loss.cents.height.len())
.min(self.invested_capital_in_profit_raw.len())
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len())
}
pub(crate) fn truncate_push(
&mut self,
height: Height,
height_state: &UnrealizedState,
) -> Result<()> {
self.base.truncate_push(height, height_state)?;
self.invested_capital_in_profit
.cents
.height
.truncate_push(height, height_state.invested_capital_in_profit)?;
self.invested_capital_in_loss
.cents
.height
.truncate_push(height, height_state.invested_capital_in_loss)?;
self.invested_capital_in_profit_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_profit_raw),
)?;
self.invested_capital_in_loss_raw.truncate_push(
height,
CentsSats::new(height_state.invested_capital_in_loss_raw),
)?;
self.investor_cap_in_profit_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_profit_raw),
)?;
self.investor_cap_in_loss_raw.truncate_push(
height,
CentsSquaredSats::new(height_state.investor_cap_in_loss_raw),
)?;
Ok(())
}
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
let mut vecs = self.base.collect_vecs_mut();
vecs.push(&mut self.invested_capital_in_profit.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_loss.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_profit_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital_in_loss_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.investor_cap_in_profit_raw as &mut dyn AnyStoredVec);
vecs.push(&mut self.investor_cap_in_loss_raw as &mut dyn AnyStoredVec);
vecs
}
pub(crate) fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
others: &[&Self],
exit: &Exit,
) -> Result<()> {
// Delegate Complete-tier aggregation
let base_refs: Vec<&UnrealizedBase> =
others.iter().map(|o| &o.base).collect();
self.base
.compute_from_stateful(starting_indexes, &base_refs, exit)?;
// Source-only: invested_capital
sum_others!(self, starting_indexes, others, exit; invested_capital_in_profit.cents.height);
sum_others!(self, starting_indexes, others, exit; invested_capital_in_loss.cents.height);
// Source-only: raw BytesVec aggregation
let start = self
.invested_capital_in_profit_raw
.len()
.min(self.invested_capital_in_loss_raw.len())
.min(self.investor_cap_in_profit_raw.len())
.min(self.investor_cap_in_loss_raw.len());
let end = others
.iter()
.map(|o| o.invested_capital_in_profit_raw.len())
.min()
.unwrap_or(0);
// Pre-collect all cohort data to avoid per-element BytesVec reads in nested loop
let invested_profit_ranges: Vec<Vec<CentsSats>> = others
.iter()
.map(|o| {
o.invested_capital_in_profit_raw
.collect_range_at(start, end)
})
.collect();
let invested_loss_ranges: Vec<Vec<CentsSats>> = others
.iter()
.map(|o| o.invested_capital_in_loss_raw.collect_range_at(start, end))
.collect();
let investor_profit_ranges: Vec<Vec<CentsSquaredSats>> = others
.iter()
.map(|o| o.investor_cap_in_profit_raw.collect_range_at(start, end))
.collect();
let investor_loss_ranges: Vec<Vec<CentsSquaredSats>> = others
.iter()
.map(|o| o.investor_cap_in_loss_raw.collect_range_at(start, end))
.collect();
for i in start..end {
let height = Height::from(i);
let local_i = i - start;
let mut sum_invested_profit = CentsSats::ZERO;
let mut sum_invested_loss = CentsSats::ZERO;
let mut sum_investor_profit = CentsSquaredSats::ZERO;
let mut sum_investor_loss = CentsSquaredSats::ZERO;
for idx in 0..others.len() {
sum_invested_profit += invested_profit_ranges[idx][local_i];
sum_invested_loss += invested_loss_ranges[idx][local_i];
sum_investor_profit += investor_profit_ranges[idx][local_i];
sum_investor_loss += investor_loss_ranges[idx][local_i];
}
self.invested_capital_in_profit_raw
.truncate_push(height, sum_invested_profit)?;
self.invested_capital_in_loss_raw
.truncate_push(height, sum_invested_loss)?;
self.investor_cap_in_profit_raw
.truncate_push(height, sum_investor_profit)?;
self.investor_cap_in_loss_raw
.truncate_push(height, sum_investor_loss)?;
}
Ok(())
}
/// Compute derived metrics from stored values + price.
pub(crate) fn compute_rest(
&mut self,
prices: &prices::Vecs,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.base.compute_rest(starting_indexes, exit)?;
self.inner.compute_rest(prices, starting_indexes, exit)?;
self.gross_pnl.cents.height.compute_add(
starting_indexes.height,
&self.base.unrealized_profit.cents.height,
&self.base.unrealized_loss.cents.height,
exit,
)?;
// Pain index (investor_price_of_losers - spot)
self.pain_index.cents.height.compute_transform3(
starting_indexes.height,
&self.investor_cap_in_loss_raw,
&self.invested_capital_in_loss_raw,
&self.inner.investor_cap_in_loss_raw,
&self.inner.invested_capital_in_loss_raw,
&prices.price.cents.height,
|(h, investor_cap, invested_cap, spot, ..)| {
if invested_cap.inner() == 0 {
@@ -248,11 +63,10 @@ impl UnrealizedFull {
exit,
)?;
// Extended-only: Greed index (spot - investor_price_of_winners)
self.greed_index.cents.height.compute_transform3(
starting_indexes.height,
&self.investor_cap_in_profit_raw,
&self.invested_capital_in_profit_raw,
&self.inner.investor_cap_in_profit_raw,
&self.inner.invested_capital_in_profit_raw,
&prices.price.cents.height,
|(h, investor_cap, invested_cap, spot, ..)| {
if invested_cap.inner() == 0 {
@@ -268,7 +82,6 @@ impl UnrealizedFull {
Ok(())
}
/// Compute net_sentiment.height for separate cohorts (greed - pain).
pub(crate) fn compute_net_sentiment_height(
&mut self,
starting_indexes: &Indexes,

View File

@@ -1,5 +1,52 @@
mod base;
mod core;
mod full;
pub use base::UnrealizedBase;
pub use self::core::UnrealizedCore;
pub use full::UnrealizedFull;
use brk_error::Result;
use brk_types::{Height, Indexes};
use vecdb::Exit;
use crate::{distribution::state::UnrealizedState, prices};
/// Common interface over the two unrealized-PnL vec flavors
/// (`UnrealizedBase` and the extended `UnrealizedFull`), so cohort code can
/// drive either one uniformly.
pub trait UnrealizedLike: Send + Sync {
    /// Borrow the underlying base vecs.
    fn as_base(&self) -> &UnrealizedBase;
    /// Mutably borrow the underlying base vecs.
    fn as_base_mut(&mut self) -> &mut UnrealizedBase;
    /// Minimum stored length across the stateful height vecs.
    fn min_stateful_height_len(&self) -> usize;
    /// Truncate to `height` and push the unrealized state for that height.
    fn truncate_push(&mut self, height: Height, state: &UnrealizedState) -> Result<()>;
    /// Derive the remaining (non-stateful) metrics from stored height data.
    fn compute_rest(&mut self, prices: &prices::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()>;
    /// Compute the per-height net sentiment metric; a no-op for the base
    /// flavor, which does not carry sentiment vecs (see impls below).
    fn compute_net_sentiment_height(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()>;
}
impl UnrealizedLike for UnrealizedBase {
    fn as_base(&self) -> &UnrealizedBase { self }
    fn as_base_mut(&mut self) -> &mut UnrealizedBase { self }
    // NOTE: the calls below resolve to the *inherent* methods of the same
    // name (inherent methods take precedence over trait methods in
    // method-call syntax), so they do not recurse — assumes those inherent
    // methods exist on UnrealizedBase.
    fn min_stateful_height_len(&self) -> usize { self.min_stateful_height_len() }
    fn truncate_push(&mut self, height: Height, state: &UnrealizedState) -> Result<()> {
        self.truncate_push(height, state)
    }
    fn compute_rest(&mut self, prices: &prices::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
        self.compute_rest(prices, starting_indexes, exit)
    }
    // Base flavor has no net-sentiment metric: deliberate no-op.
    fn compute_net_sentiment_height(&mut self, _starting_indexes: &Indexes, _exit: &Exit) -> Result<()> {
        Ok(())
    }
}
impl UnrealizedLike for UnrealizedFull {
    // The full flavor wraps a base (`self.inner`); base-level operations
    // delegate to it, full-only operations use UnrealizedFull's own
    // inherent methods (inherent methods win over trait methods, so the
    // same-name calls below do not recurse).
    fn as_base(&self) -> &UnrealizedBase { &self.inner }
    fn as_base_mut(&mut self) -> &mut UnrealizedBase { &mut self.inner }
    fn min_stateful_height_len(&self) -> usize { self.inner.min_stateful_height_len() }
    fn truncate_push(&mut self, height: Height, state: &UnrealizedState) -> Result<()> {
        self.inner.truncate_push(height, state)
    }
    fn compute_rest(&mut self, prices: &prices::Vecs, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
        self.compute_rest(prices, starting_indexes, exit)
    }
    fn compute_net_sentiment_height(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()> {
        self.compute_net_sentiment_height(starting_indexes, exit)
    }
}

View File

@@ -30,7 +30,7 @@ use crate::{
use super::{
AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, RangeMap, UTXOCohorts,
address::{
AddrCountsVecs, AddressActivityVecs, GrowthRateVecs, NewAddrCountVecs, TotalAddrCountVecs,
AddrCountsVecs, AddressActivityVecs, DeltaVecs, NewAddrCountVecs, TotalAddrCountVecs,
},
compute::aggregates,
};
@@ -57,8 +57,8 @@ pub struct Vecs<M: StorageMode = Rw> {
pub total_addr_count: TotalAddrCountVecs<M>,
/// New addresses per block (delta of total) - stored height + cumulative + rolling, global + per-type
pub new_addr_count: NewAddrCountVecs<M>,
/// Growth rate (new / addr_count) - stored ratio with distribution stats, global + per-type
pub growth_rate: GrowthRateVecs<M>,
/// Windowed change + growth rate for addr_count, global + per-type
pub delta: DeltaVecs<M>,
pub fundedaddressindex:
LazyVecFrom1<FundedAddressIndex, FundedAddressIndex, FundedAddressIndex, FundedAddressData>,
@@ -141,7 +141,7 @@ impl Vecs {
let new_addr_count = NewAddrCountVecs::forced_import(&db, version, indexes)?;
// Growth rate: new / addr_count (global + per-type)
let growth_rate = GrowthRateVecs::forced_import(&db, version, indexes)?;
let delta = DeltaVecs::forced_import(&db, version, indexes)?;
let this = Self {
supply_state: BytesVec::forced_import_with(
@@ -154,7 +154,7 @@ impl Vecs {
address_activity,
total_addr_count,
new_addr_count,
growth_rate,
delta,
utxo_cohorts,
address_cohorts,
@@ -400,11 +400,11 @@ impl Vecs {
exit,
)?;
// 6b. Compute address count day1 vecs (by addresstype + all)
// 6b. Compute address count sum (by addresstype all)
self.addr_count
.compute_rest(blocks, starting_indexes, exit)?;
.compute_rest(starting_indexes, exit)?;
self.empty_addr_count
.compute_rest(blocks, starting_indexes, exit)?;
.compute_rest(starting_indexes, exit)?;
// 6c. Compute total_addr_count = addr_count + empty_addr_count
self.total_addr_count.compute(
@@ -425,11 +425,9 @@ impl Vecs {
exit,
)?;
// 6e. Compute growth_rate = new_addr_count / addr_count
self.growth_rate.compute(
self.delta.compute(
starting_indexes.height,
&window_starts,
&self.new_addr_count,
&self.addr_count,
exit,
)?;

View File

@@ -0,0 +1,107 @@
//! RollingDelta - raw change + growth rate (%) across 4 time windows.
//!
//! For a monotonic source (e.g., cumulative address count):
//! - `change._24h` = count_now - count_24h_ago
//! - `rate._24h` = (count_now - count_24h_ago) / count_24h_ago in BPS
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BasisPoints16, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{NumericValue, PercentRollingWindows, RollingWindows, WindowStarts},
};
#[derive(Traversable)]
pub struct RollingDelta<S, C = S, M: StorageMode = Rw>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    /// Raw windowed change: `source[now] - source[window_start]`, one vec
    /// per rolling window.
    pub change: RollingWindows<C, M>,
    /// Relative growth of each window (`change / window_start_value`),
    /// stored as basis points.
    pub rate: PercentRollingWindows<BasisPoints16, M>,
    // `S` is the source element type consumed in `compute`; nothing of
    // type `S` is stored, hence the phantom.
    _phantom: std::marker::PhantomData<S>,
}
impl<S, C> RollingDelta<S, C>
where
    S: NumericValue + JsonSchema,
    C: NumericValue + JsonSchema,
{
    /// Open (or create) the backing vecs: `{name}_change` for raw deltas
    /// and `{name}_rate` for the BPS growth rates.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            change: RollingWindows::forced_import(
                db,
                &format!("{name}_change"),
                version,
            indexes,
            )?,
            rate: PercentRollingWindows::forced_import(
                db,
                &format!("{name}_rate"),
                version,
                indexes,
            )?,
            _phantom: std::marker::PhantomData,
        })
    }

    /// Fill `change` and `rate` for every window starting from `max_from`.
    ///
    /// Missing source values fall back to `S::default()` (treated as zero).
    /// NOTE(review): deltas round-trip through `f64`, so integer sources
    /// above 2^53 would lose precision — confirm source magnitudes.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        source: &impl ReadableVec<Height, S>,
        exit: &Exit,
    ) -> Result<()>
    where
        S: Default,
    {
        // Step 1: change = current - ago (one pass per window, paired with
        // that window's start-height vec).
        for (change_w, starts) in self.change.0.as_mut_array().into_iter().zip(windows.as_array())
        {
            change_w.height.compute_transform(
                max_from,
                *starts,
                |(h, ago_h, ..)| {
                    let current: f64 = source.collect_one(h).unwrap_or_default().into();
                    let ago: f64 = source.collect_one(ago_h).unwrap_or_default().into();
                    (h, C::from(current - ago))
                },
                exit,
            )?;
        }
        // Step 2: rate = change / ago = change / (current - change).
        // Re-derives `ago` from the already-computed change so the source
        // is only read once per height here.
        for (growth_w, change_w) in self
            .rate
            .0
            .as_mut_array()
            .into_iter()
            .zip(self.change.0.as_array())
        {
            growth_w.bps.height.compute_transform2(
                max_from,
                source,
                &change_w.height,
                |(h, current, change, ..)| {
                    let current_f: f64 = current.into();
                    let change_f: f64 = change.into();
                    let ago = current_f - change_f;
                    // A zero window-start value yields a 0 rate instead of
                    // dividing by zero.
                    let rate = if ago == 0.0 { 0.0 } else { change_f / ago };
                    (h, BasisPoints16::from(rate))
                },
                exit,
            )?;
        }
        Ok(())
    }
}

View File

@@ -1,75 +0,0 @@
//! ComputedFromHeight using Distribution aggregation (no sum/cumulative).
//!
//! Stored height data + LazyAggVec index views + rolling distribution windows.
//! Use for block-based metrics where sum/cumulative would be misleading
//! (e.g., activity counts that can't be deduplicated across blocks).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, PcoVec, Rw, StorageMode};
use crate::indexes;
use crate::internal::{ComputedVecValue, NumericValue, RollingDistribution, WindowStarts};
#[derive(Traversable)]
pub struct ComputedFromHeightDistribution<T, M: StorageMode = Rw>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Eagerly stored per-height values (the source of truth).
    pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
    /// Rolling distribution stats derived from `height`; flattened so its
    /// metrics appear at this node's level when traversed.
    #[traversable(flatten)]
    pub rolling: RollingDistribution<T, M>,
}
impl<T> ComputedFromHeightDistribution<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the stored height vec under `name` plus its rolling
    /// distribution companions.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
        let rolling = RollingDistribution::forced_import(db, name, version, indexes)?;
        Ok(Self { height, rolling })
    }

    /// Compute height data via closure, then rolling distribution.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
        compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    ) -> Result<()>
    where
        T: Copy + Ord + From<f64> + Default,
        f64: From<T>,
    {
        // Populate the stored height vec first, then derive stats from it.
        compute_height(&mut self.height)?;
        self.compute_rest(max_from, windows, exit)
    }

    /// Compute rolling distribution from already-populated height data.
    pub(crate) fn compute_rest(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
    ) -> Result<()>
    where
        T: Copy + Ord + From<f64> + Default,
        f64: From<T>,
    {
        self.rolling
            .compute_distribution(max_from, windows, &self.height, exit)?;
        Ok(())
    }
}

View File

@@ -1,13 +1,13 @@
mod aggregated;
mod cumulative;
mod cumulative_sum;
mod distribution;
mod full;
mod delta;
mod rolling_average;
pub use aggregated::*;
pub use cumulative::*;
pub use cumulative_sum::*;
pub use distribution::*;
pub use full::*;
pub use delta::*;
pub use rolling_average::*;

View File

@@ -37,7 +37,7 @@ where
) -> Result<Self> {
let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, version)?;
let average =
RollingWindows::forced_import(db, &format!("{name}_average"), version, indexes)?;
RollingWindows::forced_import(db, &format!("{name}_average"), version + Version::ONE, indexes)?;
Ok(Self { height, average })
}

View File

@@ -1,69 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredF32, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{BpsType, WindowStarts},
};
use crate::internal::{ComputedFromHeightDistribution, LazyFromHeight};
/// Like PercentFromHeight but with rolling distribution stats on the bps data.
#[derive(Traversable)]
pub struct PercentFromHeightDistribution<B: BpsType, M: StorageMode = Rw> {
    /// Stored BPS height data plus its rolling distribution windows.
    pub bps: ComputedFromHeightDistribution<B, M>,
    /// Lazy ratio view derived from the stored bps height vec.
    pub ratio: LazyFromHeight<StoredF32, B>,
    /// Lazy percent view derived from the stored bps height vec.
    pub percent: LazyFromHeight<StoredF32, B>,
}
impl<B: BpsType> PercentFromHeightDistribution<B> {
    /// Open the stored `{name}_bps` vec and register the two lazy views
    /// (`{name}_ratio` and the percent view) over it.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let bps = ComputedFromHeightDistribution::forced_import(
            db,
            &format!("{name}_bps"),
            version,
            indexes,
        )?;
        let ratio = LazyFromHeight::from_height_source::<B::ToRatio>(
            &format!("{name}_ratio"),
            version,
            bps.height.read_only_boxed_clone(),
            indexes,
        );
        // NOTE(review): the percent view is registered under the bare
        // `name` (not `{name}_percent`) — verify that is intentional and
        // does not collide with another metric of the same name.
        let percent = LazyFromHeight::from_height_source::<B::ToPercent>(
            name,
            version,
            bps.height.read_only_boxed_clone(),
            indexes,
        );
        Ok(Self {
            bps,
            ratio,
            percent,
        })
    }

    /// Compute bps height data via closure, then its rolling distribution;
    /// the ratio/percent views are lazy and need no explicit computation.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        exit: &Exit,
        compute_height: impl FnOnce(&mut EagerVec<PcoVec<Height, B>>) -> Result<()>,
    ) -> Result<()>
    where
        B: Copy + Ord + From<f64> + Default,
        f64: From<B>,
    {
        self.bps.compute(max_from, windows, exit, compute_height)
    }
}

View File

@@ -1,7 +1,5 @@
mod base;
mod distribution;
mod rolling_average;
pub use base::*;
pub use distribution::*;
pub use rolling_average::*;

View File

@@ -63,8 +63,9 @@ impl fmt::Display for AnyAddressIndex {
impl Formattable for AnyAddressIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -165,7 +165,8 @@ impl std::fmt::Display for BasisPoints16 {
impl Formattable for BasisPoints16 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -138,7 +138,8 @@ impl std::fmt::Display for BasisPoints32 {
impl Formattable for BasisPoints32 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -161,7 +161,8 @@ impl std::fmt::Display for BasisPointsSigned16 {
impl Formattable for BasisPointsSigned16 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -167,7 +167,8 @@ impl std::fmt::Display for BasisPointsSigned32 {
impl Formattable for BasisPointsSigned32 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -144,7 +144,8 @@ impl std::fmt::Display for Bitcoin {
impl Formattable for Bitcoin {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = ryu::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -40,7 +40,8 @@ impl std::fmt::Display for BlkPosition {
impl Formattable for BlkPosition {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -283,7 +283,8 @@ impl std::fmt::Display for Cents {
impl Formattable for Cents {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -110,8 +110,9 @@ impl Div<usize> for CentsSquaredSats {
impl Formattable for CentsSquaredSats {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -217,7 +217,8 @@ impl fmt::Display for Day1 {
impl Formattable for Day1 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -82,7 +82,8 @@ impl std::fmt::Display for Day3 {
impl Formattable for Day3 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -435,7 +435,8 @@ impl std::fmt::Display for Dollars {
impl Formattable for Dollars {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = ryu::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -83,7 +83,8 @@ impl std::fmt::Display for EmptyAddressIndex {
impl Formattable for EmptyAddressIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -123,7 +123,8 @@ impl std::fmt::Display for FeeRate {
impl Formattable for FeeRate {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = ryu::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -287,8 +287,9 @@ impl std::fmt::Display for Height {
impl Formattable for Height {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -82,7 +82,8 @@ impl std::fmt::Display for Hour12 {
impl Formattable for Hour12 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -151,6 +151,7 @@ mod stored_bool;
mod stored_f32;
mod stored_f64;
mod stored_i16;
mod stored_i64;
mod stored_i8;
mod stored_string;
mod stored_u16;
@@ -343,6 +344,7 @@ pub use stored_f32::*;
pub use stored_f64::*;
pub use stored_i8::*;
pub use stored_i16::*;
pub use stored_i64::*;
pub use stored_string::*;
pub use stored_u8::*;
pub use stored_u16::*;

View File

@@ -82,7 +82,8 @@ impl std::fmt::Display for Minute30 {
impl Formattable for Minute30 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -136,7 +136,8 @@ impl std::fmt::Display for Month1 {
impl Formattable for Month1 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -130,7 +130,8 @@ impl std::fmt::Display for Month3 {
impl Formattable for Month3 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -122,7 +122,8 @@ impl std::fmt::Display for Month6 {
impl Formattable for Month6 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -85,7 +85,8 @@ impl std::fmt::Display for OpReturnIndex {
impl Formattable for OpReturnIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -85,7 +85,8 @@ impl std::fmt::Display for P2MSOutputIndex {
impl Formattable for P2MSOutputIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -106,7 +106,8 @@ impl std::fmt::Display for P2PK33AddressIndex {
impl Formattable for P2PK33AddressIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -106,7 +106,8 @@ impl std::fmt::Display for P2PKHAddressIndex {
impl Formattable for P2PKHAddressIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -106,7 +106,8 @@ impl std::fmt::Display for P2TRAddressIndex {
impl Formattable for P2TRAddressIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format((self.0).0));
Ok(())
}
}

View File

@@ -116,7 +116,8 @@ impl std::fmt::Display for PairOutputIndex {
impl Formattable for PairOutputIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -377,85 +377,34 @@ pub enum PoolSlug {
}
impl PoolSlug {
    /// Pools with dominance above per-window thresholds get full metrics.
    /// Thresholds: all-time>=1.0%, 1y>=1.0%, 1m>=0.75%, 1w>=0.5%.
    /// Generated by `scripts/pool_major_threshold.py`.
    ///
    /// NOTE(review): this variant list appears to contain merged old+new
    /// diff lines — `Btcc` and `Unknown` each occur twice, and a `|` is
    /// missing between `BatPool` and the second `AntPool` (syntax error).
    /// Reconcile the list against the generator script before building.
    pub fn is_major(&self) -> bool {
        matches!(
            self,
            Self::Unknown
                | Self::AntPool
                | Self::AsicMiner
                | Self::BatPool
            Self::AntPool
                | Self::BinancePool
                | Self::BitClub
                | Self::BitcoinAffiliateNetwork
                | Self::BitcoinCom
                | Self::BitcoinIndia
                | Self::BitcoinRussia
                | Self::BitFury
                | Self::BitMinter
                | Self::Bitparking
                | Self::Bixin
                | Self::BraiinsPool
                | Self::Btcc
                | Self::BtcCom
                | Self::BtcGuild
                | Self::BtcTop
                | Self::BtPool
                | Self::Btcc
                | Self::BwPool
                | Self::BytePool
                | Self::CanoePool
                | Self::CoinLab
                | Self::ConnectBtc
                | Self::DigitalBtc
                | Self::DPool
                | Self::EclipseMc
                | Self::Eligius
                | Self::EmcdPool
                | Self::ExxBw
                | Self::F2Pool
                | Self::FiftyEightCoin
                | Self::FoundryUsa
                | Self::GbMiners
                | Self::GhashIo
                | Self::Hhtt
                | Self::HuobiPool
                | Self::KanoPool
                | Self::KncMiner
                | Self::KuCoinPool
                | Self::LubianCom
                | Self::Luxor
                | Self::MaraPool
                | Self::MaxBtc
                | Self::MegaBigPower
                | Self::NiceHash
                | Self::NmcBit
                | Self::NovaBlock
                | Self::Ocean
                | Self::OkExPool
                | Self::Okkong
                | Self::OkpoolTop
                | Self::OneThash
                | Self::OzCoin
                | Self::PegaPool
                | Self::Polmine
                | Self::Pool50btc
                | Self::Poolin
                | Self::SbiCrypto
                | Self::SecPool
                | Self::SecretSuperstar
                | Self::SigmapoolCom
                | Self::SoloCk
                | Self::SpiderPool
                | Self::Tangpool
                | Self::TatmasPool
                | Self::TerraPool
                | Self::TigerpoolNet
                | Self::TripleMining
                | Self::UltimusPool
                | Self::Unknown
                | Self::ViaBtc
                | Self::WayiCn
                | Self::WhitePool
                | Self::YourbtcNet
        )
    }
}

View File

@@ -347,7 +347,8 @@ impl std::fmt::Display for Sats {
impl Formattable for Sats {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -215,7 +215,8 @@ impl std::fmt::Display for SatsSigned {
impl Formattable for SatsSigned {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -274,7 +274,8 @@ impl std::fmt::Display for StoredF32 {
impl Formattable for StoredF32 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = ryu::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -118,7 +118,8 @@ impl std::fmt::Display for StoredI16 {
impl Formattable for StoredI16 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -0,0 +1,140 @@
use std::ops::{Add, AddAssign, Div, Sub, SubAssign};
use derive_more::Deref;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex};
/// Fixed-size 64-bit signed integer optimized for on-disk storage
#[derive(
Debug,
Default,
Deref,
Clone,
Copy,
PartialEq,
Eq,
PartialOrd,
Ord,
Serialize,
Deserialize,
Pco,
JsonSchema,
)]
pub struct StoredI64(i64);
impl StoredI64 {
    /// The zero value.
    pub const ZERO: Self = Self(0);

    /// Wraps a raw `i64`.
    ///
    /// Made `const fn` so it can be used in constant contexts just like
    /// `ZERO`; this is strictly backward compatible for existing callers.
    pub const fn new(v: i64) -> Self {
        Self(v)
    }
}
impl From<i64> for StoredI64 {
#[inline]
fn from(value: i64) -> Self {
Self(value)
}
}
impl From<StoredI64> for i64 {
#[inline]
fn from(value: StoredI64) -> Self {
value.0
}
}
impl From<usize> for StoredI64 {
#[inline]
fn from(value: usize) -> Self {
Self(value as i64)
}
}
impl From<StoredI64> for usize {
#[inline]
fn from(value: StoredI64) -> Self {
value.0 as usize
}
}
impl CheckedSub<StoredI64> for StoredI64 {
fn checked_sub(self, rhs: Self) -> Option<Self> {
self.0.checked_sub(rhs.0).map(Self)
}
}
impl Div<usize> for StoredI64 {
type Output = Self;
fn div(self, rhs: usize) -> Self::Output {
Self(self.0 / rhs as i64)
}
}
impl Add for StoredI64 {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl AddAssign for StoredI64 {
fn add_assign(&mut self, rhs: Self) {
*self = *self + rhs
}
}
impl Sub for StoredI64 {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
impl SubAssign for StoredI64 {
fn sub_assign(&mut self, rhs: Self) {
*self = *self - rhs
}
}
impl From<f64> for StoredI64 {
#[inline]
fn from(value: f64) -> Self {
Self(value as i64)
}
}
impl From<StoredI64> for f64 {
#[inline]
fn from(value: StoredI64) -> Self {
value.0 as f64
}
}
impl PrintableIndex for StoredI64 {
fn to_string() -> &'static str {
"i64"
}
fn to_possible_strings() -> &'static [&'static str] {
&["i64"]
}
}
impl std::fmt::Display for StoredI64 {
    /// Renders the inner integer as decimal text.
    ///
    /// Formats through `itoa` (faster than the default integer `Display`
    /// path) and writes the resulting slice straight to the formatter.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The itoa buffer is a temporary that lives for the full statement,
        // so borrowing its output here is sound.
        f.write_str(itoa::Buffer::new().format(self.0))
    }
}
impl Formattable for StoredI64 {
    /// Appends the inner integer as decimal text to the CSV buffer.
    #[inline(always)]
    fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
        // itoa's buffer is a statement-scoped temporary; format + push in
        // one expression. push_str is infallible.
        f.push_str(itoa::Buffer::new().format(self.0));
        Ok(())
    }
}

View File

@@ -118,7 +118,8 @@ impl std::fmt::Display for StoredI8 {
impl Formattable for StoredI8 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -209,7 +209,8 @@ impl std::fmt::Display for StoredU16 {
impl Formattable for StoredU16 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -261,7 +261,8 @@ impl std::fmt::Display for StoredU32 {
impl Formattable for StoredU32 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -276,7 +276,8 @@ impl std::fmt::Display for StoredU64 {
impl Formattable for StoredU64 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -109,7 +109,8 @@ impl std::fmt::Display for StoredU8 {
impl Formattable for StoredU8 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -190,7 +190,8 @@ impl std::fmt::Display for Timestamp {
impl Formattable for Timestamp {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -171,7 +171,8 @@ impl std::fmt::Display for TxIndex {
impl Formattable for TxIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -132,7 +132,8 @@ impl std::fmt::Display for TxInIndex {
impl Formattable for TxInIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -133,7 +133,8 @@ impl std::fmt::Display for TxOutIndex {
impl Formattable for TxOutIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -66,7 +66,8 @@ impl std::fmt::Display for TxVersion {
impl Formattable for TxVersion {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -148,7 +148,8 @@ impl std::fmt::Display for TypeIndex {
impl Formattable for TypeIndex {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -99,7 +99,8 @@ impl std::fmt::Display for Vout {
impl Formattable for Vout {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -122,7 +122,8 @@ impl std::fmt::Display for VSize {
impl Formattable for VSize {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -146,7 +146,8 @@ impl std::fmt::Display for Week1 {
impl Formattable for Week1 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -138,7 +138,8 @@ impl std::fmt::Display for Weight {
impl Formattable for Weight {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -125,7 +125,8 @@ impl std::fmt::Display for Year {
impl Formattable for Year {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -143,7 +143,8 @@ impl std::fmt::Display for Year1 {
impl Formattable for Year1 {
#[inline(always)]
fn fmt_csv(&self, f: &mut String) -> std::fmt::Result {
use std::fmt::Write;
write!(f, "{}", self)
let mut buf = itoa::Buffer::new();
f.push_str(buf.format(self.0));
Ok(())
}
}

View File

@@ -2051,6 +2051,47 @@ function createRatioPattern(client, acc) {
};
}
/**
* @typedef {Object} GrossInvestedInvestorNegNetSupplyUnrealizedPattern
* @property {CentsUsdPattern} grossPnl
* @property {CentsUsdPattern} investedCapitalInLoss
* @property {MetricPattern18<CentsSats>} investedCapitalInLossRaw
* @property {CentsUsdPattern} investedCapitalInProfit
* @property {MetricPattern18<CentsSats>} investedCapitalInProfitRaw
* @property {MetricPattern18<CentsSquaredSats>} investorCapInLossRaw
* @property {MetricPattern18<CentsSquaredSats>} investorCapInProfitRaw
* @property {MetricPattern1<Dollars>} negUnrealizedLoss
* @property {CentsUsdPattern} netUnrealizedPnl
* @property {BtcCentsSatsUsdPattern} supplyInLoss
* @property {BtcCentsSatsUsdPattern} supplyInProfit
* @property {CentsUsdPattern} unrealizedLoss
* @property {CentsUsdPattern} unrealizedProfit
*/
/**
* Create a GrossInvestedInvestorNegNetSupplyUnrealizedPattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {GrossInvestedInvestorNegNetSupplyUnrealizedPattern}
*/
function createGrossInvestedInvestorNegNetSupplyUnrealizedPattern(client, acc) {
  // Shorthand constructors bound to this client + accumulated metric name;
  // each appends its metric suffix via _m exactly as before.
  const usd = (m) => createCentsUsdPattern(client, _m(acc, m));
  const raw = (m) => createMetricPattern18(client, _m(acc, m));
  const supply = (m) => createBtcCentsSatsUsdPattern(client, _m(acc, m));
  return {
    grossPnl: usd('unrealized_gross_pnl'),
    investedCapitalInLoss: usd('invested_capital_in_loss'),
    investedCapitalInLossRaw: raw('invested_capital_in_loss_raw'),
    investedCapitalInProfit: usd('invested_capital_in_profit'),
    investedCapitalInProfitRaw: raw('invested_capital_in_profit_raw'),
    investorCapInLossRaw: raw('investor_cap_in_loss_raw'),
    investorCapInProfitRaw: raw('investor_cap_in_profit_raw'),
    negUnrealizedLoss: createMetricPattern1(client, _m(acc, 'neg_unrealized_loss')),
    netUnrealizedPnl: usd('net_unrealized_pnl'),
    supplyInLoss: supply('supply_in_loss'),
    supplyInProfit: supply('supply_in_profit'),
    unrealizedLoss: usd('unrealized_loss'),
    unrealizedProfit: usd('unrealized_profit'),
  };
}
/**
* @typedef {Object} _10y1m1w1y2y3m3y4y5y6m6y8yPattern2
* @property {BpsPercentRatioPattern} _10y
@@ -2314,15 +2355,15 @@ function createAverageGainsLossesRsiStochPattern(client, acc) {
/**
 * @typedef {Object} AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern
 * @property {MetricPattern1<StoredU64>} all
 * @property {MetricPattern1<StoredU64>} p2a
 * @property {MetricPattern1<StoredU64>} p2pk33
 * @property {MetricPattern1<StoredU64>} p2pk65
 * @property {MetricPattern1<StoredU64>} p2pkh
 * @property {MetricPattern1<StoredU64>} p2sh
 * @property {MetricPattern1<StoredU64>} p2tr
 * @property {MetricPattern1<StoredU64>} p2wpkh
 * @property {MetricPattern1<StoredU64>} p2wsh
 */
/**
@@ -2333,15 +2374,15 @@ function createAverageGainsLossesRsiStochPattern(client, acc) {
*/
function createAllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern(client, acc) {
return {
all: createChangeCountPattern(client, acc),
p2a: createChangeCountPattern(client, _p('p2a', acc)),
p2pk33: createChangeCountPattern(client, _p('p2pk33', acc)),
p2pk65: createChangeCountPattern(client, _p('p2pk65', acc)),
p2pkh: createChangeCountPattern(client, _p('p2pkh', acc)),
p2sh: createChangeCountPattern(client, _p('p2sh', acc)),
p2tr: createChangeCountPattern(client, _p('p2tr', acc)),
p2wpkh: createChangeCountPattern(client, _p('p2wpkh', acc)),
p2wsh: createChangeCountPattern(client, _p('p2wsh', acc)),
all: createMetricPattern1(client, acc),
p2a: createMetricPattern1(client, _p('p2a', acc)),
p2pk33: createMetricPattern1(client, _p('p2pk33', acc)),
p2pk65: createMetricPattern1(client, _p('p2pk65', acc)),
p2pkh: createMetricPattern1(client, _p('p2pkh', acc)),
p2sh: createMetricPattern1(client, _p('p2sh', acc)),
p2tr: createMetricPattern1(client, _p('p2tr', acc)),
p2wpkh: createMetricPattern1(client, _p('p2wpkh', acc)),
p2wsh: createMetricPattern1(client, _p('p2wsh', acc)),
};
}
@@ -2378,39 +2419,6 @@ function createAverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2(client, acc) {
};
}
/**
* @typedef {Object} AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern
* @property {_1m1w1y24hPattern<BasisPoints16>} average
* @property {MetricPattern18<BasisPoints16>} height
* @property {_1m1w1y24hPattern<BasisPoints16>} max
* @property {_1m1w1y24hPattern<BasisPoints16>} median
* @property {_1m1w1y24hPattern<BasisPoints16>} min
* @property {_1m1w1y24hPattern<BasisPoints16>} pct10
* @property {_1m1w1y24hPattern<BasisPoints16>} pct25
* @property {_1m1w1y24hPattern<BasisPoints16>} pct75
* @property {_1m1w1y24hPattern<BasisPoints16>} pct90
*/
/**
* Create a AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern}
*/
function createAverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, acc) {
  // Every windowed statistic is built the same way; only the metric-name
  // suffix differs. Note the pct* fields map to shortened 'p10'/'p25'/...
  // suffixes in the underlying metric names.
  const windowed = (suffix) => create_1m1w1y24hPattern(client, _m(acc, suffix));
  return {
    average: windowed('average'),
    // `height` reuses the accumulated name directly (no suffix appended).
    height: createMetricPattern18(client, acc),
    max: windowed('max'),
    median: windowed('median'),
    min: windowed('min'),
    pct10: windowed('p10'),
    pct25: windowed('p25'),
    pct75: windowed('p75'),
    pct90: windowed('p90'),
  };
}
/**
* @typedef {Object} AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern
* @property {_1m1w1y24hPattern<StoredU64>} average
@@ -2576,7 +2584,7 @@ function create_10y2y3y4y5y6y8yPattern(client, acc) {
* @property {MvrvNegNetRealizedSentSoprValuePattern} realized
* @property {InvestedNetNuplSupplyUnrealizedPattern} relative
* @property {ChangeHalvedTotalPattern} supply
* @property {GreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern} unrealized
* @property {GrossInvestedInvestorNegNetSupplyUnrealizedPattern} unrealized
*/
/**
@@ -2593,7 +2601,7 @@ function createActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern(client
realized: createMvrvNegNetRealizedSentSoprValuePattern(client, acc),
relative: createInvestedNetNuplSupplyUnrealizedPattern(client, acc),
supply: createChangeHalvedTotalPattern(client, _m(acc, 'supply')),
unrealized: createGreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern(client, acc),
unrealized: createGrossInvestedInvestorNegNetSupplyUnrealizedPattern(client, acc),
};
}
@@ -3039,27 +3047,6 @@ function createBaseCumulativeSumPattern(client, acc) {
};
}
/**
* @typedef {Object} BpsPercentRatioPattern3
* @property {AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern} bps
* @property {MetricPattern1<StoredF32>} percent
* @property {MetricPattern1<StoredF32>} ratio
*/
/**
* Create a BpsPercentRatioPattern3 pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {BpsPercentRatioPattern3}
*/
function createBpsPercentRatioPattern3(client, acc) {
  // `percent` reuses the accumulated metric name as-is, while `bps` and
  // `ratio` each append their own suffix segment.
  const bps = createAverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'bps'));
  const percent = createMetricPattern1(client, acc);
  const ratio = createMetricPattern1(client, _m(acc, 'ratio'));
  return { bps, percent, ratio };
}
/**
* @typedef {Object} BpsPercentRatioPattern
* @property {MetricPattern1<BasisPoints16>} bps
@@ -3326,21 +3313,21 @@ function createCentsUsdPattern(client, acc) {
}
/**
* @typedef {Object} ChangeCountPattern
* @property {MetricPattern1<StoredF64>} change1m
* @property {MetricPattern1<StoredU64>} count
* @typedef {Object} ChangeRatePattern
* @property {_1m1w1y24hPattern<StoredU64>} change
* @property {_1m1w1y24hPattern2} rate
*/
/**
* Create a ChangeCountPattern pattern node
* Create a ChangeRatePattern pattern node
* @param {BrkClientBase} client
* @param {string} acc - Accumulated metric name
* @returns {ChangeCountPattern}
* @returns {ChangeRatePattern}
*/
function createChangeCountPattern(client, acc) {
function createChangeRatePattern(client, acc) {
return {
change1m: createMetricPattern1(client, _m(acc, 'change_1m')),
count: createMetricPattern1(client, acc),
change: create_1m1w1y24hPattern(client, _m(acc, 'change')),
rate: create_1m1w1y24hPattern2(client, _m(acc, 'rate')),
};
}
@@ -4721,9 +4708,9 @@ function create_2wPattern(client, acc) {
* @property {AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern} addrCount
* @property {AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern} emptyAddrCount
* @property {MetricsTree_Distribution_AddressActivity} addressActivity
* @property {MetricsTree_Distribution_TotalAddrCount} totalAddrCount
* @property {AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern} totalAddrCount
* @property {MetricsTree_Distribution_NewAddrCount} newAddrCount
* @property {MetricsTree_Distribution_GrowthRate} growthRate
* @property {MetricsTree_Distribution_Delta} delta
* @property {MetricPattern34<FundedAddressIndex>} fundedaddressindex
* @property {MetricPattern35<EmptyAddressIndex>} emptyaddressindex
*/
@@ -5074,19 +5061,6 @@ function create_2wPattern(client, acc) {
* @property {BothReactivatedReceivingSendingPattern} p2a
*/
/**
* @typedef {Object} MetricsTree_Distribution_TotalAddrCount
* @property {MetricPattern1<StoredU64>} all
* @property {MetricPattern1<StoredU64>} p2pk65
* @property {MetricPattern1<StoredU64>} p2pk33
* @property {MetricPattern1<StoredU64>} p2pkh
* @property {MetricPattern1<StoredU64>} p2sh
* @property {MetricPattern1<StoredU64>} p2wpkh
* @property {MetricPattern1<StoredU64>} p2wsh
* @property {MetricPattern1<StoredU64>} p2tr
* @property {MetricPattern1<StoredU64>} p2a
*/
/**
* @typedef {Object} MetricsTree_Distribution_NewAddrCount
* @property {AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern} all
@@ -5101,16 +5075,16 @@ function create_2wPattern(client, acc) {
*/
/**
* @typedef {Object} MetricsTree_Distribution_GrowthRate
* @property {BpsPercentRatioPattern3} all
* @property {BpsPercentRatioPattern3} p2pk65
* @property {BpsPercentRatioPattern3} p2pk33
* @property {BpsPercentRatioPattern3} p2pkh
* @property {BpsPercentRatioPattern3} p2sh
* @property {BpsPercentRatioPattern3} p2wpkh
* @property {BpsPercentRatioPattern3} p2wsh
* @property {BpsPercentRatioPattern3} p2tr
* @property {BpsPercentRatioPattern3} p2a
* @typedef {Object} MetricsTree_Distribution_Delta
* @property {ChangeRatePattern} all
* @property {ChangeRatePattern} p2pk65
* @property {ChangeRatePattern} p2pk33
* @property {ChangeRatePattern} p2pkh
* @property {ChangeRatePattern} p2sh
* @property {ChangeRatePattern} p2wpkh
* @property {ChangeRatePattern} p2wsh
* @property {ChangeRatePattern} p2tr
* @property {ChangeRatePattern} p2a
*/
/**
@@ -7270,17 +7244,7 @@ class BrkClient extends BrkClientBase {
p2tr: createBothReactivatedReceivingSendingPattern(this, 'p2tr_address_activity'),
p2a: createBothReactivatedReceivingSendingPattern(this, 'p2a_address_activity'),
},
totalAddrCount: {
all: createMetricPattern1(this, 'total_addr_count'),
p2pk65: createMetricPattern1(this, 'p2pk65_total_addr_count'),
p2pk33: createMetricPattern1(this, 'p2pk33_total_addr_count'),
p2pkh: createMetricPattern1(this, 'p2pkh_total_addr_count'),
p2sh: createMetricPattern1(this, 'p2sh_total_addr_count'),
p2wpkh: createMetricPattern1(this, 'p2wpkh_total_addr_count'),
p2wsh: createMetricPattern1(this, 'p2wsh_total_addr_count'),
p2tr: createMetricPattern1(this, 'p2tr_total_addr_count'),
p2a: createMetricPattern1(this, 'p2a_total_addr_count'),
},
totalAddrCount: createAllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern(this, 'total_addr_count'),
newAddrCount: {
all: createAverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(this, 'new_addr_count'),
p2pk65: createAverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(this, 'p2pk65_new_addr_count'),
@@ -7292,16 +7256,16 @@ class BrkClient extends BrkClientBase {
p2tr: createAverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(this, 'p2tr_new_addr_count'),
p2a: createAverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(this, 'p2a_new_addr_count'),
},
growthRate: {
all: createBpsPercentRatioPattern3(this, 'growth_rate'),
p2pk65: createBpsPercentRatioPattern3(this, 'p2pk65_growth_rate'),
p2pk33: createBpsPercentRatioPattern3(this, 'p2pk33_growth_rate'),
p2pkh: createBpsPercentRatioPattern3(this, 'p2pkh_growth_rate'),
p2sh: createBpsPercentRatioPattern3(this, 'p2sh_growth_rate'),
p2wpkh: createBpsPercentRatioPattern3(this, 'p2wpkh_growth_rate'),
p2wsh: createBpsPercentRatioPattern3(this, 'p2wsh_growth_rate'),
p2tr: createBpsPercentRatioPattern3(this, 'p2tr_growth_rate'),
p2a: createBpsPercentRatioPattern3(this, 'p2a_growth_rate'),
delta: {
all: createChangeRatePattern(this, 'addr_count'),
p2pk65: createChangeRatePattern(this, 'p2pk65_addr_count'),
p2pk33: createChangeRatePattern(this, 'p2pk33_addr_count'),
p2pkh: createChangeRatePattern(this, 'p2pkh_addr_count'),
p2sh: createChangeRatePattern(this, 'p2sh_addr_count'),
p2wpkh: createChangeRatePattern(this, 'p2wpkh_addr_count'),
p2wsh: createChangeRatePattern(this, 'p2wsh_addr_count'),
p2tr: createChangeRatePattern(this, 'p2tr_addr_count'),
p2a: createChangeRatePattern(this, 'p2a_addr_count'),
},
fundedaddressindex: createMetricPattern34(this, 'fundedaddressindex'),
emptyaddressindex: createMetricPattern35(this, 'emptyaddressindex'),

View File

@@ -2293,6 +2293,25 @@ class RatioPattern:
self.ratio_sma_1m: BpsRatioPattern = BpsRatioPattern(client, _m(acc, 'sma_1m'))
self.ratio_sma_1w: BpsRatioPattern = BpsRatioPattern(client, _m(acc, 'sma_1w'))
class GrossInvestedInvestorNegNetSupplyUnrealizedPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.gross_pnl: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'unrealized_gross_pnl'))
self.invested_capital_in_loss: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'invested_capital_in_loss'))
self.invested_capital_in_loss_raw: MetricPattern18[CentsSats] = MetricPattern18(client, _m(acc, 'invested_capital_in_loss_raw'))
self.invested_capital_in_profit: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'invested_capital_in_profit'))
self.invested_capital_in_profit_raw: MetricPattern18[CentsSats] = MetricPattern18(client, _m(acc, 'invested_capital_in_profit_raw'))
self.investor_cap_in_loss_raw: MetricPattern18[CentsSquaredSats] = MetricPattern18(client, _m(acc, 'investor_cap_in_loss_raw'))
self.investor_cap_in_profit_raw: MetricPattern18[CentsSquaredSats] = MetricPattern18(client, _m(acc, 'investor_cap_in_profit_raw'))
self.neg_unrealized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_unrealized_loss'))
self.net_unrealized_pnl: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'net_unrealized_pnl'))
self.supply_in_loss: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _m(acc, 'supply_in_loss'))
self.supply_in_profit: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _m(acc, 'supply_in_profit'))
self.unrealized_loss: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'unrealized_loss'))
self.unrealized_profit: CentsUsdPattern = CentsUsdPattern(client, _m(acc, 'unrealized_profit'))
class _10y1m1w1y2y3m3y4y5y6m6y8yPattern2:
"""Pattern struct for repeated tree structure."""
@@ -2418,15 +2437,15 @@ class AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern:
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.all: ChangeCountPattern = ChangeCountPattern(client, acc)
self.p2a: ChangeCountPattern = ChangeCountPattern(client, _p('p2a', acc))
self.p2pk33: ChangeCountPattern = ChangeCountPattern(client, _p('p2pk33', acc))
self.p2pk65: ChangeCountPattern = ChangeCountPattern(client, _p('p2pk65', acc))
self.p2pkh: ChangeCountPattern = ChangeCountPattern(client, _p('p2pkh', acc))
self.p2sh: ChangeCountPattern = ChangeCountPattern(client, _p('p2sh', acc))
self.p2tr: ChangeCountPattern = ChangeCountPattern(client, _p('p2tr', acc))
self.p2wpkh: ChangeCountPattern = ChangeCountPattern(client, _p('p2wpkh', acc))
self.p2wsh: ChangeCountPattern = ChangeCountPattern(client, _p('p2wsh', acc))
self.all: MetricPattern1[StoredU64] = MetricPattern1(client, acc)
self.p2a: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2a', acc))
self.p2pk33: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2pk33', acc))
self.p2pk65: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2pk65', acc))
self.p2pkh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2pkh', acc))
self.p2sh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2sh', acc))
self.p2tr: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2tr', acc))
self.p2wpkh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wpkh', acc))
self.p2wsh: MetricPattern1[StoredU64] = MetricPattern1(client, _p('p2wsh', acc))
class AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2:
"""Pattern struct for repeated tree structure."""
@@ -2443,21 +2462,6 @@ class AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2:
self.pct90: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _m(acc, 'p90'))
self.sum: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _m(acc, 'sum'))
class AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern:
    """Pattern struct for repeated tree structure."""

    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        def windowed(suffix: str):
            # All windowed statistics share one construction; only the
            # metric-name suffix differs ('p10'/'p25'/... for pct* fields).
            return _1m1w1y24hPattern(client, _m(acc, suffix))

        self.average: _1m1w1y24hPattern[BasisPoints16] = windowed('average')
        # `height` reuses the accumulated name directly (no suffix).
        self.height: MetricPattern18[BasisPoints16] = MetricPattern18(client, acc)
        self.max: _1m1w1y24hPattern[BasisPoints16] = windowed('max')
        self.median: _1m1w1y24hPattern[BasisPoints16] = windowed('median')
        self.min: _1m1w1y24hPattern[BasisPoints16] = windowed('min')
        self.pct10: _1m1w1y24hPattern[BasisPoints16] = windowed('p10')
        self.pct25: _1m1w1y24hPattern[BasisPoints16] = windowed('p25')
        self.pct75: _1m1w1y24hPattern[BasisPoints16] = windowed('p75')
        self.pct90: _1m1w1y24hPattern[BasisPoints16] = windowed('p90')
class AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern:
"""Pattern struct for repeated tree structure."""
@@ -2539,7 +2543,7 @@ class ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern:
self.realized: MvrvNegNetRealizedSentSoprValuePattern = MvrvNegNetRealizedSentSoprValuePattern(client, acc)
self.relative: InvestedNetNuplSupplyUnrealizedPattern = InvestedNetNuplSupplyUnrealizedPattern(client, acc)
self.supply: ChangeHalvedTotalPattern = ChangeHalvedTotalPattern(client, _m(acc, 'supply'))
self.unrealized: GreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern = GreedGrossInvestedInvestorNegNetPainSupplyUnrealizedPattern(client, acc)
self.unrealized: GrossInvestedInvestorNegNetSupplyUnrealizedPattern = GrossInvestedInvestorNegNetSupplyUnrealizedPattern(client, acc)
class MvrvRealizedPattern:
"""Pattern struct for repeated tree structure."""
@@ -2733,15 +2737,6 @@ class BaseCumulativeSumPattern:
self.cumulative: BtcCentsSatsUsdPattern = BtcCentsSatsUsdPattern(client, _m(acc, 'cumulative'))
self.sum: _1m1w1y24hPattern5 = _1m1w1y24hPattern5(client, _m(acc, 'sum'))
class BpsPercentRatioPattern3:
    """Pattern struct for repeated tree structure."""

    def __init__(self, client: BrkClientBase, acc: str):
        """Create pattern node with accumulated metric name."""
        # `percent` reuses the accumulated name as-is; `bps` and `ratio`
        # each append their own suffix segment.
        self.bps: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern = (
            AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'bps'))
        )
        self.percent: MetricPattern1[StoredF32] = MetricPattern1(client, acc)
        self.ratio: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'ratio'))
class BpsPercentRatioPattern:
"""Pattern struct for repeated tree structure."""
@@ -2853,13 +2848,13 @@ class CentsUsdPattern:
self.cents: MetricPattern1[Cents] = MetricPattern1(client, _m(acc, 'cents'))
self.usd: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd'))
class ChangeCountPattern:
class ChangeRatePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.change_1m: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'change_1m'))
self.count: MetricPattern1[StoredU64] = MetricPattern1(client, acc)
self.change: _1m1w1y24hPattern[StoredU64] = _1m1w1y24hPattern(client, _m(acc, 'change'))
self.rate: _1m1w1y24hPattern2 = _1m1w1y24hPattern2(client, _m(acc, 'rate'))
class MaxMinPattern:
"""Pattern struct for repeated tree structure."""
@@ -4590,20 +4585,6 @@ class MetricsTree_Distribution_AddressActivity:
self.p2tr: BothReactivatedReceivingSendingPattern = BothReactivatedReceivingSendingPattern(client, 'p2tr_address_activity')
self.p2a: BothReactivatedReceivingSendingPattern = BothReactivatedReceivingSendingPattern(client, 'p2a_address_activity')
class MetricsTree_Distribution_TotalAddrCount:
    """Metrics tree node."""

    def __init__(self, client: BrkClientBase, base_path: str = ''):
        def count_metric(metric_name: str):
            # Each per-script-type counter is the same metric pattern;
            # only the metric name differs.
            return MetricPattern1(client, metric_name)

        self.all: MetricPattern1[StoredU64] = count_metric('total_addr_count')
        self.p2pk65: MetricPattern1[StoredU64] = count_metric('p2pk65_total_addr_count')
        self.p2pk33: MetricPattern1[StoredU64] = count_metric('p2pk33_total_addr_count')
        self.p2pkh: MetricPattern1[StoredU64] = count_metric('p2pkh_total_addr_count')
        self.p2sh: MetricPattern1[StoredU64] = count_metric('p2sh_total_addr_count')
        self.p2wpkh: MetricPattern1[StoredU64] = count_metric('p2wpkh_total_addr_count')
        self.p2wsh: MetricPattern1[StoredU64] = count_metric('p2wsh_total_addr_count')
        self.p2tr: MetricPattern1[StoredU64] = count_metric('p2tr_total_addr_count')
        self.p2a: MetricPattern1[StoredU64] = count_metric('p2a_total_addr_count')
class MetricsTree_Distribution_NewAddrCount:
"""Metrics tree node."""
@@ -4618,19 +4599,19 @@ class MetricsTree_Distribution_NewAddrCount:
self.p2tr: AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, 'p2tr_new_addr_count')
self.p2a: AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, 'p2a_new_addr_count')
class MetricsTree_Distribution_GrowthRate:
class MetricsTree_Distribution_Delta:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.all: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'growth_rate')
self.p2pk65: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2pk65_growth_rate')
self.p2pk33: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2pk33_growth_rate')
self.p2pkh: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2pkh_growth_rate')
self.p2sh: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2sh_growth_rate')
self.p2wpkh: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2wpkh_growth_rate')
self.p2wsh: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2wsh_growth_rate')
self.p2tr: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2tr_growth_rate')
self.p2a: BpsPercentRatioPattern3 = BpsPercentRatioPattern3(client, 'p2a_growth_rate')
self.all: ChangeRatePattern = ChangeRatePattern(client, 'addr_count')
self.p2pk65: ChangeRatePattern = ChangeRatePattern(client, 'p2pk65_addr_count')
self.p2pk33: ChangeRatePattern = ChangeRatePattern(client, 'p2pk33_addr_count')
self.p2pkh: ChangeRatePattern = ChangeRatePattern(client, 'p2pkh_addr_count')
self.p2sh: ChangeRatePattern = ChangeRatePattern(client, 'p2sh_addr_count')
self.p2wpkh: ChangeRatePattern = ChangeRatePattern(client, 'p2wpkh_addr_count')
self.p2wsh: ChangeRatePattern = ChangeRatePattern(client, 'p2wsh_addr_count')
self.p2tr: ChangeRatePattern = ChangeRatePattern(client, 'p2tr_addr_count')
self.p2a: ChangeRatePattern = ChangeRatePattern(client, 'p2a_addr_count')
class MetricsTree_Distribution:
"""Metrics tree node."""
@@ -4644,9 +4625,9 @@ class MetricsTree_Distribution:
self.addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern = AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern(client, 'addr_count')
self.empty_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern = AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern(client, 'empty_addr_count')
self.address_activity: MetricsTree_Distribution_AddressActivity = MetricsTree_Distribution_AddressActivity(client)
self.total_addr_count: MetricsTree_Distribution_TotalAddrCount = MetricsTree_Distribution_TotalAddrCount(client)
self.total_addr_count: AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern = AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern(client, 'total_addr_count')
self.new_addr_count: MetricsTree_Distribution_NewAddrCount = MetricsTree_Distribution_NewAddrCount(client)
self.growth_rate: MetricsTree_Distribution_GrowthRate = MetricsTree_Distribution_GrowthRate(client)
self.delta: MetricsTree_Distribution_Delta = MetricsTree_Distribution_Delta(client)
self.fundedaddressindex: MetricPattern34[FundedAddressIndex] = MetricPattern34(client, 'fundedaddressindex')
self.emptyaddressindex: MetricPattern35[EmptyAddressIndex] = MetricPattern35(client, 'emptyaddressindex')

View File

@@ -0,0 +1,188 @@
#!/usr/bin/env python3
"""Determine which mining pools qualify as "major" based on dominance.
A pool is major if ANY window's dominance exceeds its threshold:
all-time >= 1%, 1y >= 1%, 1m >= 0.75%, 1w >= 0.5%
Computes dominance from blocks_mined_cumulative / block_count_cumulative,
so it works for ALL pools (major and minor alike).
Usage:
python3 scripts/pool_major_threshold.py
python3 scripts/pool_major_threshold.py --all-time 5 --1y 1 --1m 0.75 --1w 0.5
"""
import argparse
import json
import re
import urllib.request
import concurrent.futures
from pathlib import Path
API_BASE = "https://bitview.space/api/metric"
POOLSLUG_PATH = Path(__file__).resolve().parent.parent / "crates/brk_types/src/poolslug.rs"
HEADERS = {"User-Agent": "pool-threshold-script"}
WINDOWS = {"1w": 7, "1m": 30, "1y": 365}
def parse_pool_variants(src=None):
    """Return [(VariantName, lowercase_slug), ...] from the PoolSlug enum.

    Args:
        src: Optional Rust source text to parse. When None (the default,
            preserving the original call signature), the file at
            POOLSLUG_PATH is read from disk.

    Raises:
        RuntimeError: if no ``pub enum PoolSlug { ... }`` block is found.
    """
    if src is None:
        src = POOLSLUG_PATH.read_text()
    # DOTALL lets `.` span lines; MULTILINE anchors `^}` to the closing
    # brace at the start of a line (assumes the enum body contains no
    # nested braces at column 0).
    m = re.search(r"pub enum PoolSlug\s*\{(.*?)^\}", src, re.DOTALL | re.MULTILINE)
    if not m:
        raise RuntimeError("Could not find PoolSlug enum")
    variants = []
    for line in m.group(1).splitlines():
        line = line.strip().rstrip(",")
        # Skip blanks, attributes (#[...]) and comments (// and ///).
        if not line or line.startswith("#[") or line.startswith("//"):
            continue
        # Variant name is everything before a tuple payload `(..)`, a
        # struct payload `{..}`, or an explicit discriminant `= N`.
        name = line.split("(")[0].split("{")[0].split("=")[0].strip()
        if not name or not name[0].isupper():
            continue
        # Dummy* variants are placeholders, not real pools.
        if name.startswith("Dummy"):
            continue
        variants.append((name, name.lower()))
    return variants
def fetch_json(url):
    """Fetch `url` and decode the JSON body; return None on any failure.

    Best-effort by design: network errors, timeouts, and malformed JSON
    are all swallowed so callers can simply skip missing series.
    """
    try:
        request = urllib.request.Request(url, headers=HEADERS)
        with urllib.request.urlopen(request, timeout=15) as response:
            payload = response.read()
        return json.loads(payload)
    except Exception:
        return None
def fetch_cumulative(slug, days):
    """Best-effort fetch of one pool's cumulative blocks-mined series."""
    endpoint = f"{API_BASE}/{slug}_blocks_mined_cumulative/dateindex?from=-{days}"
    return fetch_json(endpoint)
def fetch_total_cumulative(days):
    """Best-effort fetch of the chain-wide cumulative block count series."""
    endpoint = f"{API_BASE}/block_count_cumulative/dateindex?from=-{days}"
    return fetch_json(endpoint)
def is_major(doms, thresholds):
    """Return True when at least one window's dominance meets its threshold.

    Windows missing from `doms` or whose value is None never qualify.
    """
    candidates = ((doms.get(label), limit) for label, limit in thresholds.items())
    return any(value is not None and value >= limit for value, limit in candidates)
def main():
    # Fetch per-pool cumulative block counts, compute dominance per window,
    # print a ranked table, and emit ready-to-paste Rust match arms.
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    # Option strings like "--1y" are not valid Python identifiers, so each
    # window flag needs an explicit dest.
    parser.add_argument("--all-time", type=float, default=1.0, help="All-time dominance threshold %% (default: 1.0)")
    parser.add_argument("--1y", type=float, default=1.0, dest="t1y", help="1y rolling dominance threshold %% (default: 1.0)")
    parser.add_argument("--1m", type=float, default=0.75, dest="t1m", help="1m rolling dominance threshold %% (default: 0.75)")
    parser.add_argument("--1w", type=float, default=0.5, dest="t1w", help="1w rolling dominance threshold %% (default: 0.5)")
    args = parser.parse_args()
    # window label -> dominance threshold in percent
    thresholds = {
        "all-time": args.all_time,
        "1y": args.t1y,
        "1m": args.t1m,
        "1w": args.t1w,
    }
    variants = parse_pool_variants()
    print(f"Found {len(variants)} pool variants in {POOLSLUG_PATH.name}")
    print(f"Thresholds: {', '.join(f'{k}>={v}%' for k, v in thresholds.items())}")
    # +1 day so a full `days`-wide cumulative difference can be taken below.
    max_days = max(WINDOWS.values()) + 1
    print(f"Fetching blocks_mined_cumulative for all pools...")
    total_data = fetch_total_cumulative(max_days)
    if not total_data:
        print("ERROR: Could not fetch block_count_cumulative")
        return
    total_cum = total_data["data"]
    pool_cum = {}
    # Fetch all pool series concurrently; fetch_cumulative returns None on
    # failure, and pools with no data are silently dropped.
    with concurrent.futures.ThreadPoolExecutor(max_workers=20) as ex:
        futures = {ex.submit(fetch_cumulative, slug, max_days): (name, slug)
                   for name, slug in variants}
        for fut in concurrent.futures.as_completed(futures):
            name, slug = futures[fut]
            result = fut.result()
            if result and result.get("data"):
                pool_cum[name] = {"slug": slug, "data": result["data"]}
    results = []
    for name, info in pool_cum.items():
        pool_data = info["data"]
        n = len(pool_data)
        doms = {}  # window label -> dominance % (None = not enough data)
        if n > 0 and len(total_cum) > 0:
            # NOTE(review): assumes pool_data and total_cum end on the same
            # date index — TODO confirm the API aligns series tails.
            doms["all-time"] = pool_data[-1] / total_cum[-1] * 100 if total_cum[-1] > 0 else 0
        for label, days in WINDOWS.items():
            if n > days and len(total_cum) > days:
                # Rolling-window dominance: blocks mined by the pool in the
                # window over total blocks in the window, via differences
                # of the cumulative series.
                pool_diff = pool_data[-1] - pool_data[-(days + 1)]
                total_diff = total_cum[-1] - total_cum[-(days + 1)]
                doms[label] = pool_diff / total_diff * 100 if total_diff > 0 else 0
            else:
                doms[label] = None
        values = [v for v in doms.values() if v is not None]
        max_dom = max(values) if values else None
        major = is_major(doms, thresholds)
        results.append((name, info["slug"], doms, max_dom, major))
    # Highest observed dominance first; None sorts last via `or 0`.
    results.sort(key=lambda x: -(x[3] or 0))
    def fmt(v):
        # Fixed-width table cell.
        # NOTE(review): leading whitespace in the N/A literal (and in the
        # generated-Rust strings below) may have been collapsed by the diff
        # viewer — confirm intended padding against the original file.
        return f"{v:8.4f}%" if v is not None else " N/A"
    header = f"{'Pool':<30} {'all-time':>9} {'1w':>9} {'1m':>9} {'1y':>9} Major?"
    thr_line = f"{'threshold:':<30} {'>=' + str(thresholds['all-time']) + '%':>9} {'>=' + str(thresholds['1w']) + '%':>9} {'>=' + str(thresholds['1m']) + '%':>9} {'>=' + str(thresholds['1y']) + '%':>9}"
    print(f"\n{header}")
    print(thr_line)
    print("-" * len(header))
    for name, slug, doms, max_dom, major in results:
        at = fmt(doms.get("all-time"))
        w1w = fmt(doms.get("1w"))
        w1m = fmt(doms.get("1m"))
        w1y = fmt(doms.get("1y"))
        marker = "***" if major else ""
        print(f"{name:<30} {at} {w1w} {w1m} {w1y} {marker}")
    major_list = [(n, s, d, m, mj) for n, s, d, m, mj in results if mj]
    print(f"\n--- {len(major_list)} major pools ---")
    print(f"\n--- Qualifying windows ---")
    for name, slug, doms, max_dom, _ in major_list:
        # List every window whose dominance met its threshold.
        qualifying = []
        for label, thresh in thresholds.items():
            v = doms.get(label)
            if v is not None and v >= thresh:
                qualifying.append(f"{label}={v:.2f}%")
        print(f" {name:<30} ({', '.join(qualifying)})")
    # "Unknown" is always included so unattributed blocks keep full metrics.
    major_names = sorted(set(["Unknown"] + [n for n, _, _, _, _ in major_list]))
    thresholds_str = ", ".join(f"{k}>={v}%" for k, v in thresholds.items())
    # Emit the Rust is_major() body for crates/brk_types (copy-paste target).
    print(f"\n--- Rust is_major() match arms ---\n")
    print(f" /// Pools with dominance above per-window thresholds get full metrics.")
    print(f" /// Thresholds: {thresholds_str}.")
    print(f" /// Generated by `scripts/pool_major_threshold.py`.")
    print(f" pub fn is_major(&self) -> bool {{")
    print(f" matches!(")
    print(f" self,")
    for i, name in enumerate(major_names):
        if i == 0:
            print(f" Self::{name}", end="")
        else:
            print(f"\n | Self::{name}", end="")
    print()
    print(f" )")
    print(f" }}")


if __name__ == "__main__":
    main()