global: snapshot

This commit is contained in:
nym21
2026-02-27 18:48:37 +01:00
parent 6845ad409b
commit d5ec291579
62 changed files with 1960 additions and 1449 deletions

View File

@@ -18,9 +18,9 @@ fn main() -> brk_client::Result<()> {
let price_close = client
.metrics()
.prices
.usd
.split
.close
.usd
.by
.day1()
.last(3)

File diff suppressed because it is too large. (Load Diff)

View File

@@ -36,7 +36,7 @@ impl Vecs {
self.thermo_cap.height.compute_transform(
starting_indexes.height,
&mining.rewards.subsidy.usd.cumulative.height,
&mining.rewards.subsidy.cumulative.usd.height,
|(i, v, ..)| (i, v),
exit,
)?;

View File

@@ -30,8 +30,8 @@ use super::{
vecs::Vecs,
},
BIP30_DUPLICATE_HEIGHT_1, BIP30_DUPLICATE_HEIGHT_2, BIP30_ORIGINAL_HEIGHT_1,
BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, TxInReaders, TxOutReaders,
IndexToTxIndexBuf, VecsReaders,
BIP30_ORIGINAL_HEIGHT_2, ComputeContext, FLUSH_INTERVAL, IndexToTxIndexBuf, TxInReaders,
TxOutReaders, VecsReaders,
};
/// Process all blocks from starting_height to last_height.
@@ -278,7 +278,7 @@ pub(crate) fn process_blocks(
let outputs_handle = scope.spawn(|| {
// Process outputs (receive)
process_outputs(
&txoutindex_to_txindex,
txoutindex_to_txindex,
&txoutdata_vec,
&first_addressindexes,
&cache,

View File

@@ -82,6 +82,7 @@ impl ActivityMetrics {
/// Get minimum length across height-indexed vectors.
pub(crate) fn min_len(&self) -> usize {
self.sent
.base
.sats
.height
.len()
@@ -97,7 +98,7 @@ impl ActivityMetrics {
satblocks_destroyed: Sats,
satdays_destroyed: Sats,
) -> Result<()> {
self.sent.sats.height.truncate_push(height, sent)?;
self.sent.base.sats.height.truncate_push(height, sent)?;
self.satblocks_destroyed
.truncate_push(height, satblocks_destroyed)?;
self.satdays_destroyed
@@ -108,7 +109,7 @@ impl ActivityMetrics {
/// Returns a parallel iterator over all vecs for parallel writing.
pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.sent.sats.height as &mut dyn AnyStoredVec,
&mut self.sent.base.sats.height as &mut dyn AnyStoredVec,
&mut self.satblocks_destroyed as &mut dyn AnyStoredVec,
&mut self.satdays_destroyed as &mut dyn AnyStoredVec,
]
@@ -128,11 +129,11 @@ impl ActivityMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.sent.sats.height.compute_sum_of_others(
self.sent.base.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.sent.sats.height)
.map(|v| &v.sent.base.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -168,8 +169,8 @@ impl ActivityMetrics {
self.sent_14d_ema.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_2w_ago,
&self.sent.sats.height,
&self.sent.usd.height,
&self.sent.base.sats.height,
&self.sent.base.usd.height,
exit,
)?;

View File

@@ -582,8 +582,8 @@ impl RealizedBase {
.min(self.loss_value_created.height.len())
.min(self.loss_value_destroyed.height.len())
.min(self.peak_regret.height.len())
.min(self.sent_in_profit.sats.height.len())
.min(self.sent_in_loss.sats.height.len())
.min(self.sent_in_profit.base.sats.height.len())
.min(self.sent_in_loss.base.sats.height.len())
}
/// Push realized state values to height-indexed vectors.
@@ -619,10 +619,12 @@ impl RealizedBase {
.height
.truncate_push(height, state.peak_regret().to_dollars())?;
self.sent_in_profit
.base
.sats
.height
.truncate_push(height, state.sent_in_profit())?;
self.sent_in_loss
.base
.sats
.height
.truncate_push(height, state.sent_in_loss())?;
@@ -644,8 +646,8 @@ impl RealizedBase {
&mut self.loss_value_created.height,
&mut self.loss_value_destroyed.height,
&mut self.peak_regret.height,
&mut self.sent_in_profit.sats.height,
&mut self.sent_in_loss.sats.height,
&mut self.sent_in_profit.base.sats.height,
&mut self.sent_in_loss.base.sats.height,
]
}
@@ -778,19 +780,19 @@ impl RealizedBase {
.collect::<Vec<_>>(),
exit,
)?;
self.sent_in_profit.sats.height.compute_sum_of_others(
self.sent_in_profit.base.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.sent_in_profit.sats.height)
.map(|v| &v.sent_in_profit.base.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.sent_in_loss.sats.height.compute_sum_of_others(
self.sent_in_loss.base.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.sent_in_loss.sats.height)
.map(|v| &v.sent_in_loss.base.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -1045,15 +1047,15 @@ impl RealizedBase {
self.sent_in_profit_14d_ema.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_2w_ago,
&self.sent_in_profit.sats.height,
&self.sent_in_profit.usd.height,
&self.sent_in_profit.base.sats.height,
&self.sent_in_profit.base.usd.height,
exit,
)?;
self.sent_in_loss_14d_ema.compute_rolling_average(
starting_indexes.height,
&blocks.count.height_2w_ago,
&self.sent_in_loss.sats.height,
&self.sent_in_loss.usd.height,
&self.sent_in_loss.base.sats.height,
&self.sent_in_loss.base.usd.height,
exit,
)?;

View File

@@ -66,8 +66,8 @@ impl SupplyMetrics {
/// Returns a parallel iterator over all vecs for parallel writing.
pub(crate) fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.total.sats.height as &mut dyn AnyStoredVec,
&mut self.total.usd.height as &mut dyn AnyStoredVec,
&mut self.total.base.sats.height as &mut dyn AnyStoredVec,
&mut self.total.base.usd.height as &mut dyn AnyStoredVec,
]
.into_par_iter()
}

View File

@@ -236,10 +236,10 @@ impl UnrealizedBase {
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
vec![
&mut self.supply_in_profit.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.usd.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.usd.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.base.usd.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.sats.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.base.usd.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.height,
&mut self.unrealized_loss.height,
&mut self.invested_capital_in_profit.height,

View File

@@ -9,11 +9,11 @@ pub struct DistributionStats<A, B = A, C = A, D = A, E = A, F = A, G = A, H = A>
pub average: A,
pub min: B,
pub max: C,
pub p10: D,
pub p25: E,
pub pct10: D,
pub pct25: E,
pub median: F,
pub p75: G,
pub p90: H,
pub pct75: G,
pub pct90: H,
}
impl<A> DistributionStats<A> {
@@ -22,11 +22,11 @@ impl<A> DistributionStats<A> {
f(&mut self.average)?;
f(&mut self.min)?;
f(&mut self.max)?;
f(&mut self.p10)?;
f(&mut self.p25)?;
f(&mut self.pct10)?;
f(&mut self.pct25)?;
f(&mut self.median)?;
f(&mut self.p75)?;
f(&mut self.p90)?;
f(&mut self.pct75)?;
f(&mut self.pct90)?;
Ok(())
}
@@ -35,11 +35,11 @@ impl<A> DistributionStats<A> {
f(&self.average)
.min(f(&self.min))
.min(f(&self.max))
.min(f(&self.p10))
.min(f(&self.p25))
.min(f(&self.pct10))
.min(f(&self.pct25))
.min(f(&self.median))
.min(f(&self.p75))
.min(f(&self.p90))
.min(f(&self.pct75))
.min(f(&self.pct90))
}
}

View File

@@ -0,0 +1,46 @@
mod rolling_full;
mod rolling_sum;
mod windows;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Sats, Version};
use vecdb::{Database, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
};
pub use rolling_full::*;
pub use rolling_sum::*;
/// One metric exposed in its three unit representations.
///
/// `sats` and `usd` are stored eagerly; `btc` is a lazy transform of `sats`
/// (built via `SatsToBitcoin` in `forced_import`) and carries no storage mode.
#[derive(Traversable)]
pub struct ByUnit<M: StorageMode = Rw> {
    // Stored sats values, height-indexed (eager).
    pub sats: ComputedFromHeightLast<Sats, M>,
    // Lazy bitcoin view derived from `sats` — computed on read, not stored.
    pub btc: LazyFromHeightLast<Bitcoin, Sats>,
    // Stored usd values, height-indexed (eager).
    pub usd: ComputedFromHeightLast<Dollars, M>,
}
impl ByUnit {
    /// Opens (or creates) the stored sats/usd vectors for `name` and wires up
    /// the lazy btc view derived from the sats height vector.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Stored sats vector lives under the bare metric name.
        let sats = ComputedFromHeightLast::forced_import(db, name, version, indexes)?;

        // Lazy btc view reads the sats height vector through a cheap clone.
        let btc_name = format!("{name}_btc");
        let sats_reader = sats.height.read_only_boxed_clone();
        let btc =
            LazyFromHeightLast::from_computed::<SatsToBitcoin>(&btc_name, version, sats_reader, &sats);

        // Stored usd vector uses the `_usd` suffix.
        let usd_name = format!("{name}_usd");
        let usd = ComputedFromHeightLast::forced_import(db, &usd_name, version, indexes)?;

        Ok(Self { sats, btc, usd })
    }
}

View File

@@ -0,0 +1,116 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ByUnit, DistributionStats, WindowStarts, Windows},
traits::compute_rolling_distribution_from_starts,
};
/// One window slot: sum + 8 distribution stats, each a ByUnit.
///
/// Tree: `sum.sats.height`, `average.sats.height`, etc.
#[derive(Traversable)]
pub struct RollingFullSlot<M: StorageMode = Rw> {
    // Rolling sum over the window, in each unit.
    pub sum: ByUnit<M>,
    // Average/min/max/percentile stats; flattened so they sit beside `sum`
    // in the traversal tree instead of under a `distribution` node.
    #[traversable(flatten)]
    pub distribution: DistributionStats<ByUnit<M>>,
}
impl RollingFullSlot {
    /// Opens (or creates) every stored vector backing this window slot.
    ///
    /// NOTE(review): the `pct10`/`pct25`/`pct75`/`pct90` struct fields keep
    /// the legacy `_p10`/`_p25`/`_p75`/`_p90` on-disk name suffixes — renaming
    /// the suffixes would orphan existing data; confirm that is intentional.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        Ok(Self {
            sum: ByUnit::forced_import(db, &format!("{name}_sum"), version, indexes)?,
            distribution: DistributionStats {
                average: ByUnit::forced_import(db, &format!("{name}_average"), version, indexes)?,
                min: ByUnit::forced_import(db, &format!("{name}_min"), version, indexes)?,
                max: ByUnit::forced_import(db, &format!("{name}_max"), version, indexes)?,
                pct10: ByUnit::forced_import(db, &format!("{name}_p10"), version, indexes)?,
                pct25: ByUnit::forced_import(db, &format!("{name}_p25"), version, indexes)?,
                median: ByUnit::forced_import(db, &format!("{name}_median"), version, indexes)?,
                pct75: ByUnit::forced_import(db, &format!("{name}_p75"), version, indexes)?,
                pct90: ByUnit::forced_import(db, &format!("{name}_p90"), version, indexes)?,
            },
        })
    }

    /// Recomputes the rolling sum and the eight distribution stats for this
    /// slot, in both sats and usd, from `max_from` onward.
    ///
    /// `starts` presumably maps each height to its window-start height —
    /// confirm against `WindowStarts`. The lazy `btc` views in each `ByUnit`
    /// need no computation here.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        starts: &impl ReadableVec<Height, Height>,
        sats_source: &impl ReadableVec<Height, Sats>,
        usd_source: &impl ReadableVec<Height, Dollars>,
        exit: &Exit,
    ) -> Result<()> {
        // Rolling sums first; independent of the distribution passes below.
        self.sum.sats.height.compute_rolling_sum(max_from, starts, sats_source, exit)?;
        self.sum.usd.height.compute_rolling_sum(max_from, starts, usd_source, exit)?;
        let d = &mut self.distribution;
        // One distribution pass per unit fills all eight stat vectors at once.
        compute_rolling_distribution_from_starts(
            max_from, starts, sats_source,
            &mut d.average.sats.height, &mut d.min.sats.height,
            &mut d.max.sats.height, &mut d.pct10.sats.height,
            &mut d.pct25.sats.height, &mut d.median.sats.height,
            &mut d.pct75.sats.height, &mut d.pct90.sats.height, exit,
        )?;
        compute_rolling_distribution_from_starts(
            max_from, starts, usd_source,
            &mut d.average.usd.height, &mut d.min.usd.height,
            &mut d.max.usd.height, &mut d.pct10.usd.height,
            &mut d.pct25.usd.height, &mut d.median.usd.height,
            &mut d.pct75.usd.height, &mut d.pct90.usd.height, exit,
        )?;
        Ok(())
    }
}
/// Rolling sum + distribution across 4 windows, window-first.
///
/// Tree: `_24h.sum.sats.height`, `_24h.average.sats.height`, etc.
///
/// Transparent newtype over `Windows<RollingFullSlot>`; derefs to the inner
/// windows so callers can address slots directly.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingFullByUnit<M: StorageMode = Rw>(pub Windows<RollingFullSlot<M>>);
// Local schema version, added onto the caller-supplied version.
const VERSION: Version = Version::ZERO;

impl RollingFullByUnit {
    /// Opens (or creates) one `RollingFullSlot` per window, suffixing the
    /// metric name with the window label.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        // One helper instead of four near-identical call sites.
        let slot = |suffix: &str| {
            RollingFullSlot::forced_import(db, &format!("{name}{suffix}"), v, indexes)
        };
        Ok(Self(Windows {
            _24h: slot("_24h")?,
            _7d: slot("_7d")?,
            _30d: slot("_30d")?,
            _1y: slot("_1y")?,
        }))
    }

    /// Runs `RollingFullSlot::compute` on every window, pairing each slot
    /// with its matching window-start vector.
    pub(crate) fn compute(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        sats_source: &impl ReadableVec<Height, Sats>,
        usd_source: &impl ReadableVec<Height, Dollars>,
        exit: &Exit,
    ) -> Result<()> {
        let slots = self.0.as_mut_array();
        for (slot, window_starts) in slots.into_iter().zip(windows.as_array()) {
            slot.compute(max_from, window_starts, sats_source, usd_source, exit)?;
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,46 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ByUnit, WindowStarts, Windows},
};
/// Rolling sum only, window-first then unit.
///
/// Tree: `_24h.sats.height`, `_24h.btc.height`, etc.
///
/// Transparent newtype over `Windows<ByUnit>`; the lighter sibling of
/// `RollingFullByUnit` (no distribution stats).
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct RollingSumByUnit<M: StorageMode = Rw>(pub Windows<ByUnit<M>>);

// Local schema version, added onto the caller-supplied version in `forced_import`.
const VERSION: Version = Version::ZERO;
impl RollingSumByUnit {
    /// Opens (or creates) the `_sum`-suffixed per-window vectors.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let sum_name = format!("{name}_sum");
        let windows = Windows::<ByUnit>::forced_import(db, &sum_name, v, indexes)?;
        Ok(Self(windows))
    }

    /// Recomputes the rolling sats/usd sums for each window from `max_from`.
    pub(crate) fn compute_rolling_sum(
        &mut self,
        max_from: Height,
        windows: &WindowStarts<'_>,
        sats_source: &impl ReadableVec<Height, Sats>,
        usd_source: &impl ReadableVec<Height, Dollars>,
        exit: &Exit,
    ) -> Result<()> {
        // Pair every per-window ByUnit with its window-start vector.
        for (unit, window_starts) in self.0.as_mut_array().into_iter().zip(windows.as_array()) {
            unit.sats
                .height
                .compute_rolling_sum(max_from, window_starts, sats_source, exit)?;
            unit.usd
                .height
                .compute_rolling_sum(max_from, window_starts, usd_source, exit)?;
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,24 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::Database;
use crate::{
indexes,
internal::{ByUnit, Windows},
};
impl Windows<ByUnit> {
    /// Opens (or creates) a `ByUnit` per window, suffixing `name` with the
    /// window label (`_24h`, `_7d`, `_30d`, `_1y`).
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        // Single helper so the four window imports stay in lockstep.
        let import = |suffix: &str| {
            ByUnit::forced_import(db, &format!("{name}{suffix}"), version, indexes)
        };
        Ok(Self {
            _24h: import("_24h")?,
            _7d: import("_7d")?,
            _30d: import("_30d")?,
            _1y: import("_1y")?,
        })
    }
}

View File

@@ -23,6 +23,7 @@ where
{
pub height: M::Stored<EagerVec<PcoVec<Height, T>>>,
pub cumulative: ComputedFromHeightLast<T, M>,
#[traversable(flatten)]
pub rolling: RollingFull<T, M>,
}

View File

@@ -1,3 +1,4 @@
mod by_unit;
mod constant;
mod cumulative;
mod cumulative_rolling_full;
@@ -21,6 +22,7 @@ mod value_lazy_computed_cumulative;
mod value_lazy_last;
mod value_sum_cumulative;
pub use by_unit::*;
pub use constant::*;
pub use cumulative::*;
pub use cumulative_rolling_full::*;

View File

@@ -1,26 +1,22 @@
//! Stored value type for Last pattern from Height.
//!
//! Both sats and USD are stored eagerly at the height level.
//! Used for rolling-window sums where USD = sum(usd_per_block),
//! NOT sats * current_price.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
internal::ByUnit,
};
const VERSION: Version = Version::ZERO;
#[derive(Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct StoredValueFromHeightLast<M: StorageMode = Rw> {
pub sats: ComputedFromHeightLast<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightLast<Dollars, M>,
#[deref]
#[deref_mut]
pub base: ByUnit<M>,
}
impl StoredValueFromHeightLast {
@@ -31,19 +27,9 @@ impl StoredValueFromHeightLast {
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_computed::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
&sats,
);
let usd = ComputedFromHeightLast::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self { sats, btc, usd })
Ok(Self {
base: ByUnit::forced_import(db, name, v, indexes)?,
})
}
pub(crate) fn compute_rolling_sum(
@@ -54,10 +40,12 @@ impl StoredValueFromHeightLast {
usd_source: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
self.sats
self.base
.sats
.height
.compute_rolling_sum(max_from, window_starts, sats_source, exit)?;
self.usd
self.base
.usd
.height
.compute_rolling_sum(max_from, window_starts, usd_source, exit)?;
Ok(())

View File

@@ -1,23 +1,22 @@
//! Rolling average values from Height - stores sats and dollars, btc is lazy.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, ReadableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
internal::ByUnit,
};
const VERSION: Version = Version::ZERO;
/// Rolling average values indexed by height - sats (stored), btc (lazy), usd (stored).
#[derive(Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct ValueEmaFromHeight<M: StorageMode = Rw> {
pub sats: ComputedFromHeightLast<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightLast<Dollars, M>,
#[deref]
#[deref_mut]
pub base: ByUnit<M>,
}
impl ValueEmaFromHeight {
@@ -28,27 +27,11 @@ impl ValueEmaFromHeight {
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_computed::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
&sats,
);
let usd = ComputedFromHeightLast::forced_import(
db,
&format!("{name}_usd"),
v,
indexes,
)?;
Ok(Self { sats, btc, usd })
Ok(Self {
base: ByUnit::forced_import(db, name, v, indexes)?,
})
}
/// Compute rolling average for both sats and dollars in one call.
pub(crate) fn compute_rolling_average(
&mut self,
starting_height: Height,
@@ -57,10 +40,12 @@ impl ValueEmaFromHeight {
dollars_source: &(impl ReadableVec<Height, Dollars> + Sync),
exit: &Exit,
) -> Result<()> {
self.sats
self.base
.sats
.height
.compute_rolling_average(starting_height, window_starts, sats_source, exit)?;
self.usd
self.base
.usd
.height
.compute_rolling_average(starting_height, window_starts, dollars_source, exit)?;
Ok(())

View File

@@ -1,30 +1,23 @@
//! Value type for Full pattern from Height.
//!
//! Height-level USD stats are stored (eagerly computed from sats × price).
//! Uses CumFull: stored base + cumulative + rolling windows.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightCumulativeFull, LazyFromHeightLast, SatsToBitcoin, SatsToDollars,
WindowStarts,
},
internal::{ByUnit, RollingFullByUnit, SatsToDollars, WindowStarts},
prices,
};
#[derive(Traversable)]
pub struct ValueFromHeightFull<M: StorageMode = Rw> {
pub sats: ComputedFromHeightCumulativeFull<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightCumulativeFull<Dollars, M>,
pub base: ByUnit<M>,
pub cumulative: ByUnit<M>,
#[traversable(flatten)]
pub rolling: RollingFullByUnit<M>,
}
const VERSION: Version = Version::TWO; // Bumped for stored height dollars
const VERSION: Version = Version::TWO;
impl ValueFromHeightFull {
pub(crate) fn forced_import(
@@ -35,19 +28,11 @@ impl ValueFromHeightFull {
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightCumulativeFull::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_height_source::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
indexes,
);
let usd =
ComputedFromHeightCumulativeFull::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self { sats, btc, usd })
Ok(Self {
base: ByUnit::forced_import(db, name, v, indexes)?,
cumulative: ByUnit::forced_import(db, &format!("{name}_cumulative"), v, indexes)?,
rolling: RollingFullByUnit::forced_import(db, name, v, indexes)?,
})
}
pub(crate) fn compute(
@@ -58,15 +43,36 @@ impl ValueFromHeightFull {
exit: &Exit,
compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
) -> Result<()> {
self.sats.compute(max_from, windows, exit, compute_sats)?;
compute_sats(&mut self.base.sats.height)?;
self.usd.compute(max_from, windows, exit, |vec| {
Ok(vec.compute_binary::<Sats, Dollars, SatsToDollars>(
self.cumulative
.sats
.height
.compute_cumulative(max_from, &self.base.sats.height, exit)?;
self.base
.usd
.height
.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&self.base.sats.height,
&prices.price.usd,
exit,
)?)
})
)?;
self.cumulative
.usd
.height
.compute_cumulative(max_from, &self.base.usd.height, exit)?;
self.rolling.compute(
max_from,
windows,
&self.base.sats.height,
&self.base.usd.height,
exit,
)?;
Ok(())
}
}

View File

@@ -1,26 +1,23 @@
//! Value type for Last pattern from Height.
//!
//! Height-level USD value is stored (eagerly computed from sats × price).
//! Day1 last is stored since it requires finding the last value within each date.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes, prices,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin, SatsToDollars},
internal::{ByUnit, SatsToDollars},
};
#[derive(Traversable)]
#[derive(Deref, DerefMut, Traversable)]
#[traversable(transparent)]
pub struct ValueFromHeightLast<M: StorageMode = Rw> {
pub sats: ComputedFromHeightLast<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightLast<Dollars, M>,
#[deref]
#[deref_mut]
pub base: ByUnit<M>,
}
const VERSION: Version = Version::TWO; // Bumped for stored height dollars
const VERSION: Version = Version::TWO;
impl ValueFromHeightLast {
pub(crate) fn forced_import(
@@ -30,35 +27,20 @@ impl ValueFromHeightLast {
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightLast::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_computed::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
&sats,
);
let usd = ComputedFromHeightLast::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self {
sats,
btc,
usd,
base: ByUnit::forced_import(db, name, v, indexes)?,
})
}
/// Eagerly compute USD height values: sats[h] * price[h].
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
self.base.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&self.base.sats.height,
&prices.price.usd,
exit,
)?;

View File

@@ -21,6 +21,7 @@ pub struct ValueFromHeightLastRolling<M: StorageMode = Rw> {
#[deref_mut]
#[traversable(flatten)]
pub value: ValueFromHeight<M>,
#[traversable(flatten)]
pub rolling: StoredValueRollingWindows<M>,
}

View File

@@ -1,32 +1,21 @@
//! Value type with stored sats height + cumulative, stored usd, lazy btc.
//!
//! - Sats: stored height + cumulative (ComputedFromHeightCumulative)
//! - BTC: lazy transform from sats (LazyFromHeightLast)
//! - USD: stored (eagerly computed from price × sats)
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{
ComputedFromHeightCumulative, ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin,
SatsToDollars,
},
internal::{ByUnit, SatsToDollars},
prices,
};
/// Value wrapper with stored sats height + cumulative, lazy btc + stored usd.
#[derive(Traversable)]
pub struct LazyComputedValueFromHeightCumulative<M: StorageMode = Rw> {
pub sats: ComputedFromHeightCumulative<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightLast<Dollars, M>,
pub base: ByUnit<M>,
pub cumulative: ByUnit<M>,
}
const VERSION: Version = Version::ONE; // Bumped for stored height dollars
const VERSION: Version = Version::ONE;
impl LazyComputedValueFromHeightCumulative {
pub(crate) fn forced_import(
@@ -37,35 +26,37 @@ impl LazyComputedValueFromHeightCumulative {
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightCumulative::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_height_source::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
indexes,
);
let usd = ComputedFromHeightLast::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self { sats, btc, usd })
Ok(Self {
base: ByUnit::forced_import(db, name, v, indexes)?,
cumulative: ByUnit::forced_import(db, &format!("{name}_cumulative"), v, indexes)?,
})
}
/// Compute cumulative + USD from already-filled sats height vec.
pub(crate) fn compute(
&mut self,
prices: &prices::Vecs,
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.sats.compute_rest(max_from, exit)?;
self.cumulative
.sats
.height
.compute_cumulative(max_from, &self.base.sats.height, exit)?;
self.base
.usd
.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.base.sats.height,
&prices.price.usd,
exit,
)?;
self.cumulative
.usd
.height
.compute_cumulative(max_from, &self.base.usd.height, exit)?;
self.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&prices.price.usd,
exit,
)?;
Ok(())
}
}

View File

@@ -1,29 +1,22 @@
//! Value type for SumCumulative pattern from Height.
//!
//! Height-level USD sum is stored (eagerly computed from sats × price).
//! Uses CumSum: stored base + cumulative + rolling sum windows.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageMode};
use brk_types::{Dollars, Height, Sats, Version};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes, prices,
internal::{
ComputedFromHeightCumulativeSum, LazyFromHeightLast, SatsToBitcoin, SatsToDollars,
WindowStarts,
},
indexes,
internal::{ByUnit, RollingSumByUnit, SatsToDollars, WindowStarts},
prices,
};
#[derive(Traversable)]
pub struct ValueFromHeightSumCumulative<M: StorageMode = Rw> {
pub sats: ComputedFromHeightCumulativeSum<Sats, M>,
pub btc: LazyFromHeightLast<Bitcoin, Sats>,
pub usd: ComputedFromHeightCumulativeSum<Dollars, M>,
pub base: ByUnit<M>,
pub cumulative: ByUnit<M>,
pub sum: RollingSumByUnit<M>,
}
const VERSION: Version = Version::TWO; // Bumped for stored height dollars
const VERSION: Version = Version::TWO;
impl ValueFromHeightSumCumulative {
pub(crate) fn forced_import(
@@ -34,19 +27,11 @@ impl ValueFromHeightSumCumulative {
) -> Result<Self> {
let v = version + VERSION;
let sats = ComputedFromHeightCumulativeSum::forced_import(db, name, v, indexes)?;
let btc = LazyFromHeightLast::from_height_source::<SatsToBitcoin>(
&format!("{name}_btc"),
v,
sats.height.read_only_boxed_clone(),
indexes,
);
let usd =
ComputedFromHeightCumulativeSum::forced_import(db, &format!("{name}_usd"), v, indexes)?;
Ok(Self { sats, btc, usd })
Ok(Self {
base: ByUnit::forced_import(db, name, v, indexes)?,
cumulative: ByUnit::forced_import(db, &format!("{name}_cumulative"), v, indexes)?,
sum: RollingSumByUnit::forced_import(db, name, v, indexes)?,
})
}
pub(crate) fn compute(
@@ -57,15 +42,36 @@ impl ValueFromHeightSumCumulative {
exit: &Exit,
compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
) -> Result<()> {
self.sats.compute(max_from, windows, exit, compute_sats)?;
compute_sats(&mut self.base.sats.height)?;
self.usd.compute(max_from, windows, exit, |vec| {
Ok(vec.compute_binary::<Sats, Dollars, SatsToDollars>(
self.cumulative
.sats
.height
.compute_cumulative(max_from, &self.base.sats.height, exit)?;
self.base
.usd
.height
.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&self.base.sats.height,
&prices.price.usd,
exit,
)?)
})
)?;
self.cumulative
.usd
.height
.compute_cumulative(max_from, &self.base.usd.height, exit)?;
self.sum.compute_rolling_sum(
max_from,
windows,
&self.base.sats.height,
&self.base.usd.height,
exit,
)?;
Ok(())
}
}

View File

@@ -2,12 +2,14 @@
use brk_traversable::Traversable;
use brk_types::{
Day1, Day3, DifficultyEpoch, HalvingEpoch, Height, Hour1, Hour4, Hour12, Minute1, Minute5,
Minute10, Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
Day1, Day3, DifficultyEpoch, FromCoarserIndex, HalvingEpoch, Height, Hour1, Hour4, Hour12,
Minute1, Minute5, Minute10, Minute30, Month1, Month3, Month6, Version, Week1, Year1, Year10,
};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{LazyAggVec, ReadOnlyClone, ReadableBoxedVec, ReadableCloneableVec};
use vecdb::{
Cursor, LazyAggVec, ReadOnlyClone, ReadableBoxedVec, ReadableCloneableVec, VecIndex, VecValue,
};
use crate::{
indexes, indexes_from,
@@ -77,13 +79,45 @@ where
};
}
/// For each coarse index `i` in `from..to`, reads the source value at the
/// finest source index mapped to `i` and feeds it to `f`.
///
/// NOTE(review): `mapping` is consulted only for its length (as an upper
/// bound on `i`); its values are never read — confirm that is intentional.
fn for_each_range_from_coarser<
    I: VecIndex,
    O: VecValue,
    S1I: VecIndex + FromCoarserIndex<I>,
    S2T: VecValue,
>(
    from: usize,
    to: usize,
    source: &ReadableBoxedVec<S1I, O>,
    mapping: &ReadableBoxedVec<I, S2T>,
    f: &mut dyn FnMut(O),
) {
    let mapping_len = mapping.len();
    let source_len = source.len();
    // Sequential cursor over `source`, reused across iterations so that
    // monotonically increasing targets avoid repeated point lookups.
    let mut cursor = Cursor::from_dyn(&**source);
    for i in from..to {
        // Never walk past the coarse mapping's length.
        if i >= mapping_len {
            break;
        }
        // Finest source index belonging to coarse index `i`, presumably
        // clamped by `source_len` — confirm `S1I::max_from` semantics.
        let target = S1I::max_from(I::from(i), source_len);
        if cursor.position() <= target {
            // Forward move: advance the shared cursor to `target`, then read.
            cursor.advance(target - cursor.position());
            if let Some(v) = cursor.next() {
                f(v);
            }
        } else if let Some(v) = source.collect_one_at(target) {
            // Backward move: fall back to a point lookup so the cursor's
            // forward position is left undisturbed.
            f(v);
        }
    }
}
macro_rules! epoch {
($idx:ident) => {
LazyAggVec::from_source(
LazyAggVec::new(
name,
v,
height_source.clone(),
indexes.$idx.identity.read_only_boxed_clone(),
for_each_range_from_coarser,
)
};
}

View File

@@ -1,29 +1,25 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{
Database, Exit, ReadableVec, Ro, Rw, StorageMode, VecIndex, VecValue, Version,
Database, Exit, ReadableVec, Ro, Rw, VecIndex, VecValue, Version,
};
use crate::internal::{
AverageVec, ComputedVecValue, MaxVec, MedianVec, MinVec, Pct10Vec, Pct25Vec, Pct75Vec,
Pct90Vec,
AverageVec, ComputedVecValue, DistributionStats, MaxVec, MedianVec, MinVec, Pct10Vec,
Pct25Vec, Pct75Vec, Pct90Vec,
};
/// Distribution stats (average + min + max + percentiles) — flat 8-field struct.
#[derive(Traversable)]
pub struct Distribution<I: VecIndex, T: ComputedVecValue + JsonSchema, M: StorageMode = Rw> {
pub average: AverageVec<I, T, M>,
#[traversable(flatten)]
pub min: MinVec<I, T, M>,
#[traversable(flatten)]
pub max: MaxVec<I, T, M>,
pub pct10: Pct10Vec<I, T, M>,
pub pct25: Pct25Vec<I, T, M>,
pub median: MedianVec<I, T, M>,
pub pct75: Pct75Vec<I, T, M>,
pub pct90: Pct90Vec<I, T, M>,
}
/// Distribution stats (average + min + max + percentiles) — concrete vec type alias.
///
/// Instantiates the generic `DistributionStats` shape with one vec type per
/// stat, so the stat field names (`average`, `pct10`, `median`, ...) are
/// defined in a single place.
pub type Distribution<I, T, M = Rw> = DistributionStats<
    AverageVec<I, T, M>,
    MinVec<I, T, M>,
    MaxVec<I, T, M>,
    Pct10Vec<I, T, M>,
    Pct25Vec<I, T, M>,
    MedianVec<I, T, M>,
    Pct75Vec<I, T, M>,
    Pct90Vec<I, T, M>,
>;
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
pub(crate) fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
@@ -111,7 +107,7 @@ impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
}
pub fn read_only_clone(&self) -> Distribution<I, T, Ro> {
Distribution {
DistributionStats {
average: self.average.read_only_clone(),
min: self.min.read_only_clone(),
max: self.max.read_only_clone(),

View File

@@ -41,11 +41,11 @@ where
average: RollingWindows::forced_import(db, &format!("{name}_average"), v, indexes)?,
min: RollingWindows::forced_import(db, &format!("{name}_min"), v, indexes)?,
max: RollingWindows::forced_import(db, &format!("{name}_max"), v, indexes)?,
p10: RollingWindows::forced_import(db, &format!("{name}_p10"), v, indexes)?,
p25: RollingWindows::forced_import(db, &format!("{name}_p25"), v, indexes)?,
pct10: RollingWindows::forced_import(db, &format!("{name}_p10"), v, indexes)?,
pct25: RollingWindows::forced_import(db, &format!("{name}_p25"), v, indexes)?,
median: RollingWindows::forced_import(db, &format!("{name}_median"), v, indexes)?,
p75: RollingWindows::forced_import(db, &format!("{name}_p75"), v, indexes)?,
p90: RollingWindows::forced_import(db, &format!("{name}_p90"), v, indexes)?,
pct75: RollingWindows::forced_import(db, &format!("{name}_p75"), v, indexes)?,
pct90: RollingWindows::forced_import(db, &format!("{name}_p90"), v, indexes)?,
}))
}
@@ -69,30 +69,30 @@ where
compute_rolling_distribution_from_starts(
max_from, windows._24h, source,
&mut self.0.average._24h.height, &mut self.0.min._24h.height,
&mut self.0.max._24h.height, &mut self.0.p10._24h.height,
&mut self.0.p25._24h.height, &mut self.0.median._24h.height,
&mut self.0.p75._24h.height, &mut self.0.p90._24h.height, exit,
&mut self.0.max._24h.height, &mut self.0.pct10._24h.height,
&mut self.0.pct25._24h.height, &mut self.0.median._24h.height,
&mut self.0.pct75._24h.height, &mut self.0.pct90._24h.height, exit,
)?;
compute_rolling_distribution_from_starts(
max_from, windows._7d, source,
&mut self.0.average._7d.height, &mut self.0.min._7d.height,
&mut self.0.max._7d.height, &mut self.0.p10._7d.height,
&mut self.0.p25._7d.height, &mut self.0.median._7d.height,
&mut self.0.p75._7d.height, &mut self.0.p90._7d.height, exit,
&mut self.0.max._7d.height, &mut self.0.pct10._7d.height,
&mut self.0.pct25._7d.height, &mut self.0.median._7d.height,
&mut self.0.pct75._7d.height, &mut self.0.pct90._7d.height, exit,
)?;
compute_rolling_distribution_from_starts(
max_from, windows._30d, source,
&mut self.0.average._30d.height, &mut self.0.min._30d.height,
&mut self.0.max._30d.height, &mut self.0.p10._30d.height,
&mut self.0.p25._30d.height, &mut self.0.median._30d.height,
&mut self.0.p75._30d.height, &mut self.0.p90._30d.height, exit,
&mut self.0.max._30d.height, &mut self.0.pct10._30d.height,
&mut self.0.pct25._30d.height, &mut self.0.median._30d.height,
&mut self.0.pct75._30d.height, &mut self.0.pct90._30d.height, exit,
)?;
compute_rolling_distribution_from_starts(
max_from, windows._1y, source,
&mut self.0.average._1y.height, &mut self.0.min._1y.height,
&mut self.0.max._1y.height, &mut self.0.p10._1y.height,
&mut self.0.p25._1y.height, &mut self.0.median._1y.height,
&mut self.0.p75._1y.height, &mut self.0.p90._1y.height, exit,
&mut self.0.max._1y.height, &mut self.0.pct10._1y.height,
&mut self.0.pct25._1y.height, &mut self.0.median._1y.height,
&mut self.0.pct75._1y.height, &mut self.0.pct90._1y.height, exit,
)?;
Ok(())

View File

@@ -1,15 +1,14 @@
mod block_count_target;
mod cents_to_dollars;
mod cents_to_sats;
mod ohlc_cents_to_dollars;
mod ohlc_cents_to_sats;
mod dollar_halve;
mod dollar_identity;
mod dollar_plus;
mod dollar_times_tenths;
mod dollars_to_sats_fract;
mod f32_identity;
mod ohlc_cents_to_dollars;
mod ohlc_cents_to_sats;
mod percentage_diff_close_dollars;
mod percentage_dollars_f32;
mod percentage_dollars_f32_neg;
@@ -18,7 +17,7 @@ mod percentage_u32_f32;
mod price_times_ratio;
mod ratio32;
mod ratio64;
mod ratio_u64_f32;
mod return_f32_tenths;
mod return_i8;
mod return_u16;
@@ -52,13 +51,12 @@ pub use percentage_dollars_f32_neg::*;
pub use percentage_sats_f64::*;
pub use percentage_u32_f32::*;
pub use price_times_ratio::*;
pub use ratio32::*;
pub use ratio64::*;
pub use ratio_u64_f32::*;
pub use return_f32_tenths::*;
pub use return_i8::*;
pub use return_u16::*;
pub use sat_halve::*;
pub use sat_halve_to_bitcoin::*;
pub use sat_identity::*;

View File

@@ -0,0 +1,17 @@
use brk_types::{StoredF32, StoredU64};
use vecdb::BinaryTransform;
/// (StoredU64, StoredU64) -> StoredF32 ratio (a/b)
/// Used for adoption ratio calculations (script_count / total_outputs)
pub struct RatioU64F32;

impl BinaryTransform<StoredU64, StoredU64, StoredF32> for RatioU64F32 {
    #[inline(always)]
    fn apply(numerator: StoredU64, denominator: StoredU64) -> StoredF32 {
        // Guard against division by zero: an empty total yields a 0.0 ratio.
        let denom = *denominator;
        if denom == 0 {
            return StoredF32::from(0.0);
        }
        // Divide in f64 to keep precision for large counts, then narrow.
        StoredF32::from(*numerator as f64 / denom as f64)
    }
}

View File

@@ -27,7 +27,7 @@ impl Vecs {
) -> Result<()> {
self.puell_multiple.height.compute_divide(
starting_indexes.height,
&rewards.coinbase.usd.height,
&rewards.coinbase.base.usd.height,
&rewards.subsidy_usd_1y_sma.height,
exit,
)?;

View File

@@ -31,8 +31,8 @@ impl Vecs {
self.hashrate.compute(
&blocks.count,
&blocks.difficulty,
&self.rewards.coinbase.sats.rolling.sum._24h.height,
&self.rewards.coinbase.usd.rolling.sum._24h.height,
&self.rewards.coinbase.rolling._24h.sum.sats.height,
&self.rewards.coinbase.rolling._24h.sum.usd.height,
starting_indexes,
exit,
)?;

View File

@@ -84,8 +84,8 @@ impl Vecs {
|vec| {
vec.compute_transform2(
starting_indexes.height,
&self.coinbase.sats.height,
&self.fees.sats.height,
&self.coinbase.base.sats.height,
&self.fees.base.sats.height,
|(height, coinbase, fees, ..)| {
(
height,
@@ -109,7 +109,7 @@ impl Vecs {
|vec| {
vec.compute_transform(
starting_indexes.height,
&self.subsidy.sats.height,
&self.subsidy.base.sats.height,
|(height, subsidy, ..)| {
let halving = HalvingEpoch::from(height);
let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32);
@@ -124,42 +124,42 @@ impl Vecs {
// All-time cumulative fee dominance
self.fee_dominance.height.compute_percentage(
starting_indexes.height,
&self.fees.sats.cumulative.height,
&self.coinbase.sats.cumulative.height,
&self.fees.cumulative.sats.height,
&self.coinbase.cumulative.sats.height,
exit,
)?;
// Rolling fee dominance = sum(fees) / sum(coinbase) * 100
self.fee_dominance_24h.height.compute_percentage(
starting_indexes.height,
&self.fees.sats.rolling.sum._24h.height,
&self.coinbase.sats.rolling.sum._24h.height,
&self.fees.rolling._24h.sum.sats.height,
&self.coinbase.rolling._24h.sum.sats.height,
exit,
)?;
self.fee_dominance_7d.height.compute_percentage(
starting_indexes.height,
&self.fees.sats.rolling.sum._7d.height,
&self.coinbase.sats.rolling.sum._7d.height,
&self.fees.rolling._7d.sum.sats.height,
&self.coinbase.rolling._7d.sum.sats.height,
exit,
)?;
self.fee_dominance_30d.height.compute_percentage(
starting_indexes.height,
&self.fees.sats.rolling.sum._30d.height,
&self.coinbase.sats.rolling.sum._30d.height,
&self.fees.rolling._30d.sum.sats.height,
&self.coinbase.rolling._30d.sum.sats.height,
exit,
)?;
self.fee_dominance_1y.height.compute_percentage(
starting_indexes.height,
&self.fees.sats.rolling.sum._1y.height,
&self.coinbase.sats.rolling.sum._1y.height,
&self.fees.rolling._1y.sum.sats.height,
&self.coinbase.rolling._1y.sum.sats.height,
exit,
)?;
// All-time cumulative subsidy dominance
self.subsidy_dominance.height.compute_percentage(
starting_indexes.height,
&self.subsidy.sats.cumulative.height,
&self.coinbase.sats.cumulative.height,
&self.subsidy.cumulative.sats.height,
&self.coinbase.cumulative.sats.height,
exit,
)?;
@@ -193,7 +193,7 @@ impl Vecs {
self.subsidy_usd_1y_sma.height.compute_rolling_average(
starting_indexes.height,
&count_vecs.height_1y_ago,
&self.coinbase.usd.height,
&self.coinbase.base.usd.height,
exit,
)?;

View File

@@ -237,7 +237,7 @@ impl Vecs {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.subsidy.sats.height,
&mining.rewards.subsidy.base.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
@@ -253,7 +253,7 @@ impl Vecs {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.fees.sats.height,
&mining.rewards.fees.base.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)
@@ -269,7 +269,7 @@ impl Vecs {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&mining.rewards.coinbase.sats.height,
&mining.rewards.coinbase.base.sats.height,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)

View File

@@ -0,0 +1,56 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{StoredF32, Version};
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{ComputeIndexes, indexes, internal::{ComputedFromHeightLast, RatioU64F32}, outputs};
use super::count::Vecs as CountVecs;
/// Script-adoption ratio vectors, stored per block height.
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
    // Ratio of P2TR output count to total output count (see `compute`).
    pub taproot: ComputedFromHeightLast<StoredF32, M>,
    // Ratio of SegWit output count to total output count (see `compute`).
    pub segwit: ComputedFromHeightLast<StoredF32, M>,
}
impl Vecs {
    /// Open (or create) the two adoption-ratio vectors in `db`.
    pub(crate) fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let taproot =
            ComputedFromHeightLast::forced_import(db, "taproot_adoption", version, indexes)?;
        let segwit =
            ComputedFromHeightLast::forced_import(db, "segwit_adoption", version, indexes)?;
        Ok(Self { taproot, segwit })
    }

    /// Compute per-block adoption ratios: script-type count divided by the
    /// cumulative total output count, via the `RatioU64F32` transform.
    pub(crate) fn compute(
        &mut self,
        count: &CountVecs,
        outputs_count: &outputs::CountVecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        // Both ratios share the same denominator vector.
        let total_outputs = &outputs_count.total_count.full.sum_cumulative.sum.0;
        self.taproot.compute_binary::<_, _, RatioU64F32>(
            starting_indexes.height,
            &count.p2tr.height,
            total_outputs,
            exit,
        )?;
        self.segwit.compute_binary::<_, _, RatioU64F32>(
            starting_indexes.height,
            &count.segwit.height,
            total_outputs,
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -17,11 +17,14 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
self.count
.compute(indexer, &blocks.count, &outputs.count, starting_indexes, exit)?;
.compute(indexer, &blocks.count, starting_indexes, exit)?;
self.value
.compute(indexer, &blocks.count, prices, starting_indexes, exit)?;
self.adoption
.compute(&self.count, &outputs.count, starting_indexes, exit)?;
let _lock = exit.lock();
self.db.compact()?;
Ok(())

View File

@@ -1,18 +1,16 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::StoredF32;
use brk_types::StoredU64;
use vecdb::Exit;
use super::Vecs;
use crate::{ComputeIndexes, blocks, outputs};
use crate::{ComputeIndexes, blocks};
impl Vecs {
pub(crate) fn compute(
&mut self,
indexer: &Indexer,
count_vecs: &blocks::CountVecs,
outputs_count: &outputs::CountVecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
@@ -151,37 +149,6 @@ impl Vecs {
)?)
})?;
// Adoption ratios: per-block ratio of script type / total outputs
self.taproot_adoption.height.compute_transform2(
starting_indexes.height,
&self.p2tr.height,
&outputs_count.total_count.full.sum_cumulative.sum.0,
|(h, p2tr, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*p2tr as f64 / *total as f64)
} else {
StoredF32::from(0.0)
};
(h, ratio)
},
exit,
)?;
self.segwit_adoption.height.compute_transform2(
starting_indexes.height,
&self.segwit.height,
&outputs_count.total_count.full.sum_cumulative.sum.0,
|(h, segwit, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*segwit as f64 / *total as f64)
} else {
StoredF32::from(0.0)
};
(h, ratio)
},
exit,
)?;
Ok(())
}
}

View File

@@ -3,10 +3,7 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightCumulativeSum, ComputedFromHeightLast},
};
use crate::{indexes, internal::ComputedFromHeightCumulativeSum};
impl Vecs {
pub(crate) fn forced_import(
@@ -58,18 +55,6 @@ impl Vecs {
indexes,
)?,
segwit,
taproot_adoption: ComputedFromHeightLast::forced_import(
db,
"taproot_adoption",
version,
indexes,
)?,
segwit_adoption: ComputedFromHeightLast::forced_import(
db,
"segwit_adoption",
version,
indexes,
)?,
})
}
}

View File

@@ -1,8 +1,8 @@
use brk_traversable::Traversable;
use brk_types::{StoredF32, StoredU64};
use brk_types::StoredU64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightCumulativeSum, ComputedFromHeightLast};
use crate::internal::ComputedFromHeightCumulativeSum;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
@@ -23,8 +23,4 @@ pub struct Vecs<M: StorageMode = Rw> {
// Aggregate counts
/// SegWit output count (p2wpkh + p2wsh + p2tr)
pub segwit: ComputedFromHeightCumulativeSum<StoredU64, M>,
// Adoption ratios (stored per-block, lazy period views)
pub taproot_adoption: ComputedFromHeightLast<StoredF32, M>,
pub segwit_adoption: ComputedFromHeightLast<StoredF32, M>,
}

View File

@@ -7,7 +7,7 @@ use vecdb::{Database, PAGE_SIZE};
use crate::indexes;
use super::{CountVecs, ValueVecs, Vecs};
use super::{AdoptionVecs, CountVecs, ValueVecs, Vecs};
impl Vecs {
pub(crate) fn forced_import(
@@ -22,8 +22,9 @@ impl Vecs {
let count = CountVecs::forced_import(&db, version, indexes)?;
let value = ValueVecs::forced_import(&db, version, indexes)?;
let adoption = AdoptionVecs::forced_import(&db, version, indexes)?;
let this = Self { db, count, value };
let this = Self { db, count, value, adoption };
this.db.retain_regions(
this.iter_any_exportable()

View File

@@ -1,3 +1,4 @@
pub mod adoption;
pub mod count;
pub mod value;
@@ -7,6 +8,7 @@ mod import;
use brk_traversable::Traversable;
use vecdb::{Database, Rw, StorageMode};
pub use adoption::Vecs as AdoptionVecs;
pub use count::Vecs as CountVecs;
pub use value::Vecs as ValueVecs;
@@ -19,4 +21,5 @@ pub struct Vecs<M: StorageMode = Rw> {
pub count: CountVecs<M>,
pub value: ValueVecs<M>,
pub adoption: AdoptionVecs<M>,
}

View File

@@ -22,11 +22,11 @@ impl Vecs {
.compute(starting_indexes.height, &window_starts, prices, exit, |height_vec| {
// Validate computed versions against dependencies
let opreturn_dep_version = scripts.value.opreturn.sats.height.version();
let opreturn_dep_version = scripts.value.opreturn.base.sats.height.version();
height_vec.validate_computed_version_or_reset(opreturn_dep_version)?;
// Copy per-block opreturn values from scripts
let scripts_target = scripts.value.opreturn.sats.height.len();
let scripts_target = scripts.value.opreturn.base.sats.height.len();
if scripts_target > 0 {
let target_height = Height::from(scripts_target - 1);
let current_len = height_vec.len();
@@ -36,7 +36,7 @@ impl Vecs {
if starting_height <= target_height {
let start = starting_height.to_usize();
let end = target_height.to_usize() + 1;
scripts.value.opreturn.sats.height.fold_range_at(
scripts.value.opreturn.base.sats.height.fold_range_at(
start, end, start,
|idx, value| {
height_vec.truncate_push(Height::from(idx), value).unwrap();
@@ -52,8 +52,8 @@ impl Vecs {
// 2. Compute unspendable supply = opreturn + unclaimed_rewards + genesis (at height 0)
// Get reference to opreturn height vec for computing unspendable
let opreturn_height = &self.opreturn.sats.height;
let unclaimed_height = &mining.rewards.unclaimed_rewards.sats.height;
let opreturn_height = &self.opreturn.base.sats.height;
let unclaimed_height = &mining.rewards.unclaimed_rewards.base.sats.height;
self.unspendable
.compute(starting_indexes.height, &window_starts, prices, exit, |height_vec| {

View File

@@ -15,7 +15,7 @@ impl Query {
let iter = Day1Iter::new(computer, start, current_height.to_usize());
let cumulative = &computer.mining.rewards.fees.sats.cumulative.height;
let cumulative = &computer.mining.rewards.fees.cumulative.sats.height;
let first_height = &computer.indexes.day1.first_height;
Ok(iter.collect(|di, ts, h| {

View File

@@ -19,11 +19,11 @@ impl Query {
.mining
.rewards
.coinbase
.sats
.rolling
._24h
.distribution
.average
._24h
.sats
.day1;
Ok(iter.collect(|di, ts, h| {

View File

@@ -12,8 +12,8 @@ impl Query {
let end_block = current_height;
let start_block = Height::from(current_height.to_usize().saturating_sub(block_count - 1));
let coinbase_vec = &computer.mining.rewards.coinbase.sats.height;
let fee_vec = &computer.mining.rewards.fees.sats.height;
let coinbase_vec = &computer.mining.rewards.coinbase.base.sats.height;
let fee_vec = &computer.mining.rewards.fees.base.sats.height;
let tx_count_vec = &computer.transactions.count.tx_count.height;
let start = start_block.to_usize();

View File

@@ -7,9 +7,9 @@ use brk_error::Error;
use jiff::Span;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{CheckedSub, Formattable, FromCoarserIndex, Pco, PrintableIndex};
use vecdb::{CheckedSub, Formattable, Pco, PrintableIndex};
use crate::{Year10, Month1, Month3, Month6, Week1, Year1};
use crate::{FromCoarserIndex, Month1, Month3, Month6, Week1, Year1, Year10};
use super::Date;

View File

@@ -0,0 +1,39 @@
use std::ops::RangeInclusive;
/// Maps coarser-grained indices to ranges of finer-grained indices.
///
/// Useful for hierarchical index systems where one index type represents
/// a range of another (e.g., mapping hour indices to timestamp ranges).
pub trait FromCoarserIndex<T>
where
    T: Ord + From<usize>,
{
    /// Returns the minimum fine-grained index represented by the coarse index.
    fn min_from(coarser: T) -> usize;

    /// Returns the maximum fine-grained index represented by the coarse index.
    /// Note: May exceed actual data length - use `max_from` for bounded results.
    fn max_from_(coarser: T) -> usize;

    /// Returns the maximum fine-grained index, bounded by the data length.
    /// Returns 0 if len is 0 (empty data).
    fn max_from(coarser: T, len: usize) -> usize {
        // `checked_sub` folds the empty-data case into the `None` arm.
        match len.checked_sub(1) {
            Some(last_index) => Self::max_from_(coarser).min(last_index),
            None => 0,
        }
    }

    /// Returns the inclusive range of fine-grained indices for the coarse index.
    /// Returns an empty range (1..=0) if len is 0.
    fn inclusive_range_from(coarser: T, len: usize) -> RangeInclusive<usize>
    where
        T: Clone,
    {
        if len == 0 {
            // Deliberately inverted bounds: an inclusive range with
            // start > end iterates zero times.
            #[allow(clippy::reversed_empty_ranges)]
            return 1..=0;
        }
        let start = Self::min_from(coarser.clone());
        let end = Self::max_from(coarser, len);
        start..=end
    }
}

View File

@@ -8,13 +8,11 @@ use byteview::ByteView;
use derive_more::Deref;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use vecdb::{Bytes, CheckedSub, Formattable, FromCoarserIndex, Pco, PrintableIndex, Stamp};
use vecdb::{Bytes, CheckedSub, Formattable, Pco, PrintableIndex, Stamp};
use super::{DifficultyEpoch, HalvingEpoch};
use crate::{BLOCKS_PER_DIFF_EPOCHS, BLOCKS_PER_HALVING, FromCoarserIndex};
use crate::{BLOCKS_PER_DIFF_EPOCHS, BLOCKS_PER_HALVING};
use super::StoredU64;
use super::{DifficultyEpoch, HalvingEpoch, StoredU64};
/// Block height
#[derive(

View File

@@ -64,6 +64,7 @@ mod feerate;
mod feeratepercentiles;
mod format;
mod formatresponse;
mod from_coarser;
mod fundedaddressdata;
mod fundedaddressindex;
mod halvingepoch;
@@ -252,6 +253,7 @@ pub use feerate::*;
pub use feeratepercentiles::*;
pub use format::*;
pub use formatresponse::*;
pub use from_coarser::*;
pub use fundedaddressdata::*;
pub use fundedaddressindex::*;
pub use halvingepoch::*;

File diff suppressed because it is too large Load Diff

View File

@@ -2117,13 +2117,13 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.lower_price_band: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'lower_price_band'))
self.mvrv: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'mvrv'))
self.neg_realized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_7d_ema'))
self.net_realized_pnl_cumulative_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap'))
self.net_realized_pnl_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap'))
self.peak_regret: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'peak_regret_rel_to_realized_cap'))
self.profit_flow: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_flow'))
self.profit_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_value_created'))
@@ -2132,7 +2132,7 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.realized_cap_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap_30d_delta'))
self.realized_cap_cents: MetricPattern1[Cents] = MetricPattern1(client, _m(acc, 'realized_cap_cents'))
self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap'))
self.realized_loss: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss_1y: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_1y'))
self.realized_loss_24h: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_24h'))
self.realized_loss_30d: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_30d'))
@@ -2142,7 +2142,7 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.realized_price: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'realized_price'))
self.realized_price_extra: RatioPattern2 = RatioPattern2(client, _m(acc, 'realized_price_ratio'))
self.realized_price_ratio_ext: RatioPattern3 = RatioPattern3(client, _m(acc, 'realized_price_ratio'))
self.realized_profit: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit_1y: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_1y'))
self.realized_profit_24h: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_24h'))
self.realized_profit_30d: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_30d'))
@@ -2167,9 +2167,9 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.sell_side_risk_ratio_30d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_30d_ema'))
self.sell_side_risk_ratio_7d: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d'))
self.sell_side_risk_ratio_7d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d_ema'))
self.sent_in_loss: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_loss_14d_ema'))
self.sent_in_profit: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_profit_14d_ema'))
self.sopr: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr'))
self.sopr_1y: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr_1y'))
@@ -2228,13 +2228,13 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.lower_price_band: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'lower_price_band'))
self.mvrv: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'mvrv'))
self.neg_realized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_7d_ema'))
self.net_realized_pnl_cumulative_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap'))
self.net_realized_pnl_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap'))
self.peak_regret: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'peak_regret_rel_to_realized_cap'))
self.profit_flow: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_flow'))
self.profit_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_value_created'))
@@ -2242,12 +2242,12 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap'))
self.realized_cap_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap_30d_delta'))
self.realized_cap_cents: MetricPattern1[Cents] = MetricPattern1(client, _m(acc, 'realized_cap_cents'))
self.realized_loss: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_7d_ema'))
self.realized_loss_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_loss_rel_to_realized_cap'))
self.realized_price: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'realized_price'))
self.realized_price_extra: RatioPattern2 = RatioPattern2(client, _m(acc, 'realized_price_ratio'))
self.realized_profit: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_7d_ema'))
self.realized_profit_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_profit_rel_to_realized_cap'))
self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value'))
@@ -2264,9 +2264,9 @@ class AdjustedCapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSe
self.sell_side_risk_ratio_30d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_30d_ema'))
self.sell_side_risk_ratio_7d: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d'))
self.sell_side_risk_ratio_7d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d_ema'))
self.sent_in_loss: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_loss_14d_ema'))
self.sent_in_profit: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_profit_14d_ema'))
self.sopr: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr'))
self.sopr_1y: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr_1y'))
@@ -2307,13 +2307,13 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.lower_price_band: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'lower_price_band'))
self.mvrv: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'mvrv'))
self.neg_realized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_7d_ema'))
self.net_realized_pnl_cumulative_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap'))
self.net_realized_pnl_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap'))
self.peak_regret: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'peak_regret_rel_to_realized_cap'))
self.profit_flow: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_flow'))
self.profit_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_value_created'))
@@ -2322,7 +2322,7 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.realized_cap_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap_30d_delta'))
self.realized_cap_cents: MetricPattern1[Cents] = MetricPattern1(client, _m(acc, 'realized_cap_cents'))
self.realized_cap_rel_to_own_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_cap_rel_to_own_market_cap'))
self.realized_loss: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss_1y: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_1y'))
self.realized_loss_24h: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_24h'))
self.realized_loss_30d: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_30d'))
@@ -2332,7 +2332,7 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.realized_price: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'realized_price'))
self.realized_price_extra: RatioPattern2 = RatioPattern2(client, _m(acc, 'realized_price_ratio'))
self.realized_price_ratio_ext: RatioPattern3 = RatioPattern3(client, _m(acc, 'realized_price_ratio'))
self.realized_profit: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit_1y: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_1y'))
self.realized_profit_24h: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_24h'))
self.realized_profit_30d: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_30d'))
@@ -2357,9 +2357,9 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.sell_side_risk_ratio_30d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_30d_ema'))
self.sell_side_risk_ratio_7d: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d'))
self.sell_side_risk_ratio_7d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d_ema'))
self.sent_in_loss: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_loss_14d_ema'))
self.sent_in_profit: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_profit_14d_ema'))
self.sopr: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr'))
self.sopr_1y: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr_1y'))
@@ -2399,13 +2399,13 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.lower_price_band: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'lower_price_band'))
self.mvrv: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'mvrv'))
self.neg_realized_loss: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'neg_realized_loss'))
self.net_realized_pnl: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'net_realized_pnl'))
self.net_realized_pnl_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_7d_ema'))
self.net_realized_pnl_cumulative_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_market_cap'))
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap'))
self.net_realized_pnl_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'net_realized_pnl_rel_to_realized_cap'))
self.peak_regret: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_peak_regret'))
self.peak_regret_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'peak_regret_rel_to_realized_cap'))
self.profit_flow: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_flow'))
self.profit_value_created: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'profit_value_created'))
@@ -2413,12 +2413,12 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.realized_cap: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap'))
self.realized_cap_30d_delta: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_cap_30d_delta'))
self.realized_cap_cents: MetricPattern1[Cents] = MetricPattern1(client, _m(acc, 'realized_cap_cents'))
self.realized_loss: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_loss'))
self.realized_loss_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_loss_7d_ema'))
self.realized_loss_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_loss_rel_to_realized_cap'))
self.realized_price: SatsUsdPattern = SatsUsdPattern(client, _m(acc, 'realized_price'))
self.realized_price_extra: RatioPattern2 = RatioPattern2(client, _m(acc, 'realized_price_ratio'))
self.realized_profit: CumulativeHeightPattern[Dollars] = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit: CumulativeHeightPattern = CumulativeHeightPattern(client, _m(acc, 'realized_profit'))
self.realized_profit_7d_ema: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_profit_7d_ema'))
self.realized_profit_rel_to_realized_cap: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'realized_profit_rel_to_realized_cap'))
self.realized_value: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'realized_value'))
@@ -2435,9 +2435,9 @@ class CapCapitulationInvestorLossLowerMvrvNegNetPeakProfitRealizedSellSentSoprTo
self.sell_side_risk_ratio_30d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_30d_ema'))
self.sell_side_risk_ratio_7d: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d'))
self.sell_side_risk_ratio_7d_ema: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'sell_side_risk_ratio_7d_ema'))
self.sent_in_loss: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_loss'))
self.sent_in_loss_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_loss_14d_ema'))
self.sent_in_profit: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent_in_profit'))
self.sent_in_profit_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_in_profit_14d_ema'))
self.sopr: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr'))
self.sopr_1y: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, 'sopr_1y'))
@@ -2701,15 +2701,15 @@ class BlocksCoinbaseDaysDominanceFeeSubsidyPattern:
self.blocks_mined_1y_sum: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, 'blocks_mined_1y_sum'))
self.blocks_mined_24h_sum: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, 'blocks_mined_24h_sum'))
self.blocks_since_block: MetricPattern1[StoredU32] = MetricPattern1(client, _m(acc, 'blocks_since_block'))
self.coinbase: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, _m(acc, 'coinbase'))
self.coinbase: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, _m(acc, 'coinbase'))
self.days_since_block: MetricPattern1[StoredU16] = MetricPattern1(client, _m(acc, 'days_since_block'))
self.dominance: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance'))
self.dominance_1m: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance_1m'))
self.dominance_1w: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance_1w'))
self.dominance_1y: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance_1y'))
self.dominance_24h: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'dominance_24h'))
self.fee: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, _m(acc, 'fee'))
self.subsidy: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, _m(acc, 'subsidy'))
self.fee: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, _m(acc, 'fee'))
self.subsidy: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, _m(acc, 'subsidy'))
class InvestedNegNetNuplSupplyUnrealizedPattern4:
"""Pattern struct for repeated tree structure."""
@@ -2816,10 +2816,27 @@ class AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern:
self.pct25: MetricPattern20[StoredU64] = MetricPattern20(client, _m(acc, 'pct25'))
self.pct75: MetricPattern20[StoredU64] = MetricPattern20(client, _m(acc, 'pct75'))
self.pct90: MetricPattern20[StoredU64] = MetricPattern20(client, _m(acc, 'pct90'))
self.rolling: AverageMaxMedianMinP10P25P75P90SumPattern = AverageMaxMedianMinP10P25P75P90SumPattern(client, acc)
self.rolling: AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, acc)
self.sum: MetricPattern20[StoredU64] = MetricPattern20(client, _m(acc, 'sum'))
class AverageCumulativeMaxMedianMinP10P25P75P90SumPattern:
class AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'average'))
self.cumulative: MetricPattern1[StoredU64] = MetricPattern1(client, _m(acc, 'cumulative'))
self.height: MetricPattern20[StoredU64] = MetricPattern20(client, acc)
self.max: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'max'))
self.median: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'median'))
self.min: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'min'))
self.pct10: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.pct25: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.pct75: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.pct90: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.sum: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'sum'))
class AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
@@ -2829,10 +2846,10 @@ class AverageCumulativeMaxMedianMinP10P25P75P90SumPattern:
self.max: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'max'))
self.median: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'median'))
self.min: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'min'))
self.p10: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.p25: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.p75: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.p90: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.pct10: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.pct25: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.pct75: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.pct90: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.sum: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'sum'))
class AverageGainsLossesRsiStochPattern:
@@ -2881,7 +2898,22 @@ class AllP2aP2pk33P2pk65P2pkhP2shP2trP2wpkhP2wshPattern:
self.p2wpkh: _30dCountPattern = _30dCountPattern(client, _p('p2wpkh', acc))
self.p2wsh: _30dCountPattern = _30dCountPattern(client, _p('p2wsh', acc))
class AverageMaxMedianMinP10P25P75P90SumPattern:
class AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.average: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'average'))
self.max: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'max'))
self.median: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'median'))
self.min: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'min'))
self.pct10: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'p10'))
self.pct25: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'p25'))
self.pct75: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'p75'))
self.pct90: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'p90'))
self.sum: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sum'))
class AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
@@ -2890,13 +2922,13 @@ class AverageMaxMedianMinP10P25P75P90SumPattern:
self.max: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'max'))
self.median: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'median'))
self.min: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'min'))
self.p10: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.p25: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.p75: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.p90: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.pct10: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.pct25: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.pct75: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.pct90: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.sum: _1y24h30d7dPattern[StoredU64] = _1y24h30d7dPattern(client, _m(acc, 'sum'))
class AverageHeightMaxMedianMinP10P25P75P90Pattern(Generic[T]):
class AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
@@ -2906,10 +2938,10 @@ class AverageHeightMaxMedianMinP10P25P75P90Pattern(Generic[T]):
self.max: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'max'))
self.median: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'median'))
self.min: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'min'))
self.p10: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.p25: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.p75: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.p90: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
self.pct10: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p10'))
self.pct25: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p25'))
self.pct75: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p75'))
self.pct90: _1y24h30d7dPattern[T] = _1y24h30d7dPattern(client, _m(acc, 'p90'))
class AverageMaxMedianMinPct10Pct25Pct75Pct90Pattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -2938,6 +2970,19 @@ class _10y2y3y4y5y6y8yPattern:
self._6y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('6y', acc))
self._8y: MetricPattern1[StoredF32] = MetricPattern1(client, _p('8y', acc))
class _1y24h30d7dBtcSatsUsdPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._1y: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '1y'))
self._24h: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '24h'))
self._30d: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '30d'))
self._7d: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '7d'))
self.btc: MetricPattern20[Bitcoin] = MetricPattern20(client, _m(acc, 'btc'))
self.sats: MetricPattern20[Sats] = MetricPattern20(client, acc)
self.usd: MetricPattern20[Dollars] = MetricPattern20(client, _m(acc, 'usd'))
class ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern:
"""Pattern struct for repeated tree structure."""
@@ -2990,17 +3035,29 @@ class ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern5:
self.supply: _30dHalvedTotalPattern = _30dHalvedTotalPattern(client, acc)
self.unrealized: GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern = GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern(client, acc)
class _1y24h30d7dBaseCumulativePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self._1y: AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 = AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2(client, _m(acc, '1y'))
self._24h: AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 = AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2(client, _m(acc, '24h'))
self._30d: AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 = AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2(client, _m(acc, '30d'))
self._7d: AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2 = AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2(client, _m(acc, '7d'))
self.base: BtcSatsUsdPattern = BtcSatsUsdPattern(client, acc)
self.cumulative: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'cumulative'))
class BalanceBothReactivatedReceivingSendingPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.balance_decreased: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'balance_decreased'))
self.balance_increased: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'balance_increased'))
self.both: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'both'))
self.reactivated: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'reactivated'))
self.receiving: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'receiving'))
self.sending: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredU32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, _m(acc, 'sending'))
self.balance_decreased: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'balance_decreased'))
self.balance_increased: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'balance_increased'))
self.both: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'both'))
self.reactivated: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'reactivated'))
self.receiving: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'receiving'))
self.sending: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredU32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, _m(acc, 'sending'))
class CoinblocksCoindaysSatblocksSatdaysSentPattern:
"""Pattern struct for repeated tree structure."""
@@ -3011,7 +3068,7 @@ class CoinblocksCoindaysSatblocksSatdaysSentPattern:
self.coindays_destroyed: CumulativeHeightSumPattern[StoredF64] = CumulativeHeightSumPattern(client, _m(acc, 'coindays_destroyed'))
self.satblocks_destroyed: MetricPattern20[Sats] = MetricPattern20(client, _m(acc, 'satblocks_destroyed'))
self.satdays_destroyed: MetricPattern20[Sats] = MetricPattern20(client, _m(acc, 'satdays_destroyed'))
self.sent: BtcSatsUsdPattern2 = BtcSatsUsdPattern2(client, _m(acc, 'sent'))
self.sent: BaseCumulativePattern = BaseCumulativePattern(client, _m(acc, 'sent'))
self.sent_14d_ema: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'sent_14d_ema'))
class InvestedMaxMinPercentilesSpotPattern:
@@ -3036,16 +3093,6 @@ class _1y24h30d7dPattern2:
self._30d: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '30d'))
self._7d: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, '7d'))
class BtcRollingSatsUsdPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.btc: MetricPattern20[Bitcoin] = MetricPattern20(client, _m(acc, 'btc'))
self.rolling: _1y24h30d7dPattern2 = _1y24h30d7dPattern2(client, acc)
self.sats: MetricPattern20[Sats] = MetricPattern20(client, acc)
self.usd: MetricPattern20[Dollars] = MetricPattern20(client, _m(acc, 'usd'))
class _1h24hBlockTxindexPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -3075,32 +3122,14 @@ class _30dHalvedTotalPattern:
self.halved: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'supply_halved'))
self.total: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'supply'))
class BtcSatsUsdPattern2:
class BaseCumulativeSumPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.btc: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc'))
self.sats: CumulativeHeightPattern[Sats] = CumulativeHeightPattern(client, acc)
self.usd: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'usd'))
class BtcSatsUsdPattern3:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.btc: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc'))
self.sats: CumulativeHeightRollingPattern[Sats] = CumulativeHeightRollingPattern(client, acc)
self.usd: CumulativeHeightRollingPattern[Dollars] = CumulativeHeightRollingPattern(client, _m(acc, 'usd'))
class BtcSatsUsdPattern4:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.btc: MetricPattern1[Bitcoin] = MetricPattern1(client, _m(acc, 'btc'))
self.sats: CumulativeHeightSumPattern[Sats] = CumulativeHeightSumPattern(client, acc)
self.usd: CumulativeHeightSumPattern[Dollars] = CumulativeHeightSumPattern(client, _m(acc, 'usd'))
self.base: BtcSatsUsdPattern = BtcSatsUsdPattern(client, acc)
self.cumulative: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'cumulative'))
self.sum: _1y24h30d7dPattern2 = _1y24h30d7dPattern2(client, _m(acc, 'sum'))
class BtcSatsUsdPattern:
"""Pattern struct for repeated tree structure."""
@@ -3129,15 +3158,6 @@ class HistogramLineSignalPattern:
self.line: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'line_1y'))
self.signal: MetricPattern1[StoredF32] = MetricPattern1(client, _m(acc, 'signal_1y'))
class CumulativeHeightRollingPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.height: MetricPattern20[T] = MetricPattern20(client, acc)
self.rolling: AverageMaxMedianMinP10P25P75P90SumPattern = AverageMaxMedianMinP10P25P75P90SumPattern(client, acc)
class CumulativeHeightSumPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
@@ -3155,13 +3175,29 @@ class _30dCountPattern:
self._30d_change: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, '30d_change'))
self.count: MetricPattern1[StoredU64] = MetricPattern1(client, acc)
class BaseCumulativePattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.base: BtcSatsUsdPattern = BtcSatsUsdPattern(client, acc)
self.cumulative: BtcSatsUsdPattern = BtcSatsUsdPattern(client, _m(acc, 'cumulative'))
class BaseRestPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.base: MetricPattern20[StoredU64] = MetricPattern20(client, acc)
self.rest: AverageCumulativeMaxMedianMinP10P25P75P90SumPattern = AverageCumulativeMaxMedianMinP10P25P75P90SumPattern(client, acc)
self.rest: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, acc)
class CumulativeHeightPattern:
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.cumulative: MetricPattern1[Dollars] = MetricPattern1(client, _m(acc, 'cumulative'))
self.height: MetricPattern20[Dollars] = MetricPattern20(client, acc)
class MaxMinPattern:
"""Pattern struct for repeated tree structure."""
@@ -3195,14 +3231,6 @@ class UtxoPattern:
self.utxo_count: MetricPattern1[StoredU64] = MetricPattern1(client, acc)
self.utxo_count_30d_change: MetricPattern1[StoredF64] = MetricPattern1(client, _m(acc, '30d_change'))
class CumulativeHeightPattern(Generic[T]):
"""Pattern struct for repeated tree structure."""
def __init__(self, client: BrkClientBase, acc: str):
"""Create pattern node with accumulated metric name."""
self.cumulative: MetricPattern1[T] = MetricPattern1(client, _m(acc, 'cumulative'))
self.height: MetricPattern20[T] = MetricPattern20(client, acc)
class RatioPattern2:
"""Pattern struct for repeated tree structure."""
@@ -3264,11 +3292,11 @@ class MetricsTree_Blocks_Weight:
self.average: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_average')
self.min: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_min')
self.max: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_max')
self.p10: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p10')
self.p25: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p25')
self.pct10: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p10')
self.pct25: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p25')
self.median: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_median')
self.p75: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p75')
self.p90: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p90')
self.pct75: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p75')
self.pct90: _1y24h30d7dPattern[Weight] = _1y24h30d7dPattern(client, 'block_weight_p90')
class MetricsTree_Blocks_Count:
"""Metrics tree node."""
@@ -3327,17 +3355,17 @@ class MetricsTree_Blocks:
self.total_size: MetricPattern20[StoredU64] = MetricPattern20(client, 'total_size')
self.weight: MetricsTree_Blocks_Weight = MetricsTree_Blocks_Weight(client)
self.count: MetricsTree_Blocks_Count = MetricsTree_Blocks_Count(client)
self.interval: AverageHeightMaxMedianMinP10P25P75P90Pattern[Timestamp] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'block_interval')
self.interval: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[Timestamp] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'block_interval')
self.halving: MetricsTree_Blocks_Halving = MetricsTree_Blocks_Halving(client)
self.vbytes: CumulativeHeightRollingPattern[StoredU64] = CumulativeHeightRollingPattern(client, 'block_vbytes')
self.size: AverageCumulativeMaxMedianMinP10P25P75P90SumPattern = AverageCumulativeMaxMedianMinP10P25P75P90SumPattern(client, 'block_size')
self.fullness: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'block_fullness')
self.vbytes: AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, 'block_vbytes')
self.size: AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, 'block_size')
self.fullness: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'block_fullness')
class MetricsTree_Transactions_Count:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.tx_count: CumulativeHeightRollingPattern[StoredU64] = CumulativeHeightRollingPattern(client, 'tx_count')
self.tx_count: AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern = AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern(client, 'tx_count')
self.is_coinbase: MetricPattern21[StoredBool] = MetricPattern21(client, 'is_coinbase')
class MetricsTree_Transactions_Size:
@@ -3368,8 +3396,8 @@ class MetricsTree_Transactions_Volume:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.sent_sum: BtcRollingSatsUsdPattern = BtcRollingSatsUsdPattern(client, 'sent_sum')
self.received_sum: BtcRollingSatsUsdPattern = BtcRollingSatsUsdPattern(client, 'received_sum')
self.sent_sum: _1y24h30d7dBtcSatsUsdPattern = _1y24h30d7dBtcSatsUsdPattern(client, 'sent_sum')
self.received_sum: _1y24h30d7dBtcSatsUsdPattern = _1y24h30d7dBtcSatsUsdPattern(client, 'received_sum')
self.annualized_volume: BtcSatsUsdPattern = BtcSatsUsdPattern(client, 'annualized_volume')
self.tx_per_sec: MetricPattern1[StoredF32] = MetricPattern1(client, 'tx_per_sec')
self.outputs_per_sec: MetricPattern1[StoredF32] = MetricPattern1(client, 'outputs_per_sec')
@@ -3477,14 +3505,19 @@ class MetricsTree_Scripts_Count:
self.emptyoutput: CumulativeHeightSumPattern[StoredU64] = CumulativeHeightSumPattern(client, 'emptyoutput_count')
self.unknownoutput: CumulativeHeightSumPattern[StoredU64] = CumulativeHeightSumPattern(client, 'unknownoutput_count')
self.segwit: CumulativeHeightSumPattern[StoredU64] = CumulativeHeightSumPattern(client, 'segwit_count')
self.taproot_adoption: MetricPattern1[StoredF32] = MetricPattern1(client, 'taproot_adoption')
self.segwit_adoption: MetricPattern1[StoredF32] = MetricPattern1(client, 'segwit_adoption')
class MetricsTree_Scripts_Value:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.opreturn: BtcSatsUsdPattern3 = BtcSatsUsdPattern3(client, 'opreturn_value')
self.opreturn: _1y24h30d7dBaseCumulativePattern = _1y24h30d7dBaseCumulativePattern(client, 'opreturn_value')
class MetricsTree_Scripts_Adoption:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.taproot: MetricPattern1[StoredF32] = MetricPattern1(client, 'taproot_adoption')
self.segwit: MetricPattern1[StoredF32] = MetricPattern1(client, 'segwit_adoption')
class MetricsTree_Scripts:
"""Metrics tree node."""
@@ -3500,15 +3533,16 @@ class MetricsTree_Scripts:
self.unknown_to_txindex: MetricPattern35[TxIndex] = MetricPattern35(client, 'txindex')
self.count: MetricsTree_Scripts_Count = MetricsTree_Scripts_Count(client)
self.value: MetricsTree_Scripts_Value = MetricsTree_Scripts_Value(client)
self.adoption: MetricsTree_Scripts_Adoption = MetricsTree_Scripts_Adoption(client)
class MetricsTree_Mining_Rewards:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.coinbase: BtcSatsUsdPattern3 = BtcSatsUsdPattern3(client, 'coinbase')
self.subsidy: BtcSatsUsdPattern3 = BtcSatsUsdPattern3(client, 'subsidy')
self.fees: BtcSatsUsdPattern3 = BtcSatsUsdPattern3(client, 'fees')
self.unclaimed_rewards: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, 'unclaimed_rewards')
self.coinbase: _1y24h30d7dBaseCumulativePattern = _1y24h30d7dBaseCumulativePattern(client, 'coinbase')
self.subsidy: _1y24h30d7dBaseCumulativePattern = _1y24h30d7dBaseCumulativePattern(client, 'subsidy')
self.fees: _1y24h30d7dBaseCumulativePattern = _1y24h30d7dBaseCumulativePattern(client, 'fees')
self.unclaimed_rewards: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, 'unclaimed_rewards')
self.fee_dominance: MetricPattern1[StoredF32] = MetricPattern1(client, 'fee_dominance')
self.fee_dominance_24h: MetricPattern1[StoredF32] = MetricPattern1(client, 'fee_dominance_24h')
self.fee_dominance_7d: MetricPattern1[StoredF32] = MetricPattern1(client, 'fee_dominance_7d')
@@ -3960,7 +3994,7 @@ class MetricsTree_Market_Lookback:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self._1d: SatsUsdPattern = SatsUsdPattern(client, 'price_1d_ago')
self._24h: SatsUsdPattern = SatsUsdPattern(client, 'price_24h_ago')
self._1w: SatsUsdPattern = SatsUsdPattern(client, 'price_1w_ago')
self._1m: SatsUsdPattern = SatsUsdPattern(client, 'price_1m_ago')
self._3m: SatsUsdPattern = SatsUsdPattern(client, 'price_3m_ago')
@@ -3978,7 +4012,7 @@ class MetricsTree_Market_Returns_PriceReturns:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self._1d: MetricPattern1[StoredF32] = MetricPattern1(client, '1d_price_returns')
self._24h: MetricPattern1[StoredF32] = MetricPattern1(client, '24h_price_returns')
self._1w: MetricPattern1[StoredF32] = MetricPattern1(client, '1w_price_returns')
self._1m: MetricPattern1[StoredF32] = MetricPattern1(client, '1m_price_returns')
self._3m: MetricPattern1[StoredF32] = MetricPattern1(client, '3m_price_returns')
@@ -4909,15 +4943,15 @@ class MetricsTree_Distribution_GrowthRate:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.all: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'growth_rate')
self.p2pk65: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2pk65_growth_rate')
self.p2pk33: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2pk33_growth_rate')
self.p2pkh: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2pkh_growth_rate')
self.p2sh: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2sh_growth_rate')
self.p2wpkh: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2wpkh_growth_rate')
self.p2wsh: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2wsh_growth_rate')
self.p2tr: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2tr_growth_rate')
self.p2a: AverageHeightMaxMedianMinP10P25P75P90Pattern[StoredF32] = AverageHeightMaxMedianMinP10P25P75P90Pattern(client, 'p2a_growth_rate')
self.all: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'growth_rate')
self.p2pk65: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2pk65_growth_rate')
self.p2pk33: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2pk33_growth_rate')
self.p2pkh: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2pkh_growth_rate')
self.p2sh: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2sh_growth_rate')
self.p2wpkh: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2wpkh_growth_rate')
self.p2wsh: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2wsh_growth_rate')
self.p2tr: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2tr_growth_rate')
self.p2a: AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern[StoredF32] = AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern(client, 'p2a_growth_rate')
class MetricsTree_Distribution:
"""Metrics tree node."""
@@ -4941,8 +4975,8 @@ class MetricsTree_Supply_Burned:
"""Metrics tree node."""
def __init__(self, client: BrkClientBase, base_path: str = ''):
self.opreturn: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, 'opreturn_supply')
self.unspendable: BtcSatsUsdPattern4 = BtcSatsUsdPattern4(client, 'unspendable_supply')
self.opreturn: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, 'opreturn_supply')
self.unspendable: BaseCumulativeSumPattern = BaseCumulativeSumPattern(client, 'unspendable_supply')
class MetricsTree_Supply_Velocity:
"""Metrics tree node."""

View File

@@ -56,7 +56,7 @@ def test_fetch_typed_metric():
)
print(e)
f = (
client.metrics.distribution.address_cohorts.amount_range._10k_sats_to_100k_sats.activity.sent.usd.cumulative.by.day1()
client.metrics.distribution.address_cohorts.amount_range._10k_sats_to_100k_sats.activity.sent.cumulative.usd.by.day1()
.tail(10)
.fetch()
)

View File

@@ -87,7 +87,7 @@ export function createChart({ parent, brk, fitContent }) {
const getTimeEndpoint = (idx) =>
idx === "height"
? brk.metrics.blocks.time.timestampMonotonic.by[idx]
: brk.metrics.blocks.time.timestamp.by[idx];
: /** @type {any} */ (brk.metrics.blocks.time.timestamp)[idx].by[idx];
const index = {
/** @type {Set<(index: ChartableIndex) => void>} */
@@ -137,7 +137,7 @@ export function createChart({ parent, brk, fitContent }) {
if (cached) {
this.data = cached;
}
endpoint.slice(-10000).fetch((result) => {
endpoint.slice(-10000).fetch((/** @type {any} */ result) => {
if (currentGen !== generation) return;
cache.set(endpoint.path, result);
this.data = result;

View File

@@ -1,7 +1,7 @@
import { colors } from "../utils/colors.js";
import { brk } from "../client.js";
import { Unit } from "../utils/units.js";
import { dots, line, price } from "./series.js";
import { dots, line, price, rollingWindowsTree } from "./series.js";
import { satsBtcUsd, createPriceRatioCharts } from "./shared.js";
/**
@@ -255,11 +255,11 @@ export function createCointimeSection() {
name: "Compare",
tree: [
{
name: "Sum",
name: "Base",
title: "Coinblocks",
bottom: coinblocks.map(({ pattern, name, color }) =>
line({
metric: pattern.sum,
metric: pattern.height,
name,
color,
unit: Unit.coinblocks,
@@ -284,17 +284,18 @@ export function createCointimeSection() {
name,
tree: [
{
name: "Sum",
name: "Base",
title,
bottom: [
line({
metric: pattern.sum,
metric: pattern.height,
name,
color,
unit: Unit.coinblocks,
}),
],
},
rollingWindowsTree({ windows: pattern.sum, title, unit: Unit.coinblocks }),
{
name: "Cumulative",
title: `${title} (Total)`,
@@ -320,14 +321,14 @@ export function createCointimeSection() {
name: "Compare",
tree: [
{
name: "Sum",
name: "Base",
title: "Cointime Value",
bottom: [
...cointimeValues.map(({ pattern, name, color }) =>
line({ metric: pattern.sum, name, color, unit: Unit.usd }),
line({ metric: pattern.height, name, color, unit: Unit.usd }),
),
line({
metric: vocdd.pattern.sum,
metric: vocdd.pattern.height,
name: vocdd.name,
color: vocdd.color,
unit: Unit.usd,
@@ -360,12 +361,13 @@ export function createCointimeSection() {
name,
tree: [
{
name: "Sum",
name: "Base",
title,
bottom: [
line({ metric: pattern.sum, name, color, unit: Unit.usd }),
line({ metric: pattern.height, name, color, unit: Unit.usd }),
],
},
rollingWindowsTree({ windows: pattern.sum, title, unit: Unit.usd }),
{
name: "Cumulative",
title: `${title} (Total)`,
@@ -384,11 +386,11 @@ export function createCointimeSection() {
name: vocdd.name,
tree: [
{
name: "Sum",
name: "Base",
title: vocdd.title,
bottom: [
line({
metric: vocdd.pattern.sum,
metric: vocdd.pattern.height,
name: vocdd.name,
color: vocdd.color,
unit: Unit.usd,
@@ -401,6 +403,7 @@ export function createCointimeSection() {
}),
],
},
rollingWindowsTree({ windows: vocdd.pattern.sum, title: vocdd.title, unit: Unit.usd }),
{
name: "Cumulative",
title: `${vocdd.title} (Total)`,

View File

@@ -174,13 +174,13 @@ function coinsDestroyedTree(list, all, title) {
title: title("Coins Destroyed"),
bottom: flatMapCohortsWithAll(list, all, ({ name, color, tree }) => [
line({
metric: tree.activity.coinblocksDestroyed.sum,
metric: tree.activity.coinblocksDestroyed.sum._24h,
name,
color,
unit: Unit.coinblocks,
}),
line({
metric: tree.activity.coindaysDestroyed.sum,
metric: tree.activity.coindaysDestroyed.sum._24h,
name,
color,
unit: Unit.coindays,
@@ -677,19 +677,19 @@ export function createActivitySection({
defaultActive: false,
}),
line({
metric: tree.activity.sent.sats.sum,
metric: tree.activity.sent.base.sats,
name: "sum",
color,
unit: Unit.sats,
}),
line({
metric: tree.activity.sent.btc.sum,
metric: tree.activity.sent.base.btc,
name: "sum",
color,
unit: Unit.btc,
}),
line({
metric: tree.activity.sent.usd.sum,
metric: tree.activity.sent.base.usd,
name: "sum",
color,
unit: Unit.usd,
@@ -701,19 +701,19 @@ export function createActivitySection({
title: title("Sent Volume (Total)"),
bottom: [
line({
metric: tree.activity.sent.sats.cumulative,
metric: tree.activity.sent.cumulative.sats,
name: "all-time",
color,
unit: Unit.sats,
}),
line({
metric: tree.activity.sent.btc.cumulative,
metric: tree.activity.sent.cumulative.btc,
name: "all-time",
color,
unit: Unit.btc,
}),
line({
metric: tree.activity.sent.usd.cumulative,
metric: tree.activity.sent.cumulative.usd,
name: "all-time",
color,
unit: Unit.usd,
@@ -816,13 +816,13 @@ export function createActivitySection({
title: title("Coins Destroyed"),
bottom: [
line({
metric: tree.activity.coinblocksDestroyed.sum,
metric: tree.activity.coinblocksDestroyed.sum._24h,
name: "Coinblocks",
color,
unit: Unit.coinblocks,
}),
line({
metric: tree.activity.coindaysDestroyed.sum,
metric: tree.activity.coindaysDestroyed.sum._24h,
name: "Coindays",
color,
unit: Unit.coindays,
@@ -1021,9 +1021,9 @@ export function createGroupedActivitySection({
bottom: flatMapCohortsWithAll(list, all, ({ name, color, tree }) =>
satsBtcUsd({
pattern: {
sats: tree.activity.sent.sats.sum,
btc: tree.activity.sent.btc.sum,
usd: tree.activity.sent.usd.sum,
sats: tree.activity.sent.base.sats,
btc: tree.activity.sent.base.btc,
usd: tree.activity.sent.base.usd,
},
name,
color,

View File

@@ -74,7 +74,7 @@ export function createPricesSectionFull({ cohort, title }) {
context: cohort.name,
legend: "Realized",
pricePattern: tree.realized.realizedPrice,
ratio: tree.realized.realizedPriceExtra,
ratio: { ...tree.realized.realizedPriceExtra, ...tree.realized.realizedPriceRatioExt },
color,
priceTitle: title("Realized Price"),
titlePrefix: "Realized Price",
@@ -86,7 +86,7 @@ export function createPricesSectionFull({ cohort, title }) {
context: cohort.name,
legend: "Investor",
pricePattern: tree.realized.investorPrice,
ratio: tree.realized.investorPriceExtra,
ratio: { ...tree.realized.investorPriceExtra, ...tree.realized.investorPriceRatioExt },
color,
priceTitle: title("Investor Price"),
titlePrefix: "Investor Price",

View File

@@ -519,21 +519,21 @@ function realizedPnlSum(tree) {
unit: Unit.usd,
}),
dots({
metric: r.realizedProfit.sum,
metric: r.realizedProfit.height,
name: "Profit",
color: colors.profit,
unit: Unit.usd,
defaultActive: false,
}),
dots({
metric: r.negRealizedLoss.sum,
metric: r.negRealizedLoss,
name: "Negative Loss",
color: colors.loss,
unit: Unit.usd,
defaultActive: false,
}),
dots({
metric: r.realizedLoss.sum,
metric: r.realizedLoss.height,
name: "Loss",
color: colors.loss,
unit: Unit.usd,
@@ -547,13 +547,13 @@ function realizedPnlSum(tree) {
defaultActive: false,
}),
baseline({
metric: r.realizedProfitRelToRealizedCap.sum,
metric: r.realizedProfitRelToRealizedCap,
name: "Profit",
color: colors.profit,
unit: Unit.pctRcap,
}),
baseline({
metric: r.realizedLossRelToRealizedCap.sum,
metric: r.realizedLossRelToRealizedCap,
name: "Loss",
color: colors.loss,
unit: Unit.pctRcap,
@@ -575,13 +575,13 @@ function realizedNetPnlSum(tree) {
unit: Unit.usd,
}),
dotsBaseline({
metric: r.netRealizedPnl.sum,
metric: r.netRealizedPnl.height,
name: "Net",
unit: Unit.usd,
defaultActive: false,
}),
baseline({
metric: r.netRealizedPnlRelToRealizedCap.sum,
metric: r.netRealizedPnlRelToRealizedCap,
name: "Net",
unit: Unit.pctRcap,
}),
@@ -609,20 +609,20 @@ function realizedPnlCumulative(tree) {
unit: Unit.usd,
}),
line({
metric: r.negRealizedLoss.cumulative,
metric: r.negRealizedLoss,
name: "Negative Loss",
color: colors.loss,
unit: Unit.usd,
defaultActive: false,
}),
baseline({
metric: r.realizedProfitRelToRealizedCap.cumulative,
metric: r.realizedProfitRelToRealizedCap,
name: "Profit",
color: colors.profit,
unit: Unit.pctRcap,
}),
baseline({
metric: r.realizedLossRelToRealizedCap.cumulative,
metric: r.realizedLossRelToRealizedCap,
name: "Loss",
color: colors.loss,
unit: Unit.pctRcap,
@@ -644,7 +644,7 @@ function realizedNetPnlCumulative(tree) {
unit: Unit.usd,
}),
baseline({
metric: r.netRealizedPnlRelToRealizedCap.cumulative,
metric: r.netRealizedPnlRelToRealizedCap,
name: "Net",
unit: Unit.pctRcap,
}),
@@ -704,13 +704,13 @@ function sentInPnlTree(tree, title) {
}),
...satsBtcUsdFrom({
source: r.sentInProfit,
key: "sum",
key: "base",
name: "In Profit",
color: colors.profit,
}),
...satsBtcUsdFrom({
source: r.sentInLoss,
key: "sum",
key: "base",
name: "In Loss",
color: colors.loss,
}),
@@ -945,7 +945,7 @@ function realizedSubfolder(tree, title, rollingTree) {
title: title("Realized Peak Regret"),
bottom: [
line({
metric: r.peakRegret.sum,
metric: r.peakRegret.height,
name: "Peak Regret",
unit: Unit.usd,
}),
@@ -1839,7 +1839,7 @@ function groupedRealizedPnlSum(list, all, title) {
title: title("Realized Profit"),
bottom: mapCohortsWithAll(list, all, ({ name, color, tree }) =>
line({
metric: tree.realized.realizedProfit.sum,
metric: tree.realized.realizedProfit.height,
name,
color,
unit: Unit.usd,
@@ -1851,7 +1851,7 @@ function groupedRealizedPnlSum(list, all, title) {
title: title("Realized Loss"),
bottom: mapCohortsWithAll(list, all, ({ name, color, tree }) =>
line({
metric: tree.realized.negRealizedLoss.sum,
metric: tree.realized.negRealizedLoss,
name,
color,
unit: Unit.usd,
@@ -1936,7 +1936,7 @@ function groupedRealizedPnlCumulative(list, all, title) {
title: title("Cumulative Realized Loss"),
bottom: mapCohortsWithAll(list, all, ({ name, color, tree }) =>
line({
metric: tree.realized.negRealizedLoss.cumulative,
metric: tree.realized.realizedLoss.cumulative,
name,
color,
unit: Unit.usd,
@@ -1973,7 +1973,7 @@ function groupedSentInPnl(list, all, title) {
...flatMapCohortsWithAll(list, all, ({ name, color, tree }) =>
satsBtcUsdFrom({
source: tree.realized.sentInProfit,
key: "sum",
key: "base",
name,
color,
}),
@@ -1995,7 +1995,7 @@ function groupedSentInPnl(list, all, title) {
...flatMapCohortsWithAll(list, all, ({ name, color, tree }) =>
satsBtcUsdFrom({
source: tree.realized.sentInLoss,
key: "sum",
key: "base",
name,
color,
}),
@@ -2104,7 +2104,7 @@ function groupedRealizedSubfolder(list, all, title) {
title: title("Net Realized P&L"),
bottom: mapCohortsWithAll(list, all, ({ name, color, tree }) =>
baseline({
metric: tree.realized.netRealizedPnl.sum,
metric: tree.realized.netRealizedPnl.height,
name,
color,
unit: Unit.usd,
@@ -2166,7 +2166,7 @@ function groupedRealizedSubfolderWithExtras(list, all, title) {
title: title("Net Realized P&L"),
bottom: mapCohortsWithAll(list, all, ({ name, color, tree }) =>
baseline({
metric: tree.realized.netRealizedPnl.sum,
metric: tree.realized.netRealizedPnl.height,
name,
color,
unit: Unit.usd,

View File

@@ -76,7 +76,7 @@ export function createValuationSectionFull({ cohort, title }) {
createRatioChart({
title,
pricePattern: tree.realized.realizedPrice,
ratio: tree.realized.realizedPriceExtra,
ratio: { ...tree.realized.realizedPriceExtra, ...tree.realized.realizedPriceRatioExt },
color,
name: "MVRV",
}),

View File

@@ -9,11 +9,11 @@ import {
dots,
dotted,
distributionBtcSatsUsd,
rollingWindowsTree,
} from "./series.js";
import {
satsBtcUsd,
satsBtcUsdFrom,
satsBtcUsdFromFull,
revenueBtcSatsUsd,
} from "./shared.js";
import { brk } from "../client.js";
@@ -53,7 +53,7 @@ const ANTPOOL_AND_FRIENDS_IDS = /** @type {const} */ ([
* @returns {PartialOptionsGroup}
*/
export function createMiningSection() {
const { blocks, transactions, pools, mining } = brk.metrics;
const { blocks, pools, mining } = brk.metrics;
// Pre-compute pool entries with resolved names
const poolData = entries(pools.vecs).map(([id, pool]) => ({
@@ -116,44 +116,17 @@ export function createMiningSection() {
name: "Blocks Mined",
tree: [
{
name: "Sum",
name: "Base",
title: `Blocks Mined: ${name}`,
bottom: [
line({
metric: pool.blocksMined.sum,
name: "sum",
metric: pool.blocksMined.height,
name: "base",
unit: Unit.count,
}),
line({
metric: pool.blocksMined24hSum,
name: "24h",
color: colors.time._24h,
unit: Unit.count,
defaultActive: false,
}),
line({
metric: pool.blocksMined1wSum,
name: "1w",
color: colors.time._1w,
unit: Unit.count,
defaultActive: false,
}),
line({
metric: pool.blocksMined1mSum,
name: "1m",
color: colors.time._1m,
unit: Unit.count,
defaultActive: false,
}),
line({
metric: pool.blocksMined1ySum,
name: "1y",
color: colors.time._1y,
unit: Unit.count,
defaultActive: false,
}),
],
},
rollingWindowsTree({ windows: pool.blocksMined.sum, title: `Blocks Mined: ${name}`, unit: Unit.count }),
{
name: "Cumulative",
title: `Blocks Mined: ${name} (Total)`,
@@ -177,7 +150,7 @@ export function createMiningSection() {
coinbase: pool.coinbase,
subsidy: pool.subsidy,
fee: pool.fee,
key: "sum",
key: "base",
}),
},
{
@@ -372,8 +345,8 @@ export function createMiningSection() {
bottom: revenueBtcSatsUsd({
coinbase: mining.rewards.coinbase,
subsidy: mining.rewards.subsidy,
fee: transactions.fees.fee,
key: "sum",
fee: mining.rewards.fees,
key: "base",
}),
},
{
@@ -382,7 +355,7 @@ export function createMiningSection() {
bottom: revenueBtcSatsUsd({
coinbase: mining.rewards.coinbase,
subsidy: mining.rewards.subsidy,
fee: transactions.fees.fee,
fee: mining.rewards.fees,
key: "cumulative",
}),
},
@@ -394,24 +367,11 @@ export function createMiningSection() {
{
name: "Sum",
title: "Coinbase Rewards",
bottom: [
...satsBtcUsdFromFull({
source: mining.rewards.coinbase,
key: "base",
name: "sum",
}),
...satsBtcUsdFrom({
source: mining.rewards.coinbase,
key: "sum",
name: "sum",
}),
...satsBtcUsd({
pattern: mining.rewards.coinbase24hSum,
name: "24h",
color: colors.time._24h,
defaultActive: false,
}),
],
bottom: satsBtcUsdFrom({
source: mining.rewards.coinbase,
key: "base",
name: "sum",
}),
},
{
name: "Rolling",
@@ -421,22 +381,22 @@ export function createMiningSection() {
title: "Coinbase Rolling Sum",
bottom: [
...satsBtcUsd({
pattern: mining.rewards.coinbase24hSum,
pattern: mining.rewards.coinbase._24h.sum,
name: "24h",
color: colors.time._24h,
}),
...satsBtcUsd({
pattern: mining.rewards.coinbase7dSum,
pattern: mining.rewards.coinbase._7d.sum,
name: "7d",
color: colors.time._1w,
}),
...satsBtcUsd({
pattern: mining.rewards.coinbase30dSum,
pattern: mining.rewards.coinbase._30d.sum,
name: "30d",
color: colors.time._1m,
}),
...satsBtcUsd({
pattern: mining.rewards.coinbase1ySum,
pattern: mining.rewards.coinbase._1y.sum,
name: "1y",
color: colors.time._1y,
}),
@@ -446,7 +406,7 @@ export function createMiningSection() {
name: "24h",
title: "Coinbase 24h Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.coinbase24hSum,
pattern: mining.rewards.coinbase._24h.sum,
name: "24h",
color: colors.time._24h,
}),
@@ -455,7 +415,7 @@ export function createMiningSection() {
name: "7d",
title: "Coinbase 7d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.coinbase7dSum,
pattern: mining.rewards.coinbase._7d.sum,
name: "7d",
color: colors.time._1w,
}),
@@ -464,7 +424,7 @@ export function createMiningSection() {
name: "30d",
title: "Coinbase 30d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.coinbase30dSum,
pattern: mining.rewards.coinbase._30d.sum,
name: "30d",
color: colors.time._1m,
}),
@@ -473,7 +433,7 @@ export function createMiningSection() {
name: "1y",
title: "Coinbase 1y Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.coinbase1ySum,
pattern: mining.rewards.coinbase._1y.sum,
name: "1y",
color: colors.time._1y,
}),
@@ -483,7 +443,7 @@ export function createMiningSection() {
{
name: "Distribution",
title: "Coinbase Rewards per Block Distribution",
bottom: distributionBtcSatsUsd(mining.rewards.coinbase),
bottom: distributionBtcSatsUsd(mining.rewards.coinbase._24h),
},
{
name: "Cumulative",
@@ -503,14 +463,9 @@ export function createMiningSection() {
name: "Sum",
title: "Block Subsidy",
bottom: [
...satsBtcUsdFromFull({
source: mining.rewards.subsidy,
key: "base",
name: "sum",
}),
...satsBtcUsdFrom({
source: mining.rewards.subsidy,
key: "sum",
key: "base",
name: "sum",
}),
line({
@@ -522,10 +477,77 @@ export function createMiningSection() {
}),
],
},
{
name: "Rolling",
tree: [
{
name: "Compare",
title: "Subsidy Rolling Sum",
bottom: [
...satsBtcUsd({
pattern: mining.rewards.subsidy._24h.sum,
name: "24h",
color: colors.time._24h,
}),
...satsBtcUsd({
pattern: mining.rewards.subsidy._7d.sum,
name: "7d",
color: colors.time._1w,
}),
...satsBtcUsd({
pattern: mining.rewards.subsidy._30d.sum,
name: "30d",
color: colors.time._1m,
}),
...satsBtcUsd({
pattern: mining.rewards.subsidy._1y.sum,
name: "1y",
color: colors.time._1y,
}),
],
},
{
name: "24h",
title: "Subsidy 24h Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.subsidy._24h.sum,
name: "24h",
color: colors.time._24h,
}),
},
{
name: "7d",
title: "Subsidy 7d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.subsidy._7d.sum,
name: "7d",
color: colors.time._1w,
}),
},
{
name: "30d",
title: "Subsidy 30d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.subsidy._30d.sum,
name: "30d",
color: colors.time._1m,
}),
},
{
name: "1y",
title: "Subsidy 1y Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.subsidy._1y.sum,
name: "1y",
color: colors.time._1y,
}),
},
],
},
{
name: "Distribution",
title: "Block Subsidy Distribution",
bottom: distributionBtcSatsUsd(mining.rewards.subsidy),
bottom: distributionBtcSatsUsd(mining.rewards.subsidy._24h),
},
{
name: "Cumulative",
@@ -545,8 +567,8 @@ export function createMiningSection() {
name: "Sum",
title: "Transaction Fee Revenue per Block",
bottom: satsBtcUsdFrom({
source: transactions.fees.fee,
key: "sum",
source: mining.rewards.fees,
key: "base",
name: "sum",
}),
},
@@ -558,22 +580,22 @@ export function createMiningSection() {
title: "Fee Rolling Sum",
bottom: [
...satsBtcUsd({
pattern: mining.rewards.fee24hSum,
pattern: mining.rewards.fees._24h.sum,
name: "24h",
color: colors.time._24h,
}),
...satsBtcUsd({
pattern: mining.rewards.fee7dSum,
pattern: mining.rewards.fees._7d.sum,
name: "7d",
color: colors.time._1w,
}),
...satsBtcUsd({
pattern: mining.rewards.fee30dSum,
pattern: mining.rewards.fees._30d.sum,
name: "30d",
color: colors.time._1m,
}),
...satsBtcUsd({
pattern: mining.rewards.fee1ySum,
pattern: mining.rewards.fees._1y.sum,
name: "1y",
color: colors.time._1y,
}),
@@ -583,7 +605,7 @@ export function createMiningSection() {
name: "24h",
title: "Fee 24h Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.fee24hSum,
pattern: mining.rewards.fees._24h.sum,
name: "24h",
color: colors.time._24h,
}),
@@ -592,7 +614,7 @@ export function createMiningSection() {
name: "7d",
title: "Fee 7d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.fee7dSum,
pattern: mining.rewards.fees._7d.sum,
name: "7d",
color: colors.time._1w,
}),
@@ -601,7 +623,7 @@ export function createMiningSection() {
name: "30d",
title: "Fee 30d Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.fee30dSum,
pattern: mining.rewards.fees._30d.sum,
name: "30d",
color: colors.time._1m,
}),
@@ -610,7 +632,7 @@ export function createMiningSection() {
name: "1y",
title: "Fee 1y Rolling Sum",
bottom: satsBtcUsd({
pattern: mining.rewards.fee1ySum,
pattern: mining.rewards.fees._1y.sum,
name: "1y",
color: colors.time._1y,
}),
@@ -620,13 +642,13 @@ export function createMiningSection() {
{
name: "Distribution",
title: "Transaction Fee Revenue per Block Distribution",
bottom: distributionBtcSatsUsd(transactions.fees.fee),
bottom: distributionBtcSatsUsd(mining.rewards.fees._24h),
},
{
name: "Cumulative",
title: "Transaction Fee Revenue (Total)",
bottom: satsBtcUsdFrom({
source: transactions.fees.fee,
source: mining.rewards.fees,
key: "cumulative",
name: "all-time",
}),
@@ -813,7 +835,7 @@ export function createMiningSection() {
title: "Unclaimed Rewards",
bottom: satsBtcUsdFrom({
source: mining.rewards.unclaimedRewards,
key: "sum",
key: "base",
name: "sum",
}),
},
@@ -988,7 +1010,7 @@ export function createMiningSection() {
bottom: majorPools.flatMap((p, i) =>
satsBtcUsdFrom({
source: p.pool.coinbase,
key: "sum",
key: "base",
name: p.name,
color: colors.at(i, majorPools.length),
}),
@@ -1030,7 +1052,7 @@ export function createMiningSection() {
bottom: antpoolFriends.flatMap((p, i) =>
satsBtcUsdFrom({
source: p.pool.coinbase,
key: "sum",
key: "base",
name: p.name,
color: colors.at(i, antpoolFriends.length),
}),

View File

@@ -12,9 +12,12 @@ import {
fromSupplyPattern,
fromBaseStatsPattern,
chartsFromFullPerBlock,
chartsFromCount,
chartsFromValueFull,
fromStatsPattern,
chartsFromSumPerBlock,
statsAtWindow,
rollingWindowsTree,
} from "./series.js";
import { satsBtcUsd, satsBtcUsdFrom } from "./shared.js";
@@ -204,17 +207,34 @@ export function createNetworkSection() {
},
{
name: "New",
tree: chartsFromFullPerBlock({
pattern: distribution.newAddrCount[key],
title: `${titlePrefix}New Address Count`,
unit: Unit.count,
}),
tree: (() => {
const p = distribution.newAddrCount[key];
const t = `${titlePrefix}New Address Count`;
return [
{
name: "Sum",
title: t,
bottom: [
line({ metric: p.base, name: "base", unit: Unit.count }),
],
},
rollingWindowsTree({ windows: p.rest.sum, title: t, unit: Unit.count }),
{
name: "Cumulative",
title: `${t} (Total)`,
bottom: [
line({ metric: p.rest.cumulative, name: "all-time", unit: Unit.count }),
],
},
];
})(),
},
{
name: "Reactivated",
title: `${titlePrefix}Reactivated Addresses per Block`,
bottom: fromBaseStatsPattern({
pattern: distribution.addressActivity[key].reactivated,
window: "_24h",
unit: Unit.count,
}),
},
@@ -223,6 +243,7 @@ export function createNetworkSection() {
title: `${titlePrefix}Address Growth Rate per Block`,
bottom: fromBaseStatsPattern({
pattern: distribution.growthRate[key],
window: "_24h",
unit: Unit.ratio,
}),
},
@@ -235,6 +256,7 @@ export function createNetworkSection() {
title: `${titlePrefix}${t.title}`,
bottom: fromBaseStatsPattern({
pattern: distribution.addressActivity[key][t.key],
window: "_24h",
unit: Unit.count,
}),
})),
@@ -246,6 +268,7 @@ export function createNetworkSection() {
title: `${titlePrefix}${b.title}`,
bottom: fromBaseStatsPattern({
pattern: distribution.addressActivity[key][b.key],
window: "_24h",
unit: Unit.count,
}),
})),
@@ -287,7 +310,7 @@ export function createNetworkSection() {
unit: Unit.count,
}),
line({
metric: distribution.newAddrCount[t.key].sum,
metric: distribution.newAddrCount[t.key].rest.sum._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -299,13 +322,14 @@ export function createNetworkSection() {
title: `${groupName} Reactivated Addresses per Block`,
bottom: types.flatMap((t) => [
line({
metric: distribution.addressActivity[t.key].reactivated.base,
metric: distribution.addressActivity[t.key].reactivated.height,
name: t.name,
color: t.color,
unit: Unit.count,
}),
line({
metric: distribution.addressActivity[t.key].reactivated.average,
metric:
distribution.addressActivity[t.key].reactivated.average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -317,13 +341,13 @@ export function createNetworkSection() {
title: `${groupName} Address Growth Rate per Block`,
bottom: types.flatMap((t) => [
dots({
metric: distribution.growthRate[t.key].base,
metric: distribution.growthRate[t.key].height,
name: t.name,
color: t.color,
unit: Unit.ratio,
}),
dots({
metric: distribution.growthRate[t.key].average,
metric: distribution.growthRate[t.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.ratio,
@@ -337,13 +361,14 @@ export function createNetworkSection() {
title: `${groupName} ${tr.compareTitle}`,
bottom: types.flatMap((t) => [
line({
metric: distribution.addressActivity[t.key][tr.key].base,
metric: distribution.addressActivity[t.key][tr.key].height,
name: t.name,
color: t.color,
unit: Unit.count,
}),
line({
metric: distribution.addressActivity[t.key][tr.key].average,
metric:
distribution.addressActivity[t.key][tr.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -358,13 +383,14 @@ export function createNetworkSection() {
title: `${groupName} ${b.compareTitle}`,
bottom: types.flatMap((t) => [
line({
metric: distribution.addressActivity[t.key][b.key].base,
metric: distribution.addressActivity[t.key][b.key].height,
name: t.name,
color: t.color,
unit: Unit.count,
}),
line({
metric: distribution.addressActivity[t.key][b.key].average,
metric:
distribution.addressActivity[t.key][b.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -398,7 +424,7 @@ export function createNetworkSection() {
title: `${groupName} Output Count`,
bottom: types.map((t) =>
line({
metric: scripts.count[t.key].sum,
metric: /** @type {CountPattern<number>} */ (scripts.count[t.key]).sum._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -410,7 +436,7 @@ export function createNetworkSection() {
title: `${groupName} Output Count (Total)`,
bottom: types.map((t) =>
line({
metric: scripts.count[t.key].cumulative,
metric: /** @type {CountPattern<number>} */ (scripts.count[t.key]).cumulative,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -454,7 +480,7 @@ export function createNetworkSection() {
title: "Unspendable Supply",
bottom: satsBtcUsdFrom({
source: supply.burned.unspendable,
key: "sum",
key: "base",
name: "sum",
}),
},
@@ -495,7 +521,7 @@ export function createNetworkSection() {
name: "Fee Rate",
title: "Transaction Fee Rate",
bottom: fromStatsPattern({
pattern: transactions.fees.feeRate,
pattern: transactions.fees.feeRate.block,
unit: Unit.feeRate,
}),
},
@@ -532,11 +558,11 @@ export function createNetworkSection() {
title: "Transaction Size",
bottom: [
...fromStatsPattern({
pattern: transactions.size.weight,
pattern: transactions.size.weight.block,
unit: Unit.wu,
}),
...fromStatsPattern({
pattern: transactions.size.vsize,
pattern: transactions.size.vsize.block,
unit: Unit.vb,
}),
],
@@ -545,12 +571,12 @@ export function createNetworkSection() {
name: "Versions",
tree: [
{
name: "Sum",
name: "Base",
title: "Transaction Versions",
bottom: entries(transactions.versions).map(
([v, data], i, arr) =>
line({
metric: data.sum,
metric: data.height,
name: v,
color: colors.at(i, arr.length),
unit: Unit.count,
@@ -600,12 +626,12 @@ export function createNetworkSection() {
name: "Count",
tree: [
{
name: "Sum",
name: "Base",
title: "Block Count",
bottom: [
line({
metric: blocks.count.blockCount.sum,
name: "sum",
metric: blocks.count.blockCount.height,
name: "base",
unit: Unit.count,
}),
line({
@@ -617,36 +643,11 @@ export function createNetworkSection() {
}),
],
},
{
name: "Rolling",
title: "Block Count (Rolling)",
bottom: [
line({
metric: blocks.count.blockCount24hSum,
name: "24h",
color: colors.time._24h,
unit: Unit.count,
}),
line({
metric: blocks.count.blockCount1wSum,
name: "1w",
color: colors.time._1w,
unit: Unit.count,
}),
line({
metric: blocks.count.blockCount1mSum,
name: "1m",
color: colors.time._1m,
unit: Unit.count,
}),
line({
metric: blocks.count.blockCount1ySum,
name: "1y",
color: colors.time._1y,
unit: Unit.count,
}),
],
},
rollingWindowsTree({
windows: blocks.count.blockCountSum,
title: "Block Count",
unit: Unit.count,
}),
{
name: "Cumulative",
title: "Block Count (Total)",
@@ -666,6 +667,7 @@ export function createNetworkSection() {
bottom: [
...fromBaseStatsPattern({
pattern: blocks.interval,
window: "_24h",
unit: Unit.secs,
}),
priceLine({ unit: Unit.secs, name: "Target", number: 600 }),
@@ -675,7 +677,7 @@ export function createNetworkSection() {
name: "Size",
tree: [
{
name: "Sum",
name: "Base",
title: "Block Size",
bottom: [
line({
@@ -683,13 +685,9 @@ export function createNetworkSection() {
name: "base",
unit: Unit.bytes,
}),
line({
metric: blocks.size.sum,
name: "sum",
unit: Unit.bytes,
}),
],
},
rollingWindowsTree({ windows: blocks.size.sum, title: "Block Size", unit: Unit.bytes }),
{
name: "Distribution",
title: "Block Size Distribution",
@@ -700,7 +698,7 @@ export function createNetworkSection() {
unit: Unit.bytes,
}),
...fromStatsPattern({
pattern: blocks.size,
pattern: statsAtWindow(blocks.size, "_24h"),
unit: Unit.bytes,
}),
],
@@ -722,7 +720,7 @@ export function createNetworkSection() {
name: "Weight",
tree: [
{
name: "Sum",
name: "Base",
title: "Block Weight",
bottom: [
line({
@@ -730,13 +728,9 @@ export function createNetworkSection() {
name: "base",
unit: Unit.wu,
}),
line({
metric: blocks.weight.sum,
name: "sum",
unit: Unit.wu,
}),
],
},
rollingWindowsTree({ windows: blocks.weight.sum, title: "Block Weight", unit: Unit.wu }),
{
name: "Distribution",
title: "Block Weight Distribution",
@@ -747,7 +741,7 @@ export function createNetworkSection() {
unit: Unit.wu,
}),
...fromStatsPattern({
pattern: blocks.weight,
pattern: statsAtWindow(blocks.weight, "_24h"),
unit: Unit.wu,
}),
],
@@ -769,32 +763,28 @@ export function createNetworkSection() {
name: "vBytes",
tree: [
{
name: "Sum",
name: "Base",
title: "Block vBytes",
bottom: [
line({
metric: blocks.vbytes.base,
metric: blocks.vbytes.height,
name: "base",
unit: Unit.vb,
}),
line({
metric: blocks.vbytes.sum,
name: "sum",
unit: Unit.vb,
}),
],
},
rollingWindowsTree({ windows: blocks.vbytes.sum, title: "Block vBytes", unit: Unit.vb }),
{
name: "Distribution",
title: "Block vBytes Distribution",
bottom: [
line({
metric: blocks.vbytes.base,
metric: blocks.vbytes.height,
name: "base",
unit: Unit.vb,
}),
...fromStatsPattern({
pattern: blocks.vbytes,
pattern: statsAtWindow(blocks.vbytes, "_24h"),
unit: Unit.vb,
}),
],
@@ -817,6 +807,7 @@ export function createNetworkSection() {
title: "Block Fullness",
bottom: fromBaseStatsPattern({
pattern: blocks.fullness,
window: "_24h",
unit: Unit.percentage,
}),
},
@@ -949,7 +940,7 @@ export function createNetworkSection() {
defaultActive: t.defaultActive,
}),
line({
metric: distribution.newAddrCount[t.key].sum,
metric: distribution.newAddrCount[t.key].rest.sum._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -963,7 +954,7 @@ export function createNetworkSection() {
bottom: addressTypes.flatMap((t) => [
line({
metric:
distribution.addressActivity[t.key].reactivated.base,
distribution.addressActivity[t.key].reactivated.height,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -971,7 +962,7 @@ export function createNetworkSection() {
}),
line({
metric:
distribution.addressActivity[t.key].reactivated.average,
distribution.addressActivity[t.key].reactivated.average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -984,14 +975,14 @@ export function createNetworkSection() {
title: "Address Growth Rate per Block by Type",
bottom: addressTypes.flatMap((t) => [
dots({
metric: distribution.growthRate[t.key].base,
metric: distribution.growthRate[t.key].height,
name: t.name,
color: t.color,
unit: Unit.ratio,
defaultActive: t.defaultActive,
}),
dots({
metric: distribution.growthRate[t.key].average,
metric: distribution.growthRate[t.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.ratio,
@@ -1006,7 +997,7 @@ export function createNetworkSection() {
title: tr.compareTitle,
bottom: addressTypes.flatMap((t) => [
line({
metric: distribution.addressActivity[t.key][tr.key].base,
metric: distribution.addressActivity[t.key][tr.key].height,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -1014,7 +1005,7 @@ export function createNetworkSection() {
}),
line({
metric:
distribution.addressActivity[t.key][tr.key].average,
distribution.addressActivity[t.key][tr.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -1030,7 +1021,7 @@ export function createNetworkSection() {
title: b.compareTitle,
bottom: addressTypes.flatMap((t) => [
line({
metric: distribution.addressActivity[t.key][b.key].base,
metric: distribution.addressActivity[t.key][b.key].height,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -1038,7 +1029,7 @@ export function createNetworkSection() {
}),
line({
metric:
distribution.addressActivity[t.key][b.key].average,
distribution.addressActivity[t.key][b.key].average._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -1104,7 +1095,7 @@ export function createNetworkSection() {
title: "Output Count by Script Type",
bottom: scriptTypes.map((t) =>
line({
metric: scripts.count[t.key].sum,
metric: /** @type {CountPattern<number>} */ (scripts.count[t.key]).sum._24h,
name: t.name,
color: t.color,
unit: Unit.count,
@@ -1133,8 +1124,8 @@ export function createNetworkSection() {
createScriptCompare("Legacy", legacyScripts),
...legacyScripts.map((t) => ({
name: t.name,
tree: chartsFromFullPerBlock({
pattern: scripts.count[t.key],
tree: chartsFromCount({
pattern: /** @type {CountPattern<number>} */ (scripts.count[t.key]),
title: `${t.name} Output Count`,
unit: Unit.count,
}),
@@ -1147,8 +1138,8 @@ export function createNetworkSection() {
createScriptCompare("Script Hash", scriptHashScripts),
...scriptHashScripts.map((t) => ({
name: t.name,
tree: chartsFromFullPerBlock({
pattern: scripts.count[t.key],
tree: chartsFromCount({
pattern: /** @type {CountPattern<number>} */ (scripts.count[t.key]),
title: `${t.name} Output Count`,
unit: Unit.count,
}),
@@ -1161,8 +1152,8 @@ export function createNetworkSection() {
createScriptCompare("SegWit", segwitScripts),
...segwitScripts.map((t) => ({
name: t.name,
tree: chartsFromFullPerBlock({
pattern: scripts.count[t.key],
tree: chartsFromCount({
pattern: /** @type {CountPattern<number>} */ (scripts.count[t.key]),
title: `${t.name} Output Count`,
unit: Unit.count,
}),
@@ -1175,8 +1166,8 @@ export function createNetworkSection() {
createScriptCompare("Taproot", taprootAddresses),
...taprootAddresses.map((t) => ({
name: t.name,
tree: chartsFromFullPerBlock({
pattern: scripts.count[t.key],
tree: chartsFromCount({
pattern: /** @type {CountPattern<number>} */ (scripts.count[t.key]),
title: `${t.name} Output Count`,
unit: Unit.count,
}),
@@ -1189,8 +1180,8 @@ export function createNetworkSection() {
createScriptCompare("Other", otherScripts),
...otherScripts.map((t) => ({
name: t.name,
tree: chartsFromFullPerBlock({
pattern: scripts.count[t.key],
tree: chartsFromCount({
pattern: /** @type {CountPattern<number>} */ (scripts.count[t.key]),
title: `${t.name} Output Count`,
unit: Unit.count,
}),
@@ -1207,13 +1198,13 @@ export function createNetworkSection() {
title: "Script Adoption",
bottom: [
line({
metric: scripts.count.segwitAdoption.cumulative,
metric: scripts.adoption.segwit,
name: "SegWit",
color: colors.segwit,
unit: Unit.percentage,
}),
line({
metric: scripts.count.taprootAdoption.cumulative,
metric: scripts.adoption.taproot,
name: "Taproot",
color: taprootAddresses[1].color,
unit: Unit.percentage,
@@ -1225,19 +1216,8 @@ export function createNetworkSection() {
title: "SegWit Adoption",
bottom: [
line({
metric: scripts.count.segwitAdoption.base,
name: "Base",
unit: Unit.percentage,
}),
line({
metric: scripts.count.segwitAdoption.sum,
name: "Sum",
unit: Unit.percentage,
}),
line({
metric: scripts.count.segwitAdoption.cumulative,
name: "All-Time",
color: colors.time.all,
metric: scripts.adoption.segwit,
name: "Adoption",
unit: Unit.percentage,
}),
],
@@ -1247,19 +1227,8 @@ export function createNetworkSection() {
title: "Taproot Adoption",
bottom: [
line({
metric: scripts.count.taprootAdoption.base,
name: "Base",
unit: Unit.percentage,
}),
line({
metric: scripts.count.taprootAdoption.sum,
name: "Sum",
unit: Unit.percentage,
}),
line({
metric: scripts.count.taprootAdoption.cumulative,
name: "All-Time",
color: colors.time.all,
metric: scripts.adoption.taproot,
name: "Adoption",
unit: Unit.percentage,
}),
],

View File

@@ -47,7 +47,7 @@ export function price({
/**
* Create percentile series (max/min/median/pct75/pct25/pct90/pct10) from any stats pattern
* @param {StatsPattern<any> | BaseStatsPattern<any> | FullStatsPattern<any> | AnyStatsPattern} pattern
* @param {DistributionStats} pattern
* @param {Unit} unit
* @param {string} title
* @returns {AnyFetchedSeriesBlueprint[]}
@@ -356,9 +356,10 @@ export function histogram({
}
/**
* Create series from a BaseStatsPattern (base + avg + percentiles, NO sum)
* Create series from an AverageHeightMaxMedianMinP10P25P75P90Pattern (height + rolling stats)
* @param {Object} args
* @param {BaseStatsPattern<any>} args.pattern
* @param {{ height: AnyMetricPattern } & Record<string, any>} args.pattern - Pattern with .height and rolling stats (p10/p25/p75/p90 as _1y24h30d7dPattern)
* @param {string} args.window - Rolling window key (e.g., '_24h', '_7d', '_30d', '_1y')
* @param {Unit} args.unit
* @param {string} [args.title]
* @param {Color} [args.baseColor]
@@ -367,34 +368,37 @@ export function histogram({
*/
export function fromBaseStatsPattern({
pattern,
window,
unit,
title = "",
baseColor,
avgActive = true,
}) {
const { stat } = colors;
const stats = statsAtWindow(pattern, window);
return [
dots({
metric: pattern.base,
metric: pattern.height,
name: title || "base",
color: baseColor,
unit,
}),
dots({
metric: pattern.average,
metric: stats.average,
name: `${title} avg`.trim(),
color: stat.avg,
unit,
defaultActive: avgActive,
}),
...percentileSeries(pattern, unit, title),
...percentileSeries(stats, unit, title),
];
}
/**
* Create series from any pattern with avg + percentiles (works with StatsPattern, SumStatsPattern, etc.)
* Create series from a flat stats pattern (average + pct percentiles as single metrics)
* Use statsAtWindow() to extract from patterns with _1y24h30d7dPattern stats
* @param {Object} args
* @param {StatsPattern<any> | BaseStatsPattern<any> | FullStatsPattern<any> | AnyStatsPattern} args.pattern
* @param {{ average: AnyMetricPattern, median: AnyMetricPattern, max: AnyMetricPattern, min: AnyMetricPattern, pct75: AnyMetricPattern, pct25: AnyMetricPattern, pct90: AnyMetricPattern, pct10: AnyMetricPattern }} args.pattern
* @param {Unit} args.unit
* @param {string} [args.title]
* @returns {AnyFetchedSeriesBlueprint[]}
@@ -412,14 +416,96 @@ export function fromStatsPattern({ pattern, unit, title = "" }) {
}
/**
* Create distribution series for btc/sats/usd from a value pattern with stats (average + percentiles)
* @param {FullValuePattern | SumValuePattern} source
* Extract stats at a specific rolling window from patterns with _1y24h30d7dPattern stats
* @param {Record<string, any>} pattern - Pattern with pct10/pct25/pct75/pct90 and average/median/max/min as _1y24h30d7dPattern
* @param {string} window
*/
export function statsAtWindow(pattern, window) {
return {
average: pattern.average[window],
median: pattern.median[window],
max: pattern.max[window],
min: pattern.min[window],
pct75: pattern.pct75[window],
pct25: pattern.pct25[window],
pct90: pattern.pct90[window],
pct10: pattern.pct10[window],
};
}
/**
* Create a Rolling folder tree from a _1y24h30d7dPattern (4 rolling windows)
* @param {Object} args
* @param {{ _24h: AnyMetricPattern, _7d: AnyMetricPattern, _30d: AnyMetricPattern, _1y: AnyMetricPattern }} args.windows
* @param {string} args.title
* @param {Unit} args.unit
* @returns {PartialOptionsGroup}
*/
export function rollingWindowsTree({ windows, title, unit }) {
return {
name: "Rolling",
tree: [
{
name: "Compare",
title: `${title} Rolling`,
bottom: [
line({ metric: windows._24h, name: "24h", color: colors.time._24h, unit }),
line({ metric: windows._7d, name: "7d", color: colors.time._1w, unit }),
line({ metric: windows._30d, name: "30d", color: colors.time._1m, unit }),
line({ metric: windows._1y, name: "1y", color: colors.time._1y, unit }),
],
},
{
name: "24h",
title: `${title} 24h`,
bottom: [line({ metric: windows._24h, name: "24h", color: colors.time._24h, unit })],
},
{
name: "7d",
title: `${title} 7d`,
bottom: [line({ metric: windows._7d, name: "7d", color: colors.time._1w, unit })],
},
{
name: "30d",
title: `${title} 30d`,
bottom: [line({ metric: windows._30d, name: "30d", color: colors.time._1m, unit })],
},
{
name: "1y",
title: `${title} 1y`,
bottom: [line({ metric: windows._1y, name: "1y", color: colors.time._1y, unit })],
},
],
};
}
/**
* Map a rolling window slot's stats to a specific unit, producing a stats-compatible pattern
* @param {RollingWindowSlot} slot - Rolling window slot (e.g., pattern.rolling._24h)
* @param {BtcSatsUsdKey} unitKey
*/
function rollingSlotForUnit(slot, unitKey) {
return {
average: slot.average[unitKey],
median: slot.median[unitKey],
max: slot.max[unitKey],
min: slot.min[unitKey],
pct75: slot.pct75[unitKey],
pct25: slot.pct25[unitKey],
pct90: slot.pct90[unitKey],
pct10: slot.pct10[unitKey],
};
}
/**
* Create distribution series for btc/sats/usd from a rolling window slot
* @param {RollingWindowSlot} slot - Rolling window slot (e.g., pattern.rolling._24h)
* @returns {AnyFetchedSeriesBlueprint[]}
*/
export const distributionBtcSatsUsd = (source) => [
...fromStatsPattern({ pattern: source.btc, unit: Unit.btc }),
...fromStatsPattern({ pattern: source.sats, unit: Unit.sats }),
...fromStatsPattern({ pattern: source.usd, unit: Unit.usd }),
export const distributionBtcSatsUsd = (slot) => [
...fromStatsPattern({ pattern: rollingSlotForUnit(slot, "btc"), unit: Unit.btc }),
...fromStatsPattern({ pattern: rollingSlotForUnit(slot, "sats"), unit: Unit.sats }),
...fromStatsPattern({ pattern: rollingSlotForUnit(slot, "usd"), unit: Unit.usd }),
];
/**
@@ -460,7 +546,7 @@ export function fromSupplyPattern({ pattern, title, color }) {
/**
* Create distribution series (avg + percentiles)
* @param {StatsPattern<any> | BaseStatsPattern<any> | FullStatsPattern<any> | AnyStatsPattern} pattern
* @param {DistributionStats} pattern
* @param {Unit} unit
* @returns {AnyFetchedSeriesBlueprint[]}
*/
@@ -556,9 +642,10 @@ function btcSatsUsdSeries({ metrics, name, color, defaultActive }) {
}
/**
* Split pattern with base + sum + distribution + cumulative into 3 charts
* Split flat per-block pattern into charts (Sum/Rolling/Distribution/Cumulative)
* Pattern has: .height, .cumulative, .sum (windowed), .average/.pct10/... (windowed, flat)
* @param {Object} args
* @param {FullStatsPattern<any>} args.pattern
* @param {FullPerBlockPattern} args.pattern
* @param {string} args.title
* @param {Unit} args.unit
* @param {string} [args.distributionSuffix]
@@ -577,15 +664,13 @@ export function chartsFromFull({
{
name: "Sum",
title,
bottom: [
{ metric: pattern.base, title: "sum", unit },
{ metric: pattern.sum, title: "sum", unit },
],
bottom: [{ metric: pattern.height, title: "base", unit }],
},
rollingWindowsTree({ windows: pattern.sum, title, unit }),
{
name: "Distribution",
title: distTitle,
bottom: distributionSeries(pattern, unit),
bottom: distributionSeries(statsAtWindow(pattern, "_24h"), unit),
},
{
name: "Cumulative",
@@ -596,9 +681,9 @@ export function chartsFromFull({
}
/**
* Split pattern into 3 charts with "per Block" in distribution title
* Split pattern into 4 charts with "per Block" in distribution title
* @param {Object} args
* @param {FullStatsPattern<any>} args.pattern
* @param {FullPerBlockPattern} args.pattern
* @param {string} args.title
* @param {Unit} args.unit
* @returns {PartialOptionsTree}
@@ -609,7 +694,7 @@ export const chartsFromFullPerBlock = (args) =>
/**
* Split pattern with sum + distribution + cumulative into 3 charts (no base)
* @param {Object} args
* @param {AnyStatsPattern} args.pattern
* @param {FullStatsPattern} args.pattern
* @param {string} args.title
* @param {Unit} args.unit
* @param {string} [args.distributionSuffix]
@@ -647,7 +732,7 @@ export function chartsFromSum({
/**
* Split pattern into 3 charts with "per Block" in distribution title (no base)
* @param {Object} args
* @param {AnyStatsPattern} args.pattern
* @param {FullStatsPattern} args.pattern
* @param {string} args.title
* @param {Unit} args.unit
* @returns {PartialOptionsTree}
@@ -656,7 +741,7 @@ export const chartsFromSumPerBlock = (args) =>
chartsFromSum({ ...args, distributionSuffix: "per Block" });
/**
* Split pattern with sum + cumulative into 2 charts
* Split pattern with rolling sum windows + cumulative into charts
* @param {Object} args
* @param {CountPattern<any>} args.pattern
* @param {string} args.title
@@ -666,11 +751,7 @@ export const chartsFromSumPerBlock = (args) =>
*/
export function chartsFromCount({ pattern, title, unit, color }) {
return [
{
name: "Sum",
title,
bottom: [{ metric: pattern.sum, title: "sum", color, unit }],
},
rollingWindowsTree({ windows: pattern.sum, title, unit }),
{
name: "Cumulative",
title: `${title} (Total)`,
@@ -680,46 +761,7 @@ export function chartsFromCount({ pattern, title, unit, color }) {
}
/**
* Split value pattern (btc/sats/usd with sum + cumulative) into 2 charts
* @param {Object} args
* @param {ValuePattern} args.pattern
* @param {string} args.title
* @param {Color} [args.color]
* @returns {PartialOptionsTree}
*/
export function chartsFromValue({ pattern, title, color }) {
return [
{
name: "Sum",
title,
bottom: btcSatsUsdSeries({
metrics: {
btc: pattern.btc.sum,
sats: pattern.sats.sum,
usd: pattern.usd.sum,
},
name: "sum",
color,
}),
},
{
name: "Cumulative",
title: `${title} (Total)`,
bottom: btcSatsUsdSeries({
metrics: {
btc: pattern.btc.cumulative,
sats: pattern.sats.cumulative,
usd: pattern.usd.cumulative,
},
name: "all-time",
color,
}),
},
];
}
/**
* Split btc/sats/usd pattern with full stats into 3 charts
* Split BaseCumulativeRollingPattern into 3 charts (Sum/Distribution/Cumulative)
* @param {Object} args
* @param {CoinbasePattern} args.pattern
* @param {string} args.title
@@ -731,44 +773,23 @@ export function chartsFromValueFull({ pattern, title }) {
name: "Sum",
title,
bottom: [
...btcSatsUsdSeries({ metrics: pattern.base, name: "sum" }),
...btcSatsUsdSeries({
metrics: {
btc: pattern.btc.base,
sats: pattern.sats.base,
usd: pattern.usd.base,
},
name: "sum",
}),
...btcSatsUsdSeries({
metrics: {
btc: pattern.btc.sum,
sats: pattern.sats.sum,
usd: pattern.usd.sum,
},
name: "sum",
metrics: pattern._24h.sum,
name: "24h sum",
defaultActive: false,
}),
],
},
{
name: "Distribution",
title: `${title} Distribution`,
bottom: [
...distributionSeries(pattern.btc, Unit.btc),
...distributionSeries(pattern.sats, Unit.sats),
...distributionSeries(pattern.usd, Unit.usd),
],
bottom: distributionBtcSatsUsd(pattern._24h),
},
{
name: "Cumulative",
title: `${title} (Total)`,
bottom: btcSatsUsdSeries({
metrics: {
btc: pattern.btc.cumulative,
sats: pattern.sats.cumulative,
usd: pattern.usd.cumulative,
},
name: "all-time",
}),
bottom: btcSatsUsdSeries({ metrics: pattern.cumulative, name: "all-time" }),
},
];
}

View File

@@ -146,10 +146,10 @@ export function satsBtcUsdBaseline({ pattern, name, color, defaultActive }) {
}
/**
* Create sats/btc/usd series from any value pattern using sum or cumulative key
* Create sats/btc/usd series from any value pattern using base or cumulative key
* @param {Object} args
* @param {AnyValuePatternType} args.source
* @param {'sum' | 'cumulative'} args.key
* @param {'base' | 'cumulative'} args.key
* @param {string} args.name
* @param {Color} [args.color]
* @param {boolean} [args.defaultActive]
@@ -157,11 +157,7 @@ export function satsBtcUsdBaseline({ pattern, name, color, defaultActive }) {
*/
export function satsBtcUsdFrom({ source, key, name, color, defaultActive }) {
return satsBtcUsd({
pattern: {
btc: source.btc[key],
sats: source.sats[key],
usd: source.usd[key],
},
pattern: source[key],
name,
color,
defaultActive,
@@ -169,10 +165,10 @@ export function satsBtcUsdFrom({ source, key, name, color, defaultActive }) {
}
/**
* Create sats/btc/usd series from a full value pattern using base or average key
* Create sats/btc/usd series from a full value pattern using base or cumulative key
* @param {Object} args
* @param {FullValuePattern} args.source
* @param {'base' | 'average'} args.key
* @param {'base' | 'cumulative'} args.key
* @param {string} args.name
* @param {Color} [args.color]
* @param {boolean} [args.defaultActive]
@@ -186,11 +182,7 @@ export function satsBtcUsdFromFull({
defaultActive,
}) {
return satsBtcUsd({
pattern: {
btc: source.btc[key],
sats: source.sats[key],
usd: source.usd[key],
},
pattern: source[key],
name,
color,
defaultActive,
@@ -203,7 +195,7 @@ export function satsBtcUsdFromFull({
* @param {AnyValuePatternType} args.coinbase
* @param {AnyValuePatternType} args.subsidy
* @param {AnyValuePatternType} args.fee
* @param {'sum' | 'cumulative'} args.key
* @param {'base' | 'cumulative'} args.key
* @returns {FetchedLineSeriesBlueprint[]}
*/
export function revenueBtcSatsUsd({ coinbase, subsidy, fee, key }) {
@@ -229,6 +221,47 @@ export function revenueBtcSatsUsd({ coinbase, subsidy, fee, key }) {
];
}
/**
* Create sats/btc/usd series from a rolling window (24h/7d/30d/1y sum)
* @param {Object} args
* @param {AnyValuePattern} args.pattern - A BtcSatsUsdPattern (e.g., source.rolling._24h.sum)
* @param {string} args.name
* @param {Color} [args.color]
* @param {boolean} [args.defaultActive]
* @returns {FetchedLineSeriesBlueprint[]}
*/
export function satsBtcUsdRolling({ pattern, name, color, defaultActive }) {
return satsBtcUsd({ pattern, name, color, defaultActive });
}
/**
* Create coinbase/subsidy/fee rolling sum series from separate sources
* @param {Object} args
* @param {AnyValuePattern} args.coinbase - Rolling sum pattern (e.g., mining.rewards.coinbase.rolling._24h.sum)
* @param {AnyValuePattern} args.subsidy
* @param {AnyValuePattern} args.fee
* @returns {FetchedLineSeriesBlueprint[]}
*/
export function revenueRollingBtcSatsUsd({ coinbase, subsidy, fee }) {
return [
...satsBtcUsd({
pattern: coinbase,
name: "Coinbase",
color: colors.mining.coinbase,
}),
...satsBtcUsd({
pattern: subsidy,
name: "Subsidy",
color: colors.mining.subsidy,
}),
...satsBtcUsd({
pattern: fee,
name: "Fees",
color: colors.mining.fee,
}),
];
}
/**
* Build percentile USD mappings from a ratio pattern
* @param {AnyRatioPattern} ratio

View File

@@ -163,9 +163,8 @@
* - AgeRangePattern (ageRange.*)
* @typedef {LongTermPattern | AgeRangePattern} PatternWithPercentiles
*
* Patterns with RelToMarketCap in relative (RelativePattern):
* - BasicUtxoPattern (minAge.*, geAmount.*, ltAmount.*)
* @typedef {BasicUtxoPattern} PatternBasicWithMarketCap
* Patterns with RelToMarketCap in relative (geAmount.*, ltAmount.*):
* @typedef {UtxoAmountPattern | AddressAmountPattern} PatternBasicWithMarketCap
*
* Patterns without RelToMarketCap in relative (RelativePattern4):
* - EpochPattern (epoch.*, amountRange.*, year.*, type.*)

View File

@@ -45,7 +45,7 @@ export function init() {
const usdPrice = {
type: "Candlestick",
title: "Price",
metric: brk.metrics.prices.ohlc.usd,
metric: brk.metrics.prices.ohlc.usd.day1,
};
result.set(Unit.usd, [usdPrice, ...(optionTop.get(Unit.usd) ?? [])]);
@@ -54,7 +54,7 @@ export function init() {
const satsPrice = {
type: "Candlestick",
title: "Price",
metric: brk.metrics.prices.ohlc.sats,
metric: brk.metrics.prices.ohlc.sats.day1,
colors: /** @type {const} */ ([colors.bi.p1[1], colors.bi.p1[0]]),
};
result.set(Unit.sats, [satsPrice, ...(optionTop.get(Unit.sats) ?? [])]);

View File

@@ -33,40 +33,41 @@
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts} UtxoCohortTree
* @typedef {Brk.MetricsTree_Distribution_AddressCohorts} AddressCohortTree
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts_All} AllUtxoPattern
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts_Term_Short} ShortTermPattern
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts_Term_Long} LongTermPattern
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts_Sth} ShortTermPattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern} LongTermPattern
* @typedef {Brk.MetricsTree_Distribution_UtxoCohorts_All_Relative} AllRelativePattern
* @typedef {keyof Brk.BtcSatsUsdPattern} BtcSatsUsdKey
* @typedef {Brk.BtcSatsUsdPattern} SupplyPattern
* @typedef {Brk.MetricsTree_Blocks_Size} BlockSizePattern
* @typedef {Brk.AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern} BlockSizePattern
* @typedef {keyof Brk.MetricsTree_Distribution_UtxoCohorts_Type} SpendableType
* @typedef {keyof Brk.MetricsTree_Distribution_AnyAddressIndexes} AddressableType
*
* Brk pattern types (using new pattern names)
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern5} MaxAgePattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern4} MaxAgePattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern} AgeRangePattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern3} UtxoAmountPattern
* @typedef {Brk.ActivityAddrCostOutputsRealizedRelativeSupplyUnrealizedPattern} AddressAmountPattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern4} BasicUtxoPattern
* MinAgePattern: minAge cohorts have peakRegret in unrealized (Pattern6)
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern6} MinAgePattern
* MinAgePattern: minAge cohorts have peakRegret in unrealized
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern5} MinAgePattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern3} EpochPattern
* @typedef {Brk.ActivityCostOutputsRealizedSupplyUnrealizedPattern} EmptyPattern
* @typedef {Brk.ActivityCostOutputsRealizedRelativeSupplyUnrealizedPattern3} EmptyPattern
* @typedef {Brk._0sdM0M1M1sdM2M2sdM3sdP0P1P1sdP2P2sdP3sdSdSmaZscorePattern} Ratio1ySdPattern
* @typedef {Brk.Dollars} Dollars
* CoinbasePattern: patterns with btc/sats/usd each having base + sum + cumulative + stats
* @typedef {Brk.BtcSatsUsdPattern3} CoinbasePattern
* CoinbasePattern: base + cumulative + rolling windows (flattened)
* @typedef {Brk._1y24h30d7dBaseCumulativePattern} CoinbasePattern
* ActivePriceRatioPattern: ratio pattern with price (extended)
* @typedef {Brk.PriceRatioPattern} ActivePriceRatioPattern
* AnyRatioPattern: full ratio patterns (with or without price) - has ratio, percentiles, z-scores
* @typedef {Brk.RatioPattern | Brk.PriceRatioPattern} AnyRatioPattern
* ValuePattern: patterns with minimal stats (sum, cumulative only) for btc/sats/usd
* @typedef {Brk.BtcSatsUsdPattern5 | Brk.BtcSatsUsdPattern2} ValuePattern
* FullValuePattern: patterns with full stats (base, sum, cumulative, average, percentiles) for btc/sats/usd
* @typedef {Brk.BtcSatsUsdPattern3} FullValuePattern
* SumValuePattern: patterns with sum stats (sum, cumulative, average, percentiles - no base) for bitcoin/sats/dollars
* @typedef {{btc: SumStatsPattern<any>, sats: SumStatsPattern<any>, usd: SumStatsPattern<any>}} SumValuePattern
* ValuePattern: patterns with base + cumulative (no rolling)
* @typedef {Brk.BaseCumulativeSumPattern | Brk.BaseCumulativePattern} ValuePattern
* FullValuePattern: base + cumulative + rolling windows (flattened)
* @typedef {Brk._1y24h30d7dBaseCumulativePattern} FullValuePattern
* RollingWindowSlot: a single rolling window with stats (average, pct10, pct25, median, pct75, pct90, max, min, sum) per unit
* @typedef {Brk.AverageMaxMedianMinPct10Pct25Pct75Pct90SumPattern2} RollingWindowSlot
* AnyValuePatternType: union of all value pattern types
* @typedef {ValuePattern | FullValuePattern} AnyValuePatternType
* @typedef {Brk._1y24h30d7dBaseCumulativePattern | Brk.BaseCumulativeSumPattern | Brk.BaseCumulativePattern} AnyValuePatternType
* @typedef {Brk.AnyMetricPattern} AnyMetricPattern
* @typedef {Brk.SatsUsdPattern} ActivePricePattern
* @typedef {Brk.AnyMetricEndpointBuilder} AnyMetricEndpoint
@@ -78,11 +79,11 @@
* - GlobalPeakRelativePattern: GlobalRelativePattern + unrealizedPeakRegretRelToMarketCap
* - OwnRelativePattern: has RelToOwnMarketCap metrics (netUnrealizedPnlRelToOwnMarketCap, etc)
* - FullRelativePattern: has BOTH RelToMarketCap AND RelToOwnMarketCap + unrealizedPeakRegretRelToMarketCap
* @typedef {Brk.InvestedSupplyPattern} BasicRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern} BasicRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern} GlobalRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern3} GlobalPeakRelativePattern
* @typedef {Brk.InvestedNegNetSupplyUnrealizedPattern} OwnRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern4} FullRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern4} GlobalPeakRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern2} OwnRelativePattern
* @typedef {Brk.InvestedNegNetNuplSupplyUnrealizedPattern2} FullRelativePattern
* @typedef {Brk.GreedInvestedInvestorNegNetPainSupplyTotalUnrealizedPattern} UnrealizedPattern
* @typedef {Brk.GreedInvestedInvestorNegNetPainPeakSupplyTotalUnrealizedPattern} UnrealizedFullPattern
*
@@ -98,37 +99,43 @@
* @typedef {Brk.MetricEndpointBuilder<T>} MetricEndpoint
*/
/**
* Stats pattern: average, min, max, percentiles (NO base)
* Stats pattern: average, min, max, percentiles (height-only indexes, NO base)
* @template T
* @typedef {Brk.AverageMaxMedianMinPct10Pct25Pct75Pct90TxindexPattern<T>} StatsPattern
* @typedef {Brk.AverageMaxMedianMinPct10Pct25Pct75Pct90Pattern<T>} StatsPattern
*/
/**
* Base stats pattern: base, average, min, max, percentiles (NO sum/cumulative)
* Base stats pattern: height, average, min, max, percentiles (windowed, NO sum/cumulative)
* @template T
* @typedef {Brk.AverageBaseMaxMedianMinPct10Pct25Pct75Pct90Pattern<T>} BaseStatsPattern
* @typedef {Brk.AverageHeightMaxMedianMinPct10Pct25Pct75Pct90Pattern<T>} BaseStatsPattern
*/
/**
* Full stats pattern: base, average, sum, cumulative, min, max, percentiles
* @template T
* @typedef {Brk.AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern<T>} FullStatsPattern
* Full stats pattern: cumulative, sum, average, min, max, percentiles + rolling
* @typedef {Brk.AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern} FullStatsPattern
*/
/**
* Sum stats pattern: average, sum, cumulative, percentiles (NO base)
* @template T
* @typedef {Brk.AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2<T>} SumStatsPattern
* Sum stats pattern: cumulative, sum, average, min, max, percentiles + rolling (same as FullStatsPattern)
* @typedef {Brk.AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern} SumStatsPattern
*/
/**
* Full stats pattern for Bitcoin (non-generic variant with btc-specific indexes)
* @typedef {Brk.AverageBaseCumulativeMaxMedianMinPct10Pct25Pct75Pct90SumPattern2} BtcFullStatsPattern
* Full stats pattern for Bitcoin (non-generic variant) - same as FullStatsPattern
* @typedef {Brk.AverageCumulativeMaxMedianMinPct10Pct25Pct75Pct90RollingSumPattern} BtcFullStatsPattern
*/
/**
* Count pattern: sum and cumulative only
* Count pattern: height, cumulative, and rolling sum windows
* @template T
* @typedef {Brk.CumulativeSumPattern<T>} CountPattern
* @typedef {Brk.CumulativeHeightSumPattern<T>} CountPattern
*/
/**
* Full per-block pattern: height, cumulative, sum, and distribution stats (all flat)
* @typedef {Brk.AverageCumulativeHeightMaxMedianMinPct10Pct25Pct75Pct90SumPattern} FullPerBlockPattern
*/
/**
* Any stats pattern union - patterns with sum/cumulative + percentiles
* @typedef {SumStatsPattern<any> | FullStatsPattern<any> | BtcFullStatsPattern | BlockSizePattern} AnyStatsPattern
* @typedef {FullStatsPattern | BtcFullStatsPattern} AnyStatsPattern
*/
/**
* Distribution stats: 8 metric fields (average, min, max, median, pct10/25/75/90)
* @typedef {{ average: AnyMetricPattern, min: AnyMetricPattern, max: AnyMetricPattern, median: AnyMetricPattern, pct10: AnyMetricPattern, pct25: AnyMetricPattern, pct75: AnyMetricPattern, pct90: AnyMetricPattern }} DistributionStats
*/
/**
@@ -144,7 +151,7 @@
* @typedef {Brk.MetricsTree_Market_Dca} MarketDca
* @typedef {Brk._10y2y3y4y5y6y8yPattern} PeriodCagrPattern
* Full stats pattern union (both generic and non-generic variants)
* @typedef {FullStatsPattern<any> | BtcFullStatsPattern} AnyFullStatsPattern
* @typedef {FullStatsPattern | BtcFullStatsPattern} AnyFullStatsPattern
*
* DCA period keys - derived from pattern types
* @typedef {keyof Brk._10y2y3y4y5y6y8yPattern} LongPeriodKey