computer: snapshot

This commit is contained in:
nym21
2026-02-26 23:01:51 +01:00
parent cccaf6b206
commit 78fc5ffcf7
69 changed files with 1578 additions and 2205 deletions

3
.dockerignore Normal file
View File

@@ -0,0 +1,3 @@
.git
target
docker

View File

@@ -15,30 +15,31 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let mut prev_timestamp = None;
self.interval.height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.timestamp,
|(h, timestamp, ..)| {
let interval = if let Some(prev_h) = h.decremented() {
let prev = prev_timestamp.unwrap_or_else(|| {
indexer.vecs.blocks.timestamp.collect_one(prev_h).unwrap()
});
timestamp.checked_sub(prev).unwrap_or(Timestamp::ZERO)
} else {
Timestamp::ZERO
};
prev_timestamp = Some(timestamp);
(h, interval)
},
exit,
)?;
let window_starts = count_vecs.window_starts();
self.interval_rolling.compute_distribution(
self.0.compute(
starting_indexes.height,
&window_starts,
&self.interval.height,
exit,
|vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.timestamp,
|(h, timestamp, ..)| {
let interval = if let Some(prev_h) = h.decremented() {
let prev = prev_timestamp.unwrap_or_else(|| {
indexer.vecs.blocks.timestamp.collect_one(prev_h).unwrap()
});
timestamp.checked_sub(prev).unwrap_or(Timestamp::ZERO)
} else {
Timestamp::ZERO
};
prev_timestamp = Some(timestamp);
(h, interval)
},
exit,
)?;
Ok(())
},
)?;
Ok(())

View File

@@ -3,10 +3,7 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, RollingDistribution},
};
use crate::{indexes, internal::ComputedFromHeightDistribution};
impl Vecs {
pub(crate) fn forced_import(
@@ -15,14 +12,8 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
let interval =
ComputedFromHeightLast::forced_import(db, "block_interval", version, indexes)?;
ComputedFromHeightDistribution::forced_import(db, "block_interval", version, indexes)?;
let interval_rolling =
RollingDistribution::forced_import(db, "block_interval", version, indexes)?;
Ok(Self {
interval,
interval_rolling,
})
Ok(Self(interval))
}
}

View File

@@ -1,12 +1,13 @@
use derive_more::{Deref, DerefMut};
use brk_traversable::Traversable;
use brk_types::Timestamp;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, RollingDistribution};
use crate::internal::ComputedFromHeightDistribution;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[derive(Deref, DerefMut, Traversable)]
pub struct Vecs<M: StorageMode = Rw>(
#[traversable(flatten)]
pub interval: ComputedFromHeightLast<Timestamp, M>,
pub interval_rolling: RollingDistribution<Timestamp, M>,
}
pub ComputedFromHeightDistribution<Timestamp, M>,
);

View File

@@ -23,18 +23,19 @@ impl Vecs {
exit,
)?;
self.fullness.height.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|(h, weight, ..)| (h, StoredF32::from(weight.fullness())),
exit,
)?;
self.fullness_rolling.compute_distribution(
self.fullness.compute(
starting_indexes.height,
&window_starts,
&self.fullness.height,
exit,
|vec| {
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.blocks.weight,
|(h, weight, ..)| (h, StoredF32::from(weight.fullness())),
exit,
)?;
Ok(())
},
)?;
Ok(())

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, ComputedHeightDerivedCumulativeFull, RollingDistribution},
internal::{ComputedFromHeightDistribution, ComputedHeightDerivedCumulativeFull},
};
impl Vecs {
@@ -22,15 +22,8 @@ impl Vecs {
)?;
let fullness =
ComputedFromHeightLast::forced_import(db, "block_fullness", version, indexes)?;
ComputedFromHeightDistribution::forced_import(db, "block_fullness", version, indexes)?;
let fullness_rolling =
RollingDistribution::forced_import(db, "block_fullness", version, indexes)?;
Ok(Self {
weight,
fullness,
fullness_rolling,
})
Ok(Self { weight, fullness })
}
}

View File

@@ -2,13 +2,10 @@ use brk_traversable::Traversable;
use brk_types::{StoredF32, Weight};
use vecdb::{Rw, StorageMode};
use crate::internal::{
ComputedFromHeightLast, ComputedHeightDerivedCumulativeFull, RollingDistribution,
};
use crate::internal::{ComputedFromHeightDistribution, ComputedHeightDerivedCumulativeFull};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub weight: ComputedHeightDerivedCumulativeFull<Weight, M>,
pub fullness: ComputedFromHeightLast<StoredF32, M>,
pub fullness_rolling: RollingDistribution<StoredF32, M>,
pub fullness: ComputedFromHeightDistribution<StoredF32, M>,
}

View File

@@ -47,7 +47,7 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.vaulted_price.usd.height),
&self.vaulted_price.usd.height,
)?;
self.active_price.usd.height.compute_multiply(
@@ -62,7 +62,7 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.active_price.usd.height),
&self.active_price.usd.height,
)?;
self.true_market_mean.usd.height.compute_divide(
@@ -77,7 +77,7 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.true_market_mean.usd.height),
&self.true_market_mean.usd.height,
)?;
// cointime_price = cointime_cap / circulating_supply
@@ -93,7 +93,7 @@ impl Vecs {
prices,
starting_indexes,
exit,
Some(&self.cointime_price.usd.height),
&self.cointime_price.usd.height,
)?;
Ok(())

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightRatio, Price},
internal::{ComputedFromHeightRatioExtended, Price},
};
impl Vecs {
@@ -15,46 +15,24 @@ impl Vecs {
indexes: &indexes::Vecs,
) -> Result<Self> {
let vaulted_price = Price::forced_import(db, "vaulted_price", version, indexes)?;
let vaulted_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"vaulted_price",
Some(&vaulted_price.usd),
version,
indexes,
true,
)?;
let vaulted_price_ratio =
ComputedFromHeightRatioExtended::forced_import(db, "vaulted_price", version, indexes)?;
let active_price = Price::forced_import(db, "active_price", version, indexes)?;
let active_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"active_price",
Some(&active_price.usd),
version,
indexes,
true,
)?;
let active_price_ratio =
ComputedFromHeightRatioExtended::forced_import(db, "active_price", version, indexes)?;
let true_market_mean =
Price::forced_import(db, "true_market_mean", version, indexes)?;
let true_market_mean_ratio = ComputedFromHeightRatio::forced_import(
let true_market_mean = Price::forced_import(db, "true_market_mean", version, indexes)?;
let true_market_mean_ratio = ComputedFromHeightRatioExtended::forced_import(
db,
"true_market_mean",
Some(&true_market_mean.usd),
version,
indexes,
true,
)?;
let cointime_price =
Price::forced_import(db, "cointime_price", version, indexes)?;
let cointime_price_ratio = ComputedFromHeightRatio::forced_import(
db,
"cointime_price",
Some(&cointime_price.usd),
version,
indexes,
true,
)?;
let cointime_price = Price::forced_import(db, "cointime_price", version, indexes)?;
let cointime_price_ratio =
ComputedFromHeightRatioExtended::forced_import(db, "cointime_price", version, indexes)?;
Ok(Self {
vaulted_price,

View File

@@ -2,16 +2,16 @@ use brk_traversable::Traversable;
use brk_types::Dollars;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, ComputedFromHeightRatio, Price};
use crate::internal::{ComputedFromHeightLast, ComputedFromHeightRatioExtended, Price};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub vaulted_price: Price<ComputedFromHeightLast<Dollars, M>>,
pub vaulted_price_ratio: ComputedFromHeightRatio<M>,
pub vaulted_price_ratio: ComputedFromHeightRatioExtended<M>,
pub active_price: Price<ComputedFromHeightLast<Dollars, M>>,
pub active_price_ratio: ComputedFromHeightRatio<M>,
pub active_price_ratio: ComputedFromHeightRatioExtended<M>,
pub true_market_mean: Price<ComputedFromHeightLast<Dollars, M>>,
pub true_market_mean_ratio: ComputedFromHeightRatio<M>,
pub true_market_mean_ratio: ComputedFromHeightRatioExtended<M>,
pub cointime_price: Price<ComputedFromHeightLast<Dollars, M>>,
pub cointime_price_ratio: ComputedFromHeightRatio<M>,
pub cointime_price_ratio: ComputedFromHeightRatioExtended<M>,
}

View File

@@ -55,7 +55,6 @@ impl AddressCohortVecs {
db,
filter,
full_name: &full_name,
context: CohortContext::Address,
version,
indexes,
};

View File

@@ -78,7 +78,6 @@ impl UTXOCohorts<Rw> {
db,
filter: Filter::All,
full_name: &all_full_name,
context: CohortContext::Utxo,
version: v + Version::ONE,
indexes,
};
@@ -94,7 +93,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -114,7 +112,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -146,7 +143,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -166,7 +162,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -184,7 +179,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -203,7 +197,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};
@@ -222,7 +215,6 @@ impl UTXOCohorts<Rw> {
db,
filter: f,
full_name: &full_name,
context: CohortContext::Utxo,
version: v,
indexes,
};

View File

@@ -67,7 +67,7 @@ pub(crate) fn process_blocks(
// From transactions and inputs/outputs (via .height or .height.sum_cumulative.sum patterns):
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.sum_cumulative.sum.0;
let height_to_output_count = &outputs.count.total_count.height.sum_cumulative.sum.0;
let height_to_input_count = &inputs.count.height.sum_cumulative.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.timestamp_monotonic;

View File

@@ -1,4 +1,4 @@
use brk_cohort::{CohortContext, Filter};
use brk_cohort::Filter;
use brk_types::Version;
use vecdb::Database;
@@ -9,17 +9,11 @@ pub struct ImportConfig<'a> {
pub db: &'a Database,
pub filter: Filter,
pub full_name: &'a str,
pub context: CohortContext,
pub version: Version,
pub indexes: &'a indexes::Vecs,
}
impl<'a> ImportConfig<'a> {
/// Whether this is an extended cohort (more relative metrics).
pub(crate) fn extended(&self) -> bool {
self.filter.is_extended(self.context)
}
/// Get full metric name with filter prefix.
pub(crate) fn name(&self, suffix: &str) -> String {
if self.full_name.is_empty() {

View File

@@ -36,14 +36,12 @@ impl CostBasisExtended {
&cfg.name("cost_basis"),
cfg.version,
cfg.indexes,
true,
)?,
invested_capital: PercentilesVecs::forced_import(
cfg.db,
&cfg.name("invested_capital"),
cfg.version,
cfg.indexes,
true,
)?,
spot_cost_basis_percentile: ComputedFromHeightLast::forced_import(
cfg.db,
@@ -99,14 +97,12 @@ impl CostBasisExtended {
self.percentiles
.vecs
.iter_mut()
.flatten()
.map(|v| &mut v.usd.height as &mut dyn AnyStoredVec),
);
vecs.extend(
self.invested_capital
.vecs
.iter_mut()
.flatten()
.map(|v| &mut v.usd.height as &mut dyn AnyStoredVec),
);
vecs.push(&mut self.spot_cost_basis_percentile.height);

View File

@@ -142,9 +142,7 @@ impl RealizedBase {
let v1 = Version::ONE;
let v2 = Version::new(2);
let v3 = Version::new(3);
let extended = cfg.extended();
// Import combined types using forced_import which handles height + derived
// Import combined types using forced_import which handles height + derived
let realized_cap_cents = ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name("realized_cap_cents"),
@@ -270,12 +268,11 @@ impl RealizedBase {
&investor_price_cents,
);
let investor_price_extra = ComputedFromHeightRatio::forced_import_from_lazy(
let investor_price_extra = ComputedFromHeightRatio::forced_import(
cfg.db,
&cfg.name("investor_price"),
cfg.version,
cfg.indexes,
extended,
)?;
let lower_price_band = Price::forced_import(
@@ -350,10 +347,8 @@ impl RealizedBase {
let realized_price_extra = ComputedFromHeightRatio::forced_import(
cfg.db,
&cfg.name("realized_price"),
Some(&realized_price.usd),
cfg.version + v1,
cfg.indexes,
extended,
)?;
let mvrv = LazyFromHeightLast::from_computed::<StoredF32Identity>(
@@ -845,28 +840,16 @@ impl RealizedBase {
exit,
)?;
self.realized_price_extra.compute_rest(
blocks,
prices,
starting_indexes,
exit,
Some(&self.realized_price.usd.height),
)?;
self.realized_price_extra.compute_usd_bands(
self.realized_price_extra.compute_ratio(
starting_indexes,
&prices.usd.price,
&self.realized_price.usd.height,
exit,
)?;
self.investor_price_extra.compute_rest(
blocks,
prices,
starting_indexes,
exit,
Some(&self.investor_price.usd.height),
)?;
self.investor_price_extra.compute_usd_bands(
self.investor_price_extra.compute_ratio(
starting_indexes,
&prices.usd.price,
&self.investor_price.usd.height,
exit,
)?;

View File

@@ -5,9 +5,7 @@ use vecdb::{Exit, ReadableVec, Rw, StorageMode};
use crate::{
ComputeIndexes, blocks,
internal::{
ComputedFromHeightLast, Ratio64,
},
internal::{ComputedFromHeightLast, ComputedFromHeightRatioExtension, Ratio64},
};
use crate::distribution::metrics::ImportConfig;
@@ -34,6 +32,10 @@ pub struct RealizedExtended<M: StorageMode = Rw> {
pub realized_profit_to_loss_ratio_7d: ComputedFromHeightLast<StoredF64, M>,
pub realized_profit_to_loss_ratio_30d: ComputedFromHeightLast<StoredF64, M>,
pub realized_profit_to_loss_ratio_1y: ComputedFromHeightLast<StoredF64, M>,
// === Extended ratio metrics for realized/investor price ===
pub realized_price_ratio_ext: ComputedFromHeightRatioExtension<M>,
pub investor_price_ratio_ext: ComputedFromHeightRatioExtension<M>,
}
impl RealizedExtended {
@@ -42,7 +44,12 @@ impl RealizedExtended {
macro_rules! import_rolling {
($name:expr) => {
ComputedFromHeightLast::forced_import(cfg.db, &cfg.name($name), cfg.version + v1, cfg.indexes)?
ComputedFromHeightLast::forced_import(
cfg.db,
&cfg.name($name),
cfg.version + v1,
cfg.indexes,
)?
};
}
@@ -65,9 +72,22 @@ impl RealizedExtended {
realized_profit_to_loss_ratio_7d: import_rolling!("realized_profit_to_loss_ratio_7d"),
realized_profit_to_loss_ratio_30d: import_rolling!("realized_profit_to_loss_ratio_30d"),
realized_profit_to_loss_ratio_1y: import_rolling!("realized_profit_to_loss_ratio_1y"),
realized_price_ratio_ext: ComputedFromHeightRatioExtension::forced_import(
cfg.db,
&cfg.name("realized_price"),
cfg.version + v1,
cfg.indexes,
)?,
investor_price_ratio_ext: ComputedFromHeightRatioExtension::forced_import(
cfg.db,
&cfg.name("investor_price"),
cfg.version,
cfg.indexes,
)?,
})
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn compute_rest_part2_ext(
&mut self,
base: &RealizedBase,
@@ -77,35 +97,118 @@ impl RealizedExtended {
exit: &Exit,
) -> Result<()> {
// Realized profit/loss rolling sums
self.realized_profit_24h.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &base.realized_profit.height, exit)?;
self.realized_profit_7d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &base.realized_profit.height, exit)?;
self.realized_profit_30d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &base.realized_profit.height, exit)?;
self.realized_profit_1y.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &base.realized_profit.height, exit)?;
self.realized_loss_24h.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_24h_ago, &base.realized_loss.height, exit)?;
self.realized_loss_7d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1w_ago, &base.realized_loss.height, exit)?;
self.realized_loss_30d.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1m_ago, &base.realized_loss.height, exit)?;
self.realized_loss_1y.height.compute_rolling_sum(starting_indexes.height, &blocks.count.height_1y_ago, &base.realized_loss.height, exit)?;
// Realized cap relative to own market cap
self.realized_cap_rel_to_own_market_cap.height.compute_percentage(
self.realized_profit_24h.height.compute_rolling_sum(
starting_indexes.height,
&base.realized_cap.height,
height_to_market_cap,
&blocks.count.height_24h_ago,
&base.realized_profit.height,
exit,
)?;
self.realized_profit_7d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1w_ago,
&base.realized_profit.height,
exit,
)?;
self.realized_profit_30d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1m_ago,
&base.realized_profit.height,
exit,
)?;
self.realized_profit_1y.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1y_ago,
&base.realized_profit.height,
exit,
)?;
self.realized_loss_24h.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_24h_ago,
&base.realized_loss.height,
exit,
)?;
self.realized_loss_7d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1w_ago,
&base.realized_loss.height,
exit,
)?;
self.realized_loss_30d.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1m_ago,
&base.realized_loss.height,
exit,
)?;
self.realized_loss_1y.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1y_ago,
&base.realized_loss.height,
exit,
)?;
// Realized cap relative to own market cap
self.realized_cap_rel_to_own_market_cap
.height
.compute_percentage(
starting_indexes.height,
&base.realized_cap.height,
height_to_market_cap,
exit,
)?;
// Realized profit to loss ratios
self.realized_profit_to_loss_ratio_24h.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_24h.height, &self.realized_loss_24h.height, exit,
self.realized_profit_to_loss_ratio_24h
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.realized_profit_24h.height,
&self.realized_loss_24h.height,
exit,
)?;
self.realized_profit_to_loss_ratio_7d
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.realized_profit_7d.height,
&self.realized_loss_7d.height,
exit,
)?;
self.realized_profit_to_loss_ratio_30d
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.realized_profit_30d.height,
&self.realized_loss_30d.height,
exit,
)?;
self.realized_profit_to_loss_ratio_1y
.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height,
&self.realized_profit_1y.height,
&self.realized_loss_1y.height,
exit,
)?;
// Extended ratio metrics
self.realized_price_ratio_ext.compute_rest(
blocks,
starting_indexes,
exit,
&base.realized_price_extra.ratio.height,
)?;
self.realized_profit_to_loss_ratio_7d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_7d.height, &self.realized_loss_7d.height, exit,
self.realized_price_ratio_ext.compute_usd_bands(
starting_indexes,
&base.realized_price.usd.height,
exit,
)?;
self.realized_profit_to_loss_ratio_30d.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_30d.height, &self.realized_loss_30d.height, exit,
self.investor_price_ratio_ext.compute_rest(
blocks,
starting_indexes,
exit,
&base.investor_price_extra.ratio.height,
)?;
self.realized_profit_to_loss_ratio_1y.compute_binary::<Dollars, Dollars, Ratio64>(
starting_indexes.height, &self.realized_profit_1y.height, &self.realized_loss_1y.height, exit,
self.investor_price_ratio_ext.compute_usd_bands(
starting_indexes,
&base.investor_price.usd.height,
exit,
)?;
Ok(())

View File

@@ -14,21 +14,21 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.height.compute_with_skip(
starting_indexes.height,
&indexes.txindex.input_count,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
0,
)?;
let window_starts = blocks.count.window_starts();
self.rolling.compute(
self.0.compute(
starting_indexes.height,
&window_starts,
self.height.sum_cumulative.sum.inner(),
exit,
|full| {
full.compute_with_skip(
starting_indexes.height,
&indexes.txindex.input_count,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
0,
)
},
)?;
Ok(())

View File

@@ -3,16 +3,15 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{Full, RollingFull},
};
use crate::{indexes, internal::ComputedFromHeightFull};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
height: Full::forced_import(db, "input_count", version)?,
rolling: RollingFull::forced_import(db, "input_count", version, indexes)?,
})
Ok(Self(ComputedFromHeightFull::forced_import(
db,
"input_count",
version,
indexes,
)?))
}
}

View File

@@ -1,11 +1,13 @@
use derive_more::{Deref, DerefMut};
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64};
use brk_types::StoredU64;
use vecdb::{Rw, StorageMode};
use crate::internal::{Full, RollingFull};
use crate::internal::ComputedFromHeightFull;
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub height: Full<Height, StoredU64, M>,
pub rolling: RollingFull<StoredU64, M>,
}
#[derive(Deref, DerefMut, Traversable)]
pub struct Vecs<M: StorageMode = Rw>(
#[traversable(flatten)]
pub ComputedFromHeightFull<StoredU64, M>,
);

View File

@@ -0,0 +1,72 @@
//! ComputedFromHeightFull - Full (distribution + sum + cumulative) + RollingFull.
//!
//! For metrics aggregated per-block from finer-grained sources (e.g., per-tx data),
//! where we want full per-block stats plus rolling window stats.
use std::ops::SubAssign;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, Exit, Rw, StorageMode};
use crate::{
indexes,
internal::{Full, NumericValue, RollingFull, WindowStarts},
};
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightFull<T, M: StorageMode = Rw>
where
T: NumericValue + JsonSchema,
{
#[traversable(flatten)]
pub height: Full<Height, T, M>,
#[traversable(flatten)]
pub rolling: RollingFull<T, M>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedFromHeightFull<T>
where
T: NumericValue + JsonSchema,
{
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
let height = Full::forced_import(db, name, v)?;
let rolling = RollingFull::forced_import(db, name, v, indexes)?;
Ok(Self { height, rolling })
}
/// Compute Full stats via closure, then rolling windows from the per-block sum.
pub(crate) fn compute(
&mut self,
max_from: Height,
windows: &WindowStarts<'_>,
exit: &Exit,
compute_full: impl FnOnce(&mut Full<Height, T>) -> Result<()>,
) -> Result<()>
where
T: From<f64> + Default + SubAssign + Copy + Ord,
f64: From<T>,
{
compute_full(&mut self.height)?;
self.rolling.compute(
max_from,
windows,
self.height.sum_cumulative.sum.inner(),
exit,
)?;
Ok(())
}
}

View File

@@ -70,11 +70,10 @@ where
S2T: VecValue,
F: BinaryTransform<S1T, S2T, T>,
{
self.height.compute_transform2(
self.height.compute_binary::<S1T, S2T, F>(
max_from,
source1,
source2,
|(h, s1, s2, ..)| (h, F::apply(s1, s2)),
exit,
)?;
Ok(())

View File

@@ -4,13 +4,11 @@ use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{ReadableBoxedVec, ReadableCloneableVec, LazyVecFrom1, UnaryTransform};
use vecdb::{LazyVecFrom1, ReadableBoxedVec, ReadableCloneableVec, UnaryTransform};
use crate::{
indexes,
internal::{
ComputedFromHeightLast, ComputedVecValue, LazyHeightDerivedLast, NumericValue,
},
internal::{ComputedFromHeightLast, ComputedVecValue, LazyHeightDerivedLast, NumericValue},
};
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
@@ -61,7 +59,12 @@ where
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, height_source.clone()),
rest: Box::new(LazyHeightDerivedLast::from_height_source::<F>(name, v, height_source, indexes)),
rest: Box::new(LazyHeightDerivedLast::from_height_source::<F>(
name,
v,
height_source,
indexes,
)),
}
}
@@ -78,8 +81,11 @@ where
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, source.height.read_only_boxed_clone()),
rest: Box::new(LazyHeightDerivedLast::from_lazy::<F, S2T>(name, v, &source.rest)),
rest: Box::new(LazyHeightDerivedLast::from_lazy::<F, S2T>(
name,
v,
&source.rest,
)),
}
}
}

View File

@@ -3,6 +3,7 @@ mod cumulative;
mod cumulative_rolling_full;
mod cumulative_rolling_sum;
mod distribution;
mod full;
mod last;
mod lazy_computed_full;
mod lazy_last;
@@ -15,6 +16,7 @@ mod value_change;
mod value_ema;
mod value_full;
mod value_last;
mod value_last_rolling;
mod value_lazy_computed_cumulative;
mod value_lazy_last;
mod value_sum_cumulative;
@@ -24,6 +26,7 @@ pub use cumulative::*;
pub use cumulative_rolling_full::*;
pub use cumulative_rolling_sum::*;
pub use distribution::*;
pub use full::*;
pub use last::*;
pub use lazy_computed_full::*;
pub use lazy_last::*;
@@ -36,6 +39,7 @@ pub use value_change::*;
pub use value_ema::*;
pub use value_full::*;
pub use value_last::*;
pub use value_last_rolling::*;
pub use value_lazy_computed_cumulative::*;
pub use value_lazy_last::*;
pub use value_sum_cumulative::*;

View File

@@ -68,7 +68,7 @@ pub(crate) fn compute_spot_percentile_rank(
}
pub struct PercentilesVecs<M: StorageMode = Rw> {
pub vecs: [Option<Price<ComputedFromHeightLast<Dollars, M>>>; PERCENTILES_LEN],
pub vecs: [Price<ComputedFromHeightLast<Dollars, M>>; PERCENTILES_LEN],
}
const VERSION: Version = Version::ONE;
@@ -79,15 +79,17 @@ impl PercentilesVecs {
prefix: &str,
version: Version,
indexes: &indexes::Vecs,
compute: bool,
) -> Result<Self> {
let vecs = PERCENTILES.map(|p| {
compute.then(|| {
let vecs = PERCENTILES
.into_iter()
.map(|p| {
let metric_name = format!("{prefix}_pct{p:02}");
Price::forced_import(db, &metric_name, version + VERSION, indexes)
.unwrap()
})
});
.collect::<Result<Vec<_>>>()?
.try_into()
.ok()
.expect("PERCENTILES length mismatch");
Ok(Self { vecs })
}
@@ -98,17 +100,15 @@ impl PercentilesVecs {
height: Height,
percentile_prices: &[Dollars; PERCENTILES_LEN],
) -> Result<()> {
for (i, vec) in self.vecs.iter_mut().enumerate() {
if let Some(v) = vec {
v.usd.height.truncate_push(height, percentile_prices[i])?;
}
for (i, v) in self.vecs.iter_mut().enumerate() {
v.usd.height.truncate_push(height, percentile_prices[i])?;
}
Ok(())
}
/// Validate computed versions or reset if mismatched.
pub(crate) fn validate_computed_version_or_reset(&mut self, version: Version) -> Result<()> {
for vec in self.vecs.iter_mut().flatten() {
for vec in self.vecs.iter_mut() {
vec.usd.height.validate_computed_version_or_reset(version)?;
}
Ok(())
@@ -123,7 +123,7 @@ impl ReadOnlyClone for PercentilesVecs {
vecs: self
.vecs
.each_ref()
.map(|v| v.as_ref().map(|p| p.read_only_clone())),
.map(|v| v.read_only_clone()),
}
}
}
@@ -137,7 +137,7 @@ where
PERCENTILES
.iter()
.zip(self.vecs.iter())
.filter_map(|(p, v)| v.as_ref().map(|v| (format!("pct{p:02}"), v.to_tree_node())))
.map(|(p, v)| (format!("pct{p:02}"), v.to_tree_node()))
.collect(),
)
}
@@ -145,7 +145,6 @@ where
fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
self.vecs
.iter()
.flatten()
.flat_map(|p| p.iter_any_exportable())
}
}

View File

@@ -1,454 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
WritableVec,
};
use crate::{
ComputeIndexes, blocks, indexes,
internal::{ComputedFromHeightStdDev, Price, StandardDeviationVecsOptions},
prices,
utils::get_percentile,
};
use super::ComputedFromHeightLast;
#[derive(Traversable)]
pub struct ComputedFromHeightRatio<M: StorageMode = Rw> {
pub price: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio: ComputedFromHeightLast<StoredF32, M>,
pub ratio_1w_sma: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_1m_sma: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct99: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct98: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct95: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct5: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct2: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct1: Option<ComputedFromHeightLast<StoredF32, M>>,
pub ratio_pct99_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct98_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct95_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct5_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct2_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_pct1_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
pub ratio_sd: Option<ComputedFromHeightStdDev<M>>,
pub ratio_4y_sd: Option<ComputedFromHeightStdDev<M>>,
pub ratio_2y_sd: Option<ComputedFromHeightStdDev<M>>,
pub ratio_1y_sd: Option<ComputedFromHeightStdDev<M>>,
}
const VERSION: Version = Version::TWO;
impl ComputedFromHeightRatio {
#[allow(clippy::too_many_arguments)]
pub(crate) fn forced_import(
db: &Database,
name: &str,
metric_price: Option<&ComputedFromHeightLast<Dollars>>,
version: Version,
indexes: &indexes::Vecs,
extended: bool,
) -> Result<Self> {
let v = version + VERSION;
macro_rules! import {
($suffix:expr) => {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_{}", $suffix),
v,
indexes,
)
.unwrap()
};
}
// Only compute price internally when metric_price is None
let price = metric_price
.is_none()
.then(|| Price::forced_import(db, name, v, indexes).unwrap());
macro_rules! import_sd {
($suffix:expr, $days:expr) => {
ComputedFromHeightStdDev::forced_import(
db,
&format!("{name}_{}", $suffix),
$days,
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
)
.unwrap()
};
}
let ratio_pct99 = extended.then(|| import!("ratio_pct99"));
let ratio_pct98 = extended.then(|| import!("ratio_pct98"));
let ratio_pct95 = extended.then(|| import!("ratio_pct95"));
let ratio_pct5 = extended.then(|| import!("ratio_pct5"));
let ratio_pct2 = extended.then(|| import!("ratio_pct2"));
let ratio_pct1 = extended.then(|| import!("ratio_pct1"));
macro_rules! lazy_usd {
($ratio:expr, $suffix:expr) => {
if !extended {
None
} else {
$ratio.as_ref().map(|_| {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
v,
indexes,
)
.unwrap()
})
}
};
}
Ok(Self {
ratio: import!("ratio"),
ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")),
ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")),
ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)),
ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)),
ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)),
ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)),
ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"),
ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"),
ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"),
ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"),
ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"),
ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"),
price,
ratio_pct99,
ratio_pct98,
ratio_pct95,
ratio_pct5,
ratio_pct2,
ratio_pct1,
})
}
pub(crate) fn forced_import_from_lazy(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
extended: bool,
) -> Result<Self> {
let v = version + VERSION;
macro_rules! import {
($suffix:expr) => {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_{}", $suffix),
v,
indexes,
)
.unwrap()
};
}
macro_rules! import_sd {
($suffix:expr, $days:expr) => {
ComputedFromHeightStdDev::forced_import_from_lazy(
db,
&format!("{name}_{}", $suffix),
$days,
v,
indexes,
StandardDeviationVecsOptions::default().add_all(),
)
.unwrap()
};
}
let ratio_pct99 = extended.then(|| import!("ratio_pct99"));
let ratio_pct98 = extended.then(|| import!("ratio_pct98"));
let ratio_pct95 = extended.then(|| import!("ratio_pct95"));
let ratio_pct5 = extended.then(|| import!("ratio_pct5"));
let ratio_pct2 = extended.then(|| import!("ratio_pct2"));
let ratio_pct1 = extended.then(|| import!("ratio_pct1"));
macro_rules! lazy_usd {
($ratio:expr, $suffix:expr) => {
$ratio.as_ref().map(|_| {
Price::forced_import(db, &format!("{name}_{}", $suffix), v, indexes)
.unwrap()
})
};
}
Ok(Self {
ratio: import!("ratio"),
ratio_1w_sma: extended.then(|| import!("ratio_1w_sma")),
ratio_1m_sma: extended.then(|| import!("ratio_1m_sma")),
ratio_sd: extended.then(|| import_sd!("ratio", usize::MAX)),
ratio_1y_sd: extended.then(|| import_sd!("ratio_1y", 365)),
ratio_2y_sd: extended.then(|| import_sd!("ratio_2y", 2 * 365)),
ratio_4y_sd: extended.then(|| import_sd!("ratio_4y", 4 * 365)),
ratio_pct99_usd: lazy_usd!(&ratio_pct99, "ratio_pct99_usd"),
ratio_pct98_usd: lazy_usd!(&ratio_pct98, "ratio_pct98_usd"),
ratio_pct95_usd: lazy_usd!(&ratio_pct95, "ratio_pct95_usd"),
ratio_pct5_usd: lazy_usd!(&ratio_pct5, "ratio_pct5_usd"),
ratio_pct2_usd: lazy_usd!(&ratio_pct2, "ratio_pct2_usd"),
ratio_pct1_usd: lazy_usd!(&ratio_pct1, "ratio_pct1_usd"),
price: None,
ratio_pct99,
ratio_pct98,
ratio_pct95,
ratio_pct5,
ratio_pct2,
ratio_pct1,
})
}
/// Compute all: fills the owned height-level price vec through the
/// caller-supplied closure, then derives the ratio and all dependent
/// metrics from it.
pub(crate) fn compute_all<F>(
    &mut self,
    blocks: &blocks::Vecs,
    prices: &prices::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    mut compute: F,
) -> Result<()>
where
    F: FnMut(&mut EagerVec<PcoVec<Height, Dollars>>) -> Result<()>,
{
    // `price` is always Some for this (non-lazy) variant.
    compute(&mut self.price.as_mut().unwrap().usd.height)?;
    // Passing `None` makes compute_rest fall back to the internal price vec.
    self.compute_rest(
        blocks,
        prices,
        starting_indexes,
        exit,
        None::<&EagerVec<PcoVec<Height, Dollars>>>,
    )
}
/// Compute ratio and derived metrics from an externally-provided or internal price.
///
/// `price_opt`: when `Some`, the metric-price vec to divide against; when
/// `None`, falls back to the internally stored `self.price` (which must be
/// `Some` in that case, or this panics).
///
/// When the extended vecs were not imported (`ratio_1w_sma` is `None`),
/// only the base ratio is computed and everything else is skipped.
pub(crate) fn compute_rest(
    &mut self,
    blocks: &blocks::Vecs,
    prices: &prices::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    price_opt: Option<&impl ReadableVec<Height, Dollars>>,
) -> Result<()> {
    let close_price = &prices.usd.price;
    // SAFETY(review): the transmute coerces `&EagerVec<PcoVec<Height, Dollars>>`
    // to the caller's `&impl ReadableVec<Height, Dollars>` so both branches
    // share one type. This is only sound when the generic is instantiated
    // with that exact concrete type — `compute_all` passes
    // `None::<&EagerVec<PcoVec<Height, Dollars>>>`; confirm for other callers.
    let price = price_opt.unwrap_or_else(|| unsafe {
        std::mem::transmute(&self.price.as_ref().unwrap().usd.height)
    });
    // Compute ratio = close_price / metric_price at height level
    // (a zero metric price maps to a neutral ratio of 1.0 instead of dividing).
    self.ratio.height.compute_transform2(
        starting_indexes.height,
        close_price,
        price,
        |(i, close, price, ..)| {
            if price == Dollars::ZERO {
                (i, StoredF32::from(1.0))
            } else {
                (i, StoredF32::from(close / price))
            }
        },
        exit,
    )?;
    // Extended vecs are imported together; `ratio_1w_sma` being `None`
    // means none of the metrics below exist.
    if self.ratio_1w_sma.is_none() {
        return Ok(());
    }
    // SMA using lookback vecs
    self.ratio_1w_sma
        .as_mut()
        .unwrap()
        .height
        .compute_rolling_average(
            starting_indexes.height,
            &blocks.count.height_1w_ago,
            &self.ratio.height,
            exit,
        )?;
    self.ratio_1m_sma
        .as_mut()
        .unwrap()
        .height
        .compute_rolling_average(
            starting_indexes.height,
            &blocks.count.height_1m_ago,
            &self.ratio.height,
            exit,
        )?;
    // Percentiles: insert into sorted array on day boundaries
    let ratio_version = self.ratio.height.version();
    // Reset any percentile vec whose stored version no longer matches the
    // ratio source, forcing recomputation from scratch.
    self.mut_ratio_vecs()
        .iter_mut()
        .try_for_each(|v| -> Result<()> {
            v.validate_computed_version_or_reset(ratio_version)?;
            Ok(())
        })?;
    // Resume from the shortest percentile vec (or earlier, if the caller's
    // starting index is lower).
    let starting_height = self
        .mut_ratio_vecs()
        .iter()
        .map(|v| Height::from(v.len()))
        .min()
        .unwrap()
        .min(starting_indexes.height);
    let start = starting_height.to_usize();
    // A change in `height_24h_ago` signals a day boundary; the percentile
    // history takes one sample per day.
    let day_start = &blocks.count.height_24h_ago;
    // Collect sorted history up to starting point (one per day boundary)
    let mut sorted = {
        let ratio_data = self.ratio.height.collect_range_at(0, start);
        let day_start_hist = day_start.collect_range_at(0, start);
        let mut sorted: Vec<StoredF32> = Vec::new();
        let mut last_day_start = Height::from(0_usize);
        for (h, ratio) in ratio_data.into_iter().enumerate() {
            let cur_day_start = day_start_hist[h];
            if h == 0 || cur_day_start != last_day_start {
                // Keep the history sorted as we insert.
                let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p);
                sorted.insert(pos, ratio);
                last_day_start = cur_day_start;
            }
        }
        sorted
    };
    let pct1_vec = &mut self.ratio_pct1.as_mut().unwrap().height;
    let pct2_vec = &mut self.ratio_pct2.as_mut().unwrap().height;
    let pct5_vec = &mut self.ratio_pct5.as_mut().unwrap().height;
    let pct95_vec = &mut self.ratio_pct95.as_mut().unwrap().height;
    let pct98_vec = &mut self.ratio_pct98.as_mut().unwrap().height;
    let pct99_vec = &mut self.ratio_pct99.as_mut().unwrap().height;
    let ratio_len = self.ratio.height.len();
    let ratio_data = self.ratio.height.collect_range_at(start, ratio_len);
    // Seed the day-boundary tracker with the value just before `start` so
    // resuming does not double-count the first sample.
    let mut last_day_start = if start > 0 {
        day_start
            .collect_one_at(start - 1)
            .unwrap_or(Height::from(0_usize))
    } else {
        Height::from(0_usize)
    };
    let day_start_data = day_start.collect_range_at(start, ratio_len);
    for (offset, ratio) in ratio_data.into_iter().enumerate() {
        let index = start + offset;
        // Insert into sorted history on day boundaries
        let cur_day_start = day_start_data[offset];
        if index == 0 || cur_day_start != last_day_start {
            let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p);
            sorted.insert(pos, ratio);
            last_day_start = cur_day_start;
        }
        // No samples yet -> store NaN; otherwise read each percentile from
        // the sorted history.
        if sorted.is_empty() {
            pct1_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct2_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct5_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct95_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct98_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct99_vec.truncate_push_at(index, StoredF32::NAN)?;
        } else {
            pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?;
            pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?;
            pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?;
            pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?;
            pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?;
            pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?;
        }
    }
    {
        // Hold the exit lock while flushing (see `Exit`).
        let _lock = exit.lock();
        self.mut_ratio_vecs()
            .into_iter()
            .try_for_each(|v| v.flush())?;
    }
    // Compute stddev at height level
    macro_rules! compute_sd {
        ($($field:ident),*) => {
            $(self.$field.as_mut().unwrap().compute_all(
                blocks, starting_indexes, exit, &self.ratio.height,
            )?;)*
        };
    }
    compute_sd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd);
    Ok(())
}
/// Compute USD ratio bands: usd_band = metric_price * ratio_percentile
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_field:ident) => {
if let Some(usd) = self.$usd_field.as_mut() {
if let Some(band) = self.$band_field.as_ref() {
usd.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
&band.height,
exit,
)?;
}
}
};
}
compute_band!(ratio_pct99_usd, ratio_pct99);
compute_band!(ratio_pct98_usd, ratio_pct98);
compute_band!(ratio_pct95_usd, ratio_pct95);
compute_band!(ratio_pct5_usd, ratio_pct5);
compute_band!(ratio_pct2_usd, ratio_pct2);
compute_band!(ratio_pct1_usd, ratio_pct1);
// Stddev USD bands
macro_rules! compute_sd_usd {
($($field:ident),*) => {
$(if let Some(sd) = self.$field.as_mut() {
sd.compute_usd_bands(starting_indexes, metric_price, exit)?;
})*
};
}
compute_sd_usd!(ratio_sd, ratio_4y_sd, ratio_2y_sd, ratio_1y_sd);
Ok(())
}
/// Mutable references to every imported percentile height vec, in
/// ascending-percentile order.
fn mut_ratio_vecs(&mut self) -> Vec<&mut EagerVec<PcoVec<Height, StoredF32>>> {
    [
        self.ratio_pct1.as_mut(),
        self.ratio_pct2.as_mut(),
        self.ratio_pct5.as_mut(),
        self.ratio_pct95.as_mut(),
        self.ratio_pct98.as_mut(),
        self.ratio_pct99.as_mut(),
    ]
    .into_iter()
    .flatten()
    .map(|computed| &mut computed.height)
    .collect()
}
}

View File

@@ -0,0 +1,57 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{ComputeIndexes, blocks, indexes, prices};
use super::{ComputedFromHeightRatio, ComputedFromHeightRatioExtension};
/// Base ratio vec set plus the full extension (SMAs, percentiles, std-dev
/// families and their USD bands). `Deref`s to the base set.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightRatioExtended<M: StorageMode = Rw> {
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    // The plain `{name}_ratio` vec.
    pub base: ComputedFromHeightRatio<M>,
    #[traversable(flatten)]
    // Metrics derived from `base.ratio`.
    pub extended: ComputedFromHeightRatioExtension<M>,
}
impl ComputedFromHeightRatioExtended {
    /// Open (or create) both the base ratio vecs and the extension vecs.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let base = ComputedFromHeightRatio::forced_import(db, name, version, indexes)?;
        let extended =
            ComputedFromHeightRatioExtension::forced_import(db, name, version, indexes)?;
        Ok(Self { base, extended })
    }

    /// Compute ratio and all extended metrics from an externally-provided metric price.
    pub(crate) fn compute_rest(
        &mut self,
        blocks: &blocks::Vecs,
        prices: &prices::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        metric_price: &impl ReadableVec<Height, Dollars>,
    ) -> Result<()> {
        // ratio = close_price / metric_price, stored at height granularity.
        self.base
            .compute_ratio(starting_indexes, &prices.usd.price, metric_price, exit)?;
        // SMAs, percentiles and std-devs all derive from the ratio vec.
        self.extended
            .compute_rest(blocks, starting_indexes, exit, &self.base.ratio.height)?;
        // Finally project the ratio bands back into USD terms.
        self.extended
            .compute_usd_bands(starting_indexes, metric_price, exit)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,277 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use crate::{
ComputeIndexes, blocks, indexes,
internal::{ComputedFromHeightStdDevExtended, Price},
utils::get_percentile,
};
use super::super::ComputedFromHeightLast;
/// Extended ratio metrics derived from a base ratio vec: rolling SMAs,
/// running percentiles, their USD projections, and std-dev families.
#[derive(Traversable)]
pub struct ComputedFromHeightRatioExtension<M: StorageMode = Rw> {
    // Rolling means of the ratio over 1-week / 1-month block windows.
    pub ratio_1w_sma: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_1m_sma: ComputedFromHeightLast<StoredF32, M>,
    // Running percentiles of the day-sampled ratio history (see `compute_rest`).
    pub ratio_pct99: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_pct98: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_pct95: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_pct5: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_pct2: ComputedFromHeightLast<StoredF32, M>,
    pub ratio_pct1: ComputedFromHeightLast<StoredF32, M>,
    // USD projections of the percentiles (metric_price * percentile).
    pub ratio_pct99_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    pub ratio_pct98_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    pub ratio_pct95_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    pub ratio_pct5_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    pub ratio_pct2_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    pub ratio_pct1_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    // Std-dev families over full-history / 4y / 2y / 1y windows.
    pub ratio_sd: ComputedFromHeightStdDevExtended<M>,
    pub ratio_4y_sd: ComputedFromHeightStdDevExtended<M>,
    pub ratio_2y_sd: ComputedFromHeightStdDevExtended<M>,
    pub ratio_1y_sd: ComputedFromHeightStdDevExtended<M>,
}

// Added to the caller-supplied parent version on import.
const VERSION: Version = Version::TWO;
impl ComputedFromHeightRatioExtension {
pub(crate) fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let v = version + VERSION;
macro_rules! import {
($suffix:expr) => {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_{}", $suffix),
v,
indexes,
)?
};
}
macro_rules! import_sd {
($suffix:expr, $days:expr) => {
ComputedFromHeightStdDevExtended::forced_import(
db,
&format!("{name}_{}", $suffix),
$days,
v,
indexes,
)?
};
}
macro_rules! import_usd {
($suffix:expr) => {
Price::forced_import(db, &format!("{name}_{}", $suffix), v, indexes)?
};
}
Ok(Self {
ratio_1w_sma: import!("ratio_1w_sma"),
ratio_1m_sma: import!("ratio_1m_sma"),
ratio_sd: import_sd!("ratio", usize::MAX),
ratio_1y_sd: import_sd!("ratio_1y", 365),
ratio_2y_sd: import_sd!("ratio_2y", 2 * 365),
ratio_4y_sd: import_sd!("ratio_4y", 4 * 365),
ratio_pct99: import!("ratio_pct99"),
ratio_pct98: import!("ratio_pct98"),
ratio_pct95: import!("ratio_pct95"),
ratio_pct5: import!("ratio_pct5"),
ratio_pct2: import!("ratio_pct2"),
ratio_pct1: import!("ratio_pct1"),
ratio_pct99_usd: import_usd!("ratio_pct99_usd"),
ratio_pct98_usd: import_usd!("ratio_pct98_usd"),
ratio_pct95_usd: import_usd!("ratio_pct95_usd"),
ratio_pct5_usd: import_usd!("ratio_pct5_usd"),
ratio_pct2_usd: import_usd!("ratio_pct2_usd"),
ratio_pct1_usd: import_usd!("ratio_pct1_usd"),
})
}
/// Compute extended ratio metrics from an externally-provided ratio source.
///
/// Computes the 1w/1m SMAs, the running day-sampled percentiles, and the
/// std-dev families, resuming from wherever the shortest stored vec ends.
pub(crate) fn compute_rest(
    &mut self,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    ratio_source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // SMA using lookback vecs
    self.ratio_1w_sma.height.compute_rolling_average(
        starting_indexes.height,
        &blocks.count.height_1w_ago,
        ratio_source,
        exit,
    )?;
    self.ratio_1m_sma.height.compute_rolling_average(
        starting_indexes.height,
        &blocks.count.height_1m_ago,
        ratio_source,
        exit,
    )?;
    // Percentiles: insert into sorted array on day boundaries
    let ratio_version = ratio_source.version();
    // Reset any percentile vec whose stored version no longer matches the
    // source, forcing recomputation from scratch.
    self.mut_ratio_vecs()
        .try_for_each(|v| -> Result<()> {
            v.validate_computed_version_or_reset(ratio_version)?;
            Ok(())
        })?;
    // Resume from the shortest percentile vec (or earlier, if the caller's
    // starting index is lower).
    let starting_height = self
        .mut_ratio_vecs()
        .map(|v| Height::from(v.len()))
        .min()
        .unwrap()
        .min(starting_indexes.height);
    let start = starting_height.to_usize();
    // A change in `height_24h_ago` signals a day boundary; the percentile
    // history takes one sample per day.
    let day_start = &blocks.count.height_24h_ago;
    // Collect sorted history up to starting point (one per day boundary)
    let mut sorted = {
        let ratio_data = ratio_source.collect_range_at(0, start);
        let day_start_hist = day_start.collect_range_at(0, start);
        let mut sorted: Vec<StoredF32> = Vec::new();
        let mut last_day_start = Height::from(0_usize);
        for (h, ratio) in ratio_data.into_iter().enumerate() {
            let cur_day_start = day_start_hist[h];
            if h == 0 || cur_day_start != last_day_start {
                sorted.push(ratio);
                last_day_start = cur_day_start;
            }
        }
        // Single sort after collection (cheaper than inserting sorted).
        sorted.sort_unstable();
        sorted
    };
    let pct1_vec = &mut self.ratio_pct1.height;
    let pct2_vec = &mut self.ratio_pct2.height;
    let pct5_vec = &mut self.ratio_pct5.height;
    let pct95_vec = &mut self.ratio_pct95.height;
    let pct98_vec = &mut self.ratio_pct98.height;
    let pct99_vec = &mut self.ratio_pct99.height;
    let ratio_len = ratio_source.len();
    let ratio_data = ratio_source.collect_range_at(start, ratio_len);
    // Seed the day-boundary tracker with the value just before `start` so
    // resuming does not double-count the first sample.
    let mut last_day_start = if start > 0 {
        day_start
            .collect_one_at(start - 1)
            .unwrap_or(Height::from(0_usize))
    } else {
        Height::from(0_usize)
    };
    let day_start_data = day_start.collect_range_at(start, ratio_len);
    for (offset, ratio) in ratio_data.into_iter().enumerate() {
        let index = start + offset;
        // Insert into the sorted history on day boundaries only.
        let cur_day_start = day_start_data[offset];
        if index == 0 || cur_day_start != last_day_start {
            let pos = sorted.binary_search(&ratio).unwrap_or_else(|p| p);
            sorted.insert(pos, ratio);
            last_day_start = cur_day_start;
        }
        // No samples yet -> store NaN; otherwise read each percentile from
        // the sorted history.
        if sorted.is_empty() {
            pct1_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct2_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct5_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct95_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct98_vec.truncate_push_at(index, StoredF32::NAN)?;
            pct99_vec.truncate_push_at(index, StoredF32::NAN)?;
        } else {
            pct1_vec.truncate_push_at(index, get_percentile(&sorted, 0.01))?;
            pct2_vec.truncate_push_at(index, get_percentile(&sorted, 0.02))?;
            pct5_vec.truncate_push_at(index, get_percentile(&sorted, 0.05))?;
            pct95_vec.truncate_push_at(index, get_percentile(&sorted, 0.95))?;
            pct98_vec.truncate_push_at(index, get_percentile(&sorted, 0.98))?;
            pct99_vec.truncate_push_at(index, get_percentile(&sorted, 0.99))?;
        }
    }
    {
        // Hold the exit lock while flushing (see `Exit`).
        let _lock = exit.lock();
        self.mut_ratio_vecs()
            .try_for_each(|v| v.flush())?;
    }
    // Compute stddev at height level
    self.ratio_sd
        .compute_all(blocks, starting_indexes, exit, ratio_source)?;
    self.ratio_4y_sd
        .compute_all(blocks, starting_indexes, exit, ratio_source)?;
    self.ratio_2y_sd
        .compute_all(blocks, starting_indexes, exit, ratio_source)?;
    self.ratio_1y_sd
        .compute_all(blocks, starting_indexes, exit, ratio_source)?;
    Ok(())
}
/// Compute USD ratio bands: usd_band = metric_price * ratio_percentile
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_source:expr) => {
self.$usd_field
.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
$band_source,
exit,
)?;
};
}
compute_band!(ratio_pct99_usd, &self.ratio_pct99.height);
compute_band!(ratio_pct98_usd, &self.ratio_pct98.height);
compute_band!(ratio_pct95_usd, &self.ratio_pct95.height);
compute_band!(ratio_pct5_usd, &self.ratio_pct5.height);
compute_band!(ratio_pct2_usd, &self.ratio_pct2.height);
compute_band!(ratio_pct1_usd, &self.ratio_pct1.height);
// Stddev USD bands
self.ratio_sd
.compute_usd_bands(starting_indexes, metric_price, exit)?;
self.ratio_4y_sd
.compute_usd_bands(starting_indexes, metric_price, exit)?;
self.ratio_2y_sd
.compute_usd_bands(starting_indexes, metric_price, exit)?;
self.ratio_1y_sd
.compute_usd_bands(starting_indexes, metric_price, exit)?;
Ok(())
}
fn mut_ratio_vecs(
&mut self,
) -> impl Iterator<Item = &mut EagerVec<PcoVec<Height, StoredF32>>> {
[
&mut self.ratio_pct1.height,
&mut self.ratio_pct2.height,
&mut self.ratio_pct5.height,
&mut self.ratio_pct95.height,
&mut self.ratio_pct98.height,
&mut self.ratio_pct99.height,
]
.into_iter()
}
}

View File

@@ -0,0 +1,62 @@
mod extended;
mod extension;
mod price_extended;
pub use extended::*;
pub use extension::*;
pub use price_extended::*;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{Database, Exit, ReadableVec, Rw, StorageMode};
use crate::{ComputeIndexes, indexes};
use super::ComputedFromHeightLast;
/// Minimal ratio vec set: just the `{name}_ratio` vec at height granularity.
#[derive(Traversable)]
pub struct ComputedFromHeightRatio<M: StorageMode = Rw> {
    // ratio = close_price / metric_price per block height (see `compute_ratio`).
    pub ratio: ComputedFromHeightLast<StoredF32, M>,
}

// Added to the caller-supplied version on import.
const VERSION: Version = Version::TWO;
impl ComputedFromHeightRatio {
    /// Open (or create) the `{name}_ratio` vec.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ratio_name = format!("{name}_ratio");
        let ratio =
            ComputedFromHeightLast::forced_import(db, &ratio_name, version + VERSION, indexes)?;
        Ok(Self { ratio })
    }

    /// Compute ratio = close_price / metric_price at height level
    pub(crate) fn compute_ratio(
        &mut self,
        starting_indexes: &ComputeIndexes,
        close_price: &impl ReadableVec<Height, Dollars>,
        metric_price: &impl ReadableVec<Height, Dollars>,
        exit: &Exit,
    ) -> Result<()> {
        self.ratio.height.compute_transform2(
            starting_indexes.height,
            close_price,
            metric_price,
            |(h, close, metric, ..)| {
                // Guard against a zero metric price: fall back to a neutral
                // ratio of 1.0 instead of dividing by zero.
                let value = if metric == Dollars::ZERO {
                    StoredF32::from(1.0)
                } else {
                    StoredF32::from(close / metric)
                };
                (h, value)
            },
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,58 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, Price};
use crate::{ComputeIndexes, blocks, indexes, prices};
use super::ComputedFromHeightRatioExtended;
/// An extended ratio vec set plus an owned height-level metric-price vec.
/// `Deref`s to the inner `ComputedFromHeightRatioExtended`.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightPriceWithRatioExtended<M: StorageMode = Rw> {
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    // Ratio + extended metrics, computed against `price`.
    pub inner: ComputedFromHeightRatioExtended<M>,
    // The metric's own USD price at height level (filled by `compute_all`).
    pub price: Price<ComputedFromHeightLast<Dollars, M>>,
}
impl ComputedFromHeightPriceWithRatioExtended {
    /// Open (or create) the ratio vec set plus the owned metric-price vec.
    pub(crate) fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let inner = ComputedFromHeightRatioExtended::forced_import(db, name, version, indexes)?;
        let price = Price::forced_import(db, name, version + Version::TWO, indexes)?;
        Ok(Self { inner, price })
    }

    /// Compute price via closure, then compute ratio + extended metrics.
    pub(crate) fn compute_all<F>(
        &mut self,
        blocks: &blocks::Vecs,
        prices: &prices::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute_price: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, Dollars>>) -> Result<()>,
    {
        // Fill the owned USD price vec first...
        compute_price(&mut self.price.usd.height)?;
        // ...then derive ratio + extended metrics from it.
        self.inner.compute_rest(
            blocks,
            prices,
            starting_indexes,
            exit,
            &self.price.usd.height,
        )?;
        Ok(())
    }
}

View File

@@ -1,527 +0,0 @@
use std::mem;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex,
WritableVec,
};
use crate::{ComputeIndexes, blocks, indexes};
use crate::internal::{ComputedFromHeightLast, Price};
/// Feature flags controlling which optional std-dev vec families are
/// imported alongside the always-present `sd` vec.
#[derive(Default)]
pub struct StandardDeviationVecsOptions {
    // Import the z-score vec (computed by `compute_zscore` from source/SMA/SD).
    zscore: bool,
    // Import the SMA ± k·sd band vecs (k in 0.5..=3).
    bands: bool,
    // Import USD projections of the band vecs.
    price_bands: bool,
}
impl StandardDeviationVecsOptions {
    /// Enable every optional vec family.
    pub(crate) fn add_all(self) -> Self {
        Self {
            zscore: true,
            bands: true,
            price_bands: true,
        }
    }

    /// Whether the z-score vec should be imported.
    pub(crate) fn zscore(&self) -> bool {
        self.zscore
    }

    /// Whether the ±k·sd band vecs should be imported.
    pub(crate) fn bands(&self) -> bool {
        self.bands
    }

    /// Whether USD projections of the bands should be imported.
    pub(crate) fn price_bands(&self) -> bool {
        self.price_bands
    }
}
/// Rolling standard-deviation vec family over a `StoredF32` height-level
/// source (see `compute_all` / `compute_rest`).
///
/// Only `sd` always exists; every other vec is gated by
/// `StandardDeviationVecsOptions` at import time.
#[derive(Traversable)]
pub struct ComputedFromHeightStdDev<M: StorageMode = Rw> {
    // Window size in days; `usize::MAX` means full history.
    days: usize,
    // Rolling mean of the source over `days`.
    pub sma: Option<ComputedFromHeightLast<StoredF32, M>>,
    // Deviation of the day-sampled source relative to the current SMA.
    pub sd: ComputedFromHeightLast<StoredF32, M>,
    // Z-score vec (filled by `compute_zscore` from source, SMA and SD).
    pub zscore: Option<ComputedFromHeightLast<StoredF32, M>>,
    // Upper bands: sma + k·sd for k in {0.5, 1, 1.5, 2, 2.5, 3}.
    pub p0_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub p1sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub p1_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub p2sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub p2_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub p3sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    // Lower bands: sma - k·sd.
    pub m0_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub m1sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub m1_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub m2sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub m2_5sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    pub m3sd: Option<ComputedFromHeightLast<StoredF32, M>>,
    // USD projections of the bands: metric_price * band value
    // (filled by `compute_usd_bands`).
    pub _0sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p0_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p1sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p1_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p2sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p2_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub p3sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m0_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m1sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m1_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m2sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m2_5sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
    pub m3sd_usd: Option<Price<ComputedFromHeightLast<Dollars, M>>>,
}
impl ComputedFromHeightStdDev {
/// Open (or create) all std-dev vecs for `{name}` under a derived version.
///
/// `days` selects the rolling window (`usize::MAX` = full history);
/// `options` gates which optional families (z-score, bands, USD bands)
/// are imported.
#[allow(clippy::too_many_arguments)]
pub(crate) fn forced_import(
    db: &Database,
    name: &str,
    days: usize,
    parent_version: Version,
    indexes: &indexes::Vecs,
    options: StandardDeviationVecsOptions,
) -> Result<Self> {
    let version = parent_version + Version::TWO;
    // One `ComputedFromHeightLast<StoredF32>` per suffix; panics on failure.
    macro_rules! import {
        ($suffix:expr) => {
            ComputedFromHeightLast::forced_import(
                db,
                &format!("{name}_{}", $suffix),
                version,
                indexes,
            )
            .unwrap()
        };
    }
    let sma_vec = Some(import!("sma"));
    let p0_5sd = options.bands().then(|| import!("p0_5sd"));
    let p1sd = options.bands().then(|| import!("p1sd"));
    let p1_5sd = options.bands().then(|| import!("p1_5sd"));
    let p2sd = options.bands().then(|| import!("p2sd"));
    let p2_5sd = options.bands().then(|| import!("p2_5sd"));
    let p3sd = options.bands().then(|| import!("p3sd"));
    let m0_5sd = options.bands().then(|| import!("m0_5sd"));
    let m1sd = options.bands().then(|| import!("m1sd"));
    let m1_5sd = options.bands().then(|| import!("m1_5sd"));
    let m2sd = options.bands().then(|| import!("m2sd"));
    let m2_5sd = options.bands().then(|| import!("m2_5sd"));
    let m3sd = options.bands().then(|| import!("m3sd"));
    // Import USD price band vecs (computed eagerly at compute time)
    // A USD vec exists only when price bands are enabled AND its band
    // source was imported.
    macro_rules! lazy_usd {
        ($band:expr, $suffix:expr) => {
            if !options.price_bands() {
                None
            } else {
                $band.as_ref().map(|_| {
                    Price::forced_import(
                        db,
                        &format!("{name}_{}", $suffix),
                        version,
                        indexes,
                    )
                    .unwrap()
                })
            }
        };
    }
    Ok(Self {
        days,
        sd: import!("sd"),
        zscore: options.zscore().then(|| import!("zscore")),
        // Lazy USD vecs
        _0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"),
        p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"),
        p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"),
        p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"),
        p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"),
        p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"),
        p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"),
        m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"),
        m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"),
        m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"),
        m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"),
        m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"),
        m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"),
        // Stored band sources
        sma: sma_vec,
        p0_5sd,
        p1sd,
        p1_5sd,
        p2sd,
        p2_5sd,
        p3sd,
        m0_5sd,
        m1sd,
        m1_5sd,
        m2sd,
        m2_5sd,
        m3sd,
    })
}
pub(crate) fn forced_import_from_lazy(
db: &Database,
name: &str,
days: usize,
parent_version: Version,
indexes: &indexes::Vecs,
options: StandardDeviationVecsOptions,
) -> Result<Self> {
let version = parent_version + Version::TWO;
macro_rules! import {
($suffix:expr) => {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)
.unwrap()
};
}
let sma_vec = Some(import!("sma"));
let p0_5sd = options.bands().then(|| import!("p0_5sd"));
let p1sd = options.bands().then(|| import!("p1sd"));
let p1_5sd = options.bands().then(|| import!("p1_5sd"));
let p2sd = options.bands().then(|| import!("p2sd"));
let p2_5sd = options.bands().then(|| import!("p2_5sd"));
let p3sd = options.bands().then(|| import!("p3sd"));
let m0_5sd = options.bands().then(|| import!("m0_5sd"));
let m1sd = options.bands().then(|| import!("m1sd"));
let m1_5sd = options.bands().then(|| import!("m1_5sd"));
let m2sd = options.bands().then(|| import!("m2sd"));
let m2_5sd = options.bands().then(|| import!("m2_5sd"));
let m3sd = options.bands().then(|| import!("m3sd"));
// For lazy metric price, use from_lazy_block_last_and_block_last.
macro_rules! lazy_usd {
($band:expr, $suffix:expr) => {
if !options.price_bands() {
None
} else {
$band.as_ref().map(|_| {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)
.unwrap()
})
}
};
}
Ok(Self {
days,
sd: import!("sd"),
zscore: options.zscore().then(|| import!("zscore")),
_0sd_usd: lazy_usd!(&sma_vec, "0sd_usd"),
p0_5sd_usd: lazy_usd!(&p0_5sd, "p0_5sd_usd"),
p1sd_usd: lazy_usd!(&p1sd, "p1sd_usd"),
p1_5sd_usd: lazy_usd!(&p1_5sd, "p1_5sd_usd"),
p2sd_usd: lazy_usd!(&p2sd, "p2sd_usd"),
p2_5sd_usd: lazy_usd!(&p2_5sd, "p2_5sd_usd"),
p3sd_usd: lazy_usd!(&p3sd, "p3sd_usd"),
m0_5sd_usd: lazy_usd!(&m0_5sd, "m0_5sd_usd"),
m1sd_usd: lazy_usd!(&m1sd, "m1sd_usd"),
m1_5sd_usd: lazy_usd!(&m1_5sd, "m1_5sd_usd"),
m2sd_usd: lazy_usd!(&m2sd, "m2sd_usd"),
m2_5sd_usd: lazy_usd!(&m2_5sd, "m2_5sd_usd"),
m3sd_usd: lazy_usd!(&m3sd, "m3sd_usd"),
sma: sma_vec,
p0_5sd,
p1sd,
p1_5sd,
p2sd,
p2_5sd,
p3sd,
m0_5sd,
m1sd,
m1_5sd,
m2sd,
m2_5sd,
m3sd,
})
}
/// Compute everything: the SMA first, then SD, bands and z-score on top of it.
pub(crate) fn compute_all(
    &mut self,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // 1. SMA over the configured window, or over the full history when
    //    `days == usize::MAX`.
    let sma_height = &mut self.sma.as_mut().unwrap().height;
    if self.days == usize::MAX {
        sma_height.compute_sma_(starting_indexes.height, source, self.days, exit, None)?;
    } else {
        sma_height.compute_rolling_average(
            starting_indexes.height,
            blocks.count.start_vec(self.days),
            source,
            exit,
        )?;
    }
    // 2. `None` makes compute_rest read the SMA we just stored.
    self.compute_rest(
        blocks,
        starting_indexes,
        exit,
        None::<&EagerVec<PcoVec<Height, StoredF32>>>,
        source,
    )
}
/// Compute SD, bands and z-score from `source`, given an SMA.
///
/// `sma_opt`: when `Some`, the SMA vec to use; when `None`, falls back to
/// the internally stored `self.sma` (which must be `Some`, or this panics).
pub(crate) fn compute_rest(
    &mut self,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    sma_opt: Option<&impl ReadableVec<Height, StoredF32>>,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // SAFETY(review): the transmute coerces `&EagerVec<PcoVec<Height, StoredF32>>`
    // into the caller's `&impl ReadableVec<Height, StoredF32>`; this is only
    // sound when the generic is instantiated with that exact concrete type —
    // `compute_all` passes `None::<&EagerVec<PcoVec<Height, StoredF32>>>`;
    // confirm for any other caller.
    let sma = sma_opt
        .unwrap_or_else(|| unsafe { mem::transmute(&self.sma.as_ref().unwrap().height) });
    let source_version = source.version();
    // Reset stale vecs whose stored version no longer matches the source.
    self.mut_stateful_height_vecs()
        .try_for_each(|v| -> Result<()> {
            v.validate_computed_version_or_reset(source_version)?;
            Ok(())
        })?;
    // Resume from the shortest stored vec (or earlier, per the caller).
    let starting_height = self
        .mut_stateful_height_vecs()
        .map(|v| Height::from(v.len()))
        .min()
        .unwrap()
        .min(starting_indexes.height);
    // Reconstruct running statistics up to starting point.
    // We accumulate one data point per day boundary, tracking sum and sum_sq
    // for O(1) per-height SD computation (instead of O(n) sorted-array scan).
    let day_start = &blocks.count.height_24h_ago;
    let start = starting_height.to_usize();
    let mut n: usize = 0;
    let mut welford_sum: f64 = 0.0;
    let mut welford_sum_sq: f64 = 0.0;
    if start > 0 {
        let day_start_hist = day_start.collect_range_at(0, start);
        let source_hist = source.collect_range_at(0, start);
        let mut last_ds = Height::from(0_usize);
        for h in 0..start {
            let cur_ds = day_start_hist[h];
            // One sample per day boundary (first height always included).
            if h == 0 || cur_ds != last_ds {
                let val = *source_hist[h] as f64;
                n += 1;
                welford_sum += val;
                welford_sum_sq += val * val;
                last_ds = cur_ds;
            }
        }
    }
    // Shorthand for an optional band's raw height vec.
    macro_rules! band_ref {
        ($field:ident) => {
            self.$field.as_mut().map(|c| &mut c.height)
        };
    }
    let mut p0_5sd = band_ref!(p0_5sd);
    let mut p1sd = band_ref!(p1sd);
    let mut p1_5sd = band_ref!(p1_5sd);
    let mut p2sd = band_ref!(p2sd);
    let mut p2_5sd = band_ref!(p2_5sd);
    let mut p3sd = band_ref!(p3sd);
    let mut m0_5sd = band_ref!(m0_5sd);
    let mut m1sd = band_ref!(m1sd);
    let mut m1_5sd = band_ref!(m1_5sd);
    let mut m2sd = band_ref!(m2sd);
    let mut m2_5sd = band_ref!(m2_5sd);
    let mut m3sd = band_ref!(m3sd);
    let source_len = source.len();
    let source_data = source.collect_range_at(start, source_len);
    let sma_data = sma.collect_range_at(start, sma.len());
    // Seed the day-boundary tracker from just before `start` so resuming
    // does not double-count the first sample.
    let mut last_day_start = if start > 0 {
        day_start
            .collect_one_at(start - 1)
            .unwrap_or(Height::from(0_usize))
    } else {
        Height::from(0_usize)
    };
    let day_start_data = day_start.collect_range_at(start, source_len);
    for (offset, ratio) in source_data.into_iter().enumerate() {
        let index = start + offset;
        // Update running statistics on day boundaries
        let cur_day_start = day_start_data[offset];
        if index == 0 || cur_day_start != last_day_start {
            let val = *ratio as f64;
            n += 1;
            welford_sum += val;
            welford_sum_sq += val * val;
            last_day_start = cur_day_start;
        }
        let average = sma_data[offset];
        let avg_f64 = *average as f64;
        // SD = sqrt((sum_sq/n - 2*avg*sum/n + avg^2))
        // This is the population SD of all daily values relative to the current SMA
        let sd = if n > 0 {
            let nf = n as f64;
            let variance =
                welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64;
            // max(0.0) guards against tiny negative values from
            // floating-point cancellation before sqrt.
            StoredF32::from(variance.max(0.0).sqrt() as f32)
        } else {
            StoredF32::from(0.0_f32)
        };
        self.sd.height.truncate_push_at(index, sd)?;
        // Bands: sma ± k·sd, written only for the vecs that were imported.
        if let Some(v) = p0_5sd.as_mut() {
            v.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))?
        }
        if let Some(v) = p1sd.as_mut() {
            v.truncate_push_at(index, average + sd)?
        }
        if let Some(v) = p1_5sd.as_mut() {
            v.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))?
        }
        if let Some(v) = p2sd.as_mut() {
            v.truncate_push_at(index, average + 2 * sd)?
        }
        if let Some(v) = p2_5sd.as_mut() {
            v.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))?
        }
        if let Some(v) = p3sd.as_mut() {
            v.truncate_push_at(index, average + 3 * sd)?
        }
        if let Some(v) = m0_5sd.as_mut() {
            v.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))?
        }
        if let Some(v) = m1sd.as_mut() {
            v.truncate_push_at(index, average - sd)?
        }
        if let Some(v) = m1_5sd.as_mut() {
            v.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))?
        }
        if let Some(v) = m2sd.as_mut() {
            v.truncate_push_at(index, average - 2 * sd)?
        }
        if let Some(v) = m2_5sd.as_mut() {
            v.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))?
        }
        if let Some(v) = m3sd.as_mut() {
            v.truncate_push_at(index, average - 3 * sd)?
        }
    }
    {
        // Hold the exit lock while flushing (see `Exit`).
        let _lock = exit.lock();
        self.mut_stateful_height_vecs()
            .try_for_each(|v| v.flush())?;
    }
    // Z-score derives from source, SMA and the SD we just stored.
    if let Some(zscore) = self.zscore.as_mut() {
        zscore.height.compute_zscore(
            starting_indexes.height,
            source,
            sma,
            &self.sd.height,
            exit,
        )?;
    }
    Ok(())
}
/// Compute USD price bands: usd_band = metric_price * band_ratio
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_field:ident) => {
if let Some(usd) = self.$usd_field.as_mut() {
if let Some(band) = self.$band_field.as_ref() {
usd.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
&band.height,
exit,
)?;
}
}
};
}
compute_band!(_0sd_usd, sma);
compute_band!(p0_5sd_usd, p0_5sd);
compute_band!(p1sd_usd, p1sd);
compute_band!(p1_5sd_usd, p1_5sd);
compute_band!(p2sd_usd, p2sd);
compute_band!(p2_5sd_usd, p2_5sd);
compute_band!(p3sd_usd, p3sd);
compute_band!(m0_5sd_usd, m0_5sd);
compute_band!(m1sd_usd, m1sd);
compute_band!(m1_5sd_usd, m1_5sd);
compute_band!(m2sd_usd, m2sd);
compute_band!(m2_5sd_usd, m2_5sd);
compute_band!(m3sd_usd, m3sd);
Ok(())
}
/// Yields mutable references to every stateful computed vec that is present:
/// the SD itself plus each configured band, in a fixed order.
fn mut_stateful_computed(
    &mut self,
) -> impl Iterator<Item = &mut ComputedFromHeightLast<StoredF32>> {
    let entries = [
        Some(&mut self.sd),
        self.p0_5sd.as_mut(),
        self.p1sd.as_mut(),
        self.p1_5sd.as_mut(),
        self.p2sd.as_mut(),
        self.p2_5sd.as_mut(),
        self.p3sd.as_mut(),
        self.m0_5sd.as_mut(),
        self.m1sd.as_mut(),
        self.m1_5sd.as_mut(),
        self.m2sd.as_mut(),
        self.m2_5sd.as_mut(),
        self.m3sd.as_mut(),
    ];
    // Drop the bands that are not configured (`None`).
    entries.into_iter().filter_map(|entry| entry)
}
/// Projects `mut_stateful_computed` down to the raw per-height vecs.
fn mut_stateful_height_vecs(
    &mut self,
) -> impl Iterator<Item = &mut EagerVec<PcoVec<Height, StoredF32>>> {
    self.mut_stateful_computed()
        .map(|computed| &mut computed.height)
}
}

View File

@@ -0,0 +1,262 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Dollars, Height, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Database, EagerVec, Exit, PcoVec, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use crate::{ComputeIndexes, blocks, indexes};
use crate::internal::{ComputedFromHeightLast, Price};
use super::ComputedFromHeightStdDev;
/// Std-dev metric extended with a z-score, ±σ ratio bands, and USD price bands.
///
/// `base` holds the SMA + SD; every other field is derived from them in
/// `compute_bands` / `compute_usd_bands`.
#[derive(Traversable)]
#[traversable(merge)]
pub struct ComputedFromHeightStdDevExtended<M: StorageMode = Rw> {
    /// Underlying SMA + SD vecs this type builds on.
    #[traversable(flatten)]
    pub base: ComputedFromHeightStdDev<M>,
    /// Z-score of the source relative to SMA/SD (via `compute_zscore`) —
    /// presumably (source − sma) / sd; confirm in vecdb. TODO confirm.
    pub zscore: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 0.5·sd per height.
    pub p0_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 1·sd per height.
    pub p1sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 1.5·sd per height.
    pub p1_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 2·sd per height.
    pub p2sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 2.5·sd per height.
    pub p2_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma + 3·sd per height.
    pub p3sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 0.5·sd per height.
    pub m0_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 1·sd per height.
    pub m1sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 1.5·sd per height.
    pub m1_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 2·sd per height.
    pub m2sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 2.5·sd per height.
    pub m2_5sd: ComputedFromHeightLast<StoredF32, M>,
    /// Ratio band: sma − 3·sd per height.
    pub m3sd: ComputedFromHeightLast<StoredF32, M>,
    /// USD band: metric price × base SMA ratio (see `compute_usd_bands`).
    pub _0sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p0_5sd ratio.
    pub p0_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p1sd ratio.
    pub p1sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p1_5sd ratio.
    pub p1_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p2sd ratio.
    pub p2sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p2_5sd ratio.
    pub p2_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × p3sd ratio.
    pub p3sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m0_5sd ratio.
    pub m0_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m1sd ratio.
    pub m1sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m1_5sd ratio.
    pub m1_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m2sd ratio.
    pub m2sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m2_5sd ratio.
    pub m2_5sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
    /// USD band: metric price × m3sd ratio.
    pub m3sd_usd: Price<ComputedFromHeightLast<Dollars, M>>,
}
impl ComputedFromHeightStdDevExtended {
pub(crate) fn forced_import(
db: &Database,
name: &str,
days: usize,
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let version = parent_version + Version::TWO;
macro_rules! import {
($suffix:expr) => {
ComputedFromHeightLast::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)?
};
}
macro_rules! import_usd {
($suffix:expr) => {
Price::forced_import(
db,
&format!("{name}_{}", $suffix),
version,
indexes,
)?
};
}
Ok(Self {
base: ComputedFromHeightStdDev::forced_import(db, name, days, parent_version, indexes)?,
zscore: import!("zscore"),
p0_5sd: import!("p0_5sd"),
p1sd: import!("p1sd"),
p1_5sd: import!("p1_5sd"),
p2sd: import!("p2sd"),
p2_5sd: import!("p2_5sd"),
p3sd: import!("p3sd"),
m0_5sd: import!("m0_5sd"),
m1sd: import!("m1sd"),
m1_5sd: import!("m1_5sd"),
m2sd: import!("m2sd"),
m2_5sd: import!("m2_5sd"),
m3sd: import!("m3sd"),
_0sd_usd: import_usd!("0sd_usd"),
p0_5sd_usd: import_usd!("p0_5sd_usd"),
p1sd_usd: import_usd!("p1sd_usd"),
p1_5sd_usd: import_usd!("p1_5sd_usd"),
p2sd_usd: import_usd!("p2sd_usd"),
p2_5sd_usd: import_usd!("p2_5sd_usd"),
p3sd_usd: import_usd!("p3sd_usd"),
m0_5sd_usd: import_usd!("m0_5sd_usd"),
m1sd_usd: import_usd!("m1sd_usd"),
m1_5sd_usd: import_usd!("m1_5sd_usd"),
m2sd_usd: import_usd!("m2sd_usd"),
m2_5sd_usd: import_usd!("m2_5sd_usd"),
m3sd_usd: import_usd!("m3sd_usd"),
})
}
/// Computes the base SMA + SD, then derives bands and z-score from them.
pub(crate) fn compute_all(
    &mut self,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    self.base
        .compute_all(blocks, starting_indexes, exit, source)?;
    // No external SMA override: `compute_bands` falls back to `self.base.sma`.
    // The turbofish only pins the generic for the `None` case.
    self.compute_bands(
        starting_indexes,
        exit,
        None::<&EagerVec<PcoVec<Height, StoredF32>>>,
        source,
    )
}
/// Recomputes the ±σ ratio bands (sma ± k·sd) and the z-score, resuming from
/// the earliest height any band vec is missing (or earlier if requested).
///
/// `sma_opt` lets the caller supply an external SMA source; when `None`, the
/// base SMA (`self.base.sma.height`) is used for both the bands and z-score.
pub(crate) fn compute_bands(
    &mut self,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    sma_opt: Option<&impl ReadableVec<Height, StoredF32>>,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // Drop any band whose stored version no longer matches the source.
    let source_version = source.version();
    self.mut_band_height_vecs()
        .try_for_each(|v| -> Result<()> {
            v.validate_computed_version_or_reset(source_version)?;
            Ok(())
        })?;
    // Resume point: shortest band length, capped by the caller's start.
    let starting_height = self
        .mut_band_height_vecs()
        .map(|v| Height::from(v.len()))
        .min()
        .unwrap()
        .min(starting_indexes.height);
    let start = starting_height.to_usize();
    let source_len = source.len();
    // NOTE(review): `collect_range_at` looks like it takes (start, end) based
    // on sibling usage (`collect_range_at(0, start)` in compute_sd) — confirm.
    let source_data = source.collect_range_at(start, source_len);
    let sma_len = sma_opt.map(|s| s.len()).unwrap_or(self.base.sma.height.len());
    let sma_data: Vec<StoredF32> = if let Some(sma) = sma_opt {
        sma.collect_range_at(start, sma_len)
    } else {
        self.base.sma.height.collect_range_at(start, sma_len)
    };
    let sd_data = self.base.sd.height.collect_range_at(start, self.base.sd.height.len());
    // NOTE(review): `sma_data[offset]`/`sd_data[offset]` assume sma and sd are
    // at least as long as the source over this range — verify against callers.
    for (offset, _ratio) in source_data.into_iter().enumerate() {
        let index = start + offset;
        let average = sma_data[offset];
        let sd = sd_data[offset];
        self.p0_5sd.height.truncate_push_at(index, average + StoredF32::from(0.5 * *sd))?;
        self.p1sd.height.truncate_push_at(index, average + sd)?;
        self.p1_5sd.height.truncate_push_at(index, average + StoredF32::from(1.5 * *sd))?;
        self.p2sd.height.truncate_push_at(index, average + 2 * sd)?;
        self.p2_5sd.height.truncate_push_at(index, average + StoredF32::from(2.5 * *sd))?;
        self.p3sd.height.truncate_push_at(index, average + 3 * sd)?;
        self.m0_5sd.height.truncate_push_at(index, average - StoredF32::from(0.5 * *sd))?;
        self.m1sd.height.truncate_push_at(index, average - sd)?;
        self.m1_5sd.height.truncate_push_at(index, average - StoredF32::from(1.5 * *sd))?;
        self.m2sd.height.truncate_push_at(index, average - 2 * sd)?;
        self.m2_5sd.height.truncate_push_at(index, average - StoredF32::from(2.5 * *sd))?;
        self.m3sd.height.truncate_push_at(index, average - 3 * sd)?;
    }
    {
        // Flush all bands while holding the exit lock so shutdown can't
        // interleave with the writes.
        let _lock = exit.lock();
        self.mut_band_height_vecs()
            .try_for_each(|v| v.flush())?;
    }
    // The two branches are identical except for the SMA source; they can't be
    // unified without boxing because the concrete `ReadableVec` types differ.
    if let Some(sma) = sma_opt {
        self.zscore.height.compute_zscore(
            starting_indexes.height,
            source,
            sma,
            &self.base.sd.height,
            exit,
        )?;
    } else {
        self.zscore.height.compute_zscore(
            starting_indexes.height,
            source,
            &self.base.sma.height,
            &self.base.sd.height,
            exit,
        )?;
    }
    Ok(())
}
/// Compute USD price bands: usd_band = metric_price * band_ratio
pub(crate) fn compute_usd_bands(
&mut self,
starting_indexes: &ComputeIndexes,
metric_price: &impl ReadableVec<Height, Dollars>,
exit: &Exit,
) -> Result<()> {
use crate::internal::PriceTimesRatio;
macro_rules! compute_band {
($usd_field:ident, $band_source:expr) => {
self.$usd_field.usd
.compute_binary::<Dollars, StoredF32, PriceTimesRatio>(
starting_indexes.height,
metric_price,
$band_source,
exit,
)?;
};
}
compute_band!(_0sd_usd, &self.base.sma.height);
compute_band!(p0_5sd_usd, &self.p0_5sd.height);
compute_band!(p1sd_usd, &self.p1sd.height);
compute_band!(p1_5sd_usd, &self.p1_5sd.height);
compute_band!(p2sd_usd, &self.p2sd.height);
compute_band!(p2_5sd_usd, &self.p2_5sd.height);
compute_band!(p3sd_usd, &self.p3sd.height);
compute_band!(m0_5sd_usd, &self.m0_5sd.height);
compute_band!(m1sd_usd, &self.m1sd.height);
compute_band!(m1_5sd_usd, &self.m1_5sd.height);
compute_band!(m2sd_usd, &self.m2sd.height);
compute_band!(m2_5sd_usd, &self.m2_5sd.height);
compute_band!(m3sd_usd, &self.m3sd.height);
Ok(())
}
/// Mutable references to every band height vec, in a fixed order.
/// The SMA/SD vecs live in `base` and are deliberately excluded.
fn mut_band_height_vecs(
    &mut self,
) -> impl Iterator<Item = &mut EagerVec<PcoVec<Height, StoredF32>>> {
    let bands = [
        &mut self.p0_5sd.height,
        &mut self.p1sd.height,
        &mut self.p1_5sd.height,
        &mut self.p2sd.height,
        &mut self.p2_5sd.height,
        &mut self.p3sd.height,
        &mut self.m0_5sd.height,
        &mut self.m1sd.height,
        &mut self.m1_5sd.height,
        &mut self.m2sd.height,
        &mut self.m2_5sd.height,
        &mut self.m3sd.height,
    ];
    bands.into_iter()
}
}

View File

@@ -0,0 +1,168 @@
mod extended;
pub use extended::*;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredF32, Version};
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, ReadableVec, Rw, StorageMode, VecIndex, WritableVec};
use crate::{ComputeIndexes, blocks, indexes};
use crate::internal::ComputedFromHeightLast;
/// Simple moving average plus a deviation measure of a per-height series.
#[derive(Traversable)]
pub struct ComputedFromHeightStdDev<M: StorageMode = Rw> {
    /// Lookback window in days; `usize::MAX` means full-history SMA.
    days: usize,
    /// Simple moving average of the source over `days`.
    pub sma: ComputedFromHeightLast<StoredF32, M>,
    /// Deviation of daily-sampled source values around the SMA
    /// (see `compute_sd` for the exact formula).
    pub sd: ComputedFromHeightLast<StoredF32, M>,
}
impl ComputedFromHeightStdDev {
/// Opens (or creates) the `{name}_sma` and `{name}_sd` vecs.
///
/// Both inner vecs share the bumped `parent_version + Version::TWO`.
pub(crate) fn forced_import(
    db: &Database,
    name: &str,
    days: usize,
    parent_version: Version,
    indexes: &indexes::Vecs,
) -> Result<Self> {
    let version = parent_version + Version::TWO;
    // Both imports differ only by suffix, so share one helper.
    let import = |suffix: &str| {
        ComputedFromHeightLast::forced_import(db, &format!("{name}_{suffix}"), version, indexes)
    };
    Ok(Self {
        days,
        sma: import("sma")?,
        sd: import("sd")?,
    })
}
/// Computes the SMA over the configured window, then the deviation vec.
pub(crate) fn compute_all(
    &mut self,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // 1. Compute SMA using the appropriate lookback vec (or full-history SMA)
    if self.days != usize::MAX {
        let window_starts = blocks.count.start_vec(self.days);
        self.sma.height.compute_rolling_average(
            starting_indexes.height,
            window_starts,
            source,
            exit,
        )?;
    } else {
        // Full history SMA (days == usize::MAX)
        // NOTE(review): passes usize::MAX as the window to compute_sma_ —
        // presumably that helper treats it as "unbounded"; confirm in vecdb.
        self.sma.height.compute_sma_(
            starting_indexes.height,
            source,
            self.days,
            exit,
            None,
        )?;
    }
    // Split borrows: sd is mutated, sma is read
    compute_sd(
        &mut self.sd,
        blocks,
        starting_indexes,
        exit,
        &self.sma.height,
        source,
    )
}
}
/// Fills `sd` with the root-mean-square deviation of daily-sampled source
/// values around the current SMA, resumable from a previous run.
///
/// One sample is taken per day boundary (detected via `height_24h_ago`
/// changing), accumulated into running sums, and at each height the value
/// sqrt(E[x²] − 2·avg·E[x] + avg²) is stored — i.e. the mean squared
/// deviation of the daily samples relative to `sma[h]`, NOT the classic
/// variance around the samples' own mean. This matches the sibling
/// implementation's stated intent.
fn compute_sd(
    sd: &mut ComputedFromHeightLast<StoredF32>,
    blocks: &blocks::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    sma: &impl ReadableVec<Height, StoredF32>,
    source: &impl ReadableVec<Height, StoredF32>,
) -> Result<()> {
    // Invalidate stale data if the source's version changed.
    let source_version = source.version();
    sd.height
        .validate_computed_version_or_reset(source_version)?;
    // Resume from the shorter of: what's already computed, or caller's start.
    let starting_height = Height::from(sd.height.len()).min(starting_indexes.height);
    let day_start = &blocks.count.height_24h_ago;
    let start = starting_height.to_usize();
    let mut n: usize = 0;
    let mut welford_sum: f64 = 0.0;
    let mut welford_sum_sq: f64 = 0.0;
    // Replay history [0, start) to rebuild the running sums, sampling the
    // source once per day-start change (same rule as the live loop below).
    if start > 0 {
        let day_start_hist = day_start.collect_range_at(0, start);
        let source_hist = source.collect_range_at(0, start);
        let mut last_ds = Height::from(0_usize);
        for h in 0..start {
            let cur_ds = day_start_hist[h];
            if h == 0 || cur_ds != last_ds {
                let val = *source_hist[h] as f64;
                n += 1;
                welford_sum += val;
                welford_sum_sq += val * val;
                last_ds = cur_ds;
            }
        }
    }
    let source_len = source.len();
    let source_data = source.collect_range_at(start, source_len);
    let sma_data = sma.collect_range_at(start, sma.len());
    // Seed the day-boundary detector with the last pre-start value so the
    // first resumed height isn't double-counted as a new day.
    let mut last_day_start = if start > 0 {
        day_start
            .collect_one_at(start - 1)
            .unwrap_or(Height::from(0_usize))
    } else {
        Height::from(0_usize)
    };
    // NOTE(review): assumes day_start and sma cover at least the source's
    // range (indexed by `offset` below) — confirm against callers.
    let day_start_data = day_start.collect_range_at(start, source_len);
    for (offset, ratio) in source_data.into_iter().enumerate() {
        let index = start + offset;
        let cur_day_start = day_start_data[offset];
        // Sample once per day: only when the 24h-ago anchor advances.
        if index == 0 || cur_day_start != last_day_start {
            let val = *ratio as f64;
            n += 1;
            welford_sum += val;
            welford_sum_sq += val * val;
            last_day_start = cur_day_start;
        }
        let average = sma_data[offset];
        let avg_f64 = *average as f64;
        let sd_val = if n > 0 {
            let nf = n as f64;
            // Expansion of mean((x - avg)^2) over the daily samples;
            // clamped at 0 to absorb floating-point cancellation.
            let variance =
                welford_sum_sq / nf - 2.0 * avg_f64 * welford_sum / nf + avg_f64 * avg_f64;
            StoredF32::from(variance.max(0.0).sqrt() as f32)
        } else {
            StoredF32::from(0.0_f32)
        };
        sd.height.truncate_push_at(index, sd_val)?;
    }
    {
        // Flush under the exit lock so shutdown can't interleave.
        let _lock = exit.lock();
        sd.height.flush()?;
    }
    Ok(())
}

View File

@@ -10,7 +10,10 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageM
use crate::{
indexes,
internal::{ComputedFromHeightCumulativeFull, LazyFromHeightLast, SatsToBitcoin, WindowStarts},
internal::{
ComputedFromHeightCumulativeFull, LazyFromHeightLast, SatsToBitcoin, SatsToDollars,
WindowStarts,
},
prices,
};
@@ -58,14 +61,10 @@ impl ValueFromHeightFull {
self.sats.compute(max_from, windows, exit, compute_sats)?;
self.usd.compute(max_from, windows, exit, |vec| {
Ok(vec.compute_transform2(
Ok(vec.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&prices.usd.price,
|(h, sats, price, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
exit,
)?)
})

View File

@@ -10,7 +10,7 @@ use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes, prices,
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
internal::{ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin, SatsToDollars},
};
#[derive(Traversable)]
@@ -56,14 +56,10 @@ impl ValueFromHeightLast {
max_from: Height,
exit: &Exit,
) -> Result<()> {
self.usd.height.compute_transform2(
self.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&prices.usd.price,
|(h, sats, price, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
exit,
)?;
Ok(())

View File

@@ -0,0 +1,64 @@
//! Value type for Height + Rolling pattern.
//!
//! Combines ValueFromHeight (sats/btc/usd per height, no period views) with
//! StoredValueRollingWindows (rolling sums across 4 windows).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Sats, Version};
use derive_more::{Deref, DerefMut};
use vecdb::{Database, EagerVec, Exit, PcoVec, Rw, StorageMode};
use crate::{
indexes,
internal::{StoredValueRollingWindows, ValueFromHeight, WindowStarts},
prices,
};
/// Per-height value (sats/btc/usd) plus rolling window sums.
///
/// Derefs to the inner `ValueFromHeight` so height-level accessors pass through.
#[derive(Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ValueFromHeightLastRolling<M: StorageMode = Rw> {
    /// Sats/btc/usd stored per height (no period views).
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub value: ValueFromHeight<M>,
    /// Rolling sums of sats and usd over the configured windows.
    pub rolling: StoredValueRollingWindows<M>,
}
const VERSION: Version = Version::ZERO;
impl ValueFromHeightLastRolling {
/// Opens (or creates) the per-height value vecs and their rolling windows,
/// both under `name` with the locally bumped version.
pub(crate) fn forced_import(
    db: &Database,
    name: &str,
    version: Version,
    indexes: &indexes::Vecs,
) -> Result<Self> {
    let version = version + VERSION;
    let value = ValueFromHeight::forced_import(db, name, version)?;
    let rolling = StoredValueRollingWindows::forced_import(db, name, version, indexes)?;
    Ok(Self { value, rolling })
}
/// Compute sats height via closure, then USD from price, then rolling windows.
pub(crate) fn compute(
    &mut self,
    max_from: Height,
    windows: &WindowStarts<'_>,
    prices: &prices::Vecs,
    exit: &Exit,
    compute_sats: impl FnOnce(&mut EagerVec<PcoVec<Height, Sats>>) -> Result<()>,
) -> Result<()> {
    // 1) Caller fills the raw sats-per-height vec.
    compute_sats(&mut self.value.sats)?;
    // 2) Derive USD per height from sats and the price series.
    self.value.compute_usd(prices, max_from, exit)?;
    // 3) Roll both sats and usd over the configured windows.
    self.rolling.compute_rolling_sum(
        max_from,
        windows,
        &self.value.sats,
        &self.value.usd,
        exit,
    )
}
}

View File

@@ -11,7 +11,10 @@ use vecdb::{Database, Exit, ReadableCloneableVec, Rw, StorageMode};
use crate::{
indexes,
internal::{ComputedFromHeightCumulative, ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin},
internal::{
ComputedFromHeightCumulative, ComputedFromHeightLast, LazyFromHeightLast, SatsToBitcoin,
SatsToDollars,
},
prices,
};
@@ -57,14 +60,10 @@ impl LazyComputedValueFromHeightCumulative {
) -> Result<()> {
self.sats.compute_rest(max_from, exit)?;
self.usd.height.compute_transform2(
self.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&prices.usd.price,
&self.sats.height,
|(h, price, sats, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
&prices.usd.price,
exit,
)?;
Ok(())

View File

@@ -10,7 +10,10 @@ use vecdb::{Database, EagerVec, Exit, PcoVec, ReadableCloneableVec, Rw, StorageM
use crate::{
indexes, prices,
internal::{ComputedFromHeightCumulativeSum, LazyFromHeightLast, SatsToBitcoin, WindowStarts},
internal::{
ComputedFromHeightCumulativeSum, LazyFromHeightLast, SatsToBitcoin, SatsToDollars,
WindowStarts,
},
};
#[derive(Traversable)]
@@ -57,14 +60,10 @@ impl ValueFromHeightSumCumulative {
self.sats.compute(max_from, windows, exit, compute_sats)?;
self.usd.compute(max_from, windows, exit, |vec| {
Ok(vec.compute_transform2(
Ok(vec.compute_binary::<Sats, Dollars, SatsToDollars>(
max_from,
&self.sats.height,
&prices.usd.price,
|(h, sats, price, ..)| {
let btc = *sats as f64 / 100_000_000.0;
(h, Dollars::from(*price * btc))
},
exit,
)?)
})

View File

@@ -1,3 +1,5 @@
mod lazy_value;
mod value;
pub use lazy_value::*;
pub use value::*;

View File

@@ -0,0 +1,59 @@
//! Value type with height-level data only (no period-derived views).
//!
//! Stores sats and USD per height, plus a lazy btc transform.
//! Use when period views are unnecessary (e.g., rolling windows provide windowed data).
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, Version};
use vecdb::{
Database, EagerVec, Exit, ImportableVec, LazyVecFrom1, PcoVec, ReadableCloneableVec, Rw,
StorageMode,
};
use crate::{internal::{SatsToBitcoin, SatsToDollars}, prices};
const VERSION: Version = Version::TWO; // Match ValueFromHeightLast versioning
/// Per-height value triple: stored sats and usd, plus a lazy btc view.
#[derive(Traversable)]
pub struct ValueFromHeight<M: StorageMode = Rw> {
    /// Raw satoshi amount per height (eagerly stored).
    pub sats: M::Stored<EagerVec<PcoVec<Height, Sats>>>,
    /// Lazy `sats -> btc` transform; never materialized on disk.
    pub btc: LazyVecFrom1<Height, Bitcoin, Height, Sats>,
    /// USD value per height (eagerly stored; see `compute_usd`).
    pub usd: M::Stored<EagerVec<PcoVec<Height, Dollars>>>,
}
impl ValueFromHeight {
/// Opens (or creates) the `{name}` sats vec and `{name}_usd` vec, and wires
/// the lazy `{name}_btc` view on top of the sats vec.
pub(crate) fn forced_import(
    db: &Database,
    name: &str,
    version: Version,
) -> Result<Self> {
    let version = version + VERSION;
    let sats: EagerVec<PcoVec<Height, Sats>> = EagerVec::forced_import(db, name, version)?;
    // The btc view reads through a cheap read-only clone of the sats vec.
    let btc = LazyVecFrom1::transformed::<SatsToBitcoin>(
        &format!("{name}_btc"),
        version,
        sats.read_only_boxed_clone(),
    );
    let usd = EagerVec::forced_import(db, &format!("{name}_usd"), version)?;
    Ok(Self { sats, btc, usd })
}
/// Eagerly compute USD height values: sats[h] * price[h].
pub(crate) fn compute_usd(
    &mut self,
    prices: &prices::Vecs,
    max_from: Height,
    exit: &Exit,
) -> Result<()> {
    // Binary transform Sats × Dollars -> Dollars, applied per height
    // starting at `max_from`; the result is returned directly.
    self.usd.compute_binary::<Sats, Dollars, SatsToDollars>(
        max_from,
        &self.sats,
        &prices.usd.price,
        exit,
    )
}
}

View File

@@ -26,6 +26,7 @@ mod sat_halve_to_bitcoin;
mod sat_identity;
mod sat_mask;
mod sat_to_bitcoin;
mod sats_to_dollars;
mod u16_to_years;
mod volatility_sqrt30;
mod volatility_sqrt365;
@@ -59,6 +60,7 @@ pub use sat_halve_to_bitcoin::*;
pub use sat_identity::*;
pub use sat_mask::*;
pub use sat_to_bitcoin::*;
pub use sats_to_dollars::*;
pub use u16_to_years::*;
pub use volatility_sqrt7::*;
pub use volatility_sqrt30::*;

View File

@@ -0,0 +1,12 @@
use brk_types::{Dollars, Sats};
use vecdb::BinaryTransform;
/// Sats × Dollars → Dollars (price * sats)
///
/// Zero-sized marker used with `compute_binary` to turn a satoshi amount and
/// a per-unit price into a USD value. The multiplication is delegated to the
/// `Dollars * Sats` operator impl, so any unit scaling lives there.
pub struct SatsToDollars;
impl BinaryTransform<Sats, Dollars, Dollars> for SatsToDollars {
    #[inline(always)]
    fn apply(sats: Sats, price: Dollars) -> Dollars {
        price * sats
    }
}

View File

@@ -104,14 +104,14 @@ impl Vecs {
self.nvt.compute_binary::<Dollars, Dollars, Ratio32>(
starting_indexes.height,
&distribution.utxo_cohorts.all.metrics.supply.total.usd.height,
&transactions.volume.sent_sum.usd.height,
&transactions.volume.sent_sum.usd,
exit,
)?;
// Pi Cycle: sma_111d / sma_350d_x2
self.pi_cycle.compute_binary::<Dollars, Dollars, Ratio32>(
starting_indexes.height,
&moving_average.price_111d_sma.price.as_ref().unwrap().usd.height,
&moving_average.price_111d_sma.price.usd.height,
&moving_average.price_350d_sma_x2.usd.height,
exit,
)?;

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightRatio, DollarsTimesTenths, Price},
internal::{ComputedFromHeightPriceWithRatioExtended, DollarsTimesTenths, Price},
};
impl Vecs {
@@ -14,265 +14,21 @@ impl Vecs {
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let price_1w_sma = ComputedFromHeightRatio::forced_import(
db,
"price_1w_sma",
None,
version,
indexes,
true,
)?;
let price_8d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_8d_sma",
None,
version,
indexes,
true,
)?;
let price_13d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_13d_sma",
None,
version,
indexes,
true,
)?;
let price_21d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_21d_sma",
None,
version,
indexes,
true,
)?;
let price_1m_sma = ComputedFromHeightRatio::forced_import(
db,
"price_1m_sma",
None,
version,
indexes,
true,
)?;
let price_34d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_34d_sma",
None,
version,
indexes,
true,
)?;
let price_55d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_55d_sma",
None,
version,
indexes,
true,
)?;
let price_89d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_89d_sma",
None,
version,
indexes,
true,
)?;
let price_111d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_111d_sma",
None,
version,
indexes,
true,
)?;
let price_144d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_144d_sma",
None,
version,
indexes,
true,
)?;
let price_200d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_200d_sma",
None,
version,
indexes,
true,
)?;
let price_350d_sma = ComputedFromHeightRatio::forced_import(
db,
"price_350d_sma",
None,
version,
indexes,
true,
)?;
let price_1y_sma = ComputedFromHeightRatio::forced_import(
db,
"price_1y_sma",
None,
version,
indexes,
true,
)?;
let price_2y_sma = ComputedFromHeightRatio::forced_import(
db,
"price_2y_sma",
None,
version,
indexes,
true,
)?;
let price_200w_sma = ComputedFromHeightRatio::forced_import(
db,
"price_200w_sma",
None,
version,
indexes,
true,
)?;
let price_4y_sma = ComputedFromHeightRatio::forced_import(
db,
"price_4y_sma",
None,
version,
indexes,
true,
)?;
macro_rules! import {
($name:expr) => {
ComputedFromHeightPriceWithRatioExtended::forced_import(
db,
$name,
version,
indexes,
)?
};
}
let price_1w_ema = ComputedFromHeightRatio::forced_import(
db,
"price_1w_ema",
None,
version,
indexes,
true,
)?;
let price_8d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_8d_ema",
None,
version,
indexes,
true,
)?;
let price_12d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_12d_ema",
None,
version,
indexes,
true,
)?;
let price_13d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_13d_ema",
None,
version,
indexes,
true,
)?;
let price_21d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_21d_ema",
None,
version,
indexes,
true,
)?;
let price_26d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_26d_ema",
None,
version,
indexes,
true,
)?;
let price_1m_ema = ComputedFromHeightRatio::forced_import(
db,
"price_1m_ema",
None,
version,
indexes,
true,
)?;
let price_34d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_34d_ema",
None,
version,
indexes,
true,
)?;
let price_55d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_55d_ema",
None,
version,
indexes,
true,
)?;
let price_89d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_89d_ema",
None,
version,
indexes,
true,
)?;
let price_144d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_144d_ema",
None,
version,
indexes,
true,
)?;
let price_200d_ema = ComputedFromHeightRatio::forced_import(
db,
"price_200d_ema",
None,
version,
indexes,
true,
)?;
let price_1y_ema = ComputedFromHeightRatio::forced_import(
db,
"price_1y_ema",
None,
version,
indexes,
true,
)?;
let price_2y_ema = ComputedFromHeightRatio::forced_import(
db,
"price_2y_ema",
None,
version,
indexes,
true,
)?;
let price_200w_ema = ComputedFromHeightRatio::forced_import(
db,
"price_200w_ema",
None,
version,
indexes,
true,
)?;
let price_4y_ema = ComputedFromHeightRatio::forced_import(
db,
"price_4y_ema",
None,
version,
indexes,
true,
)?;
let price_200d_sma = import!("price_200d_sma");
let price_350d_sma = import!("price_350d_sma");
let price_200d_sma_source = &price_200d_sma.price.as_ref().unwrap().usd;
let price_200d_sma_source = &price_200d_sma.price.usd;
let price_200d_sma_x2_4 = Price::from_computed::<DollarsTimesTenths<24>>(
"price_200d_sma_x2_4",
version,
@@ -284,7 +40,7 @@ impl Vecs {
price_200d_sma_source,
);
let price_350d_sma_source = &price_350d_sma.price.as_ref().unwrap().usd;
let price_350d_sma_source = &price_350d_sma.price.usd;
let price_350d_sma_x2 = Price::from_computed::<DollarsTimesTenths<20>>(
"price_350d_sma_x2",
version,
@@ -292,39 +48,39 @@ impl Vecs {
);
Ok(Self {
price_1w_sma,
price_8d_sma,
price_13d_sma,
price_21d_sma,
price_1m_sma,
price_34d_sma,
price_55d_sma,
price_89d_sma,
price_111d_sma,
price_144d_sma,
price_1w_sma: import!("price_1w_sma"),
price_8d_sma: import!("price_8d_sma"),
price_13d_sma: import!("price_13d_sma"),
price_21d_sma: import!("price_21d_sma"),
price_1m_sma: import!("price_1m_sma"),
price_34d_sma: import!("price_34d_sma"),
price_55d_sma: import!("price_55d_sma"),
price_89d_sma: import!("price_89d_sma"),
price_111d_sma: import!("price_111d_sma"),
price_144d_sma: import!("price_144d_sma"),
price_200d_sma,
price_350d_sma,
price_1y_sma,
price_2y_sma,
price_200w_sma,
price_4y_sma,
price_1y_sma: import!("price_1y_sma"),
price_2y_sma: import!("price_2y_sma"),
price_200w_sma: import!("price_200w_sma"),
price_4y_sma: import!("price_4y_sma"),
price_1w_ema,
price_8d_ema,
price_12d_ema,
price_13d_ema,
price_21d_ema,
price_26d_ema,
price_1m_ema,
price_34d_ema,
price_55d_ema,
price_89d_ema,
price_144d_ema,
price_200d_ema,
price_1y_ema,
price_2y_ema,
price_200w_ema,
price_4y_ema,
price_1w_ema: import!("price_1w_ema"),
price_8d_ema: import!("price_8d_ema"),
price_12d_ema: import!("price_12d_ema"),
price_13d_ema: import!("price_13d_ema"),
price_21d_ema: import!("price_21d_ema"),
price_26d_ema: import!("price_26d_ema"),
price_1m_ema: import!("price_1m_ema"),
price_34d_ema: import!("price_34d_ema"),
price_55d_ema: import!("price_55d_ema"),
price_89d_ema: import!("price_89d_ema"),
price_144d_ema: import!("price_144d_ema"),
price_200d_ema: import!("price_200d_ema"),
price_1y_ema: import!("price_1y_ema"),
price_2y_ema: import!("price_2y_ema"),
price_200w_ema: import!("price_200w_ema"),
price_4y_ema: import!("price_4y_ema"),
price_200d_sma_x2_4,
price_200d_sma_x0_8,

View File

@@ -2,44 +2,44 @@ use brk_traversable::Traversable;
use brk_types::Dollars;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightRatio, LazyFromHeightLast, Price};
use crate::internal::{ComputedFromHeightPriceWithRatioExtended, LazyFromHeightLast, Price};
/// Simple and exponential moving average metrics
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub price_1w_sma: ComputedFromHeightRatio<M>,
pub price_8d_sma: ComputedFromHeightRatio<M>,
pub price_13d_sma: ComputedFromHeightRatio<M>,
pub price_21d_sma: ComputedFromHeightRatio<M>,
pub price_1m_sma: ComputedFromHeightRatio<M>,
pub price_34d_sma: ComputedFromHeightRatio<M>,
pub price_55d_sma: ComputedFromHeightRatio<M>,
pub price_89d_sma: ComputedFromHeightRatio<M>,
pub price_111d_sma: ComputedFromHeightRatio<M>,
pub price_144d_sma: ComputedFromHeightRatio<M>,
pub price_200d_sma: ComputedFromHeightRatio<M>,
pub price_350d_sma: ComputedFromHeightRatio<M>,
pub price_1y_sma: ComputedFromHeightRatio<M>,
pub price_2y_sma: ComputedFromHeightRatio<M>,
pub price_200w_sma: ComputedFromHeightRatio<M>,
pub price_4y_sma: ComputedFromHeightRatio<M>,
pub price_1w_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_8d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_13d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_21d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_1m_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_34d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_55d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_89d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_111d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_144d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_200d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_350d_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_1y_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_2y_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_200w_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_4y_sma: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_1w_ema: ComputedFromHeightRatio<M>,
pub price_8d_ema: ComputedFromHeightRatio<M>,
pub price_12d_ema: ComputedFromHeightRatio<M>,
pub price_13d_ema: ComputedFromHeightRatio<M>,
pub price_21d_ema: ComputedFromHeightRatio<M>,
pub price_26d_ema: ComputedFromHeightRatio<M>,
pub price_1m_ema: ComputedFromHeightRatio<M>,
pub price_34d_ema: ComputedFromHeightRatio<M>,
pub price_55d_ema: ComputedFromHeightRatio<M>,
pub price_89d_ema: ComputedFromHeightRatio<M>,
pub price_144d_ema: ComputedFromHeightRatio<M>,
pub price_200d_ema: ComputedFromHeightRatio<M>,
pub price_1y_ema: ComputedFromHeightRatio<M>,
pub price_2y_ema: ComputedFromHeightRatio<M>,
pub price_200w_ema: ComputedFromHeightRatio<M>,
pub price_4y_ema: ComputedFromHeightRatio<M>,
pub price_1w_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_8d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_12d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_13d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_21d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_26d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_1m_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_34d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_55d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_89d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_144d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_200d_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_1y_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_2y_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_200w_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_4y_ema: ComputedFromHeightPriceWithRatioExtended<M>,
pub price_200d_sma_x2_4: Price<LazyFromHeightLast<Dollars, Dollars>>,
pub price_200d_sma_x0_8: Price<LazyFromHeightLast<Dollars, Dollars>>,

View File

@@ -6,10 +6,8 @@ use super::super::lookback::ByLookbackPeriod;
use super::Vecs;
use crate::{
indexes,
internal::{
ComputedFromHeightLast, ComputedFromHeightStdDev,
StandardDeviationVecsOptions,
},
internal::ComputedFromHeightLast,
internal::ComputedFromHeightStdDev,
market::dca::ByDcaCagr,
};
@@ -41,7 +39,6 @@ impl Vecs {
7,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
let _1d_returns_1m_sd = ComputedFromHeightStdDev::forced_import(
db,
@@ -49,7 +46,6 @@ impl Vecs {
30,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
let _1d_returns_1y_sd = ComputedFromHeightStdDev::forced_import(
db,
@@ -57,7 +53,6 @@ impl Vecs {
365,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
let downside_returns = EagerVec::forced_import(db, "downside_returns", version)?;
@@ -67,7 +62,6 @@ impl Vecs {
7,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
let downside_1m_sd = ComputedFromHeightStdDev::forced_import(
db,
@@ -75,7 +69,6 @@ impl Vecs {
30,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
let downside_1y_sd = ComputedFromHeightStdDev::forced_import(
db,
@@ -83,7 +76,6 @@ impl Vecs {
365,
version + v1,
indexes,
StandardDeviationVecsOptions::default(),
)?;
Ok(Self {

View File

@@ -60,7 +60,7 @@ impl Vecs {
exit,
)?;
let fee_sats_source = transactions_fees.fee.sum_cumulative.sum.inner();
let fee_sats_source = transactions_fees.fee.height.sum_cumulative.sum.inner();
let fee_usd_source = &transactions_fees.fee_usd_sum;
self.fee_sum.compute_rolling_sum(
starting_indexes.height,
@@ -74,7 +74,7 @@ impl Vecs {
vec.compute_transform2(
starting_indexes.height,
&self.coinbase.sats.height,
transactions_fees.fee.sum_cumulative.sum.inner(),
transactions_fees.fee.height.sum_cumulative.sum.inner(),
|(height, coinbase, fees, ..)| {
(
height,
@@ -107,7 +107,7 @@ impl Vecs {
// All-time cumulative fee dominance
self.fee_dominance.height.compute_percentage(
starting_indexes.height,
transactions_fees.fee.sum_cumulative.cumulative.inner(),
transactions_fees.fee.height.sum_cumulative.cumulative.inner(),
&self.coinbase.sats.cumulative.height,
exit,
)?;

View File

@@ -18,26 +18,26 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.total_count.compute_with_skip(
starting_indexes.height,
&indexes.txindex.output_count,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
0,
)?;
let window_starts = blocks.count.window_starts();
self.total_count_rolling.compute(
self.total_count.compute(
starting_indexes.height,
&window_starts,
self.total_count.sum_cumulative.sum.inner(),
exit,
|full| {
full.compute_with_skip(
starting_indexes.height,
&indexes.txindex.output_count,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
0,
)
},
)?;
self.utxo_count.height.compute_transform3(
starting_indexes.height,
&*self.total_count.sum_cumulative.cumulative,
&*self.total_count.height.sum_cumulative.cumulative,
&*inputs_count.height.sum_cumulative.cumulative,
&scripts_count.opreturn.cumulative.height,
|(h, output_count, input_count, opreturn_count, ..)| {

View File

@@ -5,14 +5,13 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, Full, RollingFull},
internal::{ComputedFromHeightFull, ComputedFromHeightLast},
};
impl Vecs {
pub(crate) fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
total_count: Full::forced_import(db, "output_count", version)?,
total_count_rolling: RollingFull::forced_import(db, "output_count", version, indexes)?,
total_count: ComputedFromHeightFull::forced_import(db, "output_count", version, indexes)?,
utxo_count: ComputedFromHeightLast::forced_import(db, "exact_utxo_count", version, indexes)?,
})
}

View File

@@ -1,12 +1,11 @@
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64};
use brk_types::StoredU64;
use vecdb::{Rw, StorageMode};
use crate::internal::{ComputedFromHeightLast, Full, RollingFull};
use crate::internal::{ComputedFromHeightFull, ComputedFromHeightLast};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub total_count: Full<Height, StoredU64, M>,
pub total_count_rolling: RollingFull<StoredU64, M>,
pub total_count: ComputedFromHeightFull<StoredU64, M>,
pub utxo_count: ComputedFromHeightLast<StoredU64, M>,
}

View File

@@ -238,7 +238,7 @@ impl Vecs {
Ok(vec.compute_transform2(
starting_indexes.height,
&self.blocks_mined.height,
&*transactions.fees.fee.sum_cumulative.sum,
&*transactions.fees.fee.height.sum_cumulative.sum,
|(h, mask, val, ..)| (h, MaskSats::apply(mask, val)),
exit,
)?)

View File

@@ -144,7 +144,7 @@ impl Vecs {
self.taproot_adoption.height.compute_transform2(
starting_indexes.height,
&self.p2tr.height,
&outputs_count.total_count.sum_cumulative.sum.0,
&outputs_count.total_count.height.sum_cumulative.sum.0,
|(h, p2tr, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*p2tr as f64 / *total as f64)
@@ -159,7 +159,7 @@ impl Vecs {
self.segwit_adoption.height.compute_transform2(
starting_indexes.height,
&self.segwit.height,
&outputs_count.total_count.sum_cumulative.sum.0,
&outputs_count.total_count.height.sum_cumulative.sum.0,
|(h, segwit, total, ..)| {
let ratio = if *total > 0 {
StoredF32::from(*segwit as f64 / *total as f64)

View File

@@ -20,7 +20,7 @@ impl Vecs {
self.btc.height.compute_rolling_ratio(
starting_indexes.height,
&blocks.count.height_1y_ago,
&transactions.volume.sent_sum.sats.height,
&transactions.volume.sent_sum.sats,
&circulating_supply.sats.height,
exit,
)?;
@@ -29,7 +29,7 @@ impl Vecs {
self.usd.height.compute_rolling_ratio(
starting_indexes.height,
&blocks.count.height_1y_ago,
&transactions.volume.sent_sum.usd.height,
&transactions.volume.sent_sum.usd,
&circulating_supply.usd.height,
exit,
)?;

View File

@@ -5,11 +5,6 @@ use vecdb::{
WritableVec,
};
mod pricing;
// TODO: Re-export when Phase 3 (Pricing migration) is complete
// pub use pricing::{Priced, Pricing, Unpriced};
pub trait ComputeRollingMinFromStarts<I: VecIndex, T> {
fn compute_rolling_min_from_starts<A>(
&mut self,

View File

@@ -1,123 +0,0 @@
//! Compile-time type-state for price-dependent data.
//!
// TODO: Remove this once Phase 3 (Pricing migration) is complete
#![allow(dead_code)]
//!
//! This module provides the `Pricing` trait which enables compile-time
//! differentiation between priced and unpriced data variants. Instead of
//! using `Option<T>` for price-dependent fields, structs use `P: Pricing`
//! with associated types that are either concrete types (for `Priced`) or
//! `()` (for `Unpriced`).
//!
//! Benefits:
//! - LSP/autocomplete visibility: no Options cluttering suggestions
//! - Compile-time guarantees: cannot access price data on `Unpriced` variants
//! - Zero runtime overhead: `()` is a ZST (zero-sized type)
use brk_traversable::Traversable;
/// Type-state trait for price-dependent data.
///
/// Implements the type-state pattern using associated types:
/// - `Priced`: associated types resolve to concrete data types
/// - `Unpriced`: associated types resolve to `()`
///
/// Generic containers take `P: Pricing` and declare price-dependent fields
/// with these associated types, so unpriced variants drop the fields at
/// compile time (each resolves to the zero-sized `()`).
///
/// # Associated Types
///
/// | Type | Usage | Priced | Unpriced |
/// |------|-------|--------|----------|
/// | `Data` | Computer top-level | `PricingData` | `()` |
/// | `PriceRef<'a>` | Function params | `&price::Vecs` | `()` |
/// | `ComputedDollarsHeight` | Value wrappers (Height) | `ComputedFromHeight<Dollars>` | `()` |
/// | `ComputedDollarsDay1` | Value wrappers (Day1) | `ComputedVecsDate<Dollars>` | `()` |
/// | `StdDevBandsUsd` | StdDev USD bands | `StdDevBandsUsdData` | `()` |
/// | `RatioUsd` | Ratio USD variants | `RatioUsdData` | `()` |
/// | `BasePriced` | Base metrics | `BasePricedData` | `()` |
/// | `ExtendedPriced` | Extended metrics | `ExtendedPricedData` | `()` |
/// | `AdjustedPriced` | Adjusted metrics | `AdjustedPricedData` | `()` |
/// | `RelToAllPriced` | Rel-to-all metrics | `RelToAllPricedData` | `()` |
///
/// NOTE(review): the "Priced" column documents the *intended* Phase 3 types;
/// the current impls below still bind everything to `()` — confirm against
/// the migration plan before relying on the table.
pub trait Pricing: 'static + Clone + Send + Sync {
    // === Top-level ===

    /// Top-level pricing data - PricingData for Priced, () for Unpriced
    type Data: Clone + Send + Sync + Traversable;
    /// Reference to price vecs for import functions
    type PriceRef<'a>: Copy;

    // === Value wrappers (used in 20+ places) ===

    /// Computed dollars with Height index
    type ComputedDollarsHeight: Clone + Send + Sync + Traversable;
    /// Computed dollars with Day1 index
    type ComputedDollarsDay1: Clone + Send + Sync + Traversable;

    // === Specialized structs ===

    /// StdDev USD bands (13 fields grouped)
    type StdDevBandsUsd: Clone + Send + Sync + Traversable;
    /// Ratio USD data
    type RatioUsd: Clone + Send + Sync + Traversable;

    // === Distribution metrics ===

    /// Base-level priced metrics (realized + unrealized)
    type BasePriced: Clone + Send + Sync + Traversable;
    /// Extended-level priced metrics
    type ExtendedPriced: Clone + Send + Sync + Traversable;
    /// Adjusted metrics
    type AdjustedPriced: Clone + Send + Sync + Traversable;
    /// Dollar-based relative-to-all metrics
    type RelToAllPriced: Clone + Send + Sync + Traversable;
}
/// Marker type for priced data.
///
/// When `P = Priced`, all associated types resolve to their concrete
/// data types containing price-denominated values.
///
/// Zero-sized: used only as a type parameter, never instantiated at runtime.
#[derive(Clone, Copy, Default, Debug)]
pub struct Priced;
/// Marker type for unpriced data.
///
/// When `P = Unpriced`, all associated types resolve to `()`,
/// effectively removing those fields at compile time with zero overhead.
///
/// Zero-sized: used only as a type parameter, never instantiated at runtime.
#[derive(Clone, Copy, Default, Debug)]
pub struct Unpriced;
// Note: The actual type implementations for `Priced` and `Unpriced`
// will be added in Phase 3 when we migrate the concrete data types.
// For now, we provide placeholder implementations using () for all types
// to allow incremental migration.
impl Pricing for Priced {
    // Placeholder implementations - will be replaced with concrete types in Phase 3.
    // Until then this impl is intentionally identical to `Unpriced`; see the
    // `Pricing` trait docs for the concrete type each alias is meant to become.
    type Data = ();
    type PriceRef<'a> = ();
    type ComputedDollarsHeight = ();
    type ComputedDollarsDay1 = ();
    type StdDevBandsUsd = ();
    type RatioUsd = ();
    type BasePriced = ();
    type ExtendedPriced = ();
    type AdjustedPriced = ();
    type RelToAllPriced = ();
}
impl Pricing for Unpriced {
    // Unpriced data carries no price-denominated fields, so every associated
    // type resolves to the zero-sized unit type. This is the final form for
    // `Unpriced` (unlike the `Priced` impl, which is a Phase 3 placeholder).
    type Data = ();
    type PriceRef<'a> = ();
    type ComputedDollarsHeight = ();
    type ComputedDollarsDay1 = ();
    type StdDevBandsUsd = ();
    type RatioUsd = ();
    type BasePriced = ();
    type ExtendedPriced = ();
    type AdjustedPriced = ();
    type RelToAllPriced = ();
}

View File

@@ -60,13 +60,21 @@ impl Vecs {
)?;
// Skip coinbase (first tx per block) since it has no fee
self.fee.compute_with_skip(
let window_starts = blocks.count.window_starts();
self.fee.compute(
starting_indexes.height,
&self.fee_txindex,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&window_starts,
exit,
1,
|full| {
full.compute_with_skip(
starting_indexes.height,
&self.fee_txindex,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
exit,
1,
)
},
)?;
// Skip coinbase (first tx per block) since it has no feerate
@@ -82,21 +90,12 @@ impl Vecs {
// Compute fee USD sum per block: price * Bitcoin::from(sats)
self.fee_usd_sum.compute_transform2(
starting_indexes.height,
self.fee.sum_cumulative.sum.inner(),
self.fee.height.sum_cumulative.sum.inner(),
&prices.usd.price,
|(h, sats, price, ..)| (h, price * Bitcoin::from(sats)),
exit,
)?;
// Rolling fee stats (from per-block sum)
let window_starts = blocks.count.window_starts();
self.fee_rolling.compute(
starting_indexes.height,
&window_starts,
self.fee.sum_cumulative.sum.inner(),
exit,
)?;
// Rolling fee rate distribution (from per-block average)
self.fee_rate_rolling.compute_distribution(
starting_indexes.height,

View File

@@ -5,7 +5,7 @@ use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{Distribution, Full, RollingDistribution, RollingFull},
internal::{ComputedFromHeightFull, Distribution, RollingDistribution},
};
/// Bump this when fee/feerate aggregation logic changes (e.g., skip coinbase).
@@ -22,9 +22,8 @@ impl Vecs {
input_value: EagerVec::forced_import(db, "input_value", version)?,
output_value: EagerVec::forced_import(db, "output_value", version)?,
fee_txindex: EagerVec::forced_import(db, "fee", v)?,
fee: Full::forced_import(db, "fee", v)?,
fee: ComputedFromHeightFull::forced_import(db, "fee", v, indexes)?,
fee_usd_sum: EagerVec::forced_import(db, "fee_usd_sum", v)?,
fee_rolling: RollingFull::forced_import(db, "fee", v, indexes)?,
fee_rate_txindex: EagerVec::forced_import(db, "fee_rate", v)?,
fee_rate: Distribution::forced_import(db, "fee_rate", v)?,
fee_rate_rolling: RollingDistribution::forced_import(db, "fee_rate", v, indexes)?,

View File

@@ -2,16 +2,15 @@ use brk_traversable::Traversable;
use brk_types::{Dollars, FeeRate, Height, Sats, TxIndex};
use vecdb::{EagerVec, PcoVec, Rw, StorageMode};
use crate::internal::{Distribution, Full, RollingDistribution, RollingFull};
use crate::internal::{ComputedFromHeightFull, Distribution, RollingDistribution};
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
pub input_value: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub output_value: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub fee_txindex: M::Stored<EagerVec<PcoVec<TxIndex, Sats>>>,
pub fee: Full<Height, Sats, M>,
pub fee: ComputedFromHeightFull<Sats, M>,
pub fee_usd_sum: M::Stored<EagerVec<PcoVec<Height, Dollars>>>,
pub fee_rolling: RollingFull<Sats, M>,
pub fee_rate_txindex: M::Stored<EagerVec<PcoVec<TxIndex, FeeRate>>>,
pub fee_rate: Distribution<Height, FeeRate, M>,
pub fee_rate_rolling: RollingDistribution<FeeRate, M>,

View File

@@ -22,61 +22,56 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.sent_sum.sats.height.compute_filtered_sum_from_indexes(
let window_starts = blocks.count.window_starts();
self.sent_sum.compute(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&fees_vecs.input_value,
|sats| !sats.is_max(),
&window_starts,
prices,
exit,
|sats_vec| {
Ok(sats_vec.compute_filtered_sum_from_indexes(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&fees_vecs.input_value,
|sats| !sats.is_max(),
exit,
)?)
},
)?;
self.received_sum.sats.height.compute_sum_from_indexes(
self.received_sum.compute(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&fees_vecs.output_value,
&window_starts,
prices,
exit,
|sats_vec| {
Ok(sats_vec.compute_sum_from_indexes(
starting_indexes.height,
&indexer.vecs.transactions.first_txindex,
&indexes.height.txindex_count,
&fees_vecs.output_value,
exit,
)?)
},
)?;
// Compute USD from sats × price
self.sent_sum
.compute(prices, starting_indexes.height, exit)?;
self.received_sum
.compute(prices, starting_indexes.height, exit)?;
// Annualized volume: rolling 1y sum of per-block sent volume
self.annualized_volume.sats.height.compute_rolling_sum(
starting_indexes.height,
&blocks.count.height_1y_ago,
&self.sent_sum.sats.height,
&self.sent_sum.sats,
exit,
)?;
self.annualized_volume
.compute(prices, starting_indexes.height, exit)?;
// Rolling sums for sent and received
let window_starts = blocks.count.window_starts();
self.sent_sum_rolling.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.sent_sum.sats.height,
&self.sent_sum.usd.height,
exit,
)?;
self.received_sum_rolling.compute_rolling_sum(
starting_indexes.height,
&window_starts,
&self.received_sum.sats.height,
&self.received_sum.usd.height,
exit,
)?;
// tx_per_sec: per-block tx count / block interval
self.tx_per_sec.height.compute_transform2(
starting_indexes.height,
&count_vecs.tx_count.height,
&blocks.interval.interval.height,
&blocks.interval.height,
|(h, tx_count, interval, ..)| {
let interval_f64 = f64::from(*interval);
let per_sec = if interval_f64 > 0.0 {
@@ -93,7 +88,7 @@ impl Vecs {
self.inputs_per_sec.height.compute_transform2(
starting_indexes.height,
&inputs_count.height.sum_cumulative.sum.0,
&blocks.interval.interval.height,
&blocks.interval.height,
|(h, input_count, interval, ..)| {
let interval_f64 = f64::from(*interval);
let per_sec = if interval_f64 > 0.0 {
@@ -109,8 +104,8 @@ impl Vecs {
// outputs_per_sec: per-block output count / block interval
self.outputs_per_sec.height.compute_transform2(
starting_indexes.height,
&outputs_count.total_count.sum_cumulative.sum.0,
&blocks.interval.interval.height,
&outputs_count.total_count.height.sum_cumulative.sum.0,
&blocks.interval.height,
|(h, output_count, interval, ..)| {
let interval_f64 = f64::from(*interval);
let per_sec = if interval_f64 > 0.0 {

View File

@@ -5,7 +5,7 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::{ComputedFromHeightLast, StoredValueRollingWindows, ValueFromHeightLast},
internal::{ComputedFromHeightLast, ValueFromHeightLast, ValueFromHeightLastRolling},
};
impl Vecs {
@@ -16,16 +16,10 @@ impl Vecs {
) -> Result<Self> {
let v2 = Version::TWO;
Ok(Self {
sent_sum: ValueFromHeightLast::forced_import(
sent_sum: ValueFromHeightLastRolling::forced_import(
db, "sent_sum", version, indexes,
)?,
sent_sum_rolling: StoredValueRollingWindows::forced_import(
db, "sent_sum", version, indexes,
)?,
received_sum: ValueFromHeightLast::forced_import(
db, "received_sum", version, indexes,
)?,
received_sum_rolling: StoredValueRollingWindows::forced_import(
received_sum: ValueFromHeightLastRolling::forced_import(
db, "received_sum", version, indexes,
)?,
annualized_volume: ValueFromHeightLast::forced_import(

View File

@@ -3,18 +3,16 @@ use brk_types::StoredF32;
use vecdb::{Rw, StorageMode};
use crate::internal::{
ComputedFromHeightLast, StoredValueRollingWindows, ValueFromHeightLast,
ComputedFromHeightLast, ValueFromHeightLast, ValueFromHeightLastRolling,
};
/// Volume metrics
#[derive(Traversable)]
pub struct Vecs<M: StorageMode = Rw> {
#[traversable(flatten)]
pub sent_sum: ValueFromHeightLast<M>,
pub sent_sum_rolling: StoredValueRollingWindows<M>,
pub sent_sum: ValueFromHeightLastRolling<M>,
#[traversable(flatten)]
pub received_sum: ValueFromHeightLast<M>,
pub received_sum_rolling: StoredValueRollingWindows<M>,
pub received_sum: ValueFromHeightLastRolling<M>,
#[traversable(flatten)]
pub annualized_volume: ValueFromHeightLast<M>,
pub tx_per_sec: ComputedFromHeightLast<StoredF32, M>,

View File

@@ -15,7 +15,7 @@ impl Query {
let iter = Day1Iter::new(computer, start, current_height.to_usize());
let cumulative = &computer.transactions.fees.fee.sum_cumulative.cumulative;
let cumulative = &computer.transactions.fees.fee.height.sum_cumulative.cumulative;
let first_height = &computer.indexes.day1.first_height;
Ok(iter.collect(|di, ts, h| {

View File

@@ -13,7 +13,7 @@ impl Query {
let start_block = Height::from(current_height.to_usize().saturating_sub(block_count - 1));
let coinbase_vec = &computer.mining.rewards.coinbase.sats.height;
let fee_vec = &computer.transactions.fees.fee.sum_cumulative.sum.0;
let fee_vec = &computer.transactions.fees.fee.height.sum_cumulative.sum.0;
let tx_count_vec = &computer.transactions.count.tx_count.height;
let start = start_block.to_usize();

View File

@@ -1,68 +0,0 @@
# Git
.git
.gitignore
# Build artifacts
target/
# Development files
.vscode/
.idea/
*.swp
*.swo
*~
# OS files
.DS_Store
Thumbs.db
# Docker files
Dockerfile
docker-compose.yml
.dockerignore
docker-build.sh
# Documentation
docs/
LICENSE
# Keep README.md for build process
!README.md
# CI/CD
.github/
# Logs and temporary files
*.log
tmp/
temp/
# BRK runtime data (should be in volumes)
.brk/
# Example and test data
examples/
tests/
*.test
*.example
# Node modules (if any frontend deps)
node_modules/
# Python cache (if any)
__pycache__/
*.pyc
*.pyo
# Rust workspace cache
**/*.rs.bk
# macOS
.AppleDouble
.LSOverride
# Windows
Desktop.ini
ehthumbs.db
# Linux
.directory

View File

@@ -1,29 +1,4 @@
# Bitcoin Core data directory
# This should point to your Bitcoin Core data directory
BITCOIN_DATA_DIR=/path/to/bitcoin
# Bitcoin Core RPC configuration
# If running Bitcoin Core on the same host (not in Docker), use host.docker.internal on macOS/Windows
# or the host's IP address on Linux
BTC_RPC_HOST=localhost
BTC_RPC_PORT=8332
# Use either cookie file authentication (recommended) or username/password
# Cookie file is automatically created by Bitcoin Core
# If using username/password, comment out RPCCOOKIEFILE in docker-compose.yml
# BTC_RPC_USER=your_rpc_username
# BTC_RPC_PASSWORD=your_rpc_password
# Enable price fetching from exchanges
BRK_FETCH=true
# BRK data storage options
# Option 1: Use a Docker named volume (default, recommended)
# This is the default configuration - no changes needed.
# Leave this commented to use the default named volume
# BRK_DATA_VOLUME=brk-data
# Option 2: Use a bind mount to a local directory
# Uncomment and set this to use a specific directory on your host
# Also uncomment the corresponding line in docker-compose.yml
# BRK_DATA_DIR=/path/to/brk/data

View File

@@ -1,57 +1,28 @@
# *************
# Builder
# *************
FROM rustlang/rust:nightly AS builder
FROM rust:1.93-bookworm AS builder
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
build-essential \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y pkg-config libssl-dev && rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Copy all source files
COPY . .
COPY Cargo.toml Cargo.lock rust-toolchain.toml ./
COPY crates crates
# Build the application
RUN cargo build --release --locked
# *************
# Runtime
# *************
FROM debian:bookworm-slim
# Install runtime dependencies
RUN apt-get update && apt-get install -y \
ca-certificates \
openssl \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y ca-certificates curl && rm -rf /var/lib/apt/lists/*
# Create non-root user
RUN useradd -m -s /bin/bash brk
# Copy binary from builder
COPY --from=builder /app/target/release/brk /usr/local/bin/brk
# Copy websites directory
COPY --from=builder /app/websites /app/websites
# Set ownership
RUN chown -R brk:brk /app
# Switch to non-root user
USER brk
# Create directories for BRK data
RUN mkdir -p /home/brk/.brk
# Expose API port
EXPOSE 3110
# Set working directory
WORKDIR /home/brk
# Default entrypoint
ENTRYPOINT ["brk"]

View File

@@ -1,13 +1,10 @@
# Docker Setup for BRK
This guide explains how to run BRK using Docker and Docker Compose.
## Prerequisites
- Docker Engine (with buildx support)
- Docker Compose v2
- A running Bitcoin Core node with RPC enabled
- Access to Bitcoin Core's blocks directory
## Quick Start
@@ -22,232 +19,75 @@ This guide explains how to run BRK using Docker and Docker Compose.
docker compose -f docker/docker-compose.yml up -d
```
Or from the docker directory:
```bash
cd docker && docker compose up -d
```
3. **Access BRK**
- Web interface: http://localhost:7070
- API: http://localhost:7070/api
- Health check: http://localhost:7070/health
## Architecture
BRK runs as a single container that includes both the blockchain processor and API server. This simplified architecture:
- Ensures processor and server are always in sync
- Simplifies deployment and monitoring
- Uses a single shared data directory
```bash
# Start BRK
docker compose -f docker/docker-compose.yml up
# Or run in background
docker compose -f docker/docker-compose.yml up -d
# Alternative: from docker directory
cd docker && docker compose up -d
```
## Configuration
All configuration is passed via CLI args in `docker-compose.yml`. Edit the `command:` section to change settings.
### Environment Variables
These variables are interpolated into `docker-compose.yml` at startup:
| Variable | Description | Default |
|----------|-------------|---------|
| `BITCOIN_DATA_DIR` | Path to Bitcoin Core data directory | - |
| `BTC_RPC_HOST` | Bitcoin Core RPC host | `localhost` |
| `BTC_RPC_PORT` | Bitcoin Core RPC port | `8332` |
| `BTC_RPC_USER` | Bitcoin RPC username | - |
| `BTC_RPC_PASSWORD` | Bitcoin RPC password | - |
| `BTC_RPC_USER` | Bitcoin RPC username | `bitcoin` |
| `BTC_RPC_PASSWORD` | Bitcoin RPC password | `bitcoin` |
| `BRK_DATA_VOLUME` | Docker volume name for BRK data | `brk-data` |
### Example .env File
```env
# Bitcoin Core paths
BITCOIN_DATA_DIR=/path/to/bitcoin/data
BRK_DATA_VOLUME=brk-data
# Bitcoin RPC configuration
BTC_RPC_HOST=localhost
BTC_RPC_PORT=8332
BTC_RPC_USER=your_username
BTC_RPC_PASSWORD=your_password
# BRK settings
```
### Connecting to Bitcoin Core
#### Option 1: Cookie File Authentication (Recommended)
BRK will automatically use the `.cookie` file from your Bitcoin Core directory.
**Cookie File Authentication (Recommended)**
Uncomment the `--rpccookiefile` lines in `docker-compose.yml` and remove `--rpcuser`/`--rpcpassword`.
#### Option 2: Username/Password
**Username/Password**
Set `BTC_RPC_USER` and `BTC_RPC_PASSWORD` in your `docker/.env` file.
#### Network Connectivity
- **Same host**:
- If Bitcoin Core is running natively (not in Docker): Use `host.docker.internal` on macOS/Windows or `172.17.0.1` on Linux
- If Bitcoin Core is also in Docker: Use the service name or container IP
**Network Connectivity**
- **Same host (Bitcoin Core running natively)**: Use `host.docker.internal` on macOS/Windows or `172.17.0.1` on Linux
- **Same host (Bitcoin Core in Docker)**: Use the service name or container IP
- **Remote host**: Use the actual IP address or hostname
## Building the Image
## Building
### Using Docker Compose
```bash
docker compose -f docker/docker-compose.yml build
```
### Or: Using the Docker Build Script
```bash
# Build with default settings
./docker/docker-build.sh
## Data Storage
# Build with custom tag
./docker/docker-build.sh --tag v1.0.0
```
### Named Volume (Default)
Uses a Docker-managed volume called `brk-data`.
## Volumes and Data Storage
BRK supports two options for storing its data:
### Option 1: Docker Named Volume (Default)
Uses a Docker-managed named volume called `brk-data`. This is the recommended approach for most users.
### Option 2: Bind Mount
Maps a specific directory on your host to the container's data directory.
This may be desirable if you want to use a specific storage location for BRK data (e.g. a different disk).
1. Set `BRK_DATA_DIR` in your `docker/.env` file to your desired host directory
2. In `docker/docker-compose.yml`, comment out the named volume line and uncomment the bind mount line
```bash
# In docker/.env file
BRK_DATA_DIR=/home/user/brk-data
```
```bash
# In docker/docker-compose.yml
# Comment out:
- ${BRK_DATA_VOLUME:-brk-data}:/home/brk/.brk
# Uncomment:
# - ${BRK_DATA_DIR:-./brk-data}:/home/brk/.brk
```
Can also remove or comment out the `volumes` section from the docker/docker-compose.yml file (right at the bottom):
```bash
# Comment out:
volumes:
brk-data:
driver: local
```
## Health Checks
The container includes a combined health check that verifies:
- The BRK process is running
- The API server is responding (port 7070 externally, 3110 internally)
### Bind Mount
1. Set `BRK_DATA_DIR` in `docker/.env`
2. In `docker-compose.yml`, comment out the named volume line and uncomment the bind mount line
3. Remove the `volumes:` section at the bottom of `docker-compose.yml`
## Monitoring
### Check Container Status
```bash
# View running container
docker compose -f docker/docker-compose.yml ps
# Check health status
docker compose -f docker/docker-compose.yml ps --format "table {{.Service}}\t{{.Status}}\t{{.Health}}"
```
### View Logs
```bash
# View logs
docker compose -f docker/docker-compose.yml logs
# Follow logs in real-time
docker compose -f docker/docker-compose.yml logs -f
```
## Troubleshooting
### Server Issues
#### Server returns empty data
- This is normal if the processor hasn't indexed any blocks yet
- The server component will serve data as the processor indexes blocks
#### Server won't start
- Check Docker Compose logs: `docker compose -f docker/docker-compose.yml logs`
- Verify health endpoint: `curl http://localhost:7070/health`
- Ensure no port conflicts on 7070
### Processor Issues
#### Cannot connect to Bitcoin Core
### Cannot connect to Bitcoin Core
1. Ensure Bitcoin Core is running with `-server=1`
2. Check RPC credentials are correct
3. Verify network connectivity from container
4. Test RPC connection: `docker compose -f docker/docker-compose.yml exec brk brk --help`
#### Processor fails to start
- Verify Bitcoin RPC credentials in `docker/.env`
- Ensure Bitcoin Core is running and accessible
- Check Bitcoin data directory permissions (should be readable by UID 1000)
### Permission denied errors
Ensure the Bitcoin data directory is readable by the container user (UID 1000).
### Performance Issues
## Security
#### Slow indexing
- Ensure adequate disk space for indexed data, and fast storage — a disk throughput of around 3 GB/s is recommended
- Monitor memory usage during initial indexing
#### Out of memory
- Increase Docker's memory limit
- Monitor container resource usage: `docker stats`
### Permission Issues
#### Permission denied errors
- Ensure the Bitcoin data directory is readable by the container user (UID 1000)
- Check that volumes are properly mounted
- Verify file ownership: `ls -la $BITCOIN_DATA_DIR`
### Network Issues
#### Cannot access web interface
- Verify port mapping: `docker compose -f docker/docker-compose.yml ps`
- Check firewall settings
- Ensure no other services are using port 7070
## Security Considerations
- Bitcoin data is mounted read-only for safety
- Bitcoin data is mounted read-only
- BRK runs as non-root user inside container
- Only necessary ports are exposed
## Backup and Recovery
### Backing Up BRK Data
```bash
# Create backup of named volume
docker run --rm -v brk_brk-data:/source -v "$(pwd)":/backup alpine tar czf /backup/brk-backup.tar.gz -C /source .
# Or if using bind mount
tar czf brk-backup.tar.gz -C "$BRK_DATA_DIR" .
```
### Restoring BRK Data
```bash
# Stop container
docker compose -f docker/docker-compose.yml down
# Restore from backup (named volume)
docker run --rm -v brk_brk-data:/target -v "$(pwd)":/backup alpine tar xzf /backup/brk-backup.tar.gz -C /target
# Start container
docker compose -f docker/docker-compose.yml up -d
```

View File

@@ -1,77 +0,0 @@
#!/bin/bash
#
# docker-build.sh — build the BRK Docker image.
#
# Usage: docker-build.sh [-t|--tag TAG] [-h|--help]
#
# Paths are resolved from this script's own location (the docker/ directory),
# so the build works from ANY working directory. The previous version computed
# SCRIPT_DIR/PROJECT_ROOT but never used them, instead guessing from
# `basename "$PWD"` — which broke when invoked from anywhere other than the
# project root or a directory literally named "docker".

# -e: abort on error; -u: abort on unset variables; -o pipefail: a failing
# stage in a pipeline fails the pipeline.
set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Default values
IMAGE_NAME="brk"
TAG="latest"

# Helpers for colored status output.
print_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}

print_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--tag)
            # ${2:?...} aborts with a clear message instead of silently
            # producing an empty tag when the value is missing.
            TAG="${2:?--tag requires a value}"
            shift 2
            ;;
        -h|--help)
            echo "Usage: $0 [OPTIONS]"
            echo ""
            echo "Options:"
            echo "  -t, --tag TAG    Tag for the image (default: latest)"
            echo "  -h, --help       Show this help message"
            exit 0
            ;;
        *)
            print_error "Unknown option: $1"
            exit 1
            ;;
    esac
done

# Resolve the Dockerfile and build context from the script's location rather
# than from $PWD: the Dockerfile lives next to this script, and the project
# root (its parent) is the build context.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
DOCKERFILE_PATH="$SCRIPT_DIR/Dockerfile"
BUILD_CONTEXT="$PROJECT_ROOT"

# Build the image
print_info "Building BRK Docker image..."
print_info "Image: ${IMAGE_NAME}:${TAG}"

# Execute the build
if docker build -f "$DOCKERFILE_PATH" -t "${IMAGE_NAME}:${TAG}" "$BUILD_CONTEXT"; then
    print_info "Build completed successfully!"
    print_info "Image built as ${IMAGE_NAME}:${TAG}"
else
    print_error "Build failed!"
    exit 1
fi

View File

@@ -1,5 +1,3 @@
# BRK single-container Docker Compose configuration
name: brk
services:
@@ -11,36 +9,17 @@ services:
container_name: brk
restart: unless-stopped
ports:
- 7070:3110 # Map host port 7070 to container port 3110
- 7070:3110
volumes:
# Bitcoin Core data directory (read-only)
# For access to raw block data
- ${BITCOIN_DATA_DIR:-/path/to/bitcoin}:/bitcoin:ro
# BRK data directory for outputs and state
# Option 1: Use a named volume (default)
- ${BRK_DATA_VOLUME:-brk-data}:/home/brk/.brk
# Option 2: Use a bind mount (uncomment and set BRK_DATA_DIR in .env)
# Bind mount alternative (uncomment and set BRK_DATA_DIR in .env):
# - ${BRK_DATA_DIR:-./brk-data}:/home/brk/.brk
environment:
# Bitcoin Core configuration
- BITCOINDIR=/bitcoin
- BLOCKSDIR=/bitcoin/blocks
# RPC configuration (required for processor)
- RPCCONNECT=${BTC_RPC_HOST:-localhost}
- RPCPORT=${BTC_RPC_PORT:-8332}
# - RPCCOOKIEFILE=/bitcoin/.cookie
# Username/password authentication
- RPCUSER=${BTC_RPC_USER}
- RPCPASSWORD=${BTC_RPC_PASSWORD}
# BRK configuration
- BRKDIR=/home/brk/.brk
- FETCH=${BRK_FETCH:-true}
command:
- --bitcoindir
- /bitcoin
- --blocksdir
- /bitcoin/blocks
- --brkdir
- /home/brk/.brk
- --rpcconnect
@@ -49,8 +28,11 @@ services:
- "${BTC_RPC_USER:-bitcoin}"
- --rpcpassword
- "${BTC_RPC_PASSWORD:-bitcoin}"
# Cookie file alternative (uncomment and remove rpcuser/rpcpassword above):
# - --rpccookiefile
# - /bitcoin/.cookie
healthcheck:
test: ["CMD", "sh", "-c", "pgrep -f brk && nc -z localhost 3110"]
test: ["CMD", "curl", "-sf", "http://localhost:3110/health"]
interval: 30s
timeout: 10s
retries: 3