global: snapshot part 0

This commit is contained in:
nym21
2026-03-19 22:21:23 +01:00
parent 2ce6a7cee2
commit 19bd17566f
12 changed files with 121 additions and 186 deletions

View File

@@ -17,6 +17,12 @@ pub struct UnrealizedSentiment<M: StorageMode = Rw> {
pub net: FiatPerBlock<CentsSigned, M>,
}
/// Market value of the circulating supply, split by unrealized profit/loss cohort.
/// NOTE(review): per the compute code added in this commit, values appear to be
/// stored in whole Cents (supply_sats × spot / ONE_BTC, i.e. already divided by
/// ONE_BTC) rather than raw CentsSats — confirm against FiatPerBlock conventions.
#[derive(Traversable)]
pub struct UnrealizedInvestedCapital<M: StorageMode = Rw> {
/// Market value of the supply currently held in profit (cost basis below spot).
pub in_profit: FiatPerBlock<Cents, M>,
/// Market value of the supply currently held in loss (cost basis above spot).
pub in_loss: FiatPerBlock<Cents, M>,
}
#[derive(Deref, DerefMut, Traversable)]
pub struct UnrealizedFull<M: StorageMode = Rw> {
#[deref]
@@ -25,6 +31,7 @@ pub struct UnrealizedFull<M: StorageMode = Rw> {
pub inner: UnrealizedBase<M>,
pub gross_pnl: FiatPerBlock<Cents, M>,
pub invested_capital: UnrealizedInvestedCapital<M>,
pub sentiment: UnrealizedSentiment<M>,
}
@@ -42,9 +49,15 @@ impl UnrealizedFull {
net: cfg.import("net_sentiment", Version::ONE)?,
};
let invested_capital = UnrealizedInvestedCapital {
in_profit: cfg.import("invested_capital_in_profit", v0)?,
in_loss: cfg.import("invested_capital_in_loss", v0)?,
};
Ok(Self {
inner,
gross_pnl,
invested_capital,
sentiment,
})
}
@@ -57,6 +70,8 @@ impl UnrealizedFull {
pub(crate) fn collect_vecs_mut(&mut self) -> Vec<&mut dyn AnyStoredVec> {
let mut vecs = self.inner.collect_vecs_mut();
vecs.push(&mut self.gross_pnl.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital.in_profit.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.invested_capital.in_loss.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.sentiment.pain_index.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.sentiment.greed_index.cents.height as &mut dyn AnyStoredVec);
vecs.push(&mut self.sentiment.net.cents.height as &mut dyn AnyStoredVec);
@@ -80,7 +95,33 @@ impl UnrealizedFull {
exit,
)?;
self.compute_rest_extended(prices, starting_indexes, supply_in_profit_sats, supply_in_loss_sats, exit)?;
// invested_capital_in_profit = supply_profit_sats × spot / ONE_BTC - unrealized_profit
self.invested_capital.in_profit.cents.height.compute_transform3(
starting_indexes.height,
supply_in_profit_sats,
&prices.spot.cents.height,
&self.inner.core.basic.profit.cents.height,
|(h, supply_sats, spot, profit, ..)| {
let market_value = supply_sats.as_u128() * spot.as_u128() / Sats::ONE_BTC_U128;
(h, Cents::new(market_value.saturating_sub(profit.as_u128()) as u64))
},
exit,
)?;
// invested_capital_in_loss = supply_loss_sats × spot / ONE_BTC + unrealized_loss
self.invested_capital.in_loss.cents.height.compute_transform3(
starting_indexes.height,
supply_in_loss_sats,
&prices.spot.cents.height,
&self.inner.core.basic.loss.cents.height,
|(h, supply_sats, spot, loss, ..)| {
let market_value = supply_sats.as_u128() * spot.as_u128() / Sats::ONE_BTC_U128;
(h, Cents::new((market_value + loss.as_u128()) as u64))
},
exit,
)?;
self.compute_rest_extended(prices, starting_indexes, exit)?;
self.sentiment
.net
@@ -100,40 +141,42 @@ impl UnrealizedFull {
&mut self,
prices: &prices::Vecs,
starting_indexes: &Indexes,
supply_in_profit_sats: &(impl ReadableVec<Height, Sats> + Sync),
supply_in_loss_sats: &(impl ReadableVec<Height, Sats> + Sync),
exit: &Exit,
) -> Result<()> {
// greed = spot - investor_price_winners
// investor_price = investor_cap / invested_cap (both in CentsSats)
// invested_cap is now in Cents (already divided by ONE_BTC), so multiply back
self.sentiment.greed_index.cents.height.compute_transform3(
starting_indexes.height,
&self.inner.investor_cap_in_profit_raw,
supply_in_profit_sats,
&self.invested_capital.in_profit.cents.height,
&prices.spot.cents.height,
|(h, investor_cap, supply_sats, spot, ..)| {
let invested_cap = supply_sats.as_u128() * spot.as_u128();
if invested_cap == 0 {
|(h, investor_cap, invested_cap_cents, spot, ..)| {
let invested_cap_raw = invested_cap_cents.as_u128() * Sats::ONE_BTC_U128;
if invested_cap_raw == 0 {
return (h, Cents::ZERO);
}
let investor_price = investor_cap.inner() / invested_cap;
let investor_price = investor_cap.inner() / invested_cap_raw;
let spot_u128 = spot.as_u128();
(h, Cents::new((spot_u128 - investor_price) as u64))
(h, Cents::new(spot_u128.saturating_sub(investor_price) as u64))
},
exit,
)?;
// pain = investor_price_losers - spot
self.sentiment.pain_index.cents.height.compute_transform3(
starting_indexes.height,
&self.inner.investor_cap_in_loss_raw,
supply_in_loss_sats,
&self.invested_capital.in_loss.cents.height,
&prices.spot.cents.height,
|(h, investor_cap, supply_sats, spot, ..)| {
let invested_cap = supply_sats.as_u128() * spot.as_u128();
if invested_cap == 0 {
|(h, investor_cap, invested_cap_cents, spot, ..)| {
let invested_cap_raw = invested_cap_cents.as_u128() * Sats::ONE_BTC_U128;
if invested_cap_raw == 0 {
return (h, Cents::ZERO);
}
let investor_price = investor_cap.inner() / invested_cap;
let investor_price = investor_cap.inner() / invested_cap_raw;
let spot_u128 = spot.as_u128();
(h, Cents::new((investor_price - spot_u128) as u64))
(h, Cents::new(investor_price.saturating_sub(spot_u128) as u64))
},
exit,
)?;

View File

@@ -26,11 +26,8 @@ pub(crate) fn compute_aggregations<I, T, A>(
count_indexes: &impl ReadableVec<I, StoredU64>,
exit: &Exit,
skip_count: usize,
mut first: Option<&mut EagerVec<PcoVec<I, T>>>,
mut last: Option<&mut EagerVec<PcoVec<I, T>>>,
mut min: Option<&mut EagerVec<PcoVec<I, T>>>,
mut max: Option<&mut EagerVec<PcoVec<I, T>>>,
mut average: Option<&mut EagerVec<PcoVec<I, T>>>,
mut sum: Option<&mut EagerVec<PcoVec<I, T>>>,
mut cumulative: Option<&mut EagerVec<PcoVec<I, T>>>,
mut median: Option<&mut EagerVec<PcoVec<I, T>>>,
@@ -57,14 +54,11 @@ where
}
let index = validate_vec!(
first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90
min, max, sum, cumulative, median, pct10, pct25, pct75, pct90
);
let needs_first = first.is_some();
let needs_last = last.is_some();
let needs_min = min.is_some();
let needs_max = max.is_some();
let needs_average = average.is_some();
let needs_sum = sum.is_some();
let needs_cumulative = cumulative.is_some();
let needs_percentiles = median.is_some()
@@ -74,9 +68,8 @@ where
|| pct90.is_some();
let needs_minmax = needs_min || needs_max;
let needs_sum_or_cumulative = needs_sum || needs_cumulative;
let needs_aggregates = needs_sum_or_cumulative || needs_average;
if !needs_first && !needs_last && !needs_minmax && !needs_aggregates && !needs_percentiles {
if !needs_minmax && !needs_sum_or_cumulative && !needs_percentiles {
return Ok(());
}
@@ -99,7 +92,7 @@ where
};
}
truncate_vec!(
first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90
min, max, sum, cumulative, median, pct10, pct25, pct75, pct90
);
let fi_len = first_indexes.len();
@@ -119,28 +112,8 @@ where
let effective_count = count.saturating_sub(skip_count);
let effective_first_index = first_index + skip_count.min(count);
if let Some(ref mut first_vec) = first {
let f = if effective_count > 0 {
source
.collect_one_at(effective_first_index.to_usize())
.unwrap()
} else {
T::from(0_usize)
};
first_vec.push(f);
}
if let Some(ref mut last_vec) = last {
if effective_count == 0 {
last_vec.push(T::from(0_usize));
} else {
let last_index = first_index + (count - 1);
last_vec.push(source.collect_one_at(last_index.to_usize()).unwrap());
}
}
// Fast path: only min/max needed, no sorting or allocation required
if needs_minmax && !needs_percentiles && !needs_aggregates {
if needs_minmax && !needs_percentiles && !needs_sum_or_cumulative {
let efi = effective_first_index.to_usize();
let mut min_val: Option<T> = None;
let mut max_val: Option<T> = None;
@@ -175,15 +148,13 @@ where
})*
};
}
push_zero!(max, pct90, pct75, median, pct25, pct10, min, average, sum);
push_zero!(max, pct90, pct75, median, pct25, pct10, min, sum);
if let Some(ref mut cumulative_vec) = cumulative {
cumulative_vec.push(cumulative_val.unwrap());
}
} else if needs_percentiles {
let aggregate_result = if needs_aggregates {
let len = values.len();
let sum_val = values.iter().copied().fold(T::from(0), |a, b| a + b);
Some((len, sum_val))
let sum_val = if needs_sum_or_cumulative {
Some(values.iter().copied().fold(T::from(0), |a, b| a + b))
} else {
None
};
@@ -212,25 +183,19 @@ where
min_vec.push(*values.first().unwrap());
}
if let Some((len, sum_val)) = aggregate_result {
if let Some(ref mut average_vec) = average {
average_vec.push(sum_val / len);
if let Some(sum_val) = sum_val {
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if needs_sum_or_cumulative {
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
}
} else if needs_minmax {
// Single pass for min + max + optional sum
let (min_val, max_val, sum_val, len) = values.iter().copied().fold(
let (min_val, max_val, sum_val, _len) = values.iter().copied().fold(
(values[0], values[0], T::from(0_usize), 0_usize),
|(mn, mx, s, c), v| (mn.min(v), mx.max(v), s + v, c + 1),
);
@@ -242,50 +207,33 @@ where
max_vec.push(max_val);
}
if needs_aggregates {
if let Some(ref mut average_vec) = average {
average_vec.push(sum_val / len);
if needs_sum_or_cumulative {
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if needs_sum_or_cumulative {
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
}
}
} else if needs_aggregates {
} else if needs_sum_or_cumulative {
let efi = effective_first_index.to_usize();
let (sum_val, len) = source.fold_range_at(
let sum_val = source.fold_range_at(
efi,
efi + effective_count,
(T::from(0_usize), 0_usize),
|(acc, cnt), val| (acc + val, cnt + 1),
T::from(0_usize),
|acc, val| acc + val,
);
if let Some(ref mut average_vec) = average {
let avg = if len > 0 {
sum_val / len
} else {
T::from(0_usize)
};
average_vec.push(avg);
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if needs_sum_or_cumulative {
if let Some(ref mut sum_vec) = sum {
sum_vec.push(sum_val);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.push(t);
}
}
@@ -301,7 +249,7 @@ where
}
write_vec!(
first, last, min, max, average, sum, cumulative, median, pct10, pct25, pct75, pct90
min, max, sum, cumulative, median, pct10, pct25, pct75, pct90
);
Ok(())
@@ -317,7 +265,6 @@ pub(crate) fn compute_aggregations_nblock_window<I, T, A>(
exit: &Exit,
min: &mut EagerVec<PcoVec<I, T>>,
max: &mut EagerVec<PcoVec<I, T>>,
average: &mut EagerVec<PcoVec<I, T>>,
median: &mut EagerVec<PcoVec<I, T>>,
pct10: &mut EagerVec<PcoVec<I, T>>,
pct25: &mut EagerVec<PcoVec<I, T>>,
@@ -335,7 +282,6 @@ where
for vec in [
&mut *min,
&mut *max,
&mut *average,
&mut *median,
&mut *pct10,
&mut *pct25,
@@ -358,7 +304,6 @@ where
for vec in [
&mut *min,
&mut *max,
&mut *average,
&mut *median,
&mut *pct10,
&mut *pct25,
@@ -371,13 +316,12 @@ where
// Persistent sorted window: O(n) merge-insert for new block, O(n) merge-filter
// for expired block. Avoids re-sorting every block. Cursor reads only the new
// block (~1 page decompress vs original's ~4). Ring buffer caches per-block
// sorted values + sums for O(1) expiry.
// sorted values for O(1) expiry.
// Peak memory: 2 × ~15k window elements + n_blocks × ~2500 cached ≈ 360 KB.
let mut block_ring: VecDeque<(Vec<T>, T)> = VecDeque::with_capacity(n_blocks + 1);
let mut block_ring: VecDeque<Vec<T>> = VecDeque::with_capacity(n_blocks + 1);
let mut cursor = source.cursor();
let mut sorted_window: Vec<T> = Vec::new();
let mut merge_buf: Vec<T> = Vec::new();
let mut running_sum = T::from(0_usize);
// Pre-fill initial window blocks [window_start_of_first..start)
let window_start_of_first = start.saturating_sub(n_blocks - 1);
@@ -390,10 +334,8 @@ where
let mut bv = Vec::with_capacity(count);
cursor.for_each(count, |v: T| bv.push(v));
bv.sort_unstable();
let block_sum = bv.iter().copied().fold(T::from(0), |a, b| a + b);
running_sum += block_sum;
sorted_window.extend_from_slice(&bv);
block_ring.push_back((bv, block_sum));
block_ring.push_back(bv);
}
// Initial sorted_window was built by extending individually sorted blocks —
// stable sort detects these sorted runs and merges in O(n × log(k)) instead of O(n log n).
@@ -411,8 +353,6 @@ where
let mut new_block = Vec::with_capacity(count);
cursor.for_each(count, |v: T| new_block.push(v));
new_block.sort_unstable();
let new_sum = new_block.iter().copied().fold(T::from(0), |a, b| a + b);
running_sum += new_sum;
// Merge-insert new sorted block into sorted_window: O(n+m)
merge_buf.clear();
@@ -431,12 +371,11 @@ where
merge_buf.extend_from_slice(&new_block[ni..]);
std::mem::swap(&mut sorted_window, &mut merge_buf);
block_ring.push_back((new_block, new_sum));
block_ring.push_back(new_block);
// Expire oldest block: merge-filter its sorted values from sorted_window in O(n)
if block_ring.len() > n_blocks {
let (expired, expired_sum) = block_ring.pop_front().unwrap();
running_sum = running_sum.checked_sub(expired_sum).unwrap();
let expired = block_ring.pop_front().unwrap();
merge_buf.clear();
merge_buf.reserve(sorted_window.len());
@@ -455,7 +394,6 @@ where
for vec in [
&mut *min,
&mut *max,
&mut *average,
&mut *median,
&mut *pct10,
&mut *pct25,
@@ -465,9 +403,6 @@ where
vec.push(zero);
}
} else {
let len = sorted_window.len();
let avg = running_sum / len;
max.push(*sorted_window.last().unwrap());
pct90.push(get_percentile(&sorted_window, 0.90));
pct75.push(get_percentile(&sorted_window, 0.75));
@@ -475,12 +410,11 @@ where
pct25.push(get_percentile(&sorted_window, 0.25));
pct10.push(get_percentile(&sorted_window, 0.10));
min.push(*sorted_window.first().unwrap());
average.push(avg);
}
}
let _lock = exit.lock();
for vec in [min, max, average, median, pct10, pct25, pct75, pct90] {
for vec in [min, max, median, pct10, pct25, pct75, pct90] {
vec.write()?;
}

View File

@@ -6,7 +6,7 @@ use vecdb::{
use super::sliding_window::SlidingWindowSorted;
/// Compute all 8 rolling distribution stats (avg, min, max, p10, p25, median, p75, p90)
/// Compute all 7 rolling distribution stats (min, max, p10, p25, median, p75, p90)
/// in a single sorted-vec pass per window.
///
/// When computing multiple windows from the same source, pass the same
@@ -18,7 +18,6 @@ pub fn compute_rolling_distribution_from_starts<I, T, A>(
max_from: I,
window_starts: &impl ReadableVec<I, I>,
values: &impl ReadableVec<I, A>,
average_out: &mut EagerVec<PcoVec<I, T>>,
min_out: &mut EagerVec<PcoVec<I, T>>,
max_out: &mut EagerVec<PcoVec<I, T>>,
p10_out: &mut EagerVec<PcoVec<I, T>>,
@@ -38,7 +37,6 @@ where
let version = window_starts.version() + values.version();
for v in [
&mut *average_out,
&mut *min_out,
&mut *max_out,
&mut *p10_out,
@@ -51,7 +49,6 @@ where
}
let skip = [
average_out.len(),
min_out.len(),
max_out.len(),
p10_out.len(),
@@ -108,7 +105,6 @@ where
let starts_batch = window_starts.collect_range_at(skip, end);
for v in [
&mut *average_out,
&mut *min_out,
&mut *max_out,
&mut *p10_out,
@@ -128,7 +124,6 @@ where
if window.is_empty() {
let zero = T::from(0.0);
for v in [
&mut *average_out,
&mut *min_out,
&mut *max_out,
&mut *p10_out,
@@ -140,7 +135,6 @@ where
v.push(zero);
}
} else {
average_out.push(T::from(window.average()));
min_out.push(T::from(window.min()));
max_out.push(T::from(window.max()));
let [p10, p25, p50, p75, p90] =
@@ -152,10 +146,9 @@ where
p90_out.push(T::from(p90));
}
if average_out.batch_limit_reached() {
if min_out.batch_limit_reached() {
let _lock = exit.lock();
for v in [
&mut *average_out,
&mut *min_out,
&mut *max_out,
&mut *p10_out,
@@ -172,7 +165,6 @@ where
// Final flush
let _lock = exit.lock();
for v in [
average_out,
min_out,
max_out,
p10_out,

View File

@@ -121,7 +121,6 @@ impl SortedBlocks {
/// O(n) memmoves with a flat sorted Vec.
pub(crate) struct SlidingWindowSorted {
sorted: SortedBlocks,
running_sum: f64,
prev_start: usize,
}
@@ -129,7 +128,6 @@ impl SlidingWindowSorted {
pub fn with_capacity(cap: usize) -> Self {
Self {
sorted: SortedBlocks::new(cap),
running_sum: 0.0,
prev_start: 0,
}
}
@@ -143,7 +141,6 @@ impl SlidingWindowSorted {
return;
}
let mut sorted_copy: Vec<f64> = slice.to_vec();
self.running_sum = sorted_copy.iter().sum();
sorted_copy.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap_or(Ordering::Equal));
self.sorted = SortedBlocks::from_sorted(&sorted_copy, self.sorted.block_size);
}
@@ -156,12 +153,10 @@ impl SlidingWindowSorted {
partial_values: &[f64],
range_start: usize,
) {
self.running_sum += value;
self.sorted.insert(value);
while self.prev_start < new_start {
let old = partial_values[self.prev_start - range_start];
self.running_sum -= old;
self.sorted.remove(old);
self.prev_start += 1;
}
@@ -172,15 +167,6 @@ impl SlidingWindowSorted {
self.sorted.is_empty()
}
#[inline]
pub fn average(&self) -> f64 {
if self.sorted.is_empty() {
0.0
} else {
self.running_sum / self.sorted.len() as f64
}
}
#[inline]
pub fn min(&self) -> f64 {
if self.sorted.is_empty() {

View File

@@ -2,7 +2,6 @@ use brk_traversable::Traversable;
#[derive(Clone, Traversable)]
pub struct DistributionStats<A> {
pub average: A,
pub min: A,
pub max: A,
pub pct10: A,
@@ -13,31 +12,29 @@ pub struct DistributionStats<A> {
}
impl<A> DistributionStats<A> {
pub const SUFFIXES: [&'static str; 8] = [
"average", "min", "max", "pct10", "pct25", "median", "pct75", "pct90",
pub const SUFFIXES: [&'static str; 7] = [
"min", "max", "pct10", "pct25", "median", "pct75", "pct90",
];
pub fn try_from_fn<E>(
mut f: impl FnMut(&str) -> std::result::Result<A, E>,
) -> std::result::Result<Self, E> {
Ok(Self {
average: f(Self::SUFFIXES[0])?,
min: f(Self::SUFFIXES[1])?,
max: f(Self::SUFFIXES[2])?,
pct10: f(Self::SUFFIXES[3])?,
pct25: f(Self::SUFFIXES[4])?,
median: f(Self::SUFFIXES[5])?,
pct75: f(Self::SUFFIXES[6])?,
pct90: f(Self::SUFFIXES[7])?,
min: f(Self::SUFFIXES[0])?,
max: f(Self::SUFFIXES[1])?,
pct10: f(Self::SUFFIXES[2])?,
pct25: f(Self::SUFFIXES[3])?,
median: f(Self::SUFFIXES[4])?,
pct75: f(Self::SUFFIXES[5])?,
pct90: f(Self::SUFFIXES[6])?,
})
}
/// Apply a fallible operation to each of the 8 fields.
/// Apply a fallible operation to each of the 7 fields.
pub fn try_for_each_mut(
&mut self,
mut f: impl FnMut(&mut A) -> brk_error::Result<()>,
) -> brk_error::Result<()> {
f(&mut self.average)?;
f(&mut self.min)?;
f(&mut self.max)?;
f(&mut self.pct10)?;
@@ -50,8 +47,7 @@ impl<A> DistributionStats<A> {
/// Get minimum value by applying a function to each field.
pub fn min_by(&self, mut f: impl FnMut(&A) -> usize) -> usize {
f(&self.average)
.min(f(&self.min))
f(&self.min)
.min(f(&self.max))
.min(f(&self.pct10))
.min(f(&self.pct25))

View File

@@ -60,7 +60,6 @@ impl RollingDistributionAmountPerBlock {
max_from,
$starts,
$source,
&mut self.0.average.$w.$unit.height,
&mut self.0.min.$w.$unit.height,
&mut self.0.max.$w.$unit.height,
&mut self.0.pct10.$w.$unit.height,

View File

@@ -77,9 +77,6 @@ where
skip_count,
None,
None,
None,
None,
None,
Some(&mut self.sum.height),
Some(&mut self.cumulative.height),
None,

View File

@@ -54,11 +54,8 @@ impl<T: NumericValue + JsonSchema> PerBlockDistribution<T> {
count_indexes,
exit,
skip_count,
None,
None,
Some(&mut s.min.height),
Some(&mut s.max.height),
Some(&mut s.average.height),
None,
None,
Some(&mut s.median.height),
@@ -92,7 +89,6 @@ impl<T: NumericValue + JsonSchema> PerBlockDistribution<T> {
exit,
&mut s.min.height,
&mut s.max.height,
&mut s.average.height,
&mut s.median.height,
&mut s.pct10.height,
&mut s.pct25.height,

View File

@@ -5,7 +5,7 @@ use vecdb::{LazyVecFrom1, ReadableCloneableVec, UnaryTransform, VecIndex};
use crate::internal::{ComputedVecValue, PerBlockDistribution, DistributionStats};
/// Lazy analog of `Distribution<T>`: 8 `LazyVecFrom1` fields,
/// Lazy analog of `Distribution<T>`: 7 `LazyVecFrom1` fields,
/// each derived by transforming the corresponding field of a source `PerBlockDistribution<S1T>`.
#[derive(Clone, Traversable)]
pub struct LazyDistribution<I, T, S1T>
@@ -14,7 +14,6 @@ where
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
{
pub average: LazyVecFrom1<I, T, I, S1T>,
pub min: LazyVecFrom1<I, T, I, S1T>,
pub max: LazyVecFrom1<I, T, I, S1T>,
pub pct10: LazyVecFrom1<I, T, I, S1T>,
@@ -36,43 +35,38 @@ where
) -> Self {
let s = DistributionStats::<()>::SUFFIXES;
Self {
average: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[0]),
version,
source.average.height.read_only_boxed_clone(),
),
min: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[1]),
&format!("{name}_{}", s[0]),
version,
source.min.height.read_only_boxed_clone(),
),
max: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[2]),
&format!("{name}_{}", s[1]),
version,
source.max.height.read_only_boxed_clone(),
),
pct10: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[3]),
&format!("{name}_{}", s[2]),
version,
source.pct10.height.read_only_boxed_clone(),
),
pct25: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[4]),
&format!("{name}_{}", s[3]),
version,
source.pct25.height.read_only_boxed_clone(),
),
median: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[5]),
&format!("{name}_{}", s[4]),
version,
source.median.height.read_only_boxed_clone(),
),
pct75: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[6]),
&format!("{name}_{}", s[5]),
version,
source.pct75.height.read_only_boxed_clone(),
),
pct90: LazyVecFrom1::transformed::<F>(
&format!("{name}_{}", s[7]),
&format!("{name}_{}", s[6]),
version,
source.pct90.height.read_only_boxed_clone(),
),

View File

@@ -53,7 +53,6 @@ where
max_from,
windows.$w,
source,
&mut self.0.average.$w.height,
&mut self.0.min.$w.height,
&mut self.0.max.$w.height,
&mut self.0.pct10.$w.height,

View File

@@ -62,7 +62,6 @@ where
}
Self(DistributionStats {
average: map_stat!(average, "average"),
min: map_stat!(min, "min"),
max: map_stat!(max, "max"),
pct10: map_stat!(pct10, "pct10"),

View File

@@ -15,14 +15,14 @@ impl Query {
let iter = Day1Iter::new(computer, start, current_height.to_usize());
// Rolling 24h average, sampled at day1 boundaries
// Rolling 24h median, sampled at day1 boundaries
let sizes_vec = &computer
.blocks
.size
.size
.rolling
.distribution
.average
.median
._24h
.day1;
let weights_vec = &computer
@@ -31,7 +31,7 @@ impl Query {
.weight
.rolling
.distribution
.average
.median
._24h
.day1;