global: MASSIVE snapshot

This commit is contained in:
nym21
2026-01-07 01:16:37 +01:00
parent e832ffbe23
commit cb0abc324e
487 changed files with 21155 additions and 13627 deletions

View File

@@ -0,0 +1,59 @@
//! Lazy average-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` average of a finer-grained source vector.
///
/// Wraps a [`LazyVecFrom2`] combining a value source indexed by the finer
/// `S1I` with a second source indexed by `I` whose length bounds the output.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "average")]
pub struct LazyAverage<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyAverage<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_average` vector from a finer-grained value
    /// source, bounded by the length of `len_source`.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_average"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Indexes past the computed range have no value yet.
                if i.to_usize() >= len_source.vec_len() {
                    return None;
                }
                // Fold every finer-index value covered by `i` into (sum, count).
                let (total, count) = S1I::inclusive_range_from(i, source.vec_len())
                    .flat_map(|j| source.get_at(j))
                    .fold((T::from(0), 0usize), |(mut acc, n), v| {
                        acc += v;
                        (acc, n + 1)
                    });
                // An empty range has no average.
                (count > 0).then(|| total / count)
            },
        ))
    }
}

View File

@@ -0,0 +1,48 @@
//! Lazy cumulative-only aggregation (takes last value from cumulative source).
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` cumulative value, read from an already-cumulative
/// finer-grained source (takes the last covered entry rather than summing).
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "cumulative")]
pub struct LazyCumulative<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyCumulative<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_cumulative` vector that reads, for each coarse
    /// index, the last value of the finer-grained cumulative source.
    pub fn from_source(
        name: &str,
        version: Version,
        cumulative_source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let vec = LazyVecFrom2::init(
            &format!("{name}_cumulative"),
            version + VERSION,
            cumulative_source,
            len_source,
            |i: I, source, len_source| {
                // Only indexes inside the computed range carry a value.
                let in_range = i.to_usize() < len_source.vec_len();
                in_range
                    .then(|| source.get_at(S1I::max_from(i, source.vec_len())))
                    .flatten()
            },
        );
        Self(vec)
    }
}

View File

@@ -0,0 +1,52 @@
//! Lazy distribution pattern (average, min, max).
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex};
use super::{LazyAverage, LazyMax, LazyMin};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazy distribution bundle: average, min and max derived from the matching
/// finer-grained source vecs, all sharing one length source.
#[derive(Clone, Traversable)]
pub struct LazyDistribution<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: ComputedVecValue,
{
    #[traversable(flatten)]
    pub average: LazyAverage<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub min: LazyMin<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub max: LazyMax<I, T, S1I, S2T>,
}
impl<I, T, S1I, S2T> LazyDistribution<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: ComputedVecValue,
{
    /// Builds the three aggregates from their respective source vecs; every
    /// component shares the same name, version and length source.
    pub fn from_distribution(
        name: &str,
        version: Version,
        source_average: IterableBoxedVec<S1I, T>,
        source_min: IterableBoxedVec<S1I, T>,
        source_max: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let version = version + VERSION;
        let average = LazyAverage::from_source(name, version, source_average, len_source.clone());
        let min = LazyMin::from_source(name, version, source_min, len_source.clone());
        let max = LazyMax::from_source(name, version, source_max, len_source);
        Self { average, min, max }
    }
}

View File

@@ -0,0 +1,48 @@
//! Lazy first-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` first value of a finer-grained source vector.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "first")]
pub struct LazyFirst<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyFirst<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_first` vector that reads, for each coarse index,
    /// the first value of the finer-grained source.
    ///
    /// # Why the `_first` suffix
    /// Every sibling aggregate registers under a suffixed name (`_sum`,
    /// `_min`, `_max`, `_average`, `_cumulative`) except `LazyLast`, which
    /// canonically owns the bare `name`. Registering `first` under the bare
    /// `name` (as this previously did) would collide with a `LazyLast` built
    /// from the same base name, so it is suffixed here.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_first"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Indexes past the computed range have no value yet.
                if i.to_usize() >= len_source.vec_len() {
                    return None;
                }
                // Lowest finer index covered by the coarse index `i`.
                source.get_at(S1I::min_from(i))
            },
        ))
    }
}

View File

@@ -0,0 +1,49 @@
//! Lazy last-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` last value of a finer-grained source vector.
/// Registered under the bare name: "last" is the canonical coarse value.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "last")]
pub struct LazyLast<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyLast<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy vector that reads, for each coarse index, the last value
    /// of the finer-grained source. Uses the bare `name` on purpose: the last
    /// value is treated as the canonical value of the coarser timeframe.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let vec = LazyVecFrom2::init(
            name,
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                if len_source.vec_len() <= i.to_usize() {
                    None
                } else {
                    // Highest finer index covered by `i`.
                    source.get_at(S1I::max_from(i, source.vec_len()))
                }
            },
        );
        Self(vec)
    }
}

View File

@@ -0,0 +1,50 @@
//! Lazy max-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` maximum of a finer-grained source vector.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "max")]
pub struct LazyMax<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyMax<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_max` vector holding, for each coarse index, the
    /// maximum of the finer-grained source values it covers.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let vec = LazyVecFrom2::init(
            &format!("{name}_max"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Nothing to aggregate past the computed range.
                if len_source.vec_len() <= i.to_usize() {
                    return None;
                }
                // `max()` yields `None` on an empty range, which is exactly
                // the value we want to report then.
                S1I::inclusive_range_from(i, source.vec_len())
                    .filter_map(|j| source.get_at(j))
                    .max()
            },
        );
        Self(vec)
    }
}

View File

@@ -0,0 +1,50 @@
//! Lazy min-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` minimum of a finer-grained source vector.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "min")]
pub struct LazyMin<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazyMin<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_min` vector holding, for each coarse index, the
    /// minimum of the finer-grained source values it covers.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let vec = LazyVecFrom2::init(
            &format!("{name}_min"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Nothing to aggregate past the computed range.
                if len_source.vec_len() <= i.to_usize() {
                    return None;
                }
                // `min()` yields `None` on an empty range, which is exactly
                // the value we want to report then.
                S1I::inclusive_range_from(i, source.vec_len())
                    .filter_map(|j| source.get_at(j))
                    .min()
            },
        );
        Self(vec)
    }
}

View File

@@ -0,0 +1,23 @@
//! Lazy aggregation primitives (finer index → coarser index).
mod average;
mod cumulative;
mod distribution;
mod first;
mod last;
mod max;
mod min;
mod stats_aggregate;
mod sum;
mod sum_cum;
pub use average::*;
pub use cumulative::*;
pub use distribution::*;
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;
pub use stats_aggregate::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,61 @@
//! Lazy stats aggregate pattern (average, min, max, sum, cumulative).
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue};
use super::{LazyAverage, LazyCumulative, LazyMax, LazyMin, LazySum};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Full lazy stats bundle: average, min, max, sum and cumulative, each derived
/// from the matching finer-grained source vec and sharing one length source.
#[derive(Clone, Traversable)]
pub struct LazyFull<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue,
{
    #[traversable(flatten)]
    pub average: LazyAverage<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub min: LazyMin<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub max: LazyMax<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub sum: LazySum<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub cumulative: LazyCumulative<I, T, S1I, S2T>,
}
impl<I, T, S1I, S2T> LazyFull<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds all five aggregates from their respective source vecs; every
    /// component shares the same name, version and length source.
    #[allow(clippy::too_many_arguments)]
    pub fn from_stats_aggregate(
        name: &str,
        version: Version,
        source_average: IterableBoxedVec<S1I, T>,
        source_min: IterableBoxedVec<S1I, T>,
        source_max: IterableBoxedVec<S1I, T>,
        source_sum: IterableBoxedVec<S1I, T>,
        source_cumulative: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let version = version + VERSION;
        let average = LazyAverage::from_source(name, version, source_average, len_source.clone());
        let min = LazyMin::from_source(name, version, source_min, len_source.clone());
        let max = LazyMax::from_source(name, version, source_max, len_source.clone());
        let sum = LazySum::from_source(name, version, source_sum, len_source.clone());
        let cumulative =
            LazyCumulative::from_source(name, version, source_cumulative, len_source);
        Self {
            average,
            min,
            max,
            sum,
            cumulative,
        }
    }
}

View File

@@ -0,0 +1,60 @@
//! Lazy sum-value aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, LazyVecFrom2, VecIndex, VecValue};
use crate::internal::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Lazily computed per-`I` sum of a finer-grained source vector.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(wrap = "sum")]
pub struct LazySum<I, T, S1I, S2T>(pub LazyVecFrom2<I, T, S1I, T, I, S2T>)
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue;
impl<I, T, S1I, S2T> LazySum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds a lazy `{name}_sum` vector holding, for each coarse index, the
    /// sum of the finer-grained source values it covers.
    pub fn from_source(
        name: &str,
        version: Version,
        source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        Self(LazyVecFrom2::init(
            &format!("{name}_sum"),
            version + VERSION,
            source,
            len_source,
            |i: I, source, len_source| {
                // Indexes past the computed range have no value yet.
                if i.to_usize() >= len_source.vec_len() {
                    return None;
                }
                let mut values = S1I::inclusive_range_from(i, source.vec_len())
                    .filter_map(|j| source.get_at(j));
                // Seed with the first value so an empty range yields `None`.
                let first = values.next()?;
                Some(values.fold(first, |mut acc, v| {
                    acc += v;
                    acc
                }))
            },
        ))
    }
}

View File

@@ -0,0 +1,51 @@
//! Lazy sum + cumulative aggregation.
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, VecIndex, VecValue};
use crate::internal::{ComputedVecValue, LazyCumulative, LazySum};
const VERSION: Version = Version::ZERO;
/// Lazy sum + cumulative pair: the sum is aggregated from a per-fine-index
/// source while the cumulative reads an already-cumulative source.
#[derive(Clone, Traversable)]
pub struct LazySumCum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    S1I: VecIndex,
    S2T: VecValue,
{
    #[traversable(flatten)]
    pub sum: LazySum<I, T, S1I, S2T>,
    #[traversable(flatten)]
    pub cumulative: LazyCumulative<I, T, S1I, S2T>,
}
impl<I, T, S1I, S2T> LazySumCum<I, T, S1I, S2T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema + 'static,
    S1I: VecIndex + 'static + FromCoarserIndex<I>,
    S2T: VecValue,
{
    /// Builds the sum/cumulative pair from their two source vecs, sharing one
    /// name, version and length source.
    pub fn from_sources(
        name: &str,
        version: Version,
        sum_source: IterableBoxedVec<S1I, T>,
        cumulative_source: IterableBoxedVec<S1I, T>,
        len_source: IterableBoxedVec<I, S2T>,
    ) -> Self {
        let version = version + VERSION;
        let sum = LazySum::from_source(name, version, sum_source, len_source.clone());
        let cumulative =
            LazyCumulative::from_source(name, version, cumulative_source, len_source);
        Self { sum, cumulative }
    }
}

View File

@@ -1,843 +0,0 @@
use brk_error::{Error, Result};
use brk_traversable::Traversable;
use brk_types::{CheckedSub, StoredU64, Version};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec, PcoVec,
VecIndex, VecValue,
};
use crate::utils::{OptionExt, get_percentile};
use super::super::ComputedVecValue;
const VERSION: Version = Version::ZERO;
/// Builder holding one optional eagerly-computed, persisted vec per supported
/// statistic (first/last, min/max, percentiles, average, sum, cumulative).
///
/// Which vecs exist is decided by [`VecBuilderOptions`] at import time; every
/// accessor on disabled vecs returns `None`.
#[derive(Clone, Debug, Traversable)]
pub struct EagerVecsBuilder<I, T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
{
    pub first: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub average: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub sum: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub max: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct90: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct75: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub median: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct25: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub pct10: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub min: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub last: Option<Box<EagerVec<PcoVec<I, T>>>>,
    pub cumulative: Option<Box<EagerVec<PcoVec<I, T>>>>,
}
impl<I, T> EagerVecsBuilder<I, T>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
{
    /// Imports (or creates) every vec enabled in `options` from `db`.
    ///
    /// Naming: when exactly one statistic is active it takes the bare `name`;
    /// otherwise each vec gets a `{name}_{stat}` suffix. `last` always uses
    /// the bare name, `cumulative` is always suffixed, and `sum` takes the
    /// bare name when no last/average/min/max is active.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        options: VecBuilderOptions,
    ) -> Result<Self> {
        let only_one_active = options.is_only_one_active();
        let suffix = |s: &str| format!("{name}_{s}");
        let maybe_suffix = |s: &str| {
            if only_one_active {
                name.to_string()
            } else {
                suffix(s)
            }
        };
        let v = version + VERSION;
        macro_rules! import {
            ($s:expr) => {
                Box::new(EagerVec::forced_import(db, &maybe_suffix($s), v).unwrap())
            };
        }
        let s = Self {
            first: options.first.then(|| import!("first")),
            // "last" is the canonical vec: always the bare name.
            last: options
                .last
                .then(|| Box::new(EagerVec::forced_import(db, name, v).unwrap())),
            min: options.min.then(|| import!("min")),
            max: options.max.then(|| import!("max")),
            median: options.median.then(|| import!("median")),
            average: options.average.then(|| import!("avg")),
            sum: options.sum.then(|| {
                // Sum takes the canonical bare name when no other value-like
                // stat claims it.
                let sum_name = if !options.last && !options.average && !options.min && !options.max
                {
                    name.to_string()
                } else {
                    maybe_suffix("sum")
                };
                Box::new(EagerVec::forced_import(db, &sum_name, v).unwrap())
            }),
            cumulative: options
                .cumulative
                .then(|| Box::new(EagerVec::forced_import(db, &suffix("cumulative"), v).unwrap())),
            pct90: options.pct90.then(|| import!("pct90")),
            pct75: options.pct75.then(|| import!("pct75")),
            pct25: options.pct25.then(|| import!("pct25")),
            pct10: options.pct10.then(|| import!("pct10")),
        };
        Ok(s)
    }
    /// True if any percentile vec (pct90/75/median/25/10) is enabled.
    #[inline]
    fn needs_percentiles(&self) -> bool {
        self.pct90.is_some()
            || self.pct75.is_some()
            || self.median.is_some()
            || self.pct25.is_some()
            || self.pct10.is_some()
    }
    /// True if min or max is enabled.
    #[inline]
    fn needs_minmax(&self) -> bool {
        self.max.is_some() || self.min.is_some()
    }
    /// True if sum or cumulative is enabled.
    #[inline]
    fn needs_sum_or_cumulative(&self) -> bool {
        self.sum.is_some() || self.cumulative.is_some()
    }
    /// True if average, sum or cumulative is enabled.
    #[inline]
    fn needs_average_sum_or_cumulative(&self) -> bool {
        self.needs_sum_or_cumulative() || self.average.is_some()
    }
    /// Compute min/max in O(n) without sorting or collecting
    ///
    /// NOTE(review): panics (`unwrap`) when `iter` is empty while min or max
    /// is enabled — callers must guarantee a non-empty range.
    #[inline]
    fn compute_minmax_streaming(
        &mut self,
        index: usize,
        iter: impl Iterator<Item = T>,
    ) -> Result<()> {
        let mut min_val: Option<T> = None;
        let mut max_val: Option<T> = None;
        let need_min = self.min.is_some();
        let need_max = self.max.is_some();
        for val in iter {
            if need_min {
                min_val = Some(min_val.map_or(val, |m| if val < m { val } else { m }));
            }
            if need_max {
                max_val = Some(max_val.map_or(val, |m| if val > m { val } else { m }));
            }
        }
        if let Some(min) = self.min.as_mut() {
            min.truncate_push_at(index, min_val.unwrap())?;
        }
        if let Some(max) = self.max.as_mut() {
            max.truncate_push_at(index, max_val.unwrap())?;
        }
        Ok(())
    }
    /// Compute min/max from collected values in O(n) without sorting
    ///
    /// NOTE(review): panics (`unwrap`) on an empty `values` slice.
    #[inline]
    fn compute_minmax_from_slice(&mut self, index: usize, values: &[T]) -> Result<()> {
        if let Some(min) = self.min.as_mut() {
            min.truncate_push_at(index, *values.iter().min().unwrap())?;
        }
        if let Some(max) = self.max.as_mut() {
            max.truncate_push_at(index, *values.iter().max().unwrap())?;
        }
        Ok(())
    }
    /// Compute percentiles from sorted values (assumes values is already sorted)
    fn compute_percentiles_from_sorted(&mut self, index: usize, values: &[T]) -> Result<()> {
        // max/min are free here: they are the ends of the sorted slice.
        if let Some(max) = self.max.as_mut() {
            max.truncate_push_at(
                index,
                *values
                    .last()
                    .ok_or(Error::Internal("Empty values for percentiles"))?,
            )?;
        }
        if let Some(pct90) = self.pct90.as_mut() {
            pct90.truncate_push_at(index, get_percentile(values, 0.90))?;
        }
        if let Some(pct75) = self.pct75.as_mut() {
            pct75.truncate_push_at(index, get_percentile(values, 0.75))?;
        }
        if let Some(median) = self.median.as_mut() {
            median.truncate_push_at(index, get_percentile(values, 0.50))?;
        }
        if let Some(pct25) = self.pct25.as_mut() {
            pct25.truncate_push_at(index, get_percentile(values, 0.25))?;
        }
        if let Some(pct10) = self.pct10.as_mut() {
            pct10.truncate_push_at(index, get_percentile(values, 0.10))?;
        }
        if let Some(min) = self.min.as_mut() {
            min.truncate_push_at(index, *values.first().unwrap())?;
        }
        Ok(())
    }
    /// Compute sum, average, and cumulative from values
    ///
    /// `cumulative` is the running total carried across indexes; it must be
    /// `Some` whenever the cumulative vec is enabled (unwrapped below).
    fn compute_aggregates(
        &mut self,
        index: usize,
        values: Vec<T>,
        cumulative: &mut Option<T>,
    ) -> Result<()> {
        let len = values.len();
        let sum = values.into_iter().fold(T::from(0), |a, b| a + b);
        if let Some(average) = self.average.as_mut() {
            // len == 0 handled by T's Div<usize> returning NaN
            average.truncate_push_at(index, sum / len)?;
        }
        if self.needs_sum_or_cumulative() {
            if let Some(sum_vec) = self.sum.as_mut() {
                sum_vec.truncate_push_at(index, sum)?;
            }
            if let Some(cumulative_vec) = self.cumulative.as_mut() {
                let t = cumulative.unwrap() + sum;
                cumulative.replace(t);
                cumulative_vec.truncate_push_at(index, t)?;
            }
        }
        Ok(())
    }
    /// Extends only the cumulative vec from a same-index `source` (no-op when
    /// cumulative is disabled), resuming from the last computed entry.
    pub fn extend(
        &mut self,
        max_from: I,
        source: &impl IterableVec<I, T>,
        exit: &Exit,
    ) -> Result<()> {
        if self.cumulative.is_none() {
            return Ok(());
        };
        self.validate_computed_version_or_reset(source.version())?;
        let index = self.starting_index(max_from);
        // `um()` — presumably OptionExt's unwrap-mut helper; verify in utils.
        let cumulative_vec = self.cumulative.um();
        // Resume the running total from the previous entry (0 at the start).
        let mut cumulative = index.decremented().map_or(T::from(0_usize), |index| {
            cumulative_vec.iter().get_unwrap(index)
        });
        source
            .iter()
            .enumerate()
            .skip(index.to_usize())
            .try_for_each(|(i, v)| -> Result<()> {
                cumulative += v;
                cumulative_vec.truncate_push_at(i, cumulative)?;
                Ok(())
            })?;
        // Hold the exit lock while flushing so shutdown can't interleave.
        let _lock = exit.lock();
        self.write()?;
        Ok(())
    }
    /// Recomputes every enabled vec for coarse index `I` from a source indexed
    /// by the finer `A`, using `first_indexes`/`count_indexes` to map each
    /// coarse index to its run of source values.
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + CheckedSub<A>,
    {
        self.validate_computed_version_or_reset(
            source.version() + first_indexes.version() + count_indexes.version(),
        )?;
        let index = self.starting_index(max_from);
        let mut source_iter = source.iter();
        let cumulative_vec = self.cumulative.as_mut();
        // Running total resumed from the previous cumulative entry (if any).
        let mut cumulative = cumulative_vec.map(|cumulative_vec| {
            index.decremented().map_or(T::from(0_usize), |index| {
                cumulative_vec.iter().get_unwrap(index)
            })
        });
        let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
        first_indexes
            .iter()
            .enumerate()
            .skip(index.to_usize())
            .try_for_each(|(index, first_index)| -> Result<()> {
                let count_index = count_indexes_iter.next().unwrap();
                if let Some(first) = self.first.as_mut() {
                    // Missing first value falls back to zero.
                    let f = source_iter
                        .get(first_index)
                        .unwrap_or_else(|| T::from(0_usize));
                    first.truncate_push_at(index, f)?;
                }
                if let Some(last) = self.last.as_mut() {
                    let count_index = *count_index as usize;
                    if count_index == 0 {
                        panic!("should compute last if count can be 0")
                    }
                    let last_index = first_index + (count_index - 1);
                    let v = source_iter.get_unwrap(last_index);
                    // .context("to work")
                    // .inspect_err(|_| {
                    //     dbg!(first_index, count_index, last_index);
                    // })
                    // .unwrap()
                    // ;
                    last.truncate_push_at(index, v)?;
                }
                let needs_percentiles = self.needs_percentiles();
                let needs_minmax = self.needs_minmax();
                let needs_aggregates = self.needs_average_sum_or_cumulative();
                // Fast path: only min/max needed, no sorting or allocation required
                if needs_minmax && !needs_percentiles && !needs_aggregates {
                    source_iter.set_position(first_index);
                    self.compute_minmax_streaming(
                        index,
                        (&mut source_iter).take(*count_index as usize),
                    )?;
                } else if needs_percentiles || needs_aggregates {
                    source_iter.set_position(first_index);
                    let mut values = (&mut source_iter)
                        .take(*count_index as usize)
                        .collect::<Vec<_>>();
                    if needs_percentiles {
                        values.sort_unstable();
                        self.compute_percentiles_from_sorted(index, &values)?;
                    } else if needs_minmax {
                        // We have values collected but only need min/max (along with aggregates)
                        self.compute_minmax_from_slice(index, &values)?;
                    }
                    if needs_aggregates {
                        self.compute_aggregates(index, values, &mut cumulative)?;
                    }
                }
                Ok(())
            })?;
        let _lock = exit.lock();
        self.write()?;
        Ok(())
    }
    /// Like [`Self::compute`] but reads from an already-aggregated finer
    /// builder, deriving each stat from the matching source stat
    /// (max-of-maxes, sum-of-sums, ...). Percentiles are unsupported.
    #[allow(clippy::wrong_self_convention)]
    pub fn from_aligned<A>(
        &mut self,
        max_from: I,
        source: &EagerVecsBuilder<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + CheckedSub<A>,
    {
        if self.needs_percentiles() {
            panic!("percentiles unsupported in from_aligned");
        }
        self.validate_computed_version_or_reset(
            VERSION + first_indexes.version() + count_indexes.version(),
        )?;
        let index = self.starting_index(max_from);
        let mut source_first_iter = source.first.as_ref().map(|f| f.iter());
        let mut source_last_iter = source.last.as_ref().map(|f| f.iter());
        let mut source_max_iter = source.max.as_ref().map(|f| f.iter());
        let mut source_min_iter = source.min.as_ref().map(|f| f.iter());
        let mut source_average_iter = source.average.as_ref().map(|f| f.iter());
        let mut source_sum_iter = source.sum.as_ref().map(|f| f.iter());
        // Running total resumed from the previous cumulative entry (if any).
        let mut cumulative = self.cumulative.as_mut().map(|cumulative_vec| {
            index.decremented().map_or(T::from(0_usize), |index| {
                cumulative_vec.iter().get_unwrap(index)
            })
        });
        let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
        first_indexes
            .iter()
            .enumerate()
            .skip(index.to_usize())
            .try_for_each(|(index, first_index, ..)| -> Result<()> {
                let count_index = count_indexes_iter.next().unwrap();
                if let Some(first) = self.first.as_mut() {
                    let v = source_first_iter.um().get_unwrap(first_index);
                    first.truncate_push_at(index, v)?;
                }
                if let Some(last) = self.last.as_mut() {
                    let count_index = *count_index as usize;
                    if count_index == 0 {
                        panic!("should compute last if count can be 0")
                    }
                    let last_index = first_index + (count_index - 1);
                    let v = source_last_iter.um().get_unwrap(last_index);
                    last.truncate_push_at(index, v)?;
                }
                let needs_minmax = self.needs_minmax();
                let needs_aggregates = self.needs_average_sum_or_cumulative();
                if needs_minmax || needs_aggregates {
                    // Min/max: use streaming O(n) instead of sort O(n log n)
                    if needs_minmax {
                        if let Some(max) = self.max.as_mut() {
                            let source_max_iter = source_max_iter.um();
                            source_max_iter.set_position(first_index);
                            let max_val =
                                source_max_iter.take(*count_index as usize).max().unwrap();
                            max.truncate_push_at(index, max_val)?;
                        }
                        if let Some(min) = self.min.as_mut() {
                            let source_min_iter = source_min_iter.um();
                            source_min_iter.set_position(first_index);
                            let min_val =
                                source_min_iter.take(*count_index as usize).min().unwrap();
                            min.truncate_push_at(index, min_val)?;
                        }
                    }
                    if needs_aggregates {
                        if let Some(average) = self.average.as_mut() {
                            let source_average_iter = source_average_iter.um();
                            source_average_iter.set_position(first_index);
                            let mut len = 0usize;
                            // Average of the finer averages (unweighted).
                            let sum = (&mut *source_average_iter)
                                .take(*count_index as usize)
                                .inspect(|_| len += 1)
                                .fold(T::from(0), |a, b| a + b);
                            // TODO: Multiply by count then divide by cumulative
                            // Right now it's not 100% accurate as there could be more or less elements in the lower timeframe (28 days vs 31 days in a month for example)
                            // len == 0 handled by T's Div<usize> returning NaN
                            let avg = sum / len;
                            average.truncate_push_at(index, avg)?;
                        }
                        if self.needs_sum_or_cumulative() {
                            let source_sum_iter = source_sum_iter.um();
                            source_sum_iter.set_position(first_index);
                            let sum = source_sum_iter
                                .take(*count_index as usize)
                                .fold(T::from(0), |a, b| a + b);
                            if let Some(sum_vec) = self.sum.as_mut() {
                                sum_vec.truncate_push_at(index, sum)?;
                            }
                            if let Some(cumulative_vec) = self.cumulative.as_mut() {
                                let t = cumulative.unwrap() + sum;
                                cumulative.replace(t);
                                cumulative_vec.truncate_push_at(index, t)?;
                            }
                        }
                    }
                }
                Ok(())
            })?;
        let _lock = exit.lock();
        self.write()?;
        Ok(())
    }
    /// Index to resume computation from: `max_from` clamped to the length of
    /// the shortest exportable vec.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(
            self.iter_any_exportable().map(|v| v.len()).min().unwrap(),
        ))
    }
    // The accessors below panic (via `u()`, presumably OptionExt's unwrap
    // helper — verify in utils) when the vec was not enabled at import time.
    #[inline]
    pub fn unwrap_first(&self) -> &EagerVec<PcoVec<I, T>> {
        self.first.u()
    }
    #[inline]
    pub fn unwrap_average(&self) -> &EagerVec<PcoVec<I, T>> {
        self.average.u()
    }
    #[inline]
    pub fn unwrap_sum(&self) -> &EagerVec<PcoVec<I, T>> {
        self.sum.u()
    }
    #[inline]
    pub fn unwrap_max(&self) -> &EagerVec<PcoVec<I, T>> {
        self.max.u()
    }
    #[inline]
    pub fn unwrap_pct90(&self) -> &EagerVec<PcoVec<I, T>> {
        self.pct90.u()
    }
    #[inline]
    pub fn unwrap_pct75(&self) -> &EagerVec<PcoVec<I, T>> {
        self.pct75.u()
    }
    #[inline]
    pub fn unwrap_median(&self) -> &EagerVec<PcoVec<I, T>> {
        self.median.u()
    }
    #[inline]
    pub fn unwrap_pct25(&self) -> &EagerVec<PcoVec<I, T>> {
        self.pct25.u()
    }
    #[inline]
    pub fn unwrap_pct10(&self) -> &EagerVec<PcoVec<I, T>> {
        self.pct10.u()
    }
    #[inline]
    pub fn unwrap_min(&self) -> &EagerVec<PcoVec<I, T>> {
        self.min.u()
    }
    #[inline]
    pub fn unwrap_last(&self) -> &EagerVec<PcoVec<I, T>> {
        self.last.u()
    }
    #[inline]
    pub fn unwrap_cumulative(&self) -> &EagerVec<PcoVec<I, T>> {
        self.cumulative.u()
    }
    /// Flushes every enabled vec to storage.
    pub fn write(&mut self) -> Result<()> {
        if let Some(first) = self.first.as_mut() {
            first.write()?;
        }
        if let Some(last) = self.last.as_mut() {
            last.write()?;
        }
        if let Some(min) = self.min.as_mut() {
            min.write()?;
        }
        if let Some(max) = self.max.as_mut() {
            max.write()?;
        }
        if let Some(median) = self.median.as_mut() {
            median.write()?;
        }
        if let Some(average) = self.average.as_mut() {
            average.write()?;
        }
        if let Some(sum) = self.sum.as_mut() {
            sum.write()?;
        }
        if let Some(cumulative) = self.cumulative.as_mut() {
            cumulative.write()?;
        }
        if let Some(pct90) = self.pct90.as_mut() {
            pct90.write()?;
        }
        if let Some(pct75) = self.pct75.as_mut() {
            pct75.write()?;
        }
        if let Some(pct25) = self.pct25.as_mut() {
            pct25.write()?;
        }
        if let Some(pct10) = self.pct10.as_mut() {
            pct10.write()?;
        }
        Ok(())
    }
    /// Propagates dependency-version validation to every enabled vec,
    /// resetting stale ones.
    pub fn validate_computed_version_or_reset(&mut self, dep_version: Version) -> Result<()> {
        if let Some(first) = self.first.as_mut() {
            first.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(last) = self.last.as_mut() {
            last.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(min) = self.min.as_mut() {
            min.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(max) = self.max.as_mut() {
            max.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(median) = self.median.as_mut() {
            median.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(average) = self.average.as_mut() {
            average.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(sum) = self.sum.as_mut() {
            sum.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(cumulative) = self.cumulative.as_mut() {
            cumulative.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(pct90) = self.pct90.as_mut() {
            pct90.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(pct75) = self.pct75.as_mut() {
            pct75.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(pct25) = self.pct25.as_mut() {
            pct25.validate_computed_version_or_reset(dep_version)?;
        }
        if let Some(pct10) = self.pct10.as_mut() {
            pct10.validate_computed_version_or_reset(dep_version)?;
        }
        Ok(())
    }
}
/// Flag set selecting which statistic vecs an [`EagerVecsBuilder`] creates.
/// All flags default to off; enable them via the `add_*` builder methods.
#[derive(Default, Clone, Copy)]
pub struct VecBuilderOptions {
    average: bool,
    sum: bool,
    max: bool,
    pct90: bool,
    pct75: bool,
    median: bool,
    pct25: bool,
    pct10: bool,
    min: bool,
    first: bool,
    last: bool,
    cumulative: bool,
}
/// Generates, for each flag: a getter named after the field, an `add_*`
/// enabler and (optionally) an `rm_*` disabler — all by-value builders.
macro_rules! flag_accessors {
    ($($field:ident => $add:ident $(, $rm:ident)?);* $(;)?) => {
        $(
            pub fn $field(&self) -> bool {
                self.$field
            }
            pub fn $add(mut self) -> Self {
                self.$field = true;
                self
            }
            $(
                pub fn $rm(mut self) -> Self {
                    self.$field = false;
                    self
                }
            )?
        )*
    };
}
impl VecBuilderOptions {
    flag_accessors! {
        average => add_average, rm_average;
        sum => add_sum, rm_sum;
        max => add_max, rm_max;
        pct90 => add_pct90, rm_pct90;
        pct75 => add_pct75, rm_pct75;
        median => add_median, rm_median;
        pct25 => add_pct25, rm_pct25;
        pct10 => add_pct10, rm_pct10;
        min => add_min, rm_min;
        first => add_first;
        last => add_last;
        cumulative => add_cumulative, rm_cumulative;
    }
    /// Enables both min and max.
    pub fn add_minmax(self) -> Self {
        self.add_min().add_max()
    }
    /// Enables every percentile vec (including the median).
    pub fn add_percentiles(self) -> Self {
        self.add_pct90()
            .add_pct75()
            .add_median()
            .add_pct25()
            .add_pct10()
    }
    /// Disables every percentile vec (including the median).
    pub fn remove_percentiles(self) -> Self {
        self.rm_pct90()
            .rm_pct75()
            .rm_median()
            .rm_pct25()
            .rm_pct10()
    }
    /// True when exactly one statistic is enabled (the resulting vec then
    /// takes the bare name instead of a suffixed one).
    pub fn is_only_one_active(&self) -> bool {
        let flags = [
            self.average,
            self.sum,
            self.max,
            self.pct90,
            self.pct75,
            self.median,
            self.pct25,
            self.pct10,
            self.min,
            self.first,
            self.last,
            self.cumulative,
        ];
        flags.into_iter().filter(|&b| b).count() == 1
    }
    /// New option set keeping only the `cumulative` flag from `self`.
    pub fn copy_self_extra(&self) -> Self {
        Self {
            cumulative: self.cumulative,
            ..Self::default()
        }
    }
}

View File

@@ -1,361 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{FromCoarserIndex, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2, VecIndex};
use crate::internal::{EagerVecsBuilder, VecBuilderOptions};
use crate::utils::OptionExt;
use super::super::ComputedVecValue;
#[allow(clippy::type_complexity)]
#[derive(Clone, Traversable)]
pub struct LazyVecsBuilder<I, T, S1I, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1I: VecIndex,
S2T: ComputedVecValue,
{
pub first: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub average: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub sum: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub max: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub min: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub last: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
pub cumulative: Option<Box<LazyVecFrom2<I, T, S1I, T, I, S2T>>>,
}
const VERSION: Version = Version::ZERO;
impl<I, T, S1I, S2T> LazyVecsBuilder<I, T, S1I, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1I: VecIndex + 'static + FromCoarserIndex<I>,
S2T: ComputedVecValue,
{
#[allow(clippy::too_many_arguments)]
pub fn forced_import(
name: &str,
version: Version,
source: Option<IterableBoxedVec<S1I, T>>,
source_extra: &EagerVecsBuilder<S1I, T>,
len_source: IterableBoxedVec<I, S2T>,
options: LazyVecBuilderOptions,
) -> Self {
let only_one_active = options.is_only_one_active();
let suffix = |s: &str| format!("{name}_{s}");
let maybe_suffix = |s: &str| {
if only_one_active {
name.to_string()
} else {
suffix(s)
}
};
Self {
first: options.first.then(|| {
Box::new(LazyVecFrom2::init(
&maybe_suffix("first"),
version + VERSION,
source_extra
.first
.as_ref()
.map_or_else(|| source.u().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::min_from(i))
},
))
}),
last: options.last.then(|| {
Box::new(LazyVecFrom2::init(
name,
version + VERSION,
source_extra.last.as_ref().map_or_else(
|| {
source
.as_ref()
.unwrap_or_else(|| {
dbg!(name, I::to_string());
panic!()
})
.clone()
},
|v| v.clone(),
),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::max_from(i, source.vec_len()))
},
))
}),
min: options.min.then(|| {
Box::new(LazyVecFrom2::init(
&maybe_suffix("min"),
version + VERSION,
source_extra
.min
.as_ref()
.map_or_else(|| source.u().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.min()
},
))
}),
max: options.max.then(|| {
Box::new(LazyVecFrom2::init(
&maybe_suffix("max"),
version + VERSION,
source_extra
.max
.as_ref()
.map_or_else(|| source.u().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
.max()
},
))
}),
average: options.average.then(|| {
Box::new(LazyVecFrom2::init(
&maybe_suffix("avg"),
version + VERSION,
source_extra
.average
.as_ref()
.map_or_else(|| source.u().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
let mut sum = T::from(0);
let mut len = 0usize;
for v in S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
{
sum += v;
len += 1;
}
if len == 0 {
return None;
}
Some(sum / len)
},
))
}),
sum: options.sum.then(|| {
Box::new(LazyVecFrom2::init(
&(if !options.last && !options.average && !options.min && !options.max {
name.to_string()
} else {
maybe_suffix("sum")
}),
version + VERSION,
source_extra
.sum
.as_ref()
.map_or_else(|| source.u().clone(), |v| v.clone()),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
let mut sum = T::from(0);
let mut has_values = false;
for v in S1I::inclusive_range_from(i, source.vec_len())
.flat_map(|i| source.get_at(i))
{
sum += v;
has_values = true;
}
if !has_values {
return None;
}
Some(sum)
},
))
}),
cumulative: options.cumulative.then(|| {
Box::new(LazyVecFrom2::init(
&suffix("cumulative"),
version + VERSION,
source_extra.cumulative.u().boxed_clone(),
len_source.clone(),
|i: I, source, len_source| {
if i.to_usize() >= len_source.vec_len() {
return None;
}
source.get_at(S1I::max_from(i, source.vec_len()))
},
))
}),
}
}
pub fn starting_index(&self, max_from: I) -> I {
max_from.min(I::from(
self.iter_any_exportable().map(|v| v.len()).min().unwrap(),
))
}
pub fn unwrap_first(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.first.u()
}
pub fn unwrap_average(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.average.u()
}
pub fn unwrap_sum(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.sum.u()
}
pub fn unwrap_max(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.max.u()
}
pub fn unwrap_min(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.min.u()
}
pub fn unwrap_last(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.last.u()
}
pub fn unwrap_cumulative(&self) -> &LazyVecFrom2<I, T, S1I, T, I, S2T> {
self.cumulative.u()
}
}
#[derive(Default, Clone, Copy)]
pub struct LazyVecBuilderOptions {
average: bool,
sum: bool,
max: bool,
min: bool,
first: bool,
last: bool,
cumulative: bool,
}
impl From<VecBuilderOptions> for LazyVecBuilderOptions {
#[inline]
fn from(value: VecBuilderOptions) -> Self {
Self {
average: value.average(),
sum: value.sum(),
max: value.max(),
min: value.min(),
first: value.first(),
last: value.last(),
cumulative: value.cumulative(),
}
}
}
impl LazyVecBuilderOptions {
pub fn add_first(mut self) -> Self {
self.first = true;
self
}
pub fn add_last(mut self) -> Self {
self.last = true;
self
}
pub fn add_min(mut self) -> Self {
self.min = true;
self
}
pub fn add_max(mut self) -> Self {
self.max = true;
self
}
pub fn add_average(mut self) -> Self {
self.average = true;
self
}
pub fn add_sum(mut self) -> Self {
self.sum = true;
self
}
pub fn add_cumulative(mut self) -> Self {
self.cumulative = true;
self
}
pub fn rm_min(mut self) -> Self {
self.min = false;
self
}
pub fn rm_max(mut self) -> Self {
self.max = false;
self
}
pub fn rm_average(mut self) -> Self {
self.average = false;
self
}
pub fn rm_sum(mut self) -> Self {
self.sum = false;
self
}
pub fn rm_cumulative(mut self) -> Self {
self.cumulative = false;
self
}
pub fn add_minmax(mut self) -> Self {
self.min = true;
self.max = true;
self
}
pub fn is_only_one_active(&self) -> bool {
[
self.average,
self.sum,
self.max,
self.min,
self.first,
self.last,
self.cumulative,
]
.iter()
.filter(|b| **b)
.count()
== 1
}
pub fn copy_self_extra(&self) -> Self {
Self {
cumulative: self.cumulative,
..Self::default()
}
}
}

View File

@@ -1,9 +0,0 @@
mod eager;
mod lazy;
mod transform;
mod transform2;
pub use eager::*;
pub use lazy::*;
pub use transform::*;
pub use transform2::*;

View File

@@ -1,224 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, LazyVecFrom1, UnaryTransform, VecIndex};
use super::{
super::ComputedVecValue,
eager::EagerVecsBuilder,
lazy::LazyVecsBuilder,
};
const VERSION: Version = Version::ZERO;
/// Lazy transform version of `EagerVecsBuilder`.
/// Each group is a `LazyVecFrom1` that transforms from the corresponding stored group.
/// S1T is the source type, T is the output type (can be the same for transforms like negation).
#[derive(Clone, Traversable)]
pub struct LazyTransformBuilder<I, T, S1T = T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
{
pub first: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub average: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub sum: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub max: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct90: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct75: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub median: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct25: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub pct10: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub min: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub last: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
pub cumulative: Option<Box<LazyVecFrom1<I, T, I, S1T>>>,
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a stored `EagerVecsBuilder`.
/// F is the transform type (e.g., `Negate`, `Halve`).
pub fn from_eager<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &EagerVecsBuilder<I, S1T>,
) -> Self {
let v = version + VERSION;
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source.first.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("first"),
v,
s.boxed_clone(),
))
}),
average: source.average.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("avg"),
v,
s.boxed_clone(),
))
}),
sum: source.sum.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("sum"),
v,
s.boxed_clone(),
))
}),
max: source.max.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("max"),
v,
s.boxed_clone(),
))
}),
pct90: source.pct90.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct90"),
v,
s.boxed_clone(),
))
}),
pct75: source.pct75.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct75"),
v,
s.boxed_clone(),
))
}),
median: source.median.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("median"),
v,
s.boxed_clone(),
))
}),
pct25: source.pct25.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct25"),
v,
s.boxed_clone(),
))
}),
pct10: source.pct10.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("pct10"),
v,
s.boxed_clone(),
))
}),
min: source.min.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("min"),
v,
s.boxed_clone(),
))
}),
last: source
.last
.as_ref()
.map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
cumulative: source.cumulative.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("cumulative"),
v,
s.boxed_clone(),
))
}),
}
}
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
{
pub fn unwrap_sum(&self) -> &LazyVecFrom1<I, T, I, S1T> {
self.sum.as_ref().unwrap()
}
pub fn unwrap_cumulative(&self) -> &LazyVecFrom1<I, T, I, S1T> {
self.cumulative.as_ref().unwrap()
}
}
impl<I, T, S1T> LazyTransformBuilder<I, T, S1T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a `LazyVecsBuilder`.
/// Note: LazyVecsBuilder doesn't have percentiles, so those will be None.
pub fn from_lazy<F: UnaryTransform<S1T, T>, S1I: VecIndex, S2T: ComputedVecValue>(
name: &str,
version: Version,
source: &LazyVecsBuilder<I, S1T, S1I, S2T>,
) -> Self {
let v = version + VERSION;
// Use same suffix pattern as EagerVecsBuilder
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source.first.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("first"),
v,
s.boxed_clone(),
))
}),
average: source.average.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("avg"),
v,
s.boxed_clone(),
))
}),
sum: source.sum.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("sum"),
v,
s.boxed_clone(),
))
}),
max: source.max.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("max"),
v,
s.boxed_clone(),
))
}),
pct90: None,
pct75: None,
median: None,
pct25: None,
pct10: None,
min: source.min.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("min"),
v,
s.boxed_clone(),
))
}),
last: source
.last
.as_ref()
.map(|s| Box::new(LazyVecFrom1::transformed::<F>(name, v, s.boxed_clone()))),
cumulative: source.cumulative.as_ref().map(|s| {
Box::new(LazyVecFrom1::transformed::<F>(
&suffix("cumulative"),
v,
s.boxed_clone(),
))
}),
}
}
}

View File

@@ -1,240 +0,0 @@
use brk_traversable::Traversable;
use brk_types::Version;
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec, LazyVecFrom2, VecIndex};
use super::{
super::ComputedVecValue,
eager::EagerVecsBuilder,
lazy::LazyVecsBuilder,
};
const VERSION: Version = Version::ZERO;
/// Lazy binary transform builder.
/// Each group is a `LazyVecFrom2` that transforms from two corresponding stored groups.
#[derive(Clone, Traversable)]
#[allow(clippy::type_complexity)]
pub struct LazyTransform2Builder<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
S1T: ComputedVecValue,
S2T: ComputedVecValue,
{
pub first: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub average: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub sum: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub max: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub min: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub last: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
pub cumulative: Option<Box<LazyVecFrom2<I, T, I, S1T, I, S2T>>>,
}
impl<I, T, S1T, S2T> LazyTransform2Builder<I, T, S1T, S2T>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
/// Create a lazy binary transform from two stored `EagerVecsBuilder`.
pub fn from_eager<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &EagerVecsBuilder<I, S1T>,
source2: &EagerVecsBuilder<I, S2T>,
) -> Self {
let v = version + VERSION;
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source1
.first
.as_ref()
.zip(source2.first.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("first"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
average: source1
.average
.as_ref()
.zip(source2.average.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("avg"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
sum: source1
.sum
.as_ref()
.zip(source2.sum.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("sum"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
max: source1
.max
.as_ref()
.zip(source2.max.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("max"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
min: source1
.min
.as_ref()
.zip(source2.min.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("min"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
last: source1
.last
.as_ref()
.zip(source2.last.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
name,
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
cumulative: source1
.cumulative
.as_ref()
.zip(source2.cumulative.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("cumulative"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
}
}
/// Create a lazy binary transform from two `LazyVecsBuilder`.
pub fn from_lazy<
F: BinaryTransform<S1T, S2T, T>,
S1I: VecIndex,
S1E: ComputedVecValue,
S2I: VecIndex,
S2E: ComputedVecValue,
>(
name: &str,
version: Version,
source1: &LazyVecsBuilder<I, S1T, S1I, S1E>,
source2: &LazyVecsBuilder<I, S2T, S2I, S2E>,
) -> Self {
let v = version + VERSION;
let suffix = |s: &str| format!("{name}_{s}");
Self {
first: source1
.first
.as_ref()
.zip(source2.first.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("first"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
average: source1
.average
.as_ref()
.zip(source2.average.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("avg"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
sum: source1
.sum
.as_ref()
.zip(source2.sum.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("sum"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
max: source1
.max
.as_ref()
.zip(source2.max.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("max"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
min: source1
.min
.as_ref()
.zip(source2.min.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("min"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
last: source1
.last
.as_ref()
.zip(source2.last.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
name,
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
cumulative: source1
.cumulative
.as_ref()
.zip(source2.cumulative.as_ref())
.map(|(s1, s2)| {
Box::new(LazyVecFrom2::transformed::<F>(
&suffix("cumulative"),
v,
s1.boxed_clone(),
s2.boxed_clone(),
))
}),
}
}
}

View File

@@ -0,0 +1,521 @@
//! Compute functions for aggregation - take optional vecs, compute what's needed.
//!
//! These functions replace the Option-based compute logic in flexible builders.
//! Each function takes optional mutable references and computes only for Some() vecs.
use brk_error::{Error, Result};
use brk_types::{CheckedSub, StoredU64};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, IterableVec, PcoVec, VecIndex, VecValue,
};
use crate::utils::get_percentile;
use super::ComputedVecValue;
/// Validate a single target vec against `combined_version` (resetting it when
/// stale) and fold its already-computed length into the running start index,
/// so computation resumes from the least-advanced target.
fn validate_and_start<I: VecIndex, T: ComputedVecValue + JsonSchema>(
    vec: &mut EagerVec<PcoVec<I, T>>,
    combined_version: vecdb::Version,
    current_start: I,
) -> Result<I> {
    vec.validate_computed_version_or_reset(combined_version)?;
    let resumable = I::from(vec.len());
    Ok(if resumable < current_start {
        resumable
    } else {
        current_start
    })
}
/// Compute aggregations from a source vec into target vecs.
///
/// This function computes all requested aggregations in a single pass when possible,
/// optimizing for the common case where multiple aggregations are needed.
#[allow(clippy::too_many_arguments)]
pub fn compute_aggregations<I, T, A>(
max_from: I,
source: &impl IterableVec<A, T>,
first_indexes: &impl IterableVec<I, A>,
count_indexes: &impl IterableVec<I, StoredU64>,
exit: &Exit,
mut first: Option<&mut EagerVec<PcoVec<I, T>>>,
mut last: Option<&mut EagerVec<PcoVec<I, T>>>,
mut min: Option<&mut EagerVec<PcoVec<I, T>>>,
mut max: Option<&mut EagerVec<PcoVec<I, T>>>,
mut average: Option<&mut EagerVec<PcoVec<I, T>>>,
mut sum: Option<&mut EagerVec<PcoVec<I, T>>>,
mut cumulative: Option<&mut EagerVec<PcoVec<I, T>>>,
mut median: Option<&mut EagerVec<PcoVec<I, T>>>,
mut pct10: Option<&mut EagerVec<PcoVec<I, T>>>,
mut pct25: Option<&mut EagerVec<PcoVec<I, T>>>,
mut pct75: Option<&mut EagerVec<PcoVec<I, T>>>,
mut pct90: Option<&mut EagerVec<PcoVec<I, T>>>,
) -> Result<()>
where
I: VecIndex,
T: ComputedVecValue + JsonSchema,
A: VecIndex + VecValue + CheckedSub<A>,
{
let combined_version = source.version() + first_indexes.version() + count_indexes.version();
let mut starting_index = max_from;
if let Some(ref mut v) = first {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = last {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = min {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = max {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = average {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = sum {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = cumulative {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = median {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = pct10 {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = pct25 {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = pct75 {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
if let Some(ref mut v) = pct90 {
starting_index = validate_and_start(v, combined_version, starting_index)?;
}
let index = starting_index;
let needs_first = first.is_some();
let needs_last = last.is_some();
let needs_min = min.is_some();
let needs_max = max.is_some();
let needs_average = average.is_some();
let needs_sum = sum.is_some();
let needs_cumulative = cumulative.is_some();
let needs_percentiles = median.is_some()
|| pct10.is_some()
|| pct25.is_some()
|| pct75.is_some()
|| pct90.is_some();
let needs_minmax = needs_min || needs_max;
let needs_sum_or_cumulative = needs_sum || needs_cumulative;
let needs_aggregates = needs_sum_or_cumulative || needs_average;
if !needs_first && !needs_last && !needs_minmax && !needs_aggregates && !needs_percentiles {
return Ok(());
}
let mut source_iter = source.iter();
let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| {
index.decremented().map_or(T::from(0_usize), |idx| {
cumulative_vec.iter().get_unwrap(idx)
})
});
let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
first_indexes
.iter()
.enumerate()
.skip(index.to_usize())
.try_for_each(|(idx, first_index)| -> Result<()> {
let count_index = count_indexes_iter.next().unwrap();
let count = *count_index as usize;
if let Some(ref mut first_vec) = first {
let f = source_iter
.get(first_index)
.unwrap_or_else(|| T::from(0_usize));
first_vec.truncate_push_at(idx, f)?;
}
if let Some(ref mut last_vec) = last {
if count == 0 {
panic!("should not compute last if count can be 0");
}
let last_index = first_index + (count - 1);
let v = source_iter.get_unwrap(last_index);
last_vec.truncate_push_at(idx, v)?;
}
// Fast path: only min/max needed, no sorting or allocation required
if needs_minmax && !needs_percentiles && !needs_aggregates {
source_iter.set_position(first_index);
let mut min_val: Option<T> = None;
let mut max_val: Option<T> = None;
for val in (&mut source_iter).take(count) {
if needs_min {
min_val = Some(min_val.map_or(val, |m| if val < m { val } else { m }));
}
if needs_max {
max_val = Some(max_val.map_or(val, |m| if val > m { val } else { m }));
}
}
if let Some(ref mut min_vec) = min {
min_vec.truncate_push_at(idx, min_val.unwrap())?;
}
if let Some(ref mut max_vec) = max {
max_vec.truncate_push_at(idx, max_val.unwrap())?;
}
} else if needs_percentiles || needs_aggregates || needs_minmax {
source_iter.set_position(first_index);
let mut values: Vec<T> = (&mut source_iter).take(count).collect();
if needs_percentiles {
values.sort_unstable();
if let Some(ref mut max_vec) = max {
max_vec.truncate_push_at(
idx,
*values
.last()
.ok_or(Error::Internal("Empty values for percentiles"))?,
)?;
}
if let Some(ref mut pct90_vec) = pct90 {
pct90_vec.truncate_push_at(idx, get_percentile(&values, 0.90))?;
}
if let Some(ref mut pct75_vec) = pct75 {
pct75_vec.truncate_push_at(idx, get_percentile(&values, 0.75))?;
}
if let Some(ref mut median_vec) = median {
median_vec.truncate_push_at(idx, get_percentile(&values, 0.50))?;
}
if let Some(ref mut pct25_vec) = pct25 {
pct25_vec.truncate_push_at(idx, get_percentile(&values, 0.25))?;
}
if let Some(ref mut pct10_vec) = pct10 {
pct10_vec.truncate_push_at(idx, get_percentile(&values, 0.10))?;
}
if let Some(ref mut min_vec) = min {
min_vec.truncate_push_at(idx, *values.first().unwrap())?;
}
} else if needs_minmax {
if let Some(ref mut min_vec) = min {
min_vec.truncate_push_at(idx, *values.iter().min().unwrap())?;
}
if let Some(ref mut max_vec) = max {
max_vec.truncate_push_at(idx, *values.iter().max().unwrap())?;
}
}
if needs_aggregates {
let len = values.len();
let sum_val = values.into_iter().fold(T::from(0), |a, b| a + b);
if let Some(ref mut average_vec) = average {
average_vec.truncate_push_at(idx, sum_val / len)?;
}
if needs_sum_or_cumulative {
if let Some(ref mut sum_vec) = sum {
sum_vec.truncate_push_at(idx, sum_val)?;
}
if let Some(ref mut cumulative_vec) = cumulative {
let t = cumulative_val.unwrap() + sum_val;
cumulative_val.replace(t);
cumulative_vec.truncate_push_at(idx, t)?;
}
}
}
}
Ok(())
})?;
let _lock = exit.lock();
if let Some(v) = first {
v.write()?;
}
if let Some(v) = last {
v.write()?;
}
if let Some(v) = min {
v.write()?;
}
if let Some(v) = max {
v.write()?;
}
if let Some(v) = average {
v.write()?;
}
if let Some(v) = sum {
v.write()?;
}
if let Some(v) = cumulative {
v.write()?;
}
if let Some(v) = median {
v.write()?;
}
if let Some(v) = pct10 {
v.write()?;
}
if let Some(v) = pct25 {
v.write()?;
}
if let Some(v) = pct75 {
v.write()?;
}
if let Some(v) = pct90 {
v.write()?;
}
Ok(())
}
/// Extend a cumulative vec from a source vec of per-index deltas.
///
/// Used when only the cumulative series needs to catch up with an existing
/// source. Resumes from the smaller of `max_from` and the cumulative vec's
/// current length, seeding the running total with the last stored value.
///
/// # Errors
/// Fails when version validation, a push, or the final write fails.
pub fn compute_cumulative_extend<I, T>(
    max_from: I,
    source: &impl IterableVec<I, T>,
    cumulative: &mut EagerVec<PcoVec<I, T>>,
    exit: &Exit,
) -> Result<()>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
{
    cumulative.validate_computed_version_or_reset(source.version())?;

    let resume_at = max_from.min(I::from(cumulative.len()));

    // Seed the running total from the last value already stored, or zero
    // when starting from the beginning.
    let mut running = match resume_at.decremented() {
        Some(prev) => cumulative.iter().get_unwrap(prev),
        None => T::from(0_usize),
    };

    for (i, delta) in source.iter().enumerate().skip(resume_at.to_usize()) {
        running += delta;
        cumulative.truncate_push_at(i, running)?;
    }

    // Persist under the exit lock (presumably guards against shutdown
    // interleaving with the write — confirm `Exit` semantics).
    let _lock = exit.lock();
    cumulative.write()?;
    Ok(())
}
/// Compute coarser aggregations from already-aggregated source data.
///
/// This is used for dateindex → weekindex, monthindex, etc. where we derive
/// coarser aggregations from finer ones.
///
/// NOTE: Percentiles are NOT supported - they cannot be derived from finer percentiles.
#[allow(clippy::too_many_arguments)]
pub fn compute_aggregations_from_aligned<I, T, A>(
    max_from: I,
    first_indexes: &impl IterableVec<I, A>,
    count_indexes: &impl IterableVec<I, StoredU64>,
    exit: &Exit,
    // Source vecs (already aggregated at finer level)
    source_first: Option<&EagerVec<PcoVec<A, T>>>,
    source_last: Option<&EagerVec<PcoVec<A, T>>>,
    source_min: Option<&EagerVec<PcoVec<A, T>>>,
    source_max: Option<&EagerVec<PcoVec<A, T>>>,
    source_average: Option<&EagerVec<PcoVec<A, T>>>,
    source_sum: Option<&EagerVec<PcoVec<A, T>>>,
    // Target vecs
    mut first: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut last: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut min: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut max: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut average: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut sum: Option<&mut EagerVec<PcoVec<I, T>>>,
    mut cumulative: Option<&mut EagerVec<PcoVec<I, T>>>,
) -> Result<()>
where
    I: VecIndex,
    T: ComputedVecValue + JsonSchema,
    A: VecIndex + VecValue + CheckedSub<A>,
{
    let combined_version = first_indexes.version() + count_indexes.version();
    // Validate every requested target; the resume point is the minimum of
    // `max_from` and each target's current length.
    let mut starting_index = max_from;
    if let Some(ref mut v) = first {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = last {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = min {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = max {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = average {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = sum {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    if let Some(ref mut v) = cumulative {
        starting_index = validate_and_start(v, combined_version, starting_index)?;
    }
    let index = starting_index;
    let needs_first = first.is_some();
    let needs_last = last.is_some();
    let needs_min = min.is_some();
    let needs_max = max.is_some();
    let needs_average = average.is_some();
    let needs_sum = sum.is_some();
    let needs_cumulative = cumulative.is_some();
    // Nothing requested at all: done.
    if !needs_first
        && !needs_last
        && !needs_min
        && !needs_max
        && !needs_average
        && !needs_sum
        && !needs_cumulative
    {
        return Ok(());
    }
    // One persistent iterator per finer-level source; positions are moved
    // per bucket below instead of re-creating iterators.
    let mut source_first_iter = source_first.map(|f| f.iter());
    let mut source_last_iter = source_last.map(|f| f.iter());
    let mut source_min_iter = source_min.map(|f| f.iter());
    let mut source_max_iter = source_max.map(|f| f.iter());
    let mut source_average_iter = source_average.map(|f| f.iter());
    let mut source_sum_iter = source_sum.map(|f| f.iter());
    // Seed the running cumulative total with the last value already stored
    // (zero when starting from scratch).
    let mut cumulative_val = cumulative.as_ref().map(|cumulative_vec| {
        index.decremented().map_or(T::from(0_usize), |idx| {
            cumulative_vec.iter().get_unwrap(idx)
        })
    });
    let mut count_indexes_iter = count_indexes.iter().skip(index.to_usize());
    first_indexes
        .iter()
        .enumerate()
        .skip(index.to_usize())
        .try_for_each(|(idx, first_index)| -> Result<()> {
            // `first_indexes` and `count_indexes` are expected to stay aligned;
            // a missing count here is a programming error.
            let count_index = count_indexes_iter.next().unwrap();
            let count = *count_index as usize;
            if let Some(ref mut first_vec) = first {
                // First of the coarser bucket = finer `first` at the bucket start.
                let source_iter = source_first_iter
                    .as_mut()
                    .expect("source_first required for first");
                let v = source_iter.get_unwrap(first_index);
                first_vec.truncate_push_at(idx, v)?;
            }
            if let Some(ref mut last_vec) = last {
                if count == 0 {
                    panic!("should not compute last if count can be 0");
                }
                // Last of the coarser bucket = finer `last` at the bucket end.
                let last_index = first_index + (count - 1);
                let source_iter = source_last_iter
                    .as_mut()
                    .expect("source_last required for last");
                let v = source_iter.get_unwrap(last_index);
                last_vec.truncate_push_at(idx, v)?;
            }
            if let Some(ref mut min_vec) = min {
                // Min over the finer-level mins of this bucket.
                // NOTE(review): `.min().unwrap()` panics on an empty bucket —
                // assumes count > 0 whenever min is requested; confirm.
                let source_iter = source_min_iter
                    .as_mut()
                    .expect("source_min required for min");
                source_iter.set_position(first_index);
                let min_val = source_iter.take(count).min().unwrap();
                min_vec.truncate_push_at(idx, min_val)?;
            }
            if let Some(ref mut max_vec) = max {
                // Max over the finer-level maxes of this bucket.
                let source_iter = source_max_iter
                    .as_mut()
                    .expect("source_max required for max");
                source_iter.set_position(first_index);
                let max_val = source_iter.take(count).max().unwrap();
                max_vec.truncate_push_at(idx, max_val)?;
            }
            if let Some(ref mut average_vec) = average {
                // Mean of the finer-level averages (unweighted).
                // NOTE(review): `len` is 0 for an empty bucket, which would make
                // the division below fail — assumes non-empty buckets; confirm.
                let source_iter = source_average_iter
                    .as_mut()
                    .expect("source_average required for average");
                source_iter.set_position(first_index);
                let mut len = 0usize;
                let sum_val = (&mut *source_iter)
                    .take(count)
                    .inspect(|_| len += 1)
                    .fold(T::from(0), |a, b| a + b);
                // TODO: Multiply by count then divide by cumulative for accuracy
                let average = sum_val / len;
                average_vec.truncate_push_at(idx, average)?;
            }
            if needs_sum || needs_cumulative {
                // Both sum and cumulative derive from the finer-level sums.
                let source_iter = source_sum_iter
                    .as_mut()
                    .expect("source_sum required for sum/cumulative");
                source_iter.set_position(first_index);
                let sum_val = source_iter.take(count).fold(T::from(0), |a, b| a + b);
                if let Some(ref mut sum_vec) = sum {
                    sum_vec.truncate_push_at(idx, sum_val)?;
                }
                if let Some(ref mut cumulative_vec) = cumulative {
                    let t = cumulative_val.unwrap() + sum_val;
                    cumulative_val.replace(t);
                    cumulative_vec.truncate_push_at(idx, t)?;
                }
            }
            Ok(())
        })?;
    // Persist all targets while holding the exit lock (presumably guards
    // against shutdown interleaving with the writes — confirm `Exit` semantics).
    let _lock = exit.lock();
    if let Some(v) = first {
        v.write()?;
    }
    if let Some(v) = last {
        v.write()?;
    }
    if let Some(v) = min {
        v.write()?;
    }
    if let Some(v) = max {
        v.write()?;
    }
    if let Some(v) = average {
        v.write()?;
    }
    if let Some(v) = sum {
        v.write()?;
    }
    if let Some(v) = cumulative {
        v.write()?;
    }
    Ok(())
}

View File

@@ -0,0 +1,69 @@
//! ComputedBlock with full stats aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockFull, NumericValue};
/// Height-indexed computed series together with the full-stats derivations
/// (see module doc: "ComputedBlock with full stats aggregation").
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockFull<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-height base values; everything in `rest` is derived from this vec.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    /// Derived aggregations computed from `height`; `Deref`/`DerefMut`
    /// forward to this field.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockFull<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedBlockFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Import (or create) the per-height vec under `name` plus all of its
    /// derived full-stats aggregations.
    ///
    /// # Errors
    /// Fails when either the height vec or the derived set cannot be imported.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let combined = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, combined)?;
        let rest = DerivedComputedBlockFull::forced_import(
            db,
            name,
            height.boxed_clone(),
            combined,
            indexes,
        )?;
        Ok(Self { height, rest })
    }

    /// Fill the height vec via `compute`, then derive every dependent index
    /// from it.
    ///
    /// # Errors
    /// Propagates failures from `compute` or from the derivation step.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedBlock using only LastVec aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockLast, NumericValue};
/// Height-indexed values with last-value-only derived aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values; everything in `rest` derives from this.
    /// Wrapped as "base" for consistency with the Full/Sum/SumCum siblings.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockLast<T>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedBlockLast<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire up derived aggregations.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
        let rest =
            DerivedComputedBlockLast::forced_import(db, name, height.boxed_clone(), v, indexes)?;
        Ok(Self { height, rest })
    }

    /// Fill `height` via `compute`, then refresh every derived aggregation.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,13 @@
//! Block-level computed types (height + dateindex + periods + difficultyepoch).
//!
//! For simpler chain-level types (height + difficultyepoch only), see `chain/`.
// Aggregation flavors for block-level computed vecs.
mod full; // full stats (see full.rs header)
mod last; // last-value only
mod sum; // sum only
mod sum_cum; // sum + running cumulative

// Flat re-exports; consumers refer to the types directly.
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,69 @@
//! ComputedBlock using Sum-only aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, NumericValue};
/// Height-indexed values with sum-only derived aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockSum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values; everything in `rest` derives from this.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockSum<T>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedBlockSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire up derived aggregations.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let source = height.boxed_clone();
        let rest = DerivedComputedBlockSum::forced_import(db, name, source, ver, indexes)?;
        Ok(Self { height, rest })
    }

    /// Fill `height` via `compute`, then refresh every derived aggregation.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,97 @@
//! ComputedBlock using SumCum aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableCloneableVec, IterableVec, PcoVec, VecIndex,
};
use crate::{indexes, ComputeIndexes};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSumCum, NumericValue};
/// Height-indexed values with sum + cumulative derived aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedBlockSumCum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values; everything in `rest` derives from this.
    #[traversable(wrap = "base")]
    pub height: EagerVec<PcoVec<Height, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedComputedBlockSumCum<T>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedBlockSumCum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire up derived aggregations.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
        let rest = DerivedComputedBlockSumCum::forced_import(
            db,
            name,
            height.boxed_clone(),
            v,
            indexes,
        )?;
        Ok(Self { height, rest })
    }

    /// Fill `height` via `compute`, then refresh every derived aggregation.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }

    /// Derive from an external height source (e.g., a LazyVec).
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        // Validate BEFORE reading `self.height.len()`: a version mismatch resets
        // the vec to empty, and the resume point must reflect the post-reset
        // length (same validate-then-len order as the other aggregators).
        self.height
            .validate_computed_version_or_reset(source.version())?;
        let target_len = source.len();
        let starting_height = starting_indexes.height.to_usize().min(self.height.len());
        let mut source_iter = source.iter();
        for h_idx in starting_height..target_len {
            let height = Height::from(h_idx);
            let value = source_iter.get_unwrap(height);
            self.height.truncate_push(height, value)?;
        }
        self.height.write()?;
        self.rest
            .derive_from(indexes, starting_indexes, &self.height, exit)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for first-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyFirst, NumericValue};
/// Height-indexed values plus a lazy first-value view per difficulty epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values.
    pub height: EagerVec<PcoVec<Height, T>>,
    /// Lazy first-value-per-epoch view over `height`.
    pub difficultyepoch: LazyFirst<DifficultyEpoch, T, Height, DifficultyEpoch>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedChainFirst<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire the lazy epoch view.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let epochs = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch = LazyFirst::from_source(name, ver, height.boxed_clone(), epochs);
        Ok(Self {
            height,
            difficultyepoch,
        })
    }

    /// Fill `height` via `compute`; the epoch view is lazy, nothing else to do.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for last-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyLast, NumericValue};
/// Height-indexed values plus a lazy last-value view per difficulty epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values.
    pub height: EagerVec<PcoVec<Height, T>>,
    /// Lazy last-value-per-epoch view over `height`.
    pub difficultyepoch: LazyLast<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainLast<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire the lazy epoch view.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
        let difficultyepoch = LazyLast::from_source(
            name,
            v,
            height.boxed_clone(),
            indexes
                .block
                .difficultyepoch_to_difficultyepoch
                .boxed_clone(),
        );
        Ok(Self {
            height,
            difficultyepoch,
        })
    }
    /// Fill `height` via `compute`; the epoch view is lazy, nothing else to do.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for max-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyMax, NumericValue};
/// Height-indexed values plus a lazy max-value view per difficulty epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values.
    pub height: EagerVec<PcoVec<Height, T>>,
    /// Lazy max-value-per-epoch view over `height`.
    pub difficultyepoch: LazyMax<DifficultyEpoch, T, Height, DifficultyEpoch>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedChainMax<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire the lazy epoch view.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, ver)?;
        let epochs = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch = LazyMax::from_source(name, ver, height.boxed_clone(), epochs);
        Ok(Self {
            height,
            difficultyepoch,
        })
    }

    /// Fill `height` via `compute`; the epoch view is lazy, nothing else to do.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)
    }
}

View File

@@ -0,0 +1,68 @@
//! ComputedChain for min-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, LazyMin, NumericValue};
/// Height-indexed values plus a lazy min-value view per difficulty epoch.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct ComputedChainMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-block source values.
    pub height: EagerVec<PcoVec<Height, T>>,
    /// Lazy min-value-per-epoch view over `height`.
    pub difficultyepoch: LazyMin<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedChainMin<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the backing height vec and wire the lazy epoch view.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height: EagerVec<PcoVec<Height, T>> = EagerVec::forced_import(db, name, v)?;
        let difficultyepoch = LazyMin::from_source(
            name,
            v,
            height.boxed_clone(),
            indexes
                .block
                .difficultyepoch_to_difficultyepoch
                .boxed_clone(),
        );
        Ok(Self {
            height,
            difficultyepoch,
        })
    }
    /// Fill `height` via `compute`; the epoch view is lazy, nothing else to do.
    pub fn compute<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,13 @@
//! Chain-level computed types (height + difficultyepoch only).
//!
//! These are simpler than block-level types which include dateindex + periods.
// One module per chain-level aggregation flavor.
mod first; // first value per epoch
mod last; // last value per epoch
mod max; // max value per epoch
mod min; // min value per epoch

// Flat re-exports; consumers refer to the types directly.
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only average-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateAverage};
/// Date-indexed values with average-value derived period aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateAverage<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-day source values; `rest` is built from clones of this.
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateAverage<T>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedVecsDateAverage<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Open (or create) the backing dateindex vec and wire the derived views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, ver)?;
        let rest = DerivedDateAverage::from_source(name, ver, dateindex.boxed_clone(), indexes);
        Ok(Self { dateindex, rest })
    }

    /// Fill `dateindex` via `compute`; no eager derived work is needed.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only first-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateFirst};
/// Date-indexed values with first-value derived period aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-day source values; `rest` is built from clones of this.
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateFirst<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateFirst<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Open (or create) the backing dateindex vec and wire the derived views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let dateindex = EagerVec::forced_import(db, name, version + VERSION)?;
        Ok(Self {
            rest: DerivedDateFirst::from_source(
                name,
                version + VERSION,
                dateindex.boxed_clone(),
                indexes,
            ),
            dateindex,
        })
    }
    /// Fill `dateindex` via `compute`; no eager derived work is needed.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,73 @@
//! ComputedVecsDate using only last-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateLast};
/// Date-indexed values with last-value derived period aggregations.
// NOTE(review): named `ComputedDateLast` while the sibling modules use the
// `ComputedVecsDate*` prefix — presumably an inconsistency; renaming would
// require updating callers, so it is only flagged here.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedDateLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-day source values; `rest` is built from clones of this.
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateLast<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedDateLast<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Open (or create) the backing dateindex vec and wire the derived views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let dateindex = EagerVec::forced_import(db, name, version + VERSION)?;
        Ok(Self {
            rest: DerivedDateLast::from_source(
                name,
                version + VERSION,
                dateindex.boxed_clone(),
                indexes,
            ),
            dateindex,
        })
    }
    /// Fill `dateindex` via `compute`; no eager derived work is needed.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
    /// Intentional no-op: all arguments are ignored and `Ok(())` is returned.
    // NOTE(review): presumably kept for signature parity with other Computed*
    // types that do eager post-processing — confirm against callers.
    pub fn compute_rest(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        _dateindex: Option<&impl IterableVec<DateIndex, T>>,
    ) -> Result<()> {
        Ok(())
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only max-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateMax};
/// Date-indexed values with max-value derived period aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-day source values; `rest` is built from clones of this.
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateMax<T>,
}

const VERSION: Version = Version::ZERO;

impl<T> ComputedVecsDateMax<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Open (or create) the backing dateindex vec and wire the derived views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let dateindex = EagerVec::forced_import(db, name, ver)?;
        let rest = DerivedDateMax::from_source(name, ver, dateindex.boxed_clone(), indexes);
        Ok(Self { dateindex, rest })
    }

    /// Fill `dateindex` via `compute`; no eager derived work is needed.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)
    }
}

View File

@@ -0,0 +1,64 @@
//! ComputedVecsDate using only min-value aggregation.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, DerivedDateMin};
/// Date-indexed values with min-value derived period aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedVecsDateMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Per-day source values; `rest` is built from clones of this.
    pub dateindex: EagerVec<PcoVec<DateIndex, T>>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: DerivedDateMin<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsDateMin<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Open (or create) the backing dateindex vec and wire the derived views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let dateindex = EagerVec::forced_import(db, name, version + VERSION)?;
        Ok(Self {
            rest: DerivedDateMin::from_source(
                name,
                version + VERSION,
                dateindex.boxed_clone(),
                indexes,
            ),
            dateindex,
        })
    }
    /// Fill `dateindex` via `compute`; no eager derived work is needed.
    pub fn compute_all<F>(
        &mut self,
        _starting_indexes: &ComputeIndexes,
        _exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(&mut self.dateindex)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,11 @@
//! Date-level computed types, one module per aggregation flavor.
mod average;
mod first;
mod last;
mod max;
mod min;

// Flat re-exports; consumers refer to the types directly.
pub use average::*;
pub use first::*;
pub use last::*;
pub use max::*;
pub use min::*;

View File

@@ -0,0 +1,93 @@
//! DerivedComputedBlockDistribution - dateindex storage + lazy time periods + difficultyepoch.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, DerivedDateDistribution, Distribution, LazyDistribution, NumericValue,
},
};
/// Distribution aggregations derived from an external height source:
/// eager dateindex storage plus lazy time-period and difficultyepoch views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockDistribution<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Eagerly computed per-day distribution stats.
    pub dateindex: Distribution<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateDistribution<T>,
    /// Lazy per-epoch distribution read straight from the height source.
    pub difficultyepoch: LazyDistribution<DifficultyEpoch, T, Height, DifficultyEpoch>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedComputedBlockDistribution<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the eager dateindex stats and wire the lazy views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let dateindex = Distribution::forced_import(db, name, ver)?;
        let dates = DerivedDateDistribution::from_sources(
            name,
            ver,
            dateindex.average.0.boxed_clone(),
            dateindex.minmax.min.0.boxed_clone(),
            dateindex.minmax.max.0.boxed_clone(),
            indexes,
        );
        let epochs = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch = LazyDistribution::from_distribution(
            name,
            ver,
            height_source.boxed_clone(),
            height_source.boxed_clone(),
            height_source,
            epochs,
        );
        Ok(Self {
            dateindex,
            dates,
            difficultyepoch,
        })
    }

    /// Recompute the eager per-day stats from `height_source`; the period and
    /// epoch views are lazy and need no eager pass.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.dateindex.compute(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )
    }
}

View File

@@ -0,0 +1,110 @@
//! DerivedComputedBlockFull - height_cumulative + dateindex storage + difficultyepoch + lazy time periods.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, CumulativeVec, DerivedDateFull, Full, LazyFull, NumericValue,
compute_cumulative_extend,
},
};
/// Full-stats aggregations derived from an external height source:
/// height cumulative + eager dateindex stats + lazy period/epoch views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockFull<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Running total over heights, eagerly extended.
    pub height_cumulative: CumulativeVec<Height, T>,
    /// Eagerly computed per-day full stats.
    pub dateindex: Full<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateFull<T>,
    /// Lazy per-epoch stats over the height source and its cumulative vec.
    pub difficultyepoch: LazyFull<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> DerivedComputedBlockFull<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the eager vecs and wire the lazy views off boxed clones.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height_cumulative = CumulativeVec::forced_import(db, name, v)?;
        let dateindex = Full::forced_import(db, name, v)?;
        Ok(Self {
            dates: DerivedDateFull::from_sources(
                name,
                v,
                dateindex.distribution.average.0.boxed_clone(),
                dateindex.distribution.minmax.min.0.boxed_clone(),
                dateindex.distribution.minmax.max.0.boxed_clone(),
                dateindex.sum_cum.sum.0.boxed_clone(),
                dateindex.sum_cum.cumulative.0.boxed_clone(),
                indexes,
            ),
            // The four height_source clones presumably feed the distinct lazy
            // stat views inside LazyFull — TODO confirm against
            // `LazyFull::from_stats_aggregate`'s parameter list.
            difficultyepoch: LazyFull::from_stats_aggregate(
                name,
                v,
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_source.boxed_clone(),
                height_cumulative.0.boxed_clone(),
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
            ),
            height_cumulative,
            dateindex,
        })
    }
    /// Refresh the eager vecs from `height_source`; lazy views need no pass.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        // Compute height_cumulative from external source
        self.compute_height_cumulative(starting_indexes.height, height_source, exit)?;
        // Compute dateindex aggregations
        self.dateindex.compute(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )?;
        Ok(())
    }
    /// Extend the cumulative vec from `height_source`, resuming at `max_from`.
    fn compute_height_cumulative(
        &mut self,
        max_from: Height,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        compute_cumulative_extend(max_from, height_source, &mut self.height_cumulative.0, exit)
    }
}

View File

@@ -0,0 +1,77 @@
//! DerivedComputedBlockLast - dateindex storage + difficultyepoch + lazy time periods.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableBoxedVec, IterableCloneableVec, IterableVec};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedVecValue, DerivedDateLast, LastVec, LazyLast, NumericValue},
};
/// Last-value aggregations derived from an external height source.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Eagerly computed last value per day.
    pub dateindex: LastVec<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateLast<T>,
    /// Lazy last-value-per-epoch view over the height source.
    pub difficultyepoch: LazyLast<DifficultyEpoch, T, Height, DifficultyEpoch>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedComputedBlockLast<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the eager dateindex vec and wire the lazy views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let ver = version + VERSION;
        let dateindex = LastVec::forced_import(db, name, ver)?;
        let dates = DerivedDateLast::from_source(name, ver, dateindex.0.boxed_clone(), indexes);
        let epochs = indexes
            .block
            .difficultyepoch_to_difficultyepoch
            .boxed_clone();
        let difficultyepoch = LazyLast::from_source(name, ver, height_source, epochs);
        Ok(Self {
            dateindex,
            dates,
            difficultyepoch,
        })
    }

    /// Recompute the per-day last values from `height_source`; period and
    /// epoch views are lazy and need no eager pass.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.dateindex.compute_last(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )
    }
}

View File

@@ -0,0 +1,11 @@
// One module per aggregation flavor derived from an external height source.
mod distribution;
mod full;
mod last;
mod sum;
mod sum_cum;

// Flat re-exports; consumers refer to the types directly.
pub use distribution::*;
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,119 @@
//! DerivedComputedBlockSum - dateindex storage + difficultyepoch + lazy time periods.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, StoredU64, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableBoxedVec, IterableCloneableVec,
IterableVec, VecIndex,
};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedVecValue, DerivedDateSum, LazySum, NumericValue, SumVec},
};
/// Sum-only aggregations derived from an external height source:
/// eager per-day sums plus lazy period and difficultyepoch views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockSum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Eagerly computed per-day sums of the height source.
    pub dateindex: SumVec<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateSum<T>,
    /// Lazy per-epoch sums read straight from the height source.
    pub difficultyepoch: LazySum<DifficultyEpoch, T, Height, DifficultyEpoch>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedComputedBlockSum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the dateindex sum vec and wire the lazy views.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let dateindex = SumVec::forced_import(db, name, version + VERSION)?;
        let v = version + VERSION;
        Ok(Self {
            dates: DerivedDateSum::from_source(name, v, dateindex.0.boxed_clone(), indexes),
            difficultyepoch: LazySum::from_source(
                name,
                v,
                height_source,
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
            ),
            dateindex,
        })
    }

    /// Recompute the per-day sums from `height_source`, resuming at
    /// `starting_indexes.dateindex`.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.compute_from(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )
    }

    /// For each day >= `starting_dateindex`, sum `count` consecutive height
    /// values starting at that day's first height and persist the result.
    fn compute_from(
        &mut self,
        starting_dateindex: DateIndex,
        height_source: &impl IterableVec<Height, T>,
        first_indexes: &impl IterableVec<DateIndex, Height>,
        count_indexes: &impl IterableVec<DateIndex, StoredU64>,
        exit: &Exit,
    ) -> Result<()> {
        let sum_vec = &mut self.dateindex.0;
        // Any source/index version change invalidates previously stored sums.
        let combined_version =
            height_source.version() + first_indexes.version() + count_indexes.version();
        sum_vec.validate_computed_version_or_reset(combined_version)?;
        // Resume from the earlier of the requested start and the stored length.
        let index = starting_dateindex.to_usize().min(sum_vec.len());
        let mut source_iter = height_source.iter();
        let mut count_iter = count_indexes.iter().skip(index);
        first_indexes.iter().enumerate().skip(index).try_for_each(
            |(idx, first_height)| -> Result<()> {
                // `first_indexes` and `count_indexes` are parallel per-day vecs;
                // a missing count means the index vecs are out of sync — name
                // that invariant instead of a bare unwrap.
                let count = *count_iter
                    .next()
                    .expect("dateindex_to_height_count out of sync with dateindex_to_first_height")
                    as usize;
                source_iter.set_position(first_height);
                let sum: T = (&mut source_iter)
                    .take(count)
                    .fold(T::from(0_usize), |acc, v| acc + v);
                sum_vec.truncate_push_at(idx, sum)?;
                Ok(())
            },
        )?;
        let _lock = exit.lock();
        sum_vec.write()?;
        Ok(())
    }
}

View File

@@ -0,0 +1,161 @@
//! DerivedComputedBlockSumCum - aggregates derived from an external height source.
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, StoredU64, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{
AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableBoxedVec, IterableCloneableVec,
IterableVec, VecIndex,
};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, CumulativeVec, DerivedDateSumCum, LazySumCum, NumericValue, SumCum,
compute_cumulative_extend,
},
};
/// Sum + cumulative aggregations derived from an external height source.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedComputedBlockSumCum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    /// Running total over heights, eagerly extended.
    pub height_cumulative: CumulativeVec<Height, T>,
    /// Eager per-day sum and cumulative.
    pub dateindex: SumCum<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateSumCum<T>,
    /// Lazy per-epoch view over the height source and its cumulative vec.
    pub difficultyepoch: LazySumCum<DifficultyEpoch, T, Height, DifficultyEpoch>,
}
const VERSION: Version = Version::ZERO;
impl<T> DerivedComputedBlockSumCum<T>
where
    T: NumericValue + JsonSchema,
{
    /// Open (or create) the eager vecs and wire the lazy views off boxed clones.
    pub fn forced_import(
        db: &Database,
        name: &str,
        height_source: IterableBoxedVec<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let v = version + VERSION;
        let height_cumulative = CumulativeVec::forced_import(db, name, v)?;
        let dateindex = SumCum::forced_import(db, name, v)?;
        let dates = DerivedDateSumCum::from_sources(
            name,
            v,
            dateindex.sum.0.boxed_clone(),
            dateindex.cumulative.0.boxed_clone(),
            indexes,
        );
        let difficultyepoch = LazySumCum::from_sources(
            name,
            v,
            height_source.boxed_clone(),
            height_cumulative.0.boxed_clone(),
            indexes
                .block
                .difficultyepoch_to_difficultyepoch
                .boxed_clone(),
        );
        Ok(Self {
            height_cumulative,
            dateindex,
            dates,
            difficultyepoch,
        })
    }
    /// Refresh height cumulative and per-day sum/cumulative from `height_source`.
    pub fn derive_from(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        height_source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.compute_height_cumulative(starting_indexes.height, height_source, exit)?;
        self.compute_dateindex_sum_cum(
            starting_indexes.dateindex,
            height_source,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )
    }
    /// Extend the height cumulative vec, resuming at `max_from`.
    fn compute_height_cumulative(
        &mut self,
        max_from: Height,
        source: &impl IterableVec<Height, T>,
        exit: &Exit,
    ) -> Result<()> {
        compute_cumulative_extend(max_from, source, &mut self.height_cumulative.0, exit)
    }
    /// For each day >= `starting_dateindex`, sum `count` consecutive height
    /// values from that day's first height, push the sum, and push the running
    /// cumulative (seeded from the last persisted cumulative value).
    fn compute_dateindex_sum_cum(
        &mut self,
        starting_dateindex: DateIndex,
        height_source: &impl IterableVec<Height, T>,
        first_indexes: &impl IterableVec<DateIndex, Height>,
        count_indexes: &impl IterableVec<DateIndex, StoredU64>,
        exit: &Exit,
    ) -> Result<()> {
        let sum_vec = &mut self.dateindex.sum.0;
        let cumulative_vec = &mut self.dateindex.cumulative.0;
        // Any source/index version change invalidates both derived vecs.
        let combined_version =
            height_source.version() + first_indexes.version() + count_indexes.version();
        sum_vec.validate_computed_version_or_reset(combined_version)?;
        cumulative_vec.validate_computed_version_or_reset(combined_version)?;
        // Resume from the earliest of the requested start and what both vecs
        // already hold, so sum and cumulative stay aligned.
        let index = starting_dateindex
            .to_usize()
            .min(sum_vec.len())
            .min(cumulative_vec.len());
        // Seed the running total from the last persisted cumulative entry,
        // or zero when starting from scratch.
        let mut cumulative = if index > 0 {
            cumulative_vec.iter().get_unwrap((index - 1).into())
        } else {
            T::from(0_usize)
        };
        let mut source_iter = height_source.iter();
        let mut count_iter = count_indexes.iter().skip(index);
        first_indexes.iter().enumerate().skip(index).try_for_each(
            |(idx, first_height)| -> Result<()> {
                // `first_indexes` and `count_indexes` are parallel per-day vecs.
                let count = *count_iter.next().unwrap() as usize;
                source_iter.set_position(first_height);
                let sum: T = (&mut source_iter)
                    .take(count)
                    .fold(T::from(0_usize), |acc, v| acc + v);
                cumulative += sum;
                sum_vec.truncate_push_at(idx, sum)?;
                cumulative_vec.truncate_push_at(idx, cumulative)?;
                Ok(())
            },
        )?;
        // Hold the exit lock so both writes land atomically w.r.t. shutdown.
        let _lock = exit.lock();
        sum_vec.write()?;
        cumulative_vec.write()?;
        Ok(())
    }
}

View File

@@ -1,199 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, TreeNode, Version, WeekIndex,
YearIndex,
};
use schemars::JsonSchema;
use vecdb::{
AnyExportableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec,
PcoVec,
};
use crate::{ComputeIndexes, indexes, internal::LazyVecsBuilder, utils::OptionExt};
use crate::internal::{ComputedVecValue, EagerVecsBuilder, Source, VecBuilderOptions};
/// Eager per-date vecs plus lazily derived coarser time-period views,
/// all fed from a `DateIndex`-keyed source.
#[derive(Clone)]
pub struct ComputedVecsFromDateIndex<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Present only when the source is `Source::Compute` (we own the data).
    pub dateindex: Option<EagerVec<PcoVec<DateIndex, T>>>,
    // Extra eagerly-stored stats at date granularity.
    pub dateindex_extra: EagerVecsBuilder<DateIndex, T>,
    pub weekindex: LazyVecsBuilder<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyVecsBuilder<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyVecsBuilder<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyVecsBuilder<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyVecsBuilder<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyVecsBuilder<DecadeIndex, T, DateIndex, DecadeIndex>,
}
// Bump to force recomputation when this module's logic changes.
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsFromDateIndex<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Imports (or force-creates) every vec under `name`, wiring each coarser
    /// time period as a lazy view over the per-date data.
    #[allow(clippy::too_many_arguments)]
    pub fn forced_import(
        db: &Database,
        name: &str,
        source: Source<DateIndex, T>,
        version: Version,
        indexes: &indexes::Vecs,
        options: VecBuilderOptions,
    ) -> Result<Self> {
        // Own an eager per-date vec only when the data must be computed here.
        let dateindex = source
            .is_compute()
            .then(|| EagerVec::forced_import(db, name, version + VERSION).unwrap());
        let dateindex_extra = EagerVecsBuilder::forced_import(
            db,
            name,
            version + VERSION,
            options.copy_self_extra(),
        )?;
        // Percentiles are kept at date granularity only; coarser periods drop them.
        let options = options.remove_percentiles();
        // Prefer an externally provided source vec, else our own eager one.
        let dateindex_source = source.vec().or(dateindex.as_ref().map(|v| v.boxed_clone()));
        Ok(Self {
            weekindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.weekindex_to_weekindex.boxed_clone(),
                options.into(),
            ),
            monthindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.monthindex_to_monthindex.boxed_clone(),
                options.into(),
            ),
            quarterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
                options.into(),
            ),
            semesterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
                options.into(),
            ),
            yearindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.yearindex_to_yearindex.boxed_clone(),
                options.into(),
            ),
            decadeindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                dateindex_source.clone(),
                &dateindex_extra,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
                options.into(),
            ),
            dateindex,
            dateindex_extra,
        })
    }
    /// Fills the owned per-date vec via `compute`, then derives the rest.
    ///
    /// NOTE(review): `um()` looks like an OptionExt unwrap-mut, so this
    /// presumably panics when `source` was not `Compute` — confirm.
    pub fn compute_all<F>(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<DateIndex, T>>) -> Result<()>,
    {
        compute(self.dateindex.um())?;
        // Typed `None`: tells compute_rest to read from self.dateindex instead.
        let dateindex: Option<&EagerVec<PcoVec<DateIndex, T>>> = None;
        self.compute_rest(starting_indexes, exit, dateindex)
    }
    /// Extends the extra per-date stats from `dateindex` when given, else
    /// from the owned eager vec.
    pub fn compute_rest(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        dateindex: Option<&impl IterableVec<DateIndex, T>>,
    ) -> Result<()> {
        if let Some(dateindex) = dateindex {
            self.dateindex_extra
                .extend(starting_indexes.dateindex, dateindex, exit)?;
        } else {
            let dateindex = self.dateindex.u();
            self.dateindex_extra
                .extend(starting_indexes.dateindex, dateindex, exit)?;
        }
        Ok(())
    }
}
impl<T> Traversable for ComputedVecsFromDateIndex<T>
where
    T: ComputedVecValue + JsonSchema,
{
    /// Branch node with one child per time granularity; the optional
    /// `dateindex` and an empty `dateindex_extra` are omitted.
    fn to_tree_node(&self) -> TreeNode {
        let dateindex_extra_node = self.dateindex_extra.to_tree_node();
        TreeNode::Branch(
            [
                self.dateindex
                    .as_ref()
                    .map(|nested| ("dateindex".to_string(), nested.to_tree_node())),
                if dateindex_extra_node.is_empty() {
                    None
                } else {
                    Some(("dateindex_extra".to_string(), dateindex_extra_node))
                },
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }
    /// Chains the exportable vecs of all components; the owned dateindex vec,
    /// when present, is appended last.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(self.dateindex_extra.iter_any_exportable());
        regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable()));
        if let Some(ref x) = self.dateindex {
            regular_iter = Box::new(regular_iter.chain(x.iter_any_exportable()));
        }
        regular_iter
    }
}

View File

@@ -1,5 +0,0 @@
//! Re-exports both vec-builder variants.

mod standard;
pub use standard::*;

mod strict;
pub use strict::*;

View File

@@ -1,248 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
TreeNode, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{
AnyExportableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, IterableVec,
PcoVec,
};
use crate::{
ComputeIndexes, indexes,
internal::{LazyVecsBuilder, Source},
utils::OptionExt,
};
use crate::internal::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions};
/// Eager per-height vecs plus per-date aggregates and lazily derived
/// coarser time/epoch periods, fed from a `Height`-keyed source.
#[derive(Clone)]
pub struct ComputedVecsFromHeight<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Present only when the source is `Source::Compute` (we own the data).
    pub height: Option<EagerVec<PcoVec<Height, T>>>,
    // Extra eagerly-stored stats at height granularity.
    pub height_extra: EagerVecsBuilder<Height, T>,
    pub dateindex: EagerVecsBuilder<DateIndex, T>,
    pub weekindex: LazyVecsBuilder<WeekIndex, T, DateIndex, WeekIndex>,
    pub difficultyepoch: LazyVecsBuilder<DifficultyEpoch, T, Height, DifficultyEpoch>,
    pub monthindex: LazyVecsBuilder<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyVecsBuilder<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyVecsBuilder<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyVecsBuilder<YearIndex, T, DateIndex, YearIndex>,
    // TODO: pub halvingepoch: StorableVecGeneator<Halvingepoch, T>,
    pub decadeindex: LazyVecsBuilder<DecadeIndex, T, DateIndex, DecadeIndex>,
}
// Bump to force recomputation when this module's logic changes.
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsFromHeight<T>
where
    T: ComputedVecValue + Ord + From<f64> + JsonSchema + 'static,
    f64: From<T>,
{
    /// Imports (or force-creates) every vec under `name`; date-based periods
    /// are lazy views over the eager `dateindex` builder, difficultyepoch is
    /// a lazy view over the per-height data.
    #[allow(clippy::too_many_arguments)]
    pub fn forced_import(
        db: &Database,
        name: &str,
        source: Source<Height, T>,
        version: Version,
        indexes: &indexes::Vecs,
        options: VecBuilderOptions,
    ) -> Result<Self> {
        // Own an eager per-height vec only when the data must be computed here.
        let height = source
            .is_compute()
            .then(|| EagerVec::forced_import(db, name, version + VERSION).unwrap());
        let height_extra = EagerVecsBuilder::forced_import(
            db,
            name,
            version + VERSION,
            options.copy_self_extra(),
        )?;
        // dateindex keeps the full options (incl. percentiles); the lazy
        // builders below use the reduced set.
        let dateindex = EagerVecsBuilder::forced_import(db, name, version + VERSION, options)?;
        let options = options.remove_percentiles();
        // Prefer an externally provided source vec, else our own eager one.
        let height_source = source.vec().or(height.as_ref().map(|v| v.boxed_clone()));
        Ok(Self {
            weekindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.weekindex_to_weekindex.boxed_clone(),
                options.into(),
            ),
            monthindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.monthindex_to_monthindex.boxed_clone(),
                options.into(),
            ),
            quarterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
                options.into(),
            ),
            semesterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
                options.into(),
            ),
            yearindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.yearindex_to_yearindex.boxed_clone(),
                options.into(),
            ),
            decadeindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
                options.into(),
            ),
            // halvingepoch: StorableVecGeneator::forced_import(db, name, version + VERSION , format, options)?,
            difficultyepoch: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                height_source,
                &height_extra,
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
                options.into(),
            ),
            height,
            height_extra,
            dateindex,
        })
    }
    /// Fills the owned per-height vec via `compute`, then derives the rest.
    ///
    /// NOTE(review): `um()` looks like an OptionExt unwrap-mut, so this
    /// presumably panics when `source` was not `Compute` — confirm.
    pub fn compute_all<F>(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(self.height.um())?;
        // Typed `None`: tells compute_rest to read from self.height instead.
        let height: Option<&EagerVec<PcoVec<Height, T>>> = None;
        self.compute_rest(indexes, starting_indexes, exit, height)
    }
    /// Extends the extra per-height stats and recomputes the per-date
    /// aggregates, reading from `height_vec` when given, else from the
    /// owned eager vec.
    pub fn compute_rest(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        height_vec: Option<&impl IterableVec<Height, T>>,
    ) -> Result<()> {
        if let Some(height) = height_vec {
            self.height_extra
                .extend(starting_indexes.height, height, exit)?;
            self.dateindex.compute(
                starting_indexes.dateindex,
                height,
                &indexes.time.dateindex_to_first_height,
                &indexes.time.dateindex_to_height_count,
                exit,
            )?;
        } else {
            let height = self.height.u();
            self.height_extra
                .extend(starting_indexes.height, height, exit)?;
            self.dateindex.compute(
                starting_indexes.dateindex,
                height,
                &indexes.time.dateindex_to_first_height,
                &indexes.time.dateindex_to_height_count,
                exit,
            )?;
        }
        Ok(())
    }
}
impl<T> Traversable for ComputedVecsFromHeight<T>
where
    T: ComputedVecValue + JsonSchema,
{
    /// Branch node with one child per granularity; the optional `height`
    /// and an empty `height_extra` are omitted.
    fn to_tree_node(&self) -> TreeNode {
        let height_extra_node = self.height_extra.to_tree_node();
        TreeNode::Branch(
            [
                self.height
                    .as_ref()
                    .map(|nested| ("height".to_string(), nested.to_tree_node())),
                if height_extra_node.is_empty() {
                    None
                } else {
                    Some(("height_extra".to_string(), height_extra_node))
                },
                Some(("dateindex".to_string(), self.dateindex.to_tree_node())),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some((
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                )),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }
    /// Chains the exportable vecs of all components; the owned height vec,
    /// when present, is appended last.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(self.height_extra.iter_any_exportable());
        regular_iter = Box::new(regular_iter.chain(self.dateindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.difficultyepoch.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable()));
        if let Some(ref x) = self.height {
            regular_iter = Box::new(regular_iter.chain(x.iter_any_exportable()));
        }
        regular_iter
    }
}

View File

@@ -1,120 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, TreeNode, Version};
use schemars::JsonSchema;
use vecdb::{
AnyExportableVec, Database, EagerVec, Exit, ImportableVec, IterableCloneableVec, PcoVec,
};
use crate::{ComputeIndexes, indexes};
use crate::internal::{ComputedVecValue, EagerVecsBuilder, LazyVecsBuilder, VecBuilderOptions};
/// Like `ComputedVecsFromHeight`, but the per-height vec is always owned and
/// computed here, and only difficultyepoch views are derived from it.
#[derive(Clone)]
pub struct ComputedVecsFromHeightStrict<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub height: EagerVec<PcoVec<Height, T>>,
    // Extra eagerly-stored stats at height granularity.
    pub height_extra: EagerVecsBuilder<Height, T>,
    pub difficultyepoch: LazyVecsBuilder<DifficultyEpoch, T, Height, DifficultyEpoch>,
    // TODO: pub halvingepoch: StorableVecGeneator<Halvingepoch, T>,
}
// Bump to force recomputation when this module's logic changes.
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsFromHeightStrict<T>
where
    T: ComputedVecValue + Ord + From<f64> + JsonSchema,
    f64: From<T>,
{
    /// Imports the eager height vec and extra height stats, and wires the
    /// difficultyepoch vecs as lazy views over them.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
        options: VecBuilderOptions,
    ) -> Result<Self> {
        let height = EagerVec::forced_import(db, name, version + VERSION)?;
        let height_extra = EagerVecsBuilder::forced_import(
            db,
            name,
            version + VERSION,
            options.copy_self_extra(),
        )?;
        // Percentiles stay at height granularity only.
        let options = options.remove_percentiles();
        Ok(Self {
            difficultyepoch: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                Some(height.boxed_clone()),
                &height_extra,
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
                options.into(),
            ),
            height,
            height_extra,
            // halvingepoch: StorableVecGeneator::forced_import(db, name, version + VERSION , format, options)?,
        })
    }
    /// Runs `compute` to fill the height vec, then extends the extra stats;
    /// the lazy difficultyepoch views need no explicit work.
    pub fn compute<F>(
        &mut self,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        mut compute: F,
    ) -> Result<()>
    where
        F: FnMut(&mut EagerVec<PcoVec<Height, T>>) -> Result<()>,
    {
        compute(&mut self.height)?;
        self.height_extra
            .extend(starting_indexes.height, &self.height, exit)?;
        Ok(())
    }
}
impl<T> Traversable for ComputedVecsFromHeightStrict<T>
where
    T: ComputedVecValue + JsonSchema,
{
    /// Builds the tree node: `height` and `difficultyepoch` always appear;
    /// `height_extra` is included only when it has children.
    fn to_tree_node(&self) -> TreeNode {
        let mut entries = vec![("height".to_string(), self.height.to_tree_node())];
        let extra = self.height_extra.to_tree_node();
        if !extra.is_empty() {
            entries.push(("height_extra".to_string(), extra));
        }
        entries.push((
            "difficultyepoch".to_string(),
            self.difficultyepoch.to_tree_node(),
        ));
        TreeNode::Branch(entries.into_iter().collect())
            .merge_branches()
            .unwrap()
    }
    /// Chains the exportable vecs of each component, height first.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        self.height
            .iter_any_exportable()
            .chain(self.height_extra.iter_any_exportable())
            .chain(self.difficultyepoch.iter_any_exportable())
    }
}

View File

@@ -1,452 +0,0 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
Bitcoin, DateIndex, DecadeIndex, DifficultyEpoch, Dollars, Height, MonthIndex, QuarterIndex,
Sats, SemesterIndex, TreeNode, TxIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{
AnyExportableVec, AnyVec, CollectableVec, Database, EagerVec, Exit, GenericStoredVec,
ImportableVec, IterableCloneableVec, PcoVec, TypedVecIterator, VecIndex,
};
use crate::{
ComputeIndexes, indexes,
internal::{LazyVecsBuilder, Source},
price,
utils::OptionExt,
};
use crate::internal::{ComputedVecValue, EagerVecsBuilder, VecBuilderOptions};
/// TxIndex-sourced stats aggregated to height and dateindex, with lazily
/// derived coarser time/epoch periods.
#[derive(Clone)]
pub struct ComputedVecsFromTxindex<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    // Present only when the source is `Source::Compute`; boxed, presumably
    // to keep the struct small — confirm.
    pub txindex: Option<Box<EagerVec<PcoVec<TxIndex, T>>>>,
    pub height: EagerVecsBuilder<Height, T>,
    pub dateindex: EagerVecsBuilder<DateIndex, T>,
    pub weekindex: LazyVecsBuilder<WeekIndex, T, DateIndex, WeekIndex>,
    pub difficultyepoch: LazyVecsBuilder<DifficultyEpoch, T, Height, DifficultyEpoch>,
    pub monthindex: LazyVecsBuilder<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyVecsBuilder<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyVecsBuilder<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyVecsBuilder<YearIndex, T, DateIndex, YearIndex>,
    // TODO: pub halvingepoch: StorableVecGeneator<Halvingepoch, T>,
    pub decadeindex: LazyVecsBuilder<DecadeIndex, T, DateIndex, DecadeIndex>,
}
// Bump to force recomputation when this module's logic changes.
const VERSION: Version = Version::ZERO;
impl<T> ComputedVecsFromTxindex<T>
where
    T: ComputedVecValue + Ord + From<f64> + JsonSchema + 'static,
    f64: From<T>,
{
    /// Imports (or force-creates) every vec under `name`; height keeps the
    /// full options (incl. percentiles), date-based periods use the reduced
    /// set and are lazy views over the eager builders.
    #[allow(clippy::too_many_arguments)]
    pub fn forced_import(
        db: &Database,
        name: &str,
        source: Source<TxIndex, T>,
        version: Version,
        indexes: &indexes::Vecs,
        options: VecBuilderOptions,
    ) -> Result<Self> {
        // Own an eager per-tx vec only when the data must be computed here.
        let txindex = source
            .is_compute()
            .then(|| Box::new(EagerVec::forced_import(db, name, version + VERSION).unwrap()));
        let height = EagerVecsBuilder::forced_import(db, name, version + VERSION, options)?;
        let options = options.remove_percentiles();
        let dateindex = EagerVecsBuilder::forced_import(db, name, version + VERSION, options)?;
        Ok(Self {
            weekindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.weekindex_to_weekindex.boxed_clone(),
                options.into(),
            ),
            difficultyepoch: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &height,
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
                options.into(),
            ),
            monthindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.monthindex_to_monthindex.boxed_clone(),
                options.into(),
            ),
            quarterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
                options.into(),
            ),
            semesterindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
                options.into(),
            ),
            yearindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.yearindex_to_yearindex.boxed_clone(),
                options.into(),
            ),
            decadeindex: LazyVecsBuilder::forced_import(
                name,
                version + VERSION,
                None,
                &dateindex,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
                options.into(),
            ),
            txindex,
            height,
            dateindex,
            // halvingepoch: StorableVecGeneator::forced_import(db, name, version + VERSION , format, options)?,
        })
    }
    // NOTE(review): dead code below — consider deleting or reviving.
    // pub fn compute_all<F>(
    //     &mut self,
    //     indexer: &Indexer,
    //     indexes: &indexes::Vecs,
    //     starting_indexes: &ComputeIndexes,
    //     exit: &Exit,
    //     mut compute: F,
    // ) -> Result<()>
    // where
    //     F: FnMut(
    //         &mut EagerVec<PcoVec<TxIndex, T>>,
    //         &Indexer,
    //         &indexes::Vecs,
    //         &Indexes,
    //         &Exit,
    //     ) -> Result<()>,
    // {
    //     compute(
    //         self.txindex.um(),
    //         indexer,
    //         indexes,
    //         starting_indexes,
    //         exit,
    //     )?;
    //     let txindex: Option<&StoredVec<TxIndex, T>> = None;
    //     self.compute_rest(indexer, indexes, starting_indexes, exit, txindex)?;
    //     Ok(())
    // }
    /// Aggregates the per-tx source to per-height stats (from `txindex` when
    /// given, else the owned eager vec), then to per-date stats.
    pub fn compute_rest(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        txindex: Option<&impl CollectableVec<TxIndex, T>>,
    ) -> Result<()> {
        if let Some(txindex) = txindex {
            self.height.compute(
                starting_indexes.height,
                txindex,
                &indexer.vecs.tx.height_to_first_txindex,
                &indexes.block.height_to_txindex_count,
                exit,
            )?;
        } else {
            let txindex = self.txindex.u().as_ref();
            self.height.compute(
                starting_indexes.height,
                txindex,
                &indexer.vecs.tx.height_to_first_txindex,
                &indexes.block.height_to_txindex_count,
                exit,
            )?;
        }
        self.compute_after_height(indexes, starting_indexes, exit)
    }
    /// Rolls the per-height stats up to per-date stats.
    fn compute_after_height(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.dateindex.from_aligned(
            starting_indexes.dateindex,
            &self.height,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )?;
        Ok(())
    }
}
impl ComputedVecsFromTxindex<Bitcoin> {
    /// Converts the already-computed per-height Sats stats into Bitcoin,
    /// value by value, then derives the per-date aggregates — avoids
    /// re-aggregating from raw transactions.
    pub fn compute_rest_from_sats(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        sats: &ComputedVecsFromTxindex<Sats>,
        txindex: Option<&impl CollectableVec<TxIndex, Bitcoin>>,
    ) -> Result<()> {
        // Version-stamp outputs with whichever tx source is in use.
        let txindex_version = if let Some(txindex) = txindex {
            txindex.version()
        } else {
            self.txindex.u().as_ref().version()
        };
        self.height
            .validate_computed_version_or_reset(txindex_version)?;
        let starting_index = self.height.starting_index(starting_indexes.height);
        // Create iterators once before the loop to avoid repeated iterator creation
        let mut first_iter = sats.height.first.as_ref().map(|v| v.into_iter());
        let mut average_iter = sats.height.average.as_ref().map(|v| v.into_iter());
        let mut sum_iter = sats.height.sum.as_ref().map(|v| v.into_iter());
        let mut max_iter = sats.height.max.as_ref().map(|v| v.into_iter());
        let mut pct90_iter = sats.height.pct90.as_ref().map(|v| v.into_iter());
        let mut pct75_iter = sats.height.pct75.as_ref().map(|v| v.into_iter());
        let mut median_iter = sats.height.median.as_ref().map(|v| v.into_iter());
        let mut pct25_iter = sats.height.pct25.as_ref().map(|v| v.into_iter());
        let mut pct10_iter = sats.height.pct10.as_ref().map(|v| v.into_iter());
        let mut min_iter = sats.height.min.as_ref().map(|v| v.into_iter());
        let mut last_iter = sats.height.last.as_ref().map(|v| v.into_iter());
        let mut cumulative_iter = sats.height.cumulative.as_ref().map(|v| v.into_iter());
        // NOTE(review): `um()` on each iterator assumes that every stat vec
        // enabled on self is also enabled on `sats` — confirm.
        (starting_index.to_usize()..indexer.vecs.block.height_to_weight.len())
            .map(Height::from)
            .try_for_each(|height| -> Result<()> {
                if let Some(first) = self.height.first.as_mut() {
                    first
                        .truncate_push(height, Bitcoin::from(first_iter.um().get_unwrap(height)))?;
                }
                if let Some(average) = self.height.average.as_mut() {
                    average.truncate_push(
                        height,
                        Bitcoin::from(average_iter.um().get_unwrap(height)),
                    )?;
                }
                if let Some(sum) = self.height.sum.as_mut() {
                    sum.truncate_push(height, Bitcoin::from(sum_iter.um().get_unwrap(height)))?;
                }
                if let Some(max) = self.height.max.as_mut() {
                    max.truncate_push(height, Bitcoin::from(max_iter.um().get_unwrap(height)))?;
                }
                if let Some(pct90) = self.height.pct90.as_mut() {
                    pct90
                        .truncate_push(height, Bitcoin::from(pct90_iter.um().get_unwrap(height)))?;
                }
                if let Some(pct75) = self.height.pct75.as_mut() {
                    pct75
                        .truncate_push(height, Bitcoin::from(pct75_iter.um().get_unwrap(height)))?;
                }
                if let Some(median) = self.height.median.as_mut() {
                    median.truncate_push(
                        height,
                        Bitcoin::from(median_iter.um().get_unwrap(height)),
                    )?;
                }
                if let Some(pct25) = self.height.pct25.as_mut() {
                    pct25
                        .truncate_push(height, Bitcoin::from(pct25_iter.um().get_unwrap(height)))?;
                }
                if let Some(pct10) = self.height.pct10.as_mut() {
                    pct10
                        .truncate_push(height, Bitcoin::from(pct10_iter.um().get_unwrap(height)))?;
                }
                if let Some(min) = self.height.min.as_mut() {
                    min.truncate_push(height, Bitcoin::from(min_iter.um().get_unwrap(height)))?;
                }
                if let Some(last) = self.height.last.as_mut() {
                    last.truncate_push(height, Bitcoin::from(last_iter.um().get_unwrap(height)))?;
                }
                if let Some(cumulative) = self.height.cumulative.as_mut() {
                    cumulative.truncate_push(
                        height,
                        Bitcoin::from(cumulative_iter.um().get_unwrap(height)),
                    )?;
                }
                Ok(())
            })?;
        self.height.write()?;
        self.compute_after_height(indexes, starting_indexes, exit)
    }
}
impl ComputedVecsFromTxindex<Dollars> {
    /// Converts the already-computed per-height Bitcoin stats into Dollars by
    /// multiplying each value with that height's close price, then derives
    /// the per-date aggregates.
    #[allow(clippy::too_many_arguments)]
    pub fn compute_rest_from_bitcoin(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
        bitcoin: &ComputedVecsFromTxindex<Bitcoin>,
        txindex: Option<&impl CollectableVec<TxIndex, Dollars>>,
        price: &price::Vecs,
    ) -> Result<()> {
        // Version-stamp outputs with whichever tx source is in use.
        let txindex_version = if let Some(txindex) = txindex {
            txindex.version()
        } else {
            self.txindex.u().as_ref().version()
        };
        self.height
            .validate_computed_version_or_reset(txindex_version)?;
        let starting_index = self.height.starting_index(starting_indexes.height);
        let mut close_iter = price.usd.chainindexes_to_price_close.height.into_iter();
        // Create iterators once before the loop to avoid repeated iterator creation
        let mut first_iter = bitcoin.height.first.as_ref().map(|v| v.into_iter());
        let mut average_iter = bitcoin.height.average.as_ref().map(|v| v.into_iter());
        let mut sum_iter = bitcoin.height.sum.as_ref().map(|v| v.into_iter());
        let mut max_iter = bitcoin.height.max.as_ref().map(|v| v.into_iter());
        let mut pct90_iter = bitcoin.height.pct90.as_ref().map(|v| v.into_iter());
        let mut pct75_iter = bitcoin.height.pct75.as_ref().map(|v| v.into_iter());
        let mut median_iter = bitcoin.height.median.as_ref().map(|v| v.into_iter());
        let mut pct25_iter = bitcoin.height.pct25.as_ref().map(|v| v.into_iter());
        let mut pct10_iter = bitcoin.height.pct10.as_ref().map(|v| v.into_iter());
        let mut min_iter = bitcoin.height.min.as_ref().map(|v| v.into_iter());
        let mut last_iter = bitcoin.height.last.as_ref().map(|v| v.into_iter());
        let mut cumulative_iter = bitcoin.height.cumulative.as_ref().map(|v| v.into_iter());
        // NOTE(review): `um()` on each iterator assumes that every stat vec
        // enabled on self is also enabled on `bitcoin` — confirm.
        (starting_index.to_usize()..indexer.vecs.block.height_to_weight.len())
            .map(Height::from)
            .try_for_each(|height| -> Result<()> {
                // Same close price multiplies every stat at this height.
                let price = *close_iter.get_unwrap(height);
                if let Some(first) = self.height.first.as_mut() {
                    first.truncate_push(height, price * first_iter.um().get_unwrap(height))?;
                }
                if let Some(average) = self.height.average.as_mut() {
                    average.truncate_push(height, price * average_iter.um().get_unwrap(height))?;
                }
                if let Some(sum) = self.height.sum.as_mut() {
                    sum.truncate_push(height, price * sum_iter.um().get_unwrap(height))?;
                }
                if let Some(max) = self.height.max.as_mut() {
                    max.truncate_push(height, price * max_iter.um().get_unwrap(height))?;
                }
                if let Some(pct90) = self.height.pct90.as_mut() {
                    pct90.truncate_push(height, price * pct90_iter.um().get_unwrap(height))?;
                }
                if let Some(pct75) = self.height.pct75.as_mut() {
                    pct75.truncate_push(height, price * pct75_iter.um().get_unwrap(height))?;
                }
                if let Some(median) = self.height.median.as_mut() {
                    median.truncate_push(height, price * median_iter.um().get_unwrap(height))?;
                }
                if let Some(pct25) = self.height.pct25.as_mut() {
                    pct25.truncate_push(height, price * pct25_iter.um().get_unwrap(height))?;
                }
                if let Some(pct10) = self.height.pct10.as_mut() {
                    pct10.truncate_push(height, price * pct10_iter.um().get_unwrap(height))?;
                }
                if let Some(min) = self.height.min.as_mut() {
                    min.truncate_push(height, price * min_iter.um().get_unwrap(height))?;
                }
                if let Some(last) = self.height.last.as_mut() {
                    last.truncate_push(height, price * last_iter.um().get_unwrap(height))?;
                }
                if let Some(cumulative) = self.height.cumulative.as_mut() {
                    cumulative
                        .truncate_push(height, price * cumulative_iter.um().get_unwrap(height))?;
                }
                Ok(())
            })?;
        self.height.write()?;
        self.compute_after_height(indexes, starting_indexes, exit)
    }
}
impl<T> Traversable for ComputedVecsFromTxindex<T>
where
    T: ComputedVecValue + JsonSchema,
{
    /// Branch node with one child per granularity; the optional `txindex`
    /// is omitted when absent.
    fn to_tree_node(&self) -> TreeNode {
        TreeNode::Branch(
            [
                self.txindex
                    .as_ref()
                    .map(|nested| ("txindex".to_string(), nested.to_tree_node())),
                Some(("height".to_string(), self.height.to_tree_node())),
                Some(("dateindex".to_string(), self.dateindex.to_tree_node())),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some((
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                )),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        .unwrap()
    }
    /// Chains the exportable vecs of all components; the owned txindex vec,
    /// when present, is appended last.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(self.height.iter_any_exportable());
        regular_iter = Box::new(regular_iter.chain(self.dateindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.difficultyepoch.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable()));
        if let Some(ref x) = self.txindex {
            regular_iter = Box::new(regular_iter.chain(x.iter_any_exportable()));
        }
        regular_iter
    }
}

View File

@@ -1,9 +1,11 @@
// Each submodule is declared and then re-exported wholesale.
mod block;
pub use block::*;
mod chain;
pub use chain::*;
mod date;
pub use date::*;
mod derived_block;
pub use derived_block::*;
mod from_dateindex;
pub use from_dateindex::*;
mod from_height;
pub use from_height::*;
mod from_txindex;
pub use from_txindex::*;
mod traits;
pub use traits::*;
mod tx;
pub use tx::*;

View File

@@ -0,0 +1,111 @@
//! ComputedTxDistribution - computes TxIndex data to height Distribution + dateindex MinMaxAverage + lazy aggregations.
//!
//! Note: Percentiles are computed at height level only. DateIndex and coarser
//! periods only have average+min+max since computing percentiles across all
//! transactions per day would be expensive.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec};
use crate::{
ComputeIndexes, indexes,
internal::{
ComputedVecValue, DerivedDateDistribution, Distribution, LazyDistribution, MinMaxAverage,
NumericValue,
},
};
/// TxIndex data rolled up into a per-height `Distribution` (with
/// percentiles), a per-date `MinMaxAverage`, and lazy coarser-period views.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct ComputedTxDistribution<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub height: Distribution<Height, T>,
    pub difficultyepoch: LazyDistribution<DifficultyEpoch, T, Height, DifficultyEpoch>,
    pub dateindex: MinMaxAverage<DateIndex, T>,
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: DerivedDateDistribution<T>,
}
// Bump to force recomputation when this module's logic changes.
const VERSION: Version = Version::ZERO;
impl<T> ComputedTxDistribution<T>
where
    T: NumericValue + JsonSchema,
{
    /// Imports the eager height/date vecs and wires the difficultyepoch and
    /// coarser date periods as lazy views over them.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let height = Distribution::forced_import(db, name, version + VERSION)?;
        let dateindex = MinMaxAverage::forced_import(db, name, version + VERSION)?;
        let v = version + VERSION;
        let difficultyepoch =
            LazyDistribution::<DifficultyEpoch, T, Height, DifficultyEpoch>::from_distribution(
                name,
                v,
                height.average.0.boxed_clone(),
                height.minmax.min.0.boxed_clone(),
                height.minmax.max.0.boxed_clone(),
                indexes
                    .block
                    .difficultyepoch_to_difficultyepoch
                    .boxed_clone(),
            );
        let dates = DerivedDateDistribution::from_sources(
            name,
            v,
            dateindex.average.0.boxed_clone(),
            dateindex.minmax.min.0.boxed_clone(),
            dateindex.minmax.max.0.boxed_clone(),
            indexes,
        );
        Ok(Self {
            height,
            difficultyepoch,
            dateindex,
            dates,
        })
    }
    /// Computes per-height stats straight from the tx source, then per-date
    /// min/max/average from the per-height *averages* — not from raw
    /// transactions (intentional; see module docs).
    pub fn derive_from(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        txindex_source: &impl CollectableVec<TxIndex, T>,
        exit: &Exit,
    ) -> Result<()> {
        self.height.compute(
            starting_indexes.height,
            txindex_source,
            &indexer.vecs.tx.height_to_first_txindex,
            &indexes.block.height_to_txindex_count,
            exit,
        )?;
        self.dateindex.compute(
            starting_indexes.dateindex,
            &self.height.average.0,
            &indexes.time.dateindex_to_first_height,
            &indexes.time.dateindex_to_height_count,
            exit,
        )?;
        Ok(())
    }
}

View File

@@ -0,0 +1,106 @@
//! DerivedTxFull - aggregates from TxIndex to height Full + dateindex Stats + lazy date periods.
use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, TxIndex, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{CollectableVec, Database, Exit, IterableCloneableVec};
use crate::{
indexes, ComputeIndexes,
internal::{ComputedVecValue, DerivedDateFull, Full, LazyFull, NumericValue, Stats},
};
/// Aggregates from TxIndex to height/dateindex with full stats.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct DerivedTxFull<T>
where
T: ComputedVecValue + PartialOrd + JsonSchema,
{
pub height: Full<Height, T>,
pub difficultyepoch: LazyFull<DifficultyEpoch, T, Height, DifficultyEpoch>,
pub dateindex: Stats<DateIndex, T>,
#[deref]
#[deref_mut]
#[traversable(flatten)]
pub dates: DerivedDateFull<T>,
}
const VERSION: Version = Version::ZERO;
impl<T> DerivedTxFull<T>
where
T: NumericValue + JsonSchema,
{
pub fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let height = Full::forced_import(db, name, version + VERSION)?;
let dateindex = Stats::forced_import(db, name, version + VERSION)?;
let v = version + VERSION;
let difficultyepoch =
LazyFull::<DifficultyEpoch, T, Height, DifficultyEpoch>::from_stats_aggregate(
name,
v,
height.distribution.average.0.boxed_clone(),
height.distribution.minmax.min.0.boxed_clone(),
height.distribution.minmax.max.0.boxed_clone(),
height.sum_cum.sum.0.boxed_clone(),
height.sum_cum.cumulative.0.boxed_clone(),
indexes.block.difficultyepoch_to_difficultyepoch.boxed_clone(),
);
let dates = DerivedDateFull::from_sources(
name,
v,
dateindex.average.0.boxed_clone(),
dateindex.minmax.min.0.boxed_clone(),
dateindex.minmax.max.0.boxed_clone(),
dateindex.sum_cum.sum.0.boxed_clone(),
dateindex.sum_cum.cumulative.0.boxed_clone(),
indexes,
);
Ok(Self {
height,
difficultyepoch,
dateindex,
dates,
})
}
pub fn derive_from(
&mut self,
indexer: &Indexer,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
txindex_source: &impl CollectableVec<TxIndex, T>,
exit: &Exit,
) -> Result<()> {
self.height.compute(
starting_indexes.height,
txindex_source,
&indexer.vecs.tx.height_to_first_txindex,
&indexes.block.height_to_txindex_count,
exit,
)?;
self.dateindex.compute(
starting_indexes.dateindex,
&self.height.distribution.average.0,
&indexes.time.dateindex_to_first_height,
&indexes.time.dateindex_to_height_count,
exit,
)?;
Ok(())
}
}

View File

@@ -0,0 +1,5 @@
mod distribution;
mod full;
pub use distribution::*;
pub use full::*;

View File

@@ -0,0 +1,80 @@
//! Derived date periods with average-value aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyAverage};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade averages lazily derived from a
/// daily (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateAverage<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyAverage<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyAverage<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyAverage<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyAverage<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyAverage<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyAverage<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateAverage<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed source (no final clone).
            decadeindex: LazyAverage::from_source(
                name,
                v,
                dateindex_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,94 @@
//! Derived date periods with distribution aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyDistribution};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade distribution stats (average,
/// min, max) lazily derived from daily (`DateIndex`) sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateDistribution<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyDistribution<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyDistribution<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyDistribution<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyDistribution<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyDistribution<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyDistribution<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateDistribution<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from external dateindex sources for distribution stats.
    pub fn from_sources(
        name: &str,
        version: Version,
        average_source: IterableBoxedVec<DateIndex, T>,
        min_source: IterableBoxedVec<DateIndex, T>,
        max_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed sources (no final clones).
            decadeindex: LazyDistribution::from_distribution(
                name,
                v,
                average_source,
                min_source,
                max_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,80 @@
//! Derived date periods with first-value aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyFirst};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade first-values lazily derived from
/// a daily (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateFirst<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyFirst<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyFirst<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyFirst<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyFirst<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyFirst<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyFirst<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateFirst<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed source (no final clone).
            decadeindex: LazyFirst::from_source(
                name,
                v,
                dateindex_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,109 @@
//! Derived date periods with full stats aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyFull};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade full stats (average, min, max,
/// sum, cumulative) lazily derived from daily (`DateIndex`) sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateFull<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyFull<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyFull<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyFull<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyFull<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyFull<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyFull<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateFull<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from external dateindex sources for full stats.
    #[allow(clippy::too_many_arguments)]
    pub fn from_sources(
        name: &str,
        version: Version,
        average_source: IterableBoxedVec<DateIndex, T>,
        min_source: IterableBoxedVec<DateIndex, T>,
        max_source: IterableBoxedVec<DateIndex, T>,
        sum_source: IterableBoxedVec<DateIndex, T>,
        cumulative_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source.clone(),
                min_source.clone(),
                max_source.clone(),
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed sources (no final clones).
            decadeindex: LazyFull::from_stats_aggregate(
                name,
                v,
                average_source,
                min_source,
                max_source,
                sum_source,
                cumulative_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,80 @@
//! Derived date periods with last-value aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyLast};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade last-values lazily derived from
/// a daily (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateLast<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyLast<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyLast<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyLast<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyLast<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyLast<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyLast<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateLast<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyLast::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyLast::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyLast::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyLast::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyLast::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed source (no final clone).
            decadeindex: LazyLast::from_source(
                name,
                v,
                dateindex_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,80 @@
//! Derived date periods with max-value aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyMax};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade maxima lazily derived from a
/// daily (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateMax<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyMax<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyMax<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyMax<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyMax<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyMax<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyMax<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateMax<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyMax::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyMax::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyMax::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyMax::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyMax::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed source (no final clone).
            decadeindex: LazyMax::from_source(
                name,
                v,
                dateindex_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,80 @@
//! Derived date periods with min-value aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazyMin};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade minima lazily derived from a
/// daily (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateMin<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazyMin<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazyMin<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazyMin<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazyMin<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazyMin<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazyMin<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateMin<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazyMin::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazyMin::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazyMin::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazyMin::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazyMin::from_source(
                name,
                v,
                dateindex_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed source (no final clone).
            decadeindex: LazyMin::from_source(
                name,
                v,
                dateindex_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,19 @@
mod average;
mod distribution;
mod first;
mod full;
mod last;
mod max;
mod min;
mod sum;
mod sum_cum;
pub use average::*;
pub use distribution::*;
pub use first::*;
pub use full::*;
pub use last::*;
pub use max::*;
pub use min::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,81 @@
//! Derived date periods with sum aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazySum};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade sums lazily derived from a daily
/// (`DateIndex`) source.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateSum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazySum<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazySum<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazySum<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazySum<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazySum<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazySum<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateSum<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from an external dateindex source.
    pub fn from_source(
        name: &str,
        version: Version,
        dateindex_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // One combined version shared by every period vec.
        let combined = version + VERSION;
        let weekindex = LazySum::from_source(
            name,
            combined,
            dateindex_source.clone(),
            indexes.time.weekindex_to_weekindex.boxed_clone(),
        );
        let monthindex = LazySum::from_source(
            name,
            combined,
            dateindex_source.clone(),
            indexes.time.monthindex_to_monthindex.boxed_clone(),
        );
        let quarterindex = LazySum::from_source(
            name,
            combined,
            dateindex_source.clone(),
            indexes.time.quarterindex_to_quarterindex.boxed_clone(),
        );
        let semesterindex = LazySum::from_source(
            name,
            combined,
            dateindex_source.clone(),
            indexes.time.semesterindex_to_semesterindex.boxed_clone(),
        );
        let yearindex = LazySum::from_source(
            name,
            combined,
            dateindex_source.clone(),
            indexes.time.yearindex_to_yearindex.boxed_clone(),
        );
        // The final period vec consumes the boxed source outright.
        let decadeindex = LazySum::from_source(
            name,
            combined,
            dateindex_source,
            indexes.time.decadeindex_to_decadeindex.boxed_clone(),
        );
        Self {
            weekindex,
            monthindex,
            quarterindex,
            semesterindex,
            yearindex,
            decadeindex,
        }
    }
}

View File

@@ -0,0 +1,87 @@
//! Derived date periods with sum+cumulative aggregation.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec};
use crate::{indexes, internal::LazySumCum};
use crate::internal::ComputedVecValue;
/// Week/month/quarter/semester/year/decade sum + cumulative views lazily
/// derived from daily (`DateIndex`) sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct DerivedDateSumCum<T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
{
    pub weekindex: LazySumCum<WeekIndex, T, DateIndex, WeekIndex>,
    pub monthindex: LazySumCum<MonthIndex, T, DateIndex, MonthIndex>,
    pub quarterindex: LazySumCum<QuarterIndex, T, DateIndex, QuarterIndex>,
    pub semesterindex: LazySumCum<SemesterIndex, T, DateIndex, SemesterIndex>,
    pub yearindex: LazySumCum<YearIndex, T, DateIndex, YearIndex>,
    pub decadeindex: LazySumCum<DecadeIndex, T, DateIndex, DecadeIndex>,
}

const VERSION: Version = Version::ZERO;

impl<T> DerivedDateSumCum<T>
where
    T: ComputedVecValue + JsonSchema + 'static,
{
    /// Create from external dateindex sum and cumulative sources.
    pub fn from_sources(
        name: &str,
        version: Version,
        sum_source: IterableBoxedVec<DateIndex, T>,
        cumulative_source: IterableBoxedVec<DateIndex, T>,
        indexes: &indexes::Vecs,
    ) -> Self {
        // Combine the versions once instead of re-adding per field
        // (consistent with DerivedDateSum).
        let v = version + VERSION;
        Self {
            weekindex: LazySumCum::from_sources(
                name,
                v,
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.weekindex_to_weekindex.boxed_clone(),
            ),
            monthindex: LazySumCum::from_sources(
                name,
                v,
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.monthindex_to_monthindex.boxed_clone(),
            ),
            quarterindex: LazySumCum::from_sources(
                name,
                v,
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.quarterindex_to_quarterindex.boxed_clone(),
            ),
            semesterindex: LazySumCum::from_sources(
                name,
                v,
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.semesterindex_to_semesterindex.boxed_clone(),
            ),
            yearindex: LazySumCum::from_sources(
                name,
                v,
                sum_source.clone(),
                cumulative_source.clone(),
                indexes.time.yearindex_to_yearindex.boxed_clone(),
            ),
            // Last user takes ownership of the boxed sources (no final clones).
            decadeindex: LazySumCum::from_sources(
                name,
                v,
                sum_source,
                cumulative_source,
                indexes.time.decadeindex_to_decadeindex.boxed_clone(),
            ),
        }
    }
}

View File

@@ -0,0 +1,5 @@
mod date;
pub use date::*;
// tx derived types have been moved to computed/tx/

View File

@@ -0,0 +1,81 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, Exit, IterableVec, VecIndex, VecValue, Version};
use crate::internal::ComputedVecValue;
use crate::internal::vec::AverageVec;
use super::{MinMax, Percentiles};
/// Distribution stats (average + minmax + percentiles)
#[derive(Clone, Traversable)]
pub struct Distribution<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    #[traversable(flatten)]
    pub average: AverageVec<I, T>,
    #[traversable(flatten)]
    pub minmax: MinMax<I, T>,
    pub percentiles: Percentiles<I, T>,
}

impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Distribution<I, T> {
    /// Opens (or creates) every component vec under the same name/version.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        Ok(Self {
            average: AverageVec::forced_import(db, name, version)?,
            minmax: MinMax::forced_import(db, name, version)?,
            percentiles: Percentiles::forced_import(db, name, version)?,
        })
    }

    /// Compute distribution stats from source data.
    ///
    /// This computes: average, min, max, percentiles (pct10, pct25, median, pct75, pct90)
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // Shared aggregation driver; the None slots are stats this type
        // doesn't track (first/last/sum/cumulative).
        crate::internal::compute_aggregations(
            max_from,
            source,
            first_indexes,
            count_indexes,
            exit,
            None, // first
            None, // last
            Some(&mut self.minmax.min.0),
            Some(&mut self.minmax.max.0),
            Some(&mut self.average.0),
            None, // sum
            None, // cumulative
            Some(&mut self.percentiles.median.0),
            Some(&mut self.percentiles.pct10.0),
            Some(&mut self.percentiles.pct25.0),
            Some(&mut self.percentiles.pct75.0),
            Some(&mut self.percentiles.pct90.0),
        )
    }

    /// Number of fully computed entries: the minimum length across every
    /// component vec, since an index only counts once all stats have it.
    pub fn len(&self) -> usize {
        self.average
            .0
            .len()
            .min(self.minmax.min.0.len())
            .min(self.minmax.max.0.len())
            .min(self.percentiles.pct10.0.len())
            .min(self.percentiles.pct25.0.len())
            .min(self.percentiles.median.0.len())
            .min(self.percentiles.pct75.0.len())
            .min(self.percentiles.pct90.0.len())
    }

    /// True when no entry has been fully computed yet.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clamp `max_from` so computation resumes no later than the first
    /// missing entry.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(self.len()))
    }
}

View File

@@ -0,0 +1,106 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, Exit, IterableVec, VecIndex, VecValue, Version};
use crate::internal::ComputedVecValue;
use super::{Distribution, SumCum};
/// Full stats aggregate: distribution + sum_cum
/// Matches the common full_stats() pattern: average + minmax + percentiles + sum + cumulative
#[derive(Clone, Traversable)]
pub struct Full<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    pub distribution: Distribution<I, T>,
    pub sum_cum: SumCum<I, T>,
}

impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Full<I, T> {
    /// Opens (or creates) both component aggregates under the same name/version.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        Ok(Self {
            distribution: Distribution::forced_import(db, name, version)?,
            sum_cum: SumCum::forced_import(db, name, version)?,
        })
    }

    /// Compute all stats from source data.
    ///
    /// This computes: average, min, max, percentiles (pct10, pct25, median, pct75, pct90), sum, cumulative
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // Shared aggregation driver; None slots are stats this type doesn't
        // track (first/last).
        crate::internal::compute_aggregations(
            max_from,
            source,
            first_indexes,
            count_indexes,
            exit,
            None, // first
            None, // last
            Some(&mut self.distribution.minmax.min.0),
            Some(&mut self.distribution.minmax.max.0),
            Some(&mut self.distribution.average.0),
            Some(&mut self.sum_cum.sum.0),
            Some(&mut self.sum_cum.cumulative.0),
            Some(&mut self.distribution.percentiles.median.0),
            Some(&mut self.distribution.percentiles.pct10.0),
            Some(&mut self.distribution.percentiles.pct25.0),
            Some(&mut self.distribution.percentiles.pct75.0),
            Some(&mut self.distribution.percentiles.pct90.0),
        )
    }

    /// Number of fully computed entries (minimum across both aggregates).
    pub fn len(&self) -> usize {
        self.distribution.len().min(self.sum_cum.len())
    }

    /// True when no entry has been fully computed yet.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clamp `max_from` so computation resumes no later than the first
    /// missing entry.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(self.len()))
    }

    /// Compute from aligned source (for coarser time periods like week from dateindex).
    ///
    /// NOTE: Percentiles cannot be derived from finer percentiles - they are skipped.
    pub fn compute_from_aligned<A>(
        &mut self,
        max_from: I,
        source: &Full<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // Note: Percentiles cannot be derived from finer percentiles, so we skip them
        crate::internal::compute_aggregations_from_aligned(
            max_from,
            first_indexes,
            count_indexes,
            exit,
            // Source vecs
            None, // first not in Full
            None, // last not in Full
            Some(&source.distribution.minmax.min.0),
            Some(&source.distribution.minmax.max.0),
            Some(&source.distribution.average.0),
            Some(&source.sum_cum.sum.0),
            // Target vecs
            None, // first
            None, // last
            Some(&mut self.distribution.minmax.min.0),
            Some(&mut self.distribution.minmax.max.0),
            Some(&mut self.distribution.average.0),
            Some(&mut self.sum_cum.sum.0),
            Some(&mut self.sum_cum.cumulative.0),
        )
    }
}

View File

@@ -0,0 +1,25 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, VecIndex, Version};
use crate::internal::vec::{MaxVec, MinVec};
use crate::internal::ComputedVecValue;
/// Paired minimum and maximum vecs stored under the same name.
#[derive(Clone, Traversable)]
pub struct MinMax<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    #[traversable(flatten)]
    pub min: MinVec<I, T>,
    #[traversable(flatten)]
    pub max: MaxVec<I, T>,
}

impl<I: VecIndex, T: ComputedVecValue + JsonSchema> MinMax<I, T> {
    /// Opens (or creates) both underlying vecs with the given name and version.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        let min = MinVec::forced_import(db, name, version)?;
        let max = MaxVec::forced_import(db, name, version)?;
        Ok(Self { min, max })
    }
}

View File

@@ -0,0 +1,106 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, Exit, IterableVec, VecIndex, VecValue, Version};
use crate::internal::ComputedVecValue;
use crate::internal::vec::AverageVec;
use super::MinMax;
/// Average + MinMax (for TxIndex dateindex aggregation - no percentiles)
#[derive(Clone, Traversable)]
pub struct MinMaxAverage<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    pub average: AverageVec<I, T>,
    #[traversable(flatten)]
    pub minmax: MinMax<I, T>,
}

impl<I: VecIndex, T: ComputedVecValue + JsonSchema> MinMaxAverage<I, T> {
    /// Opens (or creates) the component vecs under the same name/version.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        Ok(Self {
            average: AverageVec::forced_import(db, name, version)?,
            minmax: MinMax::forced_import(db, name, version)?,
        })
    }

    /// Compute average and minmax from source data.
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // Shared aggregation driver; None slots are stats this type doesn't
        // track (first/last/sum/cumulative/percentiles).
        crate::internal::compute_aggregations(
            max_from,
            source,
            first_indexes,
            count_indexes,
            exit,
            None, // first
            None, // last
            Some(&mut self.minmax.min.0),
            Some(&mut self.minmax.max.0),
            Some(&mut self.average.0),
            None, // sum
            None, // cumulative
            None, // median
            None, // pct10
            None, // pct25
            None, // pct75
            None, // pct90
        )
    }

    /// Compute from aligned source (for coarser time periods).
    pub fn compute_from_aligned<A>(
        &mut self,
        max_from: I,
        source: &MinMaxAverage<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        crate::internal::compute_aggregations_from_aligned(
            max_from,
            first_indexes,
            count_indexes,
            exit,
            // Source vecs
            None, // first
            None, // last
            Some(&source.minmax.min.0),
            Some(&source.minmax.max.0),
            Some(&source.average.0),
            None, // sum
            // Target vecs
            None, // first
            None, // last
            Some(&mut self.minmax.min.0),
            Some(&mut self.minmax.max.0),
            Some(&mut self.average.0),
            None, // sum
            None, // cumulative
        )
    }

    /// Number of fully computed entries (minimum across component vecs).
    pub fn len(&self) -> usize {
        self.average
            .0
            .len()
            .min(self.minmax.min.0.len())
            .min(self.minmax.max.0.len())
    }

    /// True when no entry has been fully computed yet.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clamp `max_from` so computation resumes no later than the first
    /// missing entry.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(self.len()))
    }
}

View File

@@ -0,0 +1,15 @@
mod distribution;
mod full;
mod min_max;
mod min_max_average;
mod percentiles;
mod stats;
mod sum_cum;
pub use distribution::*;
pub use full::*;
pub use min_max::*;
pub use min_max_average::*;
pub use percentiles::*;
pub use stats::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,29 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{Database, VecIndex, Version};
use crate::internal::vec::{MedianVec, Pct10Vec, Pct25Vec, Pct75Vec, Pct90Vec};
use crate::internal::ComputedVecValue;
/// All percentiles (pct10, pct25, median, pct75, pct90)
#[derive(Clone, Traversable)]
pub struct Percentiles<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    /// 10th percentile per index.
    pub pct10: Pct10Vec<I, T>,
    /// 25th percentile (lower quartile) per index.
    pub pct25: Pct25Vec<I, T>,
    /// 50th percentile per index.
    pub median: MedianVec<I, T>,
    /// 75th percentile (upper quartile) per index.
    pub pct75: Pct75Vec<I, T>,
    /// 90th percentile per index.
    pub pct90: Pct90Vec<I, T>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Percentiles<I, T> {
    /// Open (or create) all five percentile vecs under the same
    /// `name`/`version` in `db`, failing fast on the first error.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        let pct10 = Pct10Vec::forced_import(db, name, version)?;
        let pct25 = Pct25Vec::forced_import(db, name, version)?;
        let median = MedianVec::forced_import(db, name, version)?;
        let pct75 = Pct75Vec::forced_import(db, name, version)?;
        let pct90 = Pct90Vec::forced_import(db, name, version)?;
        Ok(Self {
            pct10,
            pct25,
            median,
            pct75,
            pct90,
        })
    }
}

View File

@@ -0,0 +1,72 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, Exit, IterableVec, VecIndex, VecValue, Version};
use crate::internal::vec::AverageVec;
use crate::internal::ComputedVecValue;
use super::{MinMax, SumCum};
/// Sum + Cumulative + Average + Min + Max. Like `Full` but without percentiles.
#[derive(Clone, Traversable)]
pub struct Stats<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    /// Per-index sum plus running cumulative total.
    pub sum_cum: SumCum<I, T>,
    /// Per-index average.
    pub average: AverageVec<I, T>,
    /// Per-index minimum and maximum.
    pub minmax: MinMax<I, T>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> Stats<I, T> {
    /// Open (or create) all backing vecs for `name` in `db`.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        Ok(Self {
            sum_cum: SumCum::forced_import(db, name, version)?,
            average: AverageVec::forced_import(db, name, version)?,
            minmax: MinMax::forced_import(db, name, version)?,
        })
    }
    /// Compute sum, cumulative, average, and minmax from source data.
    ///
    /// `first_indexes`/`count_indexes` map each target index `I` to the
    /// first source index `A` and the number of source entries it covers.
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // The `None`s disable the aggregations this type does not store;
        // the slot order is fixed by `compute_aggregations` — do not reorder.
        crate::internal::compute_aggregations(
            max_from,
            source,
            first_indexes,
            count_indexes,
            exit,
            None, // first
            None, // last
            Some(&mut self.minmax.min.0),
            Some(&mut self.minmax.max.0),
            Some(&mut self.average.0),
            Some(&mut self.sum_cum.sum.0),
            Some(&mut self.sum_cum.cumulative.0),
            None, // median
            None, // pct10
            None, // pct25
            None, // pct75
            None, // pct90
        )
    }
    /// Number of complete entries: the shortest of all backing vecs.
    pub fn len(&self) -> usize {
        self.sum_cum
            .len()
            .min(self.average.0.len())
            .min(self.minmax.min.0.len())
            .min(self.minmax.max.0.len())
    }
    /// Clamp `max_from` to the current length so recomputation never
    /// starts past the end of the stored data.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(self.len()))
    }
}

View File

@@ -0,0 +1,111 @@
use brk_error::Result;
use brk_traversable::Traversable;
use schemars::JsonSchema;
use vecdb::{AnyVec, Database, Exit, IterableVec, VecIndex, VecValue, Version};
use crate::internal::vec::{CumulativeVec, SumVec};
use crate::internal::ComputedVecValue;
/// Sum + Cumulative (12% of usage)
#[derive(Clone, Traversable)]
pub struct SumCum<I: VecIndex, T: ComputedVecValue + JsonSchema> {
    /// Per-index sum.
    #[traversable(flatten)]
    pub sum: SumVec<I, T>,
    /// Running cumulative total across indexes.
    #[traversable(flatten)]
    pub cumulative: CumulativeVec<I, T>,
}
impl<I: VecIndex, T: ComputedVecValue + JsonSchema> SumCum<I, T> {
    /// Open (or create) the sum and cumulative vecs for `name` in `db`.
    pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
        Ok(Self {
            sum: SumVec::forced_import(db, name, version)?,
            cumulative: CumulativeVec::forced_import(db, name, version)?,
        })
    }
    /// Compute sum and cumulative from source data.
    ///
    /// `first_indexes`/`count_indexes` map each target index `I` to the
    /// first source index `A` and the number of source entries it covers.
    pub fn compute<A>(
        &mut self,
        max_from: I,
        source: &impl IterableVec<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        // Only the sum and cumulative slots are enabled; the positional
        // `None`s disable every other aggregation. Slot order is fixed by
        // `compute_aggregations` — do not reorder.
        crate::internal::compute_aggregations(
            max_from,
            source,
            first_indexes,
            count_indexes,
            exit,
            None, // first
            None, // last
            None, // min
            None, // max
            None, // average
            Some(&mut self.sum.0),
            Some(&mut self.cumulative.0),
            None, // median
            None, // pct10
            None, // pct25
            None, // pct75
            None, // pct90
        )
    }
    /// Extend cumulative from an existing source vec.
    pub fn extend_cumulative(
        &mut self,
        max_from: I,
        source: &impl IterableVec<I, T>,
        exit: &Exit,
    ) -> Result<()> {
        crate::internal::compute_cumulative_extend(max_from, source, &mut self.cumulative.0, exit)
    }
    /// Number of complete entries: the shorter of the two backing vecs.
    pub fn len(&self) -> usize {
        self.sum.0.len().min(self.cumulative.0.len())
    }
    /// Clamp `max_from` to the current length so recomputation never
    /// starts past the end of the stored data.
    pub fn starting_index(&self, max_from: I) -> I {
        max_from.min(I::from(self.len()))
    }
    /// Compute from aligned source (for coarser time periods like week from dateindex).
    ///
    /// Re-aggregates the finer-grained sums of `source` into this index.
    /// Note: the source slot list has no cumulative entry — only the target
    /// side writes a cumulative vec.
    pub fn compute_from_aligned<A>(
        &mut self,
        max_from: I,
        source: &SumCum<A, T>,
        first_indexes: &impl IterableVec<I, A>,
        count_indexes: &impl IterableVec<I, brk_types::StoredU64>,
        exit: &Exit,
    ) -> Result<()>
    where
        A: VecIndex + VecValue + brk_types::CheckedSub<A>,
    {
        crate::internal::compute_aggregations_from_aligned(
            max_from,
            first_indexes,
            count_indexes,
            exit,
            // Source vecs
            None, // first
            None, // last
            None, // min
            None, // max
            None, // average
            Some(&source.sum.0),
            // Target vecs
            None, // first
            None, // last
            None, // min
            None, // max
            None, // average
            Some(&mut self.sum.0),
            Some(&mut self.cumulative.0),
        )
    }
}

View File

@@ -1,259 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, TreeNode, Version, WeekIndex,
YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, BinaryTransform, IterableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedVecValue, ComputedVecsFromDateIndex, ComputedVecsFromHeight, LazyTransform2Builder,
};
// Version component added to every vec built by this module.
const VERSION: Version = Version::ZERO;
/// Lazy binary transform from two `ComputedVecsFromDateIndex` sources.
#[derive(Clone)]
pub struct LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Day-level transform; populated only when the required dateindex
    /// source vec(s) exist (see the constructors for which are needed).
    pub dateindex: Option<LazyVecFrom2<DateIndex, T, DateIndex, S1T, DateIndex, S2T>>,
    // One lazy builder per coarser calendar granularity.
    pub weekindex: LazyTransform2Builder<WeekIndex, T, S1T, S2T>,
    pub monthindex: LazyTransform2Builder<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2Builder<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2Builder<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2Builder<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2Builder<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Create from two `ComputedVecsFromDateIndex` sources.
    ///
    /// The transform `F` is applied element-wise at every granularity.
    pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedVecsFromDateIndex<S1T>,
        source2: &ComputedVecsFromDateIndex<S2T>,
    ) -> Self {
        let v = version + VERSION;
        Self {
            // Day-level vec exists only when BOTH sources have one (`zip`).
            dateindex: source1
                .dateindex
                .as_ref()
                .zip(source2.dateindex.as_ref())
                .map(|(s1, s2)| {
                    LazyVecFrom2::transformed::<F>(name, v, s1.boxed_clone(), s2.boxed_clone())
                }),
            // Coarser granularities: identical wiring, one builder each.
            weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.weekindex,
                &source2.weekindex,
            ),
            monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.monthindex,
                &source2.monthindex,
            ),
            quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.quarterindex,
                &source2.quarterindex,
            ),
            semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.semesterindex,
                &source2.semesterindex,
            ),
            yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.yearindex,
                &source2.yearindex,
            ),
            decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.decadeindex,
                &source2.decadeindex,
            ),
        }
    }
    /// Create from a `ComputedVecsFromHeight` (first source) and `ComputedVecsFromDateIndex` (second source).
    /// Used for computing USD values from price (Height-based) and ratio (DateIndex-based).
    pub fn from_height_and_dateindex<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedVecsFromHeight<S1T>,
        source2: &ComputedVecsFromDateIndex<S2T>,
    ) -> Self {
        let v = version + VERSION;
        Self {
            // Only source2's dateindex gates the day-level vec; source1's
            // height data is reduced to its per-day last value.
            dateindex: source2.dateindex.as_ref().map(|s2| {
                LazyVecFrom2::transformed::<F>(
                    name,
                    v,
                    source1.dateindex.unwrap_last().boxed_clone(),
                    s2.boxed_clone(),
                )
            }),
            weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.weekindex,
                &source2.weekindex,
            ),
            monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.monthindex,
                &source2.monthindex,
            ),
            quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.quarterindex,
                &source2.quarterindex,
            ),
            semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.semesterindex,
                &source2.semesterindex,
            ),
            yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.yearindex,
                &source2.yearindex,
            ),
            decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.decadeindex,
                &source2.decadeindex,
            ),
        }
    }
    /// Create from a `ComputedVecsFromDateIndex` (first source) and `ComputedVecsFromHeight` (second source).
    /// Used for ratios like NVT where numerator is from dateindex (market cap) and denominator from height (volume sum).
    pub fn from_dateindex_and_height<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedVecsFromDateIndex<S1T>,
        source2: &ComputedVecsFromHeight<S2T>,
    ) -> Self
    where
        S2T: Ord + From<f64> + 'static,
        f64: From<S2T>,
    {
        let v = version + VERSION;
        Self {
            // Only source1's dateindex gates the day-level vec; source2's
            // height data is reduced to its per-day sum.
            dateindex: source1.dateindex.as_ref().map(|s1| {
                LazyVecFrom2::transformed::<F>(
                    name,
                    v,
                    s1.boxed_clone(),
                    source2.dateindex.unwrap_sum().boxed_clone(),
                )
            }),
            weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.weekindex,
                &source2.weekindex,
            ),
            monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.monthindex,
                &source2.monthindex,
            ),
            quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.quarterindex,
                &source2.quarterindex,
            ),
            semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.semesterindex,
                &source2.semesterindex,
            ),
            yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.yearindex,
                &source2.yearindex,
            ),
            decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.decadeindex,
                &source2.decadeindex,
            ),
        }
    }
}
impl<T, S1T, S2T> Traversable for LazyVecsFrom2FromDateIndex<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Build the metadata tree; `dateindex` is included only when present.
    fn to_tree_node(&self) -> TreeNode {
        TreeNode::Branch(
            [
                self.dateindex
                    .as_ref()
                    .map(|v| ("dateindex".to_string(), v.to_tree_node())),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        // NOTE(review): unwrap assumes branches are always mergeable for
        // this shape — confirm `merge_branches` cannot fail here.
        .unwrap()
    }
    /// Chain the exportable vecs of every granularity, dateindex first
    /// when it exists.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let mut iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(std::iter::empty());
        if let Some(ref v) = self.dateindex {
            iter = Box::new(iter.chain(v.iter_any_exportable()));
        }
        iter = Box::new(iter.chain(self.weekindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.monthindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.quarterindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.semesterindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.yearindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.decadeindex.iter_any_exportable()));
        iter
    }
}

View File

@@ -1,242 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
TreeNode, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, BinaryTransform, IterableBoxedVec, LazyVecFrom2};
use crate::internal::{
ComputedVecValue, ComputedVecsFromHeight, ComputedVecsFromTxindex, LazyTransform2Builder,
};
// Version component added to every vec built by this module.
const VERSION: Version = Version::ZERO;
/// Lazy binary transform from two `ComputedVecsFromHeight` sources.
#[derive(Clone)]
pub struct LazyVecsFrom2FromHeight<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Block-height-level transform (always present, unlike the dateindex
    /// variant of this type).
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Additional height-derived vecs.
    pub height_extra: LazyTransform2Builder<Height, T, S1T, S2T>,
    // One lazy builder per derived granularity.
    pub dateindex: LazyTransform2Builder<DateIndex, T, S1T, S2T>,
    pub weekindex: LazyTransform2Builder<WeekIndex, T, S1T, S2T>,
    pub difficultyepoch: LazyTransform2Builder<DifficultyEpoch, T, S1T, S2T>,
    pub monthindex: LazyTransform2Builder<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2Builder<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2Builder<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2Builder<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2Builder<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyVecsFrom2FromHeight<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Create from two `ComputedVecsFromHeight` sources with explicit height sources.
    ///
    /// `height_source1`/`height_source2` provide the raw height-level vecs
    /// for the base transform; the derived granularities are wired from the
    /// corresponding fields of `source1`/`source2`.
    pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedVecsFromHeight<S1T>,
        source2: &ComputedVecsFromHeight<S2T>,
    ) -> Self {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            // `height_extra` and `dateindex` come from eagerly-computed
            // builders; the remaining granularities from lazy ones.
            height_extra: LazyTransform2Builder::from_eager::<F>(
                name,
                v,
                &source1.height_extra,
                &source2.height_extra,
            ),
            dateindex: LazyTransform2Builder::from_eager::<F>(
                name,
                v,
                &source1.dateindex,
                &source2.dateindex,
            ),
            weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.weekindex,
                &source2.weekindex,
            ),
            difficultyepoch: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.difficultyepoch,
                &source2.difficultyepoch,
            ),
            monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.monthindex,
                &source2.monthindex,
            ),
            quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.quarterindex,
                &source2.quarterindex,
            ),
            semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.semesterindex,
                &source2.semesterindex,
            ),
            yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.yearindex,
                &source2.yearindex,
            ),
            decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.decadeindex,
                &source2.decadeindex,
            ),
        }
    }
    /// Create from a `ComputedVecsFromHeight` and a `ComputedVecsFromTxindex`.
    /// Used for ratios like type_count / total_output_count where the denominator
    /// comes from txindex-aggregated data.
    pub fn from_height_and_txindex<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedVecsFromHeight<S1T>,
        source2: &ComputedVecsFromTxindex<S2T>,
    ) -> Self
    where
        S2T: Ord + From<f64> + 'static,
        f64: From<S2T>,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            // For height_extra, source2 uses .height (EagerVecsBuilder) instead of .height_extra
            height_extra: LazyTransform2Builder::from_eager::<F>(
                name,
                v,
                &source1.height_extra,
                &source2.height,
            ),
            dateindex: LazyTransform2Builder::from_eager::<F>(
                name,
                v,
                &source1.dateindex,
                &source2.dateindex,
            ),
            weekindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.weekindex,
                &source2.weekindex,
            ),
            difficultyepoch: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.difficultyepoch,
                &source2.difficultyepoch,
            ),
            monthindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.monthindex,
                &source2.monthindex,
            ),
            quarterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.quarterindex,
                &source2.quarterindex,
            ),
            semesterindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.semesterindex,
                &source2.semesterindex,
            ),
            yearindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.yearindex,
                &source2.yearindex,
            ),
            decadeindex: LazyTransform2Builder::from_lazy::<F, _, _, _, _>(
                name,
                v,
                &source1.decadeindex,
                &source2.decadeindex,
            ),
        }
    }
}
impl<T, S1T, S2T> Traversable for LazyVecsFrom2FromHeight<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Build the metadata tree; `height_extra` is included only when its
    /// subtree is non-empty.
    fn to_tree_node(&self) -> TreeNode {
        let height_extra_node = self.height_extra.to_tree_node();
        TreeNode::Branch(
            [
                Some(("height".to_string(), self.height.to_tree_node())),
                if height_extra_node.is_empty() {
                    None
                } else {
                    Some(("height_extra".to_string(), height_extra_node))
                },
                Some(("dateindex".to_string(), self.dateindex.to_tree_node())),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some((
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                )),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        // NOTE(review): unwrap assumes branches are always mergeable for
        // this shape — confirm `merge_branches` cannot fail here.
        .unwrap()
    }
    /// Chain the exportable vecs of every granularity, height first.
    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        let mut iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(self.height.iter_any_exportable());
        iter = Box::new(iter.chain(self.height_extra.iter_any_exportable()));
        iter = Box::new(iter.chain(self.dateindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.weekindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.difficultyepoch.iter_any_exportable()));
        iter = Box::new(iter.chain(self.monthindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.quarterindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.semesterindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.yearindex.iter_any_exportable()));
        iter = Box::new(iter.chain(self.decadeindex.iter_any_exportable()));
        iter
    }
}

View File

@@ -1,5 +0,0 @@
mod from_dateindex;
mod from_height;
pub use from_dateindex::*;
pub use from_height::*;

View File

@@ -0,0 +1,61 @@
//! Lazy binary transform from Full sources.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2};
use crate::internal::{ComputedBlockFull, ComputedVecValue, DerivedTxFull, NumericValue};
use super::super::derived_block::LazyDerivedBlock2SumCum;
/// Binary height-level transform plus full derived block aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct BinaryBlockFull<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Height-level lazy transform of the two sources.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived date/difficulty-epoch aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlock2SumCum<T, S1T, S2T>,
}
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> BinaryBlockFull<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: NumericValue + JsonSchema,
    S2T: NumericValue + JsonSchema,
{
    /// Build from a height-based source and a txindex-derived source.
    ///
    /// `height_source1`/`height_source2` feed the height-level transform;
    /// the derived aggregations are wired from `source1`/`source2` fields.
    pub fn from_height_and_txindex<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedBlockFull<S1T>,
        source2: &DerivedTxFull<S2T>,
    ) -> Self {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            // Note the asymmetry: source1 provides `.rest`, while source2
            // (a DerivedTxFull) provides `.dates` for the same slot.
            rest: LazyDerivedBlock2SumCum::from_derived_full::<F, _, _, _, _>(
                name,
                v,
                &source1.dateindex.sum_cum,
                &source1.rest,
                &source1.difficultyepoch,
                &source2.dateindex.sum_cum,
                &source2.dates,
                &source2.difficultyepoch,
            ),
        }
    }
}

View File

@@ -0,0 +1,52 @@
//! Lazy binary transform from two Sum-only sources with height level.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, NumericValue};
use super::super::derived_block::LazyDerivedBlock2Sum;
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
/// Binary height-level transform plus sum-only derived aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct BinaryBlockSum<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Height-level lazy transform of the two sources.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived sum aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlock2Sum<T, S1T, S2T>,
}
impl<T, S1T, S2T> BinaryBlockSum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: NumericValue + JsonSchema,
    S2T: NumericValue + JsonSchema,
{
    /// Build the lazy binary transform from two derived sum-only block
    /// sources. The height-level vecs feed the base transform; all derived
    /// granularities are wired through `LazyDerivedBlock2Sum`.
    pub fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &DerivedComputedBlockSum<S1T>,
        source2: &DerivedComputedBlockSum<S2T>,
    ) -> Self {
        let combined_version = version + VERSION;
        let height = LazyVecFrom2::transformed::<F>(
            name,
            combined_version,
            height_source1,
            height_source2,
        );
        let rest =
            LazyDerivedBlock2Sum::from_derived::<F>(name, combined_version, source1, source2);
        Self { height, rest }
    }
}

View File

@@ -0,0 +1,108 @@
//! Lazy binary transform from two SumCum sources.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2};
use crate::internal::{ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockSumCum};
use super::super::derived_block::LazyDerivedBlock2SumCum;
/// Binary height-level transform (base + cumulative) plus derived sum/cum
/// aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct BinaryBlockSumCum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Height-level lazy transform of the two per-block sources.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Height-level lazy transform of the two CUMULATIVE sources.
    #[traversable(wrap = "cumulative")]
    pub height_cumulative: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived date/difficulty-epoch aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlock2SumCum<T, S1T, S2T>,
}
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> BinaryBlockSumCum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Build from two `ComputedBlockSumCum` sources.
    pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedBlockSumCum<S1T>,
        source2: &ComputedBlockSumCum<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: PartialOrd,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            // The cumulative vec applies the same transform F to the two
            // sources' cumulative height vecs, under a "_cumulative" name.
            height_cumulative: LazyVecFrom2::transformed::<F>(
                &format!("{name}_cumulative"),
                v,
                source1.height_cumulative.0.boxed_clone(),
                source2.height_cumulative.0.boxed_clone(),
            ),
            rest: LazyDerivedBlock2SumCum::from_computed::<F>(
                name,
                v,
                &source1.dateindex,
                &source1.rest,
                &source1.difficultyepoch,
                &source2.dateindex,
                &source2.rest,
                &source2.difficultyepoch,
            ),
        }
    }
    /// Build from two `DerivedComputedBlockSumCum` sources.
    pub fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &DerivedComputedBlockSumCum<S1T>,
        source2: &DerivedComputedBlockSumCum<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: PartialOrd,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            height_cumulative: LazyVecFrom2::transformed::<F>(
                &format!("{name}_cumulative"),
                v,
                source1.height_cumulative.0.boxed_clone(),
                source2.height_cumulative.0.boxed_clone(),
            ),
            // Unlike `from_computed`, the derived type itself is passed in
            // the `rest` slot (instead of a `.rest` field).
            rest: LazyDerivedBlock2SumCum::from_computed::<F>(
                name,
                v,
                &source1.dateindex,
                source1,
                &source1.difficultyepoch,
                &source2.dateindex,
                source2,
                &source2.difficultyepoch,
            ),
        }
    }
}

View File

@@ -0,0 +1,121 @@
//! Lazy binary transform from SumCum + Last sources.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, LazyVecFrom2};
use crate::internal::{
ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast,
DerivedComputedBlockSumCum, NumericValue,
};
use super::super::derived_block::LazyDerivedBlock2SumCumLast;
/// Binary height-level transform of a SumCum source with a Last source.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct BinaryBlockSumCumLast<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Height-level lazy transform of the two sources.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom2<Height, T, Height, S1T, Height, S2T>,
    /// Derived aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlock2SumCumLast<T, S1T, S2T>,
}
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
impl<T, S1T, S2T> BinaryBlockSumCumLast<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    // The four constructors below differ only in which combination of
    // computed/derived source types they accept; each forwards to the
    // matching `LazyDerivedBlock2SumCumLast` constructor.
    /// computed SumCum × computed Last.
    pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedBlockSumCum<S1T>,
        source2: &ComputedBlockLast<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            rest: LazyDerivedBlock2SumCumLast::from_computed::<F>(name, v, source1, source2),
        }
    }
    /// derived SumCum × computed Last.
    pub fn from_derived_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &DerivedComputedBlockSumCum<S1T>,
        source2: &ComputedBlockLast<S2T>,
    ) -> Self
    where
        S1T: NumericValue,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            rest: LazyDerivedBlock2SumCumLast::from_derived_computed_full::<F>(
                name, v, source1, source2,
            ),
        }
    }
    /// derived SumCum × derived Last.
    pub fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &DerivedComputedBlockSumCum<S1T>,
        source2: &DerivedComputedBlockLast<S2T>,
    ) -> Self
    where
        S1T: NumericValue,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            rest: LazyDerivedBlock2SumCumLast::from_derived_computed::<F>(
                name, v, source1, source2,
            ),
        }
    }
    /// computed SumCum × derived Last.
    pub fn from_computed_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        height_source1: IterableBoxedVec<Height, S1T>,
        height_source2: IterableBoxedVec<Height, S2T>,
        source1: &ComputedBlockSumCum<S1T>,
        source2: &DerivedComputedBlockLast<S2T>,
    ) -> Self
    where
        S1T: PartialOrd,
        S2T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom2::transformed::<F>(name, v, height_source1, height_source2),
            rest: LazyDerivedBlock2SumCumLast::from_computed_derived_computed::<F>(
                name, v, source1, source2,
            ),
        }
    }
}

View File

@@ -0,0 +1,70 @@
//! Lazy unary transform from height with Full aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedBlockFull, ComputedVecValue, DerivedComputedBlockFull, NumericValue,
};
use super::super::derived_block::LazyDerivedBlockFull;
/// Unary height-level transform plus full derived block aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBlockFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Height-level lazy transform of the single source.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Derived date/difficulty-epoch aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlockFull<T, S1T>,
}
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyBlockFull<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Build from an eagerly computed block source: the height-level vec is
    /// transformed lazily via `F`, and all derived granularities are wired
    /// from the source's fields.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedBlockFull<S1T>,
    ) -> Self {
        let combined = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, combined, height_source);
        let rest = LazyDerivedBlockFull::from_computed::<F>(
            name,
            combined,
            &source.dateindex,
            &source.rest,
            &source.difficultyepoch,
        );
        Self { height, rest }
    }
    /// Build from a derived block source; the derived type is forwarded
    /// whole to `LazyDerivedBlockFull`.
    pub fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &DerivedComputedBlockFull<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let combined = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, combined, height_source);
        let rest = LazyDerivedBlockFull::from_derived_computed::<F>(name, combined, source);
        Self { height, rest }
    }
}

View File

@@ -0,0 +1,66 @@
//! Lazy unary transform from height with Last aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedBlockLast, ComputedVecValue, DerivedComputedBlockLast, NumericValue,
};
use super::super::derived_block::LazyDerivedBlockLast;
/// Unary height-level transform plus last-value derived aggregations.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBlockLast<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Height-level lazy transform of the single source.
    /// NOTE(review): unlike the sibling `LazyBlock*` types, this field has
    /// no `#[traversable(wrap = "base")]` attribute — confirm intentional.
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    /// Derived last-value aggregations; `Deref` targets this.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlockLast<T, S1T>,
}
// Version component added to every vec built by this type.
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyBlockLast<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Build from an eagerly computed last-value block source.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedBlockLast<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom1::transformed::<F>(name, v, height_source),
            rest: LazyDerivedBlockLast::from_computed::<F>(name, v, source),
        }
    }
    /// Build from a derived last-value block source.
    pub fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &DerivedComputedBlockLast<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let v = version + VERSION;
        Self {
            height: LazyVecFrom1::transformed::<F>(name, v, height_source),
            rest: LazyDerivedBlockLast::from_derived_computed::<F>(name, v, source),
        }
    }
}

View File

@@ -0,0 +1,17 @@
mod binary_full;
mod binary_sum;
mod binary_sum_cum;
mod binary_sum_cum_last;
mod full;
mod last;
mod sum;
mod sum_cum;
pub use binary_full::*;
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use binary_sum_cum_last::*;
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,68 @@
//! Lazy unary transform from height with Sum aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{ComputedBlockSum, ComputedVecValue, DerivedComputedBlockSum, NumericValue};
use super::super::derived_block::LazyDerivedBlockSum;
/// Lazy unary transform over a height-indexed source with "sum" aggregation.
///
/// Pairs the height-level lazy vec (wrapped as "base" in the traversal)
/// with the derived per-period views in `rest`; `Deref`/`DerefMut`
/// forward to `rest`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBlockSum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Height-indexed lazy transform of the source values.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    // Derived (non-height) index views; flattened into the traversal.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlockSum<T, S1T>,
}
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyBlockSum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy view from a computed block-sum source.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedBlockSum<S1T>,
    ) -> Self {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        // The derived views are rebuilt from the source's parts.
        let rest = LazyDerivedBlockSum::from_computed::<F>(
            name,
            ver,
            &source.dateindex,
            &source.rest,
            &source.difficultyepoch,
        );
        Self { height, rest }
    }

    /// Builds the lazy view from a derived computed block-sum source.
    pub fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &DerivedComputedBlockSum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        let rest = LazyDerivedBlockSum::from_derived_computed::<F>(name, ver, source);
        Self { height, rest }
    }
}

View File

@@ -0,0 +1,70 @@
//! Lazy unary transform from height with SumCum aggregation.
use brk_traversable::Traversable;
use brk_types::{Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{
ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockSumCum, NumericValue,
};
use super::super::derived_block::LazyDerivedBlockSumCum;
/// Lazy unary transform over a height-indexed source with sum + cumulative
/// aggregation.
///
/// Pairs the height-level lazy vec (wrapped as "base" in the traversal)
/// with the derived per-period views in `rest`; `Deref`/`DerefMut`
/// forward to `rest`.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyBlockSumCum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    /// Height-indexed lazy transform of the source values.
    #[traversable(wrap = "base")]
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    // Derived (non-height) index views; flattened into the traversal.
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub rest: LazyDerivedBlockSumCum<T, S1T>,
}
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyBlockSumCum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy view from a computed block sum+cumulative source.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &ComputedBlockSumCum<S1T>,
    ) -> Self {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        // The derived views are rebuilt from the source's parts.
        let rest = LazyDerivedBlockSumCum::from_computed::<F>(
            name,
            ver,
            &source.dateindex,
            &source.rest,
            &source.difficultyepoch,
        );
        Self { height, rest }
    }

    /// Builds the lazy view from a derived computed block sum+cumulative source.
    pub fn from_derived<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        height_source: IterableBoxedVec<Height, S1T>,
        source: &DerivedComputedBlockSumCum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let height = LazyVecFrom1::transformed::<F>(name, ver, height_source);
        let rest = LazyDerivedBlockSumCum::from_derived_computed::<F>(name, ver, source);
        Self { height, rest }
    }
}

View File

@@ -0,0 +1,320 @@
//! Binary transform composite from DateIndex - Last aggregation only.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableBoxedVec, IterableCloneableVec, LazyVecFrom2};
use crate::internal::{
ComputedBlockLast, ComputedBlockSum, ComputedDateLast, ComputedVecValue, DerivedDateLast,
NumericValue,
};
use super::super::transform::LazyTransform2Last;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Binary (two-source) transform composite over date-based indexes,
/// "last" aggregation only.
///
/// One lazy transform per calendar granularity, all built from the same
/// pair of sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct BinaryDateLast<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    /// Day-level binary transform; the remaining fields are the coarser periods.
    pub dateindex: LazyVecFrom2<DateIndex, T, DateIndex, S1T, DateIndex, S2T>,
    pub weekindex: LazyTransform2Last<WeekIndex, T, S1T, S2T>,
    pub monthindex: LazyTransform2Last<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2Last<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2Last<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2Last<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2Last<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> BinaryDateLast<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Combines two computed date-last sources.
    pub fn from_computed_both_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedDateLast<S1T>,
        source2: &ComputedDateLast<S2T>,
    ) -> Self {
        let v = version + VERSION;
        // Every coarser period is wired identically; keep the wiring table-like.
        macro_rules! period {
            ($field:ident) => {
                LazyTransform2Last::from_lazy_last::<F, _, _, _, _>(
                    name,
                    v,
                    &source1.$field,
                    &source2.$field,
                )
            };
        }
        Self {
            dateindex: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.dateindex.boxed_clone(),
                source2.dateindex.boxed_clone(),
            ),
            weekindex: period!(weekindex),
            monthindex: period!(monthindex),
            quarterindex: period!(quarterindex),
            semesterindex: period!(semesterindex),
            yearindex: period!(yearindex),
            decadeindex: period!(decadeindex),
        }
    }

    /// Combines a derived date-last source (with its day-level vec passed
    /// separately) and a computed date-last source.
    pub fn from_derived_last_and_computed_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        dateindex_source1: IterableBoxedVec<DateIndex, S1T>,
        source1: &DerivedDateLast<S1T>,
        source2: &ComputedDateLast<S2T>,
    ) -> Self {
        let v = version + VERSION;
        macro_rules! period {
            ($field:ident) => {
                LazyTransform2Last::from_lazy_last::<F, _, _, _, _>(
                    name,
                    v,
                    &source1.$field,
                    &source2.$field,
                )
            };
        }
        Self {
            dateindex: LazyVecFrom2::transformed::<F>(
                name,
                v,
                dateindex_source1,
                source2.dateindex.boxed_clone(),
            ),
            weekindex: period!(weekindex),
            monthindex: period!(monthindex),
            quarterindex: period!(quarterindex),
            semesterindex: period!(semesterindex),
            yearindex: period!(yearindex),
            decadeindex: period!(decadeindex),
        }
    }

    /// Combines two derived date-last sources, each with its own day-level vec.
    pub fn from_both_derived_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        dateindex_source1: IterableBoxedVec<DateIndex, S1T>,
        source1: &DerivedDateLast<S1T>,
        dateindex_source2: IterableBoxedVec<DateIndex, S2T>,
        source2: &DerivedDateLast<S2T>,
    ) -> Self {
        let v = version + VERSION;
        macro_rules! period {
            ($field:ident) => {
                LazyTransform2Last::from_lazy_last::<F, _, _, _, _>(
                    name,
                    v,
                    &source1.$field,
                    &source2.$field,
                )
            };
        }
        Self {
            dateindex: LazyVecFrom2::transformed::<F>(name, v, dateindex_source1, dateindex_source2),
            weekindex: period!(weekindex),
            monthindex: period!(monthindex),
            quarterindex: period!(quarterindex),
            semesterindex: period!(semesterindex),
            yearindex: period!(yearindex),
            decadeindex: period!(decadeindex),
        }
    }

    /// Combines a computed block-last source (day view taken from its
    /// dateindex member) with a computed date-last source.
    pub fn from_height_and_dateindex_last<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedBlockLast<S1T>,
        source2: &ComputedDateLast<S2T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let v = version + VERSION;
        macro_rules! period {
            ($field:ident) => {
                LazyTransform2Last::from_lazy_last::<F, _, _, _, _>(
                    name,
                    v,
                    &source1.$field,
                    &source2.$field,
                )
            };
        }
        Self {
            dateindex: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.dateindex.0.boxed_clone(),
                source2.dateindex.boxed_clone(),
            ),
            weekindex: period!(weekindex),
            monthindex: period!(monthindex),
            quarterindex: period!(quarterindex),
            semesterindex: period!(semesterindex),
            yearindex: period!(yearindex),
            decadeindex: period!(decadeindex),
        }
    }

    /// Combines a computed date-last source with a computed block-sum source.
    pub fn from_dateindex_last_and_height_sum<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &ComputedDateLast<S1T>,
        source2: &ComputedBlockSum<S2T>,
    ) -> Self
    where
        S2T: NumericValue,
    {
        let v = version + VERSION;
        // Here both sides are taken as plain boxed vecs per period.
        macro_rules! period {
            ($field:ident) => {
                LazyTransform2Last::from_vecs::<F>(
                    name,
                    v,
                    source1.$field.boxed_clone(),
                    source2.$field.boxed_clone(),
                )
            };
        }
        Self {
            dateindex: LazyVecFrom2::transformed::<F>(
                name,
                v,
                source1.dateindex.boxed_clone(),
                source2.dateindex.0.boxed_clone(),
            ),
            weekindex: period!(weekindex),
            monthindex: period!(monthindex),
            quarterindex: period!(quarterindex),
            semesterindex: period!(semesterindex),
            yearindex: period!(yearindex),
            decadeindex: period!(decadeindex),
        }
    }
}

View File

@@ -0,0 +1,90 @@
//! Binary transform for Sum-only pattern across date periods.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, LazyTransform2Sum, NumericValue};
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Binary (two-source) transform over date-based indexes, sum-only pattern.
///
/// One lazy sum transform per calendar granularity, all built from the same
/// pair of sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDate2Sum<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    pub dateindex: LazyTransform2Sum<DateIndex, T, S1T, S2T>,
    pub weekindex: LazyTransform2Sum<WeekIndex, T, S1T, S2T>,
    pub monthindex: LazyTransform2Sum<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2Sum<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2Sum<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2Sum<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2Sum<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDate2Sum<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: NumericValue + JsonSchema,
S2T: NumericValue + JsonSchema,
{
pub fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &DerivedComputedBlockSum<S1T>,
source2: &DerivedComputedBlockSum<S2T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransform2Sum::from_sum::<F>(
name,
v,
&source1.dateindex,
&source2.dateindex,
),
weekindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.weekindex.boxed_clone(),
source2.weekindex.boxed_clone(),
),
monthindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.monthindex.boxed_clone(),
source2.monthindex.boxed_clone(),
),
quarterindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.quarterindex.boxed_clone(),
source2.quarterindex.boxed_clone(),
),
semesterindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.semesterindex.boxed_clone(),
source2.semesterindex.boxed_clone(),
),
yearindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.yearindex.boxed_clone(),
source2.yearindex.boxed_clone(),
),
decadeindex: LazyTransform2Sum::from_boxed::<F>(
name,
v,
source1.decadeindex.boxed_clone(),
source2.decadeindex.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,153 @@
//! Binary transform for SumCum pattern across date periods.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{ComputedVecValue, DerivedDateFull, DerivedDateSumCum, SumCum};
use super::super::transform::LazyTransform2SumCum;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Binary (two-source) transform over date-based indexes, sum + cumulative
/// pattern.
///
/// One lazy sum+cumulative transform per calendar granularity, all built
/// from the same pair of sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDate2SumCum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    pub dateindex: LazyTransform2SumCum<DateIndex, T, S1T, S2T>,
    pub weekindex: LazyTransform2SumCum<WeekIndex, T, S1T, S2T>,
    pub monthindex: LazyTransform2SumCum<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2SumCum<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2SumCum<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2SumCum<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2SumCum<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDate2SumCum<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
#[allow(clippy::too_many_arguments)]
pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
dateindex1: &SumCum<DateIndex, S1T>,
periods1: &DerivedDateSumCum<S1T>,
dateindex2: &SumCum<DateIndex, S2T>,
periods2: &DerivedDateSumCum<S2T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCum::from_sum_cum::<F>(name, v, dateindex1, dateindex2),
weekindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.weekindex.sum.boxed_clone(),
periods2.weekindex.sum.boxed_clone(),
periods1.weekindex.cumulative.boxed_clone(),
periods2.weekindex.cumulative.boxed_clone(),
),
monthindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.monthindex.sum.boxed_clone(),
periods2.monthindex.sum.boxed_clone(),
periods1.monthindex.cumulative.boxed_clone(),
periods2.monthindex.cumulative.boxed_clone(),
),
quarterindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.quarterindex.sum.boxed_clone(),
periods2.quarterindex.sum.boxed_clone(),
periods1.quarterindex.cumulative.boxed_clone(),
periods2.quarterindex.cumulative.boxed_clone(),
),
semesterindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.semesterindex.sum.boxed_clone(),
periods2.semesterindex.sum.boxed_clone(),
periods1.semesterindex.cumulative.boxed_clone(),
periods2.semesterindex.cumulative.boxed_clone(),
),
yearindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.yearindex.sum.boxed_clone(),
periods2.yearindex.sum.boxed_clone(),
periods1.yearindex.cumulative.boxed_clone(),
periods2.yearindex.cumulative.boxed_clone(),
),
decadeindex: LazyTransform2SumCum::from_sources::<F>(
name,
v,
periods1.decadeindex.sum.boxed_clone(),
periods2.decadeindex.sum.boxed_clone(),
periods1.decadeindex.cumulative.boxed_clone(),
periods2.decadeindex.cumulative.boxed_clone(),
),
}
}
pub fn from_derived_full<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
dateindex1: &SumCum<DateIndex, S1T>,
dates1: &DerivedDateFull<S1T>,
dateindex2: &SumCum<DateIndex, S2T>,
dates2: &DerivedDateFull<S2T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCum::from_sum_cum::<F>(name, v, dateindex1, dateindex2),
weekindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.weekindex,
&dates2.weekindex,
),
monthindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.monthindex,
&dates2.monthindex,
),
quarterindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.quarterindex,
&dates2.quarterindex,
),
semesterindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.semesterindex,
&dates2.semesterindex,
),
yearindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.yearindex,
&dates2.yearindex,
),
decadeindex: LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
name,
v,
&dates1.decadeindex,
&dates2.decadeindex,
),
}
}
}

View File

@@ -0,0 +1,297 @@
//! Binary transform for SumCum + Last pattern across date periods.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast,
DerivedComputedBlockSumCum, NumericValue,
};
use super::super::transform::LazyTransform2SumCumLast;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Binary (two-source) transform over date-based indexes, combining a
/// sum+cumulative source with a "last" source.
///
/// One lazy transform per calendar granularity, all built from the same
/// pair of sources.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDate2SumCumLast<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    pub dateindex: LazyTransform2SumCumLast<DateIndex, T, S1T, S2T>,
    pub weekindex: LazyTransform2SumCumLast<WeekIndex, T, S1T, S2T>,
    pub monthindex: LazyTransform2SumCumLast<MonthIndex, T, S1T, S2T>,
    pub quarterindex: LazyTransform2SumCumLast<QuarterIndex, T, S1T, S2T>,
    pub semesterindex: LazyTransform2SumCumLast<SemesterIndex, T, S1T, S2T>,
    pub yearindex: LazyTransform2SumCumLast<YearIndex, T, S1T, S2T>,
    pub decadeindex: LazyTransform2SumCumLast<DecadeIndex, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDate2SumCumLast<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedBlockSumCum<S1T>,
source2: &ComputedBlockLast<S2T>,
) -> Self
where
S1T: PartialOrd,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCumLast::from_sources::<F>(
name,
v,
&source1.dateindex,
&source2.dateindex,
),
weekindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.weekindex.sum.boxed_clone(),
source1.rest.weekindex.cumulative.boxed_clone(),
source2.rest.weekindex.boxed_clone(),
),
monthindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.monthindex.sum.boxed_clone(),
source1.rest.monthindex.cumulative.boxed_clone(),
source2.rest.monthindex.boxed_clone(),
),
quarterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.quarterindex.sum.boxed_clone(),
source1.rest.quarterindex.cumulative.boxed_clone(),
source2.rest.quarterindex.boxed_clone(),
),
semesterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.semesterindex.sum.boxed_clone(),
source1.rest.semesterindex.cumulative.boxed_clone(),
source2.rest.semesterindex.boxed_clone(),
),
yearindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.yearindex.sum.boxed_clone(),
source1.rest.yearindex.cumulative.boxed_clone(),
source2.rest.yearindex.boxed_clone(),
),
decadeindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.decadeindex.sum.boxed_clone(),
source1.rest.decadeindex.cumulative.boxed_clone(),
source2.rest.decadeindex.boxed_clone(),
),
}
}
pub fn from_derived_computed_full<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &DerivedComputedBlockSumCum<S1T>,
source2: &ComputedBlockLast<S2T>,
) -> Self
where
S1T: NumericValue,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCumLast::from_sources::<F>(
name,
v,
&source1.dateindex,
&source2.dateindex,
),
weekindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.weekindex.sum.boxed_clone(),
source1.weekindex.cumulative.boxed_clone(),
source2.rest.weekindex.boxed_clone(),
),
monthindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.monthindex.sum.boxed_clone(),
source1.monthindex.cumulative.boxed_clone(),
source2.rest.monthindex.boxed_clone(),
),
quarterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.quarterindex.sum.boxed_clone(),
source1.quarterindex.cumulative.boxed_clone(),
source2.rest.quarterindex.boxed_clone(),
),
semesterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.semesterindex.sum.boxed_clone(),
source1.semesterindex.cumulative.boxed_clone(),
source2.rest.semesterindex.boxed_clone(),
),
yearindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.yearindex.sum.boxed_clone(),
source1.yearindex.cumulative.boxed_clone(),
source2.rest.yearindex.boxed_clone(),
),
decadeindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.decadeindex.sum.boxed_clone(),
source1.decadeindex.cumulative.boxed_clone(),
source2.rest.decadeindex.boxed_clone(),
),
}
}
pub fn from_computed_derived_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedBlockSumCum<S1T>,
source2: &DerivedComputedBlockLast<S2T>,
) -> Self
where
S1T: PartialOrd,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCumLast::from_sources::<F>(
name,
v,
&source1.dateindex,
&source2.dateindex,
),
weekindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.weekindex.sum.boxed_clone(),
source1.rest.weekindex.cumulative.boxed_clone(),
source2.weekindex.boxed_clone(),
),
monthindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.monthindex.sum.boxed_clone(),
source1.rest.monthindex.cumulative.boxed_clone(),
source2.monthindex.boxed_clone(),
),
quarterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.quarterindex.sum.boxed_clone(),
source1.rest.quarterindex.cumulative.boxed_clone(),
source2.quarterindex.boxed_clone(),
),
semesterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.semesterindex.sum.boxed_clone(),
source1.rest.semesterindex.cumulative.boxed_clone(),
source2.semesterindex.boxed_clone(),
),
yearindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.yearindex.sum.boxed_clone(),
source1.rest.yearindex.cumulative.boxed_clone(),
source2.yearindex.boxed_clone(),
),
decadeindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.rest.decadeindex.sum.boxed_clone(),
source1.rest.decadeindex.cumulative.boxed_clone(),
source2.decadeindex.boxed_clone(),
),
}
}
pub fn from_derived_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &DerivedComputedBlockSumCum<S1T>,
source2: &DerivedComputedBlockLast<S2T>,
) -> Self
where
S1T: NumericValue,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dateindex: LazyTransform2SumCumLast::from_sources::<F>(
name,
v,
&source1.dateindex,
&source2.dateindex,
),
weekindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.weekindex.sum.boxed_clone(),
source1.weekindex.cumulative.boxed_clone(),
source2.weekindex.boxed_clone(),
),
monthindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.monthindex.sum.boxed_clone(),
source1.monthindex.cumulative.boxed_clone(),
source2.monthindex.boxed_clone(),
),
quarterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.quarterindex.sum.boxed_clone(),
source1.quarterindex.cumulative.boxed_clone(),
source2.quarterindex.boxed_clone(),
),
semesterindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.semesterindex.sum.boxed_clone(),
source1.semesterindex.cumulative.boxed_clone(),
source2.semesterindex.boxed_clone(),
),
yearindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.yearindex.sum.boxed_clone(),
source1.yearindex.cumulative.boxed_clone(),
source2.yearindex.boxed_clone(),
),
decadeindex: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.decadeindex.sum.boxed_clone(),
source1.decadeindex.cumulative.boxed_clone(),
source2.decadeindex.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,101 @@
//! Lazy transform for Full date sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{ComputedVecValue, DerivedDateFull, Full};
use super::super::transform::LazyTransformFull;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Lazy unary transform over date-based indexes carrying the full stats
/// set (average/min/max/sum/cumulative) per calendar granularity.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDateFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub dateindex: LazyTransformFull<DateIndex, T, S1T>,
    pub weekindex: LazyTransformFull<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformFull<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformFull<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformFull<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformFull<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformFull<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyDateFull<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub fn from_full<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex: &Full<DateIndex, S1T>,
source: &DerivedDateFull<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransformFull::from_stats_aggregate::<F>(name, v, dateindex),
weekindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.weekindex.average.boxed_clone(),
source.weekindex.min.boxed_clone(),
source.weekindex.max.boxed_clone(),
source.weekindex.sum.boxed_clone(),
source.weekindex.cumulative.boxed_clone(),
),
monthindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.monthindex.average.boxed_clone(),
source.monthindex.min.boxed_clone(),
source.monthindex.max.boxed_clone(),
source.monthindex.sum.boxed_clone(),
source.monthindex.cumulative.boxed_clone(),
),
quarterindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.quarterindex.average.boxed_clone(),
source.quarterindex.min.boxed_clone(),
source.quarterindex.max.boxed_clone(),
source.quarterindex.sum.boxed_clone(),
source.quarterindex.cumulative.boxed_clone(),
),
semesterindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.semesterindex.average.boxed_clone(),
source.semesterindex.min.boxed_clone(),
source.semesterindex.max.boxed_clone(),
source.semesterindex.sum.boxed_clone(),
source.semesterindex.cumulative.boxed_clone(),
),
yearindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.yearindex.average.boxed_clone(),
source.yearindex.min.boxed_clone(),
source.yearindex.max.boxed_clone(),
source.yearindex.sum.boxed_clone(),
source.yearindex.cumulative.boxed_clone(),
),
decadeindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.decadeindex.average.boxed_clone(),
source.decadeindex.min.boxed_clone(),
source.decadeindex.max.boxed_clone(),
source.decadeindex.sum.boxed_clone(),
source.decadeindex.cumulative.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,79 @@
//! Lazy transform for Last-only date sources.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec, UnaryTransform};
use crate::internal::{ComputedDateLast, ComputedVecValue, DerivedDateLast};
use super::super::transform::LazyTransformLast;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Lazy unary transform over date-based indexes, "last" aggregation only,
/// one transform per calendar granularity.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDateLast<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub dateindex: LazyTransformLast<DateIndex, T, S1T>,
    pub weekindex: LazyTransformLast<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformLast<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformLast<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformLast<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformLast<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformLast<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyDateLast<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub fn from_source<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &ComputedDateLast<S1T>,
) -> Self {
Self::from_computed::<F>(name, version, source.dateindex.boxed_clone(), source)
}
pub fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex_source: IterableBoxedVec<DateIndex, S1T>,
source: &ComputedDateLast<S1T>,
) -> Self {
Self::from_derived::<F>(name, version, dateindex_source, &source.rest)
}
pub fn from_derived<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex_source: IterableBoxedVec<DateIndex, S1T>,
source: &DerivedDateLast<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransformLast::from_boxed::<F>(name, v, dateindex_source),
weekindex: LazyTransformLast::from_lazy_last::<F, _, _>(name, v, &source.weekindex),
monthindex: LazyTransformLast::from_lazy_last::<F, _, _>(name, v, &source.monthindex),
quarterindex: LazyTransformLast::from_lazy_last::<F, _, _>(
name,
v,
&source.quarterindex,
),
semesterindex: LazyTransformLast::from_lazy_last::<F, _, _>(
name,
v,
&source.semesterindex,
),
yearindex: LazyTransformLast::from_lazy_last::<F, _, _>(name, v, &source.yearindex),
decadeindex: LazyTransformLast::from_lazy_last::<F, _, _>(name, v, &source.decadeindex),
}
}
}

View File

@@ -0,0 +1,17 @@
// Two-source (binary transform) aggregation variants.
mod binary_last;
mod binary_sum;
mod binary_sum_cum;
mod binary_sum_cum_last;
// Single-source aggregation variants.
mod full;
mod last;
mod sum;
mod sum_cum;
// Re-export every submodule item flat from this module.
pub use binary_last::*;
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use binary_sum_cum_last::*;
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,53 @@
//! Lazy transform for Sum-only date sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex};
use schemars::JsonSchema;
use vecdb::{IterableBoxedVec, IterableCloneableVec, UnaryTransform};
use crate::internal::{ComputedVecValue, DerivedDateSum};
use super::super::transform::LazyTransformSum;
// Local version tag, folded into the caller-supplied version.
const VERSION: Version = Version::ZERO;
/// Lazy unary transform over date-based indexes, sum-only aggregation,
/// one transform per calendar granularity.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDateSum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub dateindex: LazyTransformSum<DateIndex, T, S1T>,
    pub weekindex: LazyTransformSum<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformSum<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformSum<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformSum<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformSum<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformSum<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyDateSum<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub fn from_derived<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex_source: IterableBoxedVec<DateIndex, S1T>,
source: &DerivedDateSum<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransformSum::from_boxed::<F>(name, v, dateindex_source),
weekindex: LazyTransformSum::from_boxed::<F>(name, v, source.weekindex.boxed_clone()),
monthindex: LazyTransformSum::from_boxed::<F>(name, v, source.monthindex.boxed_clone()),
quarterindex: LazyTransformSum::from_boxed::<F>(name, v, source.quarterindex.boxed_clone()),
semesterindex: LazyTransformSum::from_boxed::<F>(name, v, source.semesterindex.boxed_clone()),
yearindex: LazyTransformSum::from_boxed::<F>(name, v, source.yearindex.boxed_clone()),
decadeindex: LazyTransformSum::from_boxed::<F>(name, v, source.decadeindex.boxed_clone()),
}
}
}

View File

@@ -0,0 +1,82 @@
//! Lazy transform for SumCum date sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, Version, WeekIndex, YearIndex};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{ComputedVecValue, DerivedDateSumCum, SumCum};
use super::super::transform::LazyTransformSumCum;
const VERSION: Version = Version::ZERO;
/// Lazy transform of a sum + cumulative (SumCum) date-sourced metric,
/// replicated across every date-derived index.
///
/// `T` is the transformed output type; `S1T` (default `T`) is the source
/// value type.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDateSumCum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    // One lazy sum+cumulative transform per calendar granularity.
    pub dateindex: LazyTransformSumCum<DateIndex, T, S1T>,
    pub weekindex: LazyTransformSumCum<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformSumCum<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformSumCum<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformSumCum<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformSumCum<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformSumCum<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyDateSumCum<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub fn from_sum_cum<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex: &SumCum<DateIndex, S1T>,
source: &DerivedDateSumCum<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: LazyTransformSumCum::from_sum_cum::<F>(name, v, dateindex),
weekindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.weekindex.sum.boxed_clone(),
source.weekindex.cumulative.boxed_clone(),
),
monthindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.monthindex.sum.boxed_clone(),
source.monthindex.cumulative.boxed_clone(),
),
quarterindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.quarterindex.sum.boxed_clone(),
source.quarterindex.cumulative.boxed_clone(),
),
semesterindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.semesterindex.sum.boxed_clone(),
source.semesterindex.cumulative.boxed_clone(),
),
yearindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.yearindex.sum.boxed_clone(),
source.yearindex.cumulative.boxed_clone(),
),
decadeindex: LazyTransformSumCum::from_boxed::<F>(
name,
v,
source.decadeindex.sum.boxed_clone(),
source.decadeindex.cumulative.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,52 @@
//! Lazy aggregated binary transform for Sum-only pattern across all time periods.
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{ComputedVecValue, DerivedComputedBlockSum, LazyDate2Sum, LazyTransform2Sum, NumericValue};
const VERSION: Version = Version::ZERO;
/// Binary (two-source) lazy sum aggregate over all date periods plus the
/// difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlock2Sum<T, S1T, S2T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDate2Sum<T, S1T, S2T>,
    pub difficultyepoch: LazyTransform2Sum<DifficultyEpoch, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDerivedBlock2Sum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: NumericValue + JsonSchema,
    S2T: NumericValue + JsonSchema,
{
    /// Combines two derived block-sum sources with the binary transform `F`,
    /// covering the date periods and the difficulty-epoch index.
    pub fn from_derived<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        source1: &DerivedComputedBlockSum<S1T>,
        source2: &DerivedComputedBlockSum<S2T>,
    ) -> Self {
        let ver = version + VERSION;
        let dates = LazyDate2Sum::from_derived::<F>(name, ver, source1, source2);
        let difficultyepoch = LazyTransform2Sum::from_boxed::<F>(
            name,
            ver,
            source1.difficultyepoch.boxed_clone(),
            source2.difficultyepoch.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch,
        }
    }
}

View File

@@ -0,0 +1,99 @@
//! Lazy aggregated SumCum - binary transform version.
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedVecValue, DerivedDateFull, DerivedDateSumCum, LazyDate2SumCum, LazyFull, LazySumCum,
SumCum,
};
use super::super::transform::LazyTransform2SumCum;
const VERSION: Version = Version::ZERO;
/// Binary (two-source) lazy sum + cumulative aggregate over all date periods
/// plus the difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlock2SumCum<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDate2SumCum<T, S1T, S2T>,
    pub difficultyepoch: LazyTransform2SumCum<DifficultyEpoch, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDerivedBlock2SumCum<T, S1T, S2T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
    S2T: ComputedVecValue + JsonSchema,
{
    /// Combines two computed SumCum sources (day-level + derived periods +
    /// difficulty-epoch each) with the binary transform `F`.
    #[allow(clippy::too_many_arguments)]
    pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
        name: &str,
        version: Version,
        dateindex1: &SumCum<brk_types::DateIndex, S1T>,
        periods1: &DerivedDateSumCum<S1T>,
        difficultyepoch1: &LazySumCum<DifficultyEpoch, S1T, Height, DifficultyEpoch>,
        dateindex2: &SumCum<brk_types::DateIndex, S2T>,
        periods2: &DerivedDateSumCum<S2T>,
        difficultyepoch2: &LazySumCum<DifficultyEpoch, S2T, Height, DifficultyEpoch>,
    ) -> Self {
        let ver = version + VERSION;
        let dates = LazyDate2SumCum::from_computed::<F>(
            name, ver, dateindex1, periods1, dateindex2, periods2,
        );
        // Argument order expected by `from_sources`: both sums, then both
        // cumulatives.
        let difficultyepoch = LazyTransform2SumCum::from_sources::<F>(
            name,
            ver,
            difficultyepoch1.sum.boxed_clone(),
            difficultyepoch2.sum.boxed_clone(),
            difficultyepoch1.cumulative.boxed_clone(),
            difficultyepoch2.cumulative.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch,
        }
    }

    /// Same combination, but starting from `Full` (stats-aggregate) sources;
    /// only their sum/cumulative components are consumed.
    #[allow(clippy::too_many_arguments)]
    pub fn from_derived_full<F, S1I, S1L, S2I, S2L>(
        name: &str,
        version: Version,
        dateindex1: &SumCum<brk_types::DateIndex, S1T>,
        dates1: &DerivedDateFull<S1T>,
        difficultyepoch1: &LazyFull<DifficultyEpoch, S1T, S1I, S1L>,
        dateindex2: &SumCum<brk_types::DateIndex, S2T>,
        dates2: &DerivedDateFull<S2T>,
        difficultyepoch2: &LazyFull<DifficultyEpoch, S2T, S2I, S2L>,
    ) -> Self
    where
        F: BinaryTransform<S1T, S2T, T>,
        S1I: vecdb::VecIndex + 'static,
        S1L: ComputedVecValue,
        S2I: vecdb::VecIndex + 'static,
        S2L: ComputedVecValue,
    {
        let ver = version + VERSION;
        let dates = LazyDate2SumCum::from_derived_full::<F>(
            name, ver, dateindex1, dates1, dateindex2, dates2,
        );
        let difficultyepoch = LazyTransform2SumCum::from_lazy_stats_aggregate::<F, _, _, _, _>(
            name,
            ver,
            difficultyepoch1,
            difficultyepoch2,
        );
        Self {
            dates,
            difficultyepoch,
        }
    }
}

View File

@@ -0,0 +1,136 @@
//! Lazy aggregated for SumCum + Last binary transform.
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{BinaryTransform, IterableCloneableVec};
use crate::internal::{
ComputedBlockLast, ComputedBlockSumCum, ComputedVecValue, DerivedComputedBlockLast,
DerivedComputedBlockSumCum, LazyDate2SumCumLast, NumericValue,
};
use super::super::transform::LazyTransform2SumCumLast;
const VERSION: Version = Version::ZERO;
/// Binary lazy aggregate combining a SumCum source with a Last source over
/// all date periods plus the difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlock2SumCumLast<T, S1T = T, S2T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
    S2T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDate2SumCumLast<T, S1T, S2T>,
    pub difficultyepoch: LazyTransform2SumCumLast<DifficultyEpoch, T, S1T, S2T>,
}
impl<T, S1T, S2T> LazyDerivedBlock2SumCumLast<T, S1T, S2T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
S2T: ComputedVecValue + JsonSchema,
{
pub fn from_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedBlockSumCum<S1T>,
source2: &ComputedBlockLast<S2T>,
) -> Self
where
S1T: PartialOrd,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dates: LazyDate2SumCumLast::from_computed::<F>(name, v, source1, source2),
difficultyepoch: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.difficultyepoch.sum.boxed_clone(),
source1.difficultyepoch.cumulative.boxed_clone(),
source2.difficultyepoch.boxed_clone(),
),
}
}
pub fn from_derived_computed_full<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &DerivedComputedBlockSumCum<S1T>,
source2: &ComputedBlockLast<S2T>,
) -> Self
where
S1T: NumericValue,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dates: LazyDate2SumCumLast::from_derived_computed_full::<F>(name, v, source1, source2),
difficultyepoch: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.difficultyepoch.sum.boxed_clone(),
source1.difficultyepoch.cumulative.boxed_clone(),
source2.difficultyepoch.boxed_clone(),
),
}
}
pub fn from_computed_derived_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &ComputedBlockSumCum<S1T>,
source2: &DerivedComputedBlockLast<S2T>,
) -> Self
where
S1T: PartialOrd,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dates: LazyDate2SumCumLast::from_computed_derived_computed::<F>(
name, v, source1, source2,
),
difficultyepoch: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.difficultyepoch.sum.boxed_clone(),
source1.difficultyepoch.cumulative.boxed_clone(),
source2.difficultyepoch.boxed_clone(),
),
}
}
pub fn from_derived_computed<F: BinaryTransform<S1T, S2T, T>>(
name: &str,
version: Version,
source1: &DerivedComputedBlockSumCum<S1T>,
source2: &DerivedComputedBlockLast<S2T>,
) -> Self
where
S1T: NumericValue,
S2T: NumericValue,
{
let v = version + VERSION;
Self {
dates: LazyDate2SumCumLast::from_derived_computed::<F>(name, v, source1, source2),
difficultyepoch: LazyTransform2SumCumLast::from_boxed::<F>(
name,
v,
source1.difficultyepoch.sum.boxed_clone(),
source1.difficultyepoch.cumulative.boxed_clone(),
source2.difficultyepoch.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,87 @@
//! Lazy aggregated Full for block-level sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedVecValue, DerivedComputedBlockFull, DerivedDateFull, Full, LazyDateFull, NumericValue,
};
use super::super::transform::LazyTransformFull;
/// Lazy Full (average/min/max/sum/cumulative) aggregate over all date periods
/// plus the difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlockFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDateFull<T, S1T>,
    pub difficultyepoch: LazyTransformFull<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyDerivedBlockFull<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy Full aggregate from a day-level `Full` source, its
    /// derived coarser periods, and a lazy difficulty-epoch Full.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        dateindex: &Full<DateIndex, S1T>,
        periods: &DerivedDateFull<S1T>,
        difficultyepoch: &crate::internal::LazyFull<
            DifficultyEpoch,
            S1T,
            brk_types::Height,
            DifficultyEpoch,
        >,
    ) -> Self {
        let ver = version + VERSION;
        let dates = LazyDateFull::from_full::<F>(name, ver, dateindex, periods);
        // Order expected by `from_boxed`: average, min, max, sum, cumulative.
        let epoch = LazyTransformFull::from_boxed::<F>(
            name,
            ver,
            difficultyepoch.average.boxed_clone(),
            difficultyepoch.min.boxed_clone(),
            difficultyepoch.max.boxed_clone(),
            difficultyepoch.sum.boxed_clone(),
            difficultyepoch.cumulative.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }

    /// Same wiring, but all inputs come from a single
    /// `DerivedComputedBlockFull` source.
    pub fn from_derived_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &DerivedComputedBlockFull<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let dates = LazyDateFull::from_full::<F>(name, ver, &source.dateindex, &source.dates);
        let epoch = LazyTransformFull::from_boxed::<F>(
            name,
            ver,
            source.difficultyepoch.average.boxed_clone(),
            source.difficultyepoch.min.boxed_clone(),
            source.difficultyepoch.max.boxed_clone(),
            source.difficultyepoch.sum.boxed_clone(),
            source.difficultyepoch.cumulative.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }
}

View File

@@ -0,0 +1,85 @@
//! Lazy aggregated Last for block-level sources.
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedBlockLast, ComputedVecValue, DerivedComputedBlockLast, LazyDateLast, NumericValue,
};
use super::super::transform::LazyTransformLast;
/// Lazy Last aggregate over all date periods plus the difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlockLast<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDateLast<T, S1T>,
    pub difficultyepoch: LazyTransformLast<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyDerivedBlockLast<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy Last aggregate from a `ComputedBlockLast` source.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &ComputedBlockLast<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let dates =
            LazyDateLast::from_derived::<F>(name, ver, source.dateindex.0.boxed_clone(), &source.rest);
        let epoch =
            LazyTransformLast::from_boxed::<F>(name, ver, source.difficultyepoch.boxed_clone());
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }

    /// Same wiring for a `DerivedComputedBlockLast` source (whose coarser
    /// periods live in `dates` rather than `rest`).
    pub fn from_derived_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &DerivedComputedBlockLast<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let dates = LazyDateLast::from_derived::<F>(
            name,
            ver,
            source.dateindex.0.boxed_clone(),
            &source.dates,
        );
        let epoch =
            LazyTransformLast::from_boxed::<F>(name, ver, source.difficultyepoch.boxed_clone());
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }
}

View File

@@ -0,0 +1,15 @@
// Lazy block-level aggregation wrappers, one module per aggregation shape
// (unary sum / sum+cumulative / full / last, and their binary variants).
mod binary_sum;
mod binary_sum_cum;
mod binary_sum_cum_last;
mod full;
mod last;
mod sum;
mod sum_cum;

// Re-export everything so callers see a flat namespace.
pub use binary_sum::*;
pub use binary_sum_cum::*;
pub use binary_sum_cum_last::*;
pub use full::*;
pub use last::*;
pub use sum::*;
pub use sum_cum::*;

View File

@@ -0,0 +1,79 @@
//! Lazy aggregated Sum for block-level sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedVecValue, DerivedComputedBlockSum, DerivedDateSum, LazyDateSum, LazySum, NumericValue, SumVec,
};
use super::super::transform::LazyTransformSum;
/// Lazy sum-only aggregate over all date periods plus the difficulty-epoch
/// index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlockSum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDateSum<T, S1T>,
    pub difficultyepoch: LazyTransformSum<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyDerivedBlockSum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy sum aggregate from a day-level `SumVec`, its derived
    /// coarser periods, and a lazy difficulty-epoch sum.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        dateindex: &SumVec<DateIndex, S1T>,
        periods: &DerivedDateSum<S1T>,
        difficultyepoch: &LazySum<DifficultyEpoch, S1T, Height, DifficultyEpoch>,
    ) -> Self {
        let ver = version + VERSION;
        let dates = LazyDateSum::from_derived::<F>(name, ver, dateindex.0.boxed_clone(), periods);
        let epoch = LazyTransformSum::from_boxed::<F>(name, ver, difficultyepoch.boxed_clone());
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }

    /// Same wiring, but all inputs come from a single
    /// `DerivedComputedBlockSum` source.
    pub fn from_derived_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &DerivedComputedBlockSum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let dates = LazyDateSum::from_derived::<F>(
            name,
            ver,
            source.dateindex.0.boxed_clone(),
            &source.dates,
        );
        let epoch =
            LazyTransformSum::from_boxed::<F>(name, ver, source.difficultyepoch.boxed_clone());
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }
}

View File

@@ -0,0 +1,77 @@
//! Lazy aggregated SumCum for block-level sources.
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, Height, Version};
use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{
ComputedVecValue, DerivedComputedBlockSumCum, DerivedDateSumCum, LazyDateSumCum, LazySumCum,
NumericValue, SumCum,
};
use super::super::transform::LazyTransformSumCum;
/// Lazy sum + cumulative aggregate over all date periods plus the
/// difficulty-epoch index.
///
/// Derefs to the date-level part so callers can reach the period fields
/// directly.
#[derive(Clone, Deref, DerefMut, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedBlockSumCum<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    #[deref]
    #[deref_mut]
    #[traversable(flatten)]
    pub dates: LazyDateSumCum<T, S1T>,
    pub difficultyepoch: LazyTransformSumCum<DifficultyEpoch, T, S1T>,
}
const VERSION: Version = Version::ZERO;

impl<T, S1T> LazyDerivedBlockSumCum<T, S1T>
where
    T: ComputedVecValue + JsonSchema + 'static,
    S1T: ComputedVecValue + JsonSchema,
{
    /// Builds the lazy SumCum aggregate from a day-level `SumCum`, its
    /// derived coarser periods, and a lazy difficulty-epoch SumCum.
    pub fn from_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        dateindex: &SumCum<DateIndex, S1T>,
        periods: &DerivedDateSumCum<S1T>,
        difficultyepoch: &LazySumCum<DifficultyEpoch, S1T, Height, DifficultyEpoch>,
    ) -> Self {
        let ver = version + VERSION;
        let dates = LazyDateSumCum::from_sum_cum::<F>(name, ver, dateindex, periods);
        let epoch = LazyTransformSumCum::from_boxed::<F>(
            name,
            ver,
            difficultyepoch.sum.boxed_clone(),
            difficultyepoch.cumulative.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }

    /// Same wiring, but all inputs come from a single
    /// `DerivedComputedBlockSumCum` source.
    pub fn from_derived_computed<F: UnaryTransform<S1T, T>>(
        name: &str,
        version: Version,
        source: &DerivedComputedBlockSumCum<S1T>,
    ) -> Self
    where
        S1T: NumericValue,
    {
        let ver = version + VERSION;
        let dates = LazyDateSumCum::from_sum_cum::<F>(name, ver, &source.dateindex, &source.dates);
        let epoch = LazyTransformSumCum::from_boxed::<F>(
            name,
            ver,
            source.difficultyepoch.sum.boxed_clone(),
            source.difficultyepoch.cumulative.boxed_clone(),
        );
        Self {
            dates,
            difficultyepoch: epoch,
        }
    }
}

View File

@@ -0,0 +1,123 @@
//! Lazy transform of DerivedTxFull.
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{IterableCloneableVec, UnaryTransform};
use crate::internal::{ComputedVecValue, DerivedTxFull};
use super::super::transform::LazyTransformFull;
/// Lazy Full (average/min/max/sum/cumulative) transform of a transaction-level
/// source, replicated across every index: height, difficulty epoch, and all
/// date-derived periods.
#[derive(Clone, Traversable)]
#[traversable(merge)]
pub struct LazyDerivedTxFull<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub height: LazyTransformFull<Height, T, S1T>,
    pub difficultyepoch: LazyTransformFull<DifficultyEpoch, T, S1T>,
    pub dateindex: LazyTransformFull<DateIndex, T, S1T>,
    pub weekindex: LazyTransformFull<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformFull<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformFull<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformFull<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformFull<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformFull<DecadeIndex, T, S1T>,
}
const VERSION: Version = Version::ZERO;
impl<T, S1T> LazyDerivedTxFull<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
pub fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
source: &DerivedTxFull<S1T>,
) -> Self {
let v = version + VERSION;
Self {
height: LazyTransformFull::from_stats_aggregate::<F>(name, v, &source.height),
difficultyepoch: LazyTransformFull::from_boxed::<F>(
name,
v,
source.difficultyepoch.average.boxed_clone(),
source.difficultyepoch.min.boxed_clone(),
source.difficultyepoch.max.boxed_clone(),
source.difficultyepoch.sum.boxed_clone(),
source.difficultyepoch.cumulative.boxed_clone(),
),
dateindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.dateindex.average.0.boxed_clone(),
source.dateindex.minmax.min.0.boxed_clone(),
source.dateindex.minmax.max.0.boxed_clone(),
source.dateindex.sum_cum.sum.0.boxed_clone(),
source.dateindex.sum_cum.cumulative.0.boxed_clone(),
),
weekindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.weekindex.average.boxed_clone(),
source.weekindex.min.boxed_clone(),
source.weekindex.max.boxed_clone(),
source.weekindex.sum.boxed_clone(),
source.weekindex.cumulative.boxed_clone(),
),
monthindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.monthindex.average.boxed_clone(),
source.monthindex.min.boxed_clone(),
source.monthindex.max.boxed_clone(),
source.monthindex.sum.boxed_clone(),
source.monthindex.cumulative.boxed_clone(),
),
quarterindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.quarterindex.average.boxed_clone(),
source.quarterindex.min.boxed_clone(),
source.quarterindex.max.boxed_clone(),
source.quarterindex.sum.boxed_clone(),
source.quarterindex.cumulative.boxed_clone(),
),
semesterindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.semesterindex.average.boxed_clone(),
source.semesterindex.min.boxed_clone(),
source.semesterindex.max.boxed_clone(),
source.semesterindex.sum.boxed_clone(),
source.semesterindex.cumulative.boxed_clone(),
),
yearindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.yearindex.average.boxed_clone(),
source.yearindex.min.boxed_clone(),
source.yearindex.max.boxed_clone(),
source.yearindex.sum.boxed_clone(),
source.yearindex.cumulative.boxed_clone(),
),
decadeindex: LazyTransformFull::from_boxed::<F>(
name,
v,
source.decadeindex.average.boxed_clone(),
source.decadeindex.min.boxed_clone(),
source.decadeindex.max.boxed_clone(),
source.decadeindex.sum.boxed_clone(),
source.decadeindex.cumulative.boxed_clone(),
),
}
}
}

View File

@@ -0,0 +1,3 @@
mod full;
pub use full::*;

View File

@@ -1,114 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, TreeNode, Version, WeekIndex,
YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{ComputedVecValue, ComputedVecsFromDateIndex, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy counterpart of `ComputedVecsFromDateIndex`: each index holds a
/// lazy transform over the corresponding stored vector.
///
/// `dateindex` is optional because a day-level source vec may not exist;
/// `Traversable` is implemented by hand (below) to skip absent children.
#[derive(Clone)]
pub struct LazyVecsFromDateIndex<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub dateindex: Option<LazyVecFrom1<DateIndex, T, DateIndex, S1T>>,
    pub dateindex_extra: LazyTransformBuilder<DateIndex, T, S1T>,
    pub weekindex: LazyTransformBuilder<WeekIndex, T, S1T>,
    pub monthindex: LazyTransformBuilder<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformBuilder<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformBuilder<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformBuilder<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformBuilder<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyVecsFromDateIndex<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a stored `ComputedVecsFromDateIndex`.
/// F is the transform type (e.g., `Negate`, `Halve`).
pub fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
dateindex_source: Option<IterableBoxedVec<DateIndex, S1T>>,
source: &ComputedVecsFromDateIndex<S1T>,
) -> Self {
let v = version + VERSION;
Self {
dateindex: dateindex_source.map(|s| LazyVecFrom1::transformed::<F>(name, v, s)),
dateindex_extra: LazyTransformBuilder::from_eager::<F>(
name,
v,
&source.dateindex_extra,
),
weekindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.weekindex),
monthindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.monthindex),
quarterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.quarterindex),
semesterindex: LazyTransformBuilder::from_lazy::<F, _, _>(
name,
v,
&source.semesterindex,
),
yearindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.yearindex),
decadeindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.decadeindex),
}
}
}
// Hand-written because two children are conditional: `dateindex` only exists
// when the optional vec was supplied, and `dateindex_extra` is skipped when
// its subtree is empty — a derive cannot express either.
impl<T, S1T> Traversable for LazyVecsFromDateIndex<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    fn to_tree_node(&self) -> TreeNode {
        let dateindex_extra_node = self.dateindex_extra.to_tree_node();
        TreeNode::Branch(
            [
                // Entries are Options; `flatten()` below drops the absent ones.
                self.dateindex
                    .as_ref()
                    .map(|v| ("dateindex".to_string(), v.to_tree_node())),
                if dateindex_extra_node.is_empty() {
                    None
                } else {
                    Some(("dateindex_extra".to_string(), dateindex_extra_node))
                },
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        // NOTE(review): unwrap assumes merging the branches always succeeds
        // for this shape — confirm against `merge_branches`'s contract.
        .unwrap()
    }

    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        // Boxing unifies the differing concrete iterator types so the chain
        // can be built up conditionally (`dateindex` may be absent).
        let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(std::iter::empty());
        if let Some(ref dateindex) = self.dateindex {
            regular_iter = Box::new(regular_iter.chain(dateindex.iter_any_exportable()));
        }
        regular_iter = Box::new(regular_iter.chain(self.dateindex_extra.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable()));
        regular_iter
    }
}

View File

@@ -1,122 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, DifficultyEpoch, Height, MonthIndex, QuarterIndex, SemesterIndex,
TreeNode, Version, WeekIndex, YearIndex,
};
use schemars::JsonSchema;
use vecdb::{AnyExportableVec, IterableBoxedVec, LazyVecFrom1, UnaryTransform};
use crate::internal::{ComputedVecValue, ComputedVecsFromHeight, LazyTransformBuilder};
const VERSION: Version = Version::ZERO;
/// Fully lazy version of `ComputedVecsFromHeight` where all vecs are lazy transforms.
/// Each index uses `LazyTransformBuilder` sourced from its corresponding stored groups.
///
/// Unlike `LazyVecsFromDateIndex`, the `height` source is mandatory, and a
/// `difficultyepoch` index is included.
#[derive(Clone)]
pub struct LazyVecsFromHeight<T, S1T = T>
where
    T: ComputedVecValue + PartialOrd + JsonSchema,
    S1T: ComputedVecValue,
{
    pub height: LazyVecFrom1<Height, T, Height, S1T>,
    pub height_extra: LazyTransformBuilder<Height, T, S1T>,
    pub dateindex: LazyTransformBuilder<DateIndex, T, S1T>,
    pub weekindex: LazyTransformBuilder<WeekIndex, T, S1T>,
    pub difficultyepoch: LazyTransformBuilder<DifficultyEpoch, T, S1T>,
    pub monthindex: LazyTransformBuilder<MonthIndex, T, S1T>,
    pub quarterindex: LazyTransformBuilder<QuarterIndex, T, S1T>,
    pub semesterindex: LazyTransformBuilder<SemesterIndex, T, S1T>,
    pub yearindex: LazyTransformBuilder<YearIndex, T, S1T>,
    pub decadeindex: LazyTransformBuilder<DecadeIndex, T, S1T>,
}
impl<T, S1T> LazyVecsFromHeight<T, S1T>
where
T: ComputedVecValue + JsonSchema + 'static,
S1T: ComputedVecValue + JsonSchema,
{
/// Create a lazy transform from a stored `ComputedVecsFromHeight`.
/// F is the transform type (e.g., `Negate`, `Halve`).
pub fn from_computed<F: UnaryTransform<S1T, T>>(
name: &str,
version: Version,
height_source: IterableBoxedVec<Height, S1T>,
source: &ComputedVecsFromHeight<S1T>,
) -> Self {
let v = version + VERSION;
Self {
height: LazyVecFrom1::transformed::<F>(name, v, height_source),
height_extra: LazyTransformBuilder::from_eager::<F>(name, v, &source.height_extra),
dateindex: LazyTransformBuilder::from_eager::<F>(name, v, &source.dateindex),
weekindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.weekindex),
difficultyepoch: LazyTransformBuilder::from_lazy::<F, _, _>(
name,
v,
&source.difficultyepoch,
),
monthindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.monthindex),
quarterindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.quarterindex),
semesterindex: LazyTransformBuilder::from_lazy::<F, _, _>(
name,
v,
&source.semesterindex,
),
yearindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.yearindex),
decadeindex: LazyTransformBuilder::from_lazy::<F, _, _>(name, v, &source.decadeindex),
}
}
}
// Hand-written because `height_extra` must be skipped when its subtree is
// empty — a derive cannot express the conditional child.
impl<T, S1T> Traversable for LazyVecsFromHeight<T, S1T>
where
    T: ComputedVecValue + JsonSchema,
    S1T: ComputedVecValue,
{
    fn to_tree_node(&self) -> TreeNode {
        let height_extra_node = self.height_extra.to_tree_node();
        TreeNode::Branch(
            [
                Some(("height".to_string(), self.height.to_tree_node())),
                // Drop the entry entirely when there is nothing under it.
                if height_extra_node.is_empty() {
                    None
                } else {
                    Some(("height_extra".to_string(), height_extra_node))
                },
                Some(("dateindex".to_string(), self.dateindex.to_tree_node())),
                Some(("weekindex".to_string(), self.weekindex.to_tree_node())),
                Some((
                    "difficultyepoch".to_string(),
                    self.difficultyepoch.to_tree_node(),
                )),
                Some(("monthindex".to_string(), self.monthindex.to_tree_node())),
                Some(("quarterindex".to_string(), self.quarterindex.to_tree_node())),
                Some((
                    "semesterindex".to_string(),
                    self.semesterindex.to_tree_node(),
                )),
                Some(("yearindex".to_string(), self.yearindex.to_tree_node())),
                Some(("decadeindex".to_string(), self.decadeindex.to_tree_node())),
            ]
            .into_iter()
            .flatten()
            .collect(),
        )
        .merge_branches()
        // NOTE(review): unwrap assumes merging the branches always succeeds
        // for this shape — confirm against `merge_branches`'s contract.
        .unwrap()
    }

    fn iter_any_exportable(&self) -> impl Iterator<Item = &dyn AnyExportableVec> {
        // Boxing unifies the differing concrete iterator types so they can be
        // chained into one return value.
        let mut regular_iter: Box<dyn Iterator<Item = &dyn AnyExportableVec>> =
            Box::new(self.height.iter_any_exportable());
        regular_iter = Box::new(regular_iter.chain(self.height_extra.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.dateindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.weekindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.difficultyepoch.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.monthindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.quarterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.semesterindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.yearindex.iter_any_exportable()));
        regular_iter = Box::new(regular_iter.chain(self.decadeindex.iter_any_exportable()));
        regular_iter
    }
}

Some files were not shown because too many files have changed in this diff Show More