brk: first commit

This commit is contained in:
nym21
2025-02-23 01:25:15 +01:00
parent 8c3f519016
commit 19cf34f9d4
266 changed files with 225 additions and 1268 deletions

View File

@@ -0,0 +1,887 @@
use itertools::Itertools;
use rayon::prelude::*;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{
cohort_metadata::AddressCohortMetadataDataset, ComputeData, DateRecapDataset, RatioDataset,
SubDataset,
},
structs::{
AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, BiMap, Date, DateMap, Height, HeightMap,
MapKind, Timestamp, OHLC,
},
};
use super::{AnyDatasetGroup, MinInitialStates};
/// Common behavior for any dataset struct.
///
/// Relies on `struct_iterable::Iterable` runtime reflection plus `Any`
/// downcasts to discover the typed map fields a dataset owns, decide whether
/// new data still needs to be inserted/computed, and drive export.
pub trait AnyDataset: Iterable {
    /// The dataset's min-over-all-maps initial state (inserted + computed halves).
    fn get_min_initial_states(&self) -> &MinInitialStates;

    /// True when either the height-indexed or the date-indexed side still needs rows.
    fn needs_insert(&self, height: Height, date: Date) -> bool {
        self.needs_insert_height(height) || self.needs_insert_date(date)
    }

    #[inline(always)]
    fn needs_insert_height(&self, height: Height) -> bool {
        // A `None` first_unsafe_height falls back to Height::ZERO,
        // i.e. everything is considered unsafe and must be (re)inserted.
        !self.to_all_inserted_height_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .inserted
                .first_unsafe_height
                .unwrap_or(Height::ZERO)
                <= height
    }

    #[inline(always)]
    fn needs_insert_date(&self, date: Date) -> bool {
        // `map_or(true, ..)`: a `None` first_unsafe_date means "always insert".
        !self.to_all_inserted_date_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .inserted
                .first_unsafe_date
                .map_or(true, |min_initial_first_unsafe_date| {
                    min_initial_first_unsafe_date <= date
                })
    }

    /// Collects every `BiMap<_>` of the given `kind`, trying each concrete
    /// value type in turn, then recursing into the nested dataset types that
    /// themselves own bi maps.
    fn to_kind_bi_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut v = vec![];
        self.iter().for_each(|(_, any)| {
            if let Some(map) = any.downcast_ref::<BiMap<u8>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<u16>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<u32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<u64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<usize>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<f32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<f64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<OHLC>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<Date>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<Height>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<BiMap<Timestamp>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyBiMap + Send + Sync))
                }
            } else if let Some(dataset) = any.downcast_ref::<RatioDataset>() {
                // Nested dataset: it pre-filters by kind itself.
                match kind {
                    MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
                    MapKind::Computed => dataset.to_computed_bi_map_vec(),
                }
                .into_iter()
                .for_each(|map| {
                    v.push(map);
                });
            } else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
                match kind {
                    MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
                    MapKind::Computed => dataset.to_computed_bi_map_vec(),
                }
                .into_iter()
                .for_each(|map| {
                    v.push(map);
                });
            } else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
                // SubDataset is a group: recurse into each child dataset.
                dataset.as_vec().into_iter().for_each(|dataset| {
                    v.append(&mut dataset.to_kind_bi_map_vec(kind));
                });
            }
        });
        v
    }

    /// Mutable counterpart of `to_kind_bi_map_vec`.
    ///
    /// Uses `match` guards with `is::<T>()` so the borrow checker can prove
    /// each arm takes the only mutable borrow of `any`.
    fn to_kind_mut_bi_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyBiMap> {
        let mut v = vec![];
        self.iter_mut().for_each(|(_, any)| match any {
            any if any.is::<BiMap<u8>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<u8>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<u16>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<u16>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<u32>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<u32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<u64>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<u64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<usize>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<usize>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<f32>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<f32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<f64>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<f64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<OHLC>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<OHLC>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<Date>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<Date>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<Height>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<Height>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<BiMap<Timestamp>>() => {
                if let Some(map) = any.downcast_mut::<BiMap<Timestamp>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyBiMap);
                    }
                }
            }
            any if any.is::<RatioDataset>() => {
                if let Some(dataset) = any.downcast_mut::<RatioDataset>() {
                    match kind {
                        MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
                        MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
                    }
                    .into_iter()
                    .for_each(|map| {
                        v.push(map);
                    });
                }
            }
            any if any.is::<AddressCohortMetadataDataset>() => {
                if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
                    match kind {
                        MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
                        MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
                    }
                    .into_iter()
                    .for_each(|map| {
                        v.push(map);
                    });
                }
            }
            any if any.is::<SubDataset>() => {
                if let Some(dataset) = any.downcast_mut::<SubDataset>() {
                    dataset.as_mut_vec().into_iter().for_each(|dataset| {
                        v.append(&mut dataset.to_kind_mut_bi_map_vec(kind));
                    });
                }
            }
            // Fields of any other type (non-map state) are ignored.
            _ => {}
        });
        v
    }

    /// Collects every `DateMap<_>` of the given `kind`, including the date
    /// maps held by nested `DateRecapDataset`s, sub-datasets and metadata.
    fn to_kind_date_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut v = vec![];
        self.iter().for_each(|(_, any)| {
            if let Some(map) = any.downcast_ref::<DateMap<u8>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<u16>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<u32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<u64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<usize>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<f32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<f64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<OHLC>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<Date>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<Height>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<DateMap<Timestamp>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyDateMap + Send + Sync))
                }
            } else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u32>>() {
                // Recap datasets expose their date maps directly; filter by kind here.
                dataset.as_vec().into_iter().for_each(|map| {
                    if map.kind() == kind {
                        v.push(map as &(dyn AnyDateMap + Send + Sync))
                    }
                });
            } else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u64>>() {
                dataset.as_vec().into_iter().for_each(|map| {
                    if map.kind() == kind {
                        v.push(map as &(dyn AnyDateMap + Send + Sync))
                    }
                });
            } else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<f32>>() {
                dataset.as_vec().into_iter().for_each(|map| {
                    if map.kind() == kind {
                        v.push(map as &(dyn AnyDateMap + Send + Sync))
                    }
                });
            } else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
                dataset.as_vec().into_iter().for_each(|dataset| {
                    v.append(&mut dataset.to_kind_date_map_vec(kind));
                });
            } else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
                match kind {
                    MapKind::Inserted => dataset.to_inserted_date_map_vec(),
                    MapKind::Computed => dataset.to_computed_date_map_vec(),
                }
                .into_iter()
                .for_each(|map| {
                    v.push(map);
                });
            }
        });
        v
    }

    /// Mutable counterpart of `to_kind_date_map_vec` (same `match`-guard
    /// pattern as `to_kind_mut_bi_map_vec`).
    fn to_kind_mut_date_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyDateMap> {
        let mut v = vec![];
        self.iter_mut().for_each(|(_, any)| match any {
            any if any.is::<DateMap<u8>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<u8>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<u16>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<u16>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<u32>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<u32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<u64>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<u64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<usize>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<usize>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<f32>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<f32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<f64>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<f64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<OHLC>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<OHLC>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<Date>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<Date>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<Height>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<Height>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateMap<Timestamp>>() => {
                if let Some(map) = any.downcast_mut::<DateMap<Timestamp>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyDateMap);
                    }
                }
            }
            any if any.is::<DateRecapDataset<u32>>() => {
                if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u32>>() {
                    dataset.as_mut_vec().into_iter().for_each(|map| {
                        if map.kind() == kind {
                            v.push(map as &mut dyn AnyDateMap);
                        }
                    });
                }
            }
            any if any.is::<DateRecapDataset<u64>>() => {
                if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u64>>() {
                    dataset.as_mut_vec().into_iter().for_each(|map| {
                        if map.kind() == kind {
                            v.push(map as &mut dyn AnyDateMap);
                        }
                    });
                }
            }
            any if any.is::<DateRecapDataset<f32>>() => {
                if let Some(dataset) = any.downcast_mut::<DateRecapDataset<f32>>() {
                    dataset.as_mut_vec().into_iter().for_each(|map| {
                        if map.kind() == kind {
                            v.push(map as &mut dyn AnyDateMap);
                        }
                    });
                }
            }
            any if any.is::<SubDataset>() => {
                if let Some(dataset) = any.downcast_mut::<SubDataset>() {
                    dataset.as_mut_vec().into_iter().for_each(|dataset| {
                        v.append(&mut dataset.to_kind_mut_date_map_vec(kind));
                    });
                }
            }
            any if any.is::<AddressCohortMetadataDataset>() => {
                if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
                    match kind {
                        MapKind::Inserted => dataset.to_inserted_mut_date_map_vec(),
                        MapKind::Computed => dataset.to_computed_mut_date_map_vec(),
                    }
                    .into_iter()
                    .for_each(|map| {
                        v.push(map);
                    });
                }
            }
            _ => {}
        });
        v
    }

    /// Collects every `HeightMap<_>` of the given `kind`, recursing into
    /// nested sub-datasets and metadata datasets.
    fn to_kind_height_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut v = vec![];
        self.iter().for_each(|(_, any)| {
            if let Some(map) = any.downcast_ref::<HeightMap<u8>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<u16>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<u32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<u64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<usize>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<f32>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<f64>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<OHLC>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<Date>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<Height>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(map) = any.downcast_ref::<HeightMap<Timestamp>>() {
                if map.kind() == kind {
                    v.push(map as &(dyn AnyHeightMap + Send + Sync))
                }
            } else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
                dataset.as_vec().into_iter().for_each(|dataset| {
                    v.append(&mut dataset.to_kind_height_map_vec(kind));
                });
            } else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
                match kind {
                    MapKind::Inserted => dataset.to_inserted_height_map_vec(),
                    MapKind::Computed => dataset.to_computed_height_map_vec(),
                }
                .into_iter()
                .for_each(|map| {
                    v.push(map);
                });
            }
        });
        v
    }

    /// Mutable counterpart of `to_kind_height_map_vec`.
    fn to_kind_mut_height_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyHeightMap> {
        let mut v = vec![];
        self.iter_mut().for_each(|(_, any)| match any {
            any if any.is::<HeightMap<u8>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<u8>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<u16>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<u16>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<u32>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<u32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<u64>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<u64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<usize>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<usize>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<f32>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<f32>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<f64>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<f64>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<OHLC>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<OHLC>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<Date>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<Date>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<Height>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<Height>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<HeightMap<Timestamp>>() => {
                if let Some(map) = any.downcast_mut::<HeightMap<Timestamp>>() {
                    if map.kind() == kind {
                        v.push(map as &mut dyn AnyHeightMap);
                    }
                }
            }
            any if any.is::<SubDataset>() => {
                if let Some(dataset) = any.downcast_mut::<SubDataset>() {
                    dataset.as_mut_vec().into_iter().for_each(|dataset| {
                        v.append(&mut dataset.to_kind_mut_height_map_vec(kind));
                    });
                }
            }
            any if any.is::<AddressCohortMetadataDataset>() => {
                if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
                    match kind {
                        MapKind::Inserted => dataset.to_inserted_mut_height_map_vec(),
                        MapKind::Computed => dataset.to_computed_mut_height_map_vec(),
                    }
                    .into_iter()
                    .for_each(|map| {
                        v.push(map);
                    });
                }
            }
            _ => {}
        });
        v
    }

    // --- Convenience wrappers over the `to_kind_*` collectors ---

    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.to_kind_bi_map_vec(MapKind::Inserted)
    }

    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.to_kind_height_map_vec(MapKind::Inserted)
    }

    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.to_kind_date_map_vec(MapKind::Inserted)
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.to_kind_mut_bi_map_vec(MapKind::Inserted)
    }

    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.to_kind_mut_height_map_vec(MapKind::Inserted)
    }

    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.to_kind_mut_date_map_vec(MapKind::Inserted)
    }

    /// All inserted height maps: plain `HeightMap`s plus the height halves of bi maps.
    fn to_all_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut vec = self.to_inserted_height_map_vec();
        vec.append(
            &mut self
                .to_inserted_bi_map_vec()
                .iter()
                .map(|bi| bi.get_height())
                .collect_vec(),
        );
        vec
    }

    /// All inserted date maps: plain `DateMap`s plus the date halves of bi maps.
    fn to_all_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut vec = self.to_inserted_date_map_vec();
        vec.append(
            &mut self
                .to_inserted_bi_map_vec()
                .iter()
                .map(|bi| bi.get_date())
                .collect_vec(),
        );
        vec
    }

    /// Every inserted map as a type-erased `AnyMap` (heights first, then dates).
    fn to_all_inserted_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let heights = self
            .to_all_inserted_height_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());
        let dates = self
            .to_all_inserted_date_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());
        heights.chain(dates).collect_vec()
    }

    /// True when the latest height or latest date in `compute_data`
    /// requires this dataset to (re)compute its derived maps.
    #[inline(always)]
    fn should_compute(&self, compute_data: &ComputeData) -> bool {
        compute_data
            .heights
            .last()
            .map_or(false, |height| self.should_compute_height(*height))
            || compute_data
                .dates
                .last()
                .map_or(false, |date| self.should_compute_date(*date))
    }

    #[inline(always)]
    fn should_compute_height(&self, height: Height) -> bool {
        // Mirrors `needs_insert_height`, but against the computed half of the state.
        !self.to_all_computed_height_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .computed
                .first_unsafe_height
                .unwrap_or(Height::ZERO)
                <= height
    }

    #[inline(always)]
    fn should_compute_date(&self, date: Date) -> bool {
        // Mirrors `needs_insert_date`, but against the computed half of the state.
        !self.to_all_computed_date_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .computed
                .first_unsafe_date
                .map_or(true, |min_initial_first_unsafe_date| {
                    min_initial_first_unsafe_date <= date
                })
    }

    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.to_kind_bi_map_vec(MapKind::Computed)
    }

    fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.to_kind_height_map_vec(MapKind::Computed)
    }

    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.to_kind_date_map_vec(MapKind::Computed)
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.to_kind_mut_bi_map_vec(MapKind::Computed)
    }

    fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.to_kind_mut_height_map_vec(MapKind::Computed)
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.to_kind_mut_date_map_vec(MapKind::Computed)
    }

    /// All computed height maps: plain `HeightMap`s plus the height halves of bi maps.
    fn to_all_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut vec = self.to_computed_height_map_vec();
        vec.append(
            &mut self
                .to_computed_bi_map_vec()
                .iter()
                .map(|bi| bi.get_height())
                .collect_vec(),
        );
        vec
    }

    /// All computed date maps: plain `DateMap`s plus the date halves of bi maps.
    fn to_all_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut vec = self.to_computed_date_map_vec();
        vec.append(
            &mut self
                .to_computed_bi_map_vec()
                .iter()
                .map(|bi| bi.get_date())
                .collect_vec(),
        );
        vec
    }

    /// Every computed map as a type-erased `AnyMap` (heights first, then dates).
    fn to_all_computed_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let heights = self
            .to_all_computed_height_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());
        let dates = self
            .to_all_computed_date_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());
        heights.chain(dates).collect_vec()
    }

    /// Every map owned by the dataset, inserted first then computed.
    fn to_all_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let mut inserted = self.to_all_inserted_map_vec();
        inserted.append(&mut self.to_all_computed_map_vec());
        inserted
    }

    // #[inline(always)]
    // fn is_empty(&self) -> bool {
    //     self.to_any_map_vec().is_empty()
    // }

    /// Runs each map's `pre_export` hook over every mutable map (both kinds).
    fn pre_export(&mut self) {
        self.to_inserted_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());
        self.to_inserted_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());
        self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.pre_export())
        });
        self.to_computed_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());
        self.to_computed_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());
        self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.pre_export())
        });
    }

    /// Exports every map in parallel, short-circuiting on the first error.
    fn export(&self) -> color_eyre::Result<()> {
        self.to_all_map_vec()
            .into_par_iter()
            .try_for_each(|map| -> color_eyre::Result<()> { map.export() })
    }

    /// Runs each map's `post_export` hook over every mutable map (both kinds).
    fn post_export(&mut self) {
        self.to_inserted_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());
        self.to_inserted_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());
        self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.post_export())
        });
        self.to_computed_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());
        self.to_computed_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());
        self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.post_export())
        });
    }

    /// Deletes the on-disk files of every computed map (inserted data is untouched),
    /// forcing the next run to recompute them from scratch.
    fn reset_computed(&self) {
        self.to_all_computed_date_map_vec()
            .iter()
            .for_each(|map| map.delete_files());
        self.to_all_computed_height_map_vec()
            .iter()
            .for_each(|map| map.delete_files());
    }
}

View File

@@ -0,0 +1,7 @@
use super::AnyDataset;
/// A fixed group of datasets that can be flattened into a list,
/// either immutably or mutably.
pub trait AnyDatasetGroup {
    /// Borrows every dataset in the group.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    /// Mutably borrows every dataset in the group.
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,9 @@
use super::{AnyDataset, MinInitialStates};
/// A collection of datasets with its own aggregated min initial state.
pub trait AnyDatasets {
    /// The minimum initial state aggregated over every contained dataset.
    fn get_min_initial_states(&self) -> &MinInitialStates;
    /// Borrows every contained dataset.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    /// Mutably borrows every contained dataset.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,279 @@
use allocative::Allocative;
use crate::structs::{AnyDateMap, AnyHeightMap, Config, Date, Height};
use super::{AnyDataset, AnyDatasets};
/// Min-over-all-maps initial state, tracked separately for the inserted
/// (raw) maps and the computed (derived) maps of a dataset.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialStates {
    /// State of the maps whose values are inserted directly.
    pub inserted: MinInitialState,
    /// State of the maps whose values are computed from other maps.
    pub computed: MinInitialState,
}
impl MinInitialStates {
    /// Replaces the current state wholesale with `other`.
    pub fn consume(&mut self, other: Self) {
        // Both halves are overwritten; the struct is `Copy`, so this is a plain move.
        *self = other;
    }

    /// Builds both halves (inserted and computed) from a single dataset.
    pub fn compute_from_dataset(dataset: &dyn AnyDataset, config: &Config) -> Self {
        let inserted = MinInitialState::compute_from_dataset(dataset, Mode::Inserted, config);
        let computed = MinInitialState::compute_from_dataset(dataset, Mode::Computed, config);
        Self { inserted, computed }
    }

    /// Builds both halves (inserted and computed) from a collection of datasets.
    pub fn compute_from_datasets(datasets: &dyn AnyDatasets, config: &Config) -> Self {
        let inserted = MinInitialState::compute_from_datasets(datasets, Mode::Inserted, config);
        let computed = MinInitialState::compute_from_datasets(datasets, Mode::Computed, config);
        Self { inserted, computed }
    }

    /// The smaller of the two last heights (`None` orders below any `Some`).
    pub fn min_last_height(&self) -> Option<Height> {
        Ord::min(self.inserted.last_height, self.computed.last_height)
    }
}
/// Snapshot of how far a set of maps has progressed on disk.
///
/// `None` means "unknown / nothing safe yet" and is treated conservatively
/// by the `needs_insert_*` / `should_compute_*` checks.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialState {
    /// Earliest date at/after which stored values can no longer be trusted.
    pub first_unsafe_date: Option<Date>,
    /// Earliest height at/after which stored values can no longer be trusted.
    pub first_unsafe_height: Option<Height>,
    /// Last date already present in the maps.
    pub last_date: Option<Date>,
    /// Last height already present in the maps.
    pub last_height: Option<Height>,
}
/// Which half of `MinInitialStates` is being computed.
enum Mode {
    Inserted,
    Computed,
}
impl MinInitialState {
    /// Aggregates the minimum initial state across every dataset in `datasets`.
    ///
    /// Datasets that own no maps of the relevant kind are filtered out so an
    /// empty dataset cannot drag every minimum down to `None`.
    fn compute_from_datasets(datasets: &dyn AnyDatasets, mode: Mode, config: &Config) -> Self {
        match mode {
            Mode::Inserted => {
                let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
                    !dataset.to_all_inserted_date_map_vec().is_empty()
                };
                let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
                    !dataset.to_all_inserted_height_map_vec().is_empty()
                };
                // The state fields are `Copy` (the struct derives `Copy`),
                // so they are read out directly — no `.as_ref().cloned()` needed.
                Self {
                    first_unsafe_date: Self::min_datasets_date(
                        datasets,
                        contains_date_maps,
                        |dataset| dataset.get_min_initial_states().inserted.first_unsafe_date,
                    ),
                    first_unsafe_height: Self::min_datasets_height(
                        datasets,
                        contains_height_maps,
                        |dataset| dataset.get_min_initial_states().inserted.first_unsafe_height,
                    ),
                    last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
                        dataset.get_min_initial_states().inserted.last_date
                    }),
                    last_height: Self::min_datasets_height(
                        datasets,
                        contains_height_maps,
                        |dataset| dataset.get_min_initial_states().inserted.last_height,
                    ),
                }
            }
            Mode::Computed => {
                if config.recompute_computed() {
                    // NOTE(review): unlike `compute_from_dataset`, this path does
                    // not call `reset_computed()` — presumably each leaf dataset
                    // resets itself when its own state is computed; confirm.
                    // datasets.reset_computed();
                    return Self::default();
                }
                let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
                    !dataset.to_all_computed_date_map_vec().is_empty()
                };
                let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
                    !dataset.to_all_computed_height_map_vec().is_empty()
                };
                Self {
                    first_unsafe_date: Self::min_datasets_date(
                        datasets,
                        contains_date_maps,
                        |dataset| dataset.get_min_initial_states().computed.first_unsafe_date,
                    ),
                    first_unsafe_height: Self::min_datasets_height(
                        datasets,
                        contains_height_maps,
                        |dataset| dataset.get_min_initial_states().computed.first_unsafe_height,
                    ),
                    last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
                        dataset.get_min_initial_states().computed.last_date
                    }),
                    last_height: Self::min_datasets_height(
                        datasets,
                        contains_height_maps,
                        |dataset| dataset.get_min_initial_states().computed.last_height,
                    ),
                }
            }
        }
    }

    /// Minimum date over the datasets that pass `is_not_empty`.
    fn min_datasets_date(
        datasets: &dyn AnyDatasets,
        is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
        map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Date>,
    ) -> Option<Date> {
        Self::min_date(
            datasets
                .to_any_dataset_vec()
                .into_iter()
                .filter(is_not_empty)
                .map(map),
        )
    }

    /// Minimum height over the datasets that pass `is_not_empty`.
    fn min_datasets_height(
        datasets: &dyn AnyDatasets,
        is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
        map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Height>,
    ) -> Option<Height> {
        Self::min_height(
            datasets
                .to_any_dataset_vec()
                .into_iter()
                .filter(is_not_empty)
                .map(map),
        )
    }

    /// Computes the minimum initial state of a single dataset's maps.
    ///
    /// In `Computed` mode with `recompute_computed()` set, the dataset's
    /// computed files are deleted and a default (all-`None`) state is returned.
    fn compute_from_dataset(dataset: &dyn AnyDataset, mode: Mode, config: &Config) -> Self {
        match mode {
            Mode::Inserted => {
                let date_vec = dataset.to_all_inserted_date_map_vec();
                let height_vec = dataset.to_all_inserted_height_map_vec();
                Self {
                    first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(
                        &date_vec,
                    ),
                    first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
                        &height_vec,
                    ),
                    last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
                    last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
                }
            }
            Mode::Computed => {
                if config.recompute_computed() {
                    dataset.reset_computed();
                    return Self::default();
                }
                let date_vec = dataset.to_all_computed_date_map_vec();
                let height_vec = dataset.to_all_computed_height_map_vec();
                Self {
                    first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(
                        &date_vec,
                    ),
                    first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
                        &height_vec,
                    ),
                    last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
                    last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
                }
            }
        }
    }

    #[inline(always)]
    fn compute_min_initial_last_date_from_dataset(
        arr: &[&(dyn AnyDateMap + Sync + Send)],
    ) -> Option<Date> {
        Self::min_date(arr.iter().map(|map| map.get_initial_last_date()))
    }

    #[inline(always)]
    fn compute_min_initial_last_height_from_dataset(
        arr: &[&(dyn AnyHeightMap + Sync + Send)],
    ) -> Option<Height> {
        Self::min_height(arr.iter().map(|map| map.get_initial_last_height()))
    }

    #[inline(always)]
    fn compute_min_initial_first_unsafe_date_from_dataset(
        arr: &[&(dyn AnyDateMap + Sync + Send)],
    ) -> Option<Date> {
        Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date()))
    }

    #[inline(always)]
    fn compute_min_initial_first_unsafe_height_from_dataset(
        arr: &[&(dyn AnyHeightMap + Sync + Send)],
    ) -> Option<Height> {
        Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height()))
    }

    /// `Option<Date>` orders `None` before any `Some`, so the result is `None`
    /// when the iterator is empty or any item is `None` — the most conservative
    /// outcome. `flatten()` replaces the hand-rolled `and_then(|opt| opt)`.
    #[inline(always)]
    fn min_date(iter: impl Iterator<Item = Option<Date>>) -> Option<Date> {
        iter.min().flatten()
    }

    /// Same semantics as `min_date`, for heights.
    #[inline(always)]
    fn min_height(iter: impl Iterator<Item = Option<Height>>) -> Option<Height> {
        iter.min().flatten()
    }
}

View File

@@ -0,0 +1,9 @@
// Dataset plumbing: the `AnyDataset`/`AnyDatasetGroup`/`AnyDatasets` traits
// and the `MinInitialState(s)` bookkeeping, re-exported flat.
mod any_dataset;
mod any_dataset_group;
mod any_datasets;
mod min_initial_state;
pub use any_dataset::*;
pub use any_dataset_group::*;
pub use any_datasets::*;
pub use min_initial_state::*;

View File

@@ -0,0 +1,74 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Global (non-cohort) address metadata: running counters of the address
/// databases' sizes, plus a derived net-new-address series.
#[derive(Allocative, Iterable)]
pub struct AllAddressesMetadataDataset {
    min_initial_states: MinInitialStates,
    // NOTE(review): field name is misspelled ("addreses"); it is private, so a
    // rename would only touch this file — worth a follow-up.
    created_addreses: BiMap<u32>,
    // Inserted from the size of the empty-address database.
    empty_addresses: BiMap<u32>,
    // Computed: net change of `created_addreses` per period.
    new_addresses: BiMap<u32>,
}
impl AllAddressesMetadataDataset {
    /// Opens (or creates) the backing maps under `path` and seeds
    /// `min_initial_states` from whatever is already on disk.
    pub fn import(path: &MapPath, config: &Config) -> color_eyre::Result<Self> {
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            created_addreses: BiMap::new_bin(1, MapKind::Inserted, &path.join("created_addresses")),
            empty_addresses: BiMap::new_bin(1, MapKind::Inserted, &path.join("empty_addresses")),
            // Computed
            new_addresses: BiMap::new_bin(1, MapKind::Computed, &path.join("new_addresses")),
        };
        let initial = MinInitialStates::compute_from_dataset(&s, config);
        s.min_initial_states.consume(initial);
        Ok(s)
    }

    /// Records the current address-database sizes at `height`, and mirrors
    /// them onto the date maps when this is the date's last block.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            databases,
            height,
            date,
            is_date_last_block,
            ..
        } = insert_data;
        let created_count = self
            .created_addreses
            .height
            .insert(height, *databases.address_to_address_index.metadata.len);
        let empty_count = self.empty_addresses.height.insert(
            height,
            *databases.address_index_to_empty_address_data.metadata.len,
        );
        if is_date_last_block {
            self.created_addreses.date.insert(date, created_count);
            self.empty_addresses.date.insert(date, empty_count);
        }
    }

    /// Derives `new_addresses` as the net change of `created_addreses`.
    pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
        self.new_addresses
            .multi_insert_net_change(heights, dates, &mut self.created_addreses, 1)
    }
}
impl AnyDataset for AllAddressesMetadataDataset {
    /// Exposes the per-dataset state that the default `AnyDataset`
    /// `needs_insert_*` / `should_compute_*` methods read.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,381 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::{AddressCohortId, DurableStates},
},
structs::{AddressSplit, BiMap, Config, Date, Height, MapPath},
};
use super::cohort_metadata::AddressCohortMetadataDataset;
/// One address cohort's data: its metadata maps plus its sub-datasets,
/// scoped by an `AddressSplit`.
#[derive(Allocative, Iterable)]
pub struct CohortDataset {
    min_initial_states: MinInitialStates,
    // Which slice of the address space this cohort covers.
    split: AddressSplit,
    metadata: AddressCohortMetadataDataset,
    pub subs: SubDataset,
}
impl CohortDataset {
    /// Imports the cohort's metadata dataset and sub-datasets from `path`,
    /// then computes and stores the cohort's minimal initial states.
    pub fn import(
        path: &MapPath,
        id: AddressCohortId,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let name = id.as_name().map(|s| s.to_owned());
        let split = id.as_split();

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            split,
            metadata: AddressCohortMetadataDataset::import(path, &name, config)?,
            subs: SubDataset::import(path, &name, config)?,
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));

        Ok(s)
    }

    /// Every sub-dataset group of this cohort (currently only `subs`).
    pub fn sub_datasets_vec(&self) -> Vec<&SubDataset> {
        vec![&self.subs]
    }

    /// Whether the metadata dataset still lacks values at `height`/`date`.
    pub fn needs_insert_metadata(&self, height: Height, date: Date) -> bool {
        self.metadata.needs_insert(height, date)
    }

    /// Whether any sub-dataset's UTXO maps still lack values at `height`/`date`.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.utxo.needs_insert(height, date))
    }

    /// Whether any sub-dataset's capitalization maps still lack values.
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.needs_insert(height, date))
    }

    /// Whether any sub-dataset's supply maps still lack values.
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.needs_insert(height, date))
    }

    /// Whether any sub-dataset's price-paid maps still lack values.
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.price_paid.needs_insert(height, date))
    }

    /// Whether any sub-dataset's realized maps still lack values.
    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.needs_insert(height, date))
    }

    /// Whether any sub-dataset's unrealized maps still lack values.
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.needs_insert(height, date))
    }

    /// Whether any sub-dataset's input maps still lack values.
    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.input.needs_insert(height, date))
    }

    // fn needs_insert_output(&self, insert_data: &InsertData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.needs_insert(height, date))
    // }

    /// Inserts the cohort's realized state into `subs.realized`.
    ///
    /// NOTE(review): the unwraps assume the realized states map contains this
    /// cohort's split whenever this is called — confirm against callers.
    pub fn insert_realized_data(&mut self, insert_data: &InsertData) {
        let realized_state = insert_data
            .address_cohorts_realized_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.subs.realized.insert(insert_data, realized_state);
    }

    /// Inserts the cohort's current address count into the metadata dataset.
    fn insert_metadata(&mut self, insert_data: &InsertData) {
        let address_count = insert_data
            .states
            .address_cohorts_durable_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap()
            .address_count;

        self.metadata.insert(insert_data, address_count);
    }

    /// Inserts the cohort's supply state.
    fn insert_supply_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
        self.subs
            .supply
            .insert(insert_data, &durable_states.supply_state);
    }

    /// Inserts the cohort's UTXO state.
    fn insert_utxo_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
        self.subs
            .utxo
            .insert(insert_data, &durable_states.utxo_state);
    }

    /// Inserts the cohort's capitalization state.
    fn insert_capitalization_data(
        &mut self,
        insert_data: &InsertData,
        durable_states: &DurableStates,
    ) {
        self.subs
            .capitalization
            .insert(insert_data, &durable_states.capitalization_state);
    }

    /// Inserts the cohort's unrealized block and date states.
    fn insert_unrealized_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.subs.unrealized.insert(
            insert_data,
            &states.unrealized_block_state,
            &states.unrealized_date_state,
        );
    }

    /// Inserts the cohort's price-paid state.
    fn insert_price_paid_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.subs
            .price_paid
            .insert(insert_data, &states.price_paid_state);
    }

    /// Inserts the cohort's input state.
    fn insert_input_data(&mut self, insert_data: &InsertData) {
        let state = insert_data
            .address_cohorts_input_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.subs.input.insert(insert_data, state);
    }

    // fn insert_output_data(&mut self, insert_data: &InsertData) {
    //     let state = insert_data
    //         .address_cohorts_output_states
    //         .as_ref()
    //         .unwrap()
    //         .get(&self.split)
    //         .unwrap();

    //     self.output.insert(insert_data, &state.all);
    //     self.illiquid.output.insert(insert_data, &state.illiquid);
    //     self.liquid.output.insert(insert_data, &state.liquid);
    //     self.highly_liquid
    //         .output
    //         .insert(insert_data, &state.highly_liquid);
    // }

    /// Inserts every piece of state the cohort still needs at this
    /// height/date. Returns early when address computation is disabled or no
    /// durable states exist for this cohort's split.
    pub fn insert(&mut self, insert_data: &InsertData) {
        if !insert_data.compute_addresses {
            return;
        }

        let address_cohort_durable_states = insert_data
            .states
            .address_cohorts_durable_states
            .as_ref()
            .unwrap()
            .get(&self.split);

        if address_cohort_durable_states.is_none() {
            return; // TODO: Check if should panic instead
        }

        let address_cohort_durable_states = address_cohort_durable_states.unwrap();

        if self.needs_insert_metadata(insert_data.height, insert_data.date) {
            self.insert_metadata(insert_data);
        }

        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.insert_utxo_data(insert_data, &address_cohort_durable_states.durable_states);
        }

        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.insert_capitalization_data(
                insert_data,
                &address_cohort_durable_states.durable_states,
            );
        }

        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.insert_supply_data(insert_data, &address_cohort_durable_states.durable_states);
        }

        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.insert_realized_data(insert_data);
        }

        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.insert_unrealized_data(insert_data);
        }

        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.insert_price_paid_data(insert_data);
        }

        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.insert_input_data(insert_data);
        }

        // if self.needs_insert_output(insert_data) {
        //     self.insert_output_data(insert_data);
        // }
    }

    // pub fn should_compute_metadata(&self, compute_data: &ComputeData) -> bool {
    //     self.metadata.should_compute(compute_data)
    // }

    // pub fn should_compute_utxo(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.utxo.should_compute(compute_data))
    // }

    /// Whether any sub-dataset still has supply values to compute.
    pub fn should_compute_supply(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.should_compute(compute_data))
    }

    /// Whether any sub-dataset still has capitalization values to compute.
    pub fn should_compute_capitalization(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.should_compute(compute_data))
    }

    /// Whether any sub-dataset still has realized values to compute.
    fn should_compute_realized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.should_compute(compute_data))
    }

    /// Whether any sub-dataset still has unrealized values to compute.
    fn should_compute_unrealized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.should_compute(compute_data))
    }

    // fn should_compute_input(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.input.should_compute(compute_data))
    // }

    // fn should_compute_output(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.should_compute(compute_data))
    // }

    /// Computes the cohort's supply maps from the circulating supply.
    fn compute_supply_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        self.subs.supply.compute(compute_data, circulating_supply);
    }

    /// Computes the cohort's unrealized maps.
    fn compute_unrealized_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.subs.unrealized.compute(
            compute_data,
            &mut self.subs.supply.supply,
            circulating_supply,
            market_cap,
        );
    }

    /// Computes the cohort's realized maps.
    fn compute_realized_data(&mut self, compute_data: &ComputeData, market_cap: &mut BiMap<f32>) {
        self.subs.realized.compute(compute_data, market_cap);
    }

    /// Computes capitalization maps; reads the cohort's supply maps, so it
    /// must run after `compute_supply_data` (see `compute`).
    fn compute_capitalization_data(&mut self, compute_data: &ComputeData, closes: &mut BiMap<f32>) {
        self.subs
            .capitalization
            .compute(compute_data, closes, &mut self.subs.supply.supply);
    }

    // fn compute_output_data(&mut self, compute_data: &ComputeData) {
    //     self.all
    //         .output
    //         .compute(compute_data, &mut self.supply.total);
    // }

    /// Runs every pending computation for the cohort, in dependency order.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.should_compute_supply(compute_data) {
            self.compute_supply_data(compute_data, circulating_supply);
        }

        if self.should_compute_unrealized(compute_data) {
            self.compute_unrealized_data(compute_data, circulating_supply, market_cap);
        }

        if self.should_compute_realized(compute_data) {
            self.compute_realized_data(compute_data, market_cap);
        }

        // MUST BE after compute_supply
        if self.should_compute_capitalization(compute_data) {
            self.compute_capitalization_data(compute_data, closes);
        }

        // if self.should_compute_output(compute_data) {
        //     self.compute_output_data(compute_data);
        // }
    }
}
// Exposes the dataset's minimal initial states to the generic dataset machinery.
impl AnyDataset for CohortDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,70 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Per-cohort address metadata (currently only the address count).
#[derive(Allocative, Iterable)]
pub struct AddressCohortMetadataDataset {
    min_initial_states: MinInitialStates,
    // Inserted: number of addresses in the cohort, by height and by date.
    address_count: BiMap<f64>,
    // pub output: OutputSubDataset,
    // Sending addresses
    // Receiving addresses
    // Active addresses (Unique(Sending + Receiving))
}
impl AddressCohortMetadataDataset {
    /// Opens/creates the cohort's metadata map files and records the
    /// dataset's minimal initial states.
    ///
    /// A named cohort stores its maps under a `{name}/` sub-folder of
    /// `path`; an unnamed cohort stores them at `path` directly.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let file = |leaf: &str| match name {
            Some(name) => path.join(&format!("{name}/{leaf}")),
            None => path.join(leaf),
        };

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),

            // Inserted
            address_count: BiMap::new_bin(1, MapKind::Inserted, &file("address_count")),
            // output: OutputSubDataset::import(parent_path)?,
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the cohort's address count at `height`, and at `date` when
    /// this block is the last one of its day.
    pub fn insert(&mut self, insert_data: &InsertData, address_count: f64) {
        let &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        } = insert_data;

        self.address_count.height.insert(height, address_count);

        if is_date_last_block {
            self.address_count.date.insert(date, address_count);
        }
    }
}
// Exposes the dataset's minimal initial states to the generic dataset machinery.
impl AnyDataset for AddressCohortMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,176 @@
mod all_metadata;
mod cohort;
pub mod cohort_metadata;
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
parser::states::SplitByAddressCohort,
structs::{BiMap, Config, Date, Height},
};
use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset};
use super::{AnyDataset, AnyDatasets, ComputeData, InsertData, MinInitialStates};
/// Address-related datasets: one `CohortDataset` per address cohort plus
/// the global address metadata dataset.
#[derive(Allocative)]
pub struct AddressDatasets {
    min_initial_states: MinInitialStates,
    // Global (all-addresses) created/empty/new address counts.
    metadata: AllAddressesMetadataDataset,
    pub cohorts: SplitByAddressCohort<CohortDataset>,
}
impl AddressDatasets {
    /// Imports every cohort dataset in parallel, then the global address
    /// metadata dataset, and records the group's minimal initial states.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByAddressCohort::<Option<CohortDataset>>::default();

        let path_dataset = config.path_datasets();

        // Import each cohort in parallel, then move the results back into
        // `cohorts`, propagating the first import error if any.
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, CohortDataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                cohorts.get_mut_from_id(&id).replace(dataset?);
                Ok(())
            })?;

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            metadata: AllAddressesMetadataDataset::import(&path_dataset, config)?,
            cohorts: cohorts.unwrap(),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));

        Ok(s)
    }

    /// Inserts `insert_data` into the metadata dataset and every cohort.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.metadata.insert(insert_data);

        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| cohort.insert(insert_data))
    }

    /// `true` when `predicate` holds for at least one cohort dataset.
    ///
    /// Shared by the `needs_insert_*` helpers below to avoid repeating the
    /// same iteration boilerplate in each of them.
    fn any_cohort(&self, predicate: impl Fn(&CohortDataset) -> bool) -> bool {
        self.cohorts
            .as_vec()
            .into_iter()
            .any(|(dataset, _)| predicate(dataset))
    }

    /// Whether any cohort still needs durable (UTXO, capitalization,
    /// supply) or one-shot state at `height`/`date`.
    pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
        let needs_insert_utxo = self.needs_insert_utxo(height, date);
        let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
        let needs_insert_supply = self.needs_insert_supply(height, date);
        let needs_one_shot_states = self.needs_one_shot_states(height, date);

        needs_insert_utxo
            || needs_insert_capitalization
            || needs_insert_supply
            || needs_one_shot_states
    }

    /// Whether any cohort still needs price-paid or unrealized state.
    pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }

    // pub fn needs_sent_states(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    // }

    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.any_cohort(|dataset| dataset.needs_insert_utxo(height, date))
    }

    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.any_cohort(|dataset| dataset.needs_insert_capitalization(height, date))
    }

    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.any_cohort(|dataset| dataset.needs_insert_supply(height, date))
    }

    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.any_cohort(|dataset| dataset.needs_insert_price_paid(height, date))
    }

    // pub fn needs_insert_realized(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.any_cohort(|dataset| dataset.needs_insert_realized(height, date))
    // }

    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.any_cohort(|dataset| dataset.needs_insert_unrealized(height, date))
    }

    // pub fn needs_insert_input(&self, height: Height, date: WNaiveDate) -> bool {
    //     self.any_cohort(|dataset| dataset.needs_insert_input(height, date))
    // }

    /// Computes the metadata dataset, then every cohort.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.metadata.compute(compute_data);

        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| {
                cohort.compute(compute_data, closes, circulating_supply, market_cap)
            })
    }
}
// Exposes the group's minimal initial states and its contained datasets.
impl AnyDatasets for AddressDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // All contained datasets: the cohorts first, then the metadata dataset.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets = self
            .cohorts
            .as_vec()
            .into_iter()
            .map(|(d, _)| d as &(dyn AnyDataset + Send + Sync))
            .collect_vec();

        datasets.push(&self.metadata as &(dyn AnyDataset + Send + Sync));

        datasets
    }

    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets = self
            .cohorts
            .as_mut_vec()
            .into_iter()
            .map(|(d, _)| d as &mut dyn AnyDataset)
            .collect_vec();

        datasets.push(&mut self.metadata as &mut dyn AnyDataset);

        datasets
    }
}

View File

@@ -0,0 +1,51 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, Date, HeightMap, MapKind, Timestamp},
};
use super::{InsertData, MinInitialStates};
/// Per-block metadata: each block's timestamp and the date derived from it.
#[derive(Allocative, Iterable)]
pub struct BlockMetadataDataset {
    min_initial_states: MinInitialStates,
    // Date of each block, derived from its timestamp.
    pub date: HeightMap<Date>,
    // Raw timestamp of each block.
    pub timestamp: HeightMap<Timestamp>,
}
impl BlockMetadataDataset {
    /// Opens/creates the block-metadata map files under the datasets path
    /// and records the dataset's minimal initial states.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let datasets_path = config.path_datasets();
        let file = |leaf: &str| datasets_path.join(leaf);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),

            // Inserted
            date: HeightMap::new_bin(1, MapKind::Inserted, &file("date")),
            timestamp: HeightMap::new_bin(1, MapKind::Inserted, &file("timestamp")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the block's timestamp and derived date at `height`.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            height, timestamp, ..
        } = insert_data;

        self.timestamp.insert(height, timestamp);
        self.date.insert(height, timestamp.to_date());
    }
}
// Exposes the dataset's minimal initial states to the generic dataset machinery.
impl AnyDataset for BlockMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,65 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, HeightMap, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Coindays-destroyed maps: per block, and summed per day.
#[derive(Allocative, Iterable)]
pub struct CoindaysDataset {
    min_initial_states: MinInitialStates,
    // Coindays destroyed by each block.
    pub coindays_destroyed: HeightMap<f32>,
    // Sum of coindays destroyed over each day's blocks.
    pub coindays_destroyed_1d_sum: DateMap<f32>,
}
impl CoindaysDataset {
    /// Opens/creates the coindays map files under the datasets path and
    /// records the dataset's minimal initial states.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let datasets_path = config.path_datasets();
        let file = |leaf: &str| datasets_path.join(leaf);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),

            // Inserted
            coindays_destroyed: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &file("coindays_destroyed"),
            ),
            coindays_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &file("coindays_destroyed_1d_sum"),
            ),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the coindays destroyed by the block at `height`; when the
    /// block is the last of its day, also records the day's sum at `date`.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            height,
            satdays_destroyed,
            date_blocks_range,
            is_date_last_block,
            date,
            ..
        } = insert_data;

        self.coindays_destroyed
            .insert(height, satdays_destroyed.to_btc() as f32);

        if is_date_last_block {
            let daily_sum = self.coindays_destroyed.sum_range(date_blocks_range);
            self.coindays_destroyed_1d_sum.insert(date, daily_sum);
        }
    }
}
// Exposes the dataset's minimal initial states to the generic dataset machinery.
impl AnyDataset for CoindaysDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,659 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
structs::{BiMap, Config, DateMap, Height, HeightMap, MapKind},
utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates, RatioDataset};
/// Cointime-economics maps: inserted coinblocks-destroyed values plus the
/// many maps computed from them (liveliness, vaultedness, active/vaulted
/// supply and prices, cointime caps, etc.).
#[derive(Allocative, Iterable)]
pub struct CointimeDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    pub coinblocks_destroyed: HeightMap<f32>,
    pub coinblocks_destroyed_1d_sum: DateMap<f32>,

    // Computed
    pub active_cap: BiMap<f32>,
    pub active_price: BiMap<f32>,
    pub active_price_ratio: RatioDataset,
    pub active_supply: BiMap<f32>,
    pub active_supply_3m_net_change: BiMap<f32>,
    pub active_supply_net_change: BiMap<f32>,
    pub activity_to_vaultedness_ratio: BiMap<f32>,
    pub coinblocks_created: HeightMap<f32>,
    pub coinblocks_created_1d_sum: DateMap<f32>,
    pub coinblocks_stored: HeightMap<f32>,
    pub coinblocks_stored_1d_sum: DateMap<f32>,
    pub cointime_adjusted_velocity: DateMap<f32>,
    pub cointime_adjusted_inflation_rate: DateMap<f32>,
    pub cointime_adjusted_yearly_inflation_rate: DateMap<f32>,
    pub cointime_cap: BiMap<f32>,
    pub cointime_price: BiMap<f32>,
    pub cointime_price_ratio: RatioDataset,
    pub cointime_value_created: HeightMap<f32>,
    pub cointime_value_created_1d_sum: DateMap<f32>,
    pub cointime_value_destroyed: HeightMap<f32>,
    pub cointime_value_destroyed_1d_sum: DateMap<f32>,
    pub cointime_value_stored: HeightMap<f32>,
    pub cointime_value_stored_1d_sum: DateMap<f32>,
    pub concurrent_liveliness: DateMap<f32>,
    pub concurrent_liveliness_2w_median: DateMap<f32>,
    pub cumulative_coinblocks_created: BiMap<f32>,
    pub cumulative_coinblocks_destroyed: BiMap<f32>,
    pub cumulative_coinblocks_stored: BiMap<f32>,
    pub investor_cap: BiMap<f32>,
    pub investorness: BiMap<f32>,
    pub liveliness: BiMap<f32>,
    pub liveliness_net_change: BiMap<f32>,
    pub liveliness_net_change_2w_median: BiMap<f32>,
    pub producerness: BiMap<f32>,
    pub thermo_cap: BiMap<f32>,
    pub thermo_cap_to_investor_cap_ratio: BiMap<f32>,
    pub total_cointime_value_created: BiMap<f32>,
    pub total_cointime_value_destroyed: BiMap<f32>,
    pub total_cointime_value_stored: BiMap<f32>,
    pub true_market_deviation: BiMap<f32>,
    pub true_market_mean: BiMap<f32>,
    pub true_market_mean_ratio: RatioDataset,
    pub true_market_net_unrealized_profit_and_loss: BiMap<f32>,
    pub vaulted_cap: BiMap<f32>,
    pub vaulted_price: BiMap<f32>,
    pub vaulted_price_ratio: RatioDataset,
    pub vaulted_supply: BiMap<f32>,
    pub vaulted_supply_net_change: BiMap<f32>,
    pub vaulted_supply_3m_net_change: BiMap<f32>,
    pub vaultedness: BiMap<f32>,
    pub vaulting_rate: BiMap<f32>,
}
impl CointimeDataset {
/// Opens/creates every cointime map file under the datasets path and
/// records the dataset's minimal initial states.
///
/// NOTE(review): the first argument to `new_bin` (mostly 1, sometimes 2/3)
/// looks like a per-map version number — confirm against `new_bin`'s
/// definition before changing any of them.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
    let path_dataset = config.path_datasets();
    let f = |s: &str| path_dataset.join(s);

    let mut s = Self {
        min_initial_states: MinInitialStates::default(),

        // Inserted
        coinblocks_destroyed: HeightMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinblocks_destroyed"),
        ),
        coinblocks_destroyed_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinblocks_destroyed_1d_sum"),
        ),

        // Computed
        active_cap: BiMap::new_bin(1, MapKind::Computed, &f("active_cap")),
        active_price: BiMap::new_bin(1, MapKind::Computed, &f("active_price")),
        active_price_ratio: RatioDataset::import(&path_dataset, "active_price", config)?,
        active_supply: BiMap::new_bin(1, MapKind::Computed, &f("active_supply")),
        active_supply_3m_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("active_supply_3m_net_change"),
        ),
        active_supply_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("active_supply_net_change"),
        ),
        activity_to_vaultedness_ratio: BiMap::new_bin(
            2,
            MapKind::Computed,
            &f("activity_to_vaultedness_ratio"),
        ),
        coinblocks_created: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_created")),
        coinblocks_created_1d_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinblocks_created_1d_sum"),
        ),
        coinblocks_stored: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_stored")),
        coinblocks_stored_1d_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinblocks_stored_1d_sum"),
        ),
        cointime_adjusted_velocity: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_adjusted_velocity"),
        ),
        cointime_adjusted_inflation_rate: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_adjusted_inflation_rate"),
        ),
        cointime_adjusted_yearly_inflation_rate: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_adjusted_yearly_inflation_rate"),
        ),
        cointime_cap: BiMap::new_bin(1, MapKind::Computed, &f("cointime_cap")),
        cointime_price: BiMap::new_bin(1, MapKind::Computed, &f("cointime_price")),
        cointime_price_ratio: RatioDataset::import(&path_dataset, "cointime_price", config)?,
        cointime_value_created: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_created"),
        ),
        cointime_value_created_1d_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_created_1d_sum"),
        ),
        cointime_value_destroyed: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_destroyed"),
        ),
        cointime_value_destroyed_1d_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_destroyed_1d_sum"),
        ),
        cointime_value_stored: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_stored"),
        ),
        cointime_value_stored_1d_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("cointime_value_stored_1d_sum"),
        ),
        concurrent_liveliness: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("concurrent_liveliness"),
        ),
        concurrent_liveliness_2w_median: DateMap::new_bin(
            2,
            MapKind::Computed,
            &f("concurrent_liveliness_2w_median"),
        ),
        cumulative_coinblocks_created: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_coinblocks_created"),
        ),
        cumulative_coinblocks_destroyed: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_coinblocks_destroyed"),
        ),
        cumulative_coinblocks_stored: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_coinblocks_stored"),
        ),
        investor_cap: BiMap::new_bin(1, MapKind::Computed, &f("investor_cap")),
        investorness: BiMap::new_bin(1, MapKind::Computed, &f("investorness")),
        liveliness: BiMap::new_bin(1, MapKind::Computed, &f("liveliness")),
        liveliness_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("liveliness_net_change"),
        ),
        liveliness_net_change_2w_median: BiMap::new_bin(
            3,
            MapKind::Computed,
            &f("liveliness_net_change_2w_median"),
        ),
        producerness: BiMap::new_bin(1, MapKind::Computed, &f("producerness")),
        thermo_cap: BiMap::new_bin(1, MapKind::Computed, &f("thermo_cap")),
        thermo_cap_to_investor_cap_ratio: BiMap::new_bin(
            2,
            MapKind::Computed,
            &f("thermo_cap_to_investor_cap_ratio"),
        ),
        total_cointime_value_created: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("total_cointime_value_created"),
        ),
        total_cointime_value_destroyed: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("total_cointime_value_destroyed"),
        ),
        total_cointime_value_stored: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("total_cointime_value_stored"),
        ),
        true_market_deviation: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("true_market_deviation"),
        ),
        true_market_mean: BiMap::new_bin(1, MapKind::Computed, &f("true_market_mean")),
        true_market_mean_ratio: RatioDataset::import(
            &path_dataset,
            "true_market_mean",
            config,
        )?,
        true_market_net_unrealized_profit_and_loss: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("true_market_net_unrealized_profit_and_loss"),
        ),
        vaulted_cap: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_cap")),
        vaulted_price: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_price")),
        vaulted_price_ratio: RatioDataset::import(&path_dataset, "vaulted_price", config)?,
        vaulted_supply: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_supply")),
        vaulted_supply_3m_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("vaulted_supply_3m_net_change"),
        ),
        vaulted_supply_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("vaulted_supply_net_change"),
        ),
        vaultedness: BiMap::new_bin(1, MapKind::Computed, &f("vaultedness")),
        vaulting_rate: BiMap::new_bin(1, MapKind::Computed, &f("vaulting_rate")),
    };

    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));

    Ok(s)
}
/// Records the coinblocks destroyed by the block at `height`; when the
/// block is the last of its day, also records the day's sum at `date`.
pub fn insert(&mut self, insert_data: &InsertData) {
    let &InsertData {
        height,
        date,
        satblocks_destroyed,
        date_blocks_range,
        is_date_last_block,
        ..
    } = insert_data;

    self.coinblocks_destroyed
        .insert(height, satblocks_destroyed.to_btc() as f32);

    if is_date_last_block {
        let daily_sum = self.coinblocks_destroyed.sum_range(date_blocks_range);
        self.coinblocks_destroyed_1d_sum.insert(date, daily_sum);
    }
}
#[allow(clippy::too_many_arguments)]
pub fn compute(
&mut self,
compute_data: &ComputeData,
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
closes: &mut BiMap<f32>,
circulating_supply: &mut BiMap<f64>,
realized_cap: &mut BiMap<f32>,
realized_price: &mut BiMap<f32>,
inflation_rate: &mut DateMap<f64>,
yearly_inflation_rate: &mut DateMap<f64>,
annualized_transaction_volume: &mut DateMap<f32>,
cumulative_subsidy_in_dollars: &mut BiMap<f32>,
) {
let &ComputeData { heights, dates, .. } = compute_data;
self.cumulative_coinblocks_destroyed
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_destroyed);
self.cumulative_coinblocks_destroyed
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_destroyed_1d_sum);
self.coinblocks_created.multi_insert_simple_transform(
heights,
&mut circulating_supply.height,
|circulating_supply, _| circulating_supply as f32,
);
self.coinblocks_created_1d_sum.multi_insert_sum_range(
dates,
&self.coinblocks_created,
first_height,
last_height,
);
self.cumulative_coinblocks_created
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_created);
self.cumulative_coinblocks_created
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_created_1d_sum);
self.coinblocks_stored.multi_insert_subtract(
heights,
&mut self.coinblocks_created,
&mut self.coinblocks_destroyed,
);
self.coinblocks_stored_1d_sum.multi_insert_sum_range(
dates,
&self.coinblocks_stored,
first_height,
last_height,
);
self.cumulative_coinblocks_stored
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_stored);
self.cumulative_coinblocks_stored
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_stored_1d_sum);
self.liveliness.multi_insert_divide(
heights,
dates,
&mut self.cumulative_coinblocks_destroyed,
&mut self.cumulative_coinblocks_created,
);
self.vaultedness.multi_insert_simple_transform(
heights,
dates,
&mut self.liveliness,
&|liveliness| 1.0 - liveliness,
);
self.activity_to_vaultedness_ratio.multi_insert_divide(
heights,
dates,
&mut self.liveliness,
&mut self.vaultedness,
);
self.concurrent_liveliness.multi_insert_divide(
dates,
&mut self.coinblocks_destroyed_1d_sum,
&mut self.coinblocks_created_1d_sum,
);
self.concurrent_liveliness_2w_median.multi_insert_median(
dates,
&mut self.concurrent_liveliness,
Some(TWO_WEEK_IN_DAYS),
);
self.liveliness_net_change.multi_insert_net_change(
heights,
dates,
&mut self.liveliness,
ONE_DAY_IN_DAYS,
);
self.liveliness_net_change_2w_median
.multi_insert_net_change(heights, dates, &mut self.liveliness, TWO_WEEK_IN_DAYS);
self.vaulted_supply.multi_insert_multiply(
heights,
dates,
&mut self.vaultedness,
circulating_supply,
);
self.vaulted_supply_net_change.multi_insert_net_change(
heights,
dates,
&mut self.vaulted_supply,
ONE_DAY_IN_DAYS,
);
self.vaulted_supply_3m_net_change.multi_insert_net_change(
heights,
dates,
&mut self.vaulted_supply,
THREE_MONTHS_IN_DAYS,
);
self.vaulting_rate.multi_insert_simple_transform(
heights,
dates,
&mut self.vaulted_supply,
&|vaulted_supply| vaulted_supply * ONE_YEAR_IN_DAYS as f32,
);
self.active_supply.multi_insert_multiply(
heights,
dates,
&mut self.liveliness,
circulating_supply,
);
self.active_supply_net_change.multi_insert_net_change(
heights,
dates,
&mut self.active_supply,
ONE_DAY_IN_DAYS,
);
self.active_supply_3m_net_change.multi_insert_net_change(
heights,
dates,
&mut self.active_supply,
THREE_MONTHS_IN_DAYS,
);
// TODO: Do these
// let min_vaulted_supply = ;
// let max_active_supply = ;
self.cointime_adjusted_inflation_rate.multi_insert_multiply(
dates,
&mut self.activity_to_vaultedness_ratio.date,
inflation_rate,
);
self.cointime_adjusted_yearly_inflation_rate
.multi_insert_multiply(
dates,
&mut self.activity_to_vaultedness_ratio.date,
yearly_inflation_rate,
);
self.cointime_adjusted_velocity.multi_insert_divide(
dates,
annualized_transaction_volume,
&mut self.active_supply.date,
);
// TODO:
// const activeSupplyChangeFromTransactions90dChange =
// createNetChangeLazyDataset(activeSupplyChangeFromTransactions, 90);
// const activeSupplyChangeFromIssuance = createMultipliedLazyDataset(
// lastSubsidy,
// liveliness,
// );
self.thermo_cap.multi_insert_simple_transform(
heights,
dates,
cumulative_subsidy_in_dollars,
&|cumulative_subsidy_in_dollars| cumulative_subsidy_in_dollars,
);
self.investor_cap
.multi_insert_subtract(heights, dates, realized_cap, &mut self.thermo_cap);
self.thermo_cap_to_investor_cap_ratio
.multi_insert_percentage(heights, dates, &mut self.thermo_cap, &mut self.investor_cap);
// TODO:
// const activeSupplyChangeFromIssuance90dChange = createNetChangeLazyDataset(
// activeSupplyChangeFromIssuance,
// 90,
// );
self.active_price
.multi_insert_divide(heights, dates, realized_price, &mut self.liveliness);
self.active_cap.height.multi_insert_multiply(
heights,
&mut self.active_supply.height,
&mut closes.height,
);
self.active_cap.date.multi_insert_multiply(
dates,
&mut self.active_supply.date,
&mut closes.date,
);
self.vaulted_price.multi_insert_divide(
heights,
dates,
realized_price,
&mut self.vaultedness,
);
self.vaulted_cap.height.multi_insert_multiply(
heights,
&mut self.vaulted_supply.height,
&mut closes.height,
);
self.vaulted_cap.date.multi_insert_multiply(
dates,
&mut self.vaulted_supply.date,
&mut closes.date,
);
self.true_market_mean.multi_insert_divide(
heights,
dates,
&mut self.investor_cap,
&mut self.active_supply,
);
self.true_market_deviation.multi_insert_divide(
heights,
dates,
&mut self.active_cap,
&mut self.investor_cap,
);
self.true_market_net_unrealized_profit_and_loss
.height
.multi_insert_complex_transform(
heights,
&mut self.active_cap.height,
|(active_cap, height, ..)| {
let investor_cap = self.investor_cap.height.get(height).unwrap();
(active_cap - investor_cap) / active_cap
},
);
self.true_market_net_unrealized_profit_and_loss
.date
.multi_insert_complex_transform(
dates,
&mut self.active_cap.date,
|(active_cap, date, _, _)| {
let investor_cap = self.investor_cap.date.get(date).unwrap();
(active_cap - investor_cap) / active_cap
},
);
self.investorness
.multi_insert_divide(heights, dates, &mut self.investor_cap, realized_cap);
self.producerness
.multi_insert_divide(heights, dates, &mut self.thermo_cap, realized_cap);
self.cointime_value_destroyed.multi_insert_multiply(
heights,
&mut self.coinblocks_destroyed,
&mut closes.height,
);
self.cointime_value_destroyed_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_destroyed_1d_sum,
&mut closes.date,
);
self.cointime_value_created.multi_insert_multiply(
heights,
&mut self.coinblocks_created,
&mut closes.height,
);
self.cointime_value_created_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_created_1d_sum,
&mut closes.date,
);
self.cointime_value_stored.multi_insert_multiply(
heights,
&mut self.coinblocks_stored,
&mut closes.height,
);
self.cointime_value_stored_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_stored_1d_sum,
&mut closes.date,
);
self.total_cointime_value_created
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_created);
self.total_cointime_value_created
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_created_1d_sum);
self.total_cointime_value_destroyed
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_destroyed);
self.total_cointime_value_destroyed
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_destroyed_1d_sum);
self.total_cointime_value_stored
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_stored);
self.total_cointime_value_stored
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_stored_1d_sum);
self.cointime_price.multi_insert_divide(
heights,
dates,
&mut self.total_cointime_value_destroyed,
&mut self.cumulative_coinblocks_stored,
);
self.cointime_cap.multi_insert_multiply(
heights,
dates,
&mut self.cointime_price,
circulating_supply,
);
self.active_price_ratio
.compute(compute_data, closes, &mut self.active_price);
self.cointime_price_ratio
.compute(compute_data, closes, &mut self.cointime_price);
self.true_market_mean_ratio
.compute(compute_data, closes, &mut self.true_market_mean);
self.vaulted_price_ratio
.compute(compute_data, closes, &mut self.vaulted_price);
}
}
impl AnyDataset for CointimeDataset {
    /// Exposes the minimal initial states so the generic dataset machinery
    /// can decide which heights/dates still need inserting or computing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,50 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::structs::{BiMap, Config, MapKind};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Constant-valued series (0, 1, 50, 100) kept as regular maps so charts can
/// reference them like any other dataset (e.g. as baselines).
#[derive(Allocative, Iterable)]
pub struct ConstantDataset {
    min_initial_states: MinInitialStates,
    pub _0: BiMap<u16>,
    pub _1: BiMap<u16>,
    pub _50: BiMap<u16>,
    pub _100: BiMap<u16>,
}
impl ConstantDataset {
    /// Opens (or creates) the four constant maps under the datasets directory,
    /// then derives the minimal initial states used to resume incrementally.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        // Resolve a dataset name to its on-disk path.
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Computed
            _0: BiMap::new_bin(1, MapKind::Computed, &path("0")),
            _1: BiMap::new_bin(1, MapKind::Computed, &path("1")),
            _50: BiMap::new_bin(1, MapKind::Computed, &path("50")),
            _100: BiMap::new_bin(1, MapKind::Computed, &path("100")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Fills each map with its constant value for every requested height and date.
    pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
        [
            (&mut self._0, 0),
            (&mut self._1, 1),
            (&mut self._50, 50),
            (&mut self._100, 100),
        ]
        .into_iter()
        .for_each(|(map, constant)| map.multi_insert_const(heights, dates, constant));
    }
}
impl AnyDataset for ConstantDataset {
    /// Exposes the minimal initial states so the generic dataset machinery
    /// can decide which heights/dates still need computing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,56 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, Height, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Per-date block-height boundaries: for each date, the first block height of
/// that day and the last height inserted for it.
#[derive(Allocative, Iterable)]
pub struct DateMetadataDataset {
    min_initial_states: MinInitialStates,
    pub first_height: DateMap<Height>,
    pub last_height: DateMap<Height>,
}
impl DateMetadataDataset {
    /// Opens (or creates) the per-date height boundary maps, then derives the
    /// minimal initial states used to resume incrementally.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        // Resolve a dataset name to its on-disk path.
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            first_height: DateMap::new_bin(1, MapKind::Inserted, &path("first_height")),
            last_height: DateMap::new_bin(1, MapKind::Inserted, &path("last_height")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));

        Ok(dataset)
    }

    /// Records the date's first block height, and the current height as the
    /// date's last height (re-inserted every block, so it ends at the day's
    /// final block).
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.first_height
            .insert(insert_data.date, insert_data.date_first_height);
        self.last_height.insert(insert_data.date, insert_data.height);
    }
}
impl AnyDataset for DateMetadataDataset {
    /// Exposes the minimal initial states so the generic dataset machinery
    /// can decide which heights/dates still need inserting.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,760 @@
use allocative::Allocative;
use itertools::Itertools;
use ordered_float::OrderedFloat;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Amount, BiMap, Config, DateMap, Height, HeightMap, MapKey, MapKind},
utils::{
BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS,
TARGET_BLOCKS_PER_DAY,
},
};
use super::{
ComputeData, DateRecapDataset, InsertData, MinInitialStates, RecapDataset, RecapOptions,
};
/// Mining-related series: inserted per-block (height) and per-date maps for
/// coinbase, fees, subsidy, difficulty and block stats, plus series computed
/// from them (rolling sums, SMAs, cumulative totals, hash rate/price, recaps).
#[derive(Allocative, Iterable)]
pub struct MiningDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub blocks_mined: DateMap<usize>,
    pub total_blocks_mined: DateMap<usize>,
    pub coinbase: HeightMap<f64>,
    pub coinbase_1d_sum: DateMap<f64>,
    pub coinbase_in_dollars: HeightMap<f32>,
    pub coinbase_in_dollars_1d_sum: DateMap<f32>,
    pub fees: HeightMap<f64>,
    pub fees_1d_sum: DateMap<f64>,
    pub fees_in_dollars: HeightMap<f32>,
    pub fees_in_dollars_1d_sum: DateMap<f32>,
    // Raw — planned per-block fee distribution stats, not implemented yet.
    // pub average_fee_paid: BiMap<f32>,
    // pub max_fee_paid: BiMap<f32>,
    // pub _90th_percentile_fee_paid: BiMap<f32>,
    // pub _75th_percentile_fee_paid: BiMap<f32>,
    // pub median_fee_paid: BiMap<f32>,
    // pub _25th_percentile_fee_paid: BiMap<f32>,
    // pub _10th_percentile_fee_paid: BiMap<f32>,
    // pub min_fee_paid: BiMap<f32>,
    // sat/vB
    // pub average_fee_price: BiMap<f32>,
    // pub max_fee_price: BiMap<f32>,
    // pub _90th_percentile_fee_price: BiMap<f32>,
    // pub _75th_percentile_fee_price: BiMap<f32>,
    // pub median_fee_price: BiMap<f32>,
    // pub _25th_percentile_fee_price: BiMap<f32>,
    // pub _10th_percentile_fee_price: BiMap<f32>,
    // pub min_fee_price: BiMap<f32>,
    // -
    pub subsidy: HeightMap<f64>,
    pub subsidy_1d_sum: DateMap<f64>,
    pub subsidy_in_dollars: HeightMap<f32>,
    pub subsidy_in_dollars_1d_sum: DateMap<f32>,
    // "last_*" maps hold the value of each date's final block.
    pub last_coinbase: DateMap<f64>,
    pub last_coinbase_in_dollars: DateMap<f32>,
    pub last_fees: DateMap<f64>,
    pub last_fees_in_dollars: DateMap<f32>,
    pub last_subsidy: DateMap<f64>,
    pub last_subsidy_in_dollars: DateMap<f32>,
    pub difficulty: BiMap<f64>,
    pub block_size: HeightMap<f32>,   // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_vbytes: HeightMap<u64>,
    pub block_interval: HeightMap<u32>, // in s
    // Computed
    pub annualized_issuance: DateMap<f64>, // Same as subsidy_1y_sum
    pub blocks_mined_1d_target: DateMap<usize>,
    pub blocks_mined_1m_sma: DateMap<f32>,
    pub blocks_mined_1m_sum: DateMap<usize>,
    pub blocks_mined_1m_target: DateMap<usize>,
    pub blocks_mined_1w_sma: DateMap<f32>,
    pub blocks_mined_1w_sum: DateMap<usize>,
    pub blocks_mined_1w_target: DateMap<usize>,
    pub blocks_mined_1y_sum: DateMap<usize>,
    pub blocks_mined_1y_target: DateMap<usize>,
    pub cumulative_block_size: BiMap<f32>,
    pub cumulative_block_size_gigabytes: BiMap<f32>,
    pub subsidy_1y_sum: DateMap<f64>,
    pub subsidy_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_subsidy: BiMap<f64>,
    pub cumulative_subsidy_in_dollars: BiMap<f32>,
    pub coinbase_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1d_sum_1y_sma: DateMap<f32>,
    pub cumulative_coinbase: BiMap<f64>,
    pub cumulative_coinbase_in_dollars: BiMap<f32>,
    pub fees_1y_sum: DateMap<f64>,
    pub fees_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_fees: BiMap<f64>,
    pub cumulative_fees_in_dollars: BiMap<f32>,
    pub inflation_rate: DateMap<f64>,
    pub yearly_inflation_rate: DateMap<f64>,
    pub subsidy_to_coinbase_ratio: HeightMap<f64>,
    pub subsidy_to_coinbase_1d_ratio: DateMap<f64>,
    pub fees_to_coinbase_ratio: HeightMap<f64>,
    pub fees_to_coinbase_1d_ratio: DateMap<f64>,
    pub hash_rate: DateMap<f64>,
    pub hash_rate_1w_sma: DateMap<f32>,
    pub hash_rate_1m_sma: DateMap<f32>,
    pub hash_rate_2m_sma: DateMap<f32>,
    pub hash_price: DateMap<f64>,
    pub hash_price_min: DateMap<f64>,
    pub hash_price_rebound: DateMap<f64>,
    pub difficulty_adjustment: DateMap<f64>,
    pub block_size_recap: DateRecapDataset<f32>, // in MB
    pub block_weight_recap: DateRecapDataset<f32>, // in MB
    pub block_vbytes_recap: DateRecapDataset<u64>,
    pub block_interval_recap: DateRecapDataset<u32>, // in s
    pub puell_multiple: DateMap<f32>,
    // pub hash_price_in_dollars: DateMap<f64>,
    // pub hash_price_30d_volatility: BiMap<f32>,
    // difficulty_adjustment
    // next_difficulty_adjustment
    // op return fees
    // inscriptions fees
    // until adjustement
    // until halving in days
    // until halving in blocks
}
impl MiningDataset {
/// Opens (or creates) every mining map under the datasets directory, then
/// derives the minimal initial states used to resume work incrementally.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
    // Resolve a dataset name to its on-disk path.
    let f = |s: &str| config.path_datasets().join(s);
    let mut s = Self {
        min_initial_states: MinInitialStates::default(),
        // ---
        // Inserted
        // ---
        total_blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("total_blocks_mined")),
        blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("blocks_mined")),
        coinbase: HeightMap::new_bin(1, MapKind::Inserted, &f("coinbase")),
        coinbase_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("coinbase_1d_sum")),
        coinbase_in_dollars: HeightMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinbase_in_dollars"),
        ),
        coinbase_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("coinbase_in_dollars_1d_sum"),
        ),
        fees: HeightMap::new_bin(1, MapKind::Inserted, &f("fees")),
        fees_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("fees_1d_sum")),
        fees_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("fees_in_dollars")),
        fees_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("fees_in_dollars_1d_sum"),
        ),
        subsidy: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy")),
        subsidy_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("subsidy_1d_sum")),
        subsidy_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy_in_dollars")),
        subsidy_in_dollars_1d_sum: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("subsidy_in_dollars_1d_sum"),
        ),
        last_subsidy: DateMap::new_bin(1, MapKind::Inserted, &f("last_subsidy")),
        last_subsidy_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_subsidy_in_dollars"),
        ),
        last_coinbase: DateMap::new_bin(1, MapKind::Inserted, &f("last_coinbase")),
        last_coinbase_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_coinbase_in_dollars"),
        ),
        last_fees: DateMap::new_bin(1, MapKind::Inserted, &f("last_fees")),
        last_fees_in_dollars: DateMap::new_bin(
            1,
            MapKind::Inserted,
            &f("last_fees_in_dollars"),
        ),
        difficulty: BiMap::new_bin(1, MapKind::Inserted, &f("difficulty")),
        block_size: HeightMap::new_bin(1, MapKind::Inserted, &f("block_size")),
        block_weight: HeightMap::new_bin(1, MapKind::Inserted, &f("block_weight")),
        block_vbytes: HeightMap::new_bin(1, MapKind::Inserted, &f("block_vbytes")),
        // NOTE(review): version is 2 here (and for inflation_rate below) while
        // every other map uses 1 — presumably a serialization format bump;
        // confirm before changing.
        block_interval: HeightMap::new_bin(2, MapKind::Inserted, &f("block_interval")),
        // ---
        // Computed
        // ---
        coinbase_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("coinbase_1y_sum")),
        coinbase_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinbase_in_dollars_1y_sum"),
        ),
        coinbase_in_dollars_1d_sum_1y_sma: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("coinbase_in_dollars_1d_sum_1y_sma"),
        ),
        cumulative_coinbase: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_coinbase")),
        cumulative_coinbase_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_coinbase_in_dollars"),
        ),
        fees_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("fees_1y_sum")),
        fees_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_in_dollars_1y_sum"),
        ),
        cumulative_fees: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_fees")),
        cumulative_fees_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_fees_in_dollars"),
        ),
        subsidy_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("subsidy_1y_sum")),
        subsidy_in_dollars_1y_sum: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_in_dollars_1y_sum"),
        ),
        cumulative_subsidy: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_subsidy")),
        cumulative_subsidy_in_dollars: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_subsidy_in_dollars"),
        ),
        subsidy_to_coinbase_ratio: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_to_coinbase_ratio"),
        ),
        subsidy_to_coinbase_1d_ratio: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("subsidy_to_coinbase_1d_ratio"),
        ),
        fees_to_coinbase_ratio: HeightMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_to_coinbase_ratio"),
        ),
        fees_to_coinbase_1d_ratio: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("fees_to_coinbase_1d_ratio"),
        ),
        annualized_issuance: DateMap::new_bin(1, MapKind::Computed, &f("annualized_issuance")),
        inflation_rate: DateMap::new_bin(2, MapKind::Computed, &f("inflation_rate")),
        yearly_inflation_rate: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("yearly_inflation_rate"),
        ),
        blocks_mined_1d_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1d_target"),
        ),
        blocks_mined_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sma")),
        blocks_mined_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sma")),
        blocks_mined_1w_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sum")),
        blocks_mined_1m_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sum")),
        blocks_mined_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1y_sum")),
        blocks_mined_1w_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1w_target"),
        ),
        blocks_mined_1m_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1m_target"),
        ),
        blocks_mined_1y_target: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("blocks_mined_1y_target"),
        ),
        difficulty_adjustment: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("difficulty_adjustment"),
        ),
        // block_size is the only recap that also tracks the daily sum.
        block_size_recap: RecapDataset::import(
            &f("block_size_1d"),
            RecapOptions::default()
                .add_sum()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        cumulative_block_size: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_block_size"),
        ),
        cumulative_block_size_gigabytes: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("cumulative_block_size_gigabytes"),
        ),
        block_weight_recap: RecapDataset::import(
            &f("block_weight_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        block_vbytes_recap: RecapDataset::import(
            &f("block_vbytes_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        block_interval_recap: RecapDataset::import(
            &f("block_interval_1d"),
            RecapOptions::default()
                .add_average()
                .add_max()
                .add_90p()
                .add_75p()
                .add_median()
                .add_25p()
                .add_10p()
                .add_min(),
        )?,
        hash_rate: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate")),
        hash_rate_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1w_sma")),
        hash_rate_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1m_sma")),
        hash_rate_2m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_2m_sma")),
        hash_price: DateMap::new_bin(1, MapKind::Computed, &f("hash_price")),
        hash_price_min: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_min")),
        hash_price_rebound: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_rebound")),
        puell_multiple: DateMap::new_bin(1, MapKind::Computed, &f("puell_multiple")),
    };
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));
    Ok(s)
}
/// Records this block's mining figures (coinbase, fees, subsidy, difficulty,
/// block stats) and, when this is the date's last block, rolls the day's
/// totals and "last block" snapshots into the per-date maps.
pub fn insert(
    &mut self,
    &InsertData {
        date_first_height,
        height,
        coinbase,
        fees,
        date_blocks_range,
        is_date_last_block,
        block_price,
        date,
        difficulty,
        block_size,
        block_vbytes,
        block_weight,
        block_interval,
        ..
    }: &InsertData,
) {
    // Per-block amounts, derived once up front.
    let summed_fees = Amount::from_sat(fees.iter().map(|fee| fee.to_sat()).sum());
    let subsidy = coinbase - summed_fees;

    self.coinbase.insert(height, coinbase.to_btc());
    let coinbase_in_dollars = self
        .coinbase_in_dollars
        .insert(height, (block_price * coinbase).to_dollar() as f32);

    self.fees.insert(height, summed_fees.to_btc());
    let summed_fees_in_dollars = self
        .fees_in_dollars
        .insert(height, (block_price * summed_fees).to_dollar() as f32);

    self.subsidy.insert(height, subsidy.to_btc());
    let subsidy_in_dollars = self
        .subsidy_in_dollars
        .insert(height, (block_price * subsidy).to_dollar() as f32);

    self.difficulty.height.insert(height, difficulty);
    self.block_size
        .insert(height, block_size as f32 / BYTES_IN_MB as f32);
    self.block_weight
        .insert(height, block_weight as f32 / BYTES_IN_MB as f32);
    self.block_vbytes.insert(height, block_vbytes);
    self.block_interval.insert(height, *block_interval);

    if is_date_last_block {
        // Daily sums over this date's block range.
        self.coinbase_1d_sum
            .insert(date, self.coinbase.sum_range(date_blocks_range));
        self.coinbase_in_dollars_1d_sum
            .insert(date, self.coinbase_in_dollars.sum_range(date_blocks_range));
        self.fees_1d_sum
            .insert(date, self.fees.sum_range(date_blocks_range));
        self.fees_in_dollars_1d_sum
            .insert(date, self.fees_in_dollars.sum_range(date_blocks_range));
        self.subsidy_1d_sum
            .insert(date, self.subsidy.sum_range(date_blocks_range));
        self.subsidy_in_dollars_1d_sum
            .insert(date, self.subsidy_in_dollars.sum_range(date_blocks_range));

        // Snapshots of the day's final block.
        self.last_coinbase.insert(date, coinbase.to_btc());
        self.last_coinbase_in_dollars
            .insert(date, coinbase_in_dollars);
        self.last_subsidy.insert(date, subsidy.to_btc());
        self.last_subsidy_in_dollars
            .insert(date, subsidy_in_dollars);
        self.last_fees.insert(date, summed_fees.to_btc());
        self.last_fees_in_dollars
            .insert(date, summed_fees_in_dollars);

        // height is used as a 0-based index, so + 1 yields the running count.
        let total_blocks_mined = self.total_blocks_mined.insert(date, height.to_usize() + 1);
        self.blocks_mined
            .insert(date, total_blocks_mined - date_first_height.to_usize());

        self.difficulty.date.insert(date, difficulty);
    }
}
/// Derives every computed mining series (rolling sums, SMAs, cumulative
/// totals, ratios, hash rate/price, per-day recaps) from the inserted maps.
///
/// `first_height`/`last_height` give each date's inclusive block range and
/// are used by the per-day recap datasets at the end.
///
/// Fixes vs. previous revision: removed two copy-pasted duplicate calls
/// (a second `cumulative_block_size.height.multi_insert_cumulative` and a
/// second identical `puell_multiple.multi_insert_divide`).
pub fn compute(
    &mut self,
    &ComputeData { heights, dates, .. }: &ComputeData,
    first_height: &mut DateMap<Height>,
    last_height: &mut DateMap<Height>,
) {
    // Rolling block-count sums.
    self.blocks_mined_1w_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_WEEK_IN_DAYS,
    );
    self.blocks_mined_1m_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_MONTH_IN_DAYS,
    );
    self.blocks_mined_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.blocks_mined,
        ONE_YEAR_IN_DAYS,
    );
    // Subsidy: yearly sums and cumulative totals (BTC and dollars).
    self.subsidy_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.subsidy_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.cumulative_subsidy
        .height
        .multi_insert_cumulative(heights, &mut self.subsidy);
    self.cumulative_subsidy
        .date
        .multi_insert_cumulative(dates, &mut self.subsidy_1d_sum);
    self.cumulative_subsidy_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.subsidy_in_dollars);
    self.cumulative_subsidy_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.subsidy_in_dollars_1d_sum);
    // Fees: yearly sums and cumulative totals.
    self.fees_1y_sum
        .multi_insert_last_x_sum(dates, &mut self.fees_1d_sum, ONE_YEAR_IN_DAYS);
    self.fees_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.fees_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.cumulative_fees
        .height
        .multi_insert_cumulative(heights, &mut self.fees);
    self.cumulative_fees
        .date
        .multi_insert_cumulative(dates, &mut self.fees_1d_sum);
    self.cumulative_fees_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.fees_in_dollars);
    self.cumulative_fees_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.fees_in_dollars_1d_sum);
    // Coinbase: yearly sums, yearly SMA, cumulative totals.
    self.coinbase_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.coinbase_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.coinbase_in_dollars_1y_sum.multi_insert_last_x_sum(
        dates,
        &mut self.coinbase_in_dollars_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.coinbase_in_dollars_1d_sum_1y_sma
        .multi_insert_simple_average(
            dates,
            &mut self.coinbase_in_dollars_1d_sum,
            ONE_YEAR_IN_DAYS,
        );
    self.cumulative_coinbase
        .height
        .multi_insert_cumulative(heights, &mut self.coinbase);
    self.cumulative_coinbase
        .date
        .multi_insert_cumulative(dates, &mut self.coinbase_1d_sum);
    self.cumulative_coinbase_in_dollars
        .height
        .multi_insert_cumulative(heights, &mut self.coinbase_in_dollars);
    self.cumulative_coinbase_in_dollars
        .date
        .multi_insert_cumulative(dates, &mut self.coinbase_in_dollars_1d_sum);
    // Subsidy/fees share of the coinbase.
    self.subsidy_to_coinbase_ratio.multi_insert_percentage(
        heights,
        &mut self.subsidy,
        &mut self.coinbase,
    );
    self.subsidy_to_coinbase_1d_ratio.multi_insert_percentage(
        dates,
        &mut self.subsidy_1d_sum,
        &mut self.coinbase_1d_sum,
    );
    self.fees_to_coinbase_ratio.multi_insert_percentage(
        heights,
        &mut self.fees,
        &mut self.coinbase,
    );
    self.fees_to_coinbase_1d_ratio.multi_insert_percentage(
        dates,
        &mut self.fees_1d_sum,
        &mut self.coinbase_1d_sum,
    );
    // Issuance and inflation. NOTE: annualized_issuance is computed exactly
    // like subsidy_1y_sum (see the struct's field comment).
    self.annualized_issuance.multi_insert_last_x_sum(
        dates,
        &mut self.subsidy_1d_sum,
        ONE_YEAR_IN_DAYS,
    );
    self.inflation_rate.multi_insert_simple_transform(
        dates,
        &mut self.subsidy_1d_sum,
        |subsidy_1d_sum, date| {
            subsidy_1d_sum * ONE_YEAR_IN_DAYS as f64
                / self.cumulative_subsidy.date.get_or_import(date).unwrap()
                * 100.0
        },
    );
    self.yearly_inflation_rate.multi_insert_percentage(
        dates,
        &mut self.annualized_issuance,
        &mut self.cumulative_subsidy.date,
    );
    // Target block counts vs. actual.
    self.blocks_mined_1d_target
        .multi_insert_const(dates, TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1w_target
        .multi_insert_const(dates, ONE_WEEK_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1m_target
        .multi_insert_const(dates, ONE_MONTH_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1y_target
        .multi_insert_const(dates, ONE_YEAR_IN_DAYS * TARGET_BLOCKS_PER_DAY);
    self.blocks_mined_1w_sma.multi_insert_simple_average(
        dates,
        &mut self.blocks_mined,
        ONE_WEEK_IN_DAYS,
    );
    self.blocks_mined_1m_sma.multi_insert_simple_average(
        dates,
        &mut self.blocks_mined,
        ONE_MONTH_IN_DAYS,
    );
    // Cumulative blockchain size. The date view takes the last height's
    // cumulative value rather than re-summing.
    // (A duplicated multi_insert_cumulative call was removed here.)
    self.cumulative_block_size
        .height
        .multi_insert_cumulative(heights, &mut self.block_size);
    self.cumulative_block_size.date.multi_insert_last(
        dates,
        &mut self.cumulative_block_size.height,
        last_height,
    );
    self.cumulative_block_size_gigabytes
        .multi_insert_simple_transform(heights, dates, &mut self.cumulative_block_size, &|v| {
            v / 1000.0
        });
    // https://hashrateindex.com/blog/what-is-bitcoins-hashrate/
    self.hash_rate.multi_insert(dates, |date| {
        let blocks_mined = self.blocks_mined.get_or_import(date).unwrap();
        let difficulty = self.difficulty.date.get_or_import(date).unwrap();
        (blocks_mined as f64 / (date.get_day_completion() * TARGET_BLOCKS_PER_DAY as f64)
            * difficulty
            * 2.0_f64.powi(32))
            / 600.0
            / 1_000_000_000_000_000_000.0
    });
    self.hash_rate_1w_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        ONE_WEEK_IN_DAYS,
    );
    self.hash_rate_1m_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        ONE_MONTH_IN_DAYS,
    );
    self.hash_rate_2m_sma.multi_insert_simple_average(
        dates,
        &mut self.hash_rate,
        2 * ONE_MONTH_IN_DAYS,
    );
    self.hash_price.multi_insert(dates, |date| {
        let coinbase_in_dollars = self.coinbase_in_dollars_1d_sum.get_or_import(date).unwrap();
        let hash_rate = self.hash_rate.get_or_import(date).unwrap();
        coinbase_in_dollars as f64 / hash_rate / 1_000.0
    });
    self.hash_price_min
        .multi_insert_min(dates, &mut self.hash_price, 0.0);
    self.hash_price_rebound.multi_insert_percentage(
        dates,
        &mut self.hash_price,
        &mut self.hash_price_min,
    );
    // Puell multiple: daily dollar issuance vs. its 1y SMA.
    // (A second identical multi_insert_divide call was removed here.)
    self.puell_multiple.multi_insert_divide(
        dates,
        &mut self.coinbase_in_dollars_1d_sum,
        &mut self.coinbase_in_dollars_1d_sum_1y_sma,
    );
    self.difficulty_adjustment.multi_insert_percentage_change(
        dates,
        &mut self.difficulty.date,
        ONE_DAY_IN_DAYS,
    );
    // Per-day distribution recaps over each date's inclusive block range.
    dates.iter().for_each(|date| {
        let first = first_height.get_or_import(date).unwrap();
        let last = last_height.get_or_import(date).unwrap();
        self.block_size_recap.compute(
            *date,
            &mut self
                .block_size
                .get_or_import_range_inclusive(first, last)
                .into_iter()
                .map(OrderedFloat)
                .collect_vec(),
        );
        self.block_weight_recap.compute(
            *date,
            &mut self
                .block_weight
                .get_or_import_range_inclusive(first, last)
                .into_iter()
                .map(OrderedFloat)
                .collect_vec(),
        );
        self.block_vbytes_recap.compute(
            *date,
            &mut self.block_vbytes.get_or_import_range_inclusive(first, last),
        );
        self.block_interval_recap.compute(
            *date,
            &mut self
                .block_interval
                .get_or_import_range_inclusive(first, last),
        );
    })
}
}
impl AnyDataset for MiningDataset {
    /// Exposes the minimal initial states so the generic dataset machinery
    /// can decide which heights/dates still need inserting or computing.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

347
_src/parser/datasets/mod.rs Normal file
View File

@@ -0,0 +1,347 @@
use std::{collections::BTreeMap, ops::RangeInclusive};
use allocative::Allocative;
use itertools::Itertools;
use log::info;
use rayon::prelude::*;
mod _traits;
mod address;
mod block_metadata;
mod coindays;
mod cointime;
mod constant;
mod date_metadata;
mod mining;
mod price;
mod subs;
mod transaction;
mod utxo;
pub use _traits::*;
pub use address::*;
pub use block_metadata::*;
pub use coindays::*;
pub use cointime::*;
pub use constant::*;
pub use date_metadata::*;
pub use mining::*;
pub use price::*;
use serde_json::Value;
pub use subs::*;
pub use transaction::*;
pub use utxo::*;
use crate::{
io::Json,
parser::{
databases::Databases,
states::{
AddressCohortsInputStates,
AddressCohortsOneShotStates,
AddressCohortsRealizedStates,
States,
UTXOCohortsOneShotStates,
// UTXOCohortsReceivedStates,
UTXOCohortsSentStates,
},
},
structs::{Amount, Config, Date, Height, Price, Timestamp},
};
/// Everything the datasets need to ingest one block: per-block amounts and
/// stats, date bookkeeping, and borrowed parser state/databases.
pub struct InsertData<'a> {
    pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>,
    pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>,
    pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>,
    pub amount_sent: Amount,
    pub block_interval: Timestamp, // time since the previous block
    pub block_price: Price,
    pub block_size: usize,
    pub block_vbytes: u64,
    pub block_weight: u64,
    pub coinbase: Amount,
    // When false, address datasets are skipped entirely (see Datasets::insert).
    pub compute_addresses: bool,
    pub databases: &'a Databases,
    pub date: Date,
    // Inclusive height range of all blocks belonging to `date`.
    pub date_blocks_range: &'a RangeInclusive<u32>,
    pub date_first_height: Height,
    pub difficulty: f64,
    pub fees: &'a Vec<Amount>, // one entry per transaction in the block
    pub height: Height,
    // True for the final block of `date`; triggers per-date rollups.
    pub is_date_last_block: bool,
    pub satblocks_destroyed: Amount,
    pub satdays_destroyed: Amount,
    pub states: &'a States,
    pub timestamp: Timestamp,
    pub transaction_count: usize,
    pub utxo_cohorts_one_shot_states: &'a UTXOCohortsOneShotStates,
    // pub utxo_cohorts_received_states: &'a UTXOCohortsReceivedStates,
    pub utxo_cohorts_sent_states: &'a UTXOCohortsSentStates,
}
/// The height and date keys that computed datasets should (re)process.
pub struct ComputeData<'a> {
    pub heights: &'a [Height],
    pub dates: &'a [Date],
}
/// Root container aggregating every dataset group; drives insert, compute and
/// export across all of them.
#[derive(Allocative)]
pub struct Datasets {
    min_initial_states: MinInitialStates,
    pub constant: ConstantDataset,
    pub address: AddressDatasets,
    pub block_metadata: BlockMetadataDataset,
    pub coindays: CoindaysDataset,
    pub cointime: CointimeDataset,
    pub date_metadata: DateMetadataDataset,
    pub mining: MiningDataset,
    pub price: PriceDatasets,
    pub transaction: TransactionDataset,
    pub utxo: UTXODatasets,
}
impl Datasets {
/// Opens every sub-dataset from disk, then computes the combined minimal
/// initial states used to skip work that is already up to date.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
    let price = PriceDatasets::import(config)?;
    let constant = ConstantDataset::import(config)?;
    let date_metadata = DateMetadataDataset::import(config)?;
    let cointime = CointimeDataset::import(config)?;
    let coindays = CoindaysDataset::import(config)?;
    let mining = MiningDataset::import(config)?;
    let block_metadata = BlockMetadataDataset::import(config)?;
    let transaction = TransactionDataset::import(config)?;
    let address = AddressDatasets::import(config)?;
    let utxo = UTXODatasets::import(config)?;
    let mut s = Self {
        min_initial_states: MinInitialStates::default(),
        address,
        block_metadata,
        cointime,
        coindays,
        constant,
        date_metadata,
        price,
        mining,
        transaction,
        utxo,
    };
    s.set_initial_states(config);
    info!("Imported datasets");
    Ok(s)
}
/// Recomputes the combined minimal initial states from all sub-datasets
/// (called after import and after each export).
fn set_initial_states(&mut self, config: &Config) {
    self.min_initial_states
        .consume(MinInitialStates::compute_from_datasets(self, config));
}
/// Feeds one block's data to every dataset group, skipping any group whose
/// maps are already up to date for this height/date.
pub fn insert(&mut self, insert_data: InsertData) {
    let height = insert_data.height;
    let date = insert_data.date;

    // Address datasets are optional (expensive); gated by the caller.
    if insert_data.compute_addresses {
        self.address.insert(&insert_data);
    }

    // UTXO datasets manage their own skipping internally.
    self.utxo.insert(&insert_data);

    if self.block_metadata.needs_insert(height, date) {
        self.block_metadata.insert(&insert_data);
    }
    if self.date_metadata.needs_insert(height, date) {
        self.date_metadata.insert(&insert_data);
    }
    if self.coindays.needs_insert(height, date) {
        self.coindays.insert(&insert_data);
    }
    if self.mining.needs_insert(height, date) {
        self.mining.insert(&insert_data);
    }
    if self.transaction.needs_insert(height, date) {
        self.transaction.insert(&insert_data);
    }
    if self.cointime.needs_insert(height, date) {
        self.cointime.insert(&insert_data);
    }
}
/// Runs the computed-series pass across all dataset groups. Order matters:
/// later groups borrow maps produced by earlier ones (e.g. cointime reads
/// mining's cumulative_subsidy and price's close).
pub fn compute(&mut self, compute_data: ComputeData) {
    if self.constant.should_compute(&compute_data) {
        self.constant.compute(&compute_data);
    }
    if self.mining.should_compute(&compute_data) {
        self.mining.compute(
            &compute_data,
            &mut self.date_metadata.first_height,
            &mut self.date_metadata.last_height,
        );
    }
    // NOTE(review): price/address/utxo run unconditionally — there is no
    // `should_compute` gate here; confirm that is intentional.
    self.price
        .compute(&compute_data, &mut self.mining.cumulative_subsidy);
    self.address.compute(
        &compute_data,
        &mut self.price.close,
        &mut self.mining.cumulative_subsidy,
        &mut self.price.market_cap,
    );
    self.utxo.compute(
        &compute_data,
        &mut self.price.close,
        &mut self.mining.cumulative_subsidy,
        &mut self.price.market_cap,
    );
    if self.transaction.should_compute(&compute_data) {
        self.transaction.compute(
            &compute_data,
            &mut self.mining.cumulative_subsidy,
            &mut self.mining.block_interval,
        );
    }
    if self.cointime.should_compute(&compute_data) {
        self.cointime.compute(
            &compute_data,
            &mut self.date_metadata.first_height,
            &mut self.date_metadata.last_height,
            &mut self.price.close,
            &mut self.mining.cumulative_subsidy,
            &mut self.address.cohorts.all.subs.capitalization.realized_cap,
            &mut self.address.cohorts.all.subs.capitalization.realized_price,
            &mut self.mining.inflation_rate,
            &mut self.mining.yearly_inflation_rate,
            &mut self.transaction.annualized_volume,
            &mut self.mining.cumulative_subsidy_in_dollars,
        );
    }
}
/// Flushes every dataset to disk (exports run in parallel via rayon), then
/// optionally records each map's last value to a JSON summary and refreshes
/// the minimal initial states.
pub fn export(&mut self, config: &Config, height: Height) -> color_eyre::Result<()> {
    // NOTE(review): "is_new" appears to mean the export reaches at/past the
    // previously exported last height (i.e. new ground) — confirm semantics.
    let is_new = self
        .min_initial_states
        .min_last_height()
        .map_or(true, |last| last <= height);
    // Sequential pre-export hooks, then the parallel export proper.
    self.to_mut_any_dataset_vec()
        .into_iter()
        .for_each(|dataset| dataset.pre_export());
    self.to_any_dataset_vec()
        .into_par_iter()
        .try_for_each(|dataset| -> color_eyre::Result<()> { dataset.export() })?;
    // Post-export hooks; while iterating, collect every map's last value so
    // they can be published in one JSON file.
    let mut path_to_last: BTreeMap<String, Value> = BTreeMap::default();
    self.to_mut_any_dataset_vec()
        .into_iter()
        .for_each(|dataset| {
            dataset.post_export();
            if is_new {
                dataset.to_all_map_vec().iter().for_each(|map| {
                    if map.path_last().is_some() {
                        if let Some(last_value) = map.last_value() {
                            path_to_last.insert(map.id(config), last_value);
                        }
                    }
                });
            }
        });
    if is_new {
        Json::export(&config.path_datasets_last_values(), &path_to_last)?;
    }
    // Exported data changes what still needs processing — recompute.
    self.set_initial_states(config);
    Ok(())
}
}
impl AnyDatasets for Datasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Every dataset as a shared trait object, in a stable order
    /// (price, constant, address group, utxo group, then the singles).
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> =
            vec![&self.price, &self.constant];
        datasets.extend(self.address.to_any_dataset_vec());
        datasets.extend(self.utxo.to_any_dataset_vec());
        datasets.push(&self.mining);
        datasets.push(&self.transaction);
        datasets.push(&self.block_metadata);
        datasets.push(&self.date_metadata);
        datasets.push(&self.cointime);
        datasets.push(&self.coindays);
        datasets
    }

    /// Same order as `to_any_dataset_vec`, but with mutable access.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> =
            vec![&mut self.price, &mut self.constant];
        datasets.extend(self.address.to_mut_any_dataset_vec());
        datasets.extend(self.utxo.to_mut_any_dataset_vec());
        datasets.push(&mut self.mining);
        datasets.push(&mut self.transaction);
        datasets.push(&mut self.block_metadata);
        datasets.push(&mut self.date_metadata);
        datasets.push(&mut self.cointime);
        datasets.push(&mut self.coindays);
        datasets
    }
}

View File

@@ -0,0 +1,733 @@
use std::collections::BTreeMap;
use allocative::Allocative;
use chrono::Days;
use color_eyre::eyre::Error;
use struct_iterable::Iterable;
use crate::{
parser::price::{Binance, Kibo, Kraken},
structs::{
Amount, BiMap, Config, Date, DateMap, DateMapChunkId, Height, HeightMapChunkId, MapKey,
MapKind, Timestamp, OHLC,
},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{AnyDataset, ComputeData, MinInitialStates, RatioDataset};
/// Price maps: the raw inserted OHLC plus everything derived from it (SMAs and
/// their ratio datasets, total returns, ATH tracking, drawdown), together with
/// in-memory caches for the external price sources (Kraken, Binance, Kibo).
#[derive(Allocative, Iterable)]
pub struct PriceDatasets {
    min_initial_states: MinInitialStates,
    // In-memory caches of fetched exchange/Kibo data (not persisted as maps).
    kraken_daily: Option<BTreeMap<Date, OHLC>>,
    kraken_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_daily: Option<BTreeMap<Date, OHLC>>,
    binance_har: Option<BTreeMap<u32, OHLC>>,
    kibo_by_height: BTreeMap<HeightMapChunkId, Vec<OHLC>>,
    kibo_by_date: BTreeMap<DateMapChunkId, BTreeMap<Date, OHLC>>,
    // Inserted: raw OHLC per height and per date (see `get_date_ohlc` / `get_height_ohlc`).
    pub ohlc: BiMap<OHLC>,
    // Computed: OHLC components split out of `ohlc`, and market cap.
    pub open: BiMap<f32>,
    pub high: BiMap<f32>,
    pub low: BiMap<f32>,
    pub close: BiMap<f32>,
    pub market_cap: BiMap<f32>,
    // Computed: simple moving averages of the close, each with a ratio dataset.
    pub price_1w_sma: BiMap<f32>,
    pub price_1w_sma_ratio: RatioDataset,
    pub price_1m_sma: BiMap<f32>,
    pub price_1m_sma_ratio: RatioDataset,
    pub price_1y_sma: BiMap<f32>,
    pub price_1y_sma_ratio: RatioDataset,
    pub price_2y_sma: BiMap<f32>,
    pub price_2y_sma_ratio: RatioDataset,
    pub price_4y_sma: BiMap<f32>,
    pub price_4y_sma_ratio: RatioDataset,
    pub price_8d_sma: BiMap<f32>,
    pub price_8d_sma_ratio: RatioDataset,
    pub price_13d_sma: BiMap<f32>,
    pub price_13d_sma_ratio: RatioDataset,
    pub price_21d_sma: BiMap<f32>,
    pub price_21d_sma_ratio: RatioDataset,
    pub price_34d_sma: BiMap<f32>,
    pub price_34d_sma_ratio: RatioDataset,
    pub price_55d_sma: BiMap<f32>,
    pub price_55d_sma_ratio: RatioDataset,
    pub price_89d_sma: BiMap<f32>,
    pub price_89d_sma_ratio: RatioDataset,
    pub price_144d_sma: BiMap<f32>,
    pub price_144d_sma_ratio: RatioDataset,
    pub price_200w_sma: BiMap<f32>,
    pub price_200w_sma_ratio: RatioDataset,
    // Computed: percentage change of the close over fixed horizons (date-only).
    pub price_1d_total_return: DateMap<f32>,
    pub price_1m_total_return: DateMap<f32>,
    pub price_6m_total_return: DateMap<f32>,
    pub price_1y_total_return: DateMap<f32>,
    pub price_2y_total_return: DateMap<f32>,
    pub price_3y_total_return: DateMap<f32>,
    pub price_4y_total_return: DateMap<f32>,
    pub price_6y_total_return: DateMap<f32>,
    pub price_8y_total_return: DateMap<f32>,
    pub price_10y_total_return: DateMap<f32>,
    // Computed: annualized 4-year compound return, in percent.
    pub price_4y_compound_return: DateMap<f32>,
    // projection via lowest 4y compound value
    // Computed: all-time-high tracking and derived maps.
    pub all_time_high: BiMap<f32>,
    pub all_time_high_date: DateMap<Date>,
    pub days_since_all_time_high: DateMap<u32>,
    pub max_days_between_all_time_highs: DateMap<u32>,
    pub max_years_between_all_time_highs: DateMap<f32>,
    pub market_price_to_all_time_high_ratio: BiMap<f32>,
    pub drawdown: BiMap<f32>,
    pub sats_per_dollar: BiMap<f32>,
    // volatility
}
impl PriceDatasets {
/// Open/create every price map under the configured datasets path, then derive
/// the minimal initial state needed to resume inserts/computes incrementally.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
    let path_dataset = config.path_datasets();
    // Helper building a map path relative to the datasets directory.
    let f = |s: &str| path_dataset.join(s);

    let mut s = Self {
        min_initial_states: MinInitialStates::default(),

        // External-source caches start empty and are filled lazily on fetch.
        binance_1mn: None,
        binance_daily: None,
        binance_har: None,
        kraken_1mn: None,
        kraken_daily: None,
        kibo_by_height: BTreeMap::default(),
        kibo_by_date: BTreeMap::default(),

        // ---
        // Inserted
        // ---
        // The raw OHLC lives at its own dedicated path, as JSON.
        ohlc: BiMap::new_json(1, MapKind::Inserted, &config.path_price()),

        // ---
        // Computed
        // ---
        open: BiMap::new_bin(1, MapKind::Computed, &f("open")),
        high: BiMap::new_bin(1, MapKind::Computed, &f("high")),
        low: BiMap::new_bin(1, MapKind::Computed, &f("low")),
        close: BiMap::new_bin(1, MapKind::Computed, &f("close")),
        market_cap: BiMap::new_bin(1, MapKind::Computed, &f("market_cap")),
        price_1w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1w_sma")),
        price_1w_sma_ratio: RatioDataset::import(&path_dataset, "price_1w_sma", config)?,
        price_1m_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1m_sma")),
        price_1m_sma_ratio: RatioDataset::import(&path_dataset, "price_1m_sma", config)?,
        price_1y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1y_sma")),
        price_1y_sma_ratio: RatioDataset::import(&path_dataset, "price_1y_sma", config)?,
        price_2y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_2y_sma")),
        price_2y_sma_ratio: RatioDataset::import(&path_dataset, "price_2y_sma", config)?,
        price_4y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_4y_sma")),
        price_4y_sma_ratio: RatioDataset::import(&path_dataset, "price_4y_sma", config)?,
        price_8d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_8d_sma")),
        price_8d_sma_ratio: RatioDataset::import(&path_dataset, "price_8d_sma", config)?,
        price_13d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_13d_sma")),
        price_13d_sma_ratio: RatioDataset::import(&path_dataset, "price_13d_sma", config)?,
        price_21d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_21d_sma")),
        price_21d_sma_ratio: RatioDataset::import(&path_dataset, "price_21d_sma", config)?,
        price_34d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_34d_sma")),
        price_34d_sma_ratio: RatioDataset::import(&path_dataset, "price_34d_sma", config)?,
        price_55d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_55d_sma")),
        price_55d_sma_ratio: RatioDataset::import(&path_dataset, "price_55d_sma", config)?,
        price_89d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_89d_sma")),
        price_89d_sma_ratio: RatioDataset::import(&path_dataset, "price_89d_sma", config)?,
        price_144d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_144d_sma")),
        price_144d_sma_ratio: RatioDataset::import(&path_dataset, "price_144d_sma", config)?,
        price_200w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_200w_sma")),
        price_200w_sma_ratio: RatioDataset::import(&path_dataset, "price_200w_sma", config)?,
        price_1d_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_1d_total_return"),
        ),
        price_1m_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_1m_total_return"),
        ),
        price_6m_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_6m_total_return"),
        ),
        price_1y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_1y_total_return"),
        ),
        price_2y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_2y_total_return"),
        ),
        price_3y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_3y_total_return"),
        ),
        price_4y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_4y_total_return"),
        ),
        price_6y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_6y_total_return"),
        ),
        price_8y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_8y_total_return"),
        ),
        price_10y_total_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_10y_total_return"),
        ),
        price_4y_compound_return: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("price_4y_compound_return"),
        ),
        all_time_high: BiMap::new_bin(1, MapKind::Computed, &f("all_time_high")),
        all_time_high_date: DateMap::new_bin(1, MapKind::Computed, &f("all_time_high_date")),
        days_since_all_time_high: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("days_since_all_time_high"),
        ),
        max_days_between_all_time_highs: DateMap::new_bin(
            1,
            MapKind::Computed,
            &f("max_days_between_all_time_highs"),
        ),
        // NOTE(review): version `2` here while every sibling map uses `1` —
        // presumably a deliberate format bump, but worth confirming.
        max_years_between_all_time_highs: DateMap::new_bin(
            2,
            MapKind::Computed,
            &f("max_years_between_all_time_highs"),
        ),
        market_price_to_all_time_high_ratio: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("market_price_to_all_time_high_ratio"),
        ),
        drawdown: BiMap::new_bin(1, MapKind::Computed, &f("drawdown")),
        sats_per_dollar: BiMap::new_bin(1, MapKind::Computed, &f("sats_per_dollar")),
    };

    // Derive how far back inserts/computes must resume from.
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));

    Ok(s)
}
/// Derive every computed price map from the inserted `ohlc` data.
///
/// Order matters: `close`/`high` must be filled before the SMAs, ratios, and
/// ATH maps that read from them.
pub fn compute(&mut self, compute_data: &ComputeData, circulating_supply: &mut BiMap<f64>) {
    let &ComputeData { dates, heights, .. } = compute_data;

    // OHLC components, split out of the inserted `ohlc` map.
    self.open
        .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.open);
    self.high
        .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.high);
    self.low
        .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.low);
    self.close
        .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.close);

    // market cap = close * circulating supply
    self.market_cap
        .multi_insert_multiply(heights, dates, &mut self.close, circulating_supply);

    // Simple moving averages of the close over various windows.
    self.price_1w_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        ONE_WEEK_IN_DAYS,
    );
    self.price_1m_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        ONE_MONTH_IN_DAYS,
    );
    self.price_1y_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        ONE_YEAR_IN_DAYS,
    );
    self.price_2y_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        2 * ONE_YEAR_IN_DAYS,
    );
    self.price_4y_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        4 * ONE_YEAR_IN_DAYS,
    );
    // Fibonacci-length SMAs (8, 13, 21, 34, 55, 89, 144 days).
    self.price_8d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 8);
    self.price_13d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 13);
    self.price_21d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 21);
    self.price_34d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 34);
    self.price_55d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 55);
    self.price_89d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 89);
    self.price_144d_sma
        .multi_insert_simple_average(heights, dates, &mut self.close, 144);
    self.price_200w_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.close,
        200 * ONE_WEEK_IN_DAYS,
    );

    // Total returns: percentage change of the daily close over fixed horizons.
    self.price_1d_total_return
        .multi_insert_percentage_change(dates, &mut self.close.date, 1);
    self.price_1m_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        ONE_MONTH_IN_DAYS,
    );
    self.price_6m_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        6 * ONE_MONTH_IN_DAYS,
    );
    self.price_1y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        ONE_YEAR_IN_DAYS,
    );
    self.price_2y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        2 * ONE_YEAR_IN_DAYS,
    );
    self.price_3y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        3 * ONE_YEAR_IN_DAYS,
    );
    self.price_4y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        4 * ONE_YEAR_IN_DAYS,
    );
    self.price_6y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        6 * ONE_YEAR_IN_DAYS,
    );
    self.price_8y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        8 * ONE_YEAR_IN_DAYS,
    );
    self.price_10y_total_return.multi_insert_percentage_change(
        dates,
        &mut self.close.date,
        10 * ONE_YEAR_IN_DAYS,
    );

    // 4y compound annual growth, in percent:
    // ((close / close_4y_ago)^(1/4) - 1) * 100.
    // NOTE(review): a missing 4y-old close falls back to the f32 default (0.0),
    // which makes the division produce inf — confirm intended for early dates.
    self.price_4y_compound_return
        .multi_insert_complex_transform(
            dates,
            &mut self.close.date,
            |(last_value, date, closes, _)| {
                let previous_value = date
                    .checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64))
                    .and_then(|date| closes.get_or_import(&Date::wrap(date)))
                    .unwrap_or_default();

                (((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0
            },
        );

    // Ratio datasets comparing the close to each SMA (must run after the SMAs).
    self.price_1w_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_1w_sma);
    self.price_1m_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_1m_sma);
    self.price_1y_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_1y_sma);
    self.price_2y_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_2y_sma);
    self.price_4y_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_4y_sma);
    self.price_8d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_8d_sma);
    self.price_13d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_13d_sma);
    self.price_21d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_21d_sma);
    self.price_34d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_34d_sma);
    self.price_55d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_55d_sma);
    self.price_89d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_89d_sma);
    self.price_144d_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_144d_sma);
    self.price_200w_sma_ratio
        .compute(compute_data, &mut self.close, &mut self.price_200w_sma);

    // All-time-high tracking: running max of the high, and the close relative
    // to it.
    self.all_time_high
        .multi_insert_max(heights, dates, &mut self.high);

    self.market_price_to_all_time_high_ratio
        .multi_insert_percentage(heights, dates, &mut self.close, &mut self.all_time_high);

    self.all_time_high_date.multi_insert_complex_transform(
        dates,
        &mut self.all_time_high.date,
        |(value, date, _, map)| {
            // If today's high equals the running ATH, today is the ATH date;
            // otherwise carry the previous day's ATH date forward.
            let high = self.high.date.get_or_import(date).unwrap();

            let is_ath = high == value;

            if is_ath {
                *date
            } else {
                let previous_date = date.checked_sub(1).unwrap();

                *map.get_or_import(&previous_date).as_ref().unwrap_or(date)
            }
        },
    );

    self.days_since_all_time_high.multi_insert_simple_transform(
        dates,
        &mut self.all_time_high_date,
        |value, key| key.difference_in_days_between(value),
    );

    self.max_days_between_all_time_highs
        .multi_insert_max(dates, &mut self.days_since_all_time_high);

    self.max_years_between_all_time_highs
        .multi_insert_simple_transform(
            dates,
            &mut self.max_days_between_all_time_highs,
            |days, _| (days as f64 / ONE_YEAR_IN_DAYS as f64) as f32,
        );

    // Drawdown from ATH, as a negative percentage of the ratio above.
    self.drawdown.multi_insert_simple_transform(
        heights,
        dates,
        &mut self.market_price_to_all_time_high_ratio,
        &|v| -(100.0 - v),
    );

    // Satoshis one dollar buys at the close.
    self.sats_per_dollar.multi_insert_simple_transform(
        heights,
        dates,
        &mut self.close,
        &|price| Amount::ONE_BTC_F32 / price,
    );
}
/// Return the daily OHLC for `date`, fetching it from an external source
/// (Kraken, then Binance, then Kibo) and caching it when not already stored.
pub fn get_date_ohlc(&mut self, date: Date) -> color_eyre::Result<OHLC> {
    // Fast path: the value is already safely persisted.
    if self.ohlc.date.is_key_safe(date) {
        return Ok(self.ohlc.date.get_or_import(&date).unwrap().to_owned());
    }

    // Try each price source in order of preference; fail only if all do.
    let fetched = self
        .get_from_daily_kraken(&date)
        .or_else(|_| self.get_from_daily_binance(&date))
        .or_else(|_| self.get_from_date_kibo(&date))?;

    self.ohlc.date.insert(date, fetched);

    Ok(fetched)
}
fn get_from_date_kibo(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
let chunk_id = date.to_chunk_id();
#[allow(clippy::map_entry)]
if !self.kibo_by_date.contains_key(&chunk_id)
|| self
.kibo_by_date
.get(&chunk_id)
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.kibo_by_date
.insert(chunk_id, Kibo::fetch_date_prices(chunk_id)?);
}
self.kibo_by_date
.get(&chunk_id)
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date in satonomics"))
}
/// Look up the daily Kraken OHLC for `date`, refreshing the in-memory cache
/// when it is absent or its newest day predates `date`.
fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
    let stale = self
        .kraken_daily
        .as_ref()
        .map_or(true, |tree| tree.last_key_value().unwrap().0 < date);

    if stale {
        self.kraken_daily.replace(Kraken::fetch_daily_prices()?);
    }

    self.kraken_daily
        .as_ref()
        .unwrap()
        .get(date)
        .cloned()
        .ok_or(Error::msg("Couldn't find date"))
}
/// Look up the daily Binance OHLC for `date`, refreshing the in-memory cache
/// when it is absent or its newest day predates `date`.
fn get_from_daily_binance(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
    let stale = self
        .binance_daily
        .as_ref()
        .map_or(true, |tree| tree.last_key_value().unwrap().0 < date);

    if stale {
        self.binance_daily.replace(Binance::fetch_daily_prices()?);
    }

    self.binance_daily
        .as_ref()
        .unwrap()
        .get(date)
        .cloned()
        .ok_or(Error::msg("Couldn't find date"))
}
/// Return the OHLC for the block at `height`, reading it from the cache or
/// reconstructing it from 1mn exchange candles / a local HAR file / Kibo.
///
/// Panics with a how-to-fix message when every source fails, and when a
/// non-genesis block is given without a `previous_timestamp`.
pub fn get_height_ohlc(
    &mut self,
    height: Height,
    timestamp: Timestamp,
    previous_timestamp: Option<Timestamp>,
    config: &Config,
) -> color_eyre::Result<OHLC> {
    // Fast path: already cached/persisted.
    if let Some(ohlc) = self.ohlc.height.get_or_import(&height) {
        return Ok(ohlc);
    }

    let timestamp = timestamp.to_floored_seconds();

    // Only the very first block may legitimately lack a previous timestamp.
    if previous_timestamp.is_none() && !height.is_first() {
        panic!("Shouldn't be possible");
    }

    let previous_timestamp = previous_timestamp.map(|t| t.to_floored_seconds());

    // Sources are tried in order: Kraken 1mn, Binance 1mn, a manually exported
    // Binance HAR file, and finally Kibo; the panic message below documents
    // the manual HAR workaround when everything fails.
    let ohlc = self
        .get_from_1mn_kraken(timestamp, previous_timestamp)
        .unwrap_or_else(|_| {
            self.get_from_1mn_binance(timestamp, previous_timestamp)
                .unwrap_or_else(|_| {
                    self.get_from_har_binance(timestamp, previous_timestamp, config)
                        .unwrap_or_else(|_| {
                            self.get_from_height_kibo(&height).unwrap_or_else(|_| {
                                let date = timestamp.to_date();

                                panic!(
                                    "Can't find the price for: height: {height} - date: {date}
1mn APIs are limited to the last 16 hours for Binance's and the last 10 hours for Kraken's
How to fix this:
1. Go to https://www.binance.com/en/trade/BTC_USDT?type=spot
2. Select 1mn interval
3. Open the inspector/dev tools
4. Go to the Network Tab
5. Filter URLs by 'uiKlines'
6. Go back to the chart and scroll until you pass the date mentioned few lines ago
7. Go back to the dev tools
8. Export to a har file (if there is no explicit button, click on the cog button)
9. Move the file to 'parser/imports/binance.har'
"
                                )
                            })
                        })
                })
        });

    self.ohlc.height.insert(height, ohlc);

    Ok(ohlc)
}
/// Fetch the block-level OHLC for `height` from Kibo, (re)downloading the
/// chunk when it is missing or doesn't reach `height` yet.
fn get_from_height_kibo(&mut self, height: &Height) -> color_eyre::Result<OHLC> {
    let chunk_id = height.to_chunk_id();

    // A chunk starts at `chunk_id` and holds one OHLC per block, so it covers
    // `height` only when `chunk_id + len > height`.
    let needs_fetch = self.kibo_by_height.get(&chunk_id).map_or(true, |prices| {
        chunk_id.to_usize() + prices.len() <= height.to_usize()
    });

    if needs_fetch {
        self.kibo_by_height
            .insert(chunk_id, Kibo::fetch_height_prices(chunk_id)?);
    }

    self.kibo_by_height
        .get(&chunk_id)
        .unwrap()
        .get(height.to_serialized_key().to_usize())
        .cloned()
        .ok_or(Error::msg("Couldn't find height in kibo"))
}
/// Build the block OHLC from Kraken's 1mn candles, re-downloading them when
/// the cache is absent or ends at/before `timestamp`.
fn get_from_1mn_kraken(
    &mut self,
    timestamp: Timestamp,
    previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
    let stale = self
        .kraken_1mn
        .as_ref()
        .map_or(true, |tree| tree.last_key_value().unwrap().0 <= &timestamp);

    if stale {
        self.kraken_1mn.replace(Kraken::fetch_1mn_prices()?);
    }

    Self::find_height_ohlc(&self.kraken_1mn, timestamp, previous_timestamp, "kraken 1m")
}
/// Build the block OHLC from Binance's 1mn candles, re-downloading them when
/// the cache is absent or ends at/before `timestamp`.
fn get_from_1mn_binance(
    &mut self,
    timestamp: Timestamp,
    previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
    let stale = self
        .binance_1mn
        .as_ref()
        .map_or(true, |tree| tree.last_key_value().unwrap().0 <= &timestamp);

    if stale {
        self.binance_1mn.replace(Binance::fetch_1mn_prices()?);
    }

    Self::find_height_ohlc(
        &self.binance_1mn,
        timestamp,
        previous_timestamp,
        "binance 1m",
    )
}
/// Build the block OHLC from a manually exported Binance HAR file.
///
/// The file is parsed at most once; a missing or unreadable file yields an
/// empty map (so the lookup simply fails instead of erroring).
fn get_from_har_binance(
    &mut self,
    timestamp: Timestamp,
    previous_timestamp: Option<Timestamp>,
    config: &Config,
) -> color_eyre::Result<OHLC> {
    self.binance_har
        .get_or_insert_with(|| Binance::read_har_file(config).unwrap_or_default());

    Self::find_height_ohlc(
        &self.binance_har,
        timestamp,
        previous_timestamp,
        "binance har",
    )
}
/// Aggregate minute candles into one block-level OHLC spanning
/// `(previous_timestamp, timestamp]`: it opens at the previous block's close
/// and is widened by every minute candle strictly after the start.
///
/// Errors when either boundary timestamp is missing from `tree`.
/// Panics if `tree` is `None` (callers populate it first).
fn find_height_ohlc(
    tree: &Option<BTreeMap<u32, OHLC>>,
    timestamp: Timestamp,
    previous_timestamp: Option<Timestamp>,
    name: &str,
) -> color_eyre::Result<OHLC> {
    let tree = tree.as_ref().unwrap();

    let err = Error::msg(format!("Couldn't find timestamp in {name}"));

    // No previous block (genesis): seed from a default (zeroed) OHLC instead
    // of requiring a previous candle.
    let previous_ohlc = previous_timestamp
        .map_or(Some(OHLC::default()), |previous_timestamp| {
            tree.get(&previous_timestamp).cloned()
        });

    // Only used to verify the end boundary exists in the tree.
    let last_ohlc = tree.get(&timestamp);

    if previous_ohlc.is_none() || last_ohlc.is_none() {
        return Err(err);
    }

    let previous_ohlc = previous_ohlc.unwrap();

    // Start every component at the previous close, then widen below.
    let mut final_ohlc = OHLC {
        open: previous_ohlc.close,
        high: previous_ohlc.close,
        low: previous_ohlc.close,
        close: previous_ohlc.close,
    };

    let start = previous_timestamp.unwrap_or_default();
    let end = timestamp;

    // Otherwise it's a re-org
    if start < end {
        // skip(1) drops the candle at `start`, which belongs to the previous
        // block; `&*` derefs the Timestamp wrappers to the tree's u32 keys.
        tree.range(&*start..=&*end).skip(1).for_each(|(_, ohlc)| {
            if ohlc.high > final_ohlc.high {
                final_ohlc.high = ohlc.high
            }
            if ohlc.low < final_ohlc.low {
                final_ohlc.low = ohlc.low
            }
            final_ohlc.close = ohlc.close;
        });
    }

    Ok(final_ohlc)
}
}
impl AnyDataset for PriceDatasets {
    // Expose the resume-state to the generic insert/compute/export machinery.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,123 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::CapitalizationState,
},
structs::{BiMap, Config, MapKind, MapPath},
utils::ONE_MONTH_IN_DAYS,
};
use super::RatioDataset;
/// Realized-capitalization maps for one cohort: the inserted realized cap plus
/// the maps computed from it (realized price, its ratio dataset, and the
/// 1-month cap net change).
#[derive(Allocative, Iterable)]
pub struct CapitalizationDataset {
    min_initial_states: MinInitialStates,
    // Inserted: the cohort's realized cap, in dollars (see `insert`).
    pub realized_cap: BiMap<f32>,
    // Computed: realized cap divided by the cohort's supply.
    pub realized_price: BiMap<f32>,
    // Computed: 1-month net change of the realized cap.
    realized_cap_1m_net_change: BiMap<f32>,
    // Computed: ratio dataset built from market price vs realized price.
    realized_price_ratio: RatioDataset,
}
impl CapitalizationDataset {
/// Open/create the capitalization maps under `path`, optionally namespaced in
/// a `{name}/` sub-directory (the ratio dataset instead uses a `{name}-`
/// prefixed id).
pub fn import(
    path: &MapPath,
    name: &Option<String>,
    config: &Config,
) -> color_eyre::Result<Self> {
    // Build a map path, nesting it under `name` when one is given.
    let f = |s: &str| {
        if let Some(name) = name {
            path.join(&format!("{name}/{s}"))
        } else {
            path.join(s)
        }
    };

    let mut s = Self {
        min_initial_states: MinInitialStates::default(),

        // ---
        // Inserted
        // ---
        realized_cap: BiMap::new_bin(1, MapKind::Inserted, &f("realized_cap")),

        // ---
        // Computed
        // ---
        realized_cap_1m_net_change: BiMap::new_bin(
            1,
            MapKind::Computed,
            &f("realized_cap_1m_net_change"),
        ),
        realized_price: BiMap::new_bin(1, MapKind::Computed, &f("realized_price")),
        realized_price_ratio: RatioDataset::import(
            path,
            &format!(
                "{}realized_price",
                name.as_ref().map_or("".to_owned(), |n| format!("{n}-"))
            ),
            config,
        )?,
    };

    // Derive how far back inserts/computes must resume from.
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));

    Ok(s)
}
/// Store the cohort's realized cap (in dollars) for this block; when the block
/// closes a day, mirror the value into the date-indexed half of the map.
pub fn insert(&mut self, insert_data: &InsertData, state: &CapitalizationState) {
    let &InsertData {
        height,
        date,
        is_date_last_block,
        ..
    } = insert_data;

    let realized_cap_usd = state.realized_cap().to_dollar() as f32;
    let inserted = self.realized_cap.height.insert(height, realized_cap_usd);

    if is_date_last_block {
        self.realized_cap.date.insert(date, inserted);
    }
}
/// Derive the maps computed from the inserted realized cap: its 1-month net
/// change, the realized price (cap / supply), and the realized-price ratio.
pub fn compute(
    &mut self,
    compute_data: &ComputeData,
    closes: &mut BiMap<f32>,
    cohort_supply: &mut BiMap<f64>,
) {
    let &ComputeData { heights, dates, .. } = compute_data;

    // Only reads the realized cap, so it is independent of the steps below.
    self.realized_cap_1m_net_change.multi_insert_net_change(
        heights,
        dates,
        &mut self.realized_cap,
        ONE_MONTH_IN_DAYS,
    );

    // realized price = realized cap / cohort supply
    self.realized_price.multi_insert_divide(
        heights,
        dates,
        &mut self.realized_cap,
        cohort_supply,
    );

    // Must run after `realized_price` has been filled.
    self.realized_price_ratio
        .compute(compute_data, closes, &mut self.realized_price);
}
}
impl AnyDataset for CapitalizationDataset {
    // Expose the resume-state to the generic insert/compute/export machinery.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,86 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::InputState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath},
};
/// Per-cohort transaction-input maps: count (height + date), per-block volume,
/// and the daily volume sum.
#[derive(Allocative, Iterable)]
pub struct InputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<u64>,
    // Per-block input volume, in BTC (see `insert`).
    pub volume: HeightMap<f64>,
    // Sum of `volume` over each date's block range.
    pub volume_1d_sum: DateMap<f64>,
    // Computed
    // add inputs_per_second
}
impl InputSubDataset {
/// Open/create the input maps under `path`, optionally namespaced in a
/// `{name}/` sub-directory.
pub fn import(
    path: &MapPath,
    name: &Option<String>,
    config: &Config,
) -> color_eyre::Result<Self> {
    // Build a map path, nesting it under `name` when one is given.
    let f = |s: &str| {
        if let Some(name) = name {
            path.join(&format!("{name}/{s}"))
        } else {
            path.join(s)
        }
    };

    let mut s = Self {
        min_initial_states: MinInitialStates::default(),

        // ---
        // Inserted
        // ---
        count: BiMap::new_bin(1, MapKind::Inserted, &f("input_count")),
        volume: HeightMap::new_bin(1, MapKind::Inserted, &f("input_volume")),
        volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("input_volume_1d_sum")),
    };

    // Derive how far back inserts must resume from.
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s, config));

    Ok(s)
}
/// Record per-block input count and volume; on a date's final block, mirror
/// the count into the date map and store the day's summed volume.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        date_blocks_range,
        ..
    }: &InsertData,
    state: &InputState,
) {
    // Height-indexed values are written for every block.
    let block_count = self
        .count
        .height
        .insert(height, state.count().round() as u64);
    self.volume.insert(height, state.volume().to_btc());

    // Date-indexed values are written once per day, on its last block.
    if !is_date_last_block {
        return;
    }

    self.count.date.insert(date, block_count);

    let day_volume = self.volume.sum_range(date_blocks_range);
    self.volume_1d_sum.insert(date, day_volume);
}
}
impl AnyDataset for InputSubDataset {
    // Expose the resume-state to the generic insert/compute/export machinery.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,90 @@
use allocative::Allocative;
mod capitalization;
mod input;
mod price_paid;
mod ratio;
mod realized;
mod recap;
mod supply;
mod unrealized;
mod utxo;
pub use capitalization::*;
pub use input::*;
pub use price_paid::*;
pub use ratio::*;
pub use realized::*;
pub use recap::*;
use struct_iterable::Iterable;
pub use supply::*;
pub use unrealized::*;
pub use utxo::*;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, MapPath},
};
use super::AnyDatasetGroup;
/// Bundle of every per-cohort dataset (capitalization, inputs, price paid,
/// realized/unrealized, supply, UTXOs) imported from one shared parent path.
#[derive(Allocative, Iterable)]
pub struct SubDataset {
    pub capitalization: CapitalizationDataset,
    pub input: InputSubDataset,
    // pub output: OutputSubDataset,
    pub price_paid: PricePaidSubDataset,
    pub realized: RealizedSubDataset,
    pub supply: SupplySubDataset,
    pub unrealized: UnrealizedSubDataset,
    pub utxo: UTXOSubDataset,
}
impl SubDataset {
    /// Import every sub-dataset living under `parent_path` (optionally
    /// namespaced by `name`), propagating the first import error encountered.
    pub fn import(
        parent_path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        Ok(Self {
            capitalization: CapitalizationDataset::import(parent_path, name, config)?,
            input: InputSubDataset::import(parent_path, name, config)?,
            // output: OutputSubDataset::import(parent_path)?,
            price_paid: PricePaidSubDataset::import(parent_path, name, config)?,
            realized: RealizedSubDataset::import(parent_path, name, config)?,
            supply: SupplySubDataset::import(parent_path, name, config)?,
            unrealized: UnrealizedSubDataset::import(parent_path, name, config)?,
            utxo: UTXOSubDataset::import(parent_path, name, config)?,
        })
    }
}
impl AnyDatasetGroup for SubDataset {
    // Shared and mutable accessors below must stay in sync: same datasets,
    // same order. The `output` dataset is currently disabled in both.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        vec![
            &self.capitalization,
            &self.price_paid,
            &self.realized,
            &self.supply,
            &self.unrealized,
            &self.utxo,
            &self.input,
            // &self.output,
        ]
    }
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        vec![
            &mut self.capitalization,
            &mut self.price_paid,
            &mut self.realized,
            &mut self.supply,
            &mut self.unrealized,
            &mut self.utxo,
            &mut self.input,
            // &mut self.output,
        ]
    }
}

View File

@@ -0,0 +1,87 @@
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::OutputState,
structs::{AnyBiMap, BiMap},
utils::ONE_YEAR_IN_DAYS,
};
// NOTE(review): this dataset appears disabled (it is commented out where
// sibling sub-datasets are wired up) and lacks the `Allocative`/`Iterable`
// derives the others carry — confirm its status before re-enabling.
pub struct OutputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<f32>,
    pub volume: BiMap<f32>,
    // Computed
    // 1-year rolling sum of `volume` (see `compute`).
    pub annualized_volume: BiMap<f32>,
    // annualized volume / cohort supply (see `compute`).
    pub velocity: BiMap<f32>,
    // add outputs_per_second
}
impl OutputSubDataset {
/// Open/create the output maps under `parent_path`.
///
/// NOTE(review): signatures here lag behind sibling datasets (`BiMap::new_bin`
/// without a `MapKind`, `compute_from_dataset` without a `Config`, string
/// paths instead of `MapPath`) — confirm against the current APIs before
/// re-enabling this dataset.
pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
    // Build a map path relative to the parent directory.
    let f = |s: &str| format!("{parent_path}/{s}");

    let mut s = Self {
        min_initial_states: MinInitialStates::default(),

        count: BiMap::new_bin(1, &f("output_count")),
        volume: BiMap::new_bin(1, &f("output_volume")),
        annualized_volume: BiMap::new_bin(1, &f("annualized_output_volume")),
        velocity: BiMap::new_bin(1, &f("output_velocity")),
    };

    // Derive how far back inserts/computes must resume from.
    s.min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&s));

    Ok(s)
}
/// Record per-block output count and volume; on a date's final block, mirror
/// the count and store the day's summed volume into the date maps.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        date_blocks_range,
        ..
    }: &InsertData,
    state: &OutputState,
) {
    let count = self.count.height.insert(height, state.count);
    self.volume.height.insert(height, state.volume);

    if is_date_last_block {
        self.count.date.insert(date, count);
        // Daily volume: sum of the height values over the date's block range.
        self.volume.date_insert_sum_range(date, date_blocks_range);
    }
}
/// Derive the annualized output volume (1-year rolling sum) and velocity
/// (annualized volume / cohort supply).
pub fn compute(
    &mut self,
    &ComputeData { heights, dates }: &ComputeData,
    cohort_supply: &mut BiMap<f32>,
) {
    self.annualized_volume.multi_insert_last_x_sum(
        heights,
        dates,
        &mut self.volume,
        ONE_YEAR_IN_DAYS,
    );

    // Depends on `annualized_volume`, so it must run second.
    self.velocity.multi_insert_divide(
        heights,
        dates,
        &mut self.annualized_volume,
        cohort_supply,
    );
}
}
impl AnyDataset for OutputSubDataset {
    // Expose the resume-state to the generic insert/compute/export machinery.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,266 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::PricePaidState,
},
structs::{BiMap, Config, Date, Height, MapKind, MapPath},
};
#[derive(Allocative, Iterable)]
pub struct PricePaidSubDataset {
min_initial_states: MinInitialStates,
pp_median: BiMap<f32>,
pp_95p: BiMap<f32>,
pp_90p: BiMap<f32>,
pp_85p: BiMap<f32>,
pp_80p: BiMap<f32>,
pp_75p: BiMap<f32>,
pp_70p: BiMap<f32>,
pp_65p: BiMap<f32>,
pp_60p: BiMap<f32>,
pp_55p: BiMap<f32>,
pp_45p: BiMap<f32>,
pp_40p: BiMap<f32>,
pp_35p: BiMap<f32>,
pp_30p: BiMap<f32>,
pp_25p: BiMap<f32>,
pp_20p: BiMap<f32>,
pp_15p: BiMap<f32>,
pp_10p: BiMap<f32>,
pp_05p: BiMap<f32>,
}
impl PricePaidSubDataset {
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
pp_median: BiMap::new_bin(1, MapKind::Inserted, &f("median_price_paid")),
pp_95p: BiMap::new_bin(1, MapKind::Inserted, &f("95p_price_paid")),
pp_90p: BiMap::new_bin(1, MapKind::Inserted, &f("90p_price_paid")),
pp_85p: BiMap::new_bin(1, MapKind::Inserted, &f("85p_price_paid")),
pp_80p: BiMap::new_bin(1, MapKind::Inserted, &f("80p_price_paid")),
pp_75p: BiMap::new_bin(1, MapKind::Inserted, &f("75p_price_paid")),
pp_70p: BiMap::new_bin(1, MapKind::Inserted, &f("70p_price_paid")),
pp_65p: BiMap::new_bin(1, MapKind::Inserted, &f("65p_price_paid")),
pp_60p: BiMap::new_bin(1, MapKind::Inserted, &f("60p_price_paid")),
pp_55p: BiMap::new_bin(1, MapKind::Inserted, &f("55p_price_paid")),
pp_45p: BiMap::new_bin(1, MapKind::Inserted, &f("45p_price_paid")),
pp_40p: BiMap::new_bin(1, MapKind::Inserted, &f("40p_price_paid")),
pp_35p: BiMap::new_bin(1, MapKind::Inserted, &f("35p_price_paid")),
pp_30p: BiMap::new_bin(1, MapKind::Inserted, &f("30p_price_paid")),
pp_25p: BiMap::new_bin(1, MapKind::Inserted, &f("25p_price_paid")),
pp_20p: BiMap::new_bin(1, MapKind::Inserted, &f("20p_price_paid")),
pp_15p: BiMap::new_bin(1, MapKind::Inserted, &f("15p_price_paid")),
pp_10p: BiMap::new_bin(1, MapKind::Inserted, &f("10p_price_paid")),
pp_05p: BiMap::new_bin(1, MapKind::Inserted, &f("05p_price_paid")),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
is_date_last_block,
date,
..
}: &InsertData,
state: &PricePaidState,
) {
let pp_05p = state.pp_05p();
let pp_10p = state.pp_10p();
let pp_15p = state.pp_15p();
let pp_20p = state.pp_20p();
let pp_25p = state.pp_25p();
let pp_30p = state.pp_30p();
let pp_35p = state.pp_35p();
let pp_40p = state.pp_40p();
let pp_45p = state.pp_45p();
let pp_median = state.pp_median();
let pp_55p = state.pp_55p();
let pp_60p = state.pp_60p();
let pp_65p = state.pp_65p();
let pp_70p = state.pp_70p();
let pp_75p = state.pp_75p();
let pp_80p = state.pp_80p();
let pp_85p = state.pp_85p();
let pp_90p = state.pp_90p();
let pp_95p = state.pp_95p();
// Check if iter was empty
if pp_05p.is_none() {
self.insert_height_default(height);
if is_date_last_block {
self.insert_date_default(date);
}
return;
}
let pp_05p = self
.pp_05p
.height
.insert(height, pp_05p.unwrap().to_dollar() as f32);
let pp_10p = self
.pp_10p
.height
.insert(height, pp_10p.unwrap().to_dollar() as f32);
let pp_15p = self
.pp_15p
.height
.insert(height, pp_15p.unwrap().to_dollar() as f32);
let pp_20p = self
.pp_20p
.height
.insert(height, pp_20p.unwrap().to_dollar() as f32);
let pp_25p = self
.pp_25p
.height
.insert(height, pp_25p.unwrap().to_dollar() as f32);
let pp_30p = self
.pp_30p
.height
.insert(height, pp_30p.unwrap().to_dollar() as f32);
let pp_35p = self
.pp_35p
.height
.insert(height, pp_35p.unwrap().to_dollar() as f32);
let pp_40p = self
.pp_40p
.height
.insert(height, pp_40p.unwrap().to_dollar() as f32);
let pp_45p = self
.pp_45p
.height
.insert(height, pp_45p.unwrap().to_dollar() as f32);
let pp_median = self
.pp_median
.height
.insert(height, pp_median.unwrap().to_dollar() as f32);
let pp_55p = self
.pp_55p
.height
.insert(height, pp_55p.unwrap().to_dollar() as f32);
let pp_60p = self
.pp_60p
.height
.insert(height, pp_60p.unwrap().to_dollar() as f32);
let pp_65p = self
.pp_65p
.height
.insert(height, pp_65p.unwrap().to_dollar() as f32);
let pp_70p = self
.pp_70p
.height
.insert(height, pp_70p.unwrap().to_dollar() as f32);
let pp_75p = self
.pp_75p
.height
.insert(height, pp_75p.unwrap().to_dollar() as f32);
let pp_80p = self
.pp_80p
.height
.insert(height, pp_80p.unwrap().to_dollar() as f32);
let pp_85p = self
.pp_85p
.height
.insert(height, pp_85p.unwrap().to_dollar() as f32);
let pp_90p = self
.pp_90p
.height
.insert(height, pp_90p.unwrap().to_dollar() as f32);
let pp_95p = self
.pp_95p
.height
.insert(height, pp_95p.unwrap().to_dollar() as f32);
if is_date_last_block {
self.pp_05p.date.insert(date, pp_05p);
self.pp_10p.date.insert(date, pp_10p);
self.pp_15p.date.insert(date, pp_15p);
self.pp_20p.date.insert(date, pp_20p);
self.pp_25p.date.insert(date, pp_25p);
self.pp_30p.date.insert(date, pp_30p);
self.pp_35p.date.insert(date, pp_35p);
self.pp_40p.date.insert(date, pp_40p);
self.pp_45p.date.insert(date, pp_45p);
self.pp_median.date.insert(date, pp_median);
self.pp_55p.date.insert(date, pp_55p);
self.pp_60p.date.insert(date, pp_60p);
self.pp_65p.date.insert(date, pp_65p);
self.pp_70p.date.insert(date, pp_70p);
self.pp_75p.date.insert(date, pp_75p);
self.pp_80p.date.insert(date, pp_80p);
self.pp_85p.date.insert(date, pp_85p);
self.pp_90p.date.insert(date, pp_90p);
self.pp_95p.date.insert(date, pp_95p);
}
}
/// Writes each inserted map's default value at `height`.
///
/// Used when the percentile source iterator was empty for this block, so
/// that every map still has an entry for the height.
fn insert_height_default(&mut self, height: Height) {
    for map in self.inserted_as_mut_vec() {
        map.height.insert_default(height);
    }
}
/// Writes each inserted map's default value at `date`.
///
/// Date-side counterpart of `insert_height_default`, invoked on the date's
/// last block when there was nothing to compute.
fn insert_date_default(&mut self, date: Date) {
    for map in self.inserted_as_mut_vec() {
        map.date.insert_default(date);
    }
}
/// Returns mutable references to every percentile map written by `insert`,
/// from the 95th percentile down to the 5th, so callers can apply the same
/// operation (e.g. default insertion) to all of them.
pub fn inserted_as_mut_vec(&mut self) -> Vec<&mut BiMap<f32>> {
    let mut maps: Vec<&mut BiMap<f32>> = Vec::with_capacity(19);
    maps.push(&mut self.pp_95p);
    maps.push(&mut self.pp_90p);
    maps.push(&mut self.pp_85p);
    maps.push(&mut self.pp_80p);
    maps.push(&mut self.pp_75p);
    maps.push(&mut self.pp_70p);
    maps.push(&mut self.pp_65p);
    maps.push(&mut self.pp_60p);
    maps.push(&mut self.pp_55p);
    maps.push(&mut self.pp_median);
    maps.push(&mut self.pp_45p);
    maps.push(&mut self.pp_40p);
    maps.push(&mut self.pp_35p);
    maps.push(&mut self.pp_30p);
    maps.push(&mut self.pp_25p);
    maps.push(&mut self.pp_20p);
    maps.push(&mut self.pp_15p);
    maps.push(&mut self.pp_10p);
    maps.push(&mut self.pp_05p);
    maps
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for PricePaidSubDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,171 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
// Market price to reference price ratio, its moving averages, momentum
// oscillator, extreme percentiles, and the derived price bands.
#[derive(Allocative, Iterable)]
pub struct RatioDataset {
min_initial_states: MinInitialStates,
// Market price divided by this dataset's reference price.
ratio: BiMap<f32>,
// Simple moving averages of the ratio over 1 week / 1 month / 1 year.
ratio_1w_sma: BiMap<f32>,
ratio_1m_sma: BiMap<f32>,
ratio_1y_sma: BiMap<f32>,
// (ratio / ratio_1y_sma) - 1.
ratio_1y_sma_momentum_oscillator: BiMap<f32>,
// Upper-tail ratio percentiles (99th, 99.5th, 99.9th).
ratio_99p: BiMap<f32>,
ratio_99_5p: BiMap<f32>,
ratio_99_9p: BiMap<f32>,
// Lower-tail ratio percentiles (1st, 0.5th, 0.1th).
ratio_1p: BiMap<f32>,
ratio_0_5p: BiMap<f32>,
ratio_0_1p: BiMap<f32>,
// Reference price scaled by the matching ratio percentile (price bands).
price_99p: BiMap<f32>,
price_99_5p: BiMap<f32>,
price_99_9p: BiMap<f32>,
price_1p: BiMap<f32>,
price_0_5p: BiMap<f32>,
price_0_1p: BiMap<f32>,
}
impl RatioDataset {
/// Imports (or creates) every ratio and price-band map under `path`.
///
/// Ratio maps are stored as `market_price_to_{name}_…`, price bands as
/// `{name}_…`.
pub fn import(path: &MapPath, name: &str, config: &Config) -> color_eyre::Result<Self> {
let f_ratio = |s: &str| path.join(&format!("market_price_to_{name}_{s}"));
let f_price = |s: &str| path.join(&format!("{name}_{s}"));
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Computed
// ---
ratio: BiMap::new_bin(1, MapKind::Computed, &f_ratio("ratio")),
ratio_1w_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1w_sma")),
ratio_1m_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1m_sma")),
ratio_1y_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1y_sma")),
ratio_1y_sma_momentum_oscillator: BiMap::new_bin(
2,
MapKind::Computed,
&f_ratio("ratio_1y_sma_momentum_oscillator"),
),
ratio_99p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99p")),
ratio_99_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_5p")),
ratio_99_9p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_9p")),
ratio_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_1p")),
ratio_0_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_5p")),
ratio_0_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_1p")),
price_99p: BiMap::new_bin(4, MapKind::Computed, &f_price("99p")),
price_99_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_5p")),
price_99_9p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_9p")),
price_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("1p")),
price_0_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_5p")),
price_0_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_1p")),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Computes the ratio series and everything derived from it: the 1w/1m/1y
/// SMAs, the 1y-SMA momentum oscillator, the extreme ratio percentiles,
/// and the corresponding price bands (`other_price` scaled by each
/// percentile).
///
/// Fixes versus the previous revision:
/// - the 1-month SMA pass was computed twice (identical back-to-back
///   calls); it now runs once,
/// - `ratio_1p` used percentile `0.1` (the 10th percentile) instead of
///   `0.01`, inconsistent with its siblings (99p -> 0.99, 0_5p -> 0.005,
///   0_1p -> 0.001).
pub fn compute(
    &mut self,
    &ComputeData { heights, dates, .. }: &ComputeData,
    market_price: &mut BiMap<f32>,
    other_price: &mut BiMap<f32>,
) {
    // ratio = market price / reference price, per block and per day.
    self.ratio.height.multi_insert_divide(
        heights,
        &mut market_price.height,
        &mut other_price.height,
    );
    self.ratio
        .date
        .multi_insert_divide(dates, &mut market_price.date, &mut other_price.date);
    self.ratio_1w_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_WEEK_IN_DAYS,
    );
    self.ratio_1m_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_MONTH_IN_DAYS,
    );
    self.ratio_1y_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_YEAR_IN_DAYS,
    );
    // Momentum oscillator: relative distance of the ratio from its 1y SMA.
    self.ratio_1y_sma_momentum_oscillator
        .height
        .multi_insert_complex_transform(
            heights,
            &mut self.ratio.height,
            |(ratio, height, ..)| {
                (ratio / self.ratio_1y_sma.height.get_or_import(height).unwrap()) - 1.0
            },
        );
    self.ratio_1y_sma_momentum_oscillator
        .date
        .multi_insert_complex_transform(dates, &mut self.ratio.date, |(ratio, date, _, _)| {
            (ratio / self.ratio_1y_sma.date.get_or_import(date).unwrap()) - 1.0
        });
    // Extreme percentiles of the ratio distribution.
    self.ratio.multi_insert_percentile(
        heights,
        dates,
        vec![
            (&mut self.ratio_99p, 0.99),
            (&mut self.ratio_99_5p, 0.995),
            (&mut self.ratio_99_9p, 0.999),
            (&mut self.ratio_1p, 0.01),
            (&mut self.ratio_0_5p, 0.005),
            (&mut self.ratio_0_1p, 0.001),
        ],
        None,
    );
    // Price bands: reference price scaled by each ratio percentile.
    self.price_99p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99p);
    self.price_99_5p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_5p);
    self.price_99_9p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_9p);
    self.price_1p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_1p);
    self.price_0_5p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_5p);
    self.price_0_1p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_1p);
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for RatioDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,387 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::RealizedState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath, Price},
utils::ONE_MONTH_IN_DAYS,
};
// Realized profit/loss figures for a cohort: per-block inserted values,
// their 1-day date sums, and computed derivatives (SOPR, cumulatives,
// realized value, sell-side risk, profit-to-loss ratios).
#[derive(Allocative, Iterable)]
pub struct RealizedSubDataset {
min_initial_states: MinInitialStates,
// Per-block inserted figures (dollars).
realized_profit: HeightMap<f32>,
realized_loss: HeightMap<f32>,
value_created: HeightMap<f32>,
adjusted_value_created: HeightMap<f32>,
value_destroyed: HeightMap<f32>,
adjusted_value_destroyed: HeightMap<f32>,
// 1-day sums of the above, written on each date's last block.
realized_profit_1d_sum: DateMap<f32>,
realized_loss_1d_sum: DateMap<f32>,
value_created_1d_sum: DateMap<f32>,
adjusted_value_created_1d_sum: DateMap<f32>,
value_destroyed_1d_sum: DateMap<f32>,
adjusted_value_destroyed_1d_sum: DateMap<f32>,
// SOPR = value created / value destroyed.
spent_output_profit_ratio: BiMap<f32>,
adjusted_spent_output_profit_ratio: BiMap<f32>,
// Computed in `compute` from the inserted maps.
negative_realized_loss: HeightMap<f32>,
negative_realized_loss_1d_sum: DateMap<f32>,
net_realized_profit_and_loss: HeightMap<f32>,
net_realized_profit_and_loss_1d_sum: DateMap<f32>,
net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap<f32>,
cumulative_realized_profit: BiMap<f32>,
cumulative_realized_loss: BiMap<f32>,
cumulative_net_realized_profit_and_loss: BiMap<f32>,
cumulative_net_realized_profit_and_loss_1m_net_change: BiMap<f32>,
// realized value = realized profit + realized loss.
realized_value: HeightMap<f32>,
realized_value_1d_sum: DateMap<f32>,
sell_side_risk_ratio: DateMap<f32>,
realized_profit_to_loss_ratio: HeightMap<f32>,
realized_profit_to_loss_1d_sum_ratio: DateMap<f32>,
}
impl RealizedSubDataset {
/// Imports (or creates) every realized map under `path`, optionally inside
/// a `{name}/` subdirectory when a cohort name is given.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Prefix file names with the cohort name when one is provided.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
realized_profit: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_profit")),
realized_loss: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_loss")),
value_created: HeightMap::new_bin(1, MapKind::Inserted, &f("value_created")),
adjusted_value_created: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_created"),
),
value_destroyed: HeightMap::new_bin(1, MapKind::Inserted, &f("value_destroyed")),
adjusted_value_destroyed: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_destroyed"),
),
realized_profit_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("realized_profit_1d_sum"),
),
realized_loss_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("realized_loss_1d_sum"),
),
value_created_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("value_created_1d_sum"),
),
adjusted_value_created_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_created_1d_sum"),
),
value_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("value_destroyed_1d_sum"),
),
adjusted_value_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_destroyed_1d_sum"),
),
spent_output_profit_ratio: BiMap::new_bin(
2,
MapKind::Inserted,
&f("spent_output_profit_ratio"),
),
adjusted_spent_output_profit_ratio: BiMap::new_bin(
2,
MapKind::Inserted,
&f("adjusted_spent_output_profit_ratio"),
),
// ---
// Computed
// ---
negative_realized_loss: HeightMap::new_bin(
2,
MapKind::Computed,
&f("negative_realized_loss"),
),
negative_realized_loss_1d_sum: DateMap::new_bin(
2,
MapKind::Computed,
&f("negative_realized_loss_1d_sum"),
),
net_realized_profit_and_loss: HeightMap::new_bin(
1,
MapKind::Computed,
&f("net_realized_profit_and_loss"),
),
net_realized_profit_and_loss_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("net_realized_profit_and_loss_1d_sum"),
),
// NOTE(review): the on-disk name lacks the `_1d_sum` suffix present
// in the field name — confirm this is intentional before renaming,
// since changing it would orphan existing files.
net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap::new_bin(
2,
MapKind::Computed,
&f("net_realized_profit_and_loss_to_market_cap_ratio"),
),
cumulative_realized_profit: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_realized_profit"),
),
cumulative_realized_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_realized_loss"),
),
cumulative_net_realized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_net_realized_profit_and_loss"),
),
cumulative_net_realized_profit_and_loss_1m_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_net_realized_profit_and_loss_1m_net_change"),
),
realized_value: HeightMap::new_bin(1, MapKind::Computed, &f("realized_value")),
realized_value_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("realized_value_1d_sum"),
),
sell_side_risk_ratio: DateMap::new_bin(
1,
MapKind::Computed,
&f("sell_side_risk_ratio"),
),
realized_profit_to_loss_ratio: HeightMap::new_bin(
1,
MapKind::Computed,
&f("realized_profit_to_loss_ratio"),
),
realized_profit_to_loss_1d_sum_ratio: DateMap::new_bin(
1,
MapKind::Computed,
&f("realized_profit_to_loss_1d_sum_ratio"),
),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Inserts the block-level realized figures from `height_state`; when
/// `height` is the date's last block, also writes the 1-day sums (over
/// `date_blocks_range`) and the date-level (adjusted) SOPR.
///
/// Fix: the date-level SOPR previously divided without the zero guard the
/// height-level path has, producing f32 `inf`/NaN when nothing was
/// destroyed that day. Both levels now fall back to a neutral 1.0.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        date_blocks_range,
        ..
    }: &InsertData,
    height_state: &RealizedState,
) {
    self.realized_profit
        .insert(height, height_state.realized_profit().to_dollar() as f32);
    self.realized_loss
        .insert(height, height_state.realized_loss().to_dollar() as f32);
    self.value_created
        .insert(height, height_state.value_created().to_dollar() as f32);
    self.adjusted_value_created.insert(
        height,
        height_state.adjusted_value_created().to_dollar() as f32,
    );
    self.value_destroyed
        .insert(height, height_state.value_destroyed().to_dollar() as f32);
    self.adjusted_value_destroyed.insert(
        height,
        height_state.adjusted_value_destroyed().to_dollar() as f32,
    );
    // SOPR = value created / value destroyed; 1.0 when nothing was destroyed.
    self.spent_output_profit_ratio.height.insert(height, {
        if height_state.value_destroyed() > Price::ZERO {
            (height_state.value_created().to_cent() as f64
                / height_state.value_destroyed().to_cent() as f64) as f32
        } else {
            1.0
        }
    });
    self.adjusted_spent_output_profit_ratio
        .height
        .insert(height, {
            if height_state.adjusted_value_destroyed() > Price::ZERO {
                (height_state.adjusted_value_created().to_cent() as f64
                    / height_state.adjusted_value_destroyed().to_cent() as f64)
                    as f32
            } else {
                1.0
            }
        });
    if is_date_last_block {
        self.realized_profit_1d_sum
            .insert(date, self.realized_profit.sum_range(date_blocks_range));
        self.realized_loss_1d_sum
            .insert(date, self.realized_loss.sum_range(date_blocks_range));
        let value_created_1d_sum = self
            .value_created_1d_sum
            .insert(date, self.value_created.sum_range(date_blocks_range));
        let adjusted_value_created_1d_sum = self.adjusted_value_created_1d_sum.insert(
            date,
            self.adjusted_value_created.sum_range(date_blocks_range),
        );
        let value_destroyed_1d_sum = self
            .value_destroyed_1d_sum
            .insert(date, self.value_destroyed.sum_range(date_blocks_range));
        let adjusted_value_destroyed_1d_sum = self.adjusted_value_destroyed_1d_sum.insert(
            date,
            self.adjusted_value_destroyed.sum_range(date_blocks_range),
        );
        // Mirror the height-level zero guard so a zero destroyed sum
        // yields a neutral 1.0 instead of f32 infinity.
        self.spent_output_profit_ratio.date.insert(
            date,
            if value_destroyed_1d_sum > 0.0 {
                value_created_1d_sum / value_destroyed_1d_sum
            } else {
                1.0
            },
        );
        self.adjusted_spent_output_profit_ratio.date.insert(
            date,
            if adjusted_value_destroyed_1d_sum > 0.0 {
                adjusted_value_created_1d_sum / adjusted_value_destroyed_1d_sum
            } else {
                1.0
            },
        );
    }
}
/// Computes every derived realized series from the inserted maps.
///
/// Order matters: net realized P&L must exist before its cumulative, and
/// the cumulative before its 1-month net change.
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
market_cap: &mut BiMap<f32>,
) {
// Negated loss, for charting loss below the zero line.
self.negative_realized_loss.multi_insert_simple_transform(
heights,
&mut self.realized_loss,
|v, _| v * -1.0,
);
self.negative_realized_loss_1d_sum
.multi_insert_simple_transform(dates, &mut self.realized_loss_1d_sum, |v, _| v * -1.0);
// Net realized P&L = profit - loss.
self.net_realized_profit_and_loss.multi_insert_subtract(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.net_realized_profit_and_loss_1d_sum
.multi_insert_subtract(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
self.net_realized_profit_and_loss_1d_sum_to_market_cap_ratio
.multi_insert_percentage(
dates,
&mut self.net_realized_profit_and_loss_1d_sum,
&mut market_cap.date,
);
// Running totals (height-level from per-block values, date-level from
// the 1-day sums).
self.cumulative_realized_profit
.height
.multi_insert_cumulative(heights, &mut self.realized_profit);
self.cumulative_realized_profit
.date
.multi_insert_cumulative(dates, &mut self.realized_profit_1d_sum);
self.cumulative_realized_loss
.height
.multi_insert_cumulative(heights, &mut self.realized_loss);
self.cumulative_realized_loss
.date
.multi_insert_cumulative(dates, &mut self.realized_loss_1d_sum);
self.cumulative_net_realized_profit_and_loss
.height
.multi_insert_cumulative(heights, &mut self.net_realized_profit_and_loss);
self.cumulative_net_realized_profit_and_loss
.date
.multi_insert_cumulative(dates, &mut self.net_realized_profit_and_loss_1d_sum);
self.cumulative_net_realized_profit_and_loss_1m_net_change
.multi_insert_net_change(
heights,
dates,
&mut self.cumulative_net_realized_profit_and_loss,
ONE_MONTH_IN_DAYS,
);
// Realized value = profit + loss (total value moved at a gain or loss).
self.realized_value.multi_insert_add(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.realized_value_1d_sum.multi_insert_add(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
// Sell-side risk = realized value relative to market cap.
self.sell_side_risk_ratio.multi_insert_percentage(
dates,
&mut self.realized_value_1d_sum,
&mut market_cap.date,
);
self.realized_profit_to_loss_ratio.multi_insert_divide(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.realized_profit_to_loss_1d_sum_ratio
.multi_insert_divide(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for RealizedSubDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,277 @@
use std::{iter::Sum, ops::Add};
use allocative::Allocative;
use crate::{
structs::{
Date, DateMapChunkId, GenericMap, MapChunkId, MapKey, MapKind, MapPath, MapSerialized,
MapValue, SerializedDateMap,
},
utils::{get_percentile, LossyFrom},
};
// Date-keyed specialization of `RecapDataset`.
pub type DateRecapDataset<T> = RecapDataset<Date, T, DateMapChunkId, SerializedDateMap<T>>;
// Summary statistics over a sample of values per key. Each map is `Some`
// only when the matching flag was enabled via `RecapOptions` at import.
#[derive(Allocative)]
pub struct RecapDataset<Key, Value, ChunkId, Serialized> {
average: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
sum: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
max: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
// Percentile maps (field names are prefixed with `_` because identifiers
// cannot start with a digit).
_90p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_75p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
median: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_25p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_10p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
min: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
}
// Builder-style flags selecting which recap statistics to materialize.
// All flags default to `false`; enable them with the `add_*` methods.
#[derive(Default)]
pub struct RecapOptions {
average: bool,
sum: bool,
max: bool,
_90p: bool,
_75p: bool,
median: bool,
_25p: bool,
_10p: bool,
min: bool,
}
impl RecapOptions {
    /// Enables the `min` recap map.
    pub fn add_min(self) -> Self {
        Self { min: true, ..self }
    }
    /// Enables the `max` recap map.
    pub fn add_max(self) -> Self {
        Self { max: true, ..self }
    }
    /// Enables the `median` recap map.
    pub fn add_median(self) -> Self {
        Self { median: true, ..self }
    }
    /// Enables the `average` recap map.
    pub fn add_average(self) -> Self {
        Self { average: true, ..self }
    }
    /// Enables the `sum` recap map.
    #[allow(unused)]
    pub fn add_sum(self) -> Self {
        Self { sum: true, ..self }
    }
    /// Enables the 90th percentile recap map.
    pub fn add_90p(self) -> Self {
        Self { _90p: true, ..self }
    }
    /// Enables the 75th percentile recap map.
    pub fn add_75p(self) -> Self {
        Self { _75p: true, ..self }
    }
    /// Enables the 25th percentile recap map.
    pub fn add_25p(self) -> Self {
        Self { _25p: true, ..self }
    }
    /// Enables the 10th percentile recap map.
    pub fn add_10p(self) -> Self {
        Self { _10p: true, ..self }
    }
}
impl<Key, Value, ChunkId, Serialized> RecapDataset<Key, Value, ChunkId, Serialized>
where
Value: MapValue,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Serialized: MapSerialized<Key, Value, ChunkId>,
{
/// Imports (or creates) only the recap maps enabled in `options`; every
/// other statistic stays `None` and is skipped by `compute`.
pub fn import(path: &MapPath, options: RecapOptions) -> color_eyre::Result<Self> {
let f = |s: &str| path.join(s);
let s = Self {
// ---
// Computed
// ---
min: options
.min
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("min"))),
max: options
.max
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("max"))),
median: options
.median
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("median"))),
average: options
.average
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("average"))),
sum: options
.sum
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("sum"))),
_90p: options
._90p
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("90p"))),
_75p: options
._75p
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("75p"))),
_25p: options
._25p
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("25p"))),
_10p: options
._10p
.then(|| GenericMap::new_bin(1, MapKind::Computed, &f("10p"))),
};
Ok(s)
}
/// Inserts the enabled recap statistics for `key`, sorting `values` in
/// place when any order statistic (min/max/median/percentile) is enabled.
///
/// Fix: an empty `values` slice previously panicked on
/// `values.last().unwrap()` (when `max` was enabled) and produced a NaN
/// average (division by a zero length); an empty sample is now a no-op.
pub fn compute<'a, Value2>(&mut self, key: Key, values: &'a mut [Value2])
where
    Value: LossyFrom<f32> + LossyFrom<Value2>,
    Value2: Sum<&'a Value2> + Ord + Add<Output = Value2> + Clone + Copy + LossyFrom<f32>,
    f32: LossyFrom<Value> + LossyFrom<Value2>,
{
    if values.is_empty() {
        return;
    }
    if self.max.is_some()
        || self._90p.is_some()
        || self._75p.is_some()
        || self.median.is_some()
        || self._25p.is_some()
        || self._10p.is_some()
        || self.min.is_some()
    {
        // Order statistics need a sorted sample; stability is irrelevant.
        values.sort_unstable();
        if let Some(max) = self.max.as_mut() {
            max.insert_computed(key, Value::lossy_from(*values.last().unwrap()));
        }
        if let Some(_90p) = self._90p.as_mut() {
            _90p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.90)));
        }
        if let Some(_75p) = self._75p.as_mut() {
            _75p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.75)));
        }
        if let Some(median) = self.median.as_mut() {
            median.insert_computed(key, Value::lossy_from(get_percentile(values, 0.50)));
        }
        if let Some(_25p) = self._25p.as_mut() {
            _25p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.25)));
        }
        if let Some(_10p) = self._10p.as_mut() {
            _10p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.10)));
        }
        if let Some(min) = self.min.as_mut() {
            min.insert_computed(key, Value::lossy_from(*values.first().unwrap()));
        }
    }
    if self.sum.is_some() || self.average.is_some() {
        let sum = Value::lossy_from(values.iter().sum::<Value2>());
        if let Some(sum_map) = self.sum.as_mut() {
            sum_map.insert_computed(key, sum);
        }
        if let Some(average) = self.average.as_mut() {
            // Safe: `values` is non-empty, so `len > 0`.
            let len = values.len() as f32;
            average.insert_computed(key, Value::lossy_from(f32::lossy_from(sum) / len));
        }
    }
}
/// Returns the recap maps that were enabled at import time, in a fixed
/// order (min, max, median, average, sum, 90p, 75p, 25p, 10p).
pub fn as_vec(&self) -> Vec<&GenericMap<Key, Value, ChunkId, Serialized>> {
    [
        self.min.as_ref(),
        self.max.as_ref(),
        self.median.as_ref(),
        self.average.as_ref(),
        self.sum.as_ref(),
        self._90p.as_ref(),
        self._75p.as_ref(),
        self._25p.as_ref(),
        self._10p.as_ref(),
    ]
    .into_iter()
    .flatten()
    .collect()
}
/// Mutable counterpart of `as_vec`; same fixed ordering.
pub fn as_mut_vec(&mut self) -> Vec<&mut GenericMap<Key, Value, ChunkId, Serialized>> {
    [
        self.min.as_mut(),
        self.max.as_mut(),
        self.median.as_mut(),
        self.average.as_mut(),
        self.sum.as_mut(),
        self._90p.as_mut(),
        self._75p.as_mut(),
        self._25p.as_mut(),
        self._10p.as_mut(),
    ]
    .into_iter()
    .flatten()
    .collect()
}
}

View File

@@ -0,0 +1,109 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::SupplyState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
// A cohort's supply (in BTC) and its ratios to the circulating supply.
#[derive(Allocative, Iterable)]
pub struct SupplySubDataset {
min_initial_states: MinInitialStates,
// Inserted: the cohort's supply in BTC.
pub supply: BiMap<f64>,
// Computed from `supply` in `compute`.
pub supply_to_circulating_supply_ratio: BiMap<f64>,
pub halved_supply: BiMap<f64>,
pub halved_supply_to_circulating_supply_ratio: BiMap<f64>,
}
impl SupplySubDataset {
/// Imports (or creates) the supply maps under `path`, optionally inside a
/// `{name}/` subdirectory when a cohort name is given.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Prefix file names with the cohort name when one is provided.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
supply: BiMap::new_bin(1, MapKind::Inserted, &f("supply")),
// ---
// Computed
// ---
supply_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_to_circulating_supply_ratio"),
),
halved_supply: BiMap::new_bin(1, MapKind::Computed, &f("halved_supply")),
halved_supply_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("halved_supply_to_circulating_supply_ratio"),
),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Records the cohort's supply (in BTC) at `height`; mirrored to `date`
/// when `height` is the date's last block.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        ..
    }: &InsertData,
    state: &SupplyState,
) {
    let supply_in_btc = self.supply.height.insert(height, state.supply().to_btc());
    if is_date_last_block {
        self.supply.date.insert(date, supply_in_btc);
    }
}
/// Computes the derived supply ratios.
///
/// `halved_supply_to_circulating_supply_ratio` is derived by halving the
/// already-computed supply ratio, which equals `(supply / 2) / circulating`.
#[allow(unused_variables)]
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
circulating_supply: &mut BiMap<f64>,
) {
self.supply_to_circulating_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply, circulating_supply);
self.halved_supply
.multi_insert_simple_transform(heights, dates, &mut self.supply, &|v| v / 2.0);
self.halved_supply_to_circulating_supply_ratio
.multi_insert_simple_transform(
heights,
dates,
&mut self.supply_to_circulating_supply_ratio,
&|v| v / 2.0,
);
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for SupplySubDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,199 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::UnrealizedState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
// Unrealized profit/loss figures for a cohort: inserted supply-in-profit
// and unrealized P&L, plus computed complements and ratios.
#[derive(Allocative, Iterable)]
pub struct UnrealizedSubDataset {
min_initial_states: MinInitialStates,
// Inserted.
supply_in_profit: BiMap<f64>,
unrealized_profit: BiMap<f32>,
unrealized_loss: BiMap<f32>,
// Computed in `compute`.
supply_in_loss: BiMap<f64>,
negative_unrealized_loss: BiMap<f32>,
net_unrealized_profit_and_loss: BiMap<f32>,
net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap<f32>,
supply_in_profit_to_own_supply_ratio: BiMap<f64>,
supply_in_profit_to_circulating_supply_ratio: BiMap<f64>,
supply_in_loss_to_own_supply_ratio: BiMap<f64>,
supply_in_loss_to_circulating_supply_ratio: BiMap<f64>,
}
impl UnrealizedSubDataset {
/// Imports (or creates) the unrealized maps under `path`, optionally
/// inside a `{name}/` subdirectory when a cohort name is given.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Prefix file names with the cohort name when one is provided.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
supply_in_profit: BiMap::new_bin(1, MapKind::Inserted, &f("supply_in_profit")),
unrealized_profit: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_profit")),
unrealized_loss: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_loss")),
// ---
// Computed
// ---
supply_in_loss: BiMap::new_bin(1, MapKind::Computed, &f("supply_in_loss")),
negative_unrealized_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("negative_unrealized_loss"),
),
net_unrealized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("net_unrealized_profit_and_loss"),
),
net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap::new_bin(
2,
MapKind::Computed,
&f("net_unrealized_profit_and_loss_to_market_cap_ratio"),
),
supply_in_profit_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_own_supply_ratio"),
),
supply_in_profit_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_circulating_supply_ratio"),
),
supply_in_loss_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_own_supply_ratio"),
),
supply_in_loss_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_circulating_supply_ratio"),
),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Inserts the block-level unrealized figures from `block_state`; on the
/// date's last block, also inserts the date-level figures from
/// `date_state`, which the caller must provide in that case (unwrapped).
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        ..
    }: &InsertData,
    block_state: &UnrealizedState,
    date_state: &Option<UnrealizedState>,
) {
    // Per-block figures.
    self.supply_in_profit
        .height
        .insert(height, block_state.supply_in_profit().to_btc());
    self.unrealized_profit
        .height
        .insert(height, block_state.unrealized_profit().to_dollar() as f32);
    self.unrealized_loss
        .height
        .insert(height, block_state.unrealized_loss().to_dollar() as f32);
    if !is_date_last_block {
        return;
    }
    // Per-day figures: written once per date, on its closing block.
    let state = date_state.as_ref().unwrap();
    self.supply_in_profit
        .date
        .insert(date, state.supply_in_profit().to_btc());
    self.unrealized_profit
        .date
        .insert(date, state.unrealized_profit().to_dollar() as f32);
    self.unrealized_loss
        .date
        .insert(date, state.unrealized_loss().to_dollar() as f32);
}
/// Computes every derived unrealized series from the inserted maps and the
/// provided supply and market-cap series.
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
own_supply: &mut BiMap<f64>,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,
) {
// supply in loss = own supply - supply in profit.
self.supply_in_loss.multi_insert_subtract(
heights,
dates,
own_supply,
&mut self.supply_in_profit,
);
// Negated loss, for charting loss below the zero line.
self.negative_unrealized_loss.multi_insert_simple_transform(
heights,
dates,
&mut self.unrealized_loss,
&|v| v * -1.0,
);
// Net unrealized P&L = profit - loss.
self.net_unrealized_profit_and_loss.multi_insert_subtract(
heights,
dates,
&mut self.unrealized_profit,
&mut self.unrealized_loss,
);
self.net_unrealized_profit_and_loss_to_market_cap_ratio
.multi_insert_percentage(
heights,
dates,
&mut self.net_unrealized_profit_and_loss,
market_cap,
);
// Supply-in-profit/loss relative to both the cohort's own supply and
// the circulating supply.
self.supply_in_profit_to_own_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_profit, own_supply);
self.supply_in_profit_to_circulating_supply_ratio
.multi_insert_percentage(
heights,
dates,
&mut self.supply_in_profit,
circulating_supply,
);
self.supply_in_loss_to_own_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_loss, own_supply);
self.supply_in_loss_to_circulating_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_loss, circulating_supply);
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for UnrealizedSubDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,70 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::UTXOState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
// Tracks a cohort's UTXO count per block and per day.
#[derive(Allocative, Iterable)]
pub struct UTXOSubDataset {
min_initial_states: MinInitialStates,
// Inserted: number of unspent outputs.
count: BiMap<f64>,
}
impl UTXOSubDataset {
/// Imports (or creates) the UTXO count map under `path`, optionally inside
/// a `{name}/` subdirectory when a cohort name is given.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Prefix file names with the cohort name when one is provided.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
count: BiMap::new_bin(1, MapKind::Inserted, &f("utxo_count")),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Records the cohort's UTXO count at `height`; mirrored to `date` when
/// `height` is the date's last block.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        is_date_last_block,
        date,
        ..
    }: &InsertData,
    state: &UTXOState,
) {
    let utxo_count = self.count.height.insert(height, state.count());
    if !is_date_last_block {
        return;
    }
    self.count.date.insert(date, utxo_count);
}
}
// Exposes the minimal initial states computed at import time.
impl AnyDataset for UTXOSubDataset {
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,325 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::InsertData,
structs::{BiMap, Config, DateMap, HeightMap, MapKind},
utils::{
ONE_DAY_IN_S, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS, TARGET_BLOCKS_PER_DAY,
},
};
use super::{AnyDataset, ComputeData, MinInitialStates};
// Network-wide transaction counts and volumes (native units and dollars),
// their moving averages, annualized volume, velocity, and TPS.
#[derive(Allocative, Iterable)]
pub struct TransactionDataset {
min_initial_states: MinInitialStates,
// Inserted per block, with 1-day date sums.
pub count: HeightMap<usize>,
pub count_1d_sum: DateMap<usize>,
pub volume: HeightMap<f64>,
pub volume_1d_sum: DateMap<f64>,
pub volume_in_dollars: HeightMap<f32>,
pub volume_in_dollars_1d_sum: DateMap<f32>,
// Average sent
// Average sent in dollars
// Median sent
// Median sent in dollars
// Min
// Max
// 10th 25th 75th 90th percentiles
// type
// version
// Computed: 1-week / 1-month SMAs of the inserted series.
pub count_1w_sma: HeightMap<f32>,
pub count_1d_sum_1w_sma: DateMap<f32>,
pub count_1m_sma: HeightMap<f32>,
pub count_1d_sum_1m_sma: DateMap<f32>,
pub volume_1w_sma: HeightMap<f32>,
pub volume_1d_sum_1w_sma: DateMap<f32>,
pub volume_1m_sma: HeightMap<f32>,
pub volume_1d_sum_1m_sma: DateMap<f32>,
pub volume_in_dollars_1w_sma: HeightMap<f32>,
pub volume_in_dollars_1d_sum_1w_sma: DateMap<f32>,
pub volume_in_dollars_1m_sma: HeightMap<f32>,
pub volume_in_dollars_1d_sum_1m_sma: DateMap<f32>,
// Computed: annualized figures, velocity, and transactions per second.
pub annualized_volume: DateMap<f32>,
pub annualized_volume_in_dollars: DateMap<f32>,
pub velocity: DateMap<f32>,
pub transactions_per_second: BiMap<f32>,
pub transactions_per_second_1w_sma: BiMap<f32>,
pub transactions_per_second_1m_sma: BiMap<f32>,
}
impl TransactionDataset {
/// Imports (or creates) every transaction map under the datasets path.
pub fn import(config: &Config) -> color_eyre::Result<Self> {
let f = |s: &str| config.path_datasets().join(s);
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
count: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_count")),
count_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_count_1d_sum")),
volume: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_volume")),
volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_volume_1d_sum")),
volume_in_dollars: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("transaction_volume_in_dollars"),
),
volume_in_dollars_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("transaction_volume_in_dollars_1d_sum"),
),
// ---
// Computed
// ---
count_1w_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1w_sma")),
count_1d_sum_1w_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_count_1d_sum_1w_sma"),
),
count_1m_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1m_sma")),
count_1d_sum_1m_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_count_1d_sum_1m_sma"),
),
volume_1w_sma: HeightMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_1w_sma"),
),
volume_1d_sum_1w_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_1d_sum_1w_sma"),
),
volume_1m_sma: HeightMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_1m_sma"),
),
volume_1d_sum_1m_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_1d_sum_1m_sma"),
),
volume_in_dollars_1w_sma: HeightMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_in_dollars_1w_sma"),
),
volume_in_dollars_1d_sum_1w_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_in_dollars_1d_sum_1w_sma"),
),
volume_in_dollars_1m_sma: HeightMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_in_dollars_1m_sma"),
),
volume_in_dollars_1d_sum_1m_sma: DateMap::new_bin(
1,
MapKind::Computed,
&f("transaction_volume_in_dollars_1d_sum_1m_sma"),
),
annualized_volume: DateMap::new_bin(
1,
MapKind::Computed,
&f("annualized_transaction_volume"),
),
annualized_volume_in_dollars: DateMap::new_bin(
2,
MapKind::Computed,
&f("annualized_transaction_volume_in_dollars"),
),
velocity: DateMap::new_bin(1, MapKind::Computed, &f("transaction_velocity")),
transactions_per_second: BiMap::new_bin(
1,
MapKind::Computed,
&f("transactions_per_second"),
),
transactions_per_second_1w_sma: BiMap::new_bin(
1,
MapKind::Computed,
&f("transactions_per_second_1w_sma"),
),
transactions_per_second_1m_sma: BiMap::new_bin(
1,
MapKind::Computed,
&f("transactions_per_second_1m_sma"),
),
};
// Derive the minimal initial states from the freshly imported maps.
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
date,
amount_sent,
transaction_count,
is_date_last_block,
date_blocks_range,
block_price,
..
}: &InsertData,
) {
self.count.insert(height, transaction_count);
self.volume.insert(height, amount_sent.to_btc());
self.volume_in_dollars
.insert(height, (block_price * amount_sent).to_dollar() as f32);
if is_date_last_block {
self.count_1d_sum
.insert(date, self.count.sum_range(date_blocks_range));
self.volume_1d_sum
.insert(date, self.volume.sum_range(date_blocks_range));
self.volume_in_dollars_1d_sum
.insert(date, self.volume_in_dollars.sum_range(date_blocks_range));
}
}
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
circulating_supply: &mut BiMap<f64>,
block_interval: &mut HeightMap<u32>,
) {
self.count_1w_sma.multi_insert_simple_average(
heights,
&mut self.count,
TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
);
self.count_1d_sum_1w_sma.multi_insert_simple_average(
dates,
&mut self.count_1d_sum,
ONE_WEEK_IN_DAYS,
);
self.count_1m_sma.multi_insert_simple_average(
heights,
&mut self.count,
TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
);
self.count_1d_sum_1m_sma.multi_insert_simple_average(
dates,
&mut self.count_1d_sum,
ONE_MONTH_IN_DAYS,
);
self.volume_1w_sma.multi_insert_simple_average(
heights,
&mut self.volume,
TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
);
self.volume_1d_sum_1w_sma.multi_insert_simple_average(
dates,
&mut self.volume_1d_sum,
ONE_WEEK_IN_DAYS,
);
self.volume_1m_sma.multi_insert_simple_average(
heights,
&mut self.volume,
TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
);
self.volume_1d_sum_1m_sma.multi_insert_simple_average(
dates,
&mut self.volume_1d_sum,
ONE_MONTH_IN_DAYS,
);
self.volume_in_dollars_1w_sma.multi_insert_simple_average(
heights,
&mut self.volume_in_dollars,
TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
);
self.volume_in_dollars_1d_sum_1w_sma
.multi_insert_simple_average(
dates,
&mut self.volume_in_dollars_1d_sum,
ONE_WEEK_IN_DAYS,
);
self.volume_in_dollars_1m_sma.multi_insert_simple_average(
heights,
&mut self.volume_in_dollars,
TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
);
self.volume_in_dollars_1d_sum_1m_sma
.multi_insert_simple_average(
dates,
&mut self.volume_in_dollars_1d_sum,
ONE_MONTH_IN_DAYS,
);
self.annualized_volume.multi_insert_last_x_sum(
dates,
&mut self.volume_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.annualized_volume_in_dollars.multi_insert_last_x_sum(
dates,
&mut self.volume_in_dollars_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.velocity.multi_insert_divide(
dates,
&mut self.annualized_volume,
&mut circulating_supply.date,
);
self.transactions_per_second.height.multi_insert_divide(
heights,
&mut self.count,
block_interval,
);
self.transactions_per_second
.date
.multi_insert_simple_transform(dates, &mut self.count_1d_sum, |count, date| {
count as f32 / (date.get_day_completion() as f32 * ONE_DAY_IN_S as f32)
});
self.transactions_per_second_1w_sma
.multi_insert_simple_average(
heights,
dates,
&mut self.transactions_per_second,
ONE_WEEK_IN_DAYS,
);
self.transactions_per_second_1m_sma
.multi_insert_simple_average(
heights,
dates,
&mut self.transactions_per_second,
ONE_MONTH_IN_DAYS,
);
}
}
/// Hooks the dataset into the generic `AnyDataset` machinery.
impl AnyDataset for TransactionDataset {
    // Exposes the minimal resume state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,199 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::UTXOCohortId,
},
structs::{BiMap, Config, Date, Height, MapPath},
};
/// All sub-datasets (supply, utxo, capitalization, unrealized, price paid,
/// realized, input, …) for a single UTXO cohort.
#[derive(Allocative, Iterable)]
pub struct UTXODataset {
    // Which UTXO cohort this dataset describes.
    id: UTXOCohortId,
    // Minimal on-disk state needed to resume inserting/computing.
    min_initial_states: MinInitialStates,
    pub subs: SubDataset,
}
impl UTXODataset {
    /// Imports the cohort's sub-datasets under `parent_path`/<cohort name>,
    /// then seeds `min_initial_states` from what is already on disk.
    pub fn import(
        parent_path: &MapPath,
        id: UTXOCohortId,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let name = id.name().to_owned();
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            id,
            subs: SubDataset::import(parent_path, &Some(name), config)?,
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Inserts into each sub-dataset that still needs data at this
    /// height/date, pulling from the matching cohort state in `insert_data`.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            states,
            utxo_cohorts_one_shot_states,
            // utxo_cohorts_received_states,
            utxo_cohorts_sent_states,
            ..
        } = insert_data;
        // NOTE(review): the `.unwrap()`s below rely on durable states being
        // present whenever a durable insert is needed (see the caller's
        // `needs_durable_states` gating) — confirm that invariant holds.
        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.subs.supply.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .supply_state,
            );
        }
        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.subs.utxo.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .utxo_state,
            );
        }
        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.subs.capitalization.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .capitalization_state,
            );
        }
        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            // Unrealized takes both the block-level and date-level state.
            self.subs.unrealized.insert(
                insert_data,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_block_state,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_date_state,
            );
        }
        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.subs.price_paid.insert(
                insert_data,
                &utxo_cohorts_one_shot_states.get(&self.id).price_paid_state,
            );
        }
        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.subs.realized.insert(
                insert_data,
                &utxo_cohorts_sent_states.get(&self.id).realized,
            );
        }
        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.subs
                .input
                .insert(insert_data, &utxo_cohorts_sent_states.get(&self.id).input);
        }
        // TODO: move output from common to address
        // if self.subs.output.needs_insert(insert_data) {
        //     self.subs
        //         .output
        //         .insert(insert_data, utxo_cohorts_received_states.get(&self.id));
        // }
    }
    // The predicates below delegate to the corresponding sub-dataset so the
    // caller can skip building cohort states that nothing will consume.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.subs.utxo.needs_insert(height, date)
    }
    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.subs.capitalization.needs_insert(height, date)
    }
    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.subs.supply.needs_insert(height, date)
    }
    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.subs.price_paid.needs_insert(height, date)
    }
    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.subs.realized.needs_insert(height, date)
    }
    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.subs.unrealized.needs_insert(height, date)
    }
    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.subs.input.needs_insert(height, date)
    }
    /// Runs each sub-dataset's compute pass when needed. Supply runs first:
    /// later passes read `self.subs.supply.supply` (presumably filled by the
    /// supply pass — TODO confirm against `SupplySubDataset::compute`).
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.subs.supply.should_compute(compute_data) {
            self.subs.supply.compute(compute_data, circulating_supply);
        }
        if self.subs.unrealized.should_compute(compute_data) {
            self.subs.unrealized.compute(
                compute_data,
                &mut self.subs.supply.supply,
                circulating_supply,
                market_cap,
            );
        }
        if self.subs.realized.should_compute(compute_data) {
            self.subs.realized.compute(compute_data, market_cap);
        }
        if self.subs.capitalization.should_compute(compute_data) {
            self.subs
                .capitalization
                .compute(compute_data, closes, &mut self.subs.supply.supply);
        }
        // if self.subs.output.should_compute(compute_data) {
        //     self.subs
        //         .output
        //         .compute(compute_data, &mut self.subs.supply.total);
        // }
    }
}
/// Hooks the cohort dataset into the generic `AnyDataset` machinery.
impl AnyDataset for UTXODataset {
    // Exposes the minimal resume state computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,164 @@
mod dataset;
use allocative::Allocative;
use dataset::*;
use rayon::prelude::*;
use itertools::Itertools;
use crate::{
parser::datasets::AnyDatasets,
parser::states::{SplitByUTXOCohort, UTXOCohortId},
structs::{BiMap, Config, Date, Height},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
/// One `UTXODataset` per UTXO cohort, keyed by cohort through
/// `SplitByUTXOCohort`.
#[derive(Allocative)]
pub struct UTXODatasets {
    // Minimal resume state aggregated over all cohorts.
    min_initial_states: MinInitialStates,
    cohorts: SplitByUTXOCohort<UTXODataset>,
}
impl UTXODatasets {
    /// Imports every cohort dataset in parallel, assembles the
    /// split-by-cohort container, then seeds `min_initial_states`.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByUTXOCohort::<Option<UTXODataset>>::default();
        let path_dataset = config.path_datasets();

        // Run the per-cohort imports in parallel and collect first, so the
        // borrow of `cohorts` taken by `as_vec()` ends before the sequential
        // pass below mutates it.
        let imported = cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, UTXODataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>();

        for (id, dataset) in imported {
            // `?` surfaces the first import error, just like the
            // short-circuiting `try_for_each` this replaces.
            cohorts.get_mut(&id).replace(dataset?);
        }

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            cohorts: cohorts.unwrap(),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));

        Ok(s)
    }

    /// Forwards `insert_data` to every cohort dataset.
    pub fn insert(&mut self, insert_data: &InsertData) {
        for (cohort, _) in self.cohorts.as_mut_vec() {
            cohort.insert(insert_data);
        }
    }

    /// True when any cohort still needs an insert backed by durable or
    /// one-shot states at this height/date.
    pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_utxo(height, date)
            || self.needs_insert_capitalization(height, date)
            || self.needs_insert_supply(height, date)
            || self.needs_one_shot_states(height, date)
    }

    /// True when any cohort needs price-paid or unrealized states.
    pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }

    /// True when any cohort needs input or realized states.
    pub fn needs_sent_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    }

    // Each predicate below is true when at least one cohort's matching
    // sub-dataset still needs an insert at this height/date.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
    }

    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
    }

    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_supply(height, date))
    }

    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
    }

    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
    }

    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
    }

    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .into_iter()
            .any(|(dataset, _)| dataset.needs_insert_input(height, date))
    }

    /// Runs every cohort's compute pass with the shared market maps.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        for (cohort, _) in self.cohorts.as_mut_vec() {
            cohort.compute(compute_data, closes, circulating_supply, market_cap);
        }
    }

    /// Borrowing view over every cohort dataset, paired with its id.
    fn as_vec(&self) -> Vec<(&UTXODataset, UTXOCohortId)> {
        self.cohorts.as_vec()
    }

    /// Mutable view over every cohort dataset, paired with its id.
    fn as_mut_vec(&mut self) -> Vec<(&mut UTXODataset, UTXOCohortId)> {
        self.cohorts.as_mut_vec()
    }
}
/// Generic `AnyDatasets` view over the UTXO cohort group.
impl AnyDatasets for UTXODatasets {
    // Exposes the group-level minimal resume state.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Immutable trait-object view of every cohort dataset.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &(dyn AnyDataset + Send + Sync))
            .collect()
    }

    /// Mutable trait-object view of every cohort dataset.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        self.as_mut_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &mut dyn AnyDataset)
            .collect()
    }
}