Mirror of https://github.com/bitcoinresearchkit/brk.git (synced 2026-04-28 16:49:58 -07:00)

Commit: parser: add recap dataset
@@ -12,7 +12,7 @@ use crate::{
    utils::time,
};

use super::{databases_folder_path, AnyDatabaseGroup, Metadata, SizedDatabase};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};

type Key = u32;
type Value = AddressData;

@@ -1,15 +1,20 @@
use allocative::Allocative;
use itertools::Itertools;
use ordered_float::OrderedFloat;

use crate::{
    bitcoin::TARGET_BLOCKS_PER_DAY,
    datasets::AnyDataset,
    structs::{
        Amount, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, Height, HeightMap, MapKey,
        date_map_vec_to_any_date_map_vec, date_map_vec_to_mut_any_date_map_vec, Amount, AnyBiMap,
        AnyDateMap, AnyHeightMap, BiMap, DateMap, Height, HeightMap, MapKey,
    },
    utils::{BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};

use super::{ComputeData, InsertData, MinInitialStates};
use super::{
    ComputeData, DateRecapDataset, InsertData, MinInitialStates, RecapDataset, RecapOptions,
};

#[derive(Allocative)]
pub struct MiningDataset {

@@ -50,10 +55,14 @@ pub struct MiningDataset {
    pub last_subsidy: DateMap<f64>,
    pub last_subsidy_in_dollars: DateMap<f32>,
    pub difficulty: BiMap<f64>,
    pub block_size: HeightMap<f32>, // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_size: HeightMap<f32>, // in MB
    pub block_size_recap: DateRecapDataset<f32>, // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_weight_recap: DateRecapDataset<f32>, // in MB
    pub block_vbytes: HeightMap<u64>,
    pub block_interval: HeightMap<u32>, // in s
    pub block_vbytes_recap: DateRecapDataset<u64>,
    pub block_interval: HeightMap<u32>, // in s
    pub block_interval_recap: DateRecapDataset<u32>, // in s

    // Computed
    pub annualized_issuance: BiMap<f64>, // Same as subsidy_1y_sum

@@ -173,19 +182,59 @@ impl MiningDataset {
            difficulty: BiMap::new_bin(1, &f("difficulty")),
            difficulty_adjustment: DateMap::new_bin(1, &f("difficulty_adjustment")),
            block_size: HeightMap::new_bin(1, &f("block_size")),
            //
            // block_size_1d_sma: HeightMap::new_bin(1, &f("block_size")),
            // block_size_1d_median: HeightMap::new_bin(1, &f("block_size")),
            //
            block_size_recap: RecapDataset::import(
                &f("block_size_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            cumulative_block_size: BiMap::new_bin(1, &f("cumulative_block_size")),
            block_weight: HeightMap::new_bin(1, &f("block_weight")),
            //
            // block_weight_1d_sma: HeightMap::new_bin(1, &f("block_weight")),
            block_weight_recap: RecapDataset::import(
                &f("block_weight_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            block_vbytes: HeightMap::new_bin(1, &f("block_vbytes")),
            block_vbytes_recap: RecapDataset::import(
                &f("block_vbytes_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            // block_vbytes_1d_sma: HeightMap::new_bin(1, &f("block_vbytes")),
            block_interval: HeightMap::new_bin(2, &f("block_interval")),
            // block_interval_1d_sma: HeightMap::new_bin(2, &f("block_interval")),
            //
            block_interval_recap: RecapDataset::import(
                &f("block_interval_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            hash_rate: DateMap::new_bin(1, &f("hash_rate")),
            hash_rate_1w_sma: DateMap::new_bin(1, &f("hash_rate_1w_sma")),
            hash_rate_1m_sma: DateMap::new_bin(1, &f("hash_rate_1m_sma")),

@@ -295,6 +344,7 @@ impl MiningDataset {
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        first_height: &mut DateMap<Height>,
        last_height: &mut DateMap<Height>,
    ) {
        self.blocks_mined_1w_sum.multi_insert_last_x_sum(

@@ -491,6 +541,38 @@ impl MiningDataset {
            &mut self.difficulty.date,
            ONE_DAY_IN_DAYS,
        );

        dates.iter().for_each(|date| {
            let first = first_height.get_or_import(date).unwrap();
            let last = last_height.get_or_import(date).unwrap();

            self.block_size_recap.compute(
                *date,
                &mut self.block_vbytes.get_or_import_range_inclusive(first, last),
            );

            self.block_weight_recap.compute(
                *date,
                &mut self
                    .block_weight
                    .get_or_import_range_inclusive(first, last)
                    .into_iter()
                    .map(OrderedFloat)
                    .collect_vec(),
            );

            self.block_vbytes_recap.compute(
                *date,
                &mut self.block_vbytes.get_or_import_range_inclusive(first, last),
            );

            self.block_interval_recap.compute(
                *date,
                &mut self
                    .block_interval
                    .get_or_import_range_inclusive(first, last),
            );
        })
    }
}
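The added compute loop above is the whole recap pipeline for mining data: for each date, fetch that day's first and last block heights, pull the per-block values over that inclusive range, and hand them to the per-day recap, which sorts them once and reads off the requested statistics. A self-contained sketch of what one such per-day recap amounts to (plain f32 values and hypothetical names, assuming a non-empty day; not the crate's actual types):

// Minimal sketch of a per-day "recap": sort once, then read order statistics.
// `DayRecap`, `recap_day`, and `percentile` are illustrative names only.
#[derive(Debug)]
struct DayRecap {
    min: f32,
    p10: f32,
    median: f32,
    p90: f32,
    max: f32,
    average: f32,
}

fn percentile(sorted: &[f32], p: f32) -> f32 {
    // Same interpolation-free rule as the diff's get_percentile:
    // average the two neighbours when the target index is fractional.
    let index = (sorted.len() - 1) as f32 * p;
    if index.fract() != 0.0 {
        (sorted[index as usize] + sorted[index.ceil() as usize]) / 2.0
    } else {
        sorted[index as usize]
    }
}

fn recap_day(mut block_values: Vec<f32>) -> DayRecap {
    // Assumes at least one block was mined on the date.
    block_values.sort_by(|a, b| a.partial_cmp(b).unwrap());
    let sum: f32 = block_values.iter().sum();
    DayRecap {
        min: block_values[0],
        p10: percentile(&block_values, 0.10),
        median: percentile(&block_values, 0.50),
        p90: percentile(&block_values, 0.90),
        max: *block_values.last().unwrap(),
        average: sum / block_values.len() as f32,
    }
}

fn main() {
    // e.g. block sizes (in MB) mined on one date
    let recap = recap_day(vec![1.2, 0.8, 1.5, 1.1, 0.9]);
    println!("{recap:?}");
}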
@@ -600,8 +682,8 @@ impl AnyDataset for MiningDataset {
    }

    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![
            &self.blocks_mined_1d_target,
        [
            &self.blocks_mined_1d_target as &(dyn AnyDateMap + Send + Sync),
            &self.blocks_mined_1w_sma,
            &self.blocks_mined_1m_sma,
            &self.blocks_mined_1w_sum,

@@ -625,11 +707,25 @@ impl AnyDataset for MiningDataset {
            &self.puell_multiple,
            &self.difficulty_adjustment,
        ]
        .into_iter()
        .chain(date_map_vec_to_any_date_map_vec(
            self.block_size_recap.as_vec(),
        ))
        .chain(date_map_vec_to_any_date_map_vec(
            self.block_vbytes_recap.as_vec(),
        ))
        .chain(date_map_vec_to_any_date_map_vec(
            self.block_weight_recap.as_vec(),
        ))
        .chain(date_map_vec_to_any_date_map_vec(
            self.block_interval_recap.as_vec(),
        ))
        .collect_vec()
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![
            &mut self.blocks_mined_1d_target,
        [
            &mut self.blocks_mined_1d_target as &mut dyn AnyDateMap,
            &mut self.blocks_mined_1w_sma,
            &mut self.blocks_mined_1m_sma,
            &mut self.blocks_mined_1w_sum,

@@ -653,5 +749,19 @@ impl AnyDataset for MiningDataset {
            &mut self.puell_multiple,
            &mut self.difficulty_adjustment,
        ]
        .into_iter()
        .chain(date_map_vec_to_mut_any_date_map_vec(
            self.block_size_recap.as_mut_vec(),
        ))
        .chain(date_map_vec_to_mut_any_date_map_vec(
            self.block_vbytes_recap.as_mut_vec(),
        ))
        .chain(date_map_vec_to_mut_any_date_map_vec(
            self.block_weight_recap.as_mut_vec(),
        ))
        .chain(date_map_vec_to_mut_any_date_map_vec(
            self.block_interval_recap.as_mut_vec(),
        ))
        .collect_vec()
    }
}

@@ -199,8 +199,11 @@ impl AllDatasets {
        }

        if self.mining.should_compute(&compute_data) {
            self.mining
                .compute(&compute_data, &mut self.date_metadata.last_height);
            self.mining.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
            );
        }

        // No compute needed for now

@@ -1,32 +1,26 @@
use std::{iter::Sum, ops::Add};

use allocative::Allocative;

use crate::{
    datasets::{AnyDataset, ComputeData, MinInitialStates},
    structs::{AnyDateMap, MapValue},
    DateMap, HeightMap,
    structs::{DateMapChunkId, GenericMap, MapKey, MapSerialized, MapValue},
    utils::{get_percentile, LossyFrom},
    Date, MapChunkId, SerializedBTreeMap,
};

#[derive(Allocative)]
pub enum RecapTime {
    Insert,
    Compute,
}
pub type DateRecapDataset<T> = RecapDataset<Date, T, DateMapChunkId, SerializedBTreeMap<Date, T>>;

#[derive(Allocative)]
pub struct RecapDataset<T> {
    min_initial_states: MinInitialStates,
    time: RecapTime,

    // Computed
    average: Option<DateMap<T>>,
    sum: Option<DateMap<T>>,
    max: Option<DateMap<T>>,
    _90p: Option<DateMap<T>>,
    _75p: Option<DateMap<T>>,
    median: Option<DateMap<T>>,
    _25p: Option<DateMap<T>>,
    _10p: Option<DateMap<T>>,
    min: Option<DateMap<T>>,
pub struct RecapDataset<Key, Value, ChunkId, Serialized> {
    average: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    sum: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    max: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _90p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _75p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    median: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _25p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    _10p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
    min: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
}

#[derive(Default)]

@@ -43,132 +37,141 @@ pub struct RecapOptions {
}

impl RecapOptions {
    pub fn add_min(&mut self) {
    pub fn add_min(mut self) -> Self {
        self.min = true;
        self
    }

    pub fn add_max(&mut self) {
    pub fn add_max(mut self) -> Self {
        self.max = true;
        self
    }

    pub fn add_median(&mut self) {
    pub fn add_median(mut self) -> Self {
        self.median = true;
        self
    }

    pub fn add_average(&mut self) {
    pub fn add_average(mut self) -> Self {
        self.average = true;
        self
    }

    pub fn add_sum(&mut self) {
    #[allow(unused)]
    pub fn add_sum(mut self) -> Self {
        self.sum = true;
        self
    }

    pub fn add_90p(&mut self) {
    pub fn add_90p(mut self) -> Self {
        self._90p = true;
        self
    }

    pub fn add_75p(&mut self) {
    pub fn add_75p(mut self) -> Self {
        self._75p = true;
        self
    }

    pub fn add_25p(&mut self) {
    pub fn add_25p(mut self) -> Self {
        self._25p = true;
        self
    }

    pub fn add_10p(&mut self) {
    pub fn add_10p(mut self) -> Self {
        self._10p = true;
        self
    }
}

impl<T> RecapDataset<T>
impl<Key, Value, ChunkId, Serialized> RecapDataset<Key, Value, ChunkId, Serialized>
where
    T: MapValue,
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    pub fn import(
        parent_path: &str,
        time: RecapTime,
        options: RecapOptions,
    ) -> color_eyre::Result<Self> {
    pub fn import(parent_path: &str, options: RecapOptions) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            time,

            min: options.min.then(|| DateMap::new_bin(1, &f("min"))),
            max: options.max.then(|| DateMap::new_bin(1, &f("max"))),
            median: options.median.then(|| DateMap::new_bin(1, &f("median"))),
            average: options.average.then(|| DateMap::new_bin(1, &f("average"))),
            sum: options.sum.then(|| DateMap::new_bin(1, &f("sum"))),
            _90p: options._90p.then(|| DateMap::new_bin(1, &f("90p"))),
            _75p: options._75p.then(|| DateMap::new_bin(1, &f("75p"))),
            _25p: options._25p.then(|| DateMap::new_bin(1, &f("25p"))),
            _10p: options._10p.then(|| DateMap::new_bin(1, &f("10p"))),
        let s = Self {
            min: options.min.then(|| GenericMap::new_bin(1, &f("min"))),
            max: options.max.then(|| GenericMap::new_bin(1, &f("max"))),
            median: options.median.then(|| GenericMap::new_bin(1, &f("median"))),
            average: options
                .average
                .then(|| GenericMap::new_bin(1, &f("average"))),
            sum: options.sum.then(|| GenericMap::new_bin(1, &f("sum"))),
            _90p: options._90p.then(|| GenericMap::new_bin(1, &f("90p"))),
            _75p: options._75p.then(|| GenericMap::new_bin(1, &f("75p"))),
            _25p: options._25p.then(|| GenericMap::new_bin(1, &f("25p"))),
            _10p: options._10p.then(|| GenericMap::new_bin(1, &f("10p"))),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        source: &mut HeightMap<f32>,
    ) {
        dates.iter().enumerate().for_each(|(index, date)| {
            // let heights = heights_by_date.get(index).unwrap();
    pub fn compute<'a, Value2>(&mut self, key: Key, values: &'a mut [Value2])
    where
        Value: LossyFrom<f32> + LossyFrom<Value2>,
        Value2: Sum<&'a Value2> + Ord + Add<Output = Value2> + Clone + Copy + LossyFrom<f32>,
        f32: LossyFrom<Value> + LossyFrom<Value2>,
    {
        if self.max.is_some()
            || self._90p.is_some()
            || self._75p.is_some()
            || self.median.is_some()
            || self._25p.is_some()
            || self._10p.is_some()
            || self.min.is_some()
        {
            values.sort_unstable();

            if let Some(sum) = self.sum.as_ref() {
                // v.push(sum);
            if let Some(max) = self.max.as_mut() {
                max.insert(key, Value::lossy_from(*values.last().unwrap()));
            }

            if let Some(average) = self.average.as_ref() {
                // v.push(average);
            if let Some(_90p) = self._90p.as_mut() {
                _90p.insert(key, Value::lossy_from(get_percentile(values, 0.90)));
            }

            if let Some(max) = self.max.as_ref() {
                // v.push(max);
            if let Some(_75p) = self._75p.as_mut() {
                _75p.insert(key, Value::lossy_from(get_percentile(values, 0.75)));
            }

            if let Some(_90p) = self._90p.as_ref() {
                // v.push(_90p);
            if let Some(median) = self.median.as_mut() {
                median.insert(key, Value::lossy_from(get_percentile(values, 0.50)));
            }

            if let Some(_75p) = self._75p.as_ref() {
                // v.push(_75p);
            if let Some(_25p) = self._25p.as_mut() {
                _25p.insert(key, Value::lossy_from(get_percentile(values, 0.25)));
            }

            if let Some(median) = self.median.as_ref() {
                // v.push(median);
            if let Some(_10p) = self._10p.as_mut() {
                _10p.insert(key, Value::lossy_from(get_percentile(values, 0.10)));
            }

            if let Some(_25p) = self._25p.as_ref() {
                // v.push(_25p);
            if let Some(min) = self.min.as_mut() {
                min.insert(key, Value::lossy_from(*values.first().unwrap()));
            }
        }

        if self.sum.is_some() || self.average.is_some() {
            let sum = Value::lossy_from(values.iter().sum::<Value2>());

            if let Some(sum_map) = self.sum.as_mut() {
                sum_map.insert(key, sum);
            }

            if let Some(_10p) = self._10p.as_ref() {
                // v.push(_10p);
            if let Some(average) = self.average.as_mut() {
                let len = values.len() as f32;
                average.insert(key, Value::lossy_from(f32::lossy_from(sum) / len));
            }

            if let Some(min) = self.min.as_ref() {
                // v.push(min);
            }
        });
    }
}

impl<T> AnyDataset for RecapDataset<T>
where
    T: MapValue,
{
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut v: Vec<&(dyn AnyDateMap + Send + Sync)> = vec![];
    pub fn as_vec(&self) -> Vec<&GenericMap<Key, Value, ChunkId, Serialized>> {
        let mut v = vec![];

        if let Some(min) = self.min.as_ref() {
            v.push(min);
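The RecapOptions changes above switch every add_* method from `&mut self` to a consuming `mut self -> Self`, which is what allows the `RecapOptions::default().add_average()...add_min()` chains used in the MiningDataset import calls. A stripped-down, standalone sketch of that builder style (illustrative names, not the crate's API):

// Consuming-builder sketch: each method takes `mut self` by value,
// flips a flag, and returns the updated value so calls can be chained.
#[derive(Default, Debug)]
struct Options {
    average: bool,
    median: bool,
    max: bool,
}

impl Options {
    fn add_average(mut self) -> Self {
        self.average = true;
        self
    }

    fn add_median(mut self) -> Self {
        self.median = true;
        self
    }

    fn add_max(mut self) -> Self {
        self.max = true;
        self
    }
}

fn main() {
    // With `&mut self` setters this would need a named mutable binding;
    // by-value setters allow one chained expression.
    let options = Options::default().add_average().add_median().add_max();
    println!("{options:?}");
}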
@@ -209,8 +212,8 @@ where
        v
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        let mut v: Vec<&mut dyn AnyDateMap> = vec![];
    pub fn as_mut_vec(&mut self) -> Vec<&mut GenericMap<Key, Value, ChunkId, Serialized>> {
        let mut v = vec![];

        if let Some(min) = self.min.as_mut() {
            v.push(min);

@@ -251,3 +254,99 @@ where
        v
    }
}

// impl<Key, Value, ChunkId, Serialized> AnyDataset for RecapDataset<Key, Value, ChunkId, Serialized>
// where
//     Value: MapValue,
//     ChunkId: MapChunkId,
//     Key: MapKey<ChunkId>,
//     Serialized: MapSerialized<Key, Value, ChunkId>,
// {
//     fn get_min_initial_states(&self) -> &MinInitialStates {
//         &self.min_initial_states
//     }

//     fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
//         let mut v: Vec<&(dyn AnyDateMap + Send + Sync)> = vec![];

//         if let Some(min) = self.min.as_ref() {
//             v.push(min);
//         }

//         if let Some(max) = self.max.as_ref() {
//             v.push(max);
//         }

//         if let Some(median) = self.median.as_ref() {
//             v.push(median);
//         }

//         if let Some(average) = self.average.as_ref() {
//             v.push(average);
//         }

//         if let Some(sum) = self.sum.as_ref() {
//             v.push(sum);
//         }

//         if let Some(_90p) = self._90p.as_ref() {
//             v.push(_90p);
//         }

//         if let Some(_75p) = self._75p.as_ref() {
//             v.push(_75p);
//         }

//         if let Some(_25p) = self._25p.as_ref() {
//             v.push(_25p);
//         }

//         if let Some(_10p) = self._10p.as_ref() {
//             v.push(_10p);
//         }

//         v
//     }

//     fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
//         let mut v: Vec<&mut dyn AnyDateMap> = vec![];

//         if let Some(min) = self.min.as_mut() {
//             v.push(min);
//         }

//         if let Some(max) = self.max.as_mut() {
//             v.push(max);
//         }

//         if let Some(median) = self.median.as_mut() {
//             v.push(median);
//         }

//         if let Some(average) = self.average.as_mut() {
//             v.push(average);
//         }

//         if let Some(sum) = self.sum.as_mut() {
//             v.push(sum);
//         }

//         if let Some(_90p) = self._90p.as_mut() {
//             v.push(_90p);
//         }

//         if let Some(_75p) = self._75p.as_mut() {
//             v.push(_75p);
//         }

//         if let Some(_25p) = self._25p.as_mut() {
//             v.push(_25p);
//         }

//         if let Some(_10p) = self._10p.as_mut() {
//             v.push(_10p);
//         }

//         v
//     }
// }

@@ -4,24 +4,23 @@ use std::{
};

use allocative::Allocative;
use ordered_float::FloatCore;

use crate::{bitcoin::TARGET_BLOCKS_PER_DAY, utils::LossyFrom};

use super::{AnyDateMap, AnyHeightMap, AnyMap, Date, DateMap, Height, HeightMap, MapValue};

#[derive(Default, Allocative)]
pub struct BiMap<T>
pub struct BiMap<Value>
where
    T: MapValue,
    Value: MapValue,
{
    pub height: HeightMap<T>,
    pub date: DateMap<T>,
    pub height: HeightMap<Value>,
    pub date: DateMap<Value>,
}

impl<T> BiMap<T>
impl<Value> BiMap<Value>
where
    T: MapValue,
    Value: MapValue,
{
    pub fn new_bin(version: u32, path: &str) -> Self {
        Self {
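The rename from `T` to `Value` above is purely cosmetic: a BiMap is still the same metric stored twice, once keyed by block height and once keyed by date, and the multi_insert_* helpers in the following hunks simply forward to both inner maps. A toy, standalone sketch of that shape (plain BTreeMaps and simplified keys, not the crate's persistent maps):

use std::collections::BTreeMap;

// Toy stand-in for BiMap<Value>: the same series kept per height and per date.
// Height and date keys are simplified to u32 and String here.
struct ToyBiMap<Value> {
    height: BTreeMap<u32, Value>,
    date: BTreeMap<String, Value>,
}

impl<Value: Copy> ToyBiMap<Value> {
    fn new() -> Self {
        Self {
            height: BTreeMap::new(),
            date: BTreeMap::new(),
        }
    }

    // Mirrors the spirit of multi_insert_const: write the same constant
    // under every height key and every date key.
    fn multi_insert_const(&mut self, heights: &[u32], dates: &[&str], constant: Value) {
        for h in heights {
            self.height.insert(*h, constant);
        }
        for d in dates {
            self.date.insert((*d).to_string(), constant);
        }
    }
}

fn main() {
    let mut subsidy = ToyBiMap::new();
    subsidy.multi_insert_const(&[840_000, 840_001], &["2024-04-20"], 3.125_f64);
    println!("heights: {}, dates: {}", subsidy.height.len(), subsidy.date.len());
}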
@@ -39,7 +38,7 @@

    pub fn date_insert_sum_range(&mut self, date: Date, date_blocks_range: &RangeInclusive<u32>)
    where
        T: Sum,
        Value: Sum,
    {
        self.date
            .insert(date, self.height.sum_range(date_blocks_range));

@@ -51,7 +50,7 @@
        first_height: &mut DateMap<Height>,
        last_height: &mut DateMap<Height>,
    ) where
        T: Sum,
        Value: Sum,
    {
        dates.iter().for_each(|date| {
            let first_height = first_height.get_or_import(date).unwrap();

@@ -62,7 +61,7 @@
        })
    }

    pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: T) {
    pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: Value) {
        self.height.multi_insert_const(heights, constant);

        self.date.multi_insert_const(dates, constant);

@@ -75,8 +74,8 @@
        source: &mut BiMap<K>,
        transform: &F,
    ) where
        T: Div<Output = T>,
        F: Fn(K) -> T,
        Value: Div<Output = Value>,
        F: Fn(K) -> Value,
        K: MapValue,
    {
        self.height

@@ -95,8 +94,8 @@
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Add<Output = T>,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Add<Output = Value>,
    {
        self.height
            .multi_insert_add(heights, &mut added.height, &mut adder.height);

@@ -113,8 +112,8 @@
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Sub<Output = T>,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Sub<Output = Value>,
    {
        self.height
            .multi_insert_subtract(heights, &mut subtracted.height, &mut subtracter.height);

@@ -132,8 +131,8 @@
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Mul<Output = T>,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Mul<Output = Value>,
    {
        self.height
            .multi_insert_multiply(heights, &mut multiplied.height, &mut multiplier.height);

@@ -150,8 +149,8 @@
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Div<Output = T> + Mul<Output = T> + From<u8>,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self.height
            .multi_insert_divide(heights, &mut divided.height, &mut divider.height);

@@ -168,8 +167,8 @@
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Div<Output = T> + Mul<Output = T> + From<u8>,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self.height
            .multi_insert_percentage(heights, &mut divided.height, &mut divider.height);

@@ -184,8 +183,8 @@
        source: &mut BiMap<K>,
    ) where
        K: MapValue,
        T: LossyFrom<K>,
        T: Add<Output = T> + Sub<Output = T>,
        Value: LossyFrom<K>,
        Value: Add<Output = Value> + Sub<Output = Value>,
    {
        self.height
            .multi_insert_cumulative(heights, &mut source.height);

@@ -201,8 +200,8 @@
        days: usize,
    ) where
        K: MapValue,
        T: LossyFrom<K>,
        T: Add<Output = T> + Sub<Output = T>,
        Value: LossyFrom<K>,
        Value: Add<Output = Value> + Sub<Output = Value>,
    {
        self.height.multi_insert_last_x_sum(
            heights,

@@ -221,7 +220,7 @@
        source: &mut BiMap<K>,
        days: usize,
    ) where
        T: Into<f32> + From<f32>,
        Value: Into<f32> + From<f32>,
        K: MapValue + Sum,
        f32: LossyFrom<K>,
    {

@@ -238,10 +237,10 @@
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<T>,
        source: &mut BiMap<Value>,
        days: usize,
    ) where
        T: Sub<Output = T>,
        Value: Sub<Output = Value>,
    {
        self.height.multi_insert_net_change(
            heights,

@@ -256,10 +255,11 @@
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<T>,
        source: &mut BiMap<Value>,
        days: Option<usize>,
    ) where
        T: FloatCore,
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        self.height.multi_insert_median(
            heights,

@@ -274,10 +274,11 @@
        &mut self,
        heights: &[Height],
        dates: &[Date],
        mut map_and_percentiles: Vec<(&mut BiMap<T>, f32)>,
        mut map_and_percentiles: Vec<(&mut BiMap<Value>, f32)>,
        days: Option<usize>,
    ) where
        T: FloatCore,
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        let mut date_map_and_percentiles = vec![];
        let mut height_map_and_percentiles = vec![];

@@ -35,6 +35,27 @@ pub trait AnyDateMap: AnyMap {
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}

#[inline(always)]
pub fn date_map_vec_to_any_date_map_vec<T>(
    vec: Vec<&DateMap<T>>,
) -> impl Iterator<Item = &(dyn AnyDateMap + Send + Sync)>
where
    T: MapValue,
{
    vec.into_iter()
        .map(|map| map as &(dyn AnyDateMap + Send + Sync))
}

#[inline(always)]
pub fn date_map_vec_to_mut_any_date_map_vec<T>(
    vec: Vec<&mut DateMap<T>>,
) -> impl Iterator<Item = &mut (dyn AnyDateMap)>
where
    T: MapValue,
{
    vec.into_iter().map(|map| map as &mut dyn AnyDateMap)
}

impl<T> AnyDateMap for DateMap<T>
where
    T: MapValue,
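The two new free functions above exist because a `Vec<&DateMap<T>>` does not coerce to a vector of trait objects on its own; each element has to be cast individually before the recap maps can be chained into the dataset's combined list. A standalone sketch of the same trick with made-up names:

// Sketch: mapping a vec of concrete references into trait objects.
// `Series` and `Daily` are illustrative, not the crate's traits or types.
trait Series {
    fn len(&self) -> usize;
}

struct Daily<T>(Vec<T>);

impl<T> Series for Daily<T> {
    fn len(&self) -> usize {
        self.0.len()
    }
}

// Analogous to date_map_vec_to_any_date_map_vec: each &Daily<T> is cast
// to &dyn Series so maps of different value types can share one collection.
fn to_dyn_vec<T>(vec: Vec<&Daily<T>>) -> impl Iterator<Item = &dyn Series> {
    vec.into_iter().map(|map| map as &dyn Series)
}

fn main() {
    let a = Daily(vec![1u32, 2, 3]);
    let b = Daily(vec![4u32]);
    let total: usize = to_dyn_vec(vec![&a, &b]).map(|s| s.len()).sum();
    assert_eq!(total, 4);
}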
@@ -11,10 +11,14 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::{FloatCore, OrderedFloat};
use ordered_float::OrderedFloat;
use serde::{de::DeserializeOwned, Serialize};

use crate::{log, utils::LossyFrom, Serialization};
use crate::{
    log,
    utils::{get_percentile, LossyFrom},
    Serialization,
};

use super::{AnyMap, MapValue};

@@ -693,28 +697,31 @@ where

    pub fn multi_insert_percentage_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
    where
        Value: Sub<Output = Value> + FloatCore,
        Value: Sub<Output = Value> + LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        let one = Value::from(1.0).unwrap();
        let hundred = Value::from(100.0).unwrap();
        let one = 1.0;
        let hundred = 100.0;

        keys.iter().for_each(|key| {
            let previous_value = key
                .checked_sub(len)
                .and_then(|previous_key| source.get_or_import(&previous_key))
                .unwrap_or_default();
            let previous_value = f32::lossy_from(
                key.checked_sub(len)
                    .and_then(|previous_key| source.get_or_import(&previous_key))
                    .unwrap_or_default(),
            );

            let last_value = source.get_or_import(key).unwrap();
            let last_value = f32::lossy_from(source.get_or_import(key).unwrap());

            let percentage_change = ((last_value / previous_value) - one) * hundred;

            self.insert(*key, percentage_change);
            self.insert(*key, Value::lossy_from(percentage_change));
        });
    }

    pub fn multi_insert_median(&mut self, keys: &[Key], source: &mut Self, len: Option<usize>)
    where
        Value: FloatCore,
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        source.multi_insert_percentile(keys, vec![(self, 0.5)], len);
    }
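The refactor above only moves the arithmetic into f32 (via LossyFrom) before converting the result back into `Value`; the formula itself, `((last / previous) - 1) * 100`, is unchanged. A small standalone illustration with numbers that are exact in f32:

// ((last / previous) - 1.0) * 100.0, computed in f32 as in the new code path.
fn percentage_change(previous: f32, last: f32) -> f32 {
    ((last / previous) - 1.0) * 100.0
}

fn main() {
    // e.g. a metric moving from 400 to 500 over the window: +25%.
    assert_eq!(percentage_change(400.0, 500.0), 25.0);
    println!("{}%", percentage_change(400.0, 500.0));
}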
@@ -725,7 +732,8 @@
        mut map_and_percentiles: Vec<(&mut Self, f32)>,
        len: Option<usize>,
    ) where
        Value: FloatCore,
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        if len.map_or(false, |size| size < 3) {
            panic!("Computing a percentile for a size lower than 3 is useless");

@@ -736,8 +744,7 @@

        let min_percentile_key = Key::min_percentile_key();

        let nan = Value::from(f32::NAN).unwrap();
        let two = Value::from(2.0).unwrap();
        let nan = Value::lossy_from(f32::NAN);

        keys.iter().cloned().try_for_each(|key| {
            if key < min_percentile_key {

@@ -753,8 +760,9 @@
            let mut vec = start
                .iter_up_to(&key)
                .flat_map(|key| self.get_or_import(&key))
                .map(|v| f32::lossy_from(v))
                .filter(|f| !f.is_nan())
                .map(|f| OrderedFloat(f))
                .map(OrderedFloat)
                .collect_vec();

            if len.is_some() {

@@ -765,7 +773,7 @@

                sorted_vec.replace(vec);
            } else {
                let float_value = self.get_or_import(&key).unwrap();
                let float_value = f32::lossy_from(self.get_or_import(&key).unwrap());

                if !float_value.is_nan() {
                    let float_value = OrderedFloat(float_value);

@@ -797,8 +805,6 @@

            let vec = sorted_vec.as_ref().unwrap();

            let len = vec.len();

            map_and_percentiles
                .iter_mut()
                .for_each(|(map, percentile)| {

@@ -806,47 +812,9 @@
                        panic!("The percentile should be between 0.0 and 1.0");
                    }

                    let value = {
                        if len < 2 {
                            nan
                        } else {
                            let index = (len - 1) as f32 * *percentile;
                    let float_value = get_percentile::<OrderedFloat<f32>>(vec, *percentile).0;

                            let fract = index.fract();

                            if fract != 0.0 {
                                (vec.get(index.ceil() as usize)
                                    .unwrap_or_else(|| {
                                        dbg!(vec, index, &self.path_all, &self.path_all, len);
                                        panic!()
                                    })
                                    .0
                                    + vec
                                        .get(index as usize)
                                        .unwrap_or_else(|| {
                                            dbg!(
                                                vec,
                                                index,
                                                &self.path_all,
                                                &self.path_all,
                                                len
                                            );
                                            panic!()
                                        })
                                        .0)
                                    / two
                            } else {
                                vec.get(index as usize)
                                    .unwrap_or_else(|| {
                                        dbg!(vec, index);
                                        panic!();
                                    })
                                    .0
                            }
                        }
                    };

                    (*map).insert(key, value);
                    (*map).insert(key, Value::lossy_from(float_value));
                });
        } else {
            map_and_percentiles.iter_mut().for_each(|(map, _)| {

@@ -45,6 +45,12 @@ impl Height {
    pub fn is_safe(&self, block_count: usize) -> bool {
        **self < (block_count - NUMBER_OF_UNSAFE_BLOCKS) as u32
    }

    pub fn iter_range_inclusive(first: Height, last: Height) -> impl Iterator<Item = Height> {
        let range = (*first)..=(*last);

        range.into_iter().map(Height::new)
    }
}

impl PartialEq<u64> for Height {
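The new `iter_range_inclusive` is a thin wrapper: deref both endpoints, build a `RangeInclusive<u32>`, and wrap each yielded number back into the newtype. A standalone sketch of the same pattern with a toy Height type (not the crate's):

// Sketch of an inclusive iterator over a u32 newtype.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Height(u32);

impl Height {
    fn new(height: u32) -> Self {
        Self(height)
    }

    fn iter_range_inclusive(first: Height, last: Height) -> impl Iterator<Item = Height> {
        // ..= keeps `last` in the range, matching the "first and last block
        // of a date" use in the mining compute loop.
        (first.0..=last.0).map(Height::new)
    }
}

fn main() {
    let heights: Vec<Height> =
        Height::iter_range_inclusive(Height::new(3), Height::new(6)).collect();
    assert_eq!(heights.len(), 4); // 3, 4, 5, 6
    println!("{heights:?}");
}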
(File diff suppressed because it is too large.)

@@ -1,11 +1,59 @@
use ordered_float::OrderedFloat;

pub trait LossyFrom<T> {
    fn lossy_from(x: T) -> Self;
}

// ---
// u32
// ---

impl LossyFrom<u32> for u32 {
    #[inline(always)]
    fn lossy_from(x: u32) -> Self {
        x
    }
}

impl LossyFrom<u64> for u32 {
    #[inline(always)]
    fn lossy_from(x: u64) -> Self {
        x as u32
    }
}

impl LossyFrom<usize> for u32 {
    #[inline(always)]
    fn lossy_from(x: usize) -> Self {
        x as u32
    }
}

impl LossyFrom<f32> for u32 {
    #[inline(always)]
    fn lossy_from(x: f32) -> Self {
        x as u32
    }
}

impl LossyFrom<OrderedFloat<f32>> for u32 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        x.0 as u32
    }
}

// ---
// u64
// ---

impl LossyFrom<u32> for u64 {
    #[inline(always)]
    fn lossy_from(x: u32) -> Self {
        x as u64
    }
}

impl LossyFrom<u64> for u64 {
    #[inline(always)]
    fn lossy_from(x: u64) -> Self {

@@ -20,6 +68,20 @@ impl LossyFrom<usize> for u64 {
    }
}

impl LossyFrom<f32> for u64 {
    #[inline(always)]
    fn lossy_from(x: f32) -> Self {
        x as u64
    }
}

impl LossyFrom<OrderedFloat<f32>> for u64 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        x.0 as u64
    }
}

// ---
// usize
// ---

@@ -70,6 +132,13 @@ impl LossyFrom<f32> for f32 {
    }
}

impl LossyFrom<OrderedFloat<f32>> for f32 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        x.0
    }
}

impl LossyFrom<f64> for f32 {
    #[inline(always)]
    fn lossy_from(x: f64) -> Self {

@@ -77,6 +146,66 @@ impl LossyFrom<f64> for f32 {
    }
}

impl LossyFrom<OrderedFloat<f64>> for f32 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f64>) -> Self {
        x.0 as f32
    }
}

// ---
// OrderedFloat<f32>
// ---

impl LossyFrom<u32> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: u32) -> Self {
        OrderedFloat(x as f32)
    }
}

impl LossyFrom<u64> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: u64) -> Self {
        OrderedFloat(x as f32)
    }
}

impl LossyFrom<usize> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: usize) -> Self {
        OrderedFloat(x as f32)
    }
}

impl LossyFrom<f32> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: f32) -> Self {
        OrderedFloat(x)
    }
}

impl LossyFrom<OrderedFloat<f32>> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        x
    }
}

impl LossyFrom<f64> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: f64) -> Self {
        OrderedFloat(x as f32)
    }
}

impl LossyFrom<OrderedFloat<f64>> for OrderedFloat<f32> {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f64>) -> Self {
        OrderedFloat(x.0 as f32)
    }
}

// ---
// f64
// ---

@@ -102,9 +231,69 @@ impl LossyFrom<f32> for f64 {
    }
}

impl LossyFrom<OrderedFloat<f32>> for f64 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        x.0 as f64
    }
}

impl LossyFrom<f64> for f64 {
    #[inline(always)]
    fn lossy_from(x: f64) -> Self {
        x
    }
}

impl LossyFrom<OrderedFloat<f64>> for f64 {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f64>) -> Self {
        x.0
    }
}

// ---
// OrderedFloat<f64>
// ---

impl LossyFrom<u64> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: u64) -> Self {
        OrderedFloat(x as f64)
    }
}

impl LossyFrom<usize> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: usize) -> Self {
        OrderedFloat(x as f64)
    }
}

impl LossyFrom<f32> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: f32) -> Self {
        OrderedFloat(x as f64)
    }
}

impl LossyFrom<OrderedFloat<f32>> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f32>) -> Self {
        OrderedFloat(x.0 as f64)
    }
}

impl LossyFrom<f64> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: f64) -> Self {
        OrderedFloat(x)
    }
}

impl LossyFrom<OrderedFloat<f64>> for OrderedFloat<f64> {
    #[inline(always)]
    fn lossy_from(x: OrderedFloat<f64>) -> Self {
        x
    }
}
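All of these impls are deliberate `as`-style casts, so `lossy_from` may round or truncate; the point is a single generic entry point for "convert, accepting precision loss". A reduced, standalone illustration of that behaviour (toy re-declaration of the trait with just two impls, not the crate's full set):

// Reduced illustration of the LossyFrom idea: a by-design lossy cast
// behind one uniform interface.
trait LossyFrom<T> {
    fn lossy_from(x: T) -> Self;
}

impl LossyFrom<f32> for u32 {
    fn lossy_from(x: f32) -> Self {
        x as u32 // truncates toward zero, saturates on out-of-range values
    }
}

impl LossyFrom<u64> for f32 {
    fn lossy_from(x: u64) -> Self {
        x as f32 // large u64 values lose low-order bits
    }
}

fn main() {
    assert_eq!(u32::lossy_from(2.9_f32), 2); // fractional part dropped
    // 2^24 + 1 is not representable in f32, so the conversion rounds:
    assert_eq!(f32::lossy_from((1u64 << 24) + 1), 16_777_216.0);
    println!("lossy casts behaved as expected");
}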
@@ -3,6 +3,7 @@ mod date;
mod flamegraph;
mod log;
mod lossy;
mod percentile;
mod retry;
mod time;

@@ -11,5 +12,6 @@ pub use date::*;
pub use flamegraph::*;
pub use log::*;
pub use lossy::*;
pub use percentile::*;
pub use retry::*;
pub use time::*;

parser/src/utils/percentile.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
use std::ops::Add;

use super::LossyFrom;

pub fn get_percentile<T>(sorted: &[T], percentile: f32) -> T
where
    T: Clone + Copy + LossyFrom<f32> + Add<Output = T>,
    f32: LossyFrom<T>,
{
    let len = sorted.len();

    if len < 2 {
        T::lossy_from(f32::NAN)
    } else {
        let index = (len - 1) as f32 * percentile;

        let fract = index.fract();

        if fract != 0.0 {
            let left = *sorted.get(index as usize).unwrap();
            let right = *sorted.get(index.ceil() as usize).unwrap();

            T::lossy_from(f32::lossy_from(left + right) / 2.0)
        } else {
            *sorted.get(index as usize).unwrap()
        }
    }
}
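The new helper expects an already-sorted slice and, when the target index is fractional, averages the two neighbouring elements rather than interpolating by the fraction. A standalone f32 copy of the logic with worked numbers (illustrative only; the real function is generic over LossyFrom):

// Worked example of the neighbour-averaging rule on an already-sorted slice.
fn get_percentile_f32(sorted: &[f32], percentile: f32) -> f32 {
    let len = sorted.len();
    if len < 2 {
        return f32::NAN;
    }
    let index = (len - 1) as f32 * percentile;
    if index.fract() != 0.0 {
        // Averages the two neighbours; a linear interpolation would instead
        // weight them by the fractional part of the index.
        (sorted[index as usize] + sorted[index.ceil() as usize]) / 2.0
    } else {
        sorted[index as usize]
    }
}

fn main() {
    let sorted = [1.0, 2.0, 4.0, 8.0];
    assert_eq!(get_percentile_f32(&sorted, 0.50), 3.0); // index 1.5 -> (2 + 4) / 2
    assert_eq!(get_percentile_f32(&sorted, 0.25), 1.5); // index 0.75 -> (1 + 2) / 2, not 1.75
    assert_eq!(get_percentile_f32(&sorted, 1.00), 8.0); // index 3.0 -> exact element
    println!("percentile examples check out");
}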