brk: first commit

This commit is contained in:
nym21
2025-02-23 01:25:15 +01:00
parent 8c3f519016
commit 19cf34f9d4
266 changed files with 225 additions and 1268 deletions

117
_src/structs/address.rs Normal file
View File

@@ -0,0 +1,117 @@
use bitcoin_hashes::hash160;
use brk_parser::bitcoin::TxOut;
use super::{AddressType, Counter, U8x19, U8x31};
/// Compact, sortable key identifying an address (or address-like script).
///
/// Hash-based kinds (P2PK…P2TR) carry a split form of their 20/32-byte
/// hash/witness program: a `u16` prefix plus the remaining bytes (see
/// `Address::split_slice`). Kinds without a reusable hash carry a sequential
/// index handed out by a per-kind `Counter` instead.
#[derive(Debug, Clone, PartialEq, PartialOrd, Ord, Eq)]
pub enum Address {
    // Example of an output with an empty script:
    // https://mempool.space/tx/7bd54def72825008b4ca0f4aeff13e6be2c5fe0f23430629a9d484a1ac2a29b8
    Empty(u32),
    OpReturn(u32),
    PushOnly(u32),
    Unknown(u32),
    // Example of a bare multisig output:
    // https://mempool.space/tx/274f8be3b7b9b1a220285f5f71f61e2691dd04df9d69bb02a8b3b85f91fb1857
    MultiSig(u32),
    // 20-byte hashes split as (prefix, 19 remaining bytes).
    P2PK((u16, U8x19)),
    P2PKH((u16, U8x19)),
    P2SH((u16, U8x19)),
    P2WPKH((u16, U8x19)),
    // 32-byte witness programs split as (prefix, 31 remaining bytes).
    P2WSH((u16, U8x31)),
    P2TR((u16, U8x31)),
}
impl Address {
    /// Maps this address to its `AddressType`, discarding the payload.
    pub fn to_type(&self) -> AddressType {
        match self {
            Self::Empty(_) => AddressType::Empty,
            Self::OpReturn(_) => AddressType::OpReturn,
            Self::PushOnly(_) => AddressType::PushOnly,
            Self::Unknown(_) => AddressType::Unknown,
            Self::MultiSig(_) => AddressType::MultiSig,
            Self::P2PK(_) => AddressType::P2PK,
            Self::P2PKH(_) => AddressType::P2PKH,
            Self::P2SH(_) => AddressType::P2SH,
            Self::P2WPKH(_) => AddressType::P2WPKH,
            Self::P2WSH(_) => AddressType::P2WSH,
            Self::P2TR(_) => AddressType::P2TR,
        }
    }

    /// Classifies a transaction output's script into an `Address`.
    ///
    /// Hash-based scripts are keyed by a prefix/rest split of their hash or
    /// witness program; the remaining kinds get a fresh sequential index from
    /// the matching counter, which is incremented as a side effect.
    pub fn from(
        txout: &TxOut,
        multisig_addresses: &mut Counter,
        op_return_addresses: &mut Counter,
        push_only_addresses: &mut Counter,
        unknown_addresses: &mut Counter,
        empty_addresses: &mut Counter,
    ) -> Self {
        let script = &txout.script_pubkey;
        if script.is_p2pk() {
            // Script layout: <push len> <pubkey> OP_CHECKSIG.
            // 67-byte script holds an uncompressed 65-byte key,
            // 35-byte script a compressed 33-byte key.
            let pk = match script.as_bytes().len() {
                67 => &script.as_bytes()[1..66],
                35 => &script.as_bytes()[1..34],
                _ => unreachable!(),
            };
            // Key the raw pubkey by its HASH160 so P2PK indexes like P2PKH.
            let hash = hash160::Hash::hash(pk);
            let (prefix, rest) = Self::split_slice(&hash.as_byte_array()[..]);
            Self::P2PK((prefix, rest.into()))
        } else if script.is_p2pkh() {
            // OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
            // → hash at bytes 3..23.
            let (prefix, rest) = Self::split_slice(&script.as_bytes()[3..23]);
            Self::P2PKH((prefix, rest.into()))
        } else if script.is_p2sh() {
            // OP_HASH160 <20-byte hash> OP_EQUAL → hash at bytes 2..22.
            let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..22]);
            Self::P2SH((prefix, rest.into()))
        } else if script.is_p2wpkh() {
            // OP_0 <0x14> <20-byte program> → program at bytes 2...
            let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
            Self::P2WPKH((prefix, rest.into()))
        } else if script.is_p2wsh() {
            // OP_0 <0x20> <32-byte program>.
            let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
            Self::P2WSH((prefix, rest.into()))
        } else if script.is_p2tr() {
            // OP_1 <0x20> <32-byte x-only key>.
            let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
            Self::P2TR((prefix, rest.into()))
        } else if script.is_empty() {
            let index = empty_addresses.inner();
            empty_addresses.increment();
            Self::Empty(index)
        } else if script.is_op_return() {
            let index = op_return_addresses.inner();
            op_return_addresses.increment();
            Self::OpReturn(index)
        } else if script.is_multisig() {
            let index = multisig_addresses.inner();
            multisig_addresses.increment();
            Self::MultiSig(index)
        } else if script.is_push_only() {
            let index = push_only_addresses.inner();
            push_only_addresses.increment();
            Self::PushOnly(index)
        } else {
            Self::new_unknown(unknown_addresses)
        }
    }

    /// Allocates the next index for an unclassifiable script.
    fn new_unknown(unknown_addresses: &mut Counter) -> Address {
        let index = unknown_addresses.inner();
        unknown_addresses.increment();
        Self::Unknown(index)
    }

    /// Splits a hash/program into a 10-bit prefix (byte 0 plus the top two
    /// bits of byte 1) and the remaining bytes starting at byte 1.
    ///
    /// The two high bits of byte 1 appear in both halves — the prefix is an
    /// index key, not a partition. Panics if `slice.len() < 2`.
    fn split_slice(slice: &[u8]) -> (u16, &[u8]) {
        let prefix = ((slice[0] as u16) << 2) + ((slice[1] as u16) >> 6);
        let rest = &slice[1..];
        (prefix, rest)
    }
}

View File

@@ -0,0 +1,98 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use snkrj::{direct_repr, Storable, UnsizedStorable};
use super::{AddressType, Amount, EmptyAddressData, LiquidityClassification, Price};
/// Aggregate on-chain state for a single address.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Allocative)]
pub struct AddressData {
    pub address_type: AddressType,
    // Current balance (grows in `receive`, shrinks in `send`).
    pub amount: Amount,
    // Lifetime totals of coins sent/received.
    pub sent: Amount,
    pub received: Amount,
    // Sum of (price at receipt * amount) over coins still held; receipts add
    // to it, spends subtract the value at the spent coins' creation price.
    pub realized_cap: Price,
    // Number of currently unspent outputs owned by the address.
    pub outputs_len: u32,
}
direct_repr!(AddressData);
impl AddressData {
    /// Creates a zeroed entry for the given script type.
    pub fn new(address_type: AddressType) -> Self {
        Self {
            address_type,
            amount: Amount::ZERO,
            sent: Amount::ZERO,
            received: Amount::ZERO,
            realized_cap: Price::ZERO,
            outputs_len: 0,
        }
    }

    /// Credits `amount` received at `price`: grows the balance, the lifetime
    /// received total, the live output count and the realized cap.
    pub fn receive(&mut self, amount: Amount, price: Price) {
        self.amount += amount;
        self.received += amount;
        self.outputs_len += 1;
        self.realized_cap += price * amount;
    }

    /// Debits `amount`, valued at `previous_price` (the price when the spent
    /// coins were received) for realized-cap accounting.
    ///
    /// # Errors
    /// Fails — leaving `self` untouched — when the balance is smaller than
    /// the amount being sent.
    pub fn send(&mut self, amount: Amount, previous_price: Price) -> color_eyre::Result<()> {
        if self.amount < amount {
            return Err(eyre!("previous_amount smaller than sent amount"));
        }
        self.amount -= amount;
        self.sent += amount;
        self.outputs_len -= 1;
        self.realized_cap -= previous_price * amount;
        Ok(())
    }

    /// True when the balance is zero.
    ///
    /// # Panics
    /// Panics if a zero balance coexists with live outputs — that would be a
    /// broken bookkeeping invariant.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        if self.amount != Amount::ZERO {
            return false;
        }
        if self.outputs_len != 0 {
            unreachable!();
        }
        true
    }

    /// Rehydrates a full entry from its compact "emptied" form: the balance
    /// and realized cap restart at zero while the transfer history is kept.
    pub fn from_empty(empty: &EmptyAddressData) -> Self {
        let address_type = empty.address_type();
        let transfered = empty.transfered();
        Self {
            address_type,
            amount: Amount::ZERO,
            sent: transfered,
            received: transfered,
            realized_cap: Price::ZERO,
            outputs_len: 0,
        }
    }

    /// Derives the liquidity bucket from the lifetime sent/received totals.
    pub fn compute_liquidity_classification(&self) -> LiquidityClassification {
        LiquidityClassification::new(self.sent, self.received)
    }
}

View File

@@ -0,0 +1,9 @@
use allocative::Allocative;
/// Liquidity cohort of an address (see `LiquidityClassification`).
#[derive(Default, Allocative, Clone, Copy)]
pub enum AddressLiquidity {
    #[default]
    Illiquid,
    Liquid,
    HighlyLiquid,
}

View File

@@ -0,0 +1,69 @@
use super::{AddressData, Amount, Price, Timestamp};
/// Realized activity of a single address within one block, next to a snapshot
/// of the address state taken before the block was applied.
#[derive(Debug)]
pub struct AddressRealizedData {
    // State of the address before this block's transactions.
    pub initial_address_data: AddressData,
    pub received: Amount,
    pub sent: Amount,
    // Realized P&L: spend value at current price vs. at creation price.
    pub profit: Price,
    pub loss: Price,
    // Spend values at current price ("created") and creation price
    // ("destroyed"); the "adjusted" variants only count spends whose previous
    // output is at least one hour old (see `send`).
    pub value_created: Price,
    pub adjusted_value_created: Price,
    pub value_destroyed: Price,
    pub adjusted_value_destroyed: Price,
    pub utxos_created: u32,
    pub utxos_destroyed: u32,
}
impl AddressRealizedData {
    /// Zeroed accumulator keeping a snapshot of the pre-block address state.
    pub fn default(initial_address_data: &AddressData) -> Self {
        Self {
            initial_address_data: *initial_address_data,
            received: Amount::ZERO,
            sent: Amount::ZERO,
            profit: Price::ZERO,
            loss: Price::ZERO,
            value_created: Price::ZERO,
            adjusted_value_created: Price::ZERO,
            value_destroyed: Price::ZERO,
            adjusted_value_destroyed: Price::ZERO,
            utxos_created: 0,
            utxos_destroyed: 0,
        }
    }

    /// Records an output credited to the address in this block.
    pub fn receive(&mut self, amount: Amount) {
        self.received += amount;
        self.utxos_created += 1;
    }

    /// Records an output spent by the address in this block, accumulating
    /// value flows plus realized profit or loss against the creation price.
    /// The "adjusted" totals skip spends whose previous output is less than
    /// one hour old, filtering out rapid churn.
    pub fn send(
        &mut self,
        amount: Amount,
        current_price: Price,
        previous_price: Price,
        current_timestamp: Timestamp,
        previous_timestamp: Timestamp,
    ) {
        self.sent += amount;
        self.utxos_destroyed += 1;

        let created = current_price * amount;
        let destroyed = previous_price * amount;
        self.value_created += created;
        self.value_destroyed += destroyed;

        if previous_timestamp.older_by_1h_plus_than(current_timestamp) {
            self.adjusted_value_created += created;
            self.adjusted_value_destroyed += destroyed;
        }

        if created >= destroyed {
            self.profit += created - destroyed;
        } else {
            self.loss += destroyed - created;
        }
    }
}

View File

@@ -0,0 +1,32 @@
use allocative::Allocative;
use super::Amount;
/// Balance cohort of an address, from empty wallets up to the largest
/// holders; the satoshi thresholds live in `from_amount`.
#[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Allocative)]
pub enum AddressSize {
    Empty,
    Plankton,
    Shrimp,
    Crab,
    Fish,
    Shark,
    Whale,
    Humpback,
    Megalodon,
}
impl AddressSize {
    /// Buckets a balance into its cohort; every boundary is a power of ten in
    /// satoshis (0.1 BTC, 1 BTC, 10 BTC, …, 100 000 BTC).
    pub fn from_amount(amount: Amount) -> Self {
        const ONE_BTC: u64 = 100_000_000;
        let sats = amount.to_sat();
        if sats == 0 {
            Self::Empty
        } else if sats < ONE_BTC / 10 {
            Self::Plankton
        } else if sats < ONE_BTC {
            Self::Shrimp
        } else if sats < 10 * ONE_BTC {
            Self::Crab
        } else if sats < 100 * ONE_BTC {
            Self::Fish
        } else if sats < 1_000 * ONE_BTC {
            Self::Shark
        } else if sats < 10_000 * ONE_BTC {
            Self::Whale
        } else if sats < 100_000 * ONE_BTC {
            Self::Humpback
        } else {
            Self::Megalodon
        }
    }
}

View File

@@ -0,0 +1,12 @@
use allocative::Allocative;
use super::{AddressLiquidity, AddressSize, AddressType};
/// Filter describing which subset of addresses a dataset aggregates over:
/// everything, one script type, one balance cohort, or one liquidity bucket.
#[derive(Default, Allocative)]
pub enum AddressSplit {
    #[default]
    All,
    Type(AddressType),
    Size(AddressSize),
    Liquidity(AddressLiquidity),
}

View File

@@ -0,0 +1,42 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
// https://unchained.com/blog/bitcoin-address-types-compared/
// https://unchained.com/blog/bitcoin-address-types-compared/
/// Script/address kind.
///
/// Declaration order matters: the implicit discriminants (0..=10) are what
/// `as u64` produces and what `TryFrom<u64>` decodes, so reordering variants
/// would silently corrupt persisted data.
#[derive(
    Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Default, Encode, Decode, Allocative,
)]
pub enum AddressType {
    Empty,
    OpReturn,
    PushOnly,
    #[default]
    Unknown,
    MultiSig,
    P2PK,
    P2PKH,
    P2SH,
    P2WPKH,
    P2WSH,
    P2TR,
}
impl TryFrom<u64> for AddressType {
type Error = ();
fn try_from(u: u64) -> Result<Self, Self::Error> {
match u {
x if x == AddressType::Empty as u64 => Ok(AddressType::Empty),
x if x == AddressType::OpReturn as u64 => Ok(AddressType::OpReturn),
x if x == AddressType::PushOnly as u64 => Ok(AddressType::PushOnly),
x if x == AddressType::Unknown as u64 => Ok(AddressType::Unknown),
x if x == AddressType::MultiSig as u64 => Ok(AddressType::MultiSig),
x if x == AddressType::P2PK as u64 => Ok(AddressType::P2PK),
x if x == AddressType::P2PKH as u64 => Ok(AddressType::P2PKH),
x if x == AddressType::P2SH as u64 => Ok(AddressType::P2SH),
x if x == AddressType::P2WPKH as u64 => Ok(AddressType::P2WPKH),
x if x == AddressType::P2WSH as u64 => Ok(AddressType::P2WSH),
x if x == AddressType::P2TR as u64 => Ok(AddressType::P2TR),
_ => Err(()),
}
}
}

125
_src/structs/amount.rs Normal file
View File

@@ -0,0 +1,125 @@
use std::{
iter::Sum,
ops::{Add, AddAssign, Mul, Sub, SubAssign},
};
use allocative::{Allocative, Visitor};
use bincode::{
BorrowDecode, Decode, Encode,
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
};
use brk_parser::bitcoin::Amount as BitcoinAmount;
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use snkrj::{Storable, UnsizedStorable, direct_repr};
use super::Height;
/// Satoshi amount — a thin wrapper over `bitcoin::Amount` adding the codec,
/// arithmetic and storage traits the datasets need.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Deref, DerefMut, Default, Serialize, Deserialize)]
pub struct Amount(BitcoinAmount);
direct_repr!(Amount);

impl Amount {
    pub const ZERO: Self = Self(BitcoinAmount::ZERO);
    // Satoshis per bitcoin, as floats for ratio/price math.
    pub const ONE_BTC_F32: f32 = 100_000_000.0;
    pub const ONE_BTC_F64: f64 = 100_000_000.0;

    /// Wraps an existing `bitcoin::Amount` without conversion.
    #[inline(always)]
    pub fn wrap(amount: BitcoinAmount) -> Self {
        Self(amount)
    }

    /// Builds an amount from a raw satoshi count.
    #[inline(always)]
    pub fn from_sat(sats: u64) -> Self {
        Self(BitcoinAmount::from_sat(sats))
    }
}
// Arithmetic operates on raw satoshi counts and inherits plain `u64`
// semantics (overflow/underflow checked in debug builds).
impl Add for Amount {
    type Output = Amount;
    fn add(self, rhs: Amount) -> Self::Output {
        Amount::from_sat(self.to_sat() + rhs.to_sat())
    }
}
impl AddAssign for Amount {
    fn add_assign(&mut self, rhs: Self) {
        *self = Amount::from_sat(self.to_sat() + rhs.to_sat());
    }
}
impl Sub for Amount {
    type Output = Amount;
    fn sub(self, rhs: Amount) -> Self::Output {
        Amount::from_sat(self.to_sat() - rhs.to_sat())
    }
}
impl SubAssign for Amount {
    fn sub_assign(&mut self, rhs: Self) {
        *self = Amount::from_sat(self.to_sat() - rhs.to_sat());
    }
}
impl Mul<Amount> for Amount {
    type Output = Amount;
    // NOTE(review): multiplies two satoshi counts (dimensionally sat²) —
    // presumably one side acts as a dimensionless factor; confirm callers.
    fn mul(self, rhs: Amount) -> Self::Output {
        Amount::from_sat(self.to_sat() * rhs.to_sat())
    }
}
impl Mul<u64> for Amount {
    type Output = Amount;
    fn mul(self, rhs: u64) -> Self::Output {
        Amount::from_sat(self.to_sat() * rhs)
    }
}
impl Mul<Height> for Amount {
    type Output = Amount;
    // Scales by a block height used as a plain integer factor.
    fn mul(self, rhs: Height) -> Self::Output {
        Amount::from_sat(self.to_sat() * *rhs as u64)
    }
}
impl Sum for Amount {
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let sats = iter.map(|amt| amt.to_sat()).sum();
        Amount::from_sat(sats)
    }
}
// On the wire an `Amount` is just its `u64` satoshi value.
impl Encode for Amount {
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        Encode::encode(&self.to_sat(), encoder)
    }
}
impl Decode for Amount {
    fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
        let sats: u64 = Decode::decode(decoder)?;
        Ok(Amount::from_sat(sats))
    }
}
impl<'de> BorrowDecode<'de> for Amount {
    fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
        let sats: u64 = BorrowDecode::borrow_decode(decoder)?;
        Ok(Amount::from_sat(sats))
    }
}
impl Allocative for Amount {
    // Plain Copy type: report its size with no interior pointers to chase.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}

23
_src/structs/any_map.rs Normal file
View File

@@ -0,0 +1,23 @@
use std::path::Path;
use serde_json::Value;
use crate::io::Serialization;
use super::{Config, MapKind, MapPath};
/// Object-safe interface shared by every dataset map, letting the export
/// machinery drive heterogeneous maps uniformly.
pub trait AnyMap {
    /// Path backing this map on disk.
    fn path(&self) -> &Path;
    fn path_parent(&self) -> &Path;
    /// Path of the exported "last value" file, when one exists.
    fn path_last(&self) -> &Option<MapPath>;
    /// Most recent value as JSON, if any.
    fn last_value(&self) -> Option<Value>;
    /// On-disk serialization format (binary or JSON).
    fn serialization(&self) -> Serialization;
    fn type_name(&self) -> &str;
    fn key_name(&self) -> &str;
    // Export lifecycle hooks, called in order: pre_export → export → post_export.
    fn pre_export(&mut self);
    fn export(&self) -> color_eyre::Result<()>;
    fn post_export(&mut self);
    fn delete_files(&self);
    fn kind(&self) -> MapKind;
    /// Stable identifier derived from the configuration.
    fn id(&self, config: &Config) -> String;
}

31
_src/structs/array.rs Normal file
View File

@@ -0,0 +1,31 @@
use std::fmt::Debug;
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use snkrj::{direct_repr, Storable, UnsizedStorable};
/// Fixed 19-byte array — the tail of a split 20-byte hash (see `Address`).
#[derive(
    Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Default, Copy, Allocative,
)]
pub struct U8x19([u8; 19]);
direct_repr!(U8x19);

impl From<&[u8]> for U8x19 {
    /// Copies the slice into a fresh array; panics unless it is exactly
    /// 19 bytes long.
    fn from(slice: &[u8]) -> Self {
        let mut bytes = [0u8; 19];
        bytes.copy_from_slice(slice);
        Self(bytes)
    }
}

/// Fixed 31-byte array — the tail of a split 32-byte witness program.
#[derive(
    Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Default, Copy, Allocative,
)]
pub struct U8x31([u8; 31]);
direct_repr!(U8x31);

impl From<&[u8]> for U8x31 {
    /// Copies the slice into a fresh array; panics unless it is exactly
    /// 31 bytes long.
    fn from(slice: &[u8]) -> Self {
        let mut bytes = [0u8; 31];
        bytes.copy_from_slice(slice);
        Self(bytes)
    }
}

347
_src/structs/bi_map.rs Normal file
View File

@@ -0,0 +1,347 @@
use std::{
iter::Sum,
ops::{Add, Div, Mul, Sub},
};
use allocative::Allocative;
use crate::utils::{LossyFrom, TARGET_BLOCKS_PER_DAY};
use super::{
AnyDateMap, AnyHeightMap, AnyMap, Date, DateMap, Height, HeightMap, MapKind, MapPath, MapValue,
};
/// A dataset stored twice: once indexed by block height and once by date.
/// The `multi_insert_*` helpers mirror every operation onto both halves.
#[derive(Allocative, Debug)]
pub struct BiMap<Value>
where
    Value: MapValue,
{
    pub height: HeightMap<Value>,
    pub date: DateMap<Value>,
}
impl<Value> BiMap<Value>
where
    Value: MapValue,
{
    /// Creates a binary-serialized height/date pair at `path`.
    // NOTE(review): the trailing bool differs between the halves (true for
    // height, false for date) — confirm its meaning against `_new_bin`.
    pub fn new_bin(version: u32, kind: MapKind, path: &MapPath) -> Self {
        Self {
            height: HeightMap::_new_bin(version, kind, path, true),
            date: DateMap::_new_bin(version, kind, path, false),
        }
    }

    /// Creates a JSON-serialized height/date pair at `path`.
    pub fn new_json(version: u32, kind: MapKind, path: &MapPath) -> Self {
        Self {
            height: HeightMap::new_json(version, kind, path, true),
            date: DateMap::new_json(version, kind, path, false),
        }
    }

    // pub fn date_insert_sum_range(&mut self, date: Date, date_blocks_range: &RangeInclusive<u32>)
    // where
    //     Value: Sum,
    // {
    //     self.date
    //         .insert(date, self.height.sum_range(date_blocks_range));
    // }

    // pub fn multi_date_insert_sum_range(
    //     &mut self,
    //     dates: &[Date],
    //     first_height: &mut DateMap<Height>,
    //     last_height: &mut DateMap<Height>,
    // ) where
    //     Value: Sum,
    // {
    //     dates.iter().for_each(|date| {
    //         let first_height = first_height.get_or_import(date).unwrap();
    //         let last_height = last_height.get_or_import(date).unwrap();
    //         let range = (*first_height)..=(*last_height);
    //         self.date.insert(*date, self.height.sum_range(&range));
    //     })
    // }

    /// Writes the same constant at every given height and date.
    pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: Value) {
        self.height.multi_insert_const(heights, constant);
        self.date.multi_insert_const(dates, constant);
    }

    /// Maps `source` through `transform` into both halves.
    // NOTE(review): the `Value: Div` bound looks unused here — candidate for
    // removal.
    pub fn multi_insert_simple_transform<F, V>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<V>,
        transform: &F,
    ) where
        Value: Div<Output = Value>,
        F: Fn(V) -> Value,
        V: MapValue,
    {
        self.height
            .multi_insert_simple_transform(heights, &mut source.height, |v, _| transform(v));
        self.date
            .multi_insert_simple_transform(dates, &mut source.date, |v, _| transform(v));
    }

    /// Element-wise `added + adder` into both halves.
    #[allow(unused)]
    pub fn multi_insert_add<A, B>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        added: &mut BiMap<A>,
        adder: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Add<Output = Value>,
    {
        self.height
            .multi_insert_add(heights, &mut added.height, &mut adder.height);
        self.date
            .multi_insert_add(dates, &mut added.date, &mut adder.date);
    }

    /// Element-wise `subtracted - subtracter` into both halves.
    pub fn multi_insert_subtract<A, B>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        subtracted: &mut BiMap<A>,
        subtracter: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Sub<Output = Value>,
    {
        self.height
            .multi_insert_subtract(heights, &mut subtracted.height, &mut subtracter.height);
        self.date
            .multi_insert_subtract(dates, &mut subtracted.date, &mut subtracter.date);
    }

    /// Element-wise `multiplied * multiplier` into both halves.
    pub fn multi_insert_multiply<A, B>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        multiplied: &mut BiMap<A>,
        multiplier: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Mul<Output = Value>,
    {
        self.height
            .multi_insert_multiply(heights, &mut multiplied.height, &mut multiplier.height);
        self.date
            .multi_insert_multiply(dates, &mut multiplied.date, &mut multiplier.date);
    }

    /// Element-wise `divided / divider` into both halves.
    pub fn multi_insert_divide<A, B>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        divided: &mut BiMap<A>,
        divider: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self.height
            .multi_insert_divide(heights, &mut divided.height, &mut divider.height);
        self.date
            .multi_insert_divide(dates, &mut divided.date, &mut divider.date);
    }

    /// Like `multi_insert_divide` but scaled to a percentage.
    pub fn multi_insert_percentage<A, B>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        divided: &mut BiMap<A>,
        divider: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        Value: LossyFrom<A> + LossyFrom<B>,
        Value: Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self.height
            .multi_insert_percentage(heights, &mut divided.height, &mut divider.height);
        self.date
            .multi_insert_percentage(dates, &mut divided.date, &mut divider.date);
    }

    /// Running cumulative sum of `source` into both halves.
    #[allow(unused)]
    pub fn multi_insert_cumulative<K>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<K>,
    ) where
        K: MapValue,
        Value: LossyFrom<K>,
        Value: Add<Output = Value> + Sub<Output = Value>,
    {
        self.height
            .multi_insert_cumulative(heights, &mut source.height);
        self.date.multi_insert_cumulative(dates, &mut source.date);
    }

    /// Simple moving average over `days`; the height half converts the window
    /// to blocks via `TARGET_BLOCKS_PER_DAY`.
    pub fn multi_insert_simple_average<K>(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<K>,
        days: usize,
    ) where
        Value: Into<f32> + From<f32>,
        K: MapValue + Sum,
        f32: LossyFrom<K>,
    {
        self.height.multi_insert_simple_average(
            heights,
            &mut source.height,
            TARGET_BLOCKS_PER_DAY * days,
        );
        self.date
            .multi_insert_simple_average(dates, &mut source.date, days);
    }

    /// Net change over a `days` window (window in blocks for the height half).
    pub fn multi_insert_net_change(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<Value>,
        days: usize,
    ) where
        Value: Sub<Output = Value>,
    {
        self.height.multi_insert_net_change(
            heights,
            &mut source.height,
            TARGET_BLOCKS_PER_DAY * days,
        );
        self.date
            .multi_insert_net_change(dates, &mut source.date, days);
    }

    /// Rolling median over `days` (or all-time when `None`).
    #[allow(unused)]
    pub fn multi_insert_median(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<Value>,
        days: Option<usize>,
    ) where
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        self.height.multi_insert_median(
            heights,
            &mut source.height,
            days.map(|days| TARGET_BLOCKS_PER_DAY * days),
        );
        self.date.multi_insert_median(dates, &mut source.date, days);
    }

    /// Rolling percentiles: each `(map, percentile)` pair receives its own
    /// percentile series, split into the matching date/height halves first.
    #[allow(unused)]
    pub fn multi_insert_percentile(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        mut map_and_percentiles: Vec<(&mut BiMap<Value>, f32)>,
        days: Option<usize>,
    ) where
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        let mut date_map_and_percentiles = vec![];
        let mut height_map_and_percentiles = vec![];
        map_and_percentiles
            .iter_mut()
            .for_each(|(map, percentile)| {
                date_map_and_percentiles.push((&mut map.date, *percentile));
                height_map_and_percentiles.push((&mut map.height, *percentile));
            });
        self.height.multi_insert_percentile(
            heights,
            height_map_and_percentiles,
            days.map(|days| TARGET_BLOCKS_PER_DAY * days),
        );
        self.date
            .multi_insert_percentile(dates, date_map_and_percentiles, days);
    }

    /// Running maximum of `source` into both halves.
    pub fn multi_insert_max(
        &mut self,
        heights: &[Height],
        dates: &[Date],
        source: &mut BiMap<Value>,
    ) where
        Value: PartialOrd,
    {
        self.height.multi_insert_max(heights, &mut source.height);
        self.date.multi_insert_max(dates, &mut source.date);
    }

    /// Both halves share the same kind; the date half is the source of truth.
    pub fn kind(&self) -> MapKind {
        self.date.kind()
    }
}
/// Object-safe view over a `BiMap`, exposing its two halves through the
/// type-erased `AnyMap`/`AnyHeightMap`/`AnyDateMap` interfaces.
pub trait AnyBiMap {
    #[allow(unused)]
    fn as_any_map(&self) -> Vec<&(dyn AnyMap + Send + Sync)>;
    fn as_any_mut_map(&mut self) -> Vec<&mut dyn AnyMap>;
    fn get_height(&self) -> &(dyn AnyHeightMap + Send + Sync);
    #[allow(unused)]
    fn get_mut_height(&mut self) -> &mut dyn AnyHeightMap;
    fn get_date(&self) -> &(dyn AnyDateMap + Send + Sync);
    #[allow(unused)]
    fn get_mut_date(&mut self) -> &mut dyn AnyDateMap;
}
impl<T> AnyBiMap for BiMap<T>
where
    T: MapValue,
{
    // Date half first, height half second — keep the order consistent with
    // as_any_mut_map.
    fn as_any_map(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        vec![self.date.as_any_map(), self.height.as_any_map()]
    }
    fn as_any_mut_map(&mut self) -> Vec<&mut dyn AnyMap> {
        vec![self.date.as_any_mut_map(), self.height.as_any_mut_map()]
    }
    fn get_height(&self) -> &(dyn AnyHeightMap + Send + Sync) {
        &self.height
    }
    fn get_mut_height(&mut self) -> &mut dyn AnyHeightMap {
        &mut self.height
    }
    fn get_date(&self) -> &(dyn AnyDateMap + Send + Sync) {
        &self.date
    }
    fn get_mut_date(&mut self) -> &mut dyn AnyDateMap {
        &mut self.date
    }
}

View File

@@ -0,0 +1,42 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use super::{Amount, Height, Price, Timestamp};
/// Per-block accounting: metadata of the block plus the coins created at this
/// block that are still unspent.
#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct BlockData {
    pub height: Height,
    // Price at the time of the block.
    pub price: Price,
    pub timestamp: Timestamp,
    // Total value of this block's outputs that remain unspent.
    pub amount: Amount,
    // Count of this block's outputs that remain unspent.
    pub utxos: u32,
}
impl BlockData {
    /// Fresh per-block entry with no coins attached yet.
    pub fn new(height: Height, price: Price, timestamp: Timestamp) -> Self {
        Self {
            height,
            price,
            timestamp,
            amount: Amount::ZERO,
            utxos: 0,
        }
    }

    /// Removes a spent output that was created in this block.
    ///
    /// # Panics
    /// Panics if the block tracks fewer coins — or no outputs at all — than
    /// the spend requires; both are broken bookkeeping invariants. The checks
    /// run before any mutation so a panic cannot leave the entry half-updated
    /// (previously `utxos` was decremented before the amount was validated).
    pub fn send(&mut self, amount: Amount) {
        if self.utxos == 0 || self.amount < amount {
            unreachable!();
        }
        self.utxos -= 1;
        self.amount -= amount;
    }

    /// Registers a newly created output of this block.
    pub fn receive(&mut self, amount: Amount) {
        self.utxos += 1;
        self.amount += amount;
    }
}

View File

@@ -0,0 +1,17 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
/// Addresses a block as (index of its day, index of the block within that
/// day).
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Copy, Encode, Decode, Allocative)]
pub struct BlockPath {
    pub date_index: u16,
    pub block_index: u16,
}

impl BlockPath {
    pub fn new(date_index: u16, block_index: u16) -> Self {
        Self {
            date_index,
            block_index,
        }
    }
}

326
_src/structs/config.rs Normal file
View File

@@ -0,0 +1,326 @@
use std::{
fs::{self},
mem,
path::{Path, PathBuf},
};
use brk_parser::bitcoincore_rpc::Auth;
use clap::Parser;
use color_eyre::eyre::eyre;
use log::info;
use serde::{Deserialize, Serialize};
use crate::io::JSON_EXTENSION;
use super::MapPath;
/// Runtime configuration, merged from the saved `~/.kibo/config.toml` and the
/// command line. CLI values win; fields documented as "saved" are persisted
/// back to disk by `import`, the rest apply to the current run only.
#[derive(Parser, Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
#[command(version, about, long_about = None)]
pub struct Config {
    /// Bitcoin data directory path, saved
    #[arg(long, value_name = "PATH")]
    bitcoindir: Option<String>,
    /// Kibo output directory path, saved
    #[arg(long, value_name = "PATH")]
    kibodir: Option<String>,
    /// Bitcoin RPC ip, default: localhost, saved
    #[arg(long, value_name = "IP")]
    rpcconnect: Option<String>,
    /// Bitcoin RPC port, default: 8332, saved
    #[arg(long, value_name = "PORT")]
    rpcport: Option<u16>,
    /// Bitcoin RPC cookie file, default: --bitcoindir/.cookie, saved
    #[arg(long, value_name = "PATH")]
    rpccookiefile: Option<String>,
    /// Bitcoin RPC username, saved
    #[arg(long, value_name = "USERNAME")]
    rpcuser: Option<String>,
    /// Bitcoin RPC password, saved
    #[arg(long, value_name = "PASSWORD")]
    rpcpassword: Option<String>,
    /// Delay between runs, default: 0, saved
    #[arg(long, value_name = "SECONDS")]
    delay: Option<u64>,
    /// Disable the parser, not saved
    #[serde(default)]
    #[arg(long, default_value_t = false)]
    no_parser: bool,
    /// Disable the server, not saved
    #[serde(default)]
    #[arg(long, default_value_t = false)]
    no_server: bool,
    /// Run without saving, not saved
    #[serde(default)]
    #[arg(long, default_value_t = false)]
    dry_run: bool,
    /// Record ram usage, not saved
    #[serde(default)]
    #[arg(long, default_value_t = false)]
    record_ram_usage: bool,
    /// Recompute all computed datasets, not saved
    #[serde(default)]
    #[arg(long, default_value_t = false)]
    recompute_computed: bool,
}
impl Config {
    pub const DATASET_DIR_NAME: &str = "datasets";
    pub const DATABASES_DIR_NAME: &str = "databases";

    /// Loads the saved config, overlays CLI arguments, validates, persists the
    /// "saved" fields back to `~/.kibo/config.toml`, then logs the result.
    ///
    /// # Panics
    /// Panics if a CLI field was neither consumed nor a known run-only flag —
    /// a guard against silently ignored options.
    pub fn import() -> color_eyre::Result<Self> {
        let path = Self::path_dot_kibo();
        // Best effort: the directory may already exist.
        let _ = fs::create_dir_all(&path);
        let path = path.join("config.toml");
        let mut config_saved = Self::read(&path);
        let mut config_args = Config::parse();
        // CLI values override saved ones; `take` drains config_args so the
        // "fully consumed" check at the bottom can compare against default().
        if let Some(bitcoindir) = config_args.bitcoindir.take() {
            config_saved.bitcoindir = Some(bitcoindir);
        }
        if let Some(kibodir) = config_args.kibodir.take() {
            config_saved.kibodir = Some(kibodir);
        }
        if let Some(rpcconnect) = config_args.rpcconnect.take() {
            config_saved.rpcconnect = Some(rpcconnect);
        }
        if let Some(rpcport) = config_args.rpcport.take() {
            config_saved.rpcport = Some(rpcport);
        }
        if let Some(rpccookiefile) = config_args.rpccookiefile.take() {
            config_saved.rpccookiefile = Some(rpccookiefile);
        }
        if let Some(rpcuser) = config_args.rpcuser.take() {
            config_saved.rpcuser = Some(rpcuser);
        }
        if let Some(rpcpassword) = config_args.rpcpassword.take() {
            config_saved.rpcpassword = Some(rpcpassword);
        }
        if let Some(delay) = config_args.delay.take() {
            config_saved.delay = Some(delay);
        }
        // if let Some(max_ram) = config_args.max_ram.take() {
        //     config_saved.max_ram = Some(max_ram);
        // }
        // Done importing
        let mut config = config_saved;
        config.check();
        // Persist only the "saved" fields; run-only flags are applied after.
        config.write(&path)?;
        config.no_parser = mem::take(&mut config_args.no_parser);
        config.no_server = mem::take(&mut config_args.no_server);
        config.dry_run = mem::take(&mut config_args.dry_run);
        config.record_ram_usage = mem::take(&mut config_args.record_ram_usage);
        config.recompute_computed = mem::take(&mut config_args.recompute_computed);
        info!("Configuration {{");
        info!("  bitcoindir: {:?}", config.bitcoindir);
        info!("  kibodir: {:?}", config.kibodir);
        info!("  rpcconnect: {:?}", config.rpcconnect);
        info!("  rpcport: {:?}", config.rpcport);
        info!("  rpccookiefile: {:?}", config.rpccookiefile);
        info!("  rpcuser: {:?}", config.rpcuser);
        info!("  rpcpassword: {:?}", config.rpcpassword);
        info!("  delay: {:?}", config.delay);
        // info!("  max_ram: {:?}", config.max_ram);
        info!("  parser: {:?}", config.parser());
        info!("  server: {:?}", config.server());
        info!("  dry_run: {:?}", config.dry_run());
        info!("  record_ram_usage: {:?}", config.record_ram_usage());
        info!("  recompute_computed: {:?}", config.recompute_computed());
        info!("}}");
        if config_args != Config::default() {
            dbg!(config_args);
            panic!("Didn't consume the full config")
        }
        Ok(config)
    }

    /// Validates the user-facing setup; prints a hint and exits the process
    /// (code 1) on failure instead of returning an error.
    fn check(&self) {
        if self.bitcoindir.is_none() {
            println!(
                "You need to set the --bitcoindir parameter at least once to run the parser.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        } else if !self.path_bitcoindir().is_dir() {
            println!(
                "Given --bitcoindir parameter doesn't seem to be a valid directory path.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        }
        if self.kibodir.is_none() {
            println!(
                "You need to set the --kibodir parameter at least once to run the parser.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        } else if !self.path_kibodir().is_dir() {
            println!(
                "Given --kibodir parameter doesn't seem to be a valid directory path.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        }
        // NOTE(review): this re-checks path_bitcoindir().is_dir(), already
        // verified above — looks redundant.
        let path = self.path_bitcoindir();
        if !path.is_dir() {
            println!("Expect path '{:#?}' to be a directory.", path);
            std::process::exit(1);
        }
        if self.to_rpc_auth().is_err() {
            println!(
                "No way found to authenticate the RPC client, please either set --rpccookiefile or --rpcuser and --rpcpassword.\nRun the program with '-h' for help."
            );
            std::process::exit(1);
        }
    }

    /// Reads the saved config, falling back to defaults when the file is
    /// missing or unparsable.
    fn read(path: &Path) -> Self {
        fs::read_to_string(path).map_or(Config::default(), |contents| {
            toml::from_str(&contents).unwrap_or_default()
        })
    }

    fn write(&self, path: &Path) -> std::io::Result<()> {
        fs::write(path, toml::to_string(self).unwrap())
    }

    /// RPC authentication: the cookie file wins over user/password.
    pub fn to_rpc_auth(&self) -> color_eyre::Result<Auth> {
        let cookie = self.path_cookiefile();
        if cookie.is_file() {
            Ok(Auth::CookieFile(cookie))
        } else if self.rpcuser.is_some() && self.rpcpassword.is_some() {
            Ok(Auth::UserPass(
                self.rpcuser.clone().unwrap(),
                self.rpcpassword.clone().unwrap(),
            ))
        } else {
            Err(eyre!("Failed to find correct auth"))
        }
    }

    pub fn rpcconnect(&self) -> Option<&String> {
        self.rpcconnect.as_ref()
    }
    pub fn rpcport(&self) -> Option<u16> {
        self.rpcport
    }
    pub fn delay(&self) -> Option<u64> {
        self.delay
    }
    pub fn dry_run(&self) -> bool {
        self.dry_run
    }
    pub fn record_ram_usage(&self) -> bool {
        self.record_ram_usage
    }
    pub fn recompute_computed(&self) -> bool {
        self.recompute_computed
    }

    // Panics if the field is unset — `check()` guarantees it is by the time
    // these are called.
    pub fn path_bitcoindir(&self) -> PathBuf {
        Self::fix_user_path(self.bitcoindir.as_ref().unwrap().as_ref())
    }
    pub fn path_kibodir(&self) -> PathBuf {
        Self::fix_user_path(self.kibodir.as_ref().unwrap().as_ref())
    }
    fn path_cookiefile(&self) -> PathBuf {
        self.rpccookiefile.as_ref().map_or_else(
            || self.path_bitcoindir().join(".cookie"),
            |p| Self::fix_user_path(p.as_str()),
        )
    }

    /// Expands a leading `~` or `$HOME` into the HOME directory.
    ///
    /// # Panics
    /// Panics when the HOME environment variable is unset.
    // NOTE(review): a prefix like "~user/..." also matches "~" and would be
    // mis-expanded — confirm that form never occurs in practice.
    fn fix_user_path(path: &str) -> PathBuf {
        let fix = move |pattern: &str| {
            if path.starts_with(pattern) {
                let path = &path.replace(&format!("{pattern}/"), "").replace(pattern, "");
                let home = std::env::var("HOME").unwrap();
                Some(Path::new(&home).join(path))
            } else {
                None
            }
        };
        fix("~").unwrap_or_else(|| fix("$HOME").unwrap_or_else(|| PathBuf::from(&path)))
    }

    pub fn path_datasets(&self) -> MapPath {
        MapPath::from(self.path_kibodir().join(Self::DATASET_DIR_NAME))
    }
    pub fn path_datasets_last_values(&self) -> MapPath {
        self.path_datasets().join(&format!("last.{JSON_EXTENSION}"))
    }
    pub fn path_price(&self) -> MapPath {
        MapPath::from(self.path_kibodir().join("price"))
    }
    pub fn path_databases(&self) -> PathBuf {
        self.path_kibodir().join(Self::DATABASES_DIR_NAME)
    }
    pub fn path_states(&self) -> PathBuf {
        self.path_kibodir().join("states")
    }
    pub fn path_inputs(&self) -> PathBuf {
        self.path_kibodir().join("inputs")
    }
    fn path_dot_kibo() -> PathBuf {
        let home = std::env::var("HOME").unwrap();
        Path::new(&home).join(".kibo")
    }
    pub fn path_log() -> PathBuf {
        Self::path_dot_kibo().join("log")
    }

    // Run-only flags expressed positively.
    pub fn parser(&self) -> bool {
        !self.no_parser
    }
    pub fn server(&self) -> bool {
        !self.no_server
    }
}

31
_src/structs/counter.rs Normal file
View File

@@ -0,0 +1,31 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
/// Simple u32 counter used to hand out sequential indexes.
#[derive(
    Debug, Deref, DerefMut, Default, Clone, Copy, Encode, Decode, Serialize, Deserialize, Allocative,
)]
pub struct Counter(u32);

impl Counter {
    /// Adds one; overflow follows plain u32 arithmetic (checked in debug
    /// builds).
    #[inline(always)]
    pub fn increment(&mut self) {
        self.0 += 1;
    }
    /// Subtracts one; underflow below zero likewise follows plain u32
    /// arithmetic.
    #[inline(always)]
    pub fn decrement(&mut self) {
        self.0 -= 1;
    }
    /// Resets back to zero.
    #[inline(always)]
    pub fn reset(&mut self) {
        self.0 = 0;
    }
    /// Current value.
    #[inline(always)]
    pub fn inner(&self) -> u32 {
        self.0
    }
}

166
_src/structs/date.rs Normal file
View File

@@ -0,0 +1,166 @@
use std::{cmp::Ordering, fmt, str::FromStr};
use allocative::{Allocative, Visitor};
use bincode::{
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use chrono::{Datelike, Days, NaiveDate, NaiveDateTime};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use crate::utils::ONE_DAY_IN_S;
use super::{DateMapChunkId, MapKey, Timestamp};
// Number of most recent dates treated as "unsafe" (re-importable); see
// `MapKey::to_first_unsafe` below.
const NUMBER_OF_UNSAFE_DATES: usize = 2;
// Sanity bounds used by `is_out_of_bounds`: Bitcoin launched in 2009.
const MIN_YEAR: i32 = 2009;
const APPROX_MAX_YEAR: i32 = 2100;

/// Calendar-day key for date-indexed maps — a thin wrapper over
/// `chrono::NaiveDate`.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct Date(NaiveDate);
impl Date {
    pub fn wrap(date: NaiveDate) -> Self {
        Self(date)
    }

    /// Today's date in UTC.
    pub fn today() -> Self {
        Self(chrono::offset::Utc::now().date_naive())
    }

    /// Whole days between `self` and an `older` date.
    // NOTE(review): assumes `older <= self`; a newer argument yields a
    // negative day count that wraps through the `as u32` cast — confirm
    // callers always pass an older date.
    pub fn difference_in_days_between(&self, older: Self) -> u32 {
        (**self - *older).num_days() as u32
    }

    /// Midnight UTC of this date as a unix timestamp.
    pub fn to_timestamp(self) -> Timestamp {
        Timestamp::from(NaiveDateTime::from(*self).and_utc().timestamp() as u32)
    }

    /// Returns value between 0.0 and 1.0 depending on its completion
    ///
    /// Any date before today (utc) will return 1.0
    ///
    /// Any date after today (utc) will panic even though it should return 0.0, as it shouldn't happen in the code
    ///
    /// Any date equal to today will have a completion between 0.0 and 1.0
    pub fn get_day_completion(self) -> f64 {
        let now = Timestamp::now();
        let today = Date::today();
        match self.cmp(&today) {
            Ordering::Less => 1.0,
            Ordering::Equal => *(now - self.to_timestamp()) as f64 / ONE_DAY_IN_S as f64,
            Ordering::Greater => unreachable!("0.0 but shouldn't be called"),
        }
    }

    pub fn is_january(&self) -> bool {
        self.month() == 1
    }
    pub fn is_july(&self) -> bool {
        self.month() == 7
    }
    pub fn is_first_of_month(&self) -> bool {
        self.day() == 1
    }
}
impl MapKey<DateMapChunkId> for Date {
    // Dates are chunked by calendar year.
    fn to_chunk_id(&self) -> DateMapChunkId {
        DateMapChunkId::new(self)
    }
    // First date whose value may still change: NUMBER_OF_UNSAFE_DATES back
    // from self (inclusive, hence the `- 1`).
    fn to_first_unsafe(&self) -> Option<Self> {
        let offset = NUMBER_OF_UNSAFE_DATES - 1;
        self.checked_sub_days(Days::new(offset as u64))
            .map(Date::wrap)
    }
    // Dates are stored under their full value inside a chunk (no remapping).
    fn to_serialized_key(&self) -> Self {
        *self
    }
    // Sanity window: reject anything outside [2009, 2100].
    fn is_out_of_bounds(&self) -> bool {
        !(MIN_YEAR..=APPROX_MAX_YEAR).contains(&self.year())
    }
    // First key of a chunk. January 2009 is special-cased: the genesis block
    // was mined on 2009-01-03, so that year's chunk starts on the 3rd.
    fn is_first(&self) -> bool {
        let day = self.day();
        if self.year() == 2009 && self.month() == 1 {
            day == 3
        } else {
            day == 1
        }
    }
    fn checked_sub(&self, days: usize) -> Option<Self> {
        self.checked_sub_days(Days::new(days as u64))
            .map(Self::wrap)
    }
    // Earliest date for which percentile computations are meaningful
    // (before 2012 the datasets are too sparse).
    fn min_percentile_key() -> Self {
        Self::wrap(NaiveDate::from_ymd_opt(2012, 1, 1).unwrap())
    }
    // Inclusive day-by-day iteration from self up to `other`.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        self.iter_days().take_while(|d| d <= other).map(Date::wrap)
    }
    fn map_name<'a>() -> &'a str {
        "date"
    }
}
impl fmt::Display for Date {
    /// Renders the date via `NaiveDate`'s `Debug` output (ISO-8601 `YYYY-MM-DD`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
impl Encode for Date {
    /// Encodes the date as its `Display` string (ISO-8601 `YYYY-MM-DD`).
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        let serialized = self.to_string();
        serialized.encode(encoder)
    }
}
impl Decode for Date {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let str: String = Decode::decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl<'de> BorrowDecode<'de> for Date {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let str: String = BorrowDecode::borrow_decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl Allocative for Date {
    // `Date` is a plain `Copy` value with no heap data: report its size only.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}

21
_src/structs/date_data.rs Normal file
View File

@@ -0,0 +1,21 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use super::{BlockData, BlockPath, Date};
/// All block data belonging to a single calendar date.
#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct DateData {
    pub date: Date,
    // Blocks of that date, indexed by their intra-day position
    // (see `BlockPath::block_index`).
    pub blocks: Vec<BlockData>,
}
impl DateData {
    pub fn new(date: Date, blocks: Vec<BlockData>) -> Self {
        Self { date, blocks }
    }
    /// Looks up a block by its intra-day index; `None` if out of range.
    pub fn get_block_data(&self, block_path: &BlockPath) -> Option<&BlockData> {
        self.blocks.get(block_path.block_index as usize)
    }
}

79
_src/structs/date_map.rs Normal file
View File

@@ -0,0 +1,79 @@
use std::iter::Sum;
use super::{
AnyMap, Date, DateMapChunkId, GenericMap, Height, HeightMap, MapValue, SerializedDateMap,
};
pub type DateMap<Value> = GenericMap<Date, Value, DateMapChunkId, SerializedDateMap<Value>>;
impl<Value> DateMap<Value>
where
    Value: MapValue,
{
    /// For each date, copies the value of that date's last block from `source`.
    ///
    /// # Panics
    /// Panics if a date or its last height is missing from the source maps.
    pub fn multi_insert_last(
        &mut self,
        dates: &[Date],
        source: &mut HeightMap<Value>,
        last_height: &mut DateMap<Height>,
    ) {
        for date in dates {
            let height = last_height.get_or_import(date).unwrap();
            let value = source.get_or_import(&height).unwrap();
            self.insert_computed(*date, value);
        }
    }
    /// For each date, sums `height_map` over that date's inclusive
    /// `[first_height, last_height]` block range.
    ///
    /// # Panics
    /// Panics if a date is missing from the height-boundary maps.
    pub fn multi_insert_sum_range(
        &mut self,
        dates: &[Date],
        height_map: &HeightMap<Value>,
        first_height: &mut DateMap<Height>,
        last_height: &mut DateMap<Height>,
    ) where
        Value: Sum,
    {
        for date in dates {
            let first = first_height.get_or_import(date).unwrap();
            let last = last_height.get_or_import(date).unwrap();
            let sum = height_map.sum_range(&((*first)..=(*last)));
            self.insert_computed(*date, sum);
        }
    }
}
/// Object-safe view over any `DateMap`, regardless of its value type.
pub trait AnyDateMap: AnyMap {
    fn get_initial_first_unsafe_date(&self) -> Option<Date>;
    fn get_initial_last_date(&self) -> Option<Date>;
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync);
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}
impl<T> AnyDateMap for DateMap<T>
where
    T: MapValue,
{
    // Forwards to the generic map's cached keys (set at import time).
    #[inline(always)]
    fn get_initial_first_unsafe_date(&self) -> Option<Date> {
        self.initial_first_unsafe_key
    }
    #[inline(always)]
    fn get_initial_last_date(&self) -> Option<Date> {
        self.initial_last_key
    }
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) {
        self
    }
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap {
        self
    }
}

View File

@@ -0,0 +1,50 @@
use std::path::Path;
use allocative::Allocative;
use chrono::Datelike;
use super::{Date, MapChunkId};
/// Chunk id for date-keyed maps: one chunk per calendar year.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative)]
pub struct DateMapChunkId(i32);
impl DateMapChunkId {
    pub fn new(date: &Date) -> Self {
        Self(date.year())
    }
}
impl MapChunkId for DateMapChunkId {
    /// Chunk ids serialize as the bare year, e.g. `"2021"`.
    fn to_string(&self) -> String {
        self.0.to_string()
    }
    /// Parses a chunk id back from a chunk file path such as `.../2021.json`,
    /// taking everything before the first `.` of the file name.
    ///
    /// # Errors
    /// Returns an error if that prefix is not a valid `i32`.
    ///
    /// # Panics
    /// Panics if the path has no file name or it is not valid UTF-8
    /// (both impossible for paths this crate writes itself).
    fn from_path(path: &Path) -> color_eyre::Result<Self> {
        Ok(Self(
            path.file_name()
                .unwrap()
                .to_str()
                .unwrap()
                // char pattern instead of `"."`: same behavior, avoids the
                // needless substring search (clippy::single_char_pattern)
                .split('.')
                .next()
                .unwrap()
                .parse::<i32>()?,
        ))
    }
    // Years are always positive here (>= 2009), so the cast is lossless.
    fn to_usize(self) -> usize {
        self.0 as usize
    }
    fn from_usize(id: usize) -> Self {
        Self(id as i32)
    }
    fn next(&self) -> Option<Self> {
        self.0.checked_add(1).map(Self)
    }
    fn previous(&self) -> Option<Self> {
        self.0.checked_sub(1).map(Self)
    }
}

View File

@@ -0,0 +1,37 @@
use allocative::Allocative;
use snkrj::{direct_repr, Storable, UnsizedStorable};
use super::{AddressData, AddressType, Amount};
/// An emptied address packed into a single `u64`: the low `SHIFT` bits hold
/// the `AddressType` tag, the remaining high bits hold the lifetime amount
/// transferred, in sats.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub struct EmptyAddressData(u64);
direct_repr!(EmptyAddressData);
/// Number of low bits reserved for the address type tag.
const SHIFT: u64 = 5;
/// Mask extracting the address type from the packed word.
const AND: u64 = (1 << SHIFT) - 1;
/// Largest transferred amount that still fits in the high bits.
/// (Was written `>> 5`; use `SHIFT` so the two constants cannot drift apart.)
const MAX: u64 = (u64::MAX - 1) >> SHIFT;
impl EmptyAddressData {
    /// Packs a fully-spent `AddressData` (sent == received) into compact form.
    ///
    /// # Panics
    /// Panics if the address still holds a balance, or if the transferred
    /// amount does not fit in the `64 - SHIFT` high bits.
    pub fn from_non_empty(non_empty: &AddressData) -> Self {
        if non_empty.sent != non_empty.received {
            dbg!(&non_empty);
            panic!("Trying to convert not empty wallet to empty !");
        }
        let transfered = non_empty.sent.to_sat();
        if transfered >= MAX {
            panic!("Too large !");
        }
        Self((transfered << SHIFT) + (non_empty.address_type as u64))
    }
    /// Unpacks the address type tag from the low bits.
    pub fn address_type(&self) -> AddressType {
        (self.0 & AND).try_into().unwrap()
    }
    /// Unpacks the transferred amount (sats) from the high bits.
    /// (Keeps the existing `transfered` spelling for API compatibility.)
    pub fn transfered(&self) -> Amount {
        Amount::from_sat(self.0 >> SHIFT)
    }
}

20
_src/structs/epoch.rs Normal file
View File

@@ -0,0 +1,20 @@
use super::{Height, MapKey};
/// A halving epoch, 1-based: blocks 0..=209_999 belong to epoch 1.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Epoch(pub u16);
impl Epoch {
    // Number of blocks between two halvings.
    pub const BLOCKS_PER_EPOCH: usize = 210_000;
}
impl From<Height> for Epoch {
    fn from(height: Height) -> Self {
        Self::from(&height)
    }
}
impl From<&Height> for Epoch {
    fn from(height: &Height) -> Self {
        // `+ 1` makes the numbering 1-based (genesis block => epoch 1).
        Self(((height.to_usize() / Self::BLOCKS_PER_EPOCH) + 1) as u16)
    }
}

62
_src/structs/exit.rs Normal file
View File

@@ -0,0 +1,62 @@
use std::{
process::exit,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread::sleep,
time::Duration,
};
use log::info;
/// Cooperative ctrl-c handling: `active` is set once the user requested an
/// exit, `blocked` delays the actual `exit(0)` until critical work finishes.
///
/// NOTE(review): `#[derive(Default)]` builds an `Exit` WITHOUT installing the
/// ctrl-c handler — only `Exit::new()` does. Confirm no caller relies on
/// `Exit::default()` for signal handling.
#[derive(Default, Clone)]
pub struct Exit {
    blocked: Arc<AtomicBool>,
    active: Arc<AtomicBool>,
}
impl Exit {
    /// Creates the flags and installs a ctrl-c handler that waits for any
    /// in-progress critical section (see `block`/`unblock`) before exiting.
    ///
    /// # Panics
    /// Panics if a ctrl-c handler is already installed.
    pub fn new() -> Self {
        let s = Self {
            active: Arc::new(AtomicBool::new(false)),
            blocked: Arc::new(AtomicBool::new(false)),
        };
        let active = s.active.clone();
        let _blocked = s.blocked.clone();
        let blocked = move || _blocked.load(Ordering::SeqCst);
        ctrlc::set_handler(move || {
            // typo fix: "Exitting" -> "Exiting"
            info!("Exiting...");
            active.store(true, Ordering::SeqCst);
            if blocked() {
                info!("Waiting to exit safely");
                // Poll until the critical section releases the block.
                while blocked() {
                    sleep(Duration::from_millis(50));
                }
            }
            exit(0);
        })
        .expect("Error setting Ctrl-C handler");
        s
    }
    /// Marks the start of a critical section during which exit must wait.
    pub fn block(&self) {
        self.blocked.store(true, Ordering::SeqCst);
    }
    /// Marks the end of the critical section.
    pub fn unblock(&self) {
        self.blocked.store(false, Ordering::SeqCst);
    }
    /// True once the user has requested an exit.
    pub fn active(&self) -> bool {
        self.active.load(Ordering::SeqCst)
    }
}

964
_src/structs/generic_map.rs Normal file
View File

@@ -0,0 +1,964 @@
use std::{
collections::{BTreeMap, VecDeque},
fmt::{Debug, Display},
fs,
iter::Sum,
mem,
ops::{Add, ControlFlow, Div, Mul, Sub},
path::{Path, PathBuf},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::OrderedFloat;
use serde::{de::DeserializeOwned, Serialize};
use crate::{
io::Serialization,
utils::{get_percentile, LossyFrom},
};
use super::{AnyMap, Config, MapPath, MapValue};
/// Whether a map's values come straight from parsed data (`Inserted`) or are
/// derived from other maps (`Computed`). Insertions are checked against this.
#[derive(Debug, Clone, Copy, Allocative, PartialEq, Eq)]
pub enum MapKind {
    Inserted,
    Computed,
}
/// A key usable in a `GenericMap`: knows which chunk it belongs to, how it is
/// stored inside a chunk, and how to iterate/offset itself.
pub trait MapKey<ChunkId>
where
    Self: Sized + PartialOrd + Ord + Clone + Copy + Debug + Display,
    ChunkId: MapChunkId,
{
    fn to_chunk_id(&self) -> ChunkId;
    // First key whose value may still change (None if everything is unsafe).
    fn to_first_unsafe(&self) -> Option<Self>;
    // Key as stored within its chunk (may be chunk-relative).
    fn to_serialized_key(&self) -> Self;
    fn is_out_of_bounds(&self) -> bool;
    // True if this is the first key of its chunk.
    fn is_first(&self) -> bool;
    fn checked_sub(&self, x: usize) -> Option<Self>;
    // Earliest key for which percentile computations are meaningful.
    fn min_percentile_key() -> Self;
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self>;
    fn map_name<'a>() -> &'a str;
    // usize conversions are only required for some key types; the defaults
    // make accidental use on an unsupported key fail loudly.
    fn from_usize(_: usize) -> Self {
        unreachable!()
    }
    fn to_usize(&self) -> usize {
        unreachable!()
    }
}
/// On-disk representation of one chunk of a map.
pub trait MapSerialized<Key, Value, ChunkId>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
{
    fn new(version: u32) -> Self;
    fn get_last_key(&self, last_chunk_id: &ChunkId) -> Option<Key>;
    fn version(&self) -> u32;
    fn get(&self, serialized_key: &Key) -> Option<&Value>;
    fn last(&self) -> Option<&Value>;
    fn extend(&mut self, map: BTreeMap<Key, Value>);
    fn import_all(path: &Path, serialization: &Serialization) -> Self;
    fn to_csv(self, id: &str) -> String;
    fn map(&self) -> &impl Serialize;
}
/// Identifier of a chunk file (e.g. a year for dates, a height bucket).
pub trait MapChunkId
where
    Self: Ord + Debug + Copy + Clone,
{
    fn to_string(&self) -> String;
    fn from_path(path: &Path) -> color_eyre::Result<Self>;
    fn to_usize(self) -> usize;
    fn from_usize(id: usize) -> Self;
    fn previous(&self) -> Option<Self>;
    fn next(&self) -> Option<Self>;
}
/// A chunked, disk-backed key/value map.
///
/// Values are buffered in `to_insert` until export; previously exported
/// chunks are lazily (re)loaded into `imported`. Only `chunks_in_memory`
/// chunks are kept resident after an export.
#[derive(Debug, Allocative)]
pub struct GenericMap<Key, Value, ChunkId, Serialized> {
    version: u32,
    kind: MapKind,
    // Directory holding the per-chunk files (parent joined with the key name).
    path_all: MapPath,
    path_parent: MapPath,
    // Optional standalone file containing only the latest value.
    path_last: Option<MapPath>,
    chunks_in_memory: usize,
    serialization: Serialization,
    // Snapshot of the last/first-unsafe keys as found at import time.
    pub initial_last_key: Option<Key>,
    pub initial_first_unsafe_key: Option<Key>,
    imported: BTreeMap<ChunkId, Serialized>,
    to_insert: BTreeMap<ChunkId, BTreeMap<Key, Value>>,
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    /// Binary map keeping only the latest chunk in memory, exporting `last`.
    pub fn new_bin(version: u32, kind: MapKind, path: &MapPath) -> Self {
        Self::new(version, kind, path, Serialization::Binary, 1, true)
    }
    /// Like `new_bin` but lets the caller disable the `last` export.
    pub fn _new_bin(version: u32, kind: MapKind, path: &MapPath, export_last: bool) -> Self {
        Self::new(version, kind, path, Serialization::Binary, 1, export_last)
    }
    /// JSON map keeping every chunk resident (`usize::MAX`).
    pub fn new_json(version: u32, kind: MapKind, path: &MapPath, export_last: bool) -> Self {
        Self::new(
            version,
            kind,
            path,
            Serialization::Json,
            usize::MAX,
            export_last,
        )
    }
    /// Builds the map, creates its directory, loads the most recent
    /// `chunks_in_memory` chunks, and wipes all chunk files on a version
    /// mismatch so stale data is never mixed with new data.
    fn new(
        version: u32,
        kind: MapKind,
        path: &MapPath,
        serialization: Serialization,
        chunks_in_memory: usize,
        export_last: bool,
    ) -> Self {
        if chunks_in_memory < 1 {
            panic!("Should always have at least the latest chunk in memory");
        }
        let path_all = path.join(Key::map_name());
        fs::create_dir_all(&*path_all).unwrap_or_else(|_| {
            dbg!(&path_all);
            panic!()
        });
        let path_last = {
            if export_last {
                Some(path.join("last"))
            } else {
                None
            }
        };
        let mut s = Self {
            version,
            kind,
            path_all,
            path_parent: path.to_owned(),
            path_last,
            chunks_in_memory,
            serialization,
            initial_last_key: None,
            initial_first_unsafe_key: None,
            to_insert: BTreeMap::default(),
            imported: BTreeMap::default(),
        };
        // Load the newest chunks first (rev) up to the memory budget.
        s.read_dir()
            .into_iter()
            .rev()
            .take(chunks_in_memory)
            .for_each(|(chunk_start, path)| {
                if let Ok(serialized) = s.import(&path) {
                    if serialized.version() == s.version {
                        s.imported.insert(chunk_start, serialized);
                    } else {
                        // Version bump: drop every chunk file and start fresh.
                        s.delete_files();
                    }
                }
            });
        s.set_initial_keys();
        // if s.initial_first_unsafe_key.is_none() {
        //     log(&format!("Missing dataset: {path:?}/{}", Key::map_name()));
        // }
        s
    }
    // Caches the last key on disk and the first key that may still change.
    fn set_initial_keys(&mut self) {
        self.initial_last_key = self
            .imported
            .iter()
            .last()
            .and_then(|(last_chunk_id, serialized)| serialized.get_last_key(last_chunk_id));
        self.initial_first_unsafe_key = self
            .initial_last_key
            .and_then(|last_key| last_key.to_first_unsafe());
    }
    fn read_dir(&self) -> BTreeMap<ChunkId, PathBuf> {
        Self::_read_dir(&self.path_all, &self.serialization)
    }
    /// Lists every chunk file in `path` that matches the serialization format,
    /// keyed by the chunk id parsed from its file name.
    pub fn _read_dir(path: &Path, serialization: &Serialization) -> BTreeMap<ChunkId, PathBuf> {
        fs::read_dir(path)
            .unwrap()
            .map(|entry| entry.unwrap().path())
            .filter(|path| serialization.is_serializable(path))
            .flat_map(|path| {
                if let Ok(chunk_id) = ChunkId::from_path(&path) {
                    Some((chunk_id, path))
                } else {
                    None
                }
            })
            .collect()
    }
    fn import(&self, path: &Path) -> color_eyre::Result<Serialized> {
        self.serialization.import::<Serialized>(path)
    }
    /// Inserts a raw (parsed) value; panics if this map is `Computed`.
    pub fn insert(&mut self, key: Key, value: Value) -> Value {
        self.checked_insert(key, value, MapKind::Inserted)
    }
    /// Inserts a derived value; panics if this map is `Inserted`.
    pub fn insert_computed(&mut self, key: Key, value: Value) -> Value {
        self.checked_insert(key, value, MapKind::Computed)
    }
    // Buffers the value unless the key is already in the safe (immutable)
    // zone; always returns the value so calls chain nicely.
    fn checked_insert(&mut self, key: Key, value: Value, kind: MapKind) -> Value {
        if self.kind != kind {
            dbg!(&self.path());
            panic!("Called at the wrong place");
        }
        if !self.is_key_safe(key) {
            self.to_insert
                .entry(key.to_chunk_id())
                .or_default()
                .insert(key.to_serialized_key(), value);
        }
        value
    }
    pub fn insert_default(&mut self, key: Key) -> Value {
        self.insert(key, Value::default())
    }
    /// A key is "safe" when it is strictly older than the first unsafe key
    /// recorded at import time, i.e. its value can no longer change.
    #[inline(always)]
    pub fn is_key_safe(&self, key: Key) -> bool {
        self.initial_first_unsafe_key
            .map_or(false, |initial_first_unsafe_key| {
                initial_first_unsafe_key > key
            })
    }
    /// Reads a value from the pending buffer or the in-memory chunks only
    /// (never touches disk); `None` if absent from both.
    pub fn get(&self, key: &Key) -> Option<Value> {
        let chunk_id = key.to_chunk_id();
        let serialized_key = key.to_serialized_key();
        self.to_insert
            .get(&chunk_id)
            .and_then(|tree| tree.get(&serialized_key).cloned())
            .or_else(|| {
                self.imported
                    .get(&chunk_id)
                    .and_then(|serialized| serialized.get(&serialized_key))
                    .cloned()
            })
    }
    /// Like `get`, but loads the key's chunk from disk when it isn't in
    /// memory yet. Out-of-bounds keys short-circuit to `None`.
    pub fn get_or_import(&mut self, key: &Key) -> Option<Value> {
        if key.is_out_of_bounds() {
            return None;
        }
        let chunk_id = key.to_chunk_id();
        let serialized_key = key.to_serialized_key();
        self.to_insert
            .get(&chunk_id)
            .and_then(|tree| tree.get(&serialized_key).cloned())
            .or_else(|| {
                #[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
                if !self.imported.contains_key(&chunk_id) {
                    let dir_content = self.read_dir();
                    if let Some(path) = dir_content.get(&chunk_id) {
                        let serialized = self.import(path).unwrap();
                        self.imported.insert(chunk_id, serialized);
                    }
                }
                self.imported
                    .get(&chunk_id)
                    .and_then(|serialized| serialized.get(&serialized_key))
                    .cloned()
            })
    }
}
impl<Key, Value, ChunkId, Serialized> AnyMap for GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    // Human-readable id derived from the path, with the data-dir prefix
    // stripped and separators turned into dashes.
    fn id(&self, config: &Config) -> String {
        let path_to_string = |p: &Path| p.to_str().unwrap().to_owned();
        path_to_string(self.path_parent())
            .replace(&format!("{}/", path_to_string(&config.path_kibodir())), "")
            .replace(&format!("{}/", Config::DATASET_DIR_NAME), "")
            .replace("/", "-")
    }
    fn serialization(&self) -> Serialization {
        self.serialization
    }
    fn path(&self) -> &Path {
        &self.path_all
    }
    fn path_parent(&self) -> &Path {
        &self.path_parent
    }
    fn path_last(&self) -> &Option<MapPath> {
        &self.path_last
    }
    // Last value of the newest in-memory chunk, as loose JSON.
    fn last_value(&self) -> Option<serde_json::Value> {
        self.imported
            .last_key_value()
            .and_then(|(_, serialized)| serialized.last())
            .and_then(|v| serde_json::to_value(v).ok())
    }
    fn type_name(&self) -> &str {
        std::any::type_name::<Value>()
    }
    fn key_name(&self) -> &str {
        Key::map_name()
    }
    /// Merges every pending chunk buffer into `imported`, loading a chunk
    /// from disk first when the buffer doesn't start at the chunk's first key
    /// (otherwise earlier values of that chunk would be lost). Must run
    /// before `export`.
    fn pre_export(&mut self) {
        self.to_insert.iter_mut().for_each(|(chunk_id, map)| {
            if let Some((key, _)) = map.first_key_value() {
                if !key.is_first() && !self.imported.contains_key(chunk_id) {
                    // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut
                    let dir_content = Self::_read_dir(&self.path_all, &self.serialization);
                    let path = dir_content.get(chunk_id).unwrap_or_else(|| {
                        dbg!(&self.path_all, chunk_id, &dir_content);
                        panic!();
                    });
                    let serialized = self.serialization.import::<Serialized>(path).unwrap();
                    self.imported.insert(*chunk_id, serialized);
                }
            }
            // Drain the buffer into the (possibly fresh) serialized chunk.
            self.imported
                .entry(*chunk_id)
                .or_insert(Serialized::new(self.version))
                .extend(mem::take(map));
        });
        self.set_initial_keys();
    }
    /// Writes every touched chunk to disk; the final chunk's last value also
    /// goes to the standalone `last` file when configured. Expects
    /// `pre_export` to have drained the buffers already.
    fn export(&self) -> color_eyre::Result<()> {
        let len = self.imported.len();
        self.to_insert.iter().enumerate().try_for_each(
            |(index, (chunk_id, map))| -> color_eyre::Result<()> {
                if !map.is_empty() {
                    unreachable!()
                }
                let serialized = self.imported.get(chunk_id).unwrap_or_else(|| {
                    dbg!(&self.path_all, chunk_id, &self.imported);
                    panic!();
                });
                let path = self.path_all.join(&chunk_id.to_string());
                self.serialization.export(&path, serialized)?;
                // Export last
                if index == len - 1 {
                    if let Some(path_last) = self.path_last.as_ref() {
                        self.serialization
                            .export(path_last, serialized.last().unwrap())?;
                    }
                }
                Ok(())
            },
        )
    }
    /// Evicts all but the newest `chunks_in_memory` chunks and clears the
    /// (now empty) pending buffers.
    fn post_export(&mut self) {
        self.imported
            .keys()
            .rev()
            .enumerate()
            .filter(|(index, _)| *index + 1 > self.chunks_in_memory)
            .map(|(_, key)| *key)
            .collect_vec()
            .iter()
            .for_each(|key| {
                self.imported.remove(key);
            });
        self.to_insert.clear();
    }
    /// Deletes every chunk file on disk (used on version mismatch).
    fn delete_files(&self) {
        self.read_dir()
            .iter()
            .for_each(|(_, path)| fs::remove_file(path).unwrap())
    }
    fn kind(&self) -> MapKind {
        self.kind
    }
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    // pub fn sum_keys(&mut self, keys: &[Key]) -> Value
    // where
    //     Value: Sum,
    // {
    //     keys.iter()
    //         .map(|key| self.get_or_import(key).unwrap())
    //         .sum::<Value>()
    // }
    // pub fn average_keys(&mut self, keys: &[Key]) -> f32
    // where
    //     Value: Sum,
    //     f32: LossyFrom<Value>,
    // {
    //     f32::lossy_from(self.sum_keys(keys)) / keys.len() as f32
    // }
    /// Inserts `callback(key)` for every key.
    pub fn multi_insert<F>(&mut self, keys: &[Key], mut callback: F)
    where
        F: FnMut(&Key) -> Value,
    {
        keys.iter().for_each(|key| {
            self.insert_computed(*key, callback(key));
        });
    }
    /// Inserts the same constant for every key.
    pub fn multi_insert_const(&mut self, keys: &[Key], constant: Value) {
        keys.iter().for_each(|key| {
            self.insert_computed(*key, constant);
        });
    }
    /// Inserts `transform(source[key], key)` for every key.
    ///
    /// # Panics
    /// Panics if a key is missing from `source`.
    pub fn multi_insert_simple_transform<SourceValue, SourceSerialized, F>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        mut transform: F,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        F: FnMut(SourceValue, &Key) -> Value,
    {
        keys.iter().for_each(|key| {
            self.insert_computed(*key, transform(source.get_or_import(key).unwrap(), key));
        });
    }
    /// Like `multi_insert_simple_transform`, but the transform also receives
    /// mutable access to both maps (for lookups at other keys).
    pub fn multi_insert_complex_transform<SourceValue, SourceSerialized, F>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        mut transform: F,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        F: FnMut(
            (
                SourceValue,
                &Key,
                &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
                &mut Self,
            ),
        ) -> Value,
    {
        keys.iter().for_each(|key| {
            let value = transform((source.get_or_import(key).unwrap(), key, source, self));
            self.insert_computed(*key, value);
        });
    }
    /// Element-wise `added[key] + adder[key]` (lossy-converted into `Value`).
    pub fn multi_insert_add<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        added: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        adder: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Add<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert_computed(
                *key,
                Value::lossy_from(added.get_or_import(key).unwrap())
                    + Value::lossy_from(adder.get_or_import(key).unwrap()),
            );
        });
    }
    /// Element-wise `subtracted[key] - subtracter[key]`.
    pub fn multi_insert_subtract<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        subtracted: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        subtracter: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Sub<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert_computed(
                *key,
                Value::lossy_from(subtracted.get_or_import(key).unwrap())
                    - Value::lossy_from(subtracter.get_or_import(key).unwrap()),
            );
        });
    }
    /// Element-wise `multiplied[key] * multiplier[key]`.
    pub fn multi_insert_multiply<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        multiplied: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        multiplier: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Mul<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert_computed(
                *key,
                Value::lossy_from(multiplied.get_or_import(key).unwrap())
                    * Value::lossy_from(multiplier.get_or_import(key).unwrap()),
            );
        });
    }
    /// Element-wise `divided[key] / divider[key]`.
    pub fn multi_insert_divide<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self._multi_insert_divide(keys, divided, divider, false)
    }
    /// Element-wise division scaled by 100 (i.e. stored as a percentage).
    pub fn multi_insert_percentage<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self._multi_insert_divide(keys, divided, divider, true)
    }
    // Shared implementation: multiplier is 100 for percentages, 1 otherwise.
    fn _multi_insert_divide<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
        as_percentage: bool,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        let multiplier = Value::from(if as_percentage { 100 } else { 1 });
        keys.iter().for_each(|key| {
            self.insert_computed(
                *key,
                Value::lossy_from(divided.get_or_import(key).unwrap())
                    / Value::lossy_from(divider.get_or_import(key).unwrap())
                    * multiplier,
            );
        });
    }
    /// Running total of `source` over all keys (unbounded window).
    pub fn multi_insert_cumulative<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        self._multi_insert_last_x_sum(keys, source, None)
    }
    /// Rolling sum of `source` over a window of the last `len` keys.
    pub fn multi_insert_last_x_sum<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: usize,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        self._multi_insert_last_x_sum(keys, source, Some(len))
    }
    // Sliding-window sum: each step adds the new value and (when a window
    // length is set) subtracts the value that fell out of the window. The
    // previous sum is read back from self on the first iteration only, then
    // carried in a local.
    fn _multi_insert_last_x_sum<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: Option<usize>,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        let mut sum = None;
        keys.iter().for_each(|key| {
            let to_subtract = len
                .and_then(|x| {
                    key.checked_sub(x)
                        .and_then(|previous_key| source.get_or_import(&previous_key))
                })
                .unwrap_or_default();
            let previous_sum = sum.unwrap_or_else(|| {
                key.checked_sub(1)
                    .and_then(|previous_sum_key| self.get_or_import(&previous_sum_key))
                    .unwrap_or_default()
            });
            let last_value = source.get_or_import(key).unwrap_or_else(|| {
                dbg!(&source.to_insert, &source.path(), key);
                panic!();
            });
            sum.replace(
                previous_sum + Value::lossy_from(last_value) - Value::lossy_from(to_subtract),
            );
            self.insert_computed(*key, sum.unwrap());
        });
    }
    /// Exponential-style rolling average over ~`len` samples, computed in
    /// `f32`. NaN/infinite inputs and seeds are clamped to 0.0.
    ///
    /// # Panics
    /// Panics when `len <= 1`.
    pub fn multi_insert_simple_average<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: usize,
    ) where
        SourceValue: MapValue + Sum,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: Into<f32> + From<f32>,
        f32: LossyFrom<SourceValue>,
    {
        if len <= 1 {
            panic!("Average of 1 or less is not useful");
        }
        let len = len as f32;
        let mut average = None;
        keys.iter().for_each(|key| {
            let mut previous_average: f32 = average
                .unwrap_or_else(|| {
                    key.checked_sub(1)
                        .and_then(|previous_average_key| self.get_or_import(&previous_average_key))
                        .unwrap_or_default()
                })
                .into();
            if previous_average.is_nan() || previous_average.is_infinite() {
                previous_average = 0.0;
            }
            let mut last_value = f32::lossy_from(source.get_or_import(key).unwrap_or_else(|| {
                dbg!(key);
                panic!()
            }));
            if last_value.is_nan() || last_value.is_infinite() {
                last_value = 0.0;
            }
            // Weighted update: (old * (len - 1) + new) / len.
            average.replace(((previous_average * (len - 1.0) + last_value) / len).into());
            self.insert_computed(*key, average.unwrap());
        });
    }
    /// `source[key] - source[key - len]` for every key (0 when there is no
    /// value `len` keys back).
    pub fn multi_insert_net_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
    where
        Value: Sub<Output = Value>,
    {
        keys.iter().for_each(|key| {
            let previous_value = key
                .checked_sub(len)
                .and_then(|previous_key| source.get_or_import(&previous_key))
                .unwrap_or_default();
            let last_value = source.get_or_import(key).unwrap();
            let net_change = last_value - previous_value;
            self.insert_computed(*key, net_change);
        });
    }
    /// Percentage change versus `len` keys ago, computed in `f32`.
    pub fn multi_insert_percentage_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
    where
        Value: Sub<Output = Value> + LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        let one = 1.0;
        let hundred = 100.0;
        keys.iter().for_each(|key| {
            let previous_value = f32::lossy_from(
                key.checked_sub(len)
                    .and_then(|previous_key| source.get_or_import(&previous_key))
                    .unwrap_or_default(),
            );
            let last_value = f32::lossy_from(source.get_or_import(key).unwrap());
            let percentage_change = ((last_value / previous_value) - one) * hundred;
            self.insert_computed(*key, Value::lossy_from(percentage_change));
        });
    }
    /// Rolling median = the 0.5 percentile over a window of `len`
    /// (or everything since `min_percentile_key` when `len` is `None`).
    pub fn multi_insert_median(&mut self, keys: &[Key], source: &mut Self, len: Option<usize>)
    where
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        source.multi_insert_percentile(keys, vec![(self, 0.5)], len);
    }
    /// Rolling percentiles over `self`, written into the target maps in
    /// `map_and_percentiles`. Maintains two parallel structures per run:
    /// `ordered_vec` (insertion order, to know which value leaves the
    /// window) and `sorted_vec` (sorted, for percentile lookup). Keys before
    /// `min_percentile_key` get NaN.
    ///
    /// # Panics
    /// Panics if `len < 3` or a percentile is outside `[0.0, 1.0]`.
    pub fn multi_insert_percentile(
        &mut self,
        keys: &[Key],
        mut map_and_percentiles: Vec<(&mut Self, f32)>,
        len: Option<usize>,
    ) where
        Value: LossyFrom<f32>,
        f32: LossyFrom<Value>,
    {
        if len.map_or(false, |size| size < 3) {
            panic!("Computing a percentile for a size lower than 3 is useless");
        }
        let mut ordered_vec = None;
        let mut sorted_vec = None;
        let min_percentile_key = Key::min_percentile_key();
        let nan = Value::lossy_from(f32::NAN);
        keys.iter().cloned().try_for_each(|key| {
            if key < min_percentile_key {
                map_and_percentiles.iter_mut().for_each(|(map, _)| {
                    (*map).insert_computed(key, nan);
                });
                return ControlFlow::Continue::<()>(());
            }
            if let Some(start) = len.map_or(Some(min_percentile_key), |size| key.checked_sub(size))
            {
                if sorted_vec.is_none() {
                    // First usable key: build the window from scratch,
                    // skipping NaNs so they never pollute the ordering.
                    let mut vec = start
                        .iter_up_to(&key)
                        .flat_map(|key| self.get_or_import(&key))
                        .map(|v| f32::lossy_from(v))
                        .filter(|f| !f.is_nan())
                        .map(OrderedFloat)
                        .collect_vec();
                    if len.is_some() {
                        ordered_vec.replace(VecDeque::from(vec.clone()));
                    }
                    vec.sort_unstable();
                    sorted_vec.replace(vec);
                } else {
                    // Incremental update: evict the oldest window entry from
                    // both structures, then insert the new value sorted.
                    let float_value = f32::lossy_from(self.get_or_import(&key).unwrap());
                    if !float_value.is_nan() {
                        let float_value = OrderedFloat(float_value);
                        if let Some(len) = len {
                            if let Some(ordered_vec) = ordered_vec.as_mut() {
                                if ordered_vec.len() == len {
                                    let first = ordered_vec.pop_front().unwrap();
                                    let pos =
                                        sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
                                    sorted_vec.as_mut().unwrap().remove(pos);
                                }
                                ordered_vec.push_back(float_value);
                            }
                        }
                        let pos = sorted_vec
                            .as_ref()
                            .unwrap()
                            .binary_search(&float_value)
                            .unwrap_or_else(|pos| pos);
                        sorted_vec.as_mut().unwrap().insert(pos, float_value);
                    }
                }
                let vec = sorted_vec.as_ref().unwrap();
                map_and_percentiles
                    .iter_mut()
                    .for_each(|(map, percentile)| {
                        if !(0.0..=1.0).contains(percentile) {
                            panic!("The percentile should be between 0.0 and 1.0");
                        }
                        let float_value = get_percentile::<OrderedFloat<f32>>(vec, *percentile).0;
                        (*map).insert_computed(key, Value::lossy_from(float_value));
                    });
            } else {
                // Not enough history for a full window yet.
                map_and_percentiles.iter_mut().for_each(|(map, _)| {
                    (*map).insert_computed(key, nan);
                });
            }
            ControlFlow::Continue(())
        });
    }
    /// Running maximum of `source`, seeded from self's previous key if any.
    ///
    /// # Panics
    /// Panics if a key is missing from `source`.
    pub fn multi_insert_max(&mut self, keys: &[Key], source: &mut Self)
    where
        Value: Default + PartialOrd,
    {
        let mut previous_max = None;
        keys.iter().for_each(|key| {
            if previous_max.is_none() {
                key.checked_sub(1)
                    .and_then(|previous_max_key| self.get_or_import(&previous_max_key))
                    .and_then(|v| previous_max.replace(v));
            }
            let last_value = source.get_or_import(key).unwrap_or_else(|| {
                dbg!(key);
                panic!()
            });
            if previous_max.is_none()
                || previous_max.is_some_and(|previous_max| previous_max < last_value)
            {
                previous_max.replace(last_value);
            }
            self.insert_computed(*key, previous_max.unwrap());
        });
    }
    /// Running minimum of `source`, ignoring values <= `min_excluded`
    /// (used e.g. to skip zeros). Defaults to `Value::default()` until a
    /// qualifying value is seen.
    pub fn multi_insert_min(&mut self, keys: &[Key], source: &mut Self, min_excluded: Value)
    where
        Value: Default + PartialOrd,
    {
        let mut previous_min = None;
        keys.iter().for_each(|key| {
            if previous_min.is_none() {
                if let Some(value) = key
                    .checked_sub(1)
                    .and_then(|previous_min_key| self.get_or_import(&previous_min_key))
                {
                    if value > min_excluded {
                        previous_min.replace(value);
                    }
                }
            }
            let last_value = source.get_or_import(key).unwrap_or_else(|| {
                dbg!(key);
                panic!()
            });
            if last_value > min_excluded
                && (previous_min.is_none()
                    || previous_min.is_some_and(|previous_min| previous_min > last_value))
            {
                previous_min.replace(last_value);
            }
            self.insert_computed(*key, previous_min.unwrap_or_default());
        });
    }
}

159
_src/structs/height.rs Normal file
View File

@@ -0,0 +1,159 @@
use std::{
fmt,
ops::{Add, AddAssign, Sub},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use brk_parser::NUMBER_OF_UNSAFE_BLOCKS;
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use super::{HEIGHT_MAP_CHUNK_SIZE, HeightMapChunkId, MapKey};
/// A block height, newtype over `u32`. Dereferences to the inner value.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
    Encode,
    Decode,
    Allocative,
)]
pub struct Height(u32);
impl Height {
    pub const ZERO: Height = Height(0);
    pub fn new(height: u32) -> Self {
        Self(height)
    }
    /// True when within 3x the unsafe-block window of the chain tip.
    ///
    /// NOTE(review): `block_count - NUMBER_OF_UNSAFE_BLOCKS * 3` underflows
    /// `usize` for a very short chain — confirm callers only use this once
    /// the chain is longer than the window.
    pub fn is_close_to_end(&self, block_count: usize) -> bool {
        **self > (block_count - (NUMBER_OF_UNSAFE_BLOCKS * 3)) as u32
    }
    /// True when deep enough that a reorg can no longer touch this block.
    pub fn is_safe(&self, block_count: usize) -> bool {
        **self < (block_count - NUMBER_OF_UNSAFE_BLOCKS) as u32
    }
    // pub fn iter_range_inclusive(first: Height, last: Height) -> impl Iterator<Item = Height> {
    //     let range = (*first)..=(*last);
    //     range.into_iter().map(Height::new)
    // }
}
impl PartialEq<u64> for Height {
fn eq(&self, other: &u64) -> bool {
**self == *other as u32
}
}
impl Add<u32> for Height {
    type Output = Height;
    fn add(self, rhs: u32) -> Self::Output {
        Self::new(*self + rhs)
    }
}
impl Add<usize> for Height {
    type Output = Height;
    // NOTE(review): `rhs as u32` truncates for rhs > u32::MAX — fine for
    // realistic block offsets, but confirm no caller passes huge values.
    fn add(self, rhs: usize) -> Self::Output {
        Self::new(*self + rhs as u32)
    }
}
impl Sub<Height> for Height {
    type Output = Height;
    // Underflows (debug panic / release wrap) when rhs > self.
    fn sub(self, rhs: Height) -> Self::Output {
        Self::new(*self - *rhs)
    }
}
impl Sub<u32> for Height {
    type Output = Height;
    fn sub(self, rhs: u32) -> Self::Output {
        Self::new(*self - rhs)
    }
}
impl Sub<usize> for Height {
    type Output = Height;
    fn sub(self, rhs: usize) -> Self::Output {
        Self::new(*self - rhs as u32)
    }
}
impl AddAssign<usize> for Height {
    fn add_assign(&mut self, rhs: usize) {
        *self = self.add(rhs);
    }
}
impl fmt::Display for Height {
    /// Renders the height as its plain decimal block number.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl MapKey<HeightMapChunkId> for Height {
    // Heights are chunked into buckets of HEIGHT_MAP_CHUNK_SIZE blocks.
    fn to_chunk_id(&self) -> HeightMapChunkId {
        HeightMapChunkId::new(self)
    }
    // First height whose value may still change (reorg window, inclusive).
    fn to_first_unsafe(&self) -> Option<Self> {
        let offset = NUMBER_OF_UNSAFE_BLOCKS - 1;
        self.checked_sub(offset)
    }
    // Heights are stored chunk-relative (offset within the chunk).
    fn to_serialized_key(&self) -> Self {
        Height::new(**self % HEIGHT_MAP_CHUNK_SIZE)
    }
    // Loose sanity ceiling on the chain height.
    fn is_out_of_bounds(&self) -> bool {
        !(0..=2_100_000).contains(&**self)
    }
    fn is_first(&self) -> bool {
        **self == 0
    }
    fn checked_sub(&self, x: usize) -> Option<Self> {
        (**self).checked_sub(x as u32).map(Height::new)
    }
    // Earliest height for which percentile computations are meaningful.
    fn min_percentile_key() -> Self {
        Self(160_000)
    }
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        (**self..=**other).map(Height::new)
    }
    fn map_name<'a>() -> &'a str {
        "height"
    }
    fn to_usize(&self) -> usize {
        (**self) as usize
    }
    fn from_usize(h: usize) -> Self {
        Self(h as u32)
    }
}

View File

@@ -0,0 +1,64 @@
use std::{iter::Sum, ops::RangeInclusive};
use itertools::Itertools;
use super::{AnyMap, GenericMap, Height, HeightMapChunkId, MapValue, SerializedVec};
/// Number of heights stored per serialized chunk file.
pub const HEIGHT_MAP_CHUNK_SIZE: u32 = 10_000;
/// A map keyed by block height, persisted as dense `Vec` chunks of
/// `HEIGHT_MAP_CHUNK_SIZE` entries.
pub type HeightMap<Value> = GenericMap<Height, Value, HeightMapChunkId, SerializedVec<Value>>;
impl<Value> HeightMap<Value>
where
    Value: MapValue,
{
    /// Sums the values present for every height in `range`; heights with
    /// no stored value are simply skipped.
    pub fn sum_range(&self, range: &RangeInclusive<u32>) -> Value
    where
        Value: Sum,
    {
        range
            .clone()
            .filter_map(|height| self.get(&Height::new(height)))
            .sum()
    }

    /// Returns the values for every height in `first..=last`, in ascending
    /// order, importing chunks from disk when they are not in memory.
    pub fn get_or_import_range_inclusive(&mut self, first: Height, last: Height) -> Vec<Value> {
        ((*first)..=(*last))
            .map(|raw| self.get_or_import(&Height::new(raw)).unwrap())
            .collect_vec()
    }
}
/// Object-safe view over any `HeightMap<_>`, so maps with different value
/// types can live in one heterogeneous collection.
pub trait AnyHeightMap: AnyMap {
    // First height that was still reorg-unsafe when the map was opened.
    fn get_initial_first_unsafe_height(&self) -> Option<Height>;
    // Last height present when the map was opened.
    fn get_initial_last_height(&self) -> Option<Height>;
    // Upcast to the base `AnyMap` trait object.
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync);
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}
/// Blanket forwarding impl: every concrete `HeightMap` is an `AnyHeightMap`.
impl<T> AnyHeightMap for HeightMap<T>
where
    T: MapValue,
{
    #[inline(always)]
    fn get_initial_first_unsafe_height(&self) -> Option<Height> {
        self.initial_first_unsafe_key
    }
    #[inline(always)]
    fn get_initial_last_height(&self) -> Option<Height> {
        self.initial_last_key
    }
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) {
        self
    }
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap {
        self
    }
}

View File

@@ -0,0 +1,59 @@
use std::path::Path;
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use super::{Height, MapChunkId, HEIGHT_MAP_CHUNK_SIZE};
/// Identifies one on-disk chunk of a `HeightMap`: the first height of the
/// `HEIGHT_MAP_CHUNK_SIZE`-wide window the chunk covers.
#[derive(
    Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative, Deref, DerefMut,
)]
pub struct HeightMapChunkId(Height);
impl HeightMapChunkId {
    /// Floors `height` down to the start of its chunk window
    /// (integer division deliberately truncates).
    pub fn new(height: &Height) -> Self {
        Self(Height::new(
            **height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE,
        ))
    }
}
impl MapChunkId for HeightMapChunkId {
    // Renders the half-open height window, e.g. "10000..20000";
    // used as the chunk's file name.
    fn to_string(&self) -> String {
        let start = ***self;
        let end = start + HEIGHT_MAP_CHUNK_SIZE;
        format!("{start}..{end}")
    }
    // Parses the window start back out of a chunk file name.
    // NOTE(review): the unwraps assume the file name was produced by
    // `to_string` above; a foreign file in the directory would panic
    // before the `?` can report an error — confirm the caller filters.
    fn from_path(path: &Path) -> color_eyre::Result<Self> {
        Ok(Self(Height::new(
            path.file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .split("..")
                .next()
                .unwrap()
                .parse::<u32>()?,
        )))
    }
    fn to_usize(self) -> usize {
        **self as usize
    }
    fn from_usize(id: usize) -> Self {
        Self(Height::new(id as u32))
    }
    // Next/previous windows; None on u32 overflow/underflow.
    fn next(&self) -> Option<Self> {
        self.checked_add(HEIGHT_MAP_CHUNK_SIZE)
            .map(|h| Self(Height::new(h)))
    }
    fn previous(&self) -> Option<Self> {
        self.checked_sub(HEIGHT_MAP_CHUNK_SIZE)
            .map(|h| Self(Height::new(h)))
    }
}

15
_src/structs/instant.rs Normal file
View File

@@ -0,0 +1,15 @@
use std::time::Instant;
use color_eyre::owo_colors::OwoColorize;
/// Extension trait for pretty-printing elapsed time.
pub trait DisplayInstant {
    // Formats the time elapsed since this instant, e.g. "1.23s".
    fn display(&self) -> String;
}
impl DisplayInstant for Instant {
    /// Renders the elapsed seconds with two decimals, dimmed for terminal
    /// output (e.g. "1.23s").
    fn display(&self) -> String {
        let elapsed_secs = self.elapsed().as_secs_f32();
        format!("{elapsed_secs:.2}s").bright_black().to_string()
    }
}

103
_src/structs/liquidity.rs Normal file
View File

@@ -0,0 +1,103 @@
use std::f64::consts::E;
use super::{AddressLiquidity, Amount};
/// Shares (each in 0.0..=1.0) used to split an address's balance into
/// illiquid / liquid / highly-liquid buckets; the highly-liquid share is
/// the remainder `1 - illiquid - liquid`.
#[derive(Debug)]
pub struct LiquidityClassification {
    illiquid: f64,
    liquid: f64,
}
impl LiquidityClassification {
    /// Classifies an address from its lifetime sent/received volumes.
    ///
    /// Follows Glassnode's liquid-supply methodology:
    /// https://insights.glassnode.com/bitcoin-liquid-supply/
    /// https://www.desmos.com/calculator/dutgni5rtj
    ///
    /// # Panics
    /// Panics when `received` is zero or `sent > received` — both indicate
    /// corrupted accounting upstream.
    pub fn new(sent: Amount, received: Amount) -> Self {
        assert!(
            received != Amount::ZERO,
            "liquidity: received must be non-zero (sent: {sent:?})"
        );
        assert!(
            sent <= received,
            "liquidity: sent ({sent:?}) cannot exceed received ({received:?})"
        );

        // Share of received coins that were moved out again, in 0.0..=1.0.
        // `received != 0` is guaranteed above, so the division is well-defined.
        let liquidity = if sent == Amount::ZERO {
            0.0
        } else {
            sent.to_sat() as f64 / received.to_sat() as f64
        };

        let illiquid_line = Self::compute_illiquid_line(liquidity);
        let liquid_line = Self::compute_liquid_line(liquidity);

        let illiquid = illiquid_line;
        let liquid = liquid_line - illiquid_line;
        let highly_liquid = 1.0 - liquid_line;

        // The two logistic curves are strictly ordered (midpoints 0.25 <
        // 0.75), so all three shares must be non-negative.
        assert!(
            illiquid >= 0.0 && liquid >= 0.0 && highly_liquid >= 0.0,
            "liquidity shares must be non-negative \
             (illiquid: {illiquid}, liquid: {liquid}, highly_liquid: {highly_liquid})"
        );

        Self { illiquid, liquid }
    }

    /// Splits `value` proportionally into the three liquidity buckets;
    /// the highly-liquid part is the remainder so the three always sum
    /// back to `value` exactly.
    #[inline(always)]
    pub fn split(&self, value: f64) -> LiquiditySplitResult {
        let illiquid = value * self.illiquid;
        let liquid = value * self.liquid;
        let highly_liquid = value - illiquid - liquid;
        LiquiditySplitResult {
            illiquid,
            liquid,
            highly_liquid,
        }
    }

    /// Logistic curve centered at 0.25; returns a value in 0.0..1.0.
    #[inline(always)]
    fn compute_illiquid_line(x: f64) -> f64 {
        Self::compute_ratio(x, 0.25)
    }

    /// Logistic curve centered at 0.75; returns a value in 0.0..1.0.
    #[inline(always)]
    fn compute_liquid_line(x: f64) -> f64 {
        Self::compute_ratio(x, 0.75)
    }

    /// Decreasing logistic function `l / (1 + e^(k (x - x0)))` with
    /// maximum `l = 1` and steepness `k = 25`.
    #[inline(always)]
    fn compute_ratio(x: f64, x0: f64) -> f64 {
        let l = 1.0;
        let k = 25.0;
        l / (1.0 + E.powf(k * (x - x0)))
    }
}
/// A value split into the three liquidity buckets; fields sum to the
/// original value passed to `LiquidityClassification::split`.
#[derive(Debug, Default)]
pub struct LiquiditySplitResult {
    pub illiquid: f64,
    pub liquid: f64,
    pub highly_liquid: f64,
}
impl LiquiditySplitResult {
    // Returns the share matching the given liquidity bucket.
    // NOTE(review): despite its name this is a keyed accessor,
    // not a `From` conversion.
    pub fn from(&self, address_liquidity: AddressLiquidity) -> f64 {
        match address_liquidity {
            AddressLiquidity::Illiquid => self.illiquid,
            AddressLiquidity::Liquid => self.liquid,
            AddressLiquidity::HighlyLiquid => self.highly_liquid,
        }
    }
}

24
_src/structs/map_path.rs Normal file
View File

@@ -0,0 +1,24 @@
use std::path::PathBuf;
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
/// Filesystem path of a map's storage directory.
#[derive(Debug, Clone, Deref, DerefMut, Allocative)]
pub struct MapPath(PathBuf);
impl MapPath {
pub fn join(&self, path: &str) -> Self {
let path = path.replace(['-', '_', ' '], "/");
Self(self.0.join(path))
}
pub fn unwrap(&self) -> &PathBuf {
&self.0
}
}
impl From<PathBuf> for MapPath {
fn from(value: PathBuf) -> Self {
Self(value)
}
}

35
_src/structs/map_value.rs Normal file
View File

@@ -0,0 +1,35 @@
use std::fmt::{Debug, Display};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use super::{Date, Height, Timestamp, OHLC};
/// Marker trait for anything storable as a map value: cheap to copy,
/// (de)serializable with both serde and bincode, printable, and usable
/// across threads.
pub trait MapValue:
    Clone
    + Copy
    + Default
    + Debug
    + Serialize
    + DeserializeOwned
    + Encode
    + Decode
    + Sync
    + Send
    + Allocative
    + Display
{
}
// Primitive values.
impl MapValue for u8 {}
impl MapValue for u16 {}
impl MapValue for u32 {}
impl MapValue for u64 {}
impl MapValue for usize {}
impl MapValue for f32 {}
impl MapValue for f64 {}
// Domain values.
impl MapValue for Date {}
impl MapValue for OHLC {}
impl MapValue for Height {}
impl MapValue for Timestamp {}

80
_src/structs/mod.rs Normal file
View File

@@ -0,0 +1,80 @@
// Module declarations for the `structs` crate module.
// NOTE(review): `_src/structs/ram.rs` is part of this commit but has no
// `mod ram;` declaration here, so it is never compiled — confirm whether
// it should be declared (and re-exported) or removed.
mod address;
mod address_data;
mod address_liquidity;
mod address_realized_data;
mod address_size;
mod address_split;
mod address_type;
mod amount;
mod any_map;
mod array;
mod bi_map;
mod block_data;
mod block_path;
mod config;
mod counter;
mod date;
mod date_data;
mod date_map;
mod date_map_chunk_id;
mod empty_address_data;
mod epoch;
mod exit;
mod generic_map;
mod height;
mod height_map;
mod height_map_chunk_id;
mod instant;
mod liquidity;
mod map_path;
mod map_value;
mod ohlc;
mod partial_txout_data;
mod price;
mod rpc;
mod sent_data;
mod serialized_btreemap;
mod serialized_vec;
mod timestamp;
mod tx_data;
mod txout_index;
// Flat re-exports: everything is addressable as `crate::structs::X`.
// (`rpc` only contains trait impls, hence no re-export.)
pub use address::*;
pub use address_data::*;
pub use address_liquidity::*;
pub use address_realized_data::*;
pub use address_size::*;
pub use address_split::*;
pub use address_type::*;
pub use amount::*;
pub use any_map::*;
pub use array::*;
pub use bi_map::*;
pub use block_data::*;
pub use block_path::*;
pub use config::*;
pub use counter::*;
pub use date::*;
pub use date_data::*;
pub use date_map::*;
pub use date_map_chunk_id::*;
pub use empty_address_data::*;
pub use epoch::*;
pub use exit::*;
pub use generic_map::*;
pub use height::*;
pub use height_map::*;
pub use height_map_chunk_id::*;
pub use instant::*;
pub use liquidity::*;
pub use map_path::*;
pub use map_value::*;
pub use ohlc::*;
pub use partial_txout_data::*;
pub use price::*;
pub use sent_data::*;
pub use serialized_btreemap::*;
pub use serialized_vec::*;
pub use timestamp::*;
pub use tx_data::*;
pub use txout_index::*;

24
_src/structs/ohlc.rs Normal file
View File

@@ -0,0 +1,24 @@
use std::fmt::{self};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
/// One open/high/low/close price candle.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Default, Deserialize, Serialize, Encode, Decode, Clone, Copy, Allocative)]
pub struct OHLC {
    pub open: f32,
    pub high: f32,
    pub low: f32,
    pub close: f32,
}
impl fmt::Display for OHLC {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{{ open: {}, high: {}, low: {}, close: {} }}",
self.open, self.high, self.low, self.close
)
}
}

View File

@@ -0,0 +1,18 @@
use super::{Address, Amount};
/// Partially-resolved data for one transaction output, produced before the
/// address index is fully known.
#[derive(Debug)]
pub struct PartialTxoutData {
    pub amount: Amount,
    // None for outputs without a decodable address.
    pub address: Option<Address>,
    // Index of a previously-seen address, when already known.
    pub address_index_opt: Option<u32>,
}
impl PartialTxoutData {
    pub fn new(address: Option<Address>, amount: Amount, address_index_opt: Option<u32>) -> Self {
        Self {
            address,
            amount,
            address_index_opt,
        }
    }
}

106
_src/structs/price.rs Normal file
View File

@@ -0,0 +1,106 @@
use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use super::Amount;
/// A USD price stored as whole cents (newtype over `u64`).
#[derive(
    Debug,
    Default,
    Clone,
    Copy,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
    Encode,
    Decode,
    Allocative,
)]
pub struct Price(u64);
// Number of significant digits kept by `Price::to_significant`.
const SIGNIFICANT_DIGITS: i32 = 3;
impl Price {
    pub const ZERO: Price = Price(0);

    /// Raw value in US cents.
    pub fn to_cent(self) -> u64 {
        self.0
    }

    /// Converts to dollars (exact: cents / 100).
    pub fn to_dollar(self) -> f64 {
        self.0 as f64 / 100.0
    }

    pub fn from_cent(cent: u64) -> Self {
        Self(cent)
    }

    /// Converts a dollar amount to cents, rounding to the nearest cent.
    ///
    /// Rounding (instead of the previous truncating `as` cast) matters
    /// because many dollar amounts are not exactly representable in f64:
    /// e.g. `0.29_f64 * 100.0 == 28.999…`, which truncated to 28 cents
    /// and broke the `from_dollar(to_dollar())` round-trip.
    /// NOTE(review): a negative input still saturates to 0 through the
    /// cast — assumed prices are never negative; confirm upstream.
    pub fn from_dollar(dollar: f64) -> Self {
        Self((dollar * 100.0).round() as u64)
    }

    /// Rounds the price to `SIGNIFICANT_DIGITS` significant digits
    /// (values with fewer digits are returned unchanged).
    pub fn to_significant(self) -> Self {
        let mut price = self;
        let ilog10 = price.0.checked_ilog10().unwrap_or(0) as i32;
        if ilog10 >= SIGNIFICANT_DIGITS {
            let log_diff = ilog10 - SIGNIFICANT_DIGITS + 1;
            let pow = 10.0_f64.powi(log_diff);
            price = Price::from_cent(((price.0 as f64 / pow).round() * pow) as u64);
        }
        price
    }
}
impl Add for Price {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl AddAssign for Price {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
}
}
impl Sub for Price {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
impl SubAssign for Price {
fn sub_assign(&mut self, rhs: Self) {
self.0 -= rhs.0;
}
}
impl Mul<Amount> for Price {
    type Output = Self;
    /// Price-per-BTC × amount → total value, rounded to the nearest cent.
    fn mul(self, rhs: Amount) -> Self::Output {
        let cents = self.to_cent() as f64;
        let sats = rhs.to_sat() as f64;
        Self((cents * sats / Amount::ONE_BTC_F64).round() as u64)
    }
}
impl Div<Amount> for Price {
    type Output = Self;
    /// Total value ÷ amount → price per BTC, rounded to the nearest cent.
    fn div(self, rhs: Amount) -> Self::Output {
        let cents = self.to_cent() as f64;
        let sats = rhs.to_sat() as f64;
        Self((cents * Amount::ONE_BTC_F64 / sats).round() as u64)
    }
}

26
_src/structs/ram.rs Normal file
View File

@@ -0,0 +1,26 @@
use derive_deref::Deref;
use memory_stats::memory_stats;
use sysinfo::System;
use crate::structs::Config;
/// Wrapper around `sysinfo::System` used to watch memory pressure.
#[allow(clippy::upper_case_acronyms)]
#[derive(Deref)]
pub struct RAM(System);
impl RAM {
    pub fn new() -> Self {
        Self(System::new_all())
    }
    // True when the process's physical memory use exceeds the limit:
    // either the configured `max_ram` (in GB) or, absent one, half of
    // the machine's total RAM.
    // NOTE(review): `memory_stats().unwrap()` panics if stats cannot be
    // read on this platform — confirm that's acceptable here.
    pub fn max_exceeded(&self, config: &Config) -> bool {
        let ram_used = memory_stats().unwrap().physical_mem as f64;
        if let Some(max_ram) = config.max_ram {
            (ram_used / 1_000_000_000.0) > max_ram
        } else {
            let ram_total = self.total_memory() as f64;
            ram_used / ram_total > 0.5
        }
    }
}

17
_src/structs/rpc.rs Normal file
View File

@@ -0,0 +1,17 @@
use brk_parser::bitcoincore_rpc::Client;
use crate::structs::Config;
/// Builds a Bitcoin Core RPC client from the app config, defaulting to
/// localhost:8332 when no connection settings are given.
impl From<&Config> for Client {
    // NOTE(review): both unwraps panic on missing/invalid RPC auth or an
    // unreachable node — presumably intentional fail-fast at startup.
    fn from(config: &Config) -> Self {
        Client::new(
            &format!(
                "http://{}:{}",
                config.rpcconnect().unwrap_or(&"localhost".to_owned()),
                config.rpcport().unwrap_or(8332)
            ),
            config.to_rpc_auth().unwrap(),
        )
        .unwrap()
    }
}

14
_src/structs/sent_data.rs Normal file
View File

@@ -0,0 +1,14 @@
use super::Amount;
/// Running totals of coins sent: cumulative volume and number of sends.
#[derive(Default, Debug)]
pub struct SentData {
    pub volume: Amount,
    pub count: u32,
}
impl SentData {
    // Records one send of `amount`.
    pub fn send(&mut self, amount: Amount) {
        self.volume += amount;
        self.count += 1;
    }
}

View File

@@ -0,0 +1,87 @@
use std::{collections::BTreeMap, fmt::Debug, path::Path};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::io::Serialization;
use super::{Date, DateMap, MapChunkId, MapKey, MapSerialized, MapValue, Timestamp};
/// Sparse on-disk representation: keys stored explicitly in a `BTreeMap`.
pub type SerializedDateMap<T> = SerializedBTreeMap<Date, T>;
pub type SerializedTimeMap<T> = SerializedBTreeMap<Timestamp, T>;
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedBTreeMap<Key, Value>
where
    Key: Ord,
{
    // Format version, checked on import to invalidate stale files.
    version: u32,
    pub map: BTreeMap<Key, Value>,
}
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedBTreeMap<Key, Value>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId> + Serialize,
    Value: MapValue,
{
    fn new(version: u32) -> Self {
        Self {
            version,
            map: BTreeMap::default(),
        }
    }
    // Keys are stored explicitly, so the chunk id is not needed here.
    fn get_last_key(&self, _: &ChunkId) -> Option<Key> {
        self.map.last_key_value().map(|(k, _)| k.to_owned())
    }
    fn version(&self) -> u32 {
        self.version
    }
    fn get(&self, key: &Key) -> Option<&Value> {
        self.map.get(key)
    }
    fn last(&self) -> Option<&Value> {
        self.map.last_key_value().map(|(_, v)| v)
    }
    fn extend(&mut self, map: BTreeMap<Key, Value>) {
        self.map.extend(map)
    }
    // Merges every chunk file in `path` into a single map; entries from
    // later files overwrite duplicates from earlier ones.
    // NOTE(review): `s.unwrap()` panics when the directory holds no chunk
    // files — confirm callers only invoke this on populated maps.
    fn import_all(path: &Path, serialization: &Serialization) -> Self {
        let mut s = None;
        DateMap::<usize>::_read_dir(path, serialization)
            .iter()
            .for_each(|(_, path)| {
                let map = serialization.import::<Self>(path).unwrap();
                if s.is_none() {
                    s.replace(map);
                } else {
                    #[allow(clippy::unnecessary_unwrap)]
                    s.as_mut().unwrap().map.extend(map.map);
                }
            });
        s.unwrap()
    }
    // Exports as two-column CSV with a "<key name>,<id>" header row.
    fn to_csv(self, id: &str) -> String {
        let mut csv = format!("{},{}\n", Key::map_name(), id);
        self.map.iter().for_each(|(k, v)| {
            csv += &format!("{},{}\n", k, v);
        });
        csv
    }
    fn map(&self) -> &impl Serialize {
        &self.map
    }
}

View File

@@ -0,0 +1,106 @@
use std::{cmp::Ordering, collections::BTreeMap, fmt::Debug, path::Path};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::io::Serialization;
use super::{HeightMap, MapChunkId, MapKey, MapSerialized, MapValue};
/// Dense on-disk representation: values stored positionally, the key being
/// the index offset within the chunk.
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedVec<Value> {
    // Format version, checked on import to invalidate stale files.
    version: u32,
    pub map: Vec<Value>,
}
impl<Value> SerializedVec<Value> {
    // Positional lookup within this chunk.
    pub fn get_index(&self, index: usize) -> Option<&Value> {
        self.map.get(index)
    }
}
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedVec<Value>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Value: MapValue,
{
    #[inline(always)]
    fn new(version: u32) -> Self {
        Self {
            version,
            map: vec![],
        }
    }
    // Last key = chunk start + number of stored values (dense storage).
    fn get_last_key(&self, chunk_id: &ChunkId) -> Option<Key> {
        Some(Key::from_usize(chunk_id.to_usize() + self.map.len()))
    }
    #[inline(always)]
    fn version(&self) -> u32 {
        self.version
    }
    #[inline(always)]
    fn get(&self, serialized_key: &Key) -> Option<&Value> {
        self.map.get(serialized_key.to_usize())
    }
    #[inline(always)]
    fn last(&self) -> Option<&Value> {
        self.map.last()
    }
    // Dense storage invariant: incoming keys must either overwrite an
    // existing slot or extend the vec by exactly one — a gap means the
    // caller skipped a key, which is a logic error.
    fn extend(&mut self, map: BTreeMap<Key, Value>) {
        map.into_iter().for_each(|(key, value)| {
            let key = key.to_serialized_key().to_usize();
            match self.map.len().cmp(&key) {
                Ordering::Greater => self.map[key] = value,
                Ordering::Equal => self.map.push(value),
                Ordering::Less => {
                    dbg!(&self.map, key, value);
                    panic!()
                }
            }
        });
    }
    // Concatenates every chunk file in `path`, in directory order.
    // NOTE(review): `s.unwrap()` panics when the directory holds no chunk
    // files — confirm callers only invoke this on populated maps.
    fn import_all(path: &Path, serialization: &Serialization) -> Self
    where
        Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
        Value: MapValue,
    {
        let mut s = None;
        HeightMap::<usize>::_read_dir(path, serialization)
            .iter()
            .for_each(|(_, path)| {
                let mut map = serialization.import::<Self>(path).unwrap();
                if s.is_none() {
                    s.replace(map);
                } else {
                    #[allow(clippy::unnecessary_unwrap)]
                    s.as_mut().unwrap().map.append(&mut map.map);
                }
            });
        s.unwrap()
    }
    // Exports as two-column CSV with a "<key name>,<id>" header row.
    // NOTE(review): uses Debug ({:?}) formatting while the BTreeMap
    // variant uses Display — confirm whether that difference in CSV
    // output is intentional.
    fn to_csv(self, id: &str) -> String {
        let mut csv = format!("{},{}\n", Key::map_name(), id);
        self.map.iter().enumerate().for_each(|(k, v)| {
            csv += &format!("{:?},{:?}\n", k, v);
        });
        csv
    }
    fn map(&self) -> &impl Serialize {
        &self.map
    }
}

137
_src/structs/timestamp.rs Normal file
View File

@@ -0,0 +1,137 @@
use std::{fmt, ops::Sub};
use allocative::Allocative;
use bincode::{Decode, Encode};
use chrono::{NaiveDateTime, NaiveTime, TimeZone, Timelike, Utc};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use crate::utils::{ONE_DAY_IN_S, ONE_HOUR_IN_S};
use super::{Date, HeightMapChunkId, MapKey};
/// A Unix timestamp in whole seconds (newtype over `u32`).
///
/// NOTE(review): u32 seconds wrap in 2106 — fine for block timestamps
/// for the foreseeable future.
#[derive(
    Debug,
    Default,
    Clone,
    Copy,
    Allocative,
    Serialize,
    Deserialize,
    Deref,
    DerefMut,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Encode,
    Decode,
)]
pub struct Timestamp(u32);
impl Timestamp {
    pub const ZERO: Self = Self(0);
    // Current wall-clock time, truncated to u32 seconds.
    pub fn now() -> Self {
        Self(chrono::offset::Utc::now().timestamp() as u32)
    }
    // Calendar date (UTC) containing this timestamp.
    pub fn to_date(self) -> Date {
        Date::wrap(
            Utc.timestamp_opt(i64::from(self.0), 0)
                .unwrap()
                .date_naive(),
        )
    }
    // Truncates to the start of the containing minute (seconds -> 0).
    pub fn to_floored_seconds(self) -> Self {
        let date_time = Utc.timestamp_opt(i64::from(self.0), 0).unwrap();
        Self::from(
            NaiveDateTime::new(
                date_time.date_naive(),
                NaiveTime::from_hms_opt(date_time.hour(), date_time.minute(), 0).unwrap(),
            )
            .and_utc()
            .timestamp() as u32,
        )
    }
    // Whole days between the two timestamps; 0 when `younger` is not
    // actually later than `older` (guards the subtraction below).
    pub fn difference_in_days_between(older: Self, younger: Self) -> u32 {
        if younger <= older {
            0
        } else {
            *(younger - older) / ONE_DAY_IN_S as u32
        }
    }
    // True when self precedes `younger` by strictly more than one hour;
    // checked_sub makes an out-of-order pair simply return false.
    pub fn older_by_1h_plus_than(&self, younger: Self) -> bool {
        (*younger).checked_sub(**self).unwrap_or_default() > ONE_HOUR_IN_S as u32
    }
}
impl Sub for Timestamp {
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self::from(self.0 - rhs.0)
}
}
impl fmt::Display for Timestamp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", **self)
}
}
impl From<u32> for Timestamp {
fn from(value: u32) -> Self {
Self(value)
}
}
// NOTE(review): Timestamp appears to be used only as a secondary key
// (iteration + naming/serialization), so the chunking-related hooks are
// deliberately unreachable — confirm against `GenericMap`'s usage.
impl MapKey<HeightMapChunkId> for Timestamp {
    fn to_chunk_id(&self) -> HeightMapChunkId {
        unreachable!();
    }
    fn to_first_unsafe(&self) -> Option<Self> {
        unreachable!();
    }
    fn to_serialized_key(&self) -> Self {
        unreachable!();
    }
    fn is_out_of_bounds(&self) -> bool {
        unreachable!();
    }
    fn is_first(&self) -> bool {
        unreachable!();
    }
    fn checked_sub(&self, _: usize) -> Option<Self> {
        unreachable!();
    }
    fn min_percentile_key() -> Self {
        unreachable!();
    }
    // Inclusive ascending iteration, one value per second.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        (**self..=**other).map(Timestamp::from)
    }
    fn map_name<'a>() -> &'a str {
        "timestamp"
    }
    fn to_usize(&self) -> usize {
        (**self) as usize
    }
    fn from_usize(t: usize) -> Self {
        Self(t as u32)
    }
}

28
_src/structs/tx_data.rs Normal file
View File

@@ -0,0 +1,28 @@
use allocative::Allocative;
use snkrj::{direct_repr, Storable, UnsizedStorable};
use super::BlockPath;
/// Per-transaction tracking data: its index, where its block lives, and
/// how many of its outputs are still unspent.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Allocative)]
pub struct TxData {
    pub index: u32,
    pub block_path: BlockPath,
    // Count of currently-unspent outputs; the tx can be pruned at 0.
    pub utxos: u16,
}
// Enables raw-byte storage in the snkrj store.
direct_repr!(TxData);
impl TxData {
#[inline(always)]
pub fn new(index: u32, block_path: BlockPath, utxos: u16) -> Self {
Self {
index,
block_path,
utxos,
}
}
#[inline(always)]
pub fn is_empty(&self) -> bool {
self.utxos == 0
}
}

View File

@@ -0,0 +1,34 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use snkrj::{direct_repr, Storable, UnsizedStorable};
/// Identifies a single transaction output by (tx index, output position).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Allocative)]
pub struct TxoutIndex {
    pub tx_index: u32,
    pub vout: u16,
}
// Enables raw-byte storage in the snkrj store.
direct_repr!(TxoutIndex);
// u64 packing layout: tx_index in the high bits, vout in the low 16.
const SHIFT: u64 = 16;
const AND: u64 = (1 << SHIFT) - 1;
impl TxoutIndex {
#[inline(always)]
pub fn new(tx_index: u32, vout: u16) -> Self {
Self { tx_index, vout }
}
#[inline(always)]
pub fn as_u64(&self) -> u64 {
((self.tx_index as u64) << SHIFT) + self.vout as u64
}
}
impl From<u64> for TxoutIndex {
    /// Unpacks a u64 produced by `as_u64` back into its parts.
    fn from(packed: u64) -> Self {
        let tx_index = (packed >> SHIFT) as u32;
        let vout = (packed & AND) as u16;
        Self { tx_index, vout }
    }
}