mirror of
https://github.com/bitcoinresearchkit/brk.git
synced 2026-04-25 15:19:58 -07:00
git: reset
This commit is contained in:
133
parser/src/structs/address.rs
Normal file
133
parser/src/structs/address.rs
Normal file
@@ -0,0 +1,133 @@
|
||||
use bitcoin::TxOut;
|
||||
use bitcoin_hashes::{hash160, Hash};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::{
|
||||
bitcoin::multisig_addresses,
|
||||
databases::{U8x19, U8x31, SANAKIRJA_MAX_KEY_SIZE},
|
||||
};
|
||||
|
||||
use super::{AddressType, Counter};
|
||||
|
||||
/// A parsed `script_pubkey`, classified by output type.
///
/// Key/script-hash variants carry a `(u16, U8xN)` pair: a small prefix
/// derived from the first hash bytes plus the remaining bytes (see
/// `Address::split_slice`), sized to fit the database key types.
/// Non-addressable outputs (empty, OP_RETURN, push-only, unknown) only
/// carry a per-kind running counter value (`u32`).
#[derive(Debug, Clone, PartialEq, PartialOrd, Ord, Eq)]
pub enum Address {
    // Example of an empty script output:
    // https://mempool.space/tx/7bd54def72825008b4ca0f4aeff13e6be2c5fe0f23430629a9d484a1ac2a29b8
    Empty(u32),
    OpReturn(u32),
    PushOnly(u32),
    Unknown(u32),
    // Example of a bare multisig output:
    // https://mempool.space/tx/274f8be3b7b9b1a220285f5f71f61e2691dd04df9d69bb02a8b3b85f91fb1857
    MultiSig(Box<[u8]>),
    P2PK((u16, U8x19)),
    P2PKH((u16, U8x19)),
    P2SH((u16, U8x19)),
    P2WPKH((u16, U8x19)),
    P2WSH((u16, U8x31)),
    P2TR((u16, U8x31)),
}
|
||||
|
||||
impl Address {
|
||||
pub fn to_type(&self) -> AddressType {
|
||||
match self {
|
||||
Self::Empty(_) => AddressType::Empty,
|
||||
Self::OpReturn(_) => AddressType::OpReturn,
|
||||
Self::PushOnly(_) => AddressType::PushOnly,
|
||||
Self::Unknown(_) => AddressType::Unknown,
|
||||
Self::MultiSig(_) => AddressType::MultiSig,
|
||||
Self::P2PK(_) => AddressType::P2PK,
|
||||
Self::P2PKH(_) => AddressType::P2PKH,
|
||||
Self::P2SH(_) => AddressType::P2SH,
|
||||
Self::P2WPKH(_) => AddressType::P2WPKH,
|
||||
Self::P2WSH(_) => AddressType::P2WSH,
|
||||
Self::P2TR(_) => AddressType::P2TR,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from(
|
||||
txout: &TxOut,
|
||||
op_return_addresses: &mut Counter,
|
||||
push_only_addresses: &mut Counter,
|
||||
unknown_addresses: &mut Counter,
|
||||
empty_addresses: &mut Counter,
|
||||
) -> Self {
|
||||
let script = &txout.script_pubkey;
|
||||
|
||||
if script.is_p2pk() {
|
||||
let pk = match script.as_bytes().len() {
|
||||
67 => &script.as_bytes()[1..66],
|
||||
35 => &script.as_bytes()[1..34],
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let hash = hash160::Hash::hash(pk);
|
||||
|
||||
let (prefix, rest) = Self::split_slice(&hash[..]);
|
||||
|
||||
Self::P2PK((prefix, rest.into()))
|
||||
} else if script.is_p2pkh() {
|
||||
let (prefix, rest) = Self::split_slice(&script.as_bytes()[3..23]);
|
||||
Self::P2PKH((prefix, rest.into()))
|
||||
} else if script.is_p2sh() {
|
||||
let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..22]);
|
||||
Self::P2SH((prefix, rest.into()))
|
||||
} else if script.is_p2wpkh() {
|
||||
let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
|
||||
Self::P2WPKH((prefix, rest.into()))
|
||||
} else if script.is_p2wsh() {
|
||||
let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
|
||||
Self::P2WSH((prefix, rest.into()))
|
||||
} else if script.is_p2tr() {
|
||||
let (prefix, rest) = Self::split_slice(&script.as_bytes()[2..]);
|
||||
Self::P2TR((prefix, rest.into()))
|
||||
} else if script.is_empty() {
|
||||
let index = empty_addresses.inner();
|
||||
|
||||
empty_addresses.increment();
|
||||
|
||||
Self::Empty(index)
|
||||
} else if script.is_op_return() {
|
||||
let index = op_return_addresses.inner();
|
||||
|
||||
op_return_addresses.increment();
|
||||
|
||||
Self::OpReturn(index)
|
||||
} else if script.is_multisig() {
|
||||
let vec = multisig_addresses(script);
|
||||
|
||||
if vec.is_empty() {
|
||||
dbg!(txout);
|
||||
panic!("Multisig addresses cannot be empty !");
|
||||
}
|
||||
|
||||
let mut vec = vec.into_iter().sorted_unstable().concat();
|
||||
|
||||
// TODO: Terrible! Store everything instead of only the 510 first bytes but how
|
||||
// Sanakirja key limit is [u8; 510] and some multisig transactions have 999 keys
|
||||
if vec.len() > SANAKIRJA_MAX_KEY_SIZE {
|
||||
vec = vec.drain(..SANAKIRJA_MAX_KEY_SIZE).collect_vec();
|
||||
}
|
||||
|
||||
Self::MultiSig(vec.into())
|
||||
} else if script.is_push_only() {
|
||||
let index = push_only_addresses.inner();
|
||||
|
||||
push_only_addresses.increment();
|
||||
|
||||
Self::PushOnly(index)
|
||||
} else {
|
||||
Self::new_unknown(unknown_addresses)
|
||||
}
|
||||
}
|
||||
|
||||
fn new_unknown(unknown_addresses: &mut Counter) -> Address {
|
||||
let index = unknown_addresses.inner();
|
||||
unknown_addresses.increment();
|
||||
Self::Unknown(index)
|
||||
}
|
||||
|
||||
fn split_slice(slice: &[u8]) -> (u16, &[u8]) {
|
||||
let prefix = ((slice[0] as u16) << 2) + ((slice[1] as u16) >> 6);
|
||||
let rest = &slice[1..];
|
||||
(prefix, rest)
|
||||
}
|
||||
}
|
||||
112
parser/src/structs/address_data.rs
Normal file
112
parser/src/structs/address_data.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
use allocative::Allocative;
|
||||
use color_eyre::eyre::eyre;
|
||||
use sanakirja::{direct_repr, Storable, UnsizedStorable};
|
||||
|
||||
use super::{AddressType, EmptyAddressData, LiquidityClassification, Price, WAmount};
|
||||
|
||||
/// Running ledger state for a single address: current balance, lifetime
/// flows, realized cap, and how many of its outputs are still unspent.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Allocative)]
pub struct AddressData {
    pub address_type: AddressType,
    // Current balance.
    pub amount: WAmount,
    // Lifetime total sent.
    pub sent: WAmount,
    // Lifetime total received.
    pub received: WAmount,
    // Sum of (price at receipt * amount) over still-held coins.
    pub realized_cap: Price,
    // Number of currently unspent outputs owned by this address.
    pub outputs_len: u32,
}
// Stored directly by its memory representation in the sanakirja database.
direct_repr!(AddressData);
|
||||
|
||||
impl AddressData {
    /// Creates a zeroed ledger entry of the given type.
    pub fn new(address_type: AddressType) -> Self {
        Self {
            address_type,
            amount: WAmount::ZERO,
            sent: WAmount::ZERO,
            received: WAmount::ZERO,
            realized_cap: Price::ZERO,
            outputs_len: 0,
        }
    }

    /// Credits `amount` received at `price`: bumps balance, lifetime
    /// received, the unspent-output count, and the realized cap.
    pub fn receive(&mut self, amount: WAmount, price: Price) {
        let previous_amount = self.amount;

        let new_amount = previous_amount + amount;

        self.amount = new_amount;

        self.received += amount;

        self.outputs_len += 1;

        // Dollar value of this receipt at the current price.
        let received_value = price * amount;

        self.realized_cap += received_value;
    }

    /// Debits `amount` spent now at `current_price` that was acquired at
    /// `sent_amount_price`, and returns the realized profit or loss.
    ///
    /// # Errors
    /// Returns an error if the balance is smaller than `amount`.
    pub fn send(
        &mut self,
        amount: WAmount,
        current_price: Price,
        sent_amount_price: Price,
    ) -> color_eyre::Result<ProfitOrLoss> {
        let previous_amount = self.amount;

        if previous_amount < amount {
            return Err(eyre!("previous_amount smaller than sent amount"));
        }

        let new_amount = previous_amount - amount;

        self.amount = new_amount;

        self.sent += amount;

        self.outputs_len -= 1;

        // Remove the coins' acquisition value from the realized cap…
        let previous_sent_dollar_value = sent_amount_price * amount;
        self.realized_cap -= previous_sent_dollar_value;

        // …and compare with today's value to realize a profit or a loss.
        let current_sent_dollar_value = current_price * amount;

        let profit_or_loss = if current_sent_dollar_value >= previous_sent_dollar_value {
            ProfitOrLoss::Profit(current_sent_dollar_value - previous_sent_dollar_value)
        } else {
            ProfitOrLoss::Loss(previous_sent_dollar_value - current_sent_dollar_value)
        };

        Ok(profit_or_loss)
    }

    /// True when the balance is zero.
    ///
    /// # Panics
    /// Panics if the balance is zero while unspent outputs remain —
    /// that state is an accounting bug.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        if self.amount == WAmount::ZERO {
            if self.outputs_len != 0 {
                unreachable!();
            }

            true
        } else {
            false
        }
    }

    /// Re-inflates a full entry from its compact empty form
    /// (`sent == received == transfered`, everything else zero).
    pub fn from_empty(empty: &EmptyAddressData) -> Self {
        Self {
            address_type: empty.address_type,
            amount: WAmount::ZERO,
            sent: empty.transfered,
            received: empty.transfered,
            realized_cap: Price::ZERO,
            outputs_len: 0,
        }
    }

    /// Derives the liquidity bucket from lifetime sent/received totals.
    pub fn compute_liquidity_classification(&self) -> LiquidityClassification {
        LiquidityClassification::new(self.sent, self.received)
    }
}
|
||||
|
||||
/// Outcome of a spend relative to the coins' acquisition value:
/// the realized gain or loss in dollar terms (always non-negative).
pub enum ProfitOrLoss {
    Profit(Price),
    Loss(Price),
}
|
||||
46
parser/src/structs/address_realized_data.rs
Normal file
46
parser/src/structs/address_realized_data.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use super::{AddressData, Price, ProfitOrLoss, WAmount};
|
||||
|
||||
/// Per-block activity summary for one address: its state at the start of
/// the block plus the flows and realized P&L accumulated during it.
#[derive(Debug)]
pub struct AddressRealizedData {
    // Snapshot of the address before this block's transactions applied.
    pub initial_address_data: AddressData,
    pub received: WAmount,
    pub sent: WAmount,
    pub profit: Price,
    pub loss: Price,
    pub utxos_created: u32,
    pub utxos_destroyed: u32,
}
|
||||
|
||||
impl AddressRealizedData {
|
||||
pub fn default(initial_address_data: &AddressData) -> Self {
|
||||
Self {
|
||||
received: WAmount::ZERO,
|
||||
sent: WAmount::ZERO,
|
||||
profit: Price::ZERO,
|
||||
loss: Price::ZERO,
|
||||
utxos_created: 0,
|
||||
utxos_destroyed: 0,
|
||||
initial_address_data: *initial_address_data,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn receive(&mut self, amount: WAmount) {
|
||||
self.received += amount;
|
||||
self.utxos_created += 1;
|
||||
}
|
||||
|
||||
pub fn send(&mut self, amount: WAmount, realized_profit_or_loss: ProfitOrLoss) {
|
||||
self.sent += amount;
|
||||
|
||||
self.utxos_destroyed += 1;
|
||||
|
||||
match realized_profit_or_loss {
|
||||
ProfitOrLoss::Profit(price) => {
|
||||
self.profit += price;
|
||||
}
|
||||
ProfitOrLoss::Loss(price) => {
|
||||
self.loss += price;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
32
parser/src/structs/address_size.rs
Normal file
32
parser/src/structs/address_size.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use allocative::Allocative;
|
||||
|
||||
use super::WAmount;
|
||||
|
||||
/// Balance-size cohort of an address, by order of magnitude in satoshis
/// (see `from_amount` for the exact thresholds).
#[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Allocative)]
pub enum AddressSize {
    // 0 sats
    Empty,
    // < 0.1 BTC
    Plankton,
    // < 1 BTC
    Shrimp,
    // < 10 BTC
    Crab,
    // < 100 BTC
    Fish,
    // < 1,000 BTC
    Shark,
    // < 10,000 BTC
    Whale,
    // < 100,000 BTC
    Humpback,
    // >= 100,000 BTC
    Megalodon,
}
|
||||
|
||||
impl AddressSize {
|
||||
pub fn from_amount(amount: WAmount) -> Self {
|
||||
match amount.to_sat() {
|
||||
0 => Self::Empty,
|
||||
1..=9_999_999 => Self::Plankton,
|
||||
10_000_000..=99_999_999 => Self::Shrimp,
|
||||
100_000_000..=999_999_999 => Self::Crab,
|
||||
1_000_000_000..=9_999_999_999 => Self::Fish,
|
||||
10_000_000_000..=99_999_999_999 => Self::Shark,
|
||||
100_000_000_000..=999_999_999_999 => Self::Whale,
|
||||
1_000_000_000_000..=9_999_999_999_999 => Self::Humpback,
|
||||
10_000_000_000_000..=u64::MAX => Self::Megalodon,
|
||||
}
|
||||
}
|
||||
}
|
||||
11
parser/src/structs/address_split.rs
Normal file
11
parser/src/structs/address_split.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
use allocative::Allocative;
|
||||
|
||||
use super::{AddressSize, AddressType};
|
||||
|
||||
/// Filter selecting which addresses a cohort-level metric aggregates over:
/// all of them, one script type, or one balance-size bucket.
#[derive(Default, Allocative)]
pub enum AddressSplit {
    #[default]
    All,
    Type(AddressType),
    Size(AddressSize),
}
|
||||
21
parser/src/structs/address_type.rs
Normal file
21
parser/src/structs/address_type.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
// Overview of Bitcoin script/address types:
// https://unchained.com/blog/bitcoin-address-types-compared/
/// Lightweight tag for an output's script type; mirrors the variants of
/// `Address` without carrying any payload.
#[derive(
    Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Default, Encode, Decode, Allocative,
)]
pub enum AddressType {
    Empty,
    OpReturn,
    PushOnly,
    #[default]
    Unknown,
    MultiSig,
    P2PK,
    P2PKH,
    P2SH,
    P2WPKH,
    P2WSH,
    P2TR,
}
|
||||
22
parser/src/structs/any_map.rs
Normal file
22
parser/src/structs/any_map.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
/// Type-erased interface over on-disk dataset maps: exposes their export
/// paths and the three-phase export lifecycle.
pub trait AnyMap {
    /// Directory holding the full exported dataset.
    fn path(&self) -> &str;
    /// Optional extra file holding only the latest value.
    fn path_last(&self) -> &Option<String>;

    /// Name of the value type `T`, used for export metadata.
    fn t_name(&self) -> &str;

    /// All (path, type-name) pairs this map exports to — the main path,
    /// plus the "last value" path when one is configured.
    fn exported_path_with_t_name(&self) -> Vec<(&str, &str)> {
        let t_name = self.t_name();

        if let Some(path_last) = self.path_last() {
            vec![(self.path(), t_name), (path_last, t_name)]
        } else {
            vec![(self.path(), t_name)]
        }
    }

    // fn reset(&mut self) -> color_eyre::Result<()>;

    // Export lifecycle: prepare, write, then clean up in-memory state.
    fn pre_export(&mut self);
    fn export(&self) -> color_eyre::Result<()>;
    fn post_export(&mut self);
}
|
||||
341
parser/src/structs/bi_map.rs
Normal file
341
parser/src/structs/bi_map.rs
Normal file
@@ -0,0 +1,341 @@
|
||||
use std::{
|
||||
iter::Sum,
|
||||
ops::{Add, Div, Mul, RangeInclusive, Sub},
|
||||
};
|
||||
|
||||
use allocative::Allocative;
|
||||
use ordered_float::FloatCore;
|
||||
|
||||
use crate::{bitcoin::TARGET_BLOCKS_PER_DAY, utils::LossyFrom};
|
||||
|
||||
use super::{AnyDateMap, AnyHeightMap, AnyMap, DateMap, HeightMap, MapValue, WNaiveDate};
|
||||
|
||||
/// A dataset indexed both per block height and per calendar date; most
/// operations update the two underlying maps in lockstep.
#[derive(Default, Allocative)]
pub struct BiMap<T>
where
    T: MapValue,
{
    pub height: HeightMap<T>,
    pub date: DateMap<T>,
}
|
||||
|
||||
impl<T> BiMap<T>
where
    T: MapValue,
{
    /// Creates a binary-serialized pair; only the height map exports a
    /// "last value" file to avoid duplicating it.
    pub fn new_bin(version: u32, path: &str) -> Self {
        Self {
            height: HeightMap::_new_bin(version, path, true),
            date: DateMap::_new_bin(version, path, false),
        }
    }

    /// Creates a JSON-serialized pair (same last-value policy as `new_bin`).
    pub fn new_json(version: u32, path: &str) -> Self {
        Self {
            height: HeightMap::new_json(version, path, true),
            date: DateMap::new_json(version, path, false),
        }
    }

    /// Inserts, for `date`, the sum of the height map over that day's blocks.
    pub fn date_insert_sum_range(
        &mut self,
        date: WNaiveDate,
        date_blocks_range: &RangeInclusive<usize>,
    ) where
        T: Sum,
    {
        self.date
            .insert(date, self.height.sum_range(date_blocks_range));
    }

    /// Same as `date_insert_sum_range` for many dates, resolving each
    /// day's first/last block heights from the given maps.
    pub fn multi_date_insert_sum_range(
        &mut self,
        dates: &[WNaiveDate],
        first_height: &mut DateMap<usize>,
        last_height: &mut DateMap<usize>,
    ) where
        T: Sum,
    {
        dates.iter().for_each(|date| {
            let first_height = first_height.get_or_import(date).unwrap();
            let last_height = last_height.get_or_import(date).unwrap();
            let range = first_height..=last_height;

            self.date.insert(*date, self.height.sum_range(&range));
        })
    }

    /// Writes the same constant at every given height and date.
    pub fn multi_insert_const(&mut self, heights: &[usize], dates: &[WNaiveDate], constant: T) {
        self.height.multi_insert_const(heights, constant);

        self.date.multi_insert_const(dates, constant);
    }

    /// Writes `transform(source)` at every given height and date.
    pub fn multi_insert_simple_transform<F, K>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<K>,
        transform: &F,
    ) where
        T: Div<Output = T>,
        F: Fn(K) -> T,
        K: MapValue,
    {
        self.height
            .multi_insert_simple_transform(heights, &mut source.height, transform);
        self.date
            .multi_insert_simple_transform(dates, &mut source.date, transform);
    }

    /// Writes `added + adder` element-wise on both axes.
    #[allow(unused)]
    pub fn multi_insert_add<A, B>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        added: &mut BiMap<A>,
        adder: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Add<Output = T>,
    {
        self.height
            .multi_insert_add(heights, &mut added.height, &mut adder.height);
        self.date
            .multi_insert_add(dates, &mut added.date, &mut adder.date);
    }

    /// Writes `subtracted - subtracter` element-wise on both axes.
    pub fn multi_insert_subtract<A, B>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        subtracted: &mut BiMap<A>,
        subtracter: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Sub<Output = T>,
    {
        self.height
            .multi_insert_subtract(heights, &mut subtracted.height, &mut subtracter.height);

        self.date
            .multi_insert_subtract(dates, &mut subtracted.date, &mut subtracter.date);
    }

    /// Writes `multiplied * multiplier` element-wise on both axes.
    pub fn multi_insert_multiply<A, B>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        multiplied: &mut BiMap<A>,
        multiplier: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Mul<Output = T>,
    {
        self.height
            .multi_insert_multiply(heights, &mut multiplied.height, &mut multiplier.height);
        self.date
            .multi_insert_multiply(dates, &mut multiplied.date, &mut multiplier.date);
    }

    /// Writes `divided / divider` element-wise on both axes.
    pub fn multi_insert_divide<A, B>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        divided: &mut BiMap<A>,
        divider: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Div<Output = T> + Mul<Output = T> + From<u8>,
    {
        self.height
            .multi_insert_divide(heights, &mut divided.height, &mut divider.height);
        self.date
            .multi_insert_divide(dates, &mut divided.date, &mut divider.date);
    }

    /// Like `multi_insert_divide` but scaled to a percentage.
    pub fn multi_insert_percentage<A, B>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        divided: &mut BiMap<A>,
        divider: &mut BiMap<B>,
    ) where
        A: MapValue,
        B: MapValue,
        T: LossyFrom<A> + LossyFrom<B>,
        T: Div<Output = T> + Mul<Output = T> + From<u8>,
    {
        self.height
            .multi_insert_percentage(heights, &mut divided.height, &mut divider.height);
        self.date
            .multi_insert_percentage(dates, &mut divided.date, &mut divider.date);
    }

    /// Writes the running total of `source` on both axes.
    pub fn multi_insert_cumulative<K>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<K>,
    ) where
        K: MapValue,
        T: LossyFrom<K>,
        T: Add<Output = T> + Sub<Output = T>,
    {
        self.height
            .multi_insert_cumulative(heights, &mut source.height);

        self.date.multi_insert_cumulative(dates, &mut source.date);
    }

    /// Writes a trailing sum over `days`; the height axis converts days
    /// into an equivalent block count via `TARGET_BLOCKS_PER_DAY`.
    pub fn multi_insert_last_x_sum<K>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<K>,
        days: usize,
    ) where
        K: MapValue,
        T: LossyFrom<K>,
        T: Add<Output = T> + Sub<Output = T>,
    {
        self.height.multi_insert_last_x_sum(
            heights,
            &mut source.height,
            TARGET_BLOCKS_PER_DAY * days,
        );

        self.date
            .multi_insert_last_x_sum(dates, &mut source.date, days);
    }

    /// Writes a simple moving average over `days` (same day→block
    /// conversion as `multi_insert_last_x_sum`).
    pub fn multi_insert_simple_average<K>(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<K>,
        days: usize,
    ) where
        T: Into<f32> + From<f32>,
        K: MapValue + Sum,
        f32: LossyFrom<K>,
    {
        self.height.multi_insert_simple_average(
            heights,
            &mut source.height,
            TARGET_BLOCKS_PER_DAY * days,
        );
        self.date
            .multi_insert_simple_average(dates, &mut source.date, days);
    }

    /// Writes the net change of `source` over `days` on both axes.
    pub fn multi_insert_net_change(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<T>,
        days: usize,
    ) where
        T: Sub<Output = T>,
    {
        self.height.multi_insert_net_change(
            heights,
            &mut source.height,
            TARGET_BLOCKS_PER_DAY * days,
        );
        self.date
            .multi_insert_net_change(dates, &mut source.date, days);
    }

    /// Writes the rolling median over `days` (`None` = whole history).
    pub fn multi_insert_median(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<T>,
        days: Option<usize>,
    ) where
        T: FloatCore,
    {
        self.height.multi_insert_median(
            heights,
            &mut source.height,
            days.map(|days| TARGET_BLOCKS_PER_DAY * days),
        );
        self.date.multi_insert_median(dates, &mut source.date, days);
    }

    /// Writes the rolling `percentile` (0..=1) over `days` on both axes.
    #[allow(unused)]
    pub fn multi_insert_percentile(
        &mut self,
        heights: &[usize],
        dates: &[WNaiveDate],
        source: &mut BiMap<T>,
        percentile: f32,
        days: Option<usize>,
    ) where
        T: FloatCore,
    {
        self.height.multi_insert_percentile(
            heights,
            &mut source.height,
            percentile,
            days.map(|days| TARGET_BLOCKS_PER_DAY * days),
        );
        self.date
            .multi_insert_percentile(dates, &mut source.date, percentile, days);
    }
}
|
||||
|
||||
/// Type-erased access to a `BiMap`'s two axes and their `AnyMap` views.
pub trait AnyBiMap {
    /// Both axes as type-erased maps (date first, then height).
    #[allow(unused)]
    fn as_any_map(&self) -> Vec<&(dyn AnyMap + Send + Sync)>;

    /// Mutable variant of `as_any_map`.
    fn as_any_mut_map(&mut self) -> Vec<&mut dyn AnyMap>;

    fn get_height(&self) -> &(dyn AnyHeightMap + Send + Sync);

    #[allow(unused)]
    fn get_mut_height(&mut self) -> &mut dyn AnyHeightMap;

    fn get_date(&self) -> &(dyn AnyDateMap + Send + Sync);

    #[allow(unused)]
    fn get_mut_date(&mut self) -> &mut dyn AnyDateMap;
}
|
||||
|
||||
impl<T> AnyBiMap for BiMap<T>
where
    T: MapValue,
{
    // Straight delegation to the two inner maps.
    fn as_any_map(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        vec![self.date.as_any_map(), self.height.as_any_map()]
    }

    fn as_any_mut_map(&mut self) -> Vec<&mut dyn AnyMap> {
        vec![self.date.as_any_mut_map(), self.height.as_any_mut_map()]
    }

    fn get_height(&self) -> &(dyn AnyHeightMap + Send + Sync) {
        &self.height
    }

    fn get_mut_height(&mut self) -> &mut dyn AnyHeightMap {
        &mut self.height
    }

    fn get_date(&self) -> &(dyn AnyDateMap + Send + Sync) {
        &self.date
    }

    fn get_mut_date(&mut self) -> &mut dyn AnyDateMap {
        &mut self.date
    }
}
|
||||
41
parser/src/structs/block_data.rs
Normal file
41
parser/src/structs/block_data.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
use super::{Price, WAmount};
|
||||
|
||||
/// Per-block running state: the value and UTXO count still unspent from
/// this block, plus the price and timestamp at which it was mined.
#[derive(Debug, Encode, Decode, Allocative)]
pub struct BlockData {
    pub height: u32,
    // Price when the block was mined.
    pub price: Price,
    pub timestamp: u32,
    // Total amount of this block's outputs still unspent.
    pub amount: WAmount,
    // Number of this block's outputs still unspent.
    pub utxos: u32,
}
|
||||
|
||||
impl BlockData {
|
||||
pub fn new(height: u32, price: Price, timestamp: u32) -> Self {
|
||||
Self {
|
||||
height,
|
||||
price,
|
||||
timestamp,
|
||||
amount: WAmount::ZERO,
|
||||
utxos: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send(&mut self, amount: WAmount) {
|
||||
self.utxos -= 1;
|
||||
|
||||
if self.amount < amount {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
self.amount -= amount;
|
||||
}
|
||||
|
||||
pub fn receive(&mut self, amount: WAmount) {
|
||||
self.utxos += 1;
|
||||
|
||||
self.amount += amount;
|
||||
}
|
||||
}
|
||||
25
parser/src/structs/block_path.rs
Normal file
25
parser/src/structs/block_path.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
/// Compact coordinates of a block: index of its date plus its position
/// within that day (both fit in 16 bits).
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Copy, Encode, Decode, Allocative)]
pub struct BlockPath {
    pub date_index: u16,
    pub block_index: u16,
}
|
||||
|
||||
impl BlockPath {
|
||||
pub fn new(date_index: u16, block_index: u16) -> Self {
|
||||
Self {
|
||||
date_index,
|
||||
block_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for BlockPath {
|
||||
fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
|
||||
hasher.write_u32(((self.date_index as u32) << 16_u32) + self.block_index as u32)
|
||||
}
|
||||
}
|
||||
|
||||
// impl nohash::IsEnabled for BlockPath {}
|
||||
28
parser/src/structs/counter.rs
Normal file
28
parser/src/structs/counter.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
|
||||
/// A plain `u32` counter; `Deref`/`DerefMut` expose the inner value.
#[derive(Debug, Deref, DerefMut, Default, Clone, Copy, Encode, Decode, Allocative)]
pub struct Counter(u32);
|
||||
|
||||
impl Counter {
    /// Adds one to the count.
    #[inline(always)]
    pub fn increment(&mut self) {
        self.0 += 1;
    }

    /// Subtracts one from the count (panics on underflow in debug builds).
    #[inline(always)]
    pub fn decrement(&mut self) {
        self.0 -= 1;
    }

    /// Sets the count back to zero.
    #[inline(always)]
    pub fn reset(&mut self) {
        self.0 = 0;
    }

    /// Returns the current count.
    #[inline(always)]
    pub fn inner(&self) -> u32 {
        self.0
    }
}
|
||||
20
parser/src/structs/date_data.rs
Normal file
20
parser/src/structs/date_data.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
use super::{BlockData, BlockPath, WNaiveDate};
|
||||
|
||||
/// All per-block data of the blocks mined on one calendar date.
#[derive(Debug, Encode, Decode, Allocative)]
pub struct DateData {
    pub date: WNaiveDate,
    pub blocks: Vec<BlockData>,
}
|
||||
|
||||
impl DateData {
|
||||
pub fn new(date: WNaiveDate, blocks: Vec<BlockData>) -> Self {
|
||||
Self { date, blocks }
|
||||
}
|
||||
|
||||
pub fn get_block_data(&self, block_path: &BlockPath) -> Option<&BlockData> {
|
||||
self.blocks.get(block_path.block_index as usize)
|
||||
}
|
||||
}
|
||||
1256
parser/src/structs/date_map.rs
Normal file
1256
parser/src/structs/date_map.rs
Normal file
File diff suppressed because it is too large
Load Diff
25
parser/src/structs/empty_address_data.rs
Normal file
25
parser/src/structs/empty_address_data.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
use allocative::Allocative;
|
||||
use sanakirja::{direct_repr, Storable, UnsizedStorable};
|
||||
|
||||
use super::{AddressData, AddressType, WAmount};
|
||||
|
||||
/// Compact form of a fully drained address: only its type and the total
/// amount that ever flowed through it (sent == received).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub struct EmptyAddressData {
    pub address_type: AddressType,
    pub transfered: WAmount,
}
// Stored directly by its memory representation in the sanakirja database.
direct_repr!(EmptyAddressData);
|
||||
|
||||
impl EmptyAddressData {
    /// Compresses a drained `AddressData` into its empty form.
    ///
    /// # Panics
    /// Panics when `sent != received`, i.e. the address still holds funds
    /// and must not be converted.
    pub fn from_non_empty(non_empty: &AddressData) -> Self {
        if non_empty.sent != non_empty.received {
            dbg!(&non_empty);
            panic!("Trying to convert not empty wallet to empty !");
        }

        Self {
            address_type: non_empty.address_type,
            transfered: non_empty.sent,
        }
    }
}
|
||||
918
parser/src/structs/height_map.rs
Normal file
918
parser/src/structs/height_map.rs
Normal file
@@ -0,0 +1,918 @@
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::{BTreeMap, VecDeque},
|
||||
fmt::Debug,
|
||||
fs,
|
||||
iter::Sum,
|
||||
mem,
|
||||
ops::{Add, Div, Mul, RangeInclusive, Sub},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
use itertools::Itertools;
|
||||
use ordered_float::{FloatCore, OrderedFloat};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
bitcoin::NUMBER_OF_UNSAFE_BLOCKS,
|
||||
io::{format_path, Serialization},
|
||||
utils::{log, LossyFrom},
|
||||
};
|
||||
|
||||
use super::{AnyMap, MapValue};
|
||||
|
||||
pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000;
|
||||
|
||||
/// On-disk representation of one chunk of a `HeightMap`: a dataset version
/// plus the chunk's values indexed by offset from the chunk start.
#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedHeightMap<T> {
    version: u32,
    map: Vec<T>,
}
|
||||
|
||||
/// A per-block-height dataset persisted in fixed-size chunks
/// (`HEIGHT_MAP_CHUNK_SIZE` heights per file), keeping only the most
/// recent chunks in memory.
#[derive(Default, Allocative)]
pub struct HeightMap<T>
where
    T: MapValue,
{
    // Bumped to invalidate previously exported files.
    version: u32,

    // Directory holding the chunk files.
    path_all: String,
    // Optional file holding only the latest value.
    path_last: Option<String>,

    // How many trailing chunks to keep resident after startup (>= 1).
    chunks_in_memory: usize,

    serialization: Serialization,

    // State measured at import time, used to decide which heights are
    // already "safe" (deep enough to never be re-inserted).
    initial_last_height: Option<usize>,
    initial_first_unsafe_height: Option<usize>,

    // Chunks loaded from disk, keyed by chunk start height.
    imported: BTreeMap<usize, SerializedHeightMap<T>>,
    // Pending values, keyed by chunk start then offset within the chunk.
    to_insert: BTreeMap<usize, BTreeMap<usize, T>>,
}
|
||||
|
||||
impl<T> HeightMap<T>
|
||||
where
|
||||
T: MapValue,
|
||||
{
|
||||
pub fn new_bin(version: u32, path: &str) -> Self {
|
||||
Self::new(version, path, Serialization::Binary, 1, true)
|
||||
}
|
||||
|
||||
pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self {
|
||||
Self::new(version, path, Serialization::Binary, 1, export_last)
|
||||
}
|
||||
|
||||
pub fn new_json(version: u32, path: &str, export_last: bool) -> Self {
|
||||
Self::new(version, path, Serialization::Json, usize::MAX, export_last)
|
||||
}
|
||||
|
||||
fn new(
|
||||
version: u32,
|
||||
path: &str,
|
||||
serialization: Serialization,
|
||||
chunks_in_memory: usize,
|
||||
export_last: bool,
|
||||
) -> Self {
|
||||
if chunks_in_memory < 1 {
|
||||
panic!("Should always have at least the latest chunk in memory");
|
||||
}
|
||||
|
||||
let path = format_path(path);
|
||||
|
||||
let path_all = format!("{path}/height");
|
||||
|
||||
fs::create_dir_all(&path_all).unwrap();
|
||||
|
||||
let path_last = {
|
||||
if export_last {
|
||||
Some(serialization.append_extension(&format!("{path}/last")))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let mut s = Self {
|
||||
version,
|
||||
|
||||
path_all,
|
||||
path_last,
|
||||
|
||||
chunks_in_memory,
|
||||
|
||||
serialization,
|
||||
|
||||
initial_first_unsafe_height: None,
|
||||
initial_last_height: None,
|
||||
|
||||
to_insert: BTreeMap::default(),
|
||||
imported: BTreeMap::default(),
|
||||
};
|
||||
|
||||
s.read_dir()
|
||||
.into_iter()
|
||||
.rev()
|
||||
.take(chunks_in_memory)
|
||||
.for_each(|(chunk_start, path)| {
|
||||
if let Ok(serialized) = s.import(&path) {
|
||||
if serialized.version == s.version {
|
||||
s.imported.insert(chunk_start, serialized);
|
||||
} else {
|
||||
s.read_dir()
|
||||
.iter()
|
||||
.for_each(|(_, path)| fs::remove_file(path).unwrap())
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
s.initial_last_height = s
|
||||
.imported
|
||||
.iter()
|
||||
.last()
|
||||
.map(|(chunk_start, serialized)| chunk_start + serialized.map.len());
|
||||
|
||||
s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| {
|
||||
let offset = NUMBER_OF_UNSAFE_BLOCKS - 1;
|
||||
last_height.checked_sub(offset)
|
||||
});
|
||||
|
||||
if s.initial_first_unsafe_height.is_none() {
|
||||
log(&format!("New {path}"));
|
||||
}
|
||||
|
||||
s
|
||||
}
|
||||
|
||||
fn height_to_chunk_name(height: usize) -> String {
|
||||
let start = Self::height_to_chunk_start(height);
|
||||
let end = start + HEIGHT_MAP_CHUNK_SIZE;
|
||||
|
||||
format!("{start}..{end}")
|
||||
}
|
||||
|
||||
fn height_to_chunk_start(height: usize) -> usize {
|
||||
height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, height: usize, value: T) -> T {
|
||||
if !self.is_height_safe(height) {
|
||||
self.to_insert
|
||||
.entry(Self::height_to_chunk_start(height))
|
||||
.or_default()
|
||||
.insert(height % HEIGHT_MAP_CHUNK_SIZE, value);
|
||||
}
|
||||
|
||||
value
|
||||
}
|
||||
|
||||
pub fn insert_default(&mut self, height: usize) -> T {
|
||||
self.insert(height, T::default())
|
||||
}
|
||||
|
||||
pub fn get(&self, height: &usize) -> Option<T> {
|
||||
let chunk_start = Self::height_to_chunk_start(*height);
|
||||
|
||||
self.to_insert
|
||||
.get(&chunk_start)
|
||||
.and_then(|map| map.get(&(height - chunk_start)).cloned())
|
||||
.or_else(|| {
|
||||
self.imported
|
||||
.get(&chunk_start)
|
||||
.and_then(|serialized| serialized.map.get(height - chunk_start))
|
||||
.cloned()
|
||||
})
|
||||
}
|
||||
|
||||
/// Like `get`, but lazily loads the chunk file from disk when it is not
/// already in memory. Panics (after `dbg!` diagnostics) if the chunk file
/// or the value inside it cannot be found — callers treat absence as a bug.
pub fn get_or_import(&mut self, height: &usize) -> T {
    let chunk_start = Self::height_to_chunk_start(*height);

    self.to_insert
        .get(&chunk_start)
        .and_then(|map| map.get(&(height - chunk_start)).cloned())
        .or_else(|| {
            #[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
            if !self.imported.contains_key(&chunk_start) {
                // Scan the on-disk chunk files to locate this chunk.
                let dir_content = self.read_dir();

                let path = dir_content.get(&chunk_start).unwrap_or_else(|| {
                    dbg!(self.path(), chunk_start, &dir_content);
                    panic!();
                });

                // Deserialize and cache the chunk for subsequent lookups.
                let serialized = self.import(path).unwrap();

                self.imported.insert(chunk_start, serialized);
            }

            self.imported
                .get(&chunk_start)
                .and_then(|serialized| serialized.map.get(height - chunk_start))
                .cloned()
        })
        .unwrap_or_else(|| {
            dbg!(height, self.path());
            panic!();
        })
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn is_height_safe(&self, height: usize) -> bool {
|
||||
self.initial_first_unsafe_height.unwrap_or(0) > height
|
||||
}
|
||||
|
||||
/// Maps each on-disk chunk's start height to its file path.
fn read_dir(&self) -> BTreeMap<usize, PathBuf> {
    Self::_read_dir(&self.path_all, &self.serialization)
}
|
||||
|
||||
pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap<usize, PathBuf> {
|
||||
fs::read_dir(path)
|
||||
.unwrap()
|
||||
.map(|entry| entry.unwrap().path())
|
||||
.filter(|path| {
|
||||
let extension = path.extension().unwrap().to_str().unwrap();
|
||||
|
||||
path.is_file() && extension == serialization.to_extension()
|
||||
})
|
||||
.map(|path| {
|
||||
(
|
||||
path.file_stem()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.split("..")
|
||||
.next()
|
||||
.unwrap()
|
||||
.parse::<usize>()
|
||||
.unwrap(),
|
||||
path,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Deserializes one chunk file into a `SerializedHeightMap<T>` using the
/// map's configured serialization format.
fn import(&self, path: &Path) -> color_eyre::Result<SerializedHeightMap<T>> {
    self.serialization
        .import::<SerializedHeightMap<T>>(path.to_str().unwrap())
}
|
||||
}
|
||||
|
||||
impl<T> AnyMap for HeightMap<T>
where
    T: MapValue,
{
    /// Directory holding every serialized chunk of this map.
    fn path(&self) -> &str {
        &self.path_all
    }

    /// Optional path of the single-value "last" export file.
    fn path_last(&self) -> &Option<String> {
        &self.path_last
    }

    /// Name of the stored value type, for logging/diagnostics.
    fn t_name(&self) -> &str {
        std::any::type_name::<T>()
    }

    // fn reset(&mut self) -> color_eyre::Result<()> {
    //     fs::remove_dir(&self.path_all)?;

    //     self.initial_last_height = None;
    //     self.initial_first_unsafe_height = None;

    //     self.imported.clear();
    //     self.to_insert.clear();

    //     Ok(())
    // }

    /// Merges every staged (`to_insert`) value into the in-memory chunks
    /// (`imported`), draining the staged maps in place.
    fn pre_export(&mut self) {
        self.to_insert.iter_mut().for_each(|(chunk_start, map)| {
            // Get (or create) the serialized chunk this staged map targets.
            let serialized = self
                .imported
                .entry(*chunk_start)
                .or_insert(SerializedHeightMap {
                    version: self.version,
                    map: vec![],
                });

            // `mem::take` leaves the staged map empty, which `export`
            // later asserts.
            mem::take(map)
                .into_iter()
                .for_each(
                    // Staged values must be dense: overwrite an existing
                    // slot or append exactly at the end — a gap is a bug.
                    |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) {
                        Ordering::Greater => serialized.map[chunk_height] = value,
                        Ordering::Equal => serialized.map.push(value),
                        Ordering::Less => panic!(),
                    },
                );
        });
    }

    /// Writes every touched chunk back to disk. Must run after
    /// `pre_export` (relies on the staged maps being drained).
    fn export(&self) -> color_eyre::Result<()> {
        // NOTE(review): `len` counts `imported` while the loop enumerates
        // `to_insert`; the `index == len - 1` "last chunk" check assumes
        // both collections share key sets — confirm that invariant.
        let len = self.imported.len();

        self.to_insert.iter().enumerate().try_for_each(
            |(index, (chunk_start, map))| -> color_eyre::Result<()> {
                // `pre_export` emptied all staged maps; a non-empty map
                // here means the export sequence was violated.
                if !map.is_empty() {
                    unreachable!()
                }

                let chunk_name = Self::height_to_chunk_name(*chunk_start);

                let path = self
                    .serialization
                    .append_extension(&format!("{}/{}", self.path_all, chunk_name));

                let serialized = self.imported.get(chunk_start).unwrap_or_else(|| {
                    dbg!(&self.path_all, chunk_start, &self.imported);
                    panic!();
                });

                self.serialization.export(&path, serialized)?;

                // For the final chunk, also export just the last value so
                // consumers can read it without loading the whole chunk.
                if index == len - 1 {
                    if let Some(path_last) = self.path_last.as_ref() {
                        self.serialization
                            .export(path_last, serialized.map.last().unwrap())?;
                    }
                }

                Ok(())
            },
        )
    }

    /// Evicts all but the newest `chunks_in_memory` chunks and clears the
    /// (already exported) staged maps.
    fn post_export(&mut self) {
        self.imported
            .keys()
            .rev() // newest chunk keys first
            .enumerate()
            .filter(|(index, _)| *index + 1 > self.chunks_in_memory)
            .map(|(_, key)| *key)
            .collect_vec() // detach the keys before mutating the map
            .iter()
            .for_each(|key| {
                self.imported.remove(key);
            });

        self.to_insert.clear();
    }
}
|
||||
|
||||
/// Object-safe view over a `HeightMap<T>`, allowing heterogeneous
/// collections of maps with different value types.
pub trait AnyHeightMap: AnyMap {
    /// First height whose stored value was still mutable when the map was
    /// opened (`None` for a brand-new map).
    fn get_initial_first_unsafe_height(&self) -> Option<usize>;

    /// Highest height present when the map was opened.
    fn get_initial_last_height(&self) -> Option<usize>;

    /// Explicit upcast to the base trait object (with marker bounds).
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync);

    /// Mutable upcast to the base trait object.
    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}
|
||||
|
||||
impl<T> AnyHeightMap for HeightMap<T>
where
    T: MapValue,
{
    #[inline(always)]
    fn get_initial_first_unsafe_height(&self) -> Option<usize> {
        self.initial_first_unsafe_height
    }

    #[inline(always)]
    fn get_initial_last_height(&self) -> Option<usize> {
        self.initial_last_height
    }

    // Explicit upcasts: trait-object upcasting isn't implicit here, so the
    // trait exposes these conversions itself.
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) {
        self
    }

    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap {
        self
    }
}
|
||||
|
||||
impl<T> HeightMap<T>
|
||||
where
|
||||
T: MapValue,
|
||||
{
|
||||
pub fn sum_range(&self, range: &RangeInclusive<usize>) -> T
|
||||
where
|
||||
T: Sum,
|
||||
{
|
||||
range
|
||||
.to_owned()
|
||||
.flat_map(|height| self.get(&height))
|
||||
.sum::<T>()
|
||||
}
|
||||
|
||||
pub fn multi_insert_const(&mut self, heights: &[usize], constant: T) {
|
||||
heights.iter().for_each(|height| {
|
||||
let height = *height;
|
||||
|
||||
self.insert(height, constant);
|
||||
});
|
||||
}
|
||||
|
||||
pub fn multi_insert_simple_transform<K, F>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
source: &mut HeightMap<K>,
|
||||
transform: F,
|
||||
) where
|
||||
K: MapValue,
|
||||
F: Fn(K) -> T,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(*height, transform(source.get_or_import(height)));
|
||||
});
|
||||
}
|
||||
|
||||
pub fn multi_insert_complex_transform<K, F>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
source: &mut HeightMap<K>,
|
||||
transform: F,
|
||||
) where
|
||||
K: MapValue,
|
||||
F: Fn((K, &usize)) -> T,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(*height, transform((source.get_or_import(height), height)));
|
||||
});
|
||||
}
|
||||
|
||||
pub fn multi_insert_add<A, B>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
added: &mut HeightMap<A>,
|
||||
adder: &mut HeightMap<B>,
|
||||
) where
|
||||
A: MapValue,
|
||||
B: MapValue,
|
||||
T: LossyFrom<A> + LossyFrom<B>,
|
||||
T: Add<Output = T>,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(
|
||||
*height,
|
||||
T::lossy_from(added.get_or_import(height))
|
||||
+ T::lossy_from(adder.get_or_import(height)),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
pub fn multi_insert_subtract<A, B>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
subtracted: &mut HeightMap<A>,
|
||||
subtracter: &mut HeightMap<B>,
|
||||
) where
|
||||
A: MapValue,
|
||||
B: MapValue,
|
||||
T: LossyFrom<A> + LossyFrom<B>,
|
||||
T: Sub<Output = T>,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(
|
||||
*height,
|
||||
T::lossy_from(subtracted.get_or_import(height))
|
||||
- T::lossy_from(subtracter.get_or_import(height)),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
pub fn multi_insert_multiply<A, B>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
multiplied: &mut HeightMap<A>,
|
||||
multiplier: &mut HeightMap<B>,
|
||||
) where
|
||||
A: MapValue,
|
||||
B: MapValue,
|
||||
T: LossyFrom<A> + LossyFrom<B>,
|
||||
T: Mul<Output = T>,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(
|
||||
*height,
|
||||
T::lossy_from(multiplied.get_or_import(height))
|
||||
* T::lossy_from(multiplier.get_or_import(height)),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/// For each height, inserts the plain ratio `divided[height] / divider[height]`.
pub fn multi_insert_divide<A, B>(
    &mut self,
    heights: &[usize],
    divided: &mut HeightMap<A>,
    divider: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
    self._multi_insert_divide(heights, divided, divider, false)
}
|
||||
|
||||
/// For each height, inserts the ratio `divided[height] / divider[height]`
/// scaled to a percentage (multiplied by 100).
pub fn multi_insert_percentage<A, B>(
    &mut self,
    heights: &[usize],
    divided: &mut HeightMap<A>,
    divider: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
    self._multi_insert_divide(heights, divided, divider, true)
}
|
||||
|
||||
pub fn _multi_insert_divide<A, B>(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
divided: &mut HeightMap<A>,
|
||||
divider: &mut HeightMap<B>,
|
||||
as_percentage: bool,
|
||||
) where
|
||||
A: MapValue,
|
||||
B: MapValue,
|
||||
T: LossyFrom<A> + LossyFrom<B>,
|
||||
T: Div<Output = T> + Mul<Output = T> + From<u8>,
|
||||
{
|
||||
let multiplier = T::from(if as_percentage { 100 } else { 1 });
|
||||
|
||||
heights.iter().for_each(|height| {
|
||||
self.insert(
|
||||
*height,
|
||||
T::lossy_from(divided.get_or_import(height))
|
||||
/ T::lossy_from(divider.get_or_import(height))
|
||||
* multiplier,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/// Cumulative (running) sum of `source` — a rolling sum with no window.
pub fn multi_insert_cumulative<K>(&mut self, heights: &[usize], source: &mut HeightMap<K>)
where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    self._multi_insert_last_x_sum(heights, source, None)
}
|
||||
|
||||
/// Rolling sum of `source` over the last `block_time` blocks.
pub fn multi_insert_last_x_sum<K>(
    &mut self,
    heights: &[usize],
    source: &mut HeightMap<K>,
    block_time: usize,
) where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    self._multi_insert_last_x_sum(heights, source, Some(block_time))
}
|
||||
|
||||
/// Shared implementation for the cumulative (`block_time == None`) and
/// rolling-window (`block_time == Some(x)`) sums of `source`.
fn _multi_insert_last_x_sum<K>(
    &mut self,
    heights: &[usize],
    source: &mut HeightMap<K>,
    block_time: Option<usize>,
) where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    // Running sum carried across iterations; seeded lazily below.
    let mut sum = None;

    heights.iter().for_each(|height| {
        // Value dropping out of the window as it slides forward; zero for
        // the cumulative case or while the window isn't full yet.
        // NOTE(review): `(height + 1) - x` is `height - (x - 1)`, i.e. the
        // FIRST element of the new window rather than `height - x`, the
        // element that just left it — confirm this window convention is
        // intended (the old commented-out insert_last_x_sum did the same).
        let to_subtract = block_time
            .and_then(|x| {
                (height + 1)
                    .checked_sub(x)
                    .map(|previous_height| source.get_or_import(&previous_height))
            })
            .unwrap_or_default();

        // On the first iteration, resume from the previously stored sum.
        let previous_sum = sum.unwrap_or_else(|| {
            height
                .checked_sub(1)
                .map(|previous_sum_height| self.get_or_import(&previous_sum_height))
                .unwrap_or_default()
        });

        let last_value = source.get_or_import(height);

        sum.replace(previous_sum + T::lossy_from(last_value) - T::lossy_from(to_subtract));

        self.insert(*height, sum.unwrap());
    });
}
|
||||
|
||||
/// Incremental moving average over roughly `block_time` blocks:
/// `new = (prev * (n - 1) + last) / n` — an exponential-style smoothing,
/// not an exact window average.
pub fn multi_insert_simple_average<K>(
    &mut self,
    heights: &[usize],
    source: &mut HeightMap<K>,
    block_time: usize,
) where
    T: Into<f32> + From<f32>,
    K: MapValue + Sum,
    f32: LossyFrom<K>,
{
    if block_time <= 1 {
        panic!("Average of 1 or less is not useful");
    }

    // Carried across iterations so `self` is only read on the first one.
    let mut average = None;

    heights.iter().for_each(|height| {
        let height = *height;

        let previous_average: f32 = average
            .unwrap_or_else(|| {
                // NOTE(review): the resume path reads the average stored
                // `block_time` blocks back rather than at `height - 1` —
                // confirm this is the intended seed when resuming.
                height
                    .checked_sub(block_time)
                    .and_then(|previous_average_height| self.get(&previous_average_height))
                    .unwrap_or_default()
            })
            .into();

        let last_value = f32::lossy_from(source.get_or_import(&height));

        average.replace(
            ((previous_average * (block_time as f32 - 1.0) + last_value) / block_time as f32)
                .into(),
        );

        self.insert(height, average.unwrap());
    });
}
|
||||
|
||||
pub fn multi_insert_net_change(
|
||||
&mut self,
|
||||
heights: &[usize],
|
||||
source: &mut HeightMap<T>,
|
||||
block_time: usize,
|
||||
) where
|
||||
T: Sub<Output = T>,
|
||||
{
|
||||
heights.iter().for_each(|height| {
|
||||
let height = *height;
|
||||
|
||||
let previous_value = height
|
||||
.checked_sub(block_time)
|
||||
.map(|height| source.get_or_import(&height))
|
||||
.unwrap_or_default();
|
||||
|
||||
let last_value = source.get_or_import(&height);
|
||||
|
||||
let net = last_value - previous_value;
|
||||
|
||||
self.insert(height, net);
|
||||
});
|
||||
}
|
||||
|
||||
/// Rolling (or cumulative when `block_time` is `None`) median of `source`
/// — the 0.5 percentile.
pub fn multi_insert_median(
    &mut self,
    heights: &[usize],
    source: &mut HeightMap<T>,
    block_time: Option<usize>,
) where
    T: FloatCore,
{
    self.multi_insert_percentile(heights, source, 0.5, block_time);
}
|
||||
|
||||
/// Rolling (window = `block_time`) or cumulative (`block_time == None`)
/// percentile of `source`, maintained incrementally with two parallel
/// structures: `ordered_vec` preserves insertion order so the oldest
/// element can be evicted, `sorted_vec` stays sorted for index lookups.
pub fn multi_insert_percentile(
    &mut self,
    heights: &[usize],
    source: &mut HeightMap<T>,
    percentile: f32,
    block_time: Option<usize>,
) where
    T: FloatCore,
{
    if !(0.0..=1.0).contains(&percentile) {
        panic!("The percentile should be between 0.0 and 1.0");
    }

    if block_time.map_or(false, |size| size < 3) {
        panic!("Computing a median for a size lower than 3 is useless");
    }

    let mut ordered_vec = None;
    let mut sorted_vec = None;

    heights.iter().for_each(|height| {
        let height = *height;

        let value = {
            // Cumulative mode always has a full "window" starting at 0;
            // rolling mode only once `height >= block_time`.
            if let Some(start) = block_time.map_or(Some(0), |size| height.checked_sub(size)) {
                if ordered_vec.is_none() {
                    // First (full-window) iteration: build both structures
                    // from scratch.
                    // NOTE(review): in cumulative mode `ordered_vec` is
                    // never set, so this rebuild-from-zero branch runs on
                    // EVERY iteration — O(n² log n). Presumably acceptable
                    // for current callers; confirm.
                    let mut vec = (start..=height)
                        .map(|height| OrderedFloat(source.get_or_import(&height)))
                        .collect_vec();

                    if block_time.is_some() {
                        ordered_vec.replace(VecDeque::from(vec.clone()));
                    }

                    vec.sort_unstable();
                    sorted_vec.replace(vec);
                } else {
                    // Incremental (rolling) update: evict the oldest value
                    // and insert the newest, keeping `sorted_vec` sorted.
                    let float_value = OrderedFloat(source.get_or_import(&height));

                    if block_time.is_some() {
                        let first = ordered_vec.as_mut().unwrap().pop_front().unwrap();
                        let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
                        sorted_vec.as_mut().unwrap().remove(pos);

                        ordered_vec.as_mut().unwrap().push_back(float_value);
                    }

                    // `unwrap_or_else(|pos| pos)` turns "not found" into
                    // the insertion point.
                    let pos = sorted_vec
                        .as_ref()
                        .unwrap()
                        .binary_search(&float_value)
                        .unwrap_or_else(|pos| pos);
                    sorted_vec.as_mut().unwrap().insert(pos, float_value);
                }

                let vec = sorted_vec.as_ref().unwrap();

                // Fractional rank: average the two straddling elements.
                // NOTE(review): `index as usize` can equal `vec.len()` when
                // `percentile == 1.0`, which would panic on `unwrap` —
                // confirm callers never pass 1.0.
                let index = vec.len() as f32 * percentile;

                if index.fract() != 0.0 {
                    (vec.get(index.ceil() as usize)
                        .unwrap_or_else(|| {
                            dbg!(index, &self.path_all, &source.path_all, block_time);
                            panic!()
                        })
                        .0
                        + vec
                            .get(index.floor() as usize)
                            .unwrap_or_else(|| {
                                dbg!(index, &self.path_all, &source.path_all, block_time);
                                panic!()
                            })
                            .0)
                        / T::from(2.0).unwrap()
                } else {
                    vec.get(index as usize).unwrap().0
                }
            } else {
                // Window not yet full: store a neutral default.
                T::default()
            }
        };

        self.insert(height, value);
    });
}
|
||||
|
||||
// pub fn insert_cumulative(&mut self, height: usize, source: &HeightMap<T>) -> T
|
||||
// where
|
||||
// T: Add<Output = T> + Sub<Output = T>,
|
||||
// {
|
||||
// let previous_cum = height
|
||||
// .checked_sub(1)
|
||||
// .map(|previous_sum_height| {
|
||||
// self.get(&previous_sum_height).unwrap_or_else(|| {
|
||||
// dbg!(previous_sum_height);
|
||||
// panic!()
|
||||
// })
|
||||
// })
|
||||
// .unwrap_or_default();
|
||||
|
||||
// let last_value = source.get(&height).unwrap();
|
||||
|
||||
// let cum_value = previous_cum + last_value;
|
||||
|
||||
// self.insert(height, cum_value);
|
||||
|
||||
// cum_value
|
||||
// }
|
||||
|
||||
// pub fn insert_last_x_sum(&mut self, height: usize, source: &HeightMap<T>, x: usize) -> T
|
||||
// where
|
||||
// T: Add<Output = T> + Sub<Output = T>,
|
||||
// {
|
||||
// let to_subtract = (height + 1)
|
||||
// .checked_sub(x)
|
||||
// .map(|previous_height| {
|
||||
// source.get(&previous_height).unwrap_or_else(|| {
|
||||
// dbg!(&self.path_all, &source.path_all, previous_height);
|
||||
// panic!()
|
||||
// })
|
||||
// })
|
||||
// .unwrap_or_default();
|
||||
|
||||
// let previous_sum = height
|
||||
// .checked_sub(1)
|
||||
// .map(|previous_sum_height| self.get(&previous_sum_height).unwrap())
|
||||
// .unwrap_or_default();
|
||||
|
||||
// let last_value = source.get(&height).unwrap();
|
||||
|
||||
// let sum = previous_sum + last_value - to_subtract;
|
||||
|
||||
// self.insert(height, sum);
|
||||
|
||||
// sum
|
||||
// }
|
||||
|
||||
// pub fn insert_simple_average(&mut self, height: usize, source: &HeightMap<T>, block_time: usize)
|
||||
// where
|
||||
// T: Into<f32> + From<f32>,
|
||||
// {
|
||||
// let to_subtract: f32 = (height + 1)
|
||||
// .checked_sub(block_time)
|
||||
// .map(|previous_height| source.get(&previous_height).unwrap())
|
||||
// .unwrap_or_default()
|
||||
// .into();
|
||||
|
||||
// let previous_average: f32 = height
|
||||
// .checked_sub(1)
|
||||
// .map(|previous_average_height| self.get(&previous_average_height).unwrap())
|
||||
// .unwrap_or_default()
|
||||
// .into();
|
||||
|
||||
// let last_value: f32 = source.get(&height).unwrap().into();
|
||||
|
||||
// let sum = previous_average * block_time as f32 - to_subtract + last_value;
|
||||
|
||||
// let average: T = (sum / block_time as f32).into();
|
||||
|
||||
// self.insert(height, average);
|
||||
// }
|
||||
|
||||
// pub fn insert_net_change(&mut self, height: usize, source: &HeightMap<T>, offset: usize) -> T
|
||||
// where
|
||||
// T: Sub<Output = T>,
|
||||
// {
|
||||
// let previous_value = height
|
||||
// .checked_sub(offset)
|
||||
// .map(|height| {
|
||||
// source.get(&height).unwrap_or_else(|| {
|
||||
// dbg!(&self.path_all, &source.path_all, offset);
|
||||
// panic!();
|
||||
// })
|
||||
// })
|
||||
// .unwrap_or_default();
|
||||
|
||||
// let last_value = source.get(&height).unwrap();
|
||||
|
||||
// let net = last_value - previous_value;
|
||||
|
||||
// self.insert(height, net);
|
||||
|
||||
// net
|
||||
// }
|
||||
|
||||
// pub fn insert_median(&mut self, height: usize, source: &HeightMap<T>, size: usize) -> T
|
||||
// where
|
||||
// T: FloatCore,
|
||||
// {
|
||||
// if size < 3 {
|
||||
// panic!("Computing a median for a size lower than 3 is useless");
|
||||
// }
|
||||
|
||||
// let median = {
|
||||
// if let Some(start) = height.checked_sub(size - 1) {
|
||||
// let even = size % 2 == 0;
|
||||
// let median_index = size / 2;
|
||||
|
||||
// let mut vec = (start..=height)
|
||||
// .map(|height| {
|
||||
// OrderedFloat(source.get(&height).unwrap_or_else(|| {
|
||||
// dbg!(height, &source.path_all, size);
|
||||
// panic!()
|
||||
// }))
|
||||
// })
|
||||
// .collect_vec();
|
||||
|
||||
// vec.sort_unstable();
|
||||
|
||||
// if even {
|
||||
// (vec.get(median_index)
|
||||
// .unwrap_or_else(|| {
|
||||
// dbg!(median_index, &self.path_all, &source.path_all, size);
|
||||
// panic!()
|
||||
// })
|
||||
// .0
|
||||
// + vec.get(median_index - 1).unwrap().0)
|
||||
// / T::from(2.0).unwrap()
|
||||
// } else {
|
||||
// vec.get(median_index).unwrap().0
|
||||
// }
|
||||
// } else {
|
||||
// T::default()
|
||||
// }
|
||||
// };
|
||||
|
||||
// self.insert(height, median);
|
||||
|
||||
// median
|
||||
// }
|
||||
}
|
||||
178
parser/src/structs/liquidity.rs
Normal file
178
parser/src/structs/liquidity.rs
Normal file
@@ -0,0 +1,178 @@
|
||||
use std::{
|
||||
f64::consts::E,
|
||||
ops::{AddAssign, SubAssign},
|
||||
};
|
||||
|
||||
use allocative::Allocative;
|
||||
|
||||
use super::WAmount;
|
||||
|
||||
/// Shares (each in 0.0..=1.0) used to split an entity's supply into
/// liquidity buckets; computed once in `new`, applied via `split`.
#[derive(Debug)]
pub struct LiquidityClassification {
    // Share classified as illiquid.
    illiquid: f64,
    // Share classified as liquid; the highly-liquid share is derived as
    // the remainder inside `split`.
    liquid: f64,
    // highly_liquid: f64,
}
|
||||
|
||||
impl LiquidityClassification {
    /// Following this:
    /// https://insights.glassnode.com/bitcoin-liquid-supply/
    /// https://www.desmos.com/calculator/dutgni5rtj
    ///
    /// Builds the classification from an entity's lifetime sent/received
    /// volumes. Panics when `received` is zero or `sent > received`.
    pub fn new(sent: WAmount, received: WAmount) -> Self {
        if received == WAmount::ZERO {
            dbg!(sent, received);
            panic!()
        }

        // Liquidity ratio in 0.0..=1.0: fraction of received coins spent.
        let liquidity = {
            if sent > received {
                panic!("Shouldn't be possible");
            }

            if sent == WAmount::ZERO {
                0.0
            } else {
                let liquidity = sent.to_sat() as f64 / received.to_sat() as f64;

                if liquidity.is_nan() {
                    dbg!(sent, received);
                    unreachable!()
                } else {
                    liquidity
                }
            }
        };

        // Two decreasing logistic curves split the unit interval into
        // three bands: below the x0=0.25 curve -> illiquid, between the
        // curves -> liquid, above the x0=0.75 curve -> highly liquid.
        let illiquid_line = Self::compute_illiquid_line(liquidity);
        let liquid_line = Self::compute_liquid_line(liquidity);

        let illiquid = illiquid_line;
        let liquid = liquid_line - illiquid_line;
        let highly_liquid = 1.0 - liquid_line;

        // The x0=0.75 curve always sits above the x0=0.25 one, so all
        // three shares must be non-negative.
        if illiquid < 0.0 || liquid < 0.0 || highly_liquid < 0.0 {
            unreachable!()
        }

        Self {
            illiquid,
            liquid,
            // highly_liquid: 1.0 - liquid - illiquid,
        }
    }

    /// Splits `value` proportionally across the three liquidity bands.
    #[inline(always)]
    pub fn split(&self, value: f64) -> LiquiditySplitResult {
        let illiquid = value * self.illiquid;
        let liquid = value * self.liquid;
        // Computed as the remainder so the three parts always sum back to
        // exactly `value`.
        let highly_liquid = value - illiquid - liquid;

        LiquiditySplitResult {
            illiquid,
            liquid,
            highly_liquid,
        }
    }

    /// Returns value in range 0.0..1.0
    #[inline(always)]
    fn compute_illiquid_line(x: f64) -> f64 {
        Self::compute_ratio(x, 0.25)
    }

    /// Returns value in range 0.0..1.0
    #[inline(always)]
    fn compute_liquid_line(x: f64) -> f64 {
        Self::compute_ratio(x, 0.75)
    }

    /// Decreasing logistic curve centered at `x0` with steepness `k = 25`:
    /// close to 1 for `x << x0`, close to 0 for `x >> x0`.
    #[inline(always)]
    fn compute_ratio(x: f64, x0: f64) -> f64 {
        let l = 1.0;
        let k = 25.0;

        l / (1.0 + E.powf(k * (x - x0)))
    }
}
|
||||
|
||||
/// Absolute amounts produced by `LiquidityClassification::split`; the
/// three parts sum back to the value that was split.
#[derive(Debug, Default)]
pub struct LiquiditySplitResult {
    pub illiquid: f64,
    pub liquid: f64,
    pub highly_liquid: f64,
}
|
||||
|
||||
/// A value tracked both in aggregate (`all`) and per liquidity bucket.
#[derive(Debug, Default, PartialEq, PartialOrd, Clone, Copy, Allocative)]
pub struct SplitByLiquidity<T>
where
    T: Default,
{
    pub all: T,
    pub illiquid: T,
    pub liquid: T,
    pub highly_liquid: T,
}
|
||||
|
||||
// Component-wise `+=` across the aggregate and every bucket.
impl<T> AddAssign for SplitByLiquidity<T>
where
    T: AddAssign + Default,
{
    fn add_assign(&mut self, rhs: Self) {
        self.all += rhs.all;
        self.illiquid += rhs.illiquid;
        self.liquid += rhs.liquid;
        self.highly_liquid += rhs.highly_liquid;
    }
}
|
||||
|
||||
// Component-wise `-=` across the aggregate and every bucket.
impl<T> SubAssign for SplitByLiquidity<T>
where
    T: SubAssign + Default,
{
    fn sub_assign(&mut self, rhs: Self) {
        self.all -= rhs.all;
        self.illiquid -= rhs.illiquid;
        self.liquid -= rhs.liquid;
        self.highly_liquid -= rhs.highly_liquid;
    }
}
|
||||
|
||||
// impl<T> SplitByLiquidity<T>
|
||||
// where
|
||||
// T: Default,
|
||||
// {
|
||||
// // pub fn get(&self, id: &LiquidityId) -> &T {
|
||||
// // match id {
|
||||
// // LiquidityId::All => &self.all,
|
||||
// // LiquidityId::Illiquid => &self.illiquid,
|
||||
// // LiquidityId::Liquid => &self.liquid,
|
||||
// // LiquidityId::HighlyLiquid => &self.highly_liquid,
|
||||
// // }
|
||||
// // }
|
||||
|
||||
// pub fn get_mut(&mut self, id: &LiquidityId) -> &mut T {
|
||||
// match id {
|
||||
// LiquidityId::All => &mut self.all,
|
||||
// LiquidityId::Illiquid => &mut self.illiquid,
|
||||
// LiquidityId::Liquid => &mut self.liquid,
|
||||
// LiquidityId::HighlyLiquid => &mut self.highly_liquid,
|
||||
// }
|
||||
// }
|
||||
|
||||
// pub fn as_vec(&self) -> Vec<(&T, LiquidityId)> {
|
||||
// vec![
|
||||
// (&self.all, LiquidityId::All),
|
||||
// (&self.illiquid, LiquidityId::Illiquid),
|
||||
// (&self.liquid, LiquidityId::Liquid),
|
||||
// (&self.highly_liquid, LiquidityId::HighlyLiquid),
|
||||
// ]
|
||||
// }
|
||||
// }
|
||||
|
||||
// #[derive(Debug, Clone, Copy)]
|
||||
// pub enum LiquidityId {
|
||||
// All,
|
||||
// Illiquid,
|
||||
// Liquid,
|
||||
// HighlyLiquid,
|
||||
// }
|
||||
22
parser/src/structs/map_value.rs
Normal file
22
parser/src/structs/map_value.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
use std::fmt::Debug;
|
||||
|
||||
use bincode::{Decode, Encode};
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
|
||||
use crate::datasets::OHLC;
|
||||
|
||||
use super::WNaiveDate;
|
||||
|
||||
/// Marker trait for types storable inside the height/date maps: cheap to
/// copy, debuggable, and serializable through both serde and bincode,
/// usable across threads.
pub trait MapValue:
    Clone + Copy + Default + Debug + Serialize + DeserializeOwned + Encode + Decode + Sync + Send
{
}

// Blanket-style opt-ins for the primitive and domain types the maps store.
impl MapValue for u16 {}
impl MapValue for u32 {}
impl MapValue for u64 {}
impl MapValue for usize {}
impl MapValue for f32 {}
impl MapValue for f64 {}
impl MapValue for WNaiveDate {}
impl MapValue for OHLC {}
|
||||
49
parser/src/structs/mod.rs
Normal file
49
parser/src/structs/mod.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
// Hub of the parser's core data structures: every submodule is declared
// privately here and re-exported wholesale, so consumers can simply
// `use crate::structs::*`.
mod address;
mod address_data;
mod address_realized_data;
mod address_size;
mod address_split;
mod address_type;
mod any_map;
mod bi_map;
mod block_data;
mod block_path;
mod counter;
mod date_data;
mod date_map;
mod empty_address_data;
mod height_map;
mod liquidity;
mod map_value;
mod partial_txout_data;
mod price;
mod sent_data;
mod tx_data;
mod txout_index;
mod wamount;

pub use address::*;
pub use address_data::*;
pub use address_realized_data::*;
pub use address_size::*;
pub use address_split::*;
pub use address_type::*;
pub use any_map::*;
pub use bi_map::*;
pub use block_data::*;
pub use block_path::*;
pub use counter::*;
pub use date_data::*;
pub use date_map::*;
pub use empty_address_data::*;
pub use height_map::*;
pub use liquidity::*;
pub use map_value::*;
pub use partial_txout_data::*;
pub use price::*;
pub use sent_data::*;
pub use tx_data::*;
pub use txout_index::*;
pub use wamount::*;
|
||||
18
parser/src/structs/partial_txout_data.rs
Normal file
18
parser/src/structs/partial_txout_data.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use super::{Address, WAmount};
|
||||
|
||||
/// Intermediate per-txout data carried between parsing passes.
#[derive(Debug)]
pub struct PartialTxoutData {
    pub amount: WAmount,
    // NOTE(review): presumably `None` when the output's script couldn't be
    // resolved to an `Address` — confirm against the producer.
    pub address: Option<Address>,
    // NOTE(review): presumably the database index of an already-known
    // address — confirm against the address databases.
    pub address_index_opt: Option<u32>,
}

impl PartialTxoutData {
    /// Plain constructor; note the parameter order differs from the field
    /// declaration order.
    pub fn new(address: Option<Address>, amount: WAmount, address_index_opt: Option<u32>) -> Self {
        Self {
            address,
            amount,
            address_index_opt,
        }
    }
}
|
||||
93
parser/src/structs/price.rs
Normal file
93
parser/src/structs/price.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign};
|
||||
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
use super::WAmount;
|
||||
|
||||
/// A price stored as a whole number of US cents (see `to_cent`/`to_dollar`).
#[derive(
    Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Allocative,
)]
pub struct Price(u64);

// Number of significant digits kept by `Price::to_significant`.
const SIGNIFICANT_DIGITS: i32 = 3;
||||
|
||||
impl Price {
|
||||
pub const ZERO: Price = Price(0);
|
||||
|
||||
pub fn to_cent(self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
|
||||
pub fn to_dollar(self) -> f64 {
|
||||
self.0 as f64 / 100.0
|
||||
}
|
||||
|
||||
pub fn from_cent(cent: u64) -> Self {
|
||||
Self(cent)
|
||||
}
|
||||
|
||||
pub fn from_dollar(dollar: f64) -> Self {
|
||||
Self((dollar * 100.0) as u64)
|
||||
}
|
||||
|
||||
pub fn to_significant(self) -> Self {
|
||||
let mut price = self;
|
||||
|
||||
let ilog10 = price.0.checked_ilog10().unwrap_or(0) as i32;
|
||||
|
||||
if ilog10 >= SIGNIFICANT_DIGITS {
|
||||
let log_diff = ilog10 - SIGNIFICANT_DIGITS + 1;
|
||||
|
||||
let pow = 10.0_f64.powi(log_diff);
|
||||
|
||||
price = Price::from_cent(((price.0 as f64 / pow).round() * pow) as u64);
|
||||
}
|
||||
|
||||
price
|
||||
}
|
||||
}
|
||||
|
||||
// Cent-wise arithmetic; like plain integer math, these panic on
// overflow/underflow in debug builds.
impl Add for Price {
    type Output = Self;

    fn add(self, rhs: Self) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}

impl AddAssign for Price {
    fn add_assign(&mut self, rhs: Self) {
        self.0 += rhs.0;
    }
}

impl Sub for Price {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self::Output {
        Self(self.0 - rhs.0)
    }
}

impl SubAssign for Price {
    fn sub_assign(&mut self, rhs: Self) {
        self.0 -= rhs.0;
    }
}
|
||||
|
||||
// The price is per whole bitcoin: multiplying by an amount yields the
// total value in cents; dividing scales a total back to a per-BTC price.
// Both round to the nearest cent.
impl Mul<WAmount> for Price {
    type Output = Self;

    fn mul(self, rhs: WAmount) -> Self::Output {
        Self((self.to_cent() as f64 * rhs.to_sat() as f64 / WAmount::ONE_BTC_F64).round() as u64)
    }
}

impl Div<WAmount> for Price {
    type Output = Self;

    fn div(self, rhs: WAmount) -> Self::Output {
        Self((self.to_cent() as f64 * WAmount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64)
    }
}
|
||||
14
parser/src/structs/sent_data.rs
Normal file
14
parser/src/structs/sent_data.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use super::WAmount;
|
||||
|
||||
/// Aggregated outgoing volume and spend count.
#[derive(Default, Debug)]
pub struct SentData {
    pub volume: WAmount,
    pub count: u32,
}

impl SentData {
    /// Records one outgoing transfer of `amount`.
    pub fn send(&mut self, amount: WAmount) {
        self.volume += amount;
        self.count += 1;
    }
}
|
||||
27
parser/src/structs/tx_data.rs
Normal file
27
parser/src/structs/tx_data.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use allocative::Allocative;
|
||||
use sanakirja::{direct_repr, Storable, UnsizedStorable};
|
||||
|
||||
use super::BlockPath;
|
||||
|
||||
/// Per-transaction record kept in the sanakirja databases.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Allocative)]
pub struct TxData {
    pub index: u32,
    pub block_path: BlockPath,
    // Remaining unspent outputs; the record can be pruned once it hits 0.
    pub utxos: u16,
}
// Stores the struct's raw bytes directly in sanakirja pages.
direct_repr!(TxData);

impl TxData {
    pub fn new(index: u32, block_path: BlockPath, utxos: u16) -> Self {
        Self {
            index,
            block_path,
            utxos,
        }
    }

    /// True once every output of the transaction has been spent.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.utxos == 0
    }
}
|
||||
28
parser/src/structs/txout_index.rs
Normal file
28
parser/src/structs/txout_index.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use allocative::Allocative;
|
||||
use bincode::{Decode, Encode};
|
||||
use sanakirja::{direct_repr, Storable, UnsizedStorable};
|
||||
|
||||
/// Identifies one transaction output: the transaction's index plus the
/// output's position (`vout`) within it.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Allocative)]
pub struct TxoutIndex {
    pub tx_index: u32,
    pub vout: u16,
}
// Stores the struct's raw bytes directly in sanakirja pages.
direct_repr!(TxoutIndex);

impl TxoutIndex {
    pub fn new(tx_index: u32, vout: u16) -> Self {
        Self { tx_index, vout }
    }

    /// Packs both fields into one u64: `tx_index` in the upper bits,
    /// `vout` in the low 16 (the `+` is equivalent to `|` here since the
    /// shifted value has zeroed low bits).
    pub fn as_u64(&self) -> u64 {
        ((self.tx_index as u64) << 16_u64) + self.vout as u64
    }
}

// Hash the packed u64 in a single write instead of hashing field by field.
impl std::hash::Hash for TxoutIndex {
    fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
        hasher.write_u64(self.as_u64())
    }
}
|
||||
|
||||
// impl nohash::IsEnabled for TxoutIndex {}
|
||||
127
parser/src/structs/wamount.rs
Normal file
127
parser/src/structs/wamount.rs
Normal file
@@ -0,0 +1,127 @@
|
||||
use std::{
|
||||
iter::Sum,
|
||||
ops::{Add, AddAssign, Mul, Sub, SubAssign},
|
||||
};
|
||||
|
||||
use allocative::{Allocative, Visitor};
|
||||
use bincode::{
|
||||
de::{BorrowDecoder, Decoder},
|
||||
enc::Encoder,
|
||||
error::{DecodeError, EncodeError},
|
||||
BorrowDecode, Decode, Encode,
|
||||
};
|
||||
use bitcoin::Amount;
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
use sanakirja::{direct_repr, Storable, UnsizedStorable};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Newtype ("W" = wrapped) around `bitcoin::Amount`, needed so this crate can
/// implement foreign traits (sanakirja storage, bincode codecs, allocative)
/// despite the orphan rule.
///
/// `Deref`/`DerefMut` expose the full `Amount` API; `direct_repr!` persists
/// the raw bytes, so the inner representation must stay stable.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct WAmount(Amount);
direct_repr!(WAmount);
|
||||
|
||||
impl WAmount {
|
||||
pub const ZERO: Self = Self(Amount::ZERO);
|
||||
pub const ONE_BTC_F64: f64 = 100_000_000.0;
|
||||
|
||||
#[inline(always)]
|
||||
pub fn wrap(amount: Amount) -> Self {
|
||||
Self(amount)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn from_sat(sats: u64) -> Self {
|
||||
Self(Amount::from_sat(sats))
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for WAmount {
    type Output = WAmount;

    /// Satoshi-wise addition; overflow follows plain `u64` semantics
    /// (panic in debug builds, wrap in release).
    fn add(self, rhs: WAmount) -> Self::Output {
        let total = self.to_sat() + rhs.to_sat();
        Self::from_sat(total)
    }
}
|
||||
|
||||
impl AddAssign for WAmount {
|
||||
fn add_assign(&mut self, rhs: Self) {
|
||||
*self = WAmount::from_sat(self.to_sat() + rhs.to_sat());
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for WAmount {
    type Output = WAmount;

    /// Satoshi-wise subtraction; underflow follows plain `u64` semantics
    /// (panic in debug builds, wrap in release).
    fn sub(self, rhs: WAmount) -> Self::Output {
        let diff = self.to_sat() - rhs.to_sat();
        Self::from_sat(diff)
    }
}
|
||||
|
||||
impl SubAssign for WAmount {
|
||||
fn sub_assign(&mut self, rhs: Self) {
|
||||
*self = WAmount::from_sat(self.to_sat() - rhs.to_sat());
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<WAmount> for WAmount {
    type Output = WAmount;

    /// Multiplies the raw satoshi counts of the two operands.
    /// NOTE(review): sats × sats has no monetary unit — presumably one side
    /// is used as a dimensionless factor; confirm at call sites.
    fn mul(self, rhs: WAmount) -> Self::Output {
        let product = self.to_sat() * rhs.to_sat();
        Self::from_sat(product)
    }
}
|
||||
|
||||
impl Mul<u64> for WAmount {
    type Output = WAmount;

    /// Scales the satoshi count by an integer factor.
    fn mul(self, rhs: u64) -> Self::Output {
        let scaled = self.to_sat() * rhs;
        Self::from_sat(scaled)
    }
}
|
||||
|
||||
impl Sum for WAmount {
|
||||
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
|
||||
let sats = iter.map(|amt| amt.to_sat()).sum();
|
||||
WAmount::from_sat(sats)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encode for WAmount {
|
||||
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
|
||||
Encode::encode(&self.to_sat(), encoder)
|
||||
}
|
||||
}
|
||||
|
||||
impl Decode for WAmount {
|
||||
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
|
||||
let sats: u64 = Decode::decode(decoder)?;
|
||||
|
||||
Ok(WAmount::from_sat(sats))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> BorrowDecode<'de> for WAmount {
|
||||
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
|
||||
let sats: u64 = BorrowDecode::borrow_decode(decoder)?;
|
||||
|
||||
Ok(WAmount::from_sat(sats))
|
||||
}
|
||||
}
|
||||
|
||||
impl Allocative for WAmount {
    // `WAmount` owns no heap allocations, so reporting its inline size is
    // all the memory profiler needs.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}
|
||||
76
parser/src/structs/wnaivedate.rs
Normal file
76
parser/src/structs/wnaivedate.rs
Normal file
@@ -0,0 +1,76 @@
|
||||
use std::{fmt, str::FromStr};
|
||||
|
||||
use allocative::{Allocative, Visitor};
|
||||
use bincode::{
|
||||
de::{BorrowDecoder, Decoder},
|
||||
enc::Encoder,
|
||||
error::{DecodeError, EncodeError},
|
||||
BorrowDecode, Decode, Encode,
|
||||
};
|
||||
use chrono::{NaiveDate, TimeZone, Utc};
|
||||
use derive_deref::{Deref, DerefMut};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Newtype ("W" = wrapped) around `chrono::NaiveDate` so foreign traits
/// (bincode codecs, allocative) can be implemented for it.
///
/// `Deref`/`DerefMut` expose the full `NaiveDate` API. Unlike the other
/// wrappers in this module it is NOT `direct_repr!`-stored; it round-trips
/// through its string form instead (see the bincode impls).
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct WNaiveDate(NaiveDate);
|
||||
|
||||
impl WNaiveDate {
|
||||
pub fn wrap(date: NaiveDate) -> Self {
|
||||
Self(date)
|
||||
}
|
||||
|
||||
pub fn from_timestamp(timestamp: u32) -> Self {
|
||||
Self(
|
||||
Utc.timestamp_opt(i64::from(timestamp), 0)
|
||||
.unwrap()
|
||||
.date_naive(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for WNaiveDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encode for WNaiveDate {
|
||||
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
|
||||
Encode::encode(&self.to_string(), encoder)
|
||||
}
|
||||
}
|
||||
|
||||
impl Decode for WNaiveDate {
|
||||
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
|
||||
let str: String = Decode::decode(decoder)?;
|
||||
|
||||
Ok(Self(NaiveDate::from_str(&str).unwrap()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> BorrowDecode<'de> for WNaiveDate {
|
||||
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
|
||||
let str: String = BorrowDecode::borrow_decode(decoder)?;
|
||||
|
||||
Ok(Self(NaiveDate::from_str(&str).unwrap()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Allocative for WNaiveDate {
    // `WNaiveDate` owns no heap allocations, so reporting its inline size is
    // all the memory profiler needs.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}
|
||||
Reference in New Issue
Block a user