general: snapshot

This commit is contained in:
k
2024-07-20 23:13:41 +02:00
parent d8a5b4a2e6
commit a145b35ad1
100 changed files with 5402 additions and 2967 deletions

View File

@@ -2,14 +2,14 @@ use allocative::Allocative;
use color_eyre::eyre::eyre;
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use super::{AddressType, EmptyAddressData, LiquidityClassification, Price, WAmount};
use super::{AddressType, Amount, EmptyAddressData, LiquidityClassification, Price};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Allocative)]
pub struct AddressData {
pub address_type: AddressType,
pub amount: WAmount,
pub sent: WAmount,
pub received: WAmount,
pub amount: Amount,
pub sent: Amount,
pub received: Amount,
pub realized_cap: Price,
pub outputs_len: u32,
}
@@ -19,15 +19,15 @@ impl AddressData {
pub fn new(address_type: AddressType) -> Self {
Self {
address_type,
amount: WAmount::ZERO,
sent: WAmount::ZERO,
received: WAmount::ZERO,
amount: Amount::ZERO,
sent: Amount::ZERO,
received: Amount::ZERO,
realized_cap: Price::ZERO,
outputs_len: 0,
}
}
pub fn receive(&mut self, amount: WAmount, price: Price) {
pub fn receive(&mut self, amount: Amount, price: Price) {
let previous_amount = self.amount;
let new_amount = previous_amount + amount;
@@ -43,7 +43,7 @@ impl AddressData {
self.realized_cap += received_value;
}
pub fn send(&mut self, amount: WAmount, previous_price: Price) -> color_eyre::Result<()> {
pub fn send(&mut self, amount: Amount, previous_price: Price) -> color_eyre::Result<()> {
let previous_amount = self.amount;
if previous_amount < amount {
@@ -66,7 +66,7 @@ impl AddressData {
#[inline(always)]
pub fn is_empty(&self) -> bool {
if self.amount == WAmount::ZERO {
if self.amount == Amount::ZERO {
if self.outputs_len != 0 {
unreachable!();
}
@@ -80,7 +80,7 @@ impl AddressData {
pub fn from_empty(empty: &EmptyAddressData) -> Self {
Self {
address_type: empty.address_type,
amount: WAmount::ZERO,
amount: Amount::ZERO,
sent: empty.transfered,
received: empty.transfered,
realized_cap: Price::ZERO,

View File

@@ -1,10 +1,10 @@
use super::{AddressData, Price, WAmount};
use super::{AddressData, Amount, Price};
#[derive(Debug)]
pub struct AddressRealizedData {
pub initial_address_data: AddressData,
pub received: WAmount,
pub sent: WAmount,
pub received: Amount,
pub sent: Amount,
pub profit: Price,
pub loss: Price,
pub value_created: Price,
@@ -16,8 +16,8 @@ pub struct AddressRealizedData {
impl AddressRealizedData {
pub fn default(initial_address_data: &AddressData) -> Self {
Self {
received: WAmount::ZERO,
sent: WAmount::ZERO,
received: Amount::ZERO,
sent: Amount::ZERO,
profit: Price::ZERO,
loss: Price::ZERO,
utxos_created: 0,
@@ -28,12 +28,12 @@ impl AddressRealizedData {
}
}
pub fn receive(&mut self, amount: WAmount) {
pub fn receive(&mut self, amount: Amount) {
self.received += amount;
self.utxos_created += 1;
}
pub fn send(&mut self, amount: WAmount, current_price: Price, previous_price: Price) {
pub fn send(&mut self, amount: Amount, current_price: Price, previous_price: Price) {
self.sent += amount;
self.utxos_destroyed += 1;

View File

@@ -1,6 +1,6 @@
use allocative::Allocative;
use super::WAmount;
use super::Amount;
#[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Allocative)]
pub enum AddressSize {
@@ -16,7 +16,7 @@ pub enum AddressSize {
}
impl AddressSize {
pub fn from_amount(amount: WAmount) -> Self {
pub fn from_amount(amount: Amount) -> Self {
match amount.to_sat() {
0 => Self::Empty,
1..=9_999_999 => Self::Plankton,

View File

@@ -10,11 +10,13 @@ use bincode::{
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use bitcoin::Amount;
use bitcoin::Amount as BitcoinAmount;
use derive_deref::{Deref, DerefMut};
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use serde::{Deserialize, Serialize};
use super::Height;
#[derive(
Debug,
PartialEq,
@@ -29,98 +31,106 @@ use serde::{Deserialize, Serialize};
Serialize,
Deserialize,
)]
pub struct WAmount(Amount);
direct_repr!(WAmount);
pub struct Amount(BitcoinAmount);
direct_repr!(Amount);
impl WAmount {
pub const ZERO: Self = Self(Amount::ZERO);
impl Amount {
pub const ZERO: Self = Self(BitcoinAmount::ZERO);
pub const ONE_BTC_F64: f64 = 100_000_000.0;
#[inline(always)]
pub fn wrap(amount: Amount) -> Self {
pub fn wrap(amount: BitcoinAmount) -> Self {
Self(amount)
}
#[inline(always)]
pub fn from_sat(sats: u64) -> Self {
Self(Amount::from_sat(sats))
Self(BitcoinAmount::from_sat(sats))
}
}
impl Add for WAmount {
type Output = WAmount;
impl Add for Amount {
type Output = Amount;
fn add(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() + rhs.to_sat())
fn add(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() + rhs.to_sat())
}
}
impl AddAssign for WAmount {
impl AddAssign for Amount {
fn add_assign(&mut self, rhs: Self) {
*self = WAmount::from_sat(self.to_sat() + rhs.to_sat());
*self = Amount::from_sat(self.to_sat() + rhs.to_sat());
}
}
impl Sub for WAmount {
type Output = WAmount;
impl Sub for Amount {
type Output = Amount;
fn sub(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() - rhs.to_sat())
fn sub(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() - rhs.to_sat())
}
}
impl SubAssign for WAmount {
impl SubAssign for Amount {
fn sub_assign(&mut self, rhs: Self) {
*self = WAmount::from_sat(self.to_sat() - rhs.to_sat());
*self = Amount::from_sat(self.to_sat() - rhs.to_sat());
}
}
impl Mul<WAmount> for WAmount {
type Output = WAmount;
impl Mul<Amount> for Amount {
type Output = Amount;
fn mul(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() * rhs.to_sat())
fn mul(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() * rhs.to_sat())
}
}
impl Mul<u64> for WAmount {
type Output = WAmount;
impl Mul<u64> for Amount {
type Output = Amount;
fn mul(self, rhs: u64) -> Self::Output {
WAmount::from_sat(self.to_sat() * rhs)
Amount::from_sat(self.to_sat() * rhs)
}
}
impl Sum for WAmount {
impl Mul<Height> for Amount {
type Output = Amount;
fn mul(self, rhs: Height) -> Self::Output {
Amount::from_sat(self.to_sat() * *rhs as u64)
}
}
impl Sum for Amount {
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
let sats = iter.map(|amt| amt.to_sat()).sum();
WAmount::from_sat(sats)
Amount::from_sat(sats)
}
}
impl Encode for WAmount {
impl Encode for Amount {
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
Encode::encode(&self.to_sat(), encoder)
}
}
impl Decode for WAmount {
impl Decode for Amount {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let sats: u64 = Decode::decode(decoder)?;
Ok(WAmount::from_sat(sats))
Ok(Amount::from_sat(sats))
}
}
impl<'de> BorrowDecode<'de> for WAmount {
impl<'de> BorrowDecode<'de> for Amount {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let sats: u64 = BorrowDecode::borrow_decode(decoder)?;
Ok(WAmount::from_sat(sats))
Ok(Amount::from_sat(sats))
}
}
impl Allocative for WAmount {
impl Allocative for Amount {
fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
visitor.visit_simple_sized::<Self>();
}

View File

@@ -8,7 +8,7 @@ use ordered_float::FloatCore;
use crate::{bitcoin::TARGET_BLOCKS_PER_DAY, utils::LossyFrom};
use super::{AnyDateMap, AnyHeightMap, AnyMap, DateMap, HeightMap, MapValue, WNaiveDate};
use super::{AnyDateMap, AnyHeightMap, AnyMap, Date, DateMap, Height, HeightMap, MapValue};
#[derive(Default, Allocative)]
pub struct BiMap<T>
@@ -37,11 +37,8 @@ where
}
}
pub fn date_insert_sum_range(
&mut self,
date: WNaiveDate,
date_blocks_range: &RangeInclusive<usize>,
) where
pub fn date_insert_sum_range(&mut self, date: Date, date_blocks_range: &RangeInclusive<u32>)
where
T: Sum,
{
self.date
@@ -50,22 +47,22 @@ where
pub fn multi_date_insert_sum_range(
&mut self,
dates: &[WNaiveDate],
first_height: &mut DateMap<usize>,
last_height: &mut DateMap<usize>,
dates: &[Date],
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
) where
T: Sum,
{
dates.iter().for_each(|date| {
let first_height = first_height.get_or_import(date).unwrap();
let last_height = last_height.get_or_import(date).unwrap();
let range = first_height..=last_height;
let range = (*first_height)..=(*last_height);
self.date.insert(*date, self.height.sum_range(&range));
})
}
pub fn multi_insert_const(&mut self, heights: &[usize], dates: &[WNaiveDate], constant: T) {
pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: T) {
self.height.multi_insert_const(heights, constant);
self.date.multi_insert_const(dates, constant);
@@ -73,8 +70,8 @@ where
pub fn multi_insert_simple_transform<F, K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
transform: &F,
) where
@@ -91,8 +88,8 @@ where
#[allow(unused)]
pub fn multi_insert_add<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
added: &mut BiMap<A>,
adder: &mut BiMap<B>,
) where
@@ -109,8 +106,8 @@ where
pub fn multi_insert_subtract<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
subtracted: &mut BiMap<A>,
subtracter: &mut BiMap<B>,
) where
@@ -128,8 +125,8 @@ where
pub fn multi_insert_multiply<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
multiplied: &mut BiMap<A>,
multiplier: &mut BiMap<B>,
) where
@@ -146,8 +143,8 @@ where
pub fn multi_insert_divide<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
divided: &mut BiMap<A>,
divider: &mut BiMap<B>,
) where
@@ -164,8 +161,8 @@ where
pub fn multi_insert_percentage<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
divided: &mut BiMap<A>,
divider: &mut BiMap<B>,
) where
@@ -182,8 +179,8 @@ where
pub fn multi_insert_cumulative<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
) where
K: MapValue,
@@ -198,8 +195,8 @@ where
pub fn multi_insert_last_x_sum<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
days: usize,
) where
@@ -219,8 +216,8 @@ where
pub fn multi_insert_simple_average<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
days: usize,
) where
@@ -239,8 +236,8 @@ where
pub fn multi_insert_net_change(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<T>,
days: usize,
) where
@@ -257,8 +254,8 @@ where
pub fn multi_insert_median(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<T>,
days: Option<usize>,
) where
@@ -275,8 +272,8 @@ where
#[allow(unused)]
pub fn multi_insert_percentile(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
mut map_and_percentiles: Vec<(&mut BiMap<T>, f32)>,
days: Option<usize>,
) where

View File

@@ -1,29 +1,29 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::{Price, WAmount};
use super::{Amount, Height, Price};
#[derive(Debug, Encode, Decode, Allocative)]
pub struct BlockData {
pub height: u32,
pub height: Height,
pub price: Price,
pub timestamp: u32,
pub amount: WAmount,
pub amount: Amount,
pub utxos: u32,
}
impl BlockData {
pub fn new(height: u32, price: Price, timestamp: u32) -> Self {
pub fn new(height: Height, price: Price, timestamp: u32) -> Self {
Self {
height,
price,
timestamp,
amount: WAmount::ZERO,
amount: Amount::ZERO,
utxos: 0,
}
}
pub fn send(&mut self, amount: WAmount) {
pub fn send(&mut self, amount: Amount) {
self.utxos -= 1;
if self.amount < amount {
@@ -33,7 +33,7 @@ impl BlockData {
self.amount -= amount;
}
pub fn receive(&mut self, amount: WAmount) {
pub fn receive(&mut self, amount: Amount) {
self.utxos += 1;
self.amount += amount;

130
parser/src/structs/date.rs Normal file
View File

@@ -0,0 +1,130 @@
use std::{fmt, str::FromStr};
use allocative::{Allocative, Visitor};
use bincode::{
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use chrono::{Datelike, Days, NaiveDate, TimeZone, Utc};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use super::{DateMapChunkId, MapKey};
const NUMBER_OF_UNSAFE_DATES: usize = 2;
const MIN_YEAR: i32 = 2009;
const APPROX_MAX_YEAR: i32 = 2100;
/// Newtype around `chrono::NaiveDate` used as the key of date-indexed maps.
/// `Deref`/`DerefMut` expose the inner `NaiveDate` API directly.
#[derive(
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Clone,
Copy,
Deref,
DerefMut,
Default,
Serialize,
Deserialize,
)]
pub struct Date(NaiveDate);
impl Date {
    /// Wraps an existing `NaiveDate` in the newtype.
    pub fn wrap(date: NaiveDate) -> Self {
        Self(date)
    }

    /// Builds a `Date` from a Unix timestamp (seconds), interpreted in UTC.
    pub fn from_timestamp(timestamp: u32) -> Self {
        let datetime = Utc.timestamp_opt(i64::from(timestamp), 0).unwrap();
        Self::wrap(datetime.date_naive())
    }
}
impl MapKey<DateMapChunkId> for Date {
    fn to_chunk_id(&self) -> DateMapChunkId {
        DateMapChunkId::new(self)
    }

    /// First date (counting back from `self`) still considered unsafe,
    /// i.e. the last `NUMBER_OF_UNSAFE_DATES` dates.
    fn to_first_unsafe(&self) -> Option<Self> {
        let back = (NUMBER_OF_UNSAFE_DATES - 1) as u64;
        self.checked_sub_days(Days::new(back)).map(Self::wrap)
    }

    fn to_serialized_key(&self) -> Self {
        *self
    }

    fn is_out_of_bounds(&self) -> bool {
        let year = self.year();
        year < MIN_YEAR || year > APPROX_MAX_YEAR
    }

    /// First date of a chunk. January 2009 starts at the 3rd; every other
    /// month starts at the 1st.
    fn is_first(&self) -> bool {
        match (self.year(), self.month()) {
            (2009, 1) => self.day() == 3,
            _ => self.day() == 1,
        }
    }

    fn checked_sub(&self, days: usize) -> Option<Self> {
        self.checked_sub_days(Days::new(days as u64)).map(Self::wrap)
    }

    fn min_percentile_key() -> Self {
        Self::wrap(NaiveDate::from_ymd_opt(2012, 1, 1).unwrap())
    }

    /// Iterates day by day from `self` to `other`, both inclusive.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        let end = *other;
        self.iter_days()
            .map(Self::wrap)
            .take_while(move |date| date <= &end)
    }

    fn map_name<'a>() -> &'a str {
        "date"
    }
}
impl fmt::Display for Date {
    /// Renders the inner `NaiveDate` via its `Debug` output, matching what
    /// the original delegation to `fmt::Debug::fmt` produced.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
impl Encode for Date {
    /// Serializes the date as its `Display` string.
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        let repr = self.to_string();
        Encode::encode(&repr, encoder)
    }
}
impl Decode for Date {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let str: String = Decode::decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl<'de> BorrowDecode<'de> for Date {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let str: String = BorrowDecode::borrow_decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
/// Manual `Allocative` impl: `Date` owns no heap memory, so visiting its
/// inline size is sufficient.
impl Allocative for Date {
fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
visitor.visit_simple_sized::<Self>();
}
}

View File

@@ -1,16 +1,16 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::{BlockData, BlockPath, WNaiveDate};
use super::{BlockData, BlockPath, Date};
#[derive(Debug, Encode, Decode, Allocative)]
pub struct DateData {
pub date: WNaiveDate,
pub date: Date,
pub blocks: Vec<BlockData>,
}
impl DateData {
pub fn new(date: WNaiveDate, blocks: Vec<BlockData>) -> Self {
pub fn new(date: Date, blocks: Vec<BlockData>) -> Self {
Self { date, blocks }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,33 @@
use allocative::Allocative;
use chrono::Datelike;
use crate::Date;
use super::MapChunkId;
/// Chunk id for date-keyed maps: one chunk per calendar year, identified by
/// the year number (e.g. 2009).
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative)]
pub struct DateMapChunkId(i32);
impl DateMapChunkId {
/// Builds the chunk id for `date` from its year.
pub fn new(date: &Date) -> Self {
Self(date.year())
}
}
impl MapChunkId for DateMapChunkId {
    /// Chunk file stem: the year as a decimal string.
    fn to_name(&self) -> String {
        format!("{}", self.0)
    }

    /// Inverse of `to_name`; panics when `name` is not a valid integer
    /// (chunk files are expected to be named after their year).
    fn from_name(name: &str) -> Self {
        let year: i32 = name.parse().unwrap();
        Self(year)
    }

    fn to_usize(self) -> usize {
        self.0 as usize
    }

    fn from_usize(id: usize) -> Self {
        Self(id as i32)
    }
}

View File

@@ -1,12 +1,12 @@
use allocative::Allocative;
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use super::{AddressData, AddressType, WAmount};
use super::{AddressData, AddressType, Amount};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub struct EmptyAddressData {
pub address_type: AddressType,
pub transfered: WAmount,
pub transfered: Amount,
}
direct_repr!(EmptyAddressData);

View File

@@ -0,0 +1,860 @@
use std::{
collections::{BTreeMap, VecDeque},
fmt::Debug,
fs,
iter::Sum,
mem,
ops::{Add, ControlFlow, Div, Mul, Sub},
path::{Path, PathBuf},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::{FloatCore, OrderedFloat};
use serde::{de::DeserializeOwned, Serialize};
use crate::{log, utils::LossyFrom, Serialization};
use super::{AnyMap, MapValue};
/// A key type usable in a `GenericMap`: knows which chunk it belongs to,
/// how to step backwards, and how to iterate forward to another key.
pub trait MapKey<ChunkId>
where
Self: Sized + PartialOrd + Ord + Clone + Copy + Debug,
ChunkId: MapChunkId,
{
/// Chunk this key is stored in.
fn to_chunk_id(&self) -> ChunkId;
/// First key, counting back from `self`, whose value may still change.
fn to_first_unsafe(&self) -> Option<Self>;
/// Key under which the value is stored inside its chunk.
fn to_serialized_key(&self) -> Self;
/// Whether the key lies outside the supported range.
fn is_out_of_bounds(&self) -> bool;
/// Whether this is the first key of its chunk.
fn is_first(&self) -> bool;
/// `self - x`, or `None` on underflow.
fn checked_sub(&self, x: usize) -> Option<Self>;
/// Smallest key for which percentile computations are meaningful.
fn min_percentile_key() -> Self;
/// Iterates from `self` up to `other` (see implementors for bounds).
fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self>;
/// Folder name used on disk for this key kind (e.g. "date").
fn map_name<'a>() -> &'a str;
/// Optional usize conversions; only key types that are index-like
/// override these — the defaults deliberately panic.
fn from_usize(_: usize) -> Self {
unreachable!()
}
fn to_usize(&self) -> usize {
unreachable!()
}
}
/// On-disk representation of one chunk of a `GenericMap`.
pub trait MapSerialized<Key, Value, ChunkId>
where
Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
ChunkId: MapChunkId,
{
/// Fresh, empty chunk tagged with `version`.
fn new(version: u32) -> Self;
/// Last key present in this chunk, given the chunk's id.
fn get_last_key(&self, last_chunk_id: &ChunkId) -> Option<Key>;
/// Version the chunk was written with (compared against the map's).
fn version(&self) -> u32;
fn get(&self, serialized_key: &Key) -> Option<&Value>;
/// Most recent value in the chunk, if any.
fn last(&self) -> Option<&Value>;
/// Bulk-inserts `map` into the chunk.
fn extend(&mut self, map: BTreeMap<Key, Value>);
}
/// Identifier of one chunk file; convertible to and from the file stem.
pub trait MapChunkId
where
Self: Ord + Debug + Copy + Clone,
{
/// File stem used for this chunk on disk.
fn to_name(&self) -> String;
/// Inverse of `to_name`.
fn from_name(name: &str) -> Self;
fn to_usize(self) -> usize;
fn from_usize(id: usize) -> Self;
}
/// Chunked, versioned, disk-backed map. Values are grouped into chunks
/// (one file per `ChunkId`); only the newest chunks are kept in memory.
#[derive(Default, Debug, Allocative)]
pub struct GenericMap<Key, Value, ChunkId, Serialized> {
// Bumping this invalidates previously exported chunk files.
version: u32,
// Folder holding one serialized file per chunk.
path_all: String,
// Optional path of the exported "last value" file.
path_last: Option<String>,
// How many of the newest chunks to keep in memory (>= 1).
chunks_in_memory: usize,
serialization: Serialization,
// Last key found on disk at startup, if any.
pub initial_last_key: Option<Key>,
// First key whose value is NOT trusted from the previous run.
pub initial_first_unsafe_key: Option<Key>,
// Chunks currently loaded in memory.
imported: BTreeMap<ChunkId, Serialized>,
// Staged values awaiting `pre_export`/`export`.
to_insert: BTreeMap<ChunkId, BTreeMap<Key, Value>>,
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
Value: MapValue,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Serialized: MapSerialized<Key, Value, ChunkId>,
{
/// Binary-serialized map keeping only the latest chunk in memory and
/// exporting a `last` file.
pub fn new_bin(version: u32, path: &str) -> Self {
Self::new(version, path, Serialization::Binary, 1, true)
}
/// Binary-serialized map with an explicit `export_last` choice.
pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self {
Self::new(version, path, Serialization::Binary, 1, export_last)
}
/// JSON-serialized map keeping every chunk in memory (`usize::MAX`).
pub fn new_json(version: u32, path: &str, export_last: bool) -> Self {
Self::new(version, path, Serialization::Json, usize::MAX, export_last)
}
/// Shared constructor: prepares the on-disk layout, imports the newest
/// `chunks_in_memory` chunk files, and derives the initial last /
/// first-unsafe keys from what was imported.
fn new(
version: u32,
path: &str,
serialization: Serialization,
chunks_in_memory: usize,
export_last: bool,
) -> Self {
if chunks_in_memory < 1 {
panic!("Should always have at least the latest chunk in memory");
}
// Normalize separators so "a-b_c" style names become nested folders.
let path = path.replace(['-', '_', ' '], "/");
let path_all = format!("{path}/{}", Key::map_name());
fs::create_dir_all(&path_all).unwrap();
let path_last = {
if export_last {
Some(serialization.append_extension(&format!("{path}/last")))
} else {
None
}
};
let mut s = Self {
version,
path_all,
path_last,
chunks_in_memory,
serialization,
initial_last_key: None,
initial_first_unsafe_key: None,
to_insert: BTreeMap::default(),
imported: BTreeMap::default(),
};
// Import the newest chunks first (`rev` + `take`). On a version
// mismatch, every chunk file on disk is deleted so the map is
// recomputed from scratch.
s.read_dir()
.into_iter()
.rev()
.take(chunks_in_memory)
.for_each(|(chunk_start, path)| {
if let Ok(serialized) = s.import(&path) {
if serialized.version() == s.version {
s.imported.insert(chunk_start, serialized);
} else {
// NOTE(review): this wipes ALL chunk files but keeps
// chunks already inserted into `imported` in memory —
// presumably a later recomputation overwrites them;
// confirm against callers.
s.read_dir()
.iter()
.for_each(|(_, path)| fs::remove_file(path).unwrap())
}
}
});
// Derive resume points from the newest imported chunk.
s.initial_last_key = s
.imported
.iter()
.last()
.and_then(|(last_chunk_id, serialized)| serialized.get_last_key(last_chunk_id));
s.initial_first_unsafe_key = s
.initial_last_key
.and_then(|last_key| last_key.to_first_unsafe());
if s.initial_first_unsafe_key.is_none() {
log(&format!("New {path}/{}", Key::map_name()));
}
s
}
/// Lists the chunk files currently present in this map's folder.
fn read_dir(&self) -> BTreeMap<ChunkId, PathBuf> {
Self::_read_dir(&self.path_all, &self.serialization)
}
/// Static variant of `read_dir`, usable while `self` is partially borrowed.
pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap<ChunkId, PathBuf> {
fs::read_dir(path)
.unwrap()
.map(|entry| entry.unwrap().path())
.filter(|path| {
// Keep only files with this serialization's extension.
let extension = path.extension().unwrap().to_str().unwrap();
path.is_file() && extension == serialization.to_extension()
})
.map(|path| {
// File stem encodes the chunk id (e.g. the year).
let chunk_id = ChunkId::from_name(path.file_stem().unwrap().to_str().unwrap());
(chunk_id, path)
})
.collect()
}
/// Deserializes one chunk file.
fn import(&self, path: &Path) -> color_eyre::Result<Serialized> {
self.serialization
.import::<Serialized>(path.to_str().unwrap())
}
/// Stages `value` for insertion unless the key is in the safe range
/// (already computed and persisted in a prior run); returns `value`
/// either way so calls can be chained.
pub fn insert(&mut self, key: Key, value: Value) -> Value {
if !self.is_key_safe(key) {
self.to_insert
.entry(key.to_chunk_id())
.or_default()
.insert(key.to_serialized_key(), value);
}
value
}
/// Stages `Value::default()` at `key`.
pub fn insert_default(&mut self, key: Key) -> Value {
self.insert(key, Value::default())
}
/// A key is "safe" when it is strictly before the first unsafe key,
/// i.e. its value from the previous run can be trusted.
#[inline(always)]
pub fn is_key_safe(&self, key: Key) -> bool {
self.initial_first_unsafe_key
.map_or(false, |initial_first_unsafe_key| {
initial_first_unsafe_key > key
})
}
/// Reads a value from staged inserts first, then from chunks already in
/// memory. Does NOT touch the disk (see `get_or_import`).
pub fn get(&self, key: &Key) -> Option<Value> {
let chunk_id = key.to_chunk_id();
let serialized_key = key.to_serialized_key();
self.to_insert
.get(&chunk_id)
.and_then(|tree| tree.get(&serialized_key).cloned())
.or_else(|| {
self.imported
.get(&chunk_id)
.and_then(|serialized| serialized.get(&serialized_key))
.cloned()
})
}
/// Like `get`, but lazily imports the key's chunk file from disk when
/// it is not in memory yet. Out-of-bounds keys short-circuit to `None`.
pub fn get_or_import(&mut self, key: &Key) -> Option<Value> {
if key.is_out_of_bounds() {
return None;
}
let chunk_id = key.to_chunk_id();
let serialized_key = key.to_serialized_key();
self.to_insert
.get(&chunk_id)
.and_then(|tree| tree.get(&serialized_key).cloned())
.or_else(|| {
#[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
if !self.imported.contains_key(&chunk_id) {
let dir_content = self.read_dir();
if let Some(path) = dir_content.get(&chunk_id) {
let serialized = self.import(path).unwrap();
self.imported.insert(chunk_id, serialized);
}
}
self.imported
.get(&chunk_id)
.and_then(|serialized| serialized.get(&serialized_key))
.cloned()
})
}
}
impl<Key, Value, ChunkId, Serialized> AnyMap for GenericMap<Key, Value, ChunkId, Serialized>
where
Value: MapValue,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Serialized: MapSerialized<Key, Value, ChunkId>,
{
fn path(&self) -> &str {
&self.path_all
}
fn path_last(&self) -> &Option<String> {
&self.path_last
}
/// Type name of the stored value, for diagnostics.
fn t_name(&self) -> &str {
std::any::type_name::<Value>()
}
/// Merges staged inserts into the imported chunks. When staged data does
/// not start at the chunk's first key, the chunk is imported from disk
/// first so existing entries are not lost.
fn pre_export(&mut self) {
self.to_insert.iter_mut().for_each(|(chunk_id, map)| {
if let Some((key, _)) = map.first_key_value() {
if !key.is_first() && !self.imported.contains_key(chunk_id) {
// Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut
let dir_content = Self::_read_dir(&self.path_all, &self.serialization);
let path = dir_content.get(chunk_id).unwrap_or_else(|| {
dbg!(&self.path_all, chunk_id, &dir_content);
panic!();
});
let serialized = self
.serialization
.import::<Serialized>(path.to_str().unwrap())
.unwrap();
self.imported.insert(*chunk_id, serialized);
}
}
// Drain the staged tree into the (possibly freshly created) chunk.
self.imported
.entry(*chunk_id)
.or_insert(Serialized::new(self.version))
.extend(mem::take(map));
});
}
/// Writes every chunk touched by staged inserts back to disk; when a
/// `last` path is configured, also exports the overall last value.
fn export(&self) -> color_eyre::Result<()> {
let len = self.imported.len();
self.to_insert.iter().enumerate().try_for_each(
|(index, (chunk_id, map))| -> color_eyre::Result<()> {
// `pre_export` must have drained every staged map already.
if !map.is_empty() {
unreachable!()
}
let path = self.serialization.append_extension(&format!(
"{}/{}",
self.path_all,
chunk_id.to_name()
));
let serialized = self.imported.get(chunk_id).unwrap_or_else(|| {
dbg!(&self.path_all, chunk_id, &self.imported);
panic!();
});
self.serialization.export(&path, serialized)?;
// NOTE(review): `len` counts `imported` while `index` enumerates
// `to_insert`; these can differ, so confirm the last-file export
// triggers on the intended iteration.
if index == len - 1 {
if let Some(path_last) = self.path_last.as_ref() {
self.serialization
.export(path_last, serialized.last().unwrap())?;
}
}
Ok(())
},
)
}
/// Drops all but the newest `chunks_in_memory` chunks from memory and
/// clears the staging area.
fn post_export(&mut self) {
self.imported
.keys()
.rev()
.enumerate()
.filter(|(index, _)| *index + 1 > self.chunks_in_memory)
.map(|(_, key)| *key)
.collect_vec()
.iter()
.for_each(|key| {
self.imported.remove(key);
});
self.to_insert.clear();
}
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
Value: MapValue,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Serialized: MapSerialized<Key, Value, ChunkId>,
{
pub fn sum_keys(&self, keys: &[Key]) -> Value
where
Value: Sum,
{
keys.iter().flat_map(|key| self.get(key)).sum::<Value>()
}
pub fn average_keys(&self, keys: &[Key]) -> f32
where
Value: Sum,
f32: LossyFrom<Value>,
{
f32::lossy_from(self.sum_keys(keys)) / keys.len() as f32
}
pub fn multi_insert<F>(&mut self, keys: &[Key], mut callback: F)
where
F: FnMut(&Key) -> Value,
{
keys.iter().for_each(|key| {
self.insert(*key, callback(key));
});
}
pub fn multi_insert_const(&mut self, keys: &[Key], constant: Value) {
keys.iter().for_each(|key| {
self.insert(*key, constant);
});
}
pub fn multi_insert_simple_transform<SourceValue, SourceSerialized, F>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
transform: F,
) where
SourceValue: MapValue,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
F: Fn(SourceValue) -> Value,
{
keys.iter().for_each(|key| {
self.insert(*key, transform(source.get_or_import(key).unwrap()));
});
}
pub fn multi_insert_complex_transform<SourceValue, SourceSerialized, F>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
mut transform: F,
) where
SourceValue: MapValue,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
F: FnMut(
(
SourceValue,
&Key,
&mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
),
) -> Value,
{
keys.iter().for_each(|key| {
self.insert(
*key,
transform((source.get_or_import(key).unwrap(), key, source)),
);
});
}
pub fn multi_insert_add<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
added: &mut GenericMap<Key, A, ChunkId, ASerialized>,
adder: &mut GenericMap<Key, B, ChunkId, BSerialized>,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Add<Output = Value>,
{
keys.iter().for_each(|key| {
self.insert(
*key,
Value::lossy_from(added.get_or_import(key).unwrap())
+ Value::lossy_from(adder.get_or_import(key).unwrap()),
);
});
}
pub fn multi_insert_subtract<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
subtracted: &mut GenericMap<Key, A, ChunkId, ASerialized>,
subtracter: &mut GenericMap<Key, B, ChunkId, BSerialized>,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Sub<Output = Value>,
{
keys.iter().for_each(|key| {
self.insert(
*key,
Value::lossy_from(subtracted.get_or_import(key).unwrap())
- Value::lossy_from(subtracter.get_or_import(key).unwrap()),
);
});
}
pub fn multi_insert_multiply<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
multiplied: &mut GenericMap<Key, A, ChunkId, ASerialized>,
multiplier: &mut GenericMap<Key, B, ChunkId, BSerialized>,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Mul<Output = Value>,
{
keys.iter().for_each(|key| {
self.insert(
*key,
Value::lossy_from(multiplied.get_or_import(key).unwrap())
* Value::lossy_from(multiplier.get_or_import(key).unwrap()),
);
});
}
pub fn multi_insert_divide<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
{
self._multi_insert_divide(keys, divided, divider, false)
}
pub fn multi_insert_percentage<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
{
self._multi_insert_divide(keys, divided, divider, true)
}
fn _multi_insert_divide<A, B, ASerialized, BSerialized>(
&mut self,
keys: &[Key],
divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
as_percentage: bool,
) where
A: MapValue,
ASerialized: MapSerialized<Key, A, ChunkId>,
B: MapValue,
BSerialized: MapSerialized<Key, B, ChunkId>,
Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
{
let multiplier = Value::from(if as_percentage { 100 } else { 1 });
keys.iter().for_each(|key| {
self.insert(
*key,
Value::lossy_from(divided.get_or_import(key).unwrap())
/ Value::lossy_from(divider.get_or_import(key).unwrap())
* multiplier,
);
});
}
pub fn multi_insert_cumulative<SourceValue, SourceSerialized>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
) where
SourceValue: MapValue,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
{
self._multi_insert_last_x_sum(keys, source, None)
}
pub fn multi_insert_last_x_sum<SourceValue, SourceSerialized>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
len: usize,
) where
SourceValue: MapValue,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
{
self._multi_insert_last_x_sum(keys, source, Some(len))
}
fn _multi_insert_last_x_sum<SourceValue, SourceSerialized>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
len: Option<usize>,
) where
SourceValue: MapValue,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
{
let mut sum = None;
keys.iter().for_each(|key| {
let to_subtract = len
.and_then(|x| {
key.checked_sub(x)
.and_then(|previous_key| source.get_or_import(&previous_key))
})
.unwrap_or_default();
let previous_sum = sum.unwrap_or_else(|| {
key.checked_sub(1)
.and_then(|previous_sum_key| self.get_or_import(&previous_sum_key))
.unwrap_or_default()
});
let last_value = source.get_or_import(key).unwrap_or_else(|| {
dbg!(&source.to_insert, &source.path(), key);
panic!();
});
sum.replace(
previous_sum + Value::lossy_from(last_value) - Value::lossy_from(to_subtract),
);
self.insert(*key, sum.unwrap());
});
}
pub fn multi_insert_simple_average<SourceValue, SourceSerialized>(
&mut self,
keys: &[Key],
source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
len: usize,
) where
SourceValue: MapValue + Sum,
SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
Value: Into<f32> + From<f32>,
f32: LossyFrom<SourceValue>,
{
if len <= 1 {
panic!("Average of 1 or less is not useful");
}
let len = len as f32;
let mut average = None;
keys.iter().for_each(|key| {
let previous_average: f32 = average
.unwrap_or_else(|| {
key.checked_sub(1)
.and_then(|previous_average_key| self.get(&previous_average_key))
.unwrap_or_default()
})
.into();
let mut last_value = f32::lossy_from(source.get_or_import(key).unwrap_or_else(|| {
dbg!(key);
panic!()
}));
if last_value.is_nan() {
last_value = 0.0;
}
average.replace(((previous_average * (len - 1.0) + last_value) / len).into());
self.insert(*key, average.unwrap());
});
}
pub fn multi_insert_net_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
where
Value: Sub<Output = Value>,
{
keys.iter().for_each(|key| {
let previous_value = key
.checked_sub(len)
.and_then(|previous_key| source.get_or_import(&previous_key))
.unwrap_or_default();
let last_value = source.get_or_import(key).unwrap();
let net_change = last_value - previous_value;
self.insert(*key, net_change);
});
}
/// Inserts the percentage change versus `len` keys earlier:
/// `(current / earlier - 1) * 100`.
///
/// NOTE: a missing/zero earlier value yields ±infinity or NaN here, since
/// the baseline defaults to zero.
pub fn multi_insert_percentage_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
where
    Value: Sub<Output = Value> + FloatCore,
{
    let one = Value::from(1.0).unwrap();
    let hundred = Value::from(100.0).unwrap();

    for key in keys {
        let earlier = key
            .checked_sub(len)
            .and_then(|previous_key| source.get_or_import(&previous_key))
            .unwrap_or_default();

        let current = source.get_or_import(key).unwrap();

        self.insert(*key, (current / earlier - one) * hundred);
    }
}
/// Median = 50th percentile, over the whole series or a rolling window of
/// `len` values.
pub fn multi_insert_median(&mut self, keys: &[Key], source: &mut Self, len: Option<usize>)
where
    Value: FloatCore,
{
    source.multi_insert_percentile(keys, vec![(self, 0.5)], len);
}
/// Inserts, for every key, the requested percentile(s) of the source
/// values — over the whole series (`len == None`) or a rolling window of
/// `len` values.
///
/// `map_and_percentiles` pairs each destination map with a percentile in
/// `0.0..=1.0`. Keys below `Key::min_percentile_key()` receive NaN.
///
/// Keeps two views of the window: `ordered_vec` (insertion order, used to
/// evict the oldest value) and `sorted_vec` (kept sorted for binary-search
/// percentile lookup). NaN source values are skipped entirely.
pub fn multi_insert_percentile(
    &mut self,
    keys: &[Key],
    mut map_and_percentiles: Vec<(&mut Self, f32)>,
    len: Option<usize>,
) where
    Value: FloatCore,
{
    if len.map_or(false, |size| size < 3) {
        panic!("Computing a percentile for a size lower than 3 is useless");
    }

    let mut ordered_vec = None;
    let mut sorted_vec = None;

    let min_percentile_key = Key::min_percentile_key();

    let nan = Value::from(f32::NAN).unwrap();
    let two = Value::from(2.0).unwrap();

    keys.iter().cloned().try_for_each(|key| {
        // Too early in the series for a meaningful percentile.
        if key < min_percentile_key {
            map_and_percentiles.iter_mut().for_each(|(map, _)| {
                (*map).insert(key, nan);
            });

            return ControlFlow::Continue::<()>(());
        }

        if let Some(start) = len.map_or(Some(min_percentile_key), |size| key.checked_sub(size))
        {
            if sorted_vec.is_none() {
                // First usable key: build the whole window from scratch.
                let mut vec = start
                    .iter_up_to(&key)
                    .flat_map(|key| self.get_or_import(&key))
                    .filter(|f| !f.is_nan())
                    .map(|f| OrderedFloat(f))
                    .collect_vec();

                if len.is_some() {
                    ordered_vec.replace(VecDeque::from(vec.clone()));
                }

                vec.sort_unstable();
                sorted_vec.replace(vec);
            } else {
                // Incremental update: evict the oldest value (rolling mode
                // only, once the window is full) and insert the newest at
                // its sorted position.
                let float_value = self.get_or_import(&key).unwrap();

                if !float_value.is_nan() {
                    let float_value = OrderedFloat(float_value);

                    if let Some(len) = len {
                        if let Some(ordered_vec) = ordered_vec.as_mut() {
                            if ordered_vec.len() == len {
                                let first = ordered_vec.pop_front().unwrap();
                                let pos =
                                    sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
                                sorted_vec.as_mut().unwrap().remove(pos);
                            }

                            ordered_vec.push_back(float_value);
                        }
                    }

                    let pos = sorted_vec
                        .as_ref()
                        .unwrap()
                        .binary_search(&float_value)
                        .unwrap_or_else(|pos| pos);

                    sorted_vec.as_mut().unwrap().insert(pos, float_value);
                }
            }

            let vec = sorted_vec.as_ref().unwrap();
            let len = vec.len();

            map_and_percentiles
                .iter_mut()
                .for_each(|(map, percentile)| {
                    if !(0.0..=1.0).contains(percentile) {
                        panic!("The percentile should be between 0.0 and 1.0");
                    }

                    let value = {
                        if len < 2 {
                            nan
                        } else {
                            let index = (len - 1) as f32 * *percentile;

                            let fract = index.fract();

                            // Fractional index: average the two neighbors
                            // (midpoint, not fraction-weighted
                            // interpolation).
                            if fract != 0.0 {
                                (vec.get(index.ceil() as usize)
                                    .unwrap_or_else(|| {
                                        dbg!(vec, index, &self.path_all, &self.path_all, len);
                                        panic!()
                                    })
                                    .0
                                    + vec
                                        .get(index as usize)
                                        .unwrap_or_else(|| {
                                            dbg!(
                                                vec,
                                                index,
                                                &self.path_all,
                                                &self.path_all,
                                                len
                                            );
                                            panic!()
                                        })
                                        .0)
                                    / two
                            } else {
                                vec.get(index as usize)
                                    .unwrap_or_else(|| {
                                        dbg!(vec, index);
                                        panic!();
                                    })
                                    .0
                            }
                        }
                    };

                    (*map).insert(key, value);
                });
        } else {
            // Not enough history yet for a full rolling window.
            map_and_percentiles.iter_mut().for_each(|(map, _)| {
                (*map).insert(key, nan);
            });
        }

        ControlFlow::Continue(())
    });
}
}

View File

@@ -0,0 +1,154 @@
use std::{
fmt,
ops::{Add, AddAssign, Sub},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use crate::{bitcoin::NUMBER_OF_UNSAFE_BLOCKS, HEIGHT_MAP_CHUNK_SIZE};
use super::{HeightMapChunkId, MapKey};
/// Block height — a thin newtype over `u32`, dereferencing to the raw
/// value via `Deref`/`DerefMut`.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
    Encode,
    Decode,
    Allocative,
)]
pub struct Height(u32);
impl Height {
    pub const ZERO: Height = Height(0);

    /// Wraps a raw block height.
    pub fn new(height: u32) -> Self {
        Self(height)
    }

    /// Whether this height is within `3 * NUMBER_OF_UNSAFE_BLOCKS` of the
    /// chain tip.
    ///
    /// Uses `checked_sub` so a chain shorter than the window reports every
    /// height as close to the end instead of panicking on usize underflow.
    pub fn is_close_to_end(&self, block_count: usize) -> bool {
        block_count
            .checked_sub(NUMBER_OF_UNSAFE_BLOCKS * 3)
            .map_or(true, |threshold| **self > threshold as u32)
    }

    /// Whether this height is old enough to be considered re-org safe.
    ///
    /// A chain shorter than `NUMBER_OF_UNSAFE_BLOCKS` has no safe heights
    /// (previously this underflowed and panicked).
    pub fn is_safe(&self, block_count: usize) -> bool {
        block_count
            .checked_sub(NUMBER_OF_UNSAFE_BLOCKS)
            .map_or(false, |threshold| **self < threshold as u32)
    }
}
impl PartialEq<u64> for Height {
fn eq(&self, other: &u64) -> bool {
**self == *other as u32
}
}
impl Add<u32> for Height {
type Output = Height;
fn add(self, rhs: u32) -> Self::Output {
Self::new(*self + rhs)
}
}
impl Add<usize> for Height {
type Output = Height;
fn add(self, rhs: usize) -> Self::Output {
Self::new(*self + rhs as u32)
}
}
impl Sub<Height> for Height {
type Output = Height;
fn sub(self, rhs: Height) -> Self::Output {
Self::new(*self - *rhs)
}
}
impl Sub<u32> for Height {
type Output = Height;
fn sub(self, rhs: u32) -> Self::Output {
Self::new(*self - rhs)
}
}
impl Sub<usize> for Height {
type Output = Height;
fn sub(self, rhs: usize) -> Self::Output {
Self::new(*self - rhs as u32)
}
}
impl AddAssign<usize> for Height {
fn add_assign(&mut self, rhs: usize) {
*self = self.add(rhs);
}
}
impl fmt::Display for Height {
    /// Prints the raw height number.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl MapKey<HeightMapChunkId> for Height {
    /// Chunk containing this height.
    fn to_chunk_id(&self) -> HeightMapChunkId {
        HeightMapChunkId::new(self)
    }

    /// First height still considered re-org unsafe relative to `self`.
    fn to_first_unsafe(&self) -> Option<Self> {
        self.checked_sub(NUMBER_OF_UNSAFE_BLOCKS - 1)
    }

    /// Position of this height within its chunk.
    fn to_serialized_key(&self) -> Self {
        Self::new(**self % HEIGHT_MAP_CHUNK_SIZE)
    }

    fn is_out_of_bounds(&self) -> bool {
        // Equivalent to !(0..=2_100_000).contains(..): u32 is never negative.
        **self > 2_100_000
    }

    fn is_first(&self) -> bool {
        self.0 == 0
    }

    fn checked_sub(&self, amount: usize) -> Option<Self> {
        self.0.checked_sub(amount as u32).map(Self::new)
    }

    /// Below this height percentiles are not computed.
    fn min_percentile_key() -> Self {
        Self(160_000)
    }

    /// Inclusive iterator from `self` up to `other`.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        (self.0..=other.0).map(Self::new)
    }

    fn map_name<'a>() -> &'a str {
        "height"
    }

    fn to_usize(&self) -> usize {
        self.0 as usize
    }

    fn from_usize(value: usize) -> Self {
        Self(value as u32)
    }
}

View File

@@ -0,0 +1,986 @@
use std::{
cmp::Ordering,
collections::{BTreeMap, VecDeque},
fmt::Debug,
fs,
iter::Sum,
mem,
ops::{Add, ControlFlow, Div, Mul, RangeInclusive, Sub},
path::{Path, PathBuf},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::{FloatCore, OrderedFloat};
use serde::{Deserialize, Serialize};
use crate::{
bitcoin::NUMBER_OF_UNSAFE_BLOCKS,
io::{format_path, Serialization},
utils::{log, LossyFrom},
};
use super::{AnyMap, MapValue};
pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000;
/// On-disk form of one chunk: a version tag plus the values of consecutive
/// heights, indexed by offset from the chunk's start height.
#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedHeightMap<T> {
    version: u32,
    map: Vec<T>,
}
#[derive(Default, Allocative)]
pub struct HeightMap<T>
where
T: MapValue,
{
version: u32,
path_all: String,
path_last: Option<String>,
chunks_in_memory: usize,
serialization: Serialization,
initial_last_height: Option<usize>,
initial_first_unsafe_height: Option<usize>,
imported: BTreeMap<usize, SerializedHeightMap<T>>,
to_insert: BTreeMap<usize, BTreeMap<usize, T>>,
}
impl<T> HeightMap<T>
where
    T: MapValue,
{
    /// Binary-serialized map keeping one chunk in memory, exporting `last`.
    pub fn new_bin(version: u32, path: &str) -> Self {
        Self::new(version, path, Serialization::Binary, 1, true)
    }

    /// Binary-serialized map with configurable `last` export.
    pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self {
        Self::new(version, path, Serialization::Binary, 1, export_last)
    }

    /// JSON-serialized map keeping every chunk in memory.
    pub fn new_json(version: u32, path: &str, export_last: bool) -> Self {
        Self::new(version, path, Serialization::Json, usize::MAX, export_last)
    }

    /// Builds the map and eagerly imports the newest `chunks_in_memory`
    /// chunk files found on disk.
    ///
    /// If an imported chunk carries a different `version`, every chunk file
    /// of this map is deleted so the dataset gets recomputed from scratch.
    fn new(
        version: u32,
        path: &str,
        serialization: Serialization,
        chunks_in_memory: usize,
        export_last: bool,
    ) -> Self {
        if chunks_in_memory < 1 {
            panic!("Should always have at least the latest chunk in memory");
        }

        let path = format_path(path);

        let path_all = format!("{path}/height");

        fs::create_dir_all(&path_all).unwrap();

        let path_last = {
            if export_last {
                Some(serialization.append_extension(&format!("{path}/last")))
            } else {
                None
            }
        };

        let mut s = Self {
            version,
            path_all,
            path_last,
            chunks_in_memory,
            serialization,
            initial_first_unsafe_height: None,
            initial_last_height: None,
            to_insert: BTreeMap::default(),
            imported: BTreeMap::default(),
        };

        // Load the newest chunks; purge all files on a version mismatch.
        s.read_dir()
            .into_iter()
            .rev()
            .take(chunks_in_memory)
            .for_each(|(chunk_start, path)| {
                if let Ok(serialized) = s.import(&path) {
                    if serialized.version == s.version {
                        s.imported.insert(chunk_start, serialized);
                    } else {
                        s.read_dir()
                            .iter()
                            .for_each(|(_, path)| fs::remove_file(path).unwrap())
                    }
                }
            });

        // First height NOT yet stored on disk.
        s.initial_last_height = s
            .imported
            .iter()
            .last()
            .map(|(chunk_start, serialized)| chunk_start + serialized.map.len());

        // Heights at or above this may still be re-orged.
        s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| {
            let offset = NUMBER_OF_UNSAFE_BLOCKS - 1;
            last_height.checked_sub(offset)
        });

        if s.initial_first_unsafe_height.is_none() {
            log(&format!("New {path}"));
        }

        s
    }

    /// `start..end` file stem of the chunk containing `height`.
    fn height_to_chunk_name(height: Height) -> String {
        let start = Self::height_to_chunk_start(height);
        let end = start + HEIGHT_MAP_CHUNK_SIZE;

        format!("{start}..{end}")
    }

    /// First height of the chunk containing `height` (floor to a multiple
    /// of `HEIGHT_MAP_CHUNK_SIZE`).
    fn height_to_chunk_start(height: Height) -> usize {
        height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE
    }

    /// Queues `value` at `height` unless that height is already safe
    /// (previously computed and past the re-org window); returns `value`
    /// either way so calls can be chained.
    pub fn insert(&mut self, height: Height, value: T) -> T {
        if !self.is_height_safe(height) {
            self.to_insert
                .entry(Self::height_to_chunk_start(height))
                .or_default()
                .insert(height % HEIGHT_MAP_CHUNK_SIZE, value);
        }

        value
    }

    pub fn insert_default(&mut self, height: Height) -> T {
        self.insert(height, T::default())
    }

    /// Reads pending inserts first, then already-imported chunks. Returns
    /// `None` when the value's chunk isn't loaded in memory.
    pub fn get(&self, height: &usize) -> Option<T> {
        let chunk_start = Self::height_to_chunk_start(*height);

        self.to_insert
            .get(&chunk_start)
            .and_then(|map| map.get(&(height - chunk_start)).cloned())
            .or_else(|| {
                self.imported
                    .get(&chunk_start)
                    .and_then(|serialized| serialized.map.get(height - chunk_start))
                    .cloned()
            })
    }

    /// Like `get`, but lazily imports the chunk file from disk when the
    /// chunk isn't loaded. Panics if the file or the value is missing.
    pub fn get_or_import(&mut self, height: &usize) -> T {
        let chunk_start = Self::height_to_chunk_start(*height);

        self.to_insert
            .get(&chunk_start)
            .and_then(|map| map.get(&(height - chunk_start)).cloned())
            .or_else(|| {
                #[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
                if !self.imported.contains_key(&chunk_start) {
                    let dir_content = self.read_dir();

                    let path = dir_content.get(&chunk_start).unwrap_or_else(|| {
                        dbg!(self.path(), chunk_start, &dir_content);
                        panic!();
                    });

                    let serialized = self.import(path).unwrap();

                    self.imported.insert(chunk_start, serialized);
                }

                self.imported
                    .get(&chunk_start)
                    .and_then(|serialized| serialized.map.get(height - chunk_start))
                    .cloned()
            })
            .unwrap_or_else(|| {
                dbg!(height, self.path());
                panic!();
            })
    }

    /// Safe heights are strictly below the first unsafe height recorded at
    /// startup — their values are already on disk and never recomputed.
    #[inline(always)]
    pub fn is_height_safe(&self, height: Height) -> bool {
        self.initial_first_unsafe_height.unwrap_or(0) > height
    }

    /// Chunk files of this map, keyed by chunk start height.
    fn read_dir(&self) -> BTreeMap<usize, PathBuf> {
        Self::_read_dir(&self.path_all, &self.serialization)
    }

    /// Lists `path`'s files matching `serialization`'s extension, parsing
    /// the leading number of each `start..end` file stem as the key.
    pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap<usize, PathBuf> {
        fs::read_dir(path)
            .unwrap()
            .map(|entry| entry.unwrap().path())
            .filter(|path| {
                let extension = path.extension().unwrap().to_str().unwrap();

                path.is_file() && extension == serialization.to_extension()
            })
            .map(|path| {
                (
                    path.file_stem()
                        .unwrap()
                        .to_str()
                        .unwrap()
                        .split("..")
                        .next()
                        .unwrap()
                        .parse::<usize>()
                        .unwrap(),
                    path,
                )
            })
            .collect()
    }

    /// Deserializes one chunk file.
    fn import(&self, path: &Path) -> color_eyre::Result<SerializedHeightMap<T>> {
        self.serialization
            .import::<SerializedHeightMap<T>>(path.to_str().unwrap())
    }
}
impl<T> AnyMap for HeightMap<T>
where
    T: MapValue,
{
    fn path(&self) -> &str {
        &self.path_all
    }

    fn path_last(&self) -> &Option<String> {
        &self.path_last
    }

    /// Name of the value type (for diagnostics).
    fn t_name(&self) -> &str {
        std::any::type_name::<T>()
    }

    /// Merges every pending insert into the in-memory serialized chunks,
    /// importing a chunk from disk first when the pending values don't
    /// start at that chunk's first slot.
    fn pre_export(&mut self) {
        let to_insert = &mut self.to_insert;

        to_insert.iter_mut().for_each(|(chunk_start, map)| {
            if let Some((key, _)) = map.first_key_value() {
                if *key > 0 && !self.imported.contains_key(chunk_start) {
                    // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut
                    let dir_content = Self::_read_dir(&self.path_all, &self.serialization);

                    let path = dir_content.get(chunk_start).unwrap_or_else(|| {
                        dbg!(&self.path_all, chunk_start, &dir_content);
                        panic!();
                    });

                    let serialized = self
                        .serialization
                        .import::<SerializedHeightMap<T>>(path.to_str().unwrap())
                        .unwrap();

                    self.imported.insert(*chunk_start, serialized);
                }
            }

            let serialized = self
                .imported
                .entry(*chunk_start)
                .or_insert(SerializedHeightMap {
                    version: self.version,
                    map: vec![],
                });

            // Overwrite in place or append; a gap means heights arrived
            // out of order, which is a bug.
            mem::take(map)
                .into_iter()
                .for_each(
                    |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) {
                        Ordering::Greater => serialized.map[chunk_height] = value,
                        Ordering::Equal => serialized.map.push(value),
                        Ordering::Less => {
                            dbg!(&self.path_all, &serialized.map, chunk_height, value);
                            panic!()
                        }
                    },
                );
        });
    }

    /// Writes every touched chunk back to disk; the very last value also
    /// goes to `path_last` when configured. Expects `pre_export` to have
    /// drained the pending values already (the maps stay as empty keys).
    // NOTE(review): `len` counts `imported` chunks but `index` enumerates
    // `to_insert` — if the two differ, the `path_last` export may be
    // skipped; confirm they are always equal here.
    fn export(&self) -> color_eyre::Result<()> {
        let len = self.imported.len();

        self.to_insert.iter().enumerate().try_for_each(
            |(index, (chunk_start, map))| -> color_eyre::Result<()> {
                if !map.is_empty() {
                    unreachable!()
                }

                let chunk_name = Self::height_to_chunk_name(*chunk_start);

                let path = self
                    .serialization
                    .append_extension(&format!("{}/{}", self.path_all, chunk_name));

                let serialized = self.imported.get(chunk_start).unwrap_or_else(|| {
                    dbg!(&self.path_all, chunk_start, &self.imported);
                    panic!();
                });

                self.serialization.export(&path, serialized)?;

                if index == len - 1 {
                    if let Some(path_last) = self.path_last.as_ref() {
                        self.serialization
                            .export(path_last, serialized.map.last().unwrap())?;
                    }
                }

                Ok(())
            },
        )
    }

    /// Drops all but the newest `chunks_in_memory` chunks and clears the
    /// (already exported) pending inserts.
    fn post_export(&mut self) {
        self.imported
            .keys()
            .rev()
            .enumerate()
            .filter(|(index, _)| *index + 1 > self.chunks_in_memory)
            .map(|(_, key)| *key)
            .collect_vec()
            .iter()
            .for_each(|key| {
                self.imported.remove(key);
            });

        self.to_insert.clear();
    }
}
/// Object-safe extension of `AnyMap` for height-indexed maps, exposing the
/// startup bookkeeping needed to resume computation.
pub trait AnyHeightMap: AnyMap {
    // First height that may still be re-orged (None for a fresh map).
    fn get_initial_first_unsafe_height(&self) -> Option<usize>;

    // Height right after the last value found on disk at startup.
    fn get_initial_last_height(&self) -> Option<usize>;

    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync);

    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}
impl<T> AnyHeightMap for HeightMap<T>
where
    T: MapValue,
{
    #[inline(always)]
    fn get_initial_first_unsafe_height(&self) -> Option<usize> {
        self.initial_first_unsafe_height
    }

    #[inline(always)]
    fn get_initial_last_height(&self) -> Option<usize> {
        self.initial_last_height
    }

    // Upcasts for storage in heterogeneous collections of maps.
    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) {
        self
    }

    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap {
        self
    }
}
impl<T> HeightMap<T>
where
T: MapValue,
{
/// Sums the values available over `range`; heights whose chunk isn't
/// loaded are silently skipped.
pub fn sum_range(&self, range: &RangeInclusive<usize>) -> T
where
    T: Sum,
{
    range
        .clone()
        .filter_map(|height| self.get(&height))
        .sum()
}
/// Writes the same `constant` at every given height.
pub fn multi_insert_const(&mut self, heights: &[Height], constant: T) {
    for height in heights {
        self.insert(*height, constant);
    }
}
/// Inserts `transform(source[height])` for every height.
pub fn multi_insert_simple_transform<K, F>(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<K>,
    transform: F,
) where
    K: MapValue,
    F: Fn(K) -> T,
{
    for height in heights {
        let value = transform(source.get_or_import(height));
        self.insert(*height, value);
    }
}
/// Inserts `transform((source[height], height))` for every height; the
/// closure may keep state (`FnMut`) and sees the height alongside the value.
pub fn multi_insert_complex_transform<K, F>(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<K>,
    mut transform: F,
) where
    K: MapValue,
    F: FnMut((K, &usize)) -> T,
{
    for height in heights {
        let value = transform((source.get_or_import(height), height));
        self.insert(*height, value);
    }
}
/// Inserts `added[height] + adder[height]` (both lossily converted to `T`).
pub fn multi_insert_add<A, B>(
    &mut self,
    heights: &[Height],
    added: &mut HeightMap<A>,
    adder: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Add<Output = T>,
{
    for height in heights {
        let lhs = T::lossy_from(added.get_or_import(height));
        let rhs = T::lossy_from(adder.get_or_import(height));
        self.insert(*height, lhs + rhs);
    }
}
/// Inserts `subtracted[height] - subtracter[height]` (both lossily
/// converted to `T`).
pub fn multi_insert_subtract<A, B>(
    &mut self,
    heights: &[Height],
    subtracted: &mut HeightMap<A>,
    subtracter: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Sub<Output = T>,
{
    for height in heights {
        let lhs = T::lossy_from(subtracted.get_or_import(height));
        let rhs = T::lossy_from(subtracter.get_or_import(height));
        self.insert(*height, lhs - rhs);
    }
}
/// Inserts `multiplied[height] * multiplier[height]` (both lossily
/// converted to `T`).
pub fn multi_insert_multiply<A, B>(
    &mut self,
    heights: &[Height],
    multiplied: &mut HeightMap<A>,
    multiplier: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Mul<Output = T>,
{
    for height in heights {
        let lhs = T::lossy_from(multiplied.get_or_import(height));
        let rhs = T::lossy_from(multiplier.get_or_import(height));
        self.insert(*height, lhs * rhs);
    }
}
/// Inserts `divided[height] / divider[height]` as a plain ratio.
pub fn multi_insert_divide<A, B>(
    &mut self,
    heights: &[Height],
    divided: &mut HeightMap<A>,
    divider: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
    self._multi_insert_divide(heights, divided, divider, false)
}
/// Inserts `divided[height] / divider[height]` scaled to a percentage
/// (ratio multiplied by 100).
pub fn multi_insert_percentage<A, B>(
    &mut self,
    heights: &[Height],
    divided: &mut HeightMap<A>,
    divider: &mut HeightMap<B>,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
    self._multi_insert_divide(heights, divided, divider, true)
}
/// Shared implementation of `multi_insert_divide` / `multi_insert_percentage`:
/// ratio of the two sources, optionally scaled by 100.
pub fn _multi_insert_divide<A, B>(
    &mut self,
    heights: &[Height],
    divided: &mut HeightMap<A>,
    divider: &mut HeightMap<B>,
    as_percentage: bool,
) where
    A: MapValue,
    B: MapValue,
    T: LossyFrom<A> + LossyFrom<B>,
    T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
    let scale = if as_percentage {
        T::from(100)
    } else {
        T::from(1)
    };

    for height in heights {
        let numerator = T::lossy_from(divided.get_or_import(height));
        let denominator = T::lossy_from(divider.get_or_import(height));
        self.insert(*height, numerator / denominator * scale);
    }
}
/// Running (all-time) sum of `source`: delegates with no window size.
pub fn multi_insert_cumulative<K>(&mut self, heights: &[Height], source: &mut HeightMap<K>)
where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    self._multi_insert_last_x_sum(heights, source, None)
}
/// Rolling sum of `source` over a window of `block_time` heights.
pub fn multi_insert_last_x_sum<K>(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<K>,
    block_time: usize,
) where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    self._multi_insert_last_x_sum(heights, source, Some(block_time))
}
/// Rolling-sum insert over heights, via the recurrence
/// `sum[h] = sum[h-1] + source[h] - source[(h + 1) - block_time]`
/// (cumulative when `block_time` is `None`).
// NOTE(review): the GenericMap twin subtracts `source[k - len]` instead of
// `source[(k + 1) - len]`; the two window definitions are off by one
// element — confirm which one is intended.
fn _multi_insert_last_x_sum<K>(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<K>,
    block_time: Option<usize>,
) where
    K: MapValue,
    T: LossyFrom<K>,
    T: Add<Output = T> + Sub<Output = T>,
{
    let mut sum = None;

    heights.iter().for_each(|height| {
        // Value leaving the window (zero in cumulative mode or before the
        // window is full).
        let to_subtract = block_time
            .and_then(|x| {
                (height + 1)
                    .checked_sub(x)
                    .map(|previous_height| source.get_or_import(&previous_height))
            })
            .unwrap_or_default();

        // Seed from the previously stored sum when resuming mid-series.
        let previous_sum = sum.unwrap_or_else(|| {
            height
                .checked_sub(1)
                .map(|previous_sum_height| self.get_or_import(&previous_sum_height))
                .unwrap_or_default()
        });

        let last_value = source.get_or_import(height);

        sum.replace(previous_sum + T::lossy_from(last_value) - T::lossy_from(to_subtract));

        self.insert(*height, sum.unwrap());
    });
}
/// Recursive average over `block_time` heights: each step blends the
/// previous average with the newest value as
/// `(prev * (n - 1) + latest) / n`. NaN source values count as 0.
// NOTE(review): the seed lookup uses `checked_sub(block_time)` while the
// GenericMap twin seeds from `checked_sub(1)` — confirm which is intended.
pub fn multi_insert_simple_average<K>(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<K>,
    block_time: usize,
) where
    T: Into<f32> + From<f32>,
    K: MapValue + Sum,
    f32: LossyFrom<K>,
{
    if block_time <= 1 {
        panic!("Average of 1 or less is not useful");
    }

    let mut average = None;

    heights.iter().for_each(|height| {
        let height = *height;

        // Seed from a previously stored average when resuming mid-series.
        let previous_average: f32 = average
            .unwrap_or_else(|| {
                height
                    .checked_sub(block_time)
                    .and_then(|previous_average_height| self.get(&previous_average_height))
                    .unwrap_or_default()
            })
            .into();

        let mut last_value = f32::lossy_from(source.get_or_import(&height));

        if last_value.is_nan() {
            last_value = 0.0;
        }

        average.replace(
            ((previous_average * (block_time as f32 - 1.0) + last_value) / block_time as f32)
                .into(),
        );

        self.insert(height, average.unwrap());
    });
}
pub fn multi_insert_net_change(
&mut self,
heights: &[Height],
source: &mut HeightMap<T>,
block_time: usize,
) where
T: Sub<Output = T>,
{
heights.iter().for_each(|height| {
let height = *height;
let previous_value = height
.checked_sub(block_time)
.map(|height| source.get_or_import(&height))
.unwrap_or_default();
let last_value = source.get_or_import(&height);
let net = last_value - previous_value;
self.insert(height, net);
});
}
/// Median = 50th percentile, over the whole series or a rolling window of
/// `block_time` heights.
pub fn multi_insert_median(
    &mut self,
    heights: &[Height],
    source: &mut HeightMap<T>,
    block_time: Option<usize>,
) where
    T: FloatCore,
{
    source.multi_insert_percentile(heights, vec![(self, 0.5)], block_time);
}
/// Inserts, for every height, the requested percentile(s) of the source
/// values — over the whole series (`block_time == None`) or a rolling
/// window of `block_time` heights.
///
/// Keeps two views of the window: `ordered_vec` (insertion order, used to
/// evict the oldest value) and `sorted_vec` (kept sorted for binary-search
/// lookup). Heights below 160_000 receive NaN; NaN source values are
/// skipped entirely.
// NOTE(review): unlike the GenericMap twin, the rolling branch pops from
// `ordered_vec` WITHOUT checking `ordered_vec.len() == block_time` first,
// so the window shrinks even while it's still being filled — confirm the
// guarded version is the intended behavior.
pub fn multi_insert_percentile(
    &mut self,
    heights: &[Height],
    mut map_and_percentiles: Vec<(&mut HeightMap<T>, f32)>,
    block_time: Option<usize>,
) where
    T: FloatCore,
{
    if block_time.map_or(false, |size| size < 3) {
        panic!("Computing a percentile for a size lower than 3 is useless");
    }

    let mut ordered_vec = None;
    let mut sorted_vec = None;

    let min_percentile_height = 160_000;

    let nan = T::from(f32::NAN).unwrap();
    let two = T::from(2.0).unwrap();

    // The threshold must fall on a chunk boundary for the seeding below.
    if min_percentile_height % HEIGHT_MAP_CHUNK_SIZE != 0 {
        panic!("Should be 0");
    }

    heights.iter().cloned().try_for_each(|height| {
        // Too early in the chain for a meaningful percentile.
        if height < min_percentile_height {
            map_and_percentiles.iter_mut().for_each(|(map, _)| {
                (*map).insert(height, nan);
            });

            return ControlFlow::Continue::<()>(());
        }

        if let Some(start) =
            block_time.map_or(Some(min_percentile_height), |size| height.checked_sub(size))
        {
            if sorted_vec.is_none() {
                // First usable height: build the whole window from scratch.
                let mut vec = (start..=height)
                    .map(|height| self.get_or_import(&height))
                    .filter(|f| !f.is_nan())
                    .map(|f| OrderedFloat(f))
                    .collect_vec();

                if block_time.is_some() {
                    ordered_vec.replace(VecDeque::from(vec.clone()));
                }

                vec.sort_unstable();
                sorted_vec.replace(vec);
            } else {
                // Incremental update: evict the oldest value (rolling mode)
                // and insert the newest at its sorted position.
                let float_value = self.get_or_import(&height);

                if !float_value.is_nan() {
                    let float_value = OrderedFloat(float_value);

                    if block_time.is_some() {
                        let first = ordered_vec.as_mut().unwrap().pop_front().unwrap();
                        let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
                        sorted_vec.as_mut().unwrap().remove(pos);
                        ordered_vec.as_mut().unwrap().push_back(float_value);
                    }

                    let pos = sorted_vec
                        .as_ref()
                        .unwrap()
                        .binary_search(&float_value)
                        .unwrap_or_else(|pos| pos);

                    sorted_vec.as_mut().unwrap().insert(pos, float_value);
                }
            }

            let vec = sorted_vec.as_ref().unwrap();
            let len = vec.len();

            map_and_percentiles
                .iter_mut()
                .for_each(|(map, percentile)| {
                    if !(0.0..=1.0).contains(percentile) {
                        panic!("The percentile should be between 0.0 and 1.0");
                    }

                    let value = {
                        if len < 2 {
                            nan
                        } else {
                            let index = (len - 1) as f32 * *percentile;

                            let fract = index.fract();

                            // Fractional index: average floor/ceil neighbors
                            // (midpoint, not fraction-weighted interpolation).
                            if fract != 0.0 {
                                (vec.get(index.ceil() as usize)
                                    .unwrap_or_else(|| {
                                        dbg!(
                                            index,
                                            &self.path_all,
                                            &self.path_all,
                                            &self.to_insert,
                                            block_time,
                                            vec
                                        );
                                        panic!()
                                    })
                                    .0
                                    + vec
                                        .get(index.floor() as usize)
                                        .unwrap_or_else(|| {
                                            dbg!(
                                                index,
                                                &self.path_all,
                                                &self.path_all,
                                                block_time
                                            );
                                            panic!()
                                        })
                                        .0)
                                    / two
                            } else {
                                vec.get(index as usize).unwrap().0
                            }
                        }
                    };

                    (*map).insert(height, value);
                });
        } else {
            // Not enough history yet for a full rolling window.
            map_and_percentiles.iter_mut().for_each(|(map, _)| {
                (*map).insert(height, nan);
            });
        }

        ControlFlow::Continue(())
    });
}
// pub fn insert_cumulative(&mut self, height: Height, source: &HeightMap<T>) -> T
// where
// T: Add<Output = T> + Sub<Output = T>,
// {
// let previous_cum = height
// .checked_sub(1)
// .map(|previous_sum_height| {
// self.get(&previous_sum_height).unwrap_or_else(|| {
// dbg!(previous_sum_height);
// panic!()
// })
// })
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let cum_value = previous_cum + last_value;
// self.insert(height, cum_value);
// cum_value
// }
// pub fn insert_last_x_sum(&mut self, height: Height, source: &HeightMap<T>, x: usize) -> T
// where
// T: Add<Output = T> + Sub<Output = T>,
// {
// let to_subtract = (height + 1)
// .checked_sub(x)
// .map(|previous_height| {
// source.get(&previous_height).unwrap_or_else(|| {
// dbg!(&self.path_all, &source.path_all, previous_height);
// panic!()
// })
// })
// .unwrap_or_default();
// let previous_sum = height
// .checked_sub(1)
// .map(|previous_sum_height| self.get(&previous_sum_height).unwrap())
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let sum = previous_sum + last_value - to_subtract;
// self.insert(height, sum);
// sum
// }
// pub fn insert_simple_average(&mut self, height: Height, source: &HeightMap<T>, block_time: usize)
// where
// T: Into<f32> + From<f32>,
// {
// let to_subtract: f32 = (height + 1)
// .checked_sub(block_time)
// .map(|previous_height| source.get(&previous_height).unwrap())
// .unwrap_or_default()
// .into();
// let previous_average: f32 = height
// .checked_sub(1)
// .map(|previous_average_height| self.get(&previous_average_height).unwrap())
// .unwrap_or_default()
// .into();
// let last_value: f32 = source.get(&height).unwrap().into();
// let sum = previous_average * block_time as f32 - to_subtract + last_value;
// let average: T = (sum / block_time as f32).into();
// self.insert(height, average);
// }
// pub fn insert_net_change(&mut self, height: Height, source: &HeightMap<T>, offset: usize) -> T
// where
// T: Sub<Output = T>,
// {
// let previous_value = height
// .checked_sub(offset)
// .map(|height| {
// source.get(&height).unwrap_or_else(|| {
// dbg!(&self.path_all, &source.path_all, offset);
// panic!();
// })
// })
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let net = last_value - previous_value;
// self.insert(height, net);
// net
// }
// pub fn insert_median(&mut self, height: Height, source: &HeightMap<T>, size: usize) -> T
// where
// T: FloatCore,
// {
// if size < 3 {
// panic!("Computing a median for a size lower than 3 is useless");
// }
// let median = {
// if let Some(start) = height.checked_sub(size - 1) {
// let even = size % 2 == 0;
// let median_index = size / 2;
// let mut vec = (start..=height)
// .map(|height| {
// OrderedFloat(source.get(&height).unwrap_or_else(|| {
// dbg!(height, &source.path_all, size);
// panic!()
// }))
// })
// .collect_vec();
// vec.sort_unstable();
// if even {
// (vec.get(median_index)
// .unwrap_or_else(|| {
// dbg!(median_index, &self.path_all, &source.path_all, size);
// panic!()
// })
// .0
// + vec.get(median_index - 1).unwrap().0)
// / T::from(2.0).unwrap()
// } else {
// vec.get(median_index).unwrap().0
// }
// } else {
// T::default()
// }
// };
// self.insert(height, median);
// median
// }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,42 @@
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use crate::HEIGHT_MAP_CHUNK_SIZE;
use super::{Height, MapChunkId};
/// Identifies a height-map chunk by its first height (always a multiple of
/// `HEIGHT_MAP_CHUNK_SIZE`).
#[derive(
    Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative, Deref, DerefMut,
)]
pub struct HeightMapChunkId(Height);
impl HeightMapChunkId {
    /// Chunk id of the chunk containing `height` (its height floored to a
    /// multiple of `HEIGHT_MAP_CHUNK_SIZE`).
    pub fn new(height: &Height) -> Self {
        let chunk_start = **height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE;
        Self(Height::new(chunk_start))
    }
}
impl MapChunkId for HeightMapChunkId {
    /// `start..end` file stem of this chunk.
    fn to_name(&self) -> String {
        let start = ***self;
        format!("{}..{}", start, start + HEIGHT_MAP_CHUNK_SIZE)
    }

    /// Parses the `start` part of a `start..end` file stem.
    fn from_name(name: &str) -> Self {
        let start = name.split("..").next().unwrap().parse::<u32>().unwrap();
        Self(Height::new(start))
    }

    fn to_usize(self) -> usize {
        **self as usize
    }

    fn from_usize(id: usize) -> Self {
        Self(Height::new(id as u32))
    }
}

View File

@@ -5,7 +5,7 @@ use std::{
use allocative::Allocative;
use super::WAmount;
use super::Amount;
#[derive(Debug)]
pub struct LiquidityClassification {
@@ -18,8 +18,8 @@ impl LiquidityClassification {
/// Following this:
/// https://insights.glassnode.com/bitcoin-liquid-supply/
/// https://www.desmos.com/calculator/dutgni5rtj
pub fn new(sent: WAmount, received: WAmount) -> Self {
if received == WAmount::ZERO {
pub fn new(sent: Amount, received: Amount) -> Self {
if received == Amount::ZERO {
dbg!(sent, received);
panic!()
}
@@ -29,7 +29,7 @@ impl LiquidityClassification {
panic!("Shouldn't be possible");
}
if sent == WAmount::ZERO {
if sent == Amount::ZERO {
0.0
} else {
let liquidity = sent.to_sat() as f64 / received.to_sat() as f64;

View File

@@ -1,14 +1,25 @@
use std::fmt::Debug;
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use crate::datasets::OHLC;
use super::WNaiveDate;
use super::{Date, Height};
pub trait MapValue:
Clone + Copy + Default + Debug + Serialize + DeserializeOwned + Encode + Decode + Sync + Send
Clone
+ Copy
+ Default
+ Debug
+ Serialize
+ DeserializeOwned
+ Encode
+ Decode
+ Sync
+ Send
+ Allocative
{
}
@@ -18,5 +29,6 @@ impl MapValue for u64 {}
impl MapValue for usize {}
impl MapValue for f32 {}
impl MapValue for f64 {}
impl MapValue for WNaiveDate {}
impl MapValue for Date {}
impl MapValue for OHLC {}
impl MapValue for Height {}

View File

@@ -4,25 +4,31 @@ mod address_realized_data;
mod address_size;
mod address_split;
mod address_type;
mod amount;
mod any_map;
mod bi_map;
mod block_data;
mod block_path;
mod config;
mod counter;
mod date;
mod date_data;
mod date_map;
mod date_map_chunk_id;
mod empty_address_data;
mod generic_map;
mod height;
mod height_map;
mod height_map_chunk_id;
mod liquidity;
mod map_value;
mod partial_txout_data;
mod price;
mod sent_data;
mod serialized_btreemap;
mod serialized_vec;
mod tx_data;
mod txout_index;
mod wamount;
mod wnaivedate;
pub use address::*;
pub use address_data::*;
@@ -30,22 +36,28 @@ pub use address_realized_data::*;
pub use address_size::*;
pub use address_split::*;
pub use address_type::*;
pub use amount::*;
pub use any_map::*;
pub use bi_map::*;
pub use block_data::*;
pub use block_path::*;
pub use config::*;
pub use counter::*;
pub use date::*;
pub use date_data::*;
pub use date_map::*;
pub use date_map_chunk_id::*;
pub use empty_address_data::*;
pub use generic_map::*;
pub use height::*;
pub use height_map::*;
pub use height_map_chunk_id::*;
pub use liquidity::*;
pub use map_value::*;
pub use partial_txout_data::*;
pub use price::*;
pub use sent_data::*;
pub use serialized_btreemap::*;
pub use serialized_vec::*;
pub use tx_data::*;
pub use txout_index::*;
pub use wamount::*;
pub use wnaivedate::*;

View File

@@ -1,14 +1,14 @@
use super::{Address, WAmount};
use super::{Address, Amount};
#[derive(Debug)]
pub struct PartialTxoutData {
pub amount: WAmount,
pub amount: Amount,
pub address: Option<Address>,
pub address_index_opt: Option<u32>,
}
impl PartialTxoutData {
pub fn new(address: Option<Address>, amount: WAmount, address_index_opt: Option<u32>) -> Self {
pub fn new(address: Option<Address>, amount: Amount, address_index_opt: Option<u32>) -> Self {
Self {
address,
amount,

View File

@@ -3,7 +3,7 @@ use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign};
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::WAmount;
use super::Amount;
#[derive(
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Allocative,
@@ -76,18 +76,18 @@ impl SubAssign for Price {
}
}
impl Mul<WAmount> for Price {
impl Mul<Amount> for Price {
type Output = Self;
fn mul(self, rhs: WAmount) -> Self::Output {
Self((self.to_cent() as f64 * rhs.to_sat() as f64 / WAmount::ONE_BTC_F64).round() as u64)
fn mul(self, rhs: Amount) -> Self::Output {
Self((self.to_cent() as f64 * rhs.to_sat() as f64 / Amount::ONE_BTC_F64).round() as u64)
}
}
impl Div<WAmount> for Price {
impl Div<Amount> for Price {
type Output = Self;
fn div(self, rhs: WAmount) -> Self::Output {
Self((self.to_cent() as f64 * WAmount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64)
fn div(self, rhs: Amount) -> Self::Output {
Self((self.to_cent() as f64 * Amount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64)
}
}

View File

@@ -1,13 +1,13 @@
use super::WAmount;
use super::Amount;
#[derive(Default, Debug)]
pub struct SentData {
pub volume: WAmount,
pub volume: Amount,
pub count: u32,
}
impl SentData {
pub fn send(&mut self, amount: WAmount) {
pub fn send(&mut self, amount: Amount) {
self.volume += amount;
self.count += 1;
}

View File

@@ -0,0 +1,51 @@
use std::{collections::BTreeMap, fmt::Debug};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::{MapChunkId, MapKey, MapSerialized, MapValue};
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedBTreeMap<Key, Value>
where
Key: Ord,
{
version: u32,
map: BTreeMap<Key, Value>,
}
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedBTreeMap<Key, Value>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Value: MapValue,
{
    /// Fresh, empty serialized map tagged with `version`.
    fn new(version: u32) -> Self {
        Self {
            map: BTreeMap::default(),
            version,
        }
    }

    /// Highest key present; the chunk id is irrelevant here.
    fn get_last_key(&self, _: &ChunkId) -> Option<Key> {
        self.map.keys().next_back().map(|key| key.to_owned())
    }

    fn version(&self) -> u32 {
        self.version
    }

    fn get(&self, key: &Key) -> Option<&Value> {
        self.map.get(key)
    }

    /// Value stored at the highest key.
    fn last(&self) -> Option<&Value> {
        self.map.values().next_back()
    }

    /// Merges `map` in, overwriting any duplicate keys.
    fn extend(&mut self, map: BTreeMap<Key, Value>) {
        self.map.extend(map)
    }
}

View File

@@ -0,0 +1,59 @@
use std::{cmp::Ordering, collections::BTreeMap, fmt::Debug};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::{MapChunkId, MapKey, MapSerialized, MapValue};
/// Serialized (on-disk) form of a map whose keys are dense, contiguous
/// offsets within a chunk, so the values can be stored in a plain `Vec`
/// (the key is recovered from the element's index — see `extend`).
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedVec<Value> {
    // Snapshot format version (exposed via `MapSerialized::version`).
    version: u32,
    // Values indexed by serialized key; index i holds the value for
    // the chunk's i-th key.
    map: Vec<Value>,
}
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedVec<Value>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Value: MapValue,
{
    /// Creates an empty serialized vec tagged with the given format version.
    fn new(version: u32) -> Self {
        Self {
            version,
            map: vec![],
        }
    }

    /// Reconstructs the "last key" from the chunk's starting key plus the
    /// number of stored values.
    ///
    /// NOTE(review): this yields chunk start + len, i.e. one PAST the last
    /// stored key when `chunk_id` maps to the first key of the chunk, and
    /// `Some(chunk start)` even when the vec is empty — whereas the
    /// `SerializedBTreeMap` sibling returns the last *existing* key (`None`
    /// when empty). Confirm which convention callers expect.
    fn get_last_key(&self, chunk_id: &ChunkId) -> Option<Key> {
        Some(Key::from_usize(chunk_id.to_usize() + self.map.len()))
    }

    /// Format version this snapshot was written with.
    fn version(&self) -> u32 {
        self.version
    }

    /// Looks up the value stored for `serialized_key` (its in-chunk offset).
    fn get(&self, serialized_key: &Key) -> Option<&Value> {
        self.map.get(serialized_key.to_usize())
    }

    /// Returns the most recently appended value, if any.
    fn last(&self) -> Option<&Value> {
        self.map.last()
    }

    /// Merges `map` into the vec. `BTreeMap` iteration is key-ordered, so
    /// contiguous keys arrive in ascending order: existing indices are
    /// overwritten in place, the next index is appended, and any gap is a
    /// caller bug (it would leave uninitialized slots).
    fn extend(&mut self, map: BTreeMap<Key, Value>) {
        map.into_iter().for_each(|(key, value)| {
            let index = key.to_serialized_key().to_usize();
            match self.map.len().cmp(&index) {
                // Key already present: overwrite in place.
                Ordering::Greater => self.map[index] = value,
                // Key is exactly the next free slot: append.
                Ordering::Equal => self.map.push(value),
                // A gap in the key sequence can't be represented by a Vec.
                Ordering::Less => panic!(
                    "SerializedVec::extend: non-contiguous serialized key {} (current len = {})",
                    index,
                    self.map.len()
                ),
            }
        });
    }
}

View File

@@ -1,76 +0,0 @@
use std::{fmt, str::FromStr};
use allocative::{Allocative, Visitor};
use bincode::{
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use chrono::{NaiveDate, TimeZone, Utc};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
/// Newtype wrapper around `chrono::NaiveDate`.
///
/// Exists so the crate can provide its own trait impls on the foreign type
/// (bincode `Encode`/`Decode` as a string, and `Allocative`) — see the impl
/// blocks below. `Deref`/`DerefMut` expose the inner `NaiveDate` API directly.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct WNaiveDate(NaiveDate);
impl WNaiveDate {
    /// Wraps an existing `NaiveDate`.
    pub fn wrap(date: NaiveDate) -> Self {
        Self(date)
    }

    /// Converts a Unix timestamp (seconds since the epoch) to its UTC
    /// calendar date.
    pub fn from_timestamp(timestamp: u32) -> Self {
        Self(
            Utc.timestamp_opt(i64::from(timestamp), 0)
                // Every u32 number of seconds is a valid instant well inside
                // chrono's representable range, and UTC has no DST folds, so
                // the result is always a single unambiguous datetime.
                .single()
                .expect("u32 timestamp is always a valid, unambiguous UTC instant"),
        )
        .map_date()
    }
}
impl fmt::Display for WNaiveDate {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
impl Encode for WNaiveDate {
    // Encodes the date as its string form (via `Display`, which delegates to
    // `NaiveDate`'s `Debug`), rather than as raw integers — the `Decode`
    // impls parse that string back with `NaiveDate::from_str`.
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        Encode::encode(&self.to_string(), encoder)
    }
}
impl Decode for WNaiveDate {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let str: String = Decode::decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl<'de> BorrowDecode<'de> for WNaiveDate {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let str: String = BorrowDecode::borrow_decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl Allocative for WNaiveDate {
    // Memory profiling: report only the wrapper's own (stack) size. The
    // inner `NaiveDate` is a plain `Copy` value with no heap allocations to
    // traverse, so `visit_simple_sized` is sufficient.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}