general: snapshot

This commit is contained in:
k
2024-07-20 23:13:41 +02:00
parent d8a5b4a2e6
commit a145b35ad1
100 changed files with 5402 additions and 2967 deletions

View File

@@ -4,15 +4,15 @@ use crate::{
databases::Databases,
datasets::AllDatasets,
states::States,
structs::WNaiveDate,
structs::{Date, Height},
utils::{log, time},
};
pub struct ExportedData<'a> {
pub databases: Option<&'a mut Databases>,
pub datasets: &'a mut AllDatasets,
pub date: WNaiveDate,
pub height: usize,
pub date: Date,
pub height: Height,
pub states: Option<&'a States>,
}

View File

@@ -8,11 +8,11 @@ use parse::ParseData;
use crate::{
actions::{export, find_first_inserted_unsafe_height, parse},
bitcoin::{check_if_height_safe, BitcoinDB, NUMBER_OF_UNSAFE_BLOCKS},
bitcoin::BitcoinDB,
databases::Databases,
datasets::{AllDatasets, ComputeData},
states::{AddressCohortsDurableStates, States, UTXOCohortsDurableStates},
structs::{DateData, WNaiveDate},
structs::{Date, DateData, MapKey},
utils::{generate_allocation_files, log, time},
};
@@ -44,7 +44,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
log(&format!("Starting parsing at height: {height}"));
let mut block_iter = bitcoin_db.iter_block(height, block_count);
let mut block_iter = bitcoin_db.iter_block(height.to_usize(), block_count);
let mut next_block_opt = None;
let mut blocks_loop_date = None;
@@ -70,7 +70,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
if let Some(current_block) = current_block_opt {
let timestamp = current_block.header.time;
let current_block_date = WNaiveDate::from_timestamp(timestamp);
let current_block_date = Date::from_timestamp(timestamp);
let current_block_height = height + blocks_loop_i;
if states.address_cohorts_durable_states.is_none()
@@ -95,10 +95,14 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
let next_block_date = next_block_opt
.as_ref()
.map(|next_block| WNaiveDate::from_timestamp(next_block.header.time));
.map(|next_block| Date::from_timestamp(next_block.header.time));
// Always run for the first block of the loop
if blocks_loop_date.is_none() {
log(&format!(
"Processing {current_block_date} (height: {height})..."
));
blocks_loop_date.replace(current_block_date);
if states
@@ -112,9 +116,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
.push(DateData::new(current_block_date, vec![]));
}
log(&format!(
"Processing {current_block_date} (height: {height})..."
));
processed_dates.insert(current_block_date);
}
let blocks_loop_date = blocks_loop_date.unwrap();
@@ -154,17 +156,12 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
blocks_loop_i += 1;
if is_date_last_block {
processed_dates.insert(blocks_loop_date);
height += blocks_loop_i;
let is_new_month = next_block_date
.map_or(true, |next_block_date| next_block_date.day() == 1);
let is_close_to_the_end =
height > (block_count - (NUMBER_OF_UNSAFE_BLOCKS * 3));
if is_new_month || is_close_to_the_end {
if is_new_month || height.is_close_to_end(block_count) {
break 'days;
}
@@ -177,7 +174,7 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
}
// Don't remember why -1
let last_height = height - 1;
let last_height = height - 1_u32;
log(&format!(
"Parsing month took {} seconds (last height: {last_height})\n",
@@ -186,15 +183,19 @@ pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Re
if first_unsafe_heights.computed <= last_height {
time("Computing datasets", || {
let dates = processed_dates.into_iter().collect_vec();
let heights = processed_heights.into_iter().collect_vec();
datasets.compute(ComputeData {
dates: &processed_dates.into_iter().collect_vec(),
heights: &processed_heights.into_iter().collect_vec(),
dates: &dates,
heights: &heights,
})
});
}
if should_export {
let is_safe = check_if_height_safe(height, block_count);
let is_safe = height.is_safe(block_count);
export(ExportedData {
databases: is_safe.then_some(&mut databases),

View File

@@ -2,17 +2,18 @@ use crate::{
databases::Databases,
datasets::{AllDatasets, AnyDatasets},
states::States,
structs::Height,
utils::log,
};
#[derive(Default, Debug)]
pub struct Heights {
pub inserted: usize,
pub computed: usize,
pub inserted: Height,
pub computed: Height,
}
impl Heights {
pub fn min(&self) -> usize {
pub fn min(&self) -> Height {
self.inserted.min(self.computed)
}
}
@@ -93,13 +94,13 @@ pub fn find_first_inserted_unsafe_height(
}) {
None
} else {
Some(last_date_height + 1)
Some(last_date_height + 1_u32)
}
})
).unwrap_or_default();
Some(Heights {
inserted: last_safe_height + 1,
inserted: last_safe_height + 1_u32,
computed,
})
}

View File

@@ -17,8 +17,8 @@ use crate::{
States, UTXOCohortsOneShotStates, UTXOCohortsSentStates,
},
structs::{
Address, AddressData, AddressRealizedData, BlockData, BlockPath, Counter, EmptyAddressData,
PartialTxoutData, Price, SentData, TxData, TxoutIndex, WAmount, WNaiveDate,
Address, AddressData, AddressRealizedData, Amount, BlockData, BlockPath, Counter, Date,
EmptyAddressData, Height, PartialTxoutData, Price, SentData, TxData, TxoutIndex,
},
};
@@ -29,9 +29,9 @@ pub struct ParseData<'a> {
pub compute_addresses: bool,
pub databases: &'a mut Databases,
pub datasets: &'a mut AllDatasets,
pub date: WNaiveDate,
pub first_date_height: usize,
pub height: usize,
pub date: Date,
pub first_date_height: Height,
pub height: Height,
pub is_date_last_block: bool,
pub states: &'a mut States,
pub timestamp: u32,
@@ -61,13 +61,11 @@ pub fn parse(
let date_index = states.date_data_vec.len() - 1;
let previous_timestamp = if height > 0 {
Some(
datasets
.block_metadata
.timestamp
.get_or_import(&(height - 1)),
)
let previous_timestamp = if let Some(previous_height) = height.checked_sub(1) {
datasets
.block_metadata
.timestamp
.get_or_import(&Height::new(previous_height))
} else {
None
};
@@ -105,20 +103,20 @@ pub fn parse(
.last_mut()
.unwrap()
.blocks
.push(BlockData::new(height as u32, block_price, timestamp));
.push(BlockData::new(height, block_price, timestamp));
let mut block_path_to_sent_data: BTreeMap<BlockPath, SentData> = BTreeMap::default();
// let mut received_data: ReceivedData = ReceivedData::default();
let mut address_index_to_address_realized_data: BTreeMap<u32, AddressRealizedData> =
BTreeMap::default();
let mut coinbase = WAmount::ZERO;
let mut satblocks_destroyed = WAmount::ZERO;
let mut satdays_destroyed = WAmount::ZERO;
let mut amount_sent = WAmount::ZERO;
let mut coinbase = Amount::ZERO;
let mut satblocks_destroyed = Amount::ZERO;
let mut satdays_destroyed = Amount::ZERO;
let mut amount_sent = Amount::ZERO;
let mut transaction_count = 0;
let mut fees = vec![];
let mut fees_total = WAmount::ZERO;
let mut fees_total = Amount::ZERO;
let (
TxoutsParsingResults {
@@ -183,7 +181,7 @@ pub fn parse(
// ---
let mut utxos = BTreeMap::new();
let mut spendable_amount = WAmount::ZERO;
let mut spendable_amount = Amount::ZERO;
let is_coinbase = tx.is_coinbase();
@@ -191,8 +189,8 @@ pub fn parse(
unreachable!();
}
let mut inputs_sum = WAmount::ZERO;
let mut outputs_sum = WAmount::ZERO;
let mut inputs_sum = Amount::ZERO;
let mut outputs_sum = Amount::ZERO;
let last_block = states.date_data_vec.last_mut_block().unwrap();
@@ -205,7 +203,7 @@ pub fn parse(
panic!("vout can indeed be bigger than u16::MAX !");
}
let amount = WAmount::wrap(tx_out.value);
let amount = Amount::wrap(tx_out.value);
if is_coinbase {
coinbase += amount;
@@ -440,8 +438,7 @@ pub fn parse(
.or_default()
.send(input_amount);
satblocks_destroyed +=
input_amount * (height as u64 - input_block_data.height as u64);
satblocks_destroyed += input_amount * (height - input_block_data.height);
satdays_destroyed += input_amount
* date.signed_duration_since(*input_date_data.date).num_days() as u64;
@@ -569,7 +566,7 @@ pub fn parse(
let block_data =
states.date_data_vec.get_block_data(block_path).unwrap();
if block_data.height != height as u32 {
if block_data.height != height {
states
.utxo_cohorts_durable_states
.as_mut()
@@ -585,7 +582,7 @@ pub fn parse(
let last_block_data = states.date_data_vec.last_block().unwrap();
if last_block_data.height != height as u32 {
if last_block_data.height != height {
unreachable!()
}
@@ -744,7 +741,7 @@ pub fn parse(
compute_addresses,
databases,
date,
date_blocks_range: &(first_date_height..=height),
date_blocks_range: &(*first_date_height..=*height),
date_first_height: first_date_height,
difficulty,
fees: &fees,
@@ -763,7 +760,7 @@ pub fn parse(
pub struct TxoutsParsingResults {
partial_txout_data_vec: Vec<Option<PartialTxoutData>>,
provably_unspendable: WAmount,
provably_unspendable: Amount,
op_returns: usize,
}
@@ -776,7 +773,7 @@ fn pre_process_outputs(
empty_addresses: &mut Counter,
address_to_address_index: &mut AddressToAddressIndex,
) -> TxoutsParsingResults {
let mut provably_unspendable = WAmount::ZERO;
let mut provably_unspendable = Amount::ZERO;
let mut op_returns = 0;
let mut partial_txout_data_vec = block
@@ -785,12 +782,12 @@ fn pre_process_outputs(
.flat_map(|tx| &tx.output)
.map(|txout| {
let script = &txout.script_pubkey;
let amount = WAmount::wrap(txout.value);
let amount = Amount::wrap(txout.value);
// 0 sats outputs are possible and allowed !
// https://mempool.space/tx/2f2442f68e38b980a6c4cec21e71851b0d8a5847d85208331a27321a9967bbd6
// https://bitcoin.stackexchange.com/questions/104937/transaction-outputs-with-value-0
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
return None;
}
@@ -859,7 +856,7 @@ fn pre_process_inputs<'a>(
compute_addresses: bool,
) -> (
BTreeMap<&'a Txid, Option<TxData>>,
BTreeMap<TxoutIndex, (WAmount, Option<u32>)>,
BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
) {
let mut txid_to_tx_data: BTreeMap<&Txid, Option<TxData>> = block
.txdata
@@ -937,7 +934,7 @@ fn compute_address_index_to_address_data(
address_index_to_address_data_db: &mut AddressIndexToAddressData,
address_index_to_empty_address_data_db: &mut AddressIndexToEmptyAddressData,
partial_txout_data_vec: &[Option<PartialTxoutData>],
txout_index_to_amount_and_address_index: &BTreeMap<TxoutIndex, (WAmount, Option<u32>)>,
txout_index_to_amount_and_address_index: &BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
compute_addresses: bool,
) -> BTreeMap<u32, AddressData> {
if !compute_addresses {

View File

@@ -4,6 +4,7 @@ use color_eyre::eyre::eyre;
use serde_json::Value;
use crate::{
structs::Height,
utils::{log, log_output, retry},
Config,
};
@@ -71,10 +72,10 @@ impl BitcoinDaemon {
}
}
pub fn wait_for_new_block(&self, last_block_height: usize) {
pub fn wait_for_new_block(&self, last_block_height: Height) {
log("Waiting for new block...");
while self.get_blockchain_info().headers as usize == last_block_height {
while last_block_height == self.get_blockchain_info().headers {
sleep(Duration::from_secs(5))
}
}

View File

@@ -1,5 +0,0 @@
use super::NUMBER_OF_UNSAFE_BLOCKS;
pub fn check_if_height_safe(height: usize, block_count: usize) -> bool {
height < block_count - NUMBER_OF_UNSAFE_BLOCKS
}

View File

@@ -2,10 +2,8 @@ mod addresses;
mod consts;
mod daemon;
mod db;
mod height;
pub use addresses::*;
pub use consts::*;
pub use daemon::*;
pub use db::*;
pub use height::*;

View File

@@ -1,6 +1,9 @@
use std::{fs, io};
use crate::{structs::WNaiveDate, utils::log};
use crate::{
structs::{Date, Height},
utils::log,
};
use super::databases_folder_path;
@@ -10,7 +13,7 @@ where
{
fn import() -> Self;
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()>;
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()>;
fn folder<'a>() -> &'a str;

View File

@@ -8,7 +8,7 @@ use allocative::Allocative;
use rayon::prelude::*;
use crate::{
structs::{AddressData, WNaiveDate},
structs::{AddressData, Date, Height},
utils::time,
};
@@ -97,7 +97,11 @@ impl AddressIndexToAddressData {
}
fn open_all(&mut self) {
fs::read_dir(databases_folder_path(Self::folder()))
let path = Self::full_path();
fs::create_dir_all(&path).unwrap();
fs::read_dir(path)
.unwrap()
.map(|entry| {
entry
@@ -128,7 +132,7 @@ impl AnyDatabaseGroup for AddressIndexToAddressData {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
mem::take(&mut self.map)
.into_par_iter()
.try_for_each(|(_, db)| db.export())?;

View File

@@ -7,7 +7,7 @@ use std::{
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{EmptyAddressData, WNaiveDate};
use crate::structs::{Date, EmptyAddressData, Height};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
@@ -103,7 +103,7 @@ impl AnyDatabaseGroup for AddressIndexToEmptyAddressData {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
mem::take(&mut self.map)
.into_par_iter()
.try_for_each(|(_, db)| db.export())?;

View File

@@ -3,7 +3,7 @@ use std::{collections::BTreeMap, mem, thread};
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{Address, WNaiveDate};
use crate::structs::{Address, Date, Height};
use super::{
AnyDatabaseGroup, Database, Metadata, SizedDatabase, U8x19, U8x31,
@@ -261,7 +261,7 @@ impl AnyDatabaseGroup for AddressToAddressIndex {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
thread::scope(|s| {
s.spawn(|| {
mem::take(&mut self.p2pk)

View File

@@ -8,7 +8,7 @@ use std::{
use crate::{
io::Binary,
structs::{Counter, WNaiveDate},
structs::{Counter, Date, Height},
};
#[derive(Default, Debug, Encode, Decode, Allocative)]
@@ -39,7 +39,7 @@ impl Metadata {
}
}
pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
if self.last_height.unwrap_or_default() < height {
self.last_height.replace(height);
}
@@ -77,8 +77,8 @@ impl Metadata {
pub struct MetadataData {
pub serial: usize,
pub len: Counter,
pub last_height: Option<usize>,
pub last_date: Option<WNaiveDate>,
pub last_height: Option<Height>,
pub last_date: Option<Date>,
}
impl MetadataData {

View File

@@ -22,7 +22,10 @@ pub use txid_to_tx_data::*;
pub use txout_index_to_address_index::*;
pub use txout_index_to_amount::*;
use crate::{structs::WNaiveDate, utils::time};
use crate::{
structs::{Date, Height},
utils::time,
};
#[derive(Allocative)]
pub struct Databases {
@@ -58,7 +61,7 @@ impl Databases {
}
}
pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
thread::scope(|s| {
s.spawn(|| {
time("> Database txid_to_tx_data", || {
@@ -115,13 +118,13 @@ impl Databases {
let _ = self.txout_index_to_amount.reset();
}
pub fn check_if_needs_to_compute_addresses(&self, height: usize, date: WNaiveDate) -> bool {
let check_height = |last_height: Option<usize>| {
pub fn check_if_needs_to_compute_addresses(&self, height: Height, date: Date) -> bool {
let check_height = |last_height: Option<Height>| {
last_height.map_or(true, |last_height| last_height < height)
};
let check_date =
|last_date: Option<WNaiveDate>| last_date.map_or(true, |last_date| last_date < date);
|last_date: Option<Date>| last_date.map_or(true, |last_date| last_date < date);
let check_metadata = |metadata: &Metadata| {
check_height(metadata.last_height) || check_date(metadata.last_date)
@@ -133,8 +136,8 @@ impl Databases {
pub fn check_if_usable(
&self,
min_initial_last_address_height: Option<usize>,
min_initial_last_address_date: Option<WNaiveDate>,
min_initial_last_address_height: Option<Height>,
min_initial_last_address_date: Option<Date>,
) -> bool {
let are_tx_databases_in_sync = self
.txout_index_to_amount

View File

@@ -8,7 +8,7 @@ use allocative::Allocative;
use bitcoin::Txid;
use rayon::prelude::*;
use crate::structs::{TxData, WNaiveDate};
use crate::structs::{Date, Height, TxData};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase, U8x31};
@@ -127,7 +127,7 @@ impl AnyDatabaseGroup for TxidToTxData {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
mem::take(&mut self.map)
.into_par_iter()
.try_for_each(|(_, db)| db.export())?;

View File

@@ -7,7 +7,7 @@ use std::{
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{TxoutIndex, WNaiveDate};
use crate::structs::{Date, Height, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
@@ -94,7 +94,7 @@ impl AnyDatabaseGroup for TxoutIndexToAddressIndex {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
mem::take(&mut self.map)
.into_par_iter()
.try_for_each(|(_, db)| db.export())?;

View File

@@ -7,12 +7,12 @@ use std::{
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{TxoutIndex, WAmount, WNaiveDate};
use crate::structs::{Amount, Date, Height, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
type Key = TxoutIndex;
type Value = WAmount;
type Value = Amount;
type Database = SizedDatabase<Key, Value>;
#[derive(Allocative)]
@@ -94,7 +94,7 @@ impl AnyDatabaseGroup for TxoutIndexToAmount {
}
}
fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
mem::take(&mut self.map)
.into_par_iter()
.try_for_each(|(_, db)| db.export())?;

View File

@@ -3,7 +3,7 @@ use rayon::prelude::*;
use crate::{
datasets::ComputeData,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, WNaiveDate},
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, Date, Height},
};
use super::MinInitialStates;
@@ -11,23 +11,23 @@ use super::MinInitialStates;
pub trait AnyDataset {
fn get_min_initial_states(&self) -> &MinInitialStates;
fn needs_insert(&self, height: usize, date: WNaiveDate) -> bool {
fn needs_insert(&self, height: Height, date: Date) -> bool {
self.needs_insert_height(height) || self.needs_insert_date(date)
}
#[inline(always)]
fn needs_insert_height(&self, height: usize) -> bool {
fn needs_insert_height(&self, height: Height) -> bool {
!self.to_all_inserted_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.inserted
.first_unsafe_height
.unwrap_or(0)
.unwrap_or(Height::ZERO)
<= height
}
#[inline(always)]
fn needs_insert_date(&self, date: WNaiveDate) -> bool {
fn needs_insert_date(&self, date: Date) -> bool {
!self.to_all_inserted_date_map_vec().is_empty()
&& self
.get_min_initial_states()
@@ -117,18 +117,18 @@ pub trait AnyDataset {
}
#[inline(always)]
fn should_compute_height(&self, height: usize) -> bool {
fn should_compute_height(&self, height: Height) -> bool {
!self.to_all_computed_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.computed
.first_unsafe_height
.unwrap_or(0)
.unwrap_or(Height::ZERO)
<= height
}
#[inline(always)]
fn should_compute_date(&self, date: WNaiveDate) -> bool {
fn should_compute_date(&self, date: Date) -> bool {
!self.to_all_computed_date_map_vec().is_empty()
&& self
.get_min_initial_states()

View File

@@ -1,6 +1,6 @@
use allocative::Allocative;
use crate::structs::{AnyDateMap, AnyHeightMap, WNaiveDate};
use crate::structs::{AnyDateMap, AnyHeightMap, Date, Height};
use super::{AnyDataset, AnyDatasets};
@@ -33,10 +33,10 @@ impl MinInitialStates {
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialState {
pub first_unsafe_date: Option<WNaiveDate>,
pub first_unsafe_height: Option<usize>,
pub last_date: Option<WNaiveDate>,
pub last_height: Option<usize>,
pub first_unsafe_date: Option<Date>,
pub first_unsafe_height: Option<Height>,
pub last_date: Option<Date>,
pub last_height: Option<Height>,
}
enum Mode {
@@ -172,8 +172,8 @@ impl MinInitialState {
fn min_datasets_date(
datasets: &dyn AnyDatasets,
is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<WNaiveDate>,
) -> Option<WNaiveDate> {
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Date>,
) -> Option<Date> {
Self::min_date(
datasets
.to_any_dataset_vec()
@@ -186,8 +186,8 @@ impl MinInitialState {
fn min_datasets_height(
datasets: &dyn AnyDatasets,
is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<usize>,
) -> Option<usize> {
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Height>,
) -> Option<Height> {
Self::min_height(
datasets
.to_any_dataset_vec()
@@ -235,38 +235,38 @@ impl MinInitialState {
#[inline(always)]
fn compute_min_initial_last_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<WNaiveDate> {
) -> Option<Date> {
Self::min_date(arr.iter().map(|map| map.get_initial_last_date()))
}
#[inline(always)]
fn compute_min_initial_last_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<usize> {
) -> Option<Height> {
Self::min_height(arr.iter().map(|map| map.get_initial_last_height()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<WNaiveDate> {
) -> Option<Date> {
Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<usize> {
) -> Option<Height> {
Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height()))
}
#[inline(always)]
fn min_date(iter: impl Iterator<Item = Option<WNaiveDate>>) -> Option<WNaiveDate> {
fn min_date(iter: impl Iterator<Item = Option<Date>>) -> Option<Date> {
iter.min().and_then(|opt| opt)
}
#[inline(always)]
fn min_height(iter: impl Iterator<Item = Option<usize>>) -> Option<usize> {
fn min_height(iter: impl Iterator<Item = Option<Height>>) -> Option<Height> {
iter.min().and_then(|opt| opt)
}
}

View File

@@ -62,7 +62,7 @@ impl AllAddressesMetadataDataset {
}
}
pub fn compute(&mut self, &ComputeData { heights, dates }: &ComputeData) {
pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
self.new_addresses
.multi_insert_net_change(heights, dates, &mut self.created_addreses, 1)
}

View File

@@ -6,7 +6,7 @@ use crate::{
AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset,
},
states::{AddressCohortDurableStates, AddressCohortId},
structs::{AddressSplit, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate},
structs::{AddressSplit, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, Date, Height},
};
use super::cohort_metadata::MetadataDataset;
@@ -60,47 +60,47 @@ impl CohortDataset {
vec![&self.all, &self.illiquid, &self.liquid, &self.highly_liquid]
}
pub fn needs_insert_metadata(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_metadata(&self, height: Height, date: Date) -> bool {
self.metadata.needs_insert(height, date)
}
pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.utxo.needs_insert(height, date))
}
pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.capitalization.needs_insert(height, date))
}
pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.supply.needs_insert(height, date))
}
pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.price_paid.needs_insert(height, date))
}
pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.realized.needs_insert(height, date))
}
pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.unrealized.needs_insert(height, date))
}
pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.input.needs_insert(height, date))

View File

@@ -6,7 +6,11 @@ use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use crate::{states::SplitByAddressCohort, structs::BiMap, WNaiveDate};
use crate::{
states::SplitByAddressCohort,
structs::{BiMap, Height},
Date,
};
use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset};
@@ -59,7 +63,7 @@ impl AddressDatasets {
.for_each(|(cohort, _)| cohort.insert(insert_data))
}
pub fn needs_durable_states(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
let needs_insert_utxo = self.needs_insert_utxo(height, date);
let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
let needs_insert_supply = self.needs_insert_supply(height, date);
@@ -71,57 +75,57 @@ impl AddressDatasets {
|| needs_one_shot_states
}
pub fn needs_one_shot_states(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
}
// pub fn needs_sent_states(&self, height: usize, date: WNaiveDate) -> bool {
// pub fn needs_sent_states(&self, height: Height, date: WNaiveDate) -> bool {
// self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
// }
pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
}
pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
}
pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_supply(height, date))
}
pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
}
// pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
// pub fn needs_insert_realized(&self, height: Height, date: WNaiveDate) -> bool {
// self.cohorts
// .as_vec()
// .iter()
// .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
// }
pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
}
// pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
// pub fn needs_insert_input(&self, height: Height, date: WNaiveDate) -> bool {
// self.cohorts
// .as_vec()
// .iter()

View File

@@ -2,7 +2,7 @@ use allocative::Allocative;
use crate::{
datasets::AnyDataset,
structs::{AnyHeightMap, HeightMap, WNaiveDate},
structs::{AnyHeightMap, Date, HeightMap},
};
use super::{InsertData, MinInitialStates};
@@ -12,7 +12,7 @@ pub struct BlockMetadataDataset {
min_initial_states: MinInitialStates,
// Inserted
pub date: HeightMap<WNaiveDate>,
pub date: HeightMap<Date>,
pub timestamp: HeightMap<u32>,
}
@@ -41,8 +41,7 @@ impl BlockMetadataDataset {
) {
self.timestamp.insert(height, timestamp);
self.date
.insert(height, WNaiveDate::from_timestamp(timestamp));
self.date.insert(height, Date::from_timestamp(timestamp));
}
}

View File

@@ -1,7 +1,7 @@
use allocative::Allocative;
use crate::{
structs::{AnyBiMap, BiMap, DateMap},
structs::{AnyBiMap, BiMap, DateMap, Height},
utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS},
};
@@ -166,8 +166,8 @@ impl CointimeDataset {
pub fn compute(
&mut self,
compute_data: &ComputeData,
first_height: &mut DateMap<usize>,
last_height: &mut DateMap<usize>,
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
closes: &mut BiMap<f32>,
circulating_supply: &mut BiMap<f64>,
realized_cap: &mut BiMap<f32>,
@@ -176,7 +176,7 @@ impl CointimeDataset {
annualized_transaction_volume: &mut BiMap<f32>,
cumulative_subsidy_in_dollars: &mut BiMap<f32>,
) {
let &ComputeData { heights, dates } = compute_data;
let &ComputeData { heights, dates, .. } = compute_data;
self.cumulative_coinblocks_destroyed
.multi_insert_cumulative(heights, dates, &mut self.coinblocks_destroyed);
@@ -403,7 +403,7 @@ impl CointimeDataset {
.multi_insert_complex_transform(
heights,
&mut self.active_cap.height,
|(active_cap, height)| {
|(active_cap, height, ..)| {
let investor_cap = self.investor_cap.height.get(height).unwrap();
(active_cap - investor_cap) / active_cap

View File

@@ -34,7 +34,7 @@ impl ConstantDataset {
Ok(s)
}
pub fn compute(&mut self, &ComputeData { heights, dates }: &ComputeData) {
pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
self._0.multi_insert_const(heights, dates, 0);
self._1.multi_insert_const(heights, dates, 1);
self._50.multi_insert_const(heights, dates, 50);

View File

@@ -2,7 +2,7 @@ use allocative::Allocative;
use crate::{
datasets::AnyDataset,
structs::{AnyDateMap, DateMap},
structs::{AnyDateMap, DateMap, Height},
};
use super::{InsertData, MinInitialStates};
@@ -12,8 +12,8 @@ pub struct DateMetadataDataset {
min_initial_states: MinInitialStates,
// Inserted
pub first_height: DateMap<usize>,
pub last_height: DateMap<usize>,
pub first_height: DateMap<Height>,
pub last_height: DateMap<Height>,
}
impl DateMetadataDataset {

View File

@@ -3,7 +3,9 @@ use allocative::Allocative;
use crate::{
bitcoin::TARGET_BLOCKS_PER_DAY,
datasets::AnyDataset,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, HeightMap, WAmount},
structs::{
Amount, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, Height, HeightMap, MapKey,
},
utils::{BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
@@ -224,7 +226,7 @@ impl MiningDataset {
.height
.insert(height, (block_price * coinbase).to_dollar() as f32);
let sumed_fees = WAmount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum());
let sumed_fees = Amount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum());
self.fees.height.insert(height, sumed_fees.to_btc());
@@ -281,10 +283,10 @@ impl MiningDataset {
self.last_fees_in_dollars
.insert(date, sumed_fees_in_dollars);
let total_blocks_mined = self.total_blocks_mined.insert(date, height + 1);
let total_blocks_mined = self.total_blocks_mined.insert(date, height.to_usize() + 1);
self.blocks_mined
.insert(date, total_blocks_mined - date_first_height);
.insert(date, total_blocks_mined - date_first_height.to_usize());
self.difficulty.date.insert(date, difficulty);
}
@@ -292,8 +294,8 @@ impl MiningDataset {
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
last_height: &mut DateMap<usize>,
&ComputeData { heights, dates, .. }: &ComputeData,
last_height: &mut DateMap<Height>,
) {
self.blocks_mined_1w_sum.multi_insert_last_x_sum(
dates,

View File

@@ -44,31 +44,31 @@ use crate::{
// UTXOCohortsReceivedStates,
UTXOCohortsSentStates,
},
structs::{Price, WAmount, WNaiveDate},
structs::{Amount, Date, Height, Price},
};
pub struct InsertData<'a> {
pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>,
pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>,
pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>,
pub amount_sent: WAmount,
pub amount_sent: Amount,
pub block_interval: u32,
pub block_price: Price,
pub block_size: usize,
pub block_vbytes: u64,
pub block_weight: u64,
pub coinbase: WAmount,
pub coinbase: Amount,
pub compute_addresses: bool,
pub databases: &'a Databases,
pub date: WNaiveDate,
pub date_blocks_range: &'a RangeInclusive<usize>,
pub date_first_height: usize,
pub date: Date,
pub date_blocks_range: &'a RangeInclusive<u32>,
pub date_first_height: Height,
pub difficulty: f64,
pub fees: &'a Vec<WAmount>,
pub height: usize,
pub fees: &'a Vec<Amount>,
pub height: Height,
pub is_date_last_block: bool,
pub satblocks_destroyed: WAmount,
pub satdays_destroyed: WAmount,
pub satblocks_destroyed: Amount,
pub satdays_destroyed: Amount,
pub states: &'a States,
pub timestamp: u32,
pub transaction_count: usize,
@@ -78,8 +78,8 @@ pub struct InsertData<'a> {
}
pub struct ComputeData<'a> {
pub heights: &'a [usize],
pub dates: &'a [WNaiveDate],
pub heights: &'a [Height],
pub dates: &'a [Date],
}
#[derive(Allocative)]

View File

@@ -10,7 +10,7 @@ pub use ohlc::*;
use crate::{
price::{Binance, Kraken},
structs::{AnyBiMap, AnyDateMap, BiMap, DateMap, WNaiveDate},
structs::{AnyBiMap, AnyDateMap, BiMap, Date, DateMap, Height, MapKey},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
@@ -20,7 +20,7 @@ use super::{AnyDataset, ComputeData, MinInitialStates, RatioDataset};
pub struct PriceDatasets {
min_initial_states: MinInitialStates,
kraken_daily: Option<BTreeMap<WNaiveDate, OHLC>>,
kraken_daily: Option<BTreeMap<Date, OHLC>>,
kraken_1mn: Option<BTreeMap<u32, OHLC>>,
binance_1mn: Option<BTreeMap<u32, OHLC>>,
binance_har: Option<BTreeMap<u32, OHLC>>,
@@ -90,8 +90,8 @@ impl PriceDatasets {
kraken_daily: None,
satonomics_by_height: BTreeMap::default(),
ohlcs: BiMap::new_json(1, &format!("{price_path}/ohlc")),
closes: BiMap::new_json(1, &f("close")),
ohlcs: BiMap::new_json(1, price_path),
closes: BiMap::new_bin(1, &f("close")),
market_cap: BiMap::new_bin(1, &f("market_cap")),
price_1w_sma: BiMap::new_bin(1, &f("price_1w_sma")),
price_1w_sma_ratio: RatioDataset::import(datasets_path, "price_1w_sma")?,
@@ -139,7 +139,7 @@ impl PriceDatasets {
}
pub fn compute(&mut self, compute_data: &ComputeData, circulating_supply: &mut BiMap<f64>) {
let &ComputeData { dates, heights } = compute_data;
let &ComputeData { dates, heights, .. } = compute_data;
self.closes
.multi_insert_simple_transform(heights, dates, &mut self.ohlcs, &|ohlc| ohlc.close);
@@ -265,7 +265,7 @@ impl PriceDatasets {
|(last_value, date, closes)| {
let previous_value = date
.checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64))
.and_then(|date| closes.get_or_import(&WNaiveDate::wrap(date)))
.and_then(|date| closes.get_or_import(&Date::wrap(date)))
.unwrap_or_default();
(((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0
@@ -300,8 +300,8 @@ impl PriceDatasets {
.compute(compute_data, &mut self.closes, &mut self.price_200w_sma);
}
pub fn get_date_ohlc(&mut self, date: WNaiveDate) -> color_eyre::Result<OHLC> {
if self.ohlcs.date.is_date_safe(date) {
pub fn get_date_ohlc(&mut self, date: Date) -> color_eyre::Result<OHLC> {
if self.ohlcs.date.is_key_safe(date) {
Ok(self.ohlcs.date.get(&date).unwrap().to_owned())
} else {
let ohlc = self.get_from_daily_kraken(&date)?;
@@ -312,7 +312,7 @@ impl PriceDatasets {
}
}
fn get_from_daily_kraken(&mut self, date: &WNaiveDate) -> color_eyre::Result<OHLC> {
fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
if self.kraken_daily.is_none() {
self.kraken_daily.replace(
Kraken::fetch_daily_prices()
@@ -330,7 +330,7 @@ impl PriceDatasets {
pub fn get_height_ohlc(
&mut self,
height: usize,
height: Height,
timestamp: u32,
previous_timestamp: Option<u32>,
) -> color_eyre::Result<OHLC> {
@@ -351,7 +351,7 @@ impl PriceDatasets {
let timestamp = clean_timestamp(timestamp);
if previous_timestamp.is_none() && height > 0 {
if previous_timestamp.is_none() && !height.is_first() {
panic!("Shouldn't be possible");
}
@@ -364,7 +364,7 @@ impl PriceDatasets {
.unwrap_or_else(|_| {
self.get_from_har_binance(timestamp, previous_timestamp)
.unwrap_or_else(|_| {
let date = WNaiveDate::from_timestamp(timestamp);
let date = Date::from_timestamp(timestamp);
panic!(
"Can't find the price for: height: {height} - date: {date}

View File

@@ -79,7 +79,7 @@ impl CapitalizationDataset {
closes: &mut BiMap<f32>,
cohort_supply: &mut BiMap<f64>,
) {
let &ComputeData { heights, dates } = compute_data;
let &ComputeData { heights, dates, .. } = compute_data;
self.realized_price.multi_insert_divide(
heights,

View File

@@ -5,7 +5,7 @@ mod input;
mod price_paid;
mod ratio;
mod realized;
// mod recap;
mod recap;
mod supply;
mod unrealized;
mod utxo;
@@ -15,7 +15,7 @@ pub use input::*;
pub use price_paid::*;
pub use ratio::*;
pub use realized::*;
// pub use recap::*;
pub use recap::*;
pub use supply::*;
pub use unrealized::*;
pub use utxo::*;

View File

@@ -4,7 +4,7 @@ use itertools::Itertools;
use crate::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::PricePaidState,
structs::{AnyBiMap, BiMap, WNaiveDate},
structs::{AnyBiMap, BiMap, Date, Height},
};
#[derive(Default, Allocative)]
@@ -217,13 +217,13 @@ impl PricePaidSubDataset {
}
}
fn insert_height_default(&mut self, height: usize) {
fn insert_height_default(&mut self, height: Height) {
self.inserted_as_mut_vec().into_iter().for_each(|bi| {
bi.height.insert_default(height);
})
}
fn insert_date_default(&mut self, date: WNaiveDate) {
fn insert_date_default(&mut self, date: Date) {
self.inserted_as_mut_vec().into_iter().for_each(|bi| {
bi.date.insert_default(date);
})

View File

@@ -68,7 +68,7 @@ impl RatioDataset {
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
market_price: &mut BiMap<f32>,
other_price: &mut BiMap<f32>,
) {
@@ -112,9 +112,13 @@ impl RatioDataset {
self.ratio_1y_sma_momentum_oscillator
.height
.multi_insert_complex_transform(heights, &mut self.ratio.height, |(ratio, height)| {
(ratio / self.ratio_1y_sma.height.get_or_import(height)) - 1.0
});
.multi_insert_complex_transform(
heights,
&mut self.ratio.height,
|(ratio, height, ..)| {
(ratio / self.ratio_1y_sma.height.get_or_import(height).unwrap()) - 1.0
},
);
self.ratio_1y_sma_momentum_oscillator
.date

View File

@@ -130,7 +130,7 @@ impl RealizedSubDataset {
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
market_cap: &mut BiMap<f32>,
) {
self.negative_realized_loss.multi_insert_simple_transform(

View File

@@ -6,33 +6,40 @@ use crate::{
DateMap, HeightMap,
};
#[derive(Default, Allocative)]
#[derive(Allocative)]
pub enum RecapTime {
Insert,
Compute,
}
#[derive(Allocative)]
pub struct RecapDataset<T> {
min_initial_states: MinInitialStates,
time: RecapTime,
// Computed
min: Option<DateMap<T>>,
max: Option<DateMap<T>>,
median: Option<DateMap<T>>,
average: Option<DateMap<T>>,
sum: Option<DateMap<T>>,
max: Option<DateMap<T>>,
_90p: Option<DateMap<T>>,
_75p: Option<DateMap<T>>,
median: Option<DateMap<T>>,
_25p: Option<DateMap<T>>,
_10p: Option<DateMap<T>>,
min: Option<DateMap<T>>,
}
#[derive(Default)]
struct RecapOptions {
min: bool,
max: bool,
median: bool,
pub struct RecapOptions {
average: bool,
sum: bool,
max: bool,
_90p: bool,
_75p: bool,
median: bool,
_25p: bool,
_10p: bool,
min: bool,
}
impl RecapOptions {
@@ -77,11 +84,16 @@ impl<T> RecapDataset<T>
where
T: MapValue,
{
pub fn import(parent_path: &str, options: RecapOptions) -> color_eyre::Result<Self> {
pub fn import(
parent_path: &str,
time: RecapTime,
options: RecapOptions,
) -> color_eyre::Result<Self> {
let f = |s: &str| format!("{parent_path}/{s}");
let mut s = Self {
min_initial_states: MinInitialStates::default(),
time,
min: options.min.then(|| DateMap::new_bin(1, &f("min"))),
max: options.max.then(|| DateMap::new_bin(1, &f("max"))),
@@ -102,44 +114,48 @@ where
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
source: &mut HeightMap<f32>,
) {
if let Some(min) = self.min.as_ref() {
// v.push(min);
}
dates.iter().enumerate().for_each(|(index, date)| {
// let heights = heights_by_date.get(index).unwrap();
if let Some(max) = self.max.as_ref() {
// v.push(max);
}
if let Some(sum) = self.sum.as_ref() {
// v.push(sum);
}
if let Some(median) = self.median.as_ref() {
// v.push(median);
}
if let Some(average) = self.average.as_ref() {
// v.push(average);
}
if let Some(average) = self.average.as_ref() {
// v.push(average);
}
if let Some(max) = self.max.as_ref() {
// v.push(max);
}
if let Some(sum) = self.sum.as_ref() {
// v.push(sum);
}
if let Some(_90p) = self._90p.as_ref() {
// v.push(_90p);
}
if let Some(_90p) = self._90p.as_ref() {
// v.push(_90p);
}
if let Some(_75p) = self._75p.as_ref() {
// v.push(_75p);
}
if let Some(_75p) = self._75p.as_ref() {
// v.push(_75p);
}
if let Some(median) = self.median.as_ref() {
// v.push(median);
}
if let Some(_25p) = self._25p.as_ref() {
// v.push(_25p);
}
if let Some(_25p) = self._25p.as_ref() {
// v.push(_25p);
}
if let Some(_10p) = self._10p.as_ref() {
// v.push(_10p);
}
if let Some(_10p) = self._10p.as_ref() {
// v.push(_10p);
}
if let Some(min) = self.min.as_ref() {
// v.push(min);
}
});
}
}

View File

@@ -70,7 +70,7 @@ impl SupplySubDataset {
#[allow(unused_variables)]
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
circulating_supply: &mut BiMap<f64>,
) {
self.supply_to_circulating_supply_ratio

View File

@@ -115,7 +115,7 @@ impl UnrealizedSubDataset {
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
own_supply: &mut BiMap<f64>,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,

View File

@@ -107,7 +107,7 @@ impl TransactionDataset {
pub fn compute(
&mut self,
&ComputeData { heights, dates }: &ComputeData,
&ComputeData { heights, dates, .. }: &ComputeData,
circulating_supply: &mut BiMap<f64>,
block_interval: &mut HeightMap<u32>,
) {

View File

@@ -6,7 +6,7 @@ use crate::{
AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset,
},
states::UTXOCohortId,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate},
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, Date, Height},
};
#[derive(Default, Allocative)]
@@ -122,31 +122,31 @@ impl UTXODataset {
// }
}
pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.subs.utxo.needs_insert(height, date)
}
pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.subs.capitalization.needs_insert(height, date)
}
pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.subs.supply.needs_insert(height, date)
}
pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.subs.price_paid.needs_insert(height, date)
}
pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
self.subs.realized.needs_insert(height, date)
}
pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.subs.unrealized.needs_insert(height, date)
}
pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
self.subs.input.needs_insert(height, date)
}

View File

@@ -9,7 +9,7 @@ use itertools::Itertools;
use crate::{
datasets::AnyDatasets,
states::{SplitByUTXOCohort, UTXOCohortId},
structs::{BiMap, WNaiveDate},
structs::{BiMap, Date, Height},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
@@ -55,7 +55,7 @@ impl UTXODatasets {
.for_each(|(cohort, _)| cohort.insert(insert_data))
}
pub fn needs_durable_states(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
let needs_insert_utxo = self.needs_insert_utxo(height, date);
let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
let needs_insert_supply = self.needs_insert_supply(height, date);
@@ -67,51 +67,51 @@ impl UTXODatasets {
|| needs_one_shot_states
}
pub fn needs_one_shot_states(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
}
pub fn needs_sent_states(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_sent_states(&self, height: Height, date: Date) -> bool {
self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
}
pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
}
pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
}
pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_supply(height, date))
}
pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
}
pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_realized(height, date))
}
pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
}
pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
self.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_input(height, date))

View File

@@ -1,11 +1,9 @@
mod binary;
mod consts;
mod json;
mod path;
mod serialization;
pub use binary::*;
pub use consts::*;
pub use json::*;
pub use path::*;
pub use serialization::*;

View File

@@ -1,3 +0,0 @@
pub fn format_path(path: &str) -> String {
path.replace(['-', '_', ' '], "/")
}

View File

@@ -14,7 +14,7 @@ pub use crate::{
datasets::OHLC,
io::{Binary, Json, Serialization},
structs::{
Config, DateMap, HeightMap, SerializedDateMap, SerializedHeightMap, WNaiveDate,
Config, Date, DateMap, Height, HeightMap, MapChunkId, SerializedBTreeMap, SerializedVec,
HEIGHT_MAP_CHUNK_SIZE,
},
utils::log,

View File

@@ -1,6 +1,6 @@
use std::{path::Path, thread::sleep, time::Duration};
use parser::{iter_blocks, log, BitcoinDB, BitcoinDaemon, Config};
use parser::{iter_blocks, log, BitcoinDB, BitcoinDaemon, Config, Height};
fn main() -> color_eyre::Result<()> {
color_eyre::install()?;
@@ -38,7 +38,7 @@ fn main() -> color_eyre::Result<()> {
daemon.start();
if daemon.check_if_fully_synced() {
daemon.wait_for_new_block(block_count - 1);
daemon.wait_for_new_block(Height::new(block_count as u32 - 1));
} else {
daemon.wait_sync();
}

View File

@@ -9,7 +9,7 @@ use serde_json::Value;
use crate::{
datasets::OHLC,
io::{Json, IMPORTS_FOLDER_PATH},
structs::WNaiveDate,
structs::Date,
utils::{log, retry},
};
@@ -150,7 +150,7 @@ impl Binance {
)
}
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<WNaiveDate, OHLC>> {
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
log("binance: fetch 1d");
retry(
@@ -168,7 +168,7 @@ impl Binance {
// [timestamp, open, high, low, close, volume, ...]
let array = value.as_array().unwrap();
let date = WNaiveDate::from_timestamp(
let date = Date::from_timestamp(
array.first().unwrap().as_u64().unwrap() as u32 / 1000,
);

View File

@@ -5,7 +5,7 @@ use serde_json::Value;
use crate::{
datasets::OHLC,
structs::WNaiveDate,
structs::Date,
utils::{log, retry},
};
@@ -66,7 +66,7 @@ impl Kraken {
)
}
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<WNaiveDate, OHLC>> {
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
log("fetch kraken daily");
retry(
@@ -91,9 +91,8 @@ impl Kraken {
.map(|value| {
let array = value.as_array().unwrap();
let date = WNaiveDate::from_timestamp(
array.first().unwrap().as_u64().unwrap() as u32,
);
let date =
Date::from_timestamp(array.first().unwrap().as_u64().unwrap() as u32);
let get_f32 = |index: usize| {
array

View File

@@ -2,14 +2,14 @@ use allocative::Allocative;
use crate::{
states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{LiquiditySplitResult, Price, SplitByLiquidity, WAmount},
structs::{Amount, LiquiditySplitResult, Price, SplitByLiquidity},
};
#[derive(Default, Debug, Allocative)]
pub struct AddressCohortDurableStates {
pub address_count: usize,
pub split_durable_states: SplitByLiquidity<DurableStates>,
pub price_to_split_amount: PriceToValue<SplitByLiquidity<WAmount>>,
pub price_to_split_amount: PriceToValue<SplitByLiquidity<Amount>>,
}
const ONE_THIRD: f64 = 1.0 / 3.0;
@@ -19,7 +19,7 @@ impl AddressCohortDurableStates {
#[allow(clippy::too_many_arguments)]
pub fn increment(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
realized_cap: Price,
mean_price_paid: Price,
@@ -44,7 +44,7 @@ impl AddressCohortDurableStates {
#[allow(clippy::too_many_arguments)]
pub fn decrement(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
realized_cap: Price,
mean_price_paid: Price,
@@ -69,7 +69,7 @@ impl AddressCohortDurableStates {
#[allow(clippy::too_many_arguments)]
pub fn _crement(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
realized_cap: Price,
mean_price_paid: Price,
@@ -98,7 +98,7 @@ impl AddressCohortDurableStates {
let illiquid_amount = split_sat_amount_result.illiquid.trunc();
let illiquid_amount_rest = split_sat_amount_result.illiquid - illiquid_amount;
let mut illiquid_amount = WAmount::from_sat(illiquid_amount as u64);
let mut illiquid_amount = Amount::from_sat(illiquid_amount as u64);
let mut illiquid_utxo_count = split_utxo_count_result.illiquid.trunc() as usize;
let illiquid_utxo_count_rest = split_utxo_count_result.illiquid.fract();
let mut illiquid_realized_cap =
@@ -107,7 +107,7 @@ impl AddressCohortDurableStates {
let liquid_amount = split_sat_amount_result.liquid.trunc();
let liquid_amount_rest = split_sat_amount_result.liquid - liquid_amount;
let mut liquid_amount = WAmount::from_sat(liquid_amount as u64);
let mut liquid_amount = Amount::from_sat(liquid_amount as u64);
let mut liquid_utxo_count = split_utxo_count_result.liquid.trunc() as usize;
let liquid_utxo_count_rest = split_utxo_count_result.liquid.fract();
let mut liquid_realized_cap =
@@ -120,7 +120,7 @@ impl AddressCohortDurableStates {
realized_cap - illiquid_realized_cap - liquid_realized_cap;
let amount_diff = amount - illiquid_amount - liquid_amount - highly_liquid_amount;
if amount_diff > WAmount::ZERO {
if amount_diff > Amount::ZERO {
if illiquid_amount_rest >= ONE_THIRD && illiquid_amount_rest > liquid_amount_rest {
illiquid_amount += amount_diff;
} else if illiquid_amount_rest >= ONE_THIRD {
@@ -337,7 +337,7 @@ impl AddressCohortDurableStates {
);
}
if split_amount.illiquid > WAmount::ZERO {
if split_amount.illiquid > Amount::ZERO {
one_shot_states_ref.illiquid.price_paid_state.iterate(
price_paid,
split_amount.illiquid,
@@ -359,7 +359,7 @@ impl AddressCohortDurableStates {
}
}
if split_amount.liquid > WAmount::ZERO {
if split_amount.liquid > Amount::ZERO {
one_shot_states_ref.liquid.price_paid_state.iterate(
price_paid,
split_amount.liquid,
@@ -381,7 +381,7 @@ impl AddressCohortDurableStates {
}
}
if split_amount.highly_liquid > WAmount::ZERO {
if split_amount.highly_liquid > Amount::ZERO {
one_shot_states_ref.highly_liquid.price_paid_state.iterate(
price_paid,
split_amount.highly_liquid,

View File

@@ -2,7 +2,7 @@ use derive_deref::{Deref, DerefMut};
use crate::{
states::InputState,
structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount},
structs::{AddressRealizedData, Amount, LiquidityClassification, SplitByLiquidity},
};
use super::SplitByAddressCohort;
@@ -27,17 +27,17 @@ impl AddressCohortsInputStates {
state.illiquid.iterate(
split_count.illiquid,
WAmount::from_sat(split_volume.illiquid.round() as u64),
Amount::from_sat(split_volume.illiquid.round() as u64),
);
state.liquid.iterate(
split_count.liquid,
WAmount::from_sat(split_volume.liquid.round() as u64),
Amount::from_sat(split_volume.liquid.round() as u64),
);
state.highly_liquid.iterate(
split_count.highly_liquid,
WAmount::from_sat(split_volume.highly_liquid.round() as u64),
Amount::from_sat(split_volume.highly_liquid.round() as u64),
);
Ok(())

View File

@@ -2,7 +2,7 @@ use derive_deref::{Deref, DerefMut};
use crate::{
states::OutputState,
structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount},
structs::{AddressRealizedData, Amount, LiquidityClassification, SplitByLiquidity},
};
use super::SplitByAddressCohort;
@@ -27,17 +27,17 @@ impl AddressCohortsOutputStates {
state.illiquid.iterate(
split_count.illiquid,
WAmount::from_sat(split_volume.illiquid.round() as u64),
Amount::from_sat(split_volume.illiquid.round() as u64),
);
state.liquid.iterate(
split_count.liquid,
WAmount::from_sat(split_volume.liquid.round() as u64),
Amount::from_sat(split_volume.liquid.round() as u64),
);
state.highly_liquid.iterate(
split_count.highly_liquid,
WAmount::from_sat(split_volume.highly_liquid.round() as u64),
Amount::from_sat(split_volume.highly_liquid.round() as u64),
);
Ok(())

View File

@@ -1,7 +1,7 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::{Price, WAmount};
use crate::structs::{Amount, Price};
use super::{CapitalizationState, SupplyState, UTXOState};
@@ -15,11 +15,11 @@ pub struct DurableStates {
impl DurableStates {
pub fn increment(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
realized_cap: Price,
) -> color_eyre::Result<()> {
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
if utxo_count != 0 {
dbg!(amount, utxo_count);
return Err(eyre!("Shouldn't be possible"));
@@ -35,11 +35,11 @@ impl DurableStates {
pub fn decrement(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
realized_cap: Price,
) -> color_eyre::Result<()> {
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
if utxo_count != 0 {
dbg!(amount, utxo_count);
unreachable!("Shouldn't be possible")

View File

@@ -1,13 +1,13 @@
use crate::structs::WAmount;
use crate::structs::Amount;
#[derive(Debug, Default)]
pub struct InputState {
pub count: f64,
pub volume: WAmount,
pub volume: Amount,
}
impl InputState {
pub fn iterate(&mut self, count: f64, volume: WAmount) {
pub fn iterate(&mut self, count: f64, volume: Amount) {
self.count += count;
self.volume += volume;
}

View File

@@ -1,13 +1,13 @@
use crate::structs::WAmount;
use crate::structs::Amount;
#[derive(Debug, Default)]
pub struct OutputState {
pub count: f64,
pub volume: WAmount,
pub volume: Amount,
}
impl OutputState {
pub fn iterate(&mut self, count: f64, volume: WAmount) {
pub fn iterate(&mut self, count: f64, volume: Amount) {
self.count += count;
self.volume += volume;
}

View File

@@ -1,4 +1,4 @@
use crate::structs::{Price, WAmount};
use crate::structs::{Amount, Price};
#[derive(Default, Debug)]
pub struct PricePaidState {
@@ -22,11 +22,11 @@ pub struct PricePaidState {
pub pp_90p: Option<Price>,
pub pp_95p: Option<Price>,
pub processed_amount: WAmount,
pub processed_amount: Amount,
}
impl PricePaidState {
pub fn iterate(&mut self, price: Price, amount: WAmount, total_supply: WAmount) {
pub fn iterate(&mut self, price: Price, amount: Amount, total_supply: Amount) {
let PricePaidState {
processed_amount,
pp_05p,

View File

@@ -8,7 +8,7 @@ use allocative::Allocative;
use color_eyre::eyre::eyre;
use derive_deref::{Deref, DerefMut};
use crate::structs::{Price, SplitByLiquidity, WAmount};
use crate::structs::{Amount, Price, SplitByLiquidity};
#[derive(Deref, DerefMut, Default, Debug, Allocative)]
pub struct PriceToValue<T>(BTreeMap<u32, T>);
@@ -82,13 +82,13 @@ pub trait CanSubtract {
fn can_subtract(&self, other: &Self) -> bool;
}
impl CanSubtract for WAmount {
impl CanSubtract for Amount {
fn can_subtract(&self, other: &Self) -> bool {
self >= other
}
}
impl CanSubtract for SplitByLiquidity<WAmount> {
impl CanSubtract for SplitByLiquidity<Amount> {
fn can_subtract(&self, other: &Self) -> bool {
self.all >= other.all
&& self.illiquid >= other.illiquid
@@ -101,23 +101,23 @@ pub trait IsZero {
fn is_zero(&self) -> color_eyre::Result<bool>;
}
impl IsZero for WAmount {
impl IsZero for Amount {
fn is_zero(&self) -> color_eyre::Result<bool> {
Ok(*self == WAmount::ZERO)
Ok(*self == Amount::ZERO)
}
}
impl IsZero for SplitByLiquidity<WAmount> {
impl IsZero for SplitByLiquidity<Amount> {
fn is_zero(&self) -> color_eyre::Result<bool> {
if self.all == WAmount::ZERO
&& (self.illiquid != WAmount::ZERO
|| self.liquid != WAmount::ZERO
|| self.highly_liquid != WAmount::ZERO)
if self.all == Amount::ZERO
&& (self.illiquid != Amount::ZERO
|| self.liquid != Amount::ZERO
|| self.highly_liquid != Amount::ZERO)
{
dbg!(&self);
Err(eyre!("Bad split"))
} else {
Ok(self.all == WAmount::ZERO)
Ok(self.all == Amount::ZERO)
}
}
}

View File

@@ -1,19 +1,19 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::WAmount;
use crate::structs::Amount;
#[derive(Debug, Default, Allocative)]
pub struct SupplyState {
pub supply: WAmount,
pub supply: Amount,
}
impl SupplyState {
pub fn increment(&mut self, amount: WAmount) {
pub fn increment(&mut self, amount: Amount) {
self.supply += amount;
}
pub fn decrement(&mut self, amount: WAmount) -> color_eyre::Result<()> {
pub fn decrement(&mut self, amount: Amount) -> color_eyre::Result<()> {
if self.supply < amount {
dbg!(self.supply, amount);

View File

@@ -1,17 +1,17 @@
use std::{cmp::Ordering, ops::Add};
use crate::structs::{Price, WAmount};
use crate::structs::{Amount, Price};
#[derive(Debug, Default)]
pub struct UnrealizedState {
pub supply_in_profit: WAmount,
pub supply_in_profit: Amount,
pub unrealized_profit: Price,
pub unrealized_loss: Price,
}
impl UnrealizedState {
#[inline]
pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: WAmount) {
pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: Amount) {
match price_then.cmp(&price_now) {
Ordering::Less => {
self.unrealized_profit += (price_now - price_then) * amount;

View File

@@ -2,19 +2,19 @@ use allocative::Allocative;
use crate::{
states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{Price, WAmount},
structs::{Amount, Price},
};
#[derive(Default, Debug, Allocative)]
pub struct UTXOCohortDurableStates {
pub durable_states: DurableStates,
pub price_to_amount: PriceToValue<WAmount>,
pub price_to_amount: PriceToValue<Amount>,
}
impl UTXOCohortDurableStates {
pub fn increment(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
price: Price,
) -> color_eyre::Result<()> {
@@ -23,7 +23,7 @@ impl UTXOCohortDurableStates {
pub fn decrement(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
price: Price,
) -> color_eyre::Result<()> {
@@ -32,7 +32,7 @@ impl UTXOCohortDurableStates {
pub fn _crement(
&mut self,
amount: WAmount,
amount: Amount,
utxo_count: usize,
price: Price,
increment: bool,

View File

@@ -5,9 +5,9 @@ use rayon::prelude::*;
use crate::{
states::DateDataVec,
structs::{BlockData, Price, SentData, WAmount},
structs::{Amount, BlockData, Price, SentData},
utils::difference_in_days_between_timestamps,
WNaiveDate,
Date,
};
use super::{SplitByUTXOCohort, UTXOCohortDurableStates, UTXOCohortsOneShotStates};
@@ -33,7 +33,7 @@ impl UTXOCohortsDurableStates {
let utxo_count = block_data.utxos as usize;
// No need to either insert or remove if 0
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
return;
}
@@ -65,12 +65,12 @@ impl UTXOCohortsDurableStates {
let price = block_data.price;
// No need to either insert or remove if 0
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
return;
}
if block_data.height == last_block_data.height {
let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32;
let year = Date::from_timestamp(block_data.timestamp).year() as u32;
self.initial_filtered_apply(&0, &year, |state| {
state.increment(amount, utxo_count, price).unwrap();
@@ -118,7 +118,7 @@ impl UTXOCohortsDurableStates {
let utxo_count = sent_data.count as usize;
// No need to either insert or remove if 0
if amount == WAmount::ZERO {
if amount == Amount::ZERO {
return;
}
@@ -127,7 +127,7 @@ impl UTXOCohortsDurableStates {
previous_last_block_data.timestamp,
);
let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32;
let year = Date::from_timestamp(block_data.timestamp).year() as u32;
self.initial_filtered_apply(&days_old, &year, |state| {
state

View File

@@ -2,14 +2,14 @@ use allocative::Allocative;
use color_eyre::eyre::eyre;
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use super::{AddressType, EmptyAddressData, LiquidityClassification, Price, WAmount};
use super::{AddressType, Amount, EmptyAddressData, LiquidityClassification, Price};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Allocative)]
pub struct AddressData {
pub address_type: AddressType,
pub amount: WAmount,
pub sent: WAmount,
pub received: WAmount,
pub amount: Amount,
pub sent: Amount,
pub received: Amount,
pub realized_cap: Price,
pub outputs_len: u32,
}
@@ -19,15 +19,15 @@ impl AddressData {
pub fn new(address_type: AddressType) -> Self {
Self {
address_type,
amount: WAmount::ZERO,
sent: WAmount::ZERO,
received: WAmount::ZERO,
amount: Amount::ZERO,
sent: Amount::ZERO,
received: Amount::ZERO,
realized_cap: Price::ZERO,
outputs_len: 0,
}
}
pub fn receive(&mut self, amount: WAmount, price: Price) {
pub fn receive(&mut self, amount: Amount, price: Price) {
let previous_amount = self.amount;
let new_amount = previous_amount + amount;
@@ -43,7 +43,7 @@ impl AddressData {
self.realized_cap += received_value;
}
pub fn send(&mut self, amount: WAmount, previous_price: Price) -> color_eyre::Result<()> {
pub fn send(&mut self, amount: Amount, previous_price: Price) -> color_eyre::Result<()> {
let previous_amount = self.amount;
if previous_amount < amount {
@@ -66,7 +66,7 @@ impl AddressData {
#[inline(always)]
pub fn is_empty(&self) -> bool {
if self.amount == WAmount::ZERO {
if self.amount == Amount::ZERO {
if self.outputs_len != 0 {
unreachable!();
}
@@ -80,7 +80,7 @@ impl AddressData {
pub fn from_empty(empty: &EmptyAddressData) -> Self {
Self {
address_type: empty.address_type,
amount: WAmount::ZERO,
amount: Amount::ZERO,
sent: empty.transfered,
received: empty.transfered,
realized_cap: Price::ZERO,

View File

@@ -1,10 +1,10 @@
use super::{AddressData, Price, WAmount};
use super::{AddressData, Amount, Price};
#[derive(Debug)]
pub struct AddressRealizedData {
pub initial_address_data: AddressData,
pub received: WAmount,
pub sent: WAmount,
pub received: Amount,
pub sent: Amount,
pub profit: Price,
pub loss: Price,
pub value_created: Price,
@@ -16,8 +16,8 @@ pub struct AddressRealizedData {
impl AddressRealizedData {
pub fn default(initial_address_data: &AddressData) -> Self {
Self {
received: WAmount::ZERO,
sent: WAmount::ZERO,
received: Amount::ZERO,
sent: Amount::ZERO,
profit: Price::ZERO,
loss: Price::ZERO,
utxos_created: 0,
@@ -28,12 +28,12 @@ impl AddressRealizedData {
}
}
pub fn receive(&mut self, amount: WAmount) {
pub fn receive(&mut self, amount: Amount) {
self.received += amount;
self.utxos_created += 1;
}
pub fn send(&mut self, amount: WAmount, current_price: Price, previous_price: Price) {
pub fn send(&mut self, amount: Amount, current_price: Price, previous_price: Price) {
self.sent += amount;
self.utxos_destroyed += 1;

View File

@@ -1,6 +1,6 @@
use allocative::Allocative;
use super::WAmount;
use super::Amount;
#[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Allocative)]
pub enum AddressSize {
@@ -16,7 +16,7 @@ pub enum AddressSize {
}
impl AddressSize {
pub fn from_amount(amount: WAmount) -> Self {
pub fn from_amount(amount: Amount) -> Self {
match amount.to_sat() {
0 => Self::Empty,
1..=9_999_999 => Self::Plankton,

View File

@@ -10,11 +10,13 @@ use bincode::{
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use bitcoin::Amount;
use bitcoin::Amount as BitcoinAmount;
use derive_deref::{Deref, DerefMut};
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use serde::{Deserialize, Serialize};
use super::Height;
#[derive(
Debug,
PartialEq,
@@ -29,98 +31,106 @@ use serde::{Deserialize, Serialize};
Serialize,
Deserialize,
)]
pub struct WAmount(Amount);
direct_repr!(WAmount);
pub struct Amount(BitcoinAmount);
direct_repr!(Amount);
impl WAmount {
pub const ZERO: Self = Self(Amount::ZERO);
impl Amount {
pub const ZERO: Self = Self(BitcoinAmount::ZERO);
pub const ONE_BTC_F64: f64 = 100_000_000.0;
#[inline(always)]
pub fn wrap(amount: Amount) -> Self {
pub fn wrap(amount: BitcoinAmount) -> Self {
Self(amount)
}
#[inline(always)]
pub fn from_sat(sats: u64) -> Self {
Self(Amount::from_sat(sats))
Self(BitcoinAmount::from_sat(sats))
}
}
impl Add for WAmount {
type Output = WAmount;
impl Add for Amount {
type Output = Amount;
fn add(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() + rhs.to_sat())
fn add(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() + rhs.to_sat())
}
}
impl AddAssign for WAmount {
impl AddAssign for Amount {
fn add_assign(&mut self, rhs: Self) {
*self = WAmount::from_sat(self.to_sat() + rhs.to_sat());
*self = Amount::from_sat(self.to_sat() + rhs.to_sat());
}
}
impl Sub for WAmount {
type Output = WAmount;
impl Sub for Amount {
type Output = Amount;
fn sub(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() - rhs.to_sat())
fn sub(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() - rhs.to_sat())
}
}
impl SubAssign for WAmount {
impl SubAssign for Amount {
fn sub_assign(&mut self, rhs: Self) {
*self = WAmount::from_sat(self.to_sat() - rhs.to_sat());
*self = Amount::from_sat(self.to_sat() - rhs.to_sat());
}
}
impl Mul<WAmount> for WAmount {
type Output = WAmount;
impl Mul<Amount> for Amount {
type Output = Amount;
fn mul(self, rhs: WAmount) -> Self::Output {
WAmount::from_sat(self.to_sat() * rhs.to_sat())
fn mul(self, rhs: Amount) -> Self::Output {
Amount::from_sat(self.to_sat() * rhs.to_sat())
}
}
impl Mul<u64> for WAmount {
type Output = WAmount;
impl Mul<u64> for Amount {
type Output = Amount;
fn mul(self, rhs: u64) -> Self::Output {
WAmount::from_sat(self.to_sat() * rhs)
Amount::from_sat(self.to_sat() * rhs)
}
}
impl Sum for WAmount {
impl Mul<Height> for Amount {
type Output = Amount;
fn mul(self, rhs: Height) -> Self::Output {
Amount::from_sat(self.to_sat() * *rhs as u64)
}
}
impl Sum for Amount {
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
let sats = iter.map(|amt| amt.to_sat()).sum();
WAmount::from_sat(sats)
Amount::from_sat(sats)
}
}
impl Encode for WAmount {
impl Encode for Amount {
fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
Encode::encode(&self.to_sat(), encoder)
}
}
impl Decode for WAmount {
impl Decode for Amount {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let sats: u64 = Decode::decode(decoder)?;
Ok(WAmount::from_sat(sats))
Ok(Amount::from_sat(sats))
}
}
impl<'de> BorrowDecode<'de> for WAmount {
impl<'de> BorrowDecode<'de> for Amount {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let sats: u64 = BorrowDecode::borrow_decode(decoder)?;
Ok(WAmount::from_sat(sats))
Ok(Amount::from_sat(sats))
}
}
impl Allocative for WAmount {
impl Allocative for Amount {
fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
visitor.visit_simple_sized::<Self>();
}

View File

@@ -8,7 +8,7 @@ use ordered_float::FloatCore;
use crate::{bitcoin::TARGET_BLOCKS_PER_DAY, utils::LossyFrom};
use super::{AnyDateMap, AnyHeightMap, AnyMap, DateMap, HeightMap, MapValue, WNaiveDate};
use super::{AnyDateMap, AnyHeightMap, AnyMap, Date, DateMap, Height, HeightMap, MapValue};
#[derive(Default, Allocative)]
pub struct BiMap<T>
@@ -37,11 +37,8 @@ where
}
}
pub fn date_insert_sum_range(
&mut self,
date: WNaiveDate,
date_blocks_range: &RangeInclusive<usize>,
) where
pub fn date_insert_sum_range(&mut self, date: Date, date_blocks_range: &RangeInclusive<u32>)
where
T: Sum,
{
self.date
@@ -50,22 +47,22 @@ where
pub fn multi_date_insert_sum_range(
&mut self,
dates: &[WNaiveDate],
first_height: &mut DateMap<usize>,
last_height: &mut DateMap<usize>,
dates: &[Date],
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
) where
T: Sum,
{
dates.iter().for_each(|date| {
let first_height = first_height.get_or_import(date).unwrap();
let last_height = last_height.get_or_import(date).unwrap();
let range = first_height..=last_height;
let range = (*first_height)..=(*last_height);
self.date.insert(*date, self.height.sum_range(&range));
})
}
pub fn multi_insert_const(&mut self, heights: &[usize], dates: &[WNaiveDate], constant: T) {
pub fn multi_insert_const(&mut self, heights: &[Height], dates: &[Date], constant: T) {
self.height.multi_insert_const(heights, constant);
self.date.multi_insert_const(dates, constant);
@@ -73,8 +70,8 @@ where
pub fn multi_insert_simple_transform<F, K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
transform: &F,
) where
@@ -91,8 +88,8 @@ where
#[allow(unused)]
pub fn multi_insert_add<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
added: &mut BiMap<A>,
adder: &mut BiMap<B>,
) where
@@ -109,8 +106,8 @@ where
pub fn multi_insert_subtract<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
subtracted: &mut BiMap<A>,
subtracter: &mut BiMap<B>,
) where
@@ -128,8 +125,8 @@ where
pub fn multi_insert_multiply<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
multiplied: &mut BiMap<A>,
multiplier: &mut BiMap<B>,
) where
@@ -146,8 +143,8 @@ where
pub fn multi_insert_divide<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
divided: &mut BiMap<A>,
divider: &mut BiMap<B>,
) where
@@ -164,8 +161,8 @@ where
pub fn multi_insert_percentage<A, B>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
divided: &mut BiMap<A>,
divider: &mut BiMap<B>,
) where
@@ -182,8 +179,8 @@ where
pub fn multi_insert_cumulative<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
) where
K: MapValue,
@@ -198,8 +195,8 @@ where
pub fn multi_insert_last_x_sum<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
days: usize,
) where
@@ -219,8 +216,8 @@ where
pub fn multi_insert_simple_average<K>(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<K>,
days: usize,
) where
@@ -239,8 +236,8 @@ where
pub fn multi_insert_net_change(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<T>,
days: usize,
) where
@@ -257,8 +254,8 @@ where
pub fn multi_insert_median(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
source: &mut BiMap<T>,
days: Option<usize>,
) where
@@ -275,8 +272,8 @@ where
#[allow(unused)]
pub fn multi_insert_percentile(
&mut self,
heights: &[usize],
dates: &[WNaiveDate],
heights: &[Height],
dates: &[Date],
mut map_and_percentiles: Vec<(&mut BiMap<T>, f32)>,
days: Option<usize>,
) where

View File

@@ -1,29 +1,29 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::{Price, WAmount};
use super::{Amount, Height, Price};
#[derive(Debug, Encode, Decode, Allocative)]
pub struct BlockData {
pub height: u32,
pub height: Height,
pub price: Price,
pub timestamp: u32,
pub amount: WAmount,
pub amount: Amount,
pub utxos: u32,
}
impl BlockData {
pub fn new(height: u32, price: Price, timestamp: u32) -> Self {
pub fn new(height: Height, price: Price, timestamp: u32) -> Self {
Self {
height,
price,
timestamp,
amount: WAmount::ZERO,
amount: Amount::ZERO,
utxos: 0,
}
}
pub fn send(&mut self, amount: WAmount) {
pub fn send(&mut self, amount: Amount) {
self.utxos -= 1;
if self.amount < amount {
@@ -33,7 +33,7 @@ impl BlockData {
self.amount -= amount;
}
pub fn receive(&mut self, amount: WAmount) {
pub fn receive(&mut self, amount: Amount) {
self.utxos += 1;
self.amount += amount;

130
parser/src/structs/date.rs Normal file
View File

@@ -0,0 +1,130 @@
use std::{fmt, str::FromStr};
use allocative::{Allocative, Visitor};
use bincode::{
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use chrono::{Datelike, Days, NaiveDate, TimeZone, Utc};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use super::{DateMapChunkId, MapKey};
/// Number of most-recent dates considered "unsafe" (their values may still
/// change while syncing) and therefore recomputed on the next run.
const NUMBER_OF_UNSAFE_DATES: usize = 2;

/// Earliest plausible year for Bitcoin data (genesis block: 2009).
const MIN_YEAR: i32 = 2009;
/// Loose upper bound used only for `is_out_of_bounds` sanity checks.
const APPROX_MAX_YEAR: i32 = 2100;

/// Newtype wrapper around `chrono::NaiveDate`, used as the date key of
/// dataset maps. `Deref`s to `NaiveDate` so chrono methods are available.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct Date(NaiveDate);
impl Date {
    /// Wraps a `chrono::NaiveDate` into the project's `Date` newtype.
    pub fn wrap(date: NaiveDate) -> Self {
        Self(date)
    }

    /// Builds a `Date` from a unix timestamp (seconds), taking the UTC
    /// calendar day of that instant.
    pub fn from_timestamp(timestamp: u32) -> Self {
        let datetime = Utc.timestamp_opt(i64::from(timestamp), 0).unwrap();
        Self(datetime.date_naive())
    }
}
impl MapKey<DateMapChunkId> for Date {
    /// Chunk id is the calendar year: one serialized chunk file per year.
    fn to_chunk_id(&self) -> DateMapChunkId {
        DateMapChunkId::new(self)
    }

    /// First date whose stored value is still "unsafe" (recomputed next run).
    /// With NUMBER_OF_UNSAFE_DATES = 2 this is the day before `self`.
    fn to_first_unsafe(&self) -> Option<Self> {
        let offset = NUMBER_OF_UNSAFE_DATES - 1;
        self.checked_sub_days(Days::new(offset as u64))
            .map(Date::wrap)
    }

    /// Dates are stored under their own value — no re-keying inside a chunk.
    fn to_serialized_key(&self) -> Self {
        *self
    }

    /// Rejects dates outside the plausible Bitcoin era (2009..=2100).
    fn is_out_of_bounds(&self) -> bool {
        !(MIN_YEAR..=APPROX_MAX_YEAR).contains(&self.year())
    }

    /// Whether this date opens a stored period. January 2009 starts on the
    /// 3rd (the genesis block's date), otherwise the 1st of the month.
    /// NOTE(review): this is true on the first day of *every* month, not only
    /// Jan 1, even though chunks are year-sized — confirm that callers
    /// (e.g. `pre_export`'s import decision) really want month granularity.
    fn is_first(&self) -> bool {
        let day = self.day();

        if self.year() == 2009 && self.month() == 1 {
            day == 3
        } else {
            day == 1
        }
    }

    /// Date `days` earlier, or `None` on calendar underflow.
    fn checked_sub(&self, days: usize) -> Option<Self> {
        self.checked_sub_days(Days::new(days as u64))
            .map(Self::wrap)
    }

    /// Earliest date for which percentile computations are meaningful
    /// (before 2012 there is too little price data).
    fn min_percentile_key() -> Self {
        Self::wrap(NaiveDate::from_ymd_opt(2012, 1, 1).unwrap())
    }

    /// Iterates day by day from `self` through `other`, *inclusive*.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        self.iter_days().take_while(|d| d <= other).map(Date::wrap)
    }

    /// Sub-folder name under which date-keyed chunks are stored.
    fn map_name<'a>() -> &'a str {
        "date"
    }
}
impl fmt::Display for Date {
    /// Delegates to the inner `NaiveDate`'s `Debug` form ("YYYY-MM-DD"),
    /// forwarding the caller's formatter flags (width, alignment, ...).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
impl Encode for Date {
    /// Serializes the date as its human-readable string form ("YYYY-MM-DD"),
    /// the exact inverse of the `Decode` implementation below.
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        let serialized = self.to_string();
        Encode::encode(&serialized, encoder)
    }
}
impl Decode for Date {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let str: String = Decode::decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl<'de> BorrowDecode<'de> for Date {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let str: String = BorrowDecode::borrow_decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl Allocative for Date {
    /// Reports only the shallow size of `Date` to allocative's profiler;
    /// a `NaiveDate` holds no heap data, so the simple sized visit is exact.
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        visitor.visit_simple_sized::<Self>();
    }
}

View File

@@ -1,16 +1,16 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::{BlockData, BlockPath, WNaiveDate};
use super::{BlockData, BlockPath, Date};
#[derive(Debug, Encode, Decode, Allocative)]
pub struct DateData {
pub date: WNaiveDate,
pub date: Date,
pub blocks: Vec<BlockData>,
}
impl DateData {
pub fn new(date: WNaiveDate, blocks: Vec<BlockData>) -> Self {
pub fn new(date: Date, blocks: Vec<BlockData>) -> Self {
Self { date, blocks }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,33 @@
use allocative::Allocative;
use chrono::Datelike;
use crate::Date;
use super::MapChunkId;
/// Chunk identifier for date-keyed maps: one on-disk chunk per calendar year.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative)]
pub struct DateMapChunkId(i32);

impl DateMapChunkId {
    /// Builds the chunk id for `date` from its year.
    pub fn new(date: &Date) -> Self {
        Self(date.year())
    }
}
impl MapChunkId for DateMapChunkId {
    /// File stem of the chunk, e.g. "2023".
    fn to_name(&self) -> String {
        self.0.to_string()
    }

    /// Parses a chunk id back from a file stem.
    ///
    /// # Panics
    /// Panics when `name` is not a valid `i32`. Chunk files are only ever
    /// produced by `to_name`, so anything else means the datasets directory
    /// is corrupted; `expect` keeps that crash but says what went wrong.
    fn from_name(name: &str) -> Self {
        Self(
            name.parse::<i32>()
                .expect("chunk file stem should be a year (i32)"),
        )
    }

    fn to_usize(self) -> usize {
        // Years are >= 2009 in practice, so the i32 -> usize cast cannot wrap.
        self.0 as usize
    }

    fn from_usize(id: usize) -> Self {
        Self(id as i32)
    }
}

View File

@@ -1,12 +1,12 @@
use allocative::Allocative;
use sanakirja::{direct_repr, Storable, UnsizedStorable};
use super::{AddressData, AddressType, WAmount};
use super::{AddressData, AddressType, Amount};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub struct EmptyAddressData {
pub address_type: AddressType,
pub transfered: WAmount,
pub transfered: Amount,
}
direct_repr!(EmptyAddressData);

View File

@@ -0,0 +1,860 @@
use std::{
collections::{BTreeMap, VecDeque},
fmt::Debug,
fs,
iter::Sum,
mem,
ops::{Add, ControlFlow, Div, Mul, Sub},
path::{Path, PathBuf},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::{FloatCore, OrderedFloat};
use serde::{de::DeserializeOwned, Serialize};
use crate::{log, utils::LossyFrom, Serialization};
use super::{AnyMap, MapValue};
/// A key type (calendar date or block height) usable to index a `GenericMap`.
///
/// Implementors define how keys map onto on-disk chunks, which trailing keys
/// are still "unsafe" (recomputed every run), and how to step/iterate keys.
pub trait MapKey<ChunkId>
where
    Self: Sized + PartialOrd + Ord + Clone + Copy + Debug,
    ChunkId: MapChunkId,
{
    /// Id of the serialized chunk this key belongs to.
    fn to_chunk_id(&self) -> ChunkId;
    /// First key at or before `self` whose stored value may still change;
    /// `None` when every key up to `self` is unsafe.
    fn to_first_unsafe(&self) -> Option<Self>;
    /// Key under which the value is stored inside its chunk.
    fn to_serialized_key(&self) -> Self;
    /// Whether the key falls outside the plausible data range.
    fn is_out_of_bounds(&self) -> bool;
    /// Whether this key opens a stored period (used to decide if an existing
    /// chunk must be imported from disk before being extended).
    fn is_first(&self) -> bool;
    /// Key `x` steps back, or `None` on underflow.
    fn checked_sub(&self, x: usize) -> Option<Self>;
    /// Smallest key for which percentile computations are meaningful.
    fn min_percentile_key() -> Self;
    /// Iterates keys from `self` through `other`, inclusive.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self>;
    /// Folder name for this key kind (e.g. "date", "height").
    fn map_name<'a>() -> &'a str;

    // Only meaningful for integer-like keys (heights); date keys keep the
    // default `unreachable!` bodies.
    fn from_usize(_: usize) -> Self {
        unreachable!()
    }

    fn to_usize(&self) -> usize {
        unreachable!()
    }
}
/// On-disk representation of one chunk of a `GenericMap`.
pub trait MapSerialized<Key, Value, ChunkId>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
{
    /// Creates an empty serialized chunk tagged with `version`.
    fn new(version: u32) -> Self;
    /// Last key present in the chunk identified by `last_chunk_id`.
    fn get_last_key(&self, last_chunk_id: &ChunkId) -> Option<Key>;
    /// Version the chunk was written with (a mismatch triggers recompute).
    fn version(&self) -> u32;
    /// Value stored under `serialized_key`, if any.
    fn get(&self, serialized_key: &Key) -> Option<&Value>;
    /// Most recent value in the chunk.
    fn last(&self) -> Option<&Value>;
    /// Merges `map` into the chunk, overwriting existing keys.
    fn extend(&mut self, map: BTreeMap<Key, Value>);
}
/// Identifier of one serialized chunk file; round-trips with the file stem.
pub trait MapChunkId
where
    Self: Ord + Debug + Copy + Clone,
{
    /// File stem used for this chunk on disk.
    fn to_name(&self) -> String;
    /// Inverse of `to_name`.
    fn from_name(name: &str) -> Self;
    fn to_usize(self) -> usize;
    fn from_usize(id: usize) -> Self;
}
/// A persistent, chunked key -> value map (keys are dates or block heights).
///
/// Values live in two tiers: `imported`, chunks already materialized from
/// disk, and `to_insert`, pending writes accumulated since the last export.
#[derive(Default, Debug, Allocative)]
pub struct GenericMap<Key, Value, ChunkId, Serialized> {
    version: u32,
    // Directory holding one serialized file per chunk.
    path_all: String,
    // Optional path of a small file mirroring only the most recent value.
    path_last: Option<String>,
    // How many of the newest chunks to keep in memory after an export.
    chunks_in_memory: usize,
    serialization: Serialization,

    // Last key present on disk when the map was opened (None for a new map).
    pub initial_last_key: Option<Key>,
    // First key whose on-disk value is considered unsafe (to be recomputed).
    pub initial_first_unsafe_key: Option<Key>,

    imported: BTreeMap<ChunkId, Serialized>,
    to_insert: BTreeMap<ChunkId, BTreeMap<Key, Value>>,
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    /// Binary map keeping only the newest chunk in memory, exporting `last`.
    pub fn new_bin(version: u32, path: &str) -> Self {
        Self::new(version, path, Serialization::Binary, 1, true)
    }

    /// Binary map with explicit control over the `last` export.
    pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self {
        Self::new(version, path, Serialization::Binary, 1, export_last)
    }

    /// JSON map keeping every chunk in memory (`usize::MAX`).
    pub fn new_json(version: u32, path: &str, export_last: bool) -> Self {
        Self::new(version, path, Serialization::Json, usize::MAX, export_last)
    }

    /// Opens (or initializes) the map rooted at `path`.
    ///
    /// Loads the newest `chunks_in_memory` chunk files from disk; if a loaded
    /// chunk was written with a different `version`, *every* chunk file is
    /// deleted so the whole map gets recomputed from scratch. Also derives
    /// `initial_last_key` / `initial_first_unsafe_key` used by `is_key_safe`.
    fn new(
        version: u32,
        path: &str,
        serialization: Serialization,
        chunks_in_memory: usize,
        export_last: bool,
    ) -> Self {
        if chunks_in_memory < 1 {
            panic!("Should always have at least the latest chunk in memory");
        }

        // Dataset ids use '-'/'_'/' ' as separators; turn them into sub-dirs.
        let path = path.replace(['-', '_', ' '], "/");

        let path_all = format!("{path}/{}", Key::map_name());

        fs::create_dir_all(&path_all).unwrap();

        let path_last = {
            if export_last {
                Some(serialization.append_extension(&format!("{path}/last")))
            } else {
                None
            }
        };

        let mut s = Self {
            version,
            path_all,
            path_last,
            chunks_in_memory,
            serialization,
            initial_last_key: None,
            initial_first_unsafe_key: None,
            to_insert: BTreeMap::default(),
            imported: BTreeMap::default(),
        };

        // Import the newest chunks (BTreeMap iteration is ordered, so
        // `.rev().take(n)` picks the n most recent ones).
        s.read_dir()
            .into_iter()
            .rev()
            .take(chunks_in_memory)
            .for_each(|(chunk_start, path)| {
                if let Ok(serialized) = s.import(&path) {
                    if serialized.version() == s.version {
                        s.imported.insert(chunk_start, serialized);
                    } else {
                        // Version bump: wipe every chunk so it is recomputed.
                        s.read_dir()
                            .iter()
                            .for_each(|(_, path)| fs::remove_file(path).unwrap())
                    }
                }
            });

        // Last key persisted on disk, taken from the newest imported chunk.
        s.initial_last_key = s
            .imported
            .iter()
            .last()
            .and_then(|(last_chunk_id, serialized)| serialized.get_last_key(last_chunk_id));

        s.initial_first_unsafe_key = s
            .initial_last_key
            .and_then(|last_key| last_key.to_first_unsafe());

        if s.initial_first_unsafe_key.is_none() {
            log(&format!("New {path}/{}", Key::map_name()));
        }

        s
    }

    /// Lists chunk files currently on disk, keyed by chunk id.
    fn read_dir(&self) -> BTreeMap<ChunkId, PathBuf> {
        Self::_read_dir(&self.path_all, &self.serialization)
    }

    /// Static variant of `read_dir`, usable while `self` is partly borrowed.
    pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap<ChunkId, PathBuf> {
        fs::read_dir(path)
            .unwrap()
            .map(|entry| entry.unwrap().path())
            .filter(|path| {
                let extension = path.extension().unwrap().to_str().unwrap();

                // Only pick files matching the map's serialization format.
                path.is_file() && extension == serialization.to_extension()
            })
            .map(|path| {
                // File stem is the chunk id (e.g. the year).
                let chunk_id = ChunkId::from_name(path.file_stem().unwrap().to_str().unwrap());

                (chunk_id, path)
            })
            .collect()
    }

    /// Deserializes one chunk file.
    fn import(&self, path: &Path) -> color_eyre::Result<Serialized> {
        self.serialization
            .import::<Serialized>(path.to_str().unwrap())
    }

    /// Records `value` under `key`, but only if the key is not already safely
    /// persisted on disk (safe keys never change, so re-inserting is wasted
    /// work). Always returns `value` for caller convenience.
    pub fn insert(&mut self, key: Key, value: Value) -> Value {
        if !self.is_key_safe(key) {
            self.to_insert
                .entry(key.to_chunk_id())
                .or_default()
                .insert(key.to_serialized_key(), value);
        }

        value
    }

    /// Inserts `Value::default()` under `key`.
    pub fn insert_default(&mut self, key: Key) -> Value {
        self.insert(key, Value::default())
    }

    /// A key is "safe" when strictly before the first unsafe key recorded at
    /// open time — its value is final and already persisted.
    #[inline(always)]
    pub fn is_key_safe(&self, key: Key) -> bool {
        self.initial_first_unsafe_key
            .map_or(false, |initial_first_unsafe_key| {
                initial_first_unsafe_key > key
            })
    }

    /// Looks `key` up in pending inserts first, then in chunks already in
    /// memory. Does NOT touch the disk — see `get_or_import` for that.
    pub fn get(&self, key: &Key) -> Option<Value> {
        let chunk_id = key.to_chunk_id();
        let serialized_key = key.to_serialized_key();

        self.to_insert
            .get(&chunk_id)
            .and_then(|tree| tree.get(&serialized_key).cloned())
            .or_else(|| {
                self.imported
                    .get(&chunk_id)
                    .and_then(|serialized| serialized.get(&serialized_key))
                    .cloned()
            })
    }

    /// Like `get`, but lazily loads the key's chunk from disk (and caches it
    /// in `imported`) when it is not in memory yet.
    pub fn get_or_import(&mut self, key: &Key) -> Option<Value> {
        if key.is_out_of_bounds() {
            return None;
        }

        let chunk_id = key.to_chunk_id();
        let serialized_key = key.to_serialized_key();

        self.to_insert
            .get(&chunk_id)
            .and_then(|tree| tree.get(&serialized_key).cloned())
            .or_else(|| {
                #[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
                if !self.imported.contains_key(&chunk_id) {
                    let dir_content = self.read_dir();

                    if let Some(path) = dir_content.get(&chunk_id) {
                        let serialized = self.import(path).unwrap();

                        self.imported.insert(chunk_id, serialized);
                    }
                }

                self.imported
                    .get(&chunk_id)
                    .and_then(|serialized| serialized.get(&serialized_key))
                    .cloned()
            })
    }
}
impl<Key, Value, ChunkId, Serialized> AnyMap for GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    fn path(&self) -> &str {
        &self.path_all
    }

    fn path_last(&self) -> &Option<String> {
        &self.path_last
    }

    /// Name of the value type, for logging/diagnostics.
    fn t_name(&self) -> &str {
        std::any::type_name::<Value>()
    }

    /// Merges every pending `to_insert` tree into its in-memory chunk.
    ///
    /// When pending data does not start at a chunk's first key, the chunk
    /// already exists on disk and is imported first so `extend` merges into
    /// it instead of producing a truncated chunk.
    fn pre_export(&mut self) {
        self.to_insert.iter_mut().for_each(|(chunk_id, map)| {
            if let Some((key, _)) = map.first_key_value() {
                if !key.is_first() && !self.imported.contains_key(chunk_id) {
                    // Inlined read_dir()/import(): calling &self methods here
                    // is impossible while `self.to_insert` is mutably borrowed.
                    let dir_content = Self::_read_dir(&self.path_all, &self.serialization);

                    let path = dir_content.get(chunk_id).unwrap_or_else(|| {
                        dbg!(&self.path_all, chunk_id, &dir_content);
                        panic!();
                    });

                    let serialized = self
                        .serialization
                        .import::<Serialized>(path.to_str().unwrap())
                        .unwrap();

                    self.imported.insert(*chunk_id, serialized);
                }
            }

            // `mem::take` drains the pending tree but leaves its (now empty)
            // entry in `to_insert`; `post_export` clears those later.
            self.imported
                .entry(*chunk_id)
                .or_insert(Serialized::new(self.version))
                .extend(mem::take(map));
        });
    }

    /// Writes every chunk touched in this batch back to disk, plus (for the
    /// final chunk) the standalone `last` value file when configured.
    ///
    /// Must run after `pre_export`: each pending tree must already be drained.
    fn export(&self) -> color_eyre::Result<()> {
        // NOTE(review): `len` counts `imported` chunks but `index` walks
        // `to_insert` entries; when `imported` holds more chunks than were
        // touched (e.g. JSON maps keep all chunks in memory) the
        // `index == len - 1` branch may never fire and `last` would not be
        // exported — confirm the two lengths always match here.
        let len = self.imported.len();

        self.to_insert.iter().enumerate().try_for_each(
            |(index, (chunk_id, map))| -> color_eyre::Result<()> {
                if !map.is_empty() {
                    unreachable!()
                }

                let path = self.serialization.append_extension(&format!(
                    "{}/{}",
                    self.path_all,
                    chunk_id.to_name()
                ));

                let serialized = self.imported.get(chunk_id).unwrap_or_else(|| {
                    dbg!(&self.path_all, chunk_id, &self.imported);
                    panic!();
                });

                self.serialization.export(&path, serialized)?;

                if index == len - 1 {
                    if let Some(path_last) = self.path_last.as_ref() {
                        self.serialization
                            .export(path_last, serialized.last().unwrap())?;
                    }
                }

                Ok(())
            },
        )
    }

    /// Evicts all but the newest `chunks_in_memory` chunks and clears the
    /// (already drained) pending-insert entries.
    fn post_export(&mut self) {
        self.imported
            .keys()
            .rev()
            .enumerate()
            .filter(|(index, _)| *index + 1 > self.chunks_in_memory)
            .map(|(_, key)| *key)
            .collect_vec()
            .iter()
            .for_each(|key| {
                self.imported.remove(key);
            });

        self.to_insert.clear();
    }
}
impl<Key, Value, ChunkId, Serialized> GenericMap<Key, Value, ChunkId, Serialized>
where
    Value: MapValue,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Serialized: MapSerialized<Key, Value, ChunkId>,
{
    /// Sum of the values present at `keys` (missing keys are skipped).
    pub fn sum_keys(&self, keys: &[Key]) -> Value
    where
        Value: Sum,
    {
        keys.iter().flat_map(|key| self.get(key)).sum::<Value>()
    }

    /// Mean of the values at `keys`.
    /// NOTE(review): divides by `keys.len()` even though missing keys are
    /// skipped in `sum_keys` — confirm callers always pass present keys.
    pub fn average_keys(&self, keys: &[Key]) -> f32
    where
        Value: Sum,
        f32: LossyFrom<Value>,
    {
        f32::lossy_from(self.sum_keys(keys)) / keys.len() as f32
    }

    /// Inserts `callback(key)` for every key.
    pub fn multi_insert<F>(&mut self, keys: &[Key], mut callback: F)
    where
        F: FnMut(&Key) -> Value,
    {
        keys.iter().for_each(|key| {
            self.insert(*key, callback(key));
        });
    }

    /// Inserts the same constant for every key.
    pub fn multi_insert_const(&mut self, keys: &[Key], constant: Value) {
        keys.iter().for_each(|key| {
            self.insert(*key, constant);
        });
    }

    /// Inserts `transform(source[key])` for every key.
    pub fn multi_insert_simple_transform<SourceValue, SourceSerialized, F>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        transform: F,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        F: Fn(SourceValue) -> Value,
    {
        keys.iter().for_each(|key| {
            self.insert(*key, transform(source.get_or_import(key).unwrap()));
        });
    }

    /// Like `multi_insert_simple_transform`, but the transform also receives
    /// the key and the source map (e.g. to look up neighboring keys).
    pub fn multi_insert_complex_transform<SourceValue, SourceSerialized, F>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        mut transform: F,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        F: FnMut(
            (
                SourceValue,
                &Key,
                &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
            ),
        ) -> Value,
    {
        keys.iter().for_each(|key| {
            self.insert(
                *key,
                transform((source.get_or_import(key).unwrap(), key, source)),
            );
        });
    }

    /// Inserts `added[key] + adder[key]` for every key.
    pub fn multi_insert_add<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        added: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        adder: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Add<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert(
                *key,
                Value::lossy_from(added.get_or_import(key).unwrap())
                    + Value::lossy_from(adder.get_or_import(key).unwrap()),
            );
        });
    }

    /// Inserts `subtracted[key] - subtracter[key]` for every key.
    pub fn multi_insert_subtract<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        subtracted: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        subtracter: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Sub<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert(
                *key,
                Value::lossy_from(subtracted.get_or_import(key).unwrap())
                    - Value::lossy_from(subtracter.get_or_import(key).unwrap()),
            );
        });
    }

    /// Inserts `multiplied[key] * multiplier[key]` for every key.
    pub fn multi_insert_multiply<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        multiplied: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        multiplier: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Mul<Output = Value>,
    {
        keys.iter().for_each(|key| {
            self.insert(
                *key,
                Value::lossy_from(multiplied.get_or_import(key).unwrap())
                    * Value::lossy_from(multiplier.get_or_import(key).unwrap()),
            );
        });
    }

    /// Inserts `divided[key] / divider[key]` for every key.
    pub fn multi_insert_divide<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self._multi_insert_divide(keys, divided, divider, false)
    }

    /// Same as `multi_insert_divide` but scaled by 100.
    pub fn multi_insert_percentage<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        self._multi_insert_divide(keys, divided, divider, true)
    }

    /// Shared body of divide/percentage; `as_percentage` picks the scale.
    fn _multi_insert_divide<A, B, ASerialized, BSerialized>(
        &mut self,
        keys: &[Key],
        divided: &mut GenericMap<Key, A, ChunkId, ASerialized>,
        divider: &mut GenericMap<Key, B, ChunkId, BSerialized>,
        as_percentage: bool,
    ) where
        A: MapValue,
        ASerialized: MapSerialized<Key, A, ChunkId>,
        B: MapValue,
        BSerialized: MapSerialized<Key, B, ChunkId>,
        Value: LossyFrom<A> + LossyFrom<B> + Div<Output = Value> + Mul<Output = Value> + From<u8>,
    {
        let multiplier = Value::from(if as_percentage { 100 } else { 1 });

        keys.iter().for_each(|key| {
            self.insert(
                *key,
                Value::lossy_from(divided.get_or_import(key).unwrap())
                    / Value::lossy_from(divider.get_or_import(key).unwrap())
                    * multiplier,
            );
        });
    }

    /// Running total of `source` (rolling sum with no window bound).
    pub fn multi_insert_cumulative<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        self._multi_insert_last_x_sum(keys, source, None)
    }

    /// Rolling sum of the last `len` values of `source`.
    pub fn multi_insert_last_x_sum<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: usize,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        self._multi_insert_last_x_sum(keys, source, Some(len))
    }

    /// Shared rolling-sum body: new_sum = prev_sum + source[key] -
    /// source[key - len] (the subtrahend is 0 when `len` is `None` or the
    /// window hasn't filled). The previous sum is seeded from `self` at
    /// `key - 1` so the computation can resume mid-map across runs.
    fn _multi_insert_last_x_sum<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: Option<usize>,
    ) where
        SourceValue: MapValue,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: LossyFrom<SourceValue> + Add<Output = Value> + Sub<Output = Value>,
    {
        let mut sum = None;

        keys.iter().for_each(|key| {
            let to_subtract = len
                .and_then(|x| {
                    key.checked_sub(x)
                        .and_then(|previous_key| source.get_or_import(&previous_key))
                })
                .unwrap_or_default();

            let previous_sum = sum.unwrap_or_else(|| {
                key.checked_sub(1)
                    .and_then(|previous_sum_key| self.get_or_import(&previous_sum_key))
                    .unwrap_or_default()
            });

            let last_value = source.get_or_import(key).unwrap_or_else(|| {
                dbg!(&source.to_insert, &source.path(), key);
                panic!();
            });

            sum.replace(
                previous_sum + Value::lossy_from(last_value) - Value::lossy_from(to_subtract),
            );

            self.insert(*key, sum.unwrap());
        });
    }

    /// Smoothed running average: avg = (prev_avg * (len - 1) + x) / len.
    /// This is an exponential-style approximation, not a true windowed mean.
    /// NaN source values are treated as 0.
    pub fn multi_insert_simple_average<SourceValue, SourceSerialized>(
        &mut self,
        keys: &[Key],
        source: &mut GenericMap<Key, SourceValue, ChunkId, SourceSerialized>,
        len: usize,
    ) where
        SourceValue: MapValue + Sum,
        SourceSerialized: MapSerialized<Key, SourceValue, ChunkId>,
        Value: Into<f32> + From<f32>,
        f32: LossyFrom<SourceValue>,
    {
        if len <= 1 {
            panic!("Average of 1 or less is not useful");
        }

        let len = len as f32;

        let mut average = None;

        keys.iter().for_each(|key| {
            // Seed from the previously stored average so runs can resume.
            let previous_average: f32 = average
                .unwrap_or_else(|| {
                    key.checked_sub(1)
                        .and_then(|previous_average_key| self.get(&previous_average_key))
                        .unwrap_or_default()
                })
                .into();

            let mut last_value = f32::lossy_from(source.get_or_import(key).unwrap_or_else(|| {
                dbg!(key);
                panic!()
            }));

            if last_value.is_nan() {
                last_value = 0.0;
            }

            average.replace(((previous_average * (len - 1.0) + last_value) / len).into());

            self.insert(*key, average.unwrap());
        });
    }

    /// Inserts `source[key] - source[key - len]` (absolute change over `len`
    /// keys; 0 is used when the earlier key underflows).
    pub fn multi_insert_net_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
    where
        Value: Sub<Output = Value>,
    {
        keys.iter().for_each(|key| {
            let previous_value = key
                .checked_sub(len)
                .and_then(|previous_key| source.get_or_import(&previous_key))
                .unwrap_or_default();

            let last_value = source.get_or_import(key).unwrap();

            let net_change = last_value - previous_value;

            self.insert(*key, net_change);
        });
    }

    /// Inserts the percentage change over `len` keys:
    /// ((source[key] / source[key - len]) - 1) * 100.
    pub fn multi_insert_percentage_change(&mut self, keys: &[Key], source: &mut Self, len: usize)
    where
        Value: Sub<Output = Value> + FloatCore,
    {
        let one = Value::from(1.0).unwrap();
        let hundred = Value::from(100.0).unwrap();

        keys.iter().for_each(|key| {
            let previous_value = key
                .checked_sub(len)
                .and_then(|previous_key| source.get_or_import(&previous_key))
                .unwrap_or_default();

            let last_value = source.get_or_import(key).unwrap();

            let percentage_change = ((last_value / previous_value) - one) * hundred;

            self.insert(*key, percentage_change);
        });
    }

    /// Median of `source` over a trailing window (`len = None` => expanding
    /// window); written into `self` via the 0.5 percentile.
    pub fn multi_insert_median(&mut self, keys: &[Key], source: &mut Self, len: Option<usize>)
    where
        Value: FloatCore,
    {
        source.multi_insert_percentile(keys, vec![(self, 0.5)], len);
    }

    /// Computes one or more trailing-window percentiles of `self`'s values
    /// and writes each into its destination map.
    ///
    /// Maintains two parallel structures: `ordered_vec`, the window in key
    /// order (to know which value to evict), and `sorted_vec`, the same
    /// values kept sorted (to read percentiles). NaN values are excluded;
    /// keys before `min_percentile_key` get NaN results.
    ///
    /// NOTE(review): the initial window is built over `key - size ..= key`,
    /// i.e. `size + 1` values, but the eviction condition below is
    /// `ordered_vec.len() == len` — once the window holds more than `len`
    /// values it never shrinks. Looks like an off-by-one; confirm intended
    /// window size.
    pub fn multi_insert_percentile(
        &mut self,
        keys: &[Key],
        mut map_and_percentiles: Vec<(&mut Self, f32)>,
        len: Option<usize>,
    ) where
        Value: FloatCore,
    {
        if len.map_or(false, |size| size < 3) {
            panic!("Computing a percentile for a size lower than 3 is useless");
        }

        let mut ordered_vec = None;
        let mut sorted_vec = None;

        let min_percentile_key = Key::min_percentile_key();

        let nan = Value::from(f32::NAN).unwrap();
        let two = Value::from(2.0).unwrap();

        keys.iter().cloned().try_for_each(|key| {
            // Too early for meaningful percentiles: emit NaN everywhere.
            if key < min_percentile_key {
                map_and_percentiles.iter_mut().for_each(|(map, _)| {
                    (*map).insert(key, nan);
                });

                return ControlFlow::Continue::<()>(());
            }

            if let Some(start) = len.map_or(Some(min_percentile_key), |size| key.checked_sub(size))
            {
                if sorted_vec.is_none() {
                    // First reachable key: build the whole window from scratch.
                    let mut vec = start
                        .iter_up_to(&key)
                        .flat_map(|key| self.get_or_import(&key))
                        .filter(|f| !f.is_nan())
                        .map(|f| OrderedFloat(f))
                        .collect_vec();

                    if len.is_some() {
                        ordered_vec.replace(VecDeque::from(vec.clone()));
                    }

                    vec.sort_unstable();
                    sorted_vec.replace(vec);
                } else {
                    // Incremental update: evict the oldest value, insert the
                    // newest, keeping `sorted_vec` sorted via binary search.
                    let float_value = self.get_or_import(&key).unwrap();

                    if !float_value.is_nan() {
                        let float_value = OrderedFloat(float_value);

                        if let Some(len) = len {
                            if let Some(ordered_vec) = ordered_vec.as_mut() {
                                if ordered_vec.len() == len {
                                    let first = ordered_vec.pop_front().unwrap();

                                    let pos =
                                        sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
                                    sorted_vec.as_mut().unwrap().remove(pos);
                                }

                                ordered_vec.push_back(float_value);
                            }
                        }

                        let pos = sorted_vec
                            .as_ref()
                            .unwrap()
                            .binary_search(&float_value)
                            .unwrap_or_else(|pos| pos);

                        sorted_vec.as_mut().unwrap().insert(pos, float_value);
                    }
                }

                let vec = sorted_vec.as_ref().unwrap();

                let len = vec.len();

                map_and_percentiles
                    .iter_mut()
                    .for_each(|(map, percentile)| {
                        if !(0.0..=1.0).contains(percentile) {
                            panic!("The percentile should be between 0.0 and 1.0");
                        }

                        let value = {
                            if len < 2 {
                                nan
                            } else {
                                // Linear index into the sorted window; when it
                                // falls between two slots, average the pair.
                                let index = (len - 1) as f32 * *percentile;

                                let fract = index.fract();

                                if fract != 0.0 {
                                    (vec.get(index.ceil() as usize)
                                        .unwrap_or_else(|| {
                                            dbg!(vec, index, &self.path_all, &self.path_all, len);
                                            panic!()
                                        })
                                        .0
                                        + vec
                                            .get(index as usize)
                                            .unwrap_or_else(|| {
                                                dbg!(
                                                    vec,
                                                    index,
                                                    &self.path_all,
                                                    &self.path_all,
                                                    len
                                                );
                                                panic!()
                                            })
                                            .0)
                                        / two
                                } else {
                                    vec.get(index as usize)
                                        .unwrap_or_else(|| {
                                            dbg!(vec, index);
                                            panic!();
                                        })
                                        .0
                                }
                            }
                        };

                        (*map).insert(key, value);
                    });
            } else {
                // Window start underflowed: not enough history yet.
                map_and_percentiles.iter_mut().for_each(|(map, _)| {
                    (*map).insert(key, nan);
                });
            }

            ControlFlow::Continue(())
        });
    }
}

View File

@@ -0,0 +1,154 @@
use std::{
fmt,
ops::{Add, AddAssign, Sub},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use crate::{bitcoin::NUMBER_OF_UNSAFE_BLOCKS, HEIGHT_MAP_CHUNK_SIZE};
use super::{HeightMapChunkId, MapKey};
/// A block height (the index of a block in the chain), stored as a `u32`.
///
/// Dereferences to the inner `u32` via `Deref`/`DerefMut`.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
    Encode,
    Decode,
    Allocative,
)]
pub struct Height(u32);
impl Height {
    /// Height 0 (the genesis block).
    pub const ZERO: Height = Height(0);

    pub fn new(height: u32) -> Self {
        Self(height)
    }

    /// Whether this height is within the last `NUMBER_OF_UNSAFE_BLOCKS * 3`
    /// blocks of the chain tip.
    ///
    /// Uses `saturating_sub` so a `block_count` smaller than the margin
    /// (early chain) can't underflow (panic in debug, wrap in release);
    /// such tiny chains are entirely "close to the end".
    pub fn is_close_to_end(&self, block_count: usize) -> bool {
        **self > block_count.saturating_sub(NUMBER_OF_UNSAFE_BLOCKS * 3) as u32
    }

    /// Whether this height is more than `NUMBER_OF_UNSAFE_BLOCKS` below the
    /// tip, i.e. deep enough to be considered safe from reorgs.
    ///
    /// `saturating_sub` for the same underflow reason as above.
    pub fn is_safe(&self, block_count: usize) -> bool {
        **self < block_count.saturating_sub(NUMBER_OF_UNSAFE_BLOCKS) as u32
    }
}
impl PartialEq<u64> for Height {
fn eq(&self, other: &u64) -> bool {
**self == *other as u32
}
}
impl Add<u32> for Height {
type Output = Height;
fn add(self, rhs: u32) -> Self::Output {
Self::new(*self + rhs)
}
}
impl Add<usize> for Height {
type Output = Height;
fn add(self, rhs: usize) -> Self::Output {
Self::new(*self + rhs as u32)
}
}
impl Sub<Height> for Height {
type Output = Height;
fn sub(self, rhs: Height) -> Self::Output {
Self::new(*self - *rhs)
}
}
impl Sub<u32> for Height {
type Output = Height;
fn sub(self, rhs: u32) -> Self::Output {
Self::new(*self - rhs)
}
}
impl Sub<usize> for Height {
type Output = Height;
fn sub(self, rhs: usize) -> Self::Output {
Self::new(*self - rhs as u32)
}
}
impl AddAssign<usize> for Height {
fn add_assign(&mut self, rhs: usize) {
*self = self.add(rhs);
}
}
impl fmt::Display for Height {
    /// Displays the raw height number.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl MapKey<HeightMapChunkId> for Height {
    /// Chunk this height belongs to (heights are persisted in fixed-size chunks).
    fn to_chunk_id(&self) -> HeightMapChunkId {
        HeightMapChunkId::new(self)
    }

    /// First height that is no longer reorg-safe relative to this tip,
    /// i.e. `self - (NUMBER_OF_UNSAFE_BLOCKS - 1)`; `None` when the chain
    /// is shorter than that.
    fn to_first_unsafe(&self) -> Option<Self> {
        let offset = NUMBER_OF_UNSAFE_BLOCKS - 1;
        self.checked_sub(offset)
    }

    /// Offset of this height inside its chunk.
    fn to_serialized_key(&self) -> Self {
        Height::new(**self % HEIGHT_MAP_CHUNK_SIZE)
    }

    /// Sanity bound on heights; anything above ~2.1M is treated as corrupt.
    fn is_out_of_bounds(&self) -> bool {
        !(0..=2_100_000).contains(&**self)
    }

    fn is_first(&self) -> bool {
        **self == 0
    }

    fn checked_sub(&self, x: usize) -> Option<Self> {
        (**self).checked_sub(x as u32).map(Height::new)
    }

    /// First height for which percentile datasets are computed.
    /// NOTE(review): presumably chosen to skip low-activity early heights —
    /// confirm against the percentile datasets.
    fn min_percentile_key() -> Self {
        Self(160_000)
    }

    /// Inclusive range of heights from `self` up to `other`.
    fn iter_up_to(&self, other: &Self) -> impl Iterator<Item = Self> {
        (**self..=**other).map(Height::new)
    }

    fn map_name<'a>() -> &'a str {
        "height"
    }

    fn to_usize(&self) -> usize {
        (**self) as usize
    }

    fn from_usize(h: usize) -> Self {
        Self(h as u32)
    }
}

View File

@@ -0,0 +1,986 @@
use std::{
cmp::Ordering,
collections::{BTreeMap, VecDeque},
fmt::Debug,
fs,
iter::Sum,
mem,
ops::{Add, ControlFlow, Div, Mul, RangeInclusive, Sub},
path::{Path, PathBuf},
};
use allocative::Allocative;
use bincode::{Decode, Encode};
use itertools::Itertools;
use ordered_float::{FloatCore, OrderedFloat};
use serde::{Deserialize, Serialize};
use crate::{
bitcoin::NUMBER_OF_UNSAFE_BLOCKS,
io::{format_path, Serialization},
utils::{log, LossyFrom},
};
use super::{AnyMap, MapValue};
/// Number of consecutive heights stored per serialized chunk file.
pub const HEIGHT_MAP_CHUNK_SIZE: usize = 10_000;
/// On-disk form of one chunk of a `HeightMap`: a dataset version (used to
/// invalidate stale files) plus the chunk's values, indexed by the height's
/// offset within the chunk.
#[derive(Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedHeightMap<T> {
    version: u32,
    map: Vec<T>,
}
/// A dataset keyed by block height, persisted in fixed-size chunk files
/// (see `HEIGHT_MAP_CHUNK_SIZE`) under `path_all`, with pending inserts
/// buffered in `to_insert` until exported.
#[derive(Default, Allocative)]
pub struct HeightMap<T>
where
    T: MapValue,
{
    version: u32, // bumped to invalidate previously exported files
    path_all: String, // directory holding one file per chunk
    path_last: Option<String>, // optional single-value file with the newest entry
    chunks_in_memory: usize, // how many most-recent chunks to keep imported
    serialization: Serialization, // binary or JSON
    initial_last_height: Option<usize>, // last height found on disk at startup
    initial_first_unsafe_height: Option<usize>, // first reorg-unsafe height at startup
    imported: BTreeMap<usize, SerializedHeightMap<T>>, // chunk start -> loaded chunk
    to_insert: BTreeMap<usize, BTreeMap<usize, T>>, // chunk start -> (offset -> value)
}
impl<T> HeightMap<T>
where
T: MapValue,
{
pub fn new_bin(version: u32, path: &str) -> Self {
Self::new(version, path, Serialization::Binary, 1, true)
}
pub fn _new_bin(version: u32, path: &str, export_last: bool) -> Self {
Self::new(version, path, Serialization::Binary, 1, export_last)
}
pub fn new_json(version: u32, path: &str, export_last: bool) -> Self {
Self::new(version, path, Serialization::Json, usize::MAX, export_last)
}
fn new(
version: u32,
path: &str,
serialization: Serialization,
chunks_in_memory: usize,
export_last: bool,
) -> Self {
if chunks_in_memory < 1 {
panic!("Should always have at least the latest chunk in memory");
}
let path = format_path(path);
let path_all = format!("{path}/height");
fs::create_dir_all(&path_all).unwrap();
let path_last = {
if export_last {
Some(serialization.append_extension(&format!("{path}/last")))
} else {
None
}
};
let mut s = Self {
version,
path_all,
path_last,
chunks_in_memory,
serialization,
initial_first_unsafe_height: None,
initial_last_height: None,
to_insert: BTreeMap::default(),
imported: BTreeMap::default(),
};
s.read_dir()
.into_iter()
.rev()
.take(chunks_in_memory)
.for_each(|(chunk_start, path)| {
if let Ok(serialized) = s.import(&path) {
if serialized.version == s.version {
s.imported.insert(chunk_start, serialized);
} else {
s.read_dir()
.iter()
.for_each(|(_, path)| fs::remove_file(path).unwrap())
}
}
});
s.initial_last_height = s
.imported
.iter()
.last()
.map(|(chunk_start, serialized)| chunk_start + serialized.map.len());
s.initial_first_unsafe_height = s.initial_last_height.and_then(|last_height| {
let offset = NUMBER_OF_UNSAFE_BLOCKS - 1;
last_height.checked_sub(offset)
});
if s.initial_first_unsafe_height.is_none() {
log(&format!("New {path}"));
}
s
}
/// File stem for the chunk containing `height`: `"start..end"` (end exclusive).
fn height_to_chunk_name(height: Height) -> String {
    let start = Self::height_to_chunk_start(height);
    let end = start + HEIGHT_MAP_CHUNK_SIZE;

    format!("{start}..{end}")
}
/// Rounds `height` down to the first height of its chunk.
fn height_to_chunk_start(height: Height) -> usize {
    height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE
}
/// Stores `value` at `height`, unless that height was already safe (i.e.
/// persisted) when the map was opened — safe heights are silently skipped
/// since their values are final on disk.
///
/// Returns `value` unchanged so calls can be used as expressions.
pub fn insert(&mut self, height: Height, value: T) -> T {
    if !self.is_height_safe(height) {
        self.to_insert
            .entry(Self::height_to_chunk_start(height))
            .or_default()
            // Keyed by the offset within the chunk.
            .insert(height % HEIGHT_MAP_CHUNK_SIZE, value);
    }

    value
}
/// Inserts `T::default()` at `height` (see `insert`).
pub fn insert_default(&mut self, height: Height) -> T {
    self.insert(height, T::default())
}
/// Looks up the value at `height`, checking pending inserts first and then
/// the chunks currently loaded in memory. Never touches the disk; returns
/// `None` when the chunk isn't loaded (see `get_or_import` for the loading
/// variant).
pub fn get(&self, height: &usize) -> Option<T> {
    let chunk_start = Self::height_to_chunk_start(*height);

    self.to_insert
        .get(&chunk_start)
        .and_then(|map| map.get(&(height - chunk_start)).cloned())
        .or_else(|| {
            self.imported
                .get(&chunk_start)
                .and_then(|serialized| serialized.map.get(height - chunk_start))
                .cloned()
        })
}
/// Like `get`, but loads the height's chunk from disk when it isn't in
/// memory. Panics (after a `dbg!`) when the value or its chunk file doesn't
/// exist — callers are expected to only request heights that were inserted.
pub fn get_or_import(&mut self, height: &usize) -> T {
    let chunk_start = Self::height_to_chunk_start(*height);

    self.to_insert
        .get(&chunk_start)
        .and_then(|map| map.get(&(height - chunk_start)).cloned())
        .or_else(|| {
            #[allow(clippy::map_entry)] // Can't be mut and then use read_dir()
            if !self.imported.contains_key(&chunk_start) {
                let dir_content = self.read_dir();

                let path = dir_content.get(&chunk_start).unwrap_or_else(|| {
                    dbg!(self.path(), chunk_start, &dir_content);
                    panic!();
                });

                let serialized = self.import(path).unwrap();

                self.imported.insert(chunk_start, serialized);
            }

            self.imported
                .get(&chunk_start)
                .and_then(|serialized| serialized.map.get(height - chunk_start))
                .cloned()
        })
        .unwrap_or_else(|| {
            dbg!(height, self.path());
            panic!();
        })
}
/// Whether `height` was already reorg-safe (and thus persisted) when the
/// map was opened; such heights are never re-inserted.
#[inline(always)]
pub fn is_height_safe(&self, height: Height) -> bool {
    self.initial_first_unsafe_height.unwrap_or(0) > height
}
/// Lists this map's chunk files, keyed by each chunk's starting height.
fn read_dir(&self) -> BTreeMap<usize, PathBuf> {
    Self::_read_dir(&self.path_all, &self.serialization)
}
/// Lists the chunk files in `path` with the serialization's extension,
/// keyed by the chunk's starting height (parsed from the `"start..end"`
/// file stem). Panics on unreadable directories or malformed file names.
pub fn _read_dir(path: &str, serialization: &Serialization) -> BTreeMap<usize, PathBuf> {
    fs::read_dir(path)
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| {
            // Keep only regular files serialized in this map's format.
            let extension = path.extension().unwrap().to_str().unwrap();

            path.is_file() && extension == serialization.to_extension()
        })
        .map(|path| {
            (
                path.file_stem()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .split("..")
                    .next()
                    .unwrap()
                    .parse::<usize>()
                    .unwrap(),
                path,
            )
        })
        .collect()
}
/// Deserializes one chunk file.
fn import(&self, path: &Path) -> color_eyre::Result<SerializedHeightMap<T>> {
    self.serialization
        .import::<SerializedHeightMap<T>>(path.to_str().unwrap())
}
}
impl<T> AnyMap for HeightMap<T>
where
    T: MapValue,
{
    fn path(&self) -> &str {
        &self.path_all
    }

    fn path_last(&self) -> &Option<String> {
        &self.path_last
    }

    /// Name of the value type, for logging/diagnostics.
    fn t_name(&self) -> &str {
        std::any::type_name::<T>()
    }

    /// Merges every pending insert into the in-memory chunks so `export`
    /// only has to write already-merged chunks.
    fn pre_export(&mut self) {
        let to_insert = &mut self.to_insert;

        to_insert.iter_mut().for_each(|(chunk_start, map)| {
            if let Some((key, _)) = map.first_key_value() {
                // Inserts not starting at the chunk's first offset extend an
                // existing chunk, which must first be loaded from disk.
                if *key > 0 && !self.imported.contains_key(chunk_start) {
                    // Had to copy paste many lines from functions as calling a function from self isn't allowed because of the &mut
                    let dir_content = Self::_read_dir(&self.path_all, &self.serialization);

                    let path = dir_content.get(chunk_start).unwrap_or_else(|| {
                        dbg!(&self.path_all, chunk_start, &dir_content);
                        panic!();
                    });

                    let serialized = self
                        .serialization
                        .import::<SerializedHeightMap<T>>(path.to_str().unwrap())
                        .unwrap();

                    self.imported.insert(*chunk_start, serialized);
                }
            }

            let serialized = self
                .imported
                .entry(*chunk_start)
                .or_insert(SerializedHeightMap {
                    version: self.version,
                    map: vec![],
                });

            mem::take(map)
                .into_iter()
                .for_each(
                    |(chunk_height, value)| match serialized.map.len().cmp(&chunk_height) {
                        // Overwrite an existing slot...
                        Ordering::Greater => serialized.map[chunk_height] = value,
                        // ...or append the next one.
                        Ordering::Equal => serialized.map.push(value),
                        // A gap means heights were skipped: fatal.
                        Ordering::Less => {
                            dbg!(&self.path_all, &serialized.map, chunk_height, value);
                            panic!()
                        }
                    },
                );
        });
    }

    /// Writes every touched chunk to disk, plus the optional `last` file
    /// holding the newest value.
    ///
    /// NOTE(review): `len` counts `imported` chunks while `index` enumerates
    /// `to_insert` — this assumes both cover the same chunk set when
    /// deciding which iteration writes the `last` file; confirm.
    fn export(&self) -> color_eyre::Result<()> {
        let len = self.imported.len();

        self.to_insert.iter().enumerate().try_for_each(
            |(index, (chunk_start, map))| -> color_eyre::Result<()> {
                // `pre_export` must have drained all pending inserts.
                if !map.is_empty() {
                    unreachable!()
                }

                let chunk_name = Self::height_to_chunk_name(*chunk_start);

                let path = self
                    .serialization
                    .append_extension(&format!("{}/{}", self.path_all, chunk_name));

                let serialized = self.imported.get(chunk_start).unwrap_or_else(|| {
                    dbg!(&self.path_all, chunk_start, &self.imported);
                    panic!();
                });

                self.serialization.export(&path, serialized)?;

                if index == len - 1 {
                    if let Some(path_last) = self.path_last.as_ref() {
                        self.serialization
                            .export(path_last, serialized.map.last().unwrap())?;
                    }
                }

                Ok(())
            },
        )
    }

    /// Drops all but the `chunks_in_memory` most recent chunks and clears
    /// the (now merged) pending-insert buffer.
    fn post_export(&mut self) {
        self.imported
            .keys()
            .rev()
            .enumerate()
            .filter(|(index, _)| *index + 1 > self.chunks_in_memory)
            .map(|(_, key)| *key)
            .collect_vec()
            .iter()
            .for_each(|key| {
                self.imported.remove(key);
            });

        self.to_insert.clear();
    }
}
/// Object-safe view of a `HeightMap` used to handle heterogeneous height
/// datasets uniformly.
pub trait AnyHeightMap: AnyMap {
    /// First height that was not reorg-safe when the map was opened.
    fn get_initial_first_unsafe_height(&self) -> Option<usize>;

    /// Last height present on disk when the map was opened.
    fn get_initial_last_height(&self) -> Option<usize>;

    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync);

    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap;
}
// Plain accessors: the trait just exposes the startup snapshot fields and
// upcasts to `AnyMap`.
impl<T> AnyHeightMap for HeightMap<T>
where
    T: MapValue,
{
    #[inline(always)]
    fn get_initial_first_unsafe_height(&self) -> Option<usize> {
        self.initial_first_unsafe_height
    }

    #[inline(always)]
    fn get_initial_last_height(&self) -> Option<usize> {
        self.initial_last_height
    }

    fn as_any_map(&self) -> &(dyn AnyMap + Send + Sync) {
        self
    }

    fn as_any_mut_map(&mut self) -> &mut dyn AnyMap {
        self
    }
}
impl<T> HeightMap<T>
where
T: MapValue,
{
/// Sums the values present at every height in `range`; heights without a
/// stored value are simply skipped.
pub fn sum_range(&self, range: &RangeInclusive<usize>) -> T
where
    T: Sum,
{
    range
        .clone()
        .filter_map(|height| self.get(&height))
        .sum()
}
/// Writes the same `constant` at every given height.
pub fn multi_insert_const(&mut self, heights: &[Height], constant: T) {
    for &height in heights {
        self.insert(height, constant);
    }
}
pub fn multi_insert_simple_transform<K, F>(
&mut self,
heights: &[Height],
source: &mut HeightMap<K>,
transform: F,
) where
K: MapValue,
F: Fn(K) -> T,
{
heights.iter().for_each(|height| {
self.insert(*height, transform(source.get_or_import(height)));
});
}
pub fn multi_insert_complex_transform<K, F>(
&mut self,
heights: &[Height],
source: &mut HeightMap<K>,
mut transform: F,
) where
K: MapValue,
F: FnMut((K, &usize)) -> T,
{
heights.iter().for_each(|height| {
self.insert(*height, transform((source.get_or_import(height), height)));
});
}
pub fn multi_insert_add<A, B>(
&mut self,
heights: &[Height],
added: &mut HeightMap<A>,
adder: &mut HeightMap<B>,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Add<Output = T>,
{
heights.iter().for_each(|height| {
self.insert(
*height,
T::lossy_from(added.get_or_import(height))
+ T::lossy_from(adder.get_or_import(height)),
);
});
}
pub fn multi_insert_subtract<A, B>(
&mut self,
heights: &[Height],
subtracted: &mut HeightMap<A>,
subtracter: &mut HeightMap<B>,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Sub<Output = T>,
{
heights.iter().for_each(|height| {
self.insert(
*height,
T::lossy_from(subtracted.get_or_import(height))
- T::lossy_from(subtracter.get_or_import(height)),
);
});
}
pub fn multi_insert_multiply<A, B>(
&mut self,
heights: &[Height],
multiplied: &mut HeightMap<A>,
multiplier: &mut HeightMap<B>,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Mul<Output = T>,
{
heights.iter().for_each(|height| {
self.insert(
*height,
T::lossy_from(multiplied.get_or_import(height))
* T::lossy_from(multiplier.get_or_import(height)),
);
});
}
pub fn multi_insert_divide<A, B>(
&mut self,
heights: &[Height],
divided: &mut HeightMap<A>,
divider: &mut HeightMap<B>,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
self._multi_insert_divide(heights, divided, divider, false)
}
pub fn multi_insert_percentage<A, B>(
&mut self,
heights: &[Height],
divided: &mut HeightMap<A>,
divider: &mut HeightMap<B>,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
self._multi_insert_divide(heights, divided, divider, true)
}
pub fn _multi_insert_divide<A, B>(
&mut self,
heights: &[Height],
divided: &mut HeightMap<A>,
divider: &mut HeightMap<B>,
as_percentage: bool,
) where
A: MapValue,
B: MapValue,
T: LossyFrom<A> + LossyFrom<B>,
T: Div<Output = T> + Mul<Output = T> + From<u8>,
{
let multiplier = T::from(if as_percentage { 100 } else { 1 });
heights.iter().for_each(|height| {
self.insert(
*height,
T::lossy_from(divided.get_or_import(height))
/ T::lossy_from(divider.get_or_import(height))
* multiplier,
);
});
}
pub fn multi_insert_cumulative<K>(&mut self, heights: &[Height], source: &mut HeightMap<K>)
where
K: MapValue,
T: LossyFrom<K>,
T: Add<Output = T> + Sub<Output = T>,
{
self._multi_insert_last_x_sum(heights, source, None)
}
pub fn multi_insert_last_x_sum<K>(
&mut self,
heights: &[Height],
source: &mut HeightMap<K>,
block_time: usize,
) where
K: MapValue,
T: LossyFrom<K>,
T: Add<Output = T> + Sub<Output = T>,
{
self._multi_insert_last_x_sum(heights, source, Some(block_time))
}
fn _multi_insert_last_x_sum<K>(
&mut self,
heights: &[Height],
source: &mut HeightMap<K>,
block_time: Option<usize>,
) where
K: MapValue,
T: LossyFrom<K>,
T: Add<Output = T> + Sub<Output = T>,
{
let mut sum = None;
heights.iter().for_each(|height| {
let to_subtract = block_time
.and_then(|x| {
(height + 1)
.checked_sub(x)
.map(|previous_height| source.get_or_import(&previous_height))
})
.unwrap_or_default();
let previous_sum = sum.unwrap_or_else(|| {
height
.checked_sub(1)
.map(|previous_sum_height| self.get_or_import(&previous_sum_height))
.unwrap_or_default()
});
let last_value = source.get_or_import(height);
sum.replace(previous_sum + T::lossy_from(last_value) - T::lossy_from(to_subtract));
self.insert(*height, sum.unwrap());
});
}
pub fn multi_insert_simple_average<K>(
&mut self,
heights: &[Height],
source: &mut HeightMap<K>,
block_time: usize,
) where
T: Into<f32> + From<f32>,
K: MapValue + Sum,
f32: LossyFrom<K>,
{
if block_time <= 1 {
panic!("Average of 1 or less is not useful");
}
let mut average = None;
heights.iter().for_each(|height| {
let height = *height;
let previous_average: f32 = average
.unwrap_or_else(|| {
height
.checked_sub(block_time)
.and_then(|previous_average_height| self.get(&previous_average_height))
.unwrap_or_default()
})
.into();
let mut last_value = f32::lossy_from(source.get_or_import(&height));
if last_value.is_nan() {
last_value = 0.0;
}
average.replace(
((previous_average * (block_time as f32 - 1.0) + last_value) / block_time as f32)
.into(),
);
self.insert(height, average.unwrap());
});
}
pub fn multi_insert_net_change(
&mut self,
heights: &[Height],
source: &mut HeightMap<T>,
block_time: usize,
) where
T: Sub<Output = T>,
{
heights.iter().for_each(|height| {
let height = *height;
let previous_value = height
.checked_sub(block_time)
.map(|height| source.get_or_import(&height))
.unwrap_or_default();
let last_value = source.get_or_import(&height);
let net = last_value - previous_value;
self.insert(height, net);
});
}
pub fn multi_insert_median(
&mut self,
heights: &[Height],
source: &mut HeightMap<T>,
block_time: Option<usize>,
) where
T: FloatCore,
{
source.multi_insert_percentile(heights, vec![(self, 0.5)], block_time);
}
pub fn multi_insert_percentile(
&mut self,
heights: &[Height],
mut map_and_percentiles: Vec<(&mut HeightMap<T>, f32)>,
block_time: Option<usize>,
) where
T: FloatCore,
{
if block_time.map_or(false, |size| size < 3) {
panic!("Computing a percentile for a size lower than 3 is useless");
}
let mut ordered_vec = None;
let mut sorted_vec = None;
let min_percentile_height = 160_000;
let nan = T::from(f32::NAN).unwrap();
let two = T::from(2.0).unwrap();
if min_percentile_height % HEIGHT_MAP_CHUNK_SIZE != 0 {
panic!("Should be 0");
}
heights.iter().cloned().try_for_each(|height| {
if height < min_percentile_height {
map_and_percentiles.iter_mut().for_each(|(map, _)| {
(*map).insert(height, nan);
});
return ControlFlow::Continue::<()>(());
}
if let Some(start) =
block_time.map_or(Some(min_percentile_height), |size| height.checked_sub(size))
{
if sorted_vec.is_none() {
let mut vec = (start..=height)
.map(|height| self.get_or_import(&height))
.filter(|f| !f.is_nan())
.map(|f| OrderedFloat(f))
.collect_vec();
if block_time.is_some() {
ordered_vec.replace(VecDeque::from(vec.clone()));
}
vec.sort_unstable();
sorted_vec.replace(vec);
} else {
let float_value = self.get_or_import(&height);
if !float_value.is_nan() {
let float_value = OrderedFloat(float_value);
if block_time.is_some() {
let first = ordered_vec.as_mut().unwrap().pop_front().unwrap();
let pos = sorted_vec.as_ref().unwrap().binary_search(&first).unwrap();
sorted_vec.as_mut().unwrap().remove(pos);
ordered_vec.as_mut().unwrap().push_back(float_value);
}
let pos = sorted_vec
.as_ref()
.unwrap()
.binary_search(&float_value)
.unwrap_or_else(|pos| pos);
sorted_vec.as_mut().unwrap().insert(pos, float_value);
}
}
let vec = sorted_vec.as_ref().unwrap();
let len = vec.len();
map_and_percentiles
.iter_mut()
.for_each(|(map, percentile)| {
if !(0.0..=1.0).contains(percentile) {
panic!("The percentile should be between 0.0 and 1.0");
}
let value = {
if len < 2 {
nan
} else {
let index = (len - 1) as f32 * *percentile;
let fract = index.fract();
if fract != 0.0 {
(vec.get(index.ceil() as usize)
.unwrap_or_else(|| {
dbg!(
index,
&self.path_all,
&self.path_all,
&self.to_insert,
block_time,
vec
);
panic!()
})
.0
+ vec
.get(index.floor() as usize)
.unwrap_or_else(|| {
dbg!(
index,
&self.path_all,
&self.path_all,
block_time
);
panic!()
})
.0)
/ two
} else {
vec.get(index as usize).unwrap().0
}
}
};
(*map).insert(height, value);
});
} else {
map_and_percentiles.iter_mut().for_each(|(map, _)| {
(*map).insert(height, nan);
});
}
ControlFlow::Continue(())
});
}
// pub fn insert_cumulative(&mut self, height: Height, source: &HeightMap<T>) -> T
// where
// T: Add<Output = T> + Sub<Output = T>,
// {
// let previous_cum = height
// .checked_sub(1)
// .map(|previous_sum_height| {
// self.get(&previous_sum_height).unwrap_or_else(|| {
// dbg!(previous_sum_height);
// panic!()
// })
// })
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let cum_value = previous_cum + last_value;
// self.insert(height, cum_value);
// cum_value
// }
// pub fn insert_last_x_sum(&mut self, height: Height, source: &HeightMap<T>, x: usize) -> T
// where
// T: Add<Output = T> + Sub<Output = T>,
// {
// let to_subtract = (height + 1)
// .checked_sub(x)
// .map(|previous_height| {
// source.get(&previous_height).unwrap_or_else(|| {
// dbg!(&self.path_all, &source.path_all, previous_height);
// panic!()
// })
// })
// .unwrap_or_default();
// let previous_sum = height
// .checked_sub(1)
// .map(|previous_sum_height| self.get(&previous_sum_height).unwrap())
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let sum = previous_sum + last_value - to_subtract;
// self.insert(height, sum);
// sum
// }
// pub fn insert_simple_average(&mut self, height: Height, source: &HeightMap<T>, block_time: usize)
// where
// T: Into<f32> + From<f32>,
// {
// let to_subtract: f32 = (height + 1)
// .checked_sub(block_time)
// .map(|previous_height| source.get(&previous_height).unwrap())
// .unwrap_or_default()
// .into();
// let previous_average: f32 = height
// .checked_sub(1)
// .map(|previous_average_height| self.get(&previous_average_height).unwrap())
// .unwrap_or_default()
// .into();
// let last_value: f32 = source.get(&height).unwrap().into();
// let sum = previous_average * block_time as f32 - to_subtract + last_value;
// let average: T = (sum / block_time as f32).into();
// self.insert(height, average);
// }
// pub fn insert_net_change(&mut self, height: Height, source: &HeightMap<T>, offset: usize) -> T
// where
// T: Sub<Output = T>,
// {
// let previous_value = height
// .checked_sub(offset)
// .map(|height| {
// source.get(&height).unwrap_or_else(|| {
// dbg!(&self.path_all, &source.path_all, offset);
// panic!();
// })
// })
// .unwrap_or_default();
// let last_value = source.get(&height).unwrap();
// let net = last_value - previous_value;
// self.insert(height, net);
// net
// }
// pub fn insert_median(&mut self, height: Height, source: &HeightMap<T>, size: usize) -> T
// where
// T: FloatCore,
// {
// if size < 3 {
// panic!("Computing a median for a size lower than 3 is useless");
// }
// let median = {
// if let Some(start) = height.checked_sub(size - 1) {
// let even = size % 2 == 0;
// let median_index = size / 2;
// let mut vec = (start..=height)
// .map(|height| {
// OrderedFloat(source.get(&height).unwrap_or_else(|| {
// dbg!(height, &source.path_all, size);
// panic!()
// }))
// })
// .collect_vec();
// vec.sort_unstable();
// if even {
// (vec.get(median_index)
// .unwrap_or_else(|| {
// dbg!(median_index, &self.path_all, &source.path_all, size);
// panic!()
// })
// .0
// + vec.get(median_index - 1).unwrap().0)
// / T::from(2.0).unwrap()
// } else {
// vec.get(median_index).unwrap().0
// }
// } else {
// T::default()
// }
// };
// self.insert(height, median);
// median
// }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,42 @@
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use crate::HEIGHT_MAP_CHUNK_SIZE;
use super::{Height, MapChunkId};
/// Identifier of a height-map chunk: the first height of the chunk
/// (always a multiple of `HEIGHT_MAP_CHUNK_SIZE`).
#[derive(
    Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Allocative, Deref, DerefMut,
)]
pub struct HeightMapChunkId(Height);
impl HeightMapChunkId {
    /// Rounds `height` down to the start of its chunk.
    pub fn new(height: &Height) -> Self {
        Self(Height::new(
            **height / HEIGHT_MAP_CHUNK_SIZE * HEIGHT_MAP_CHUNK_SIZE,
        ))
    }
}
impl MapChunkId for HeightMapChunkId {
    /// File-name form of the chunk: `"start..end"` (end exclusive).
    fn to_name(&self) -> String {
        let start = ***self;
        let end = start + HEIGHT_MAP_CHUNK_SIZE;

        format!("{start}..{end}")
    }

    /// Parses the `"start..end"` form back; panics on malformed names.
    fn from_name(name: &str) -> Self {
        Self(Height::new(
            name.split("..").next().unwrap().parse::<u32>().unwrap(),
        ))
    }

    fn to_usize(self) -> usize {
        **self as usize
    }

    fn from_usize(id: usize) -> Self {
        Self(Height::new(id as u32))
    }
}

View File

@@ -5,7 +5,7 @@ use std::{
use allocative::Allocative;
use super::WAmount;
use super::Amount;
#[derive(Debug)]
pub struct LiquidityClassification {
@@ -18,8 +18,8 @@ impl LiquidityClassification {
/// Following this:
/// https://insights.glassnode.com/bitcoin-liquid-supply/
/// https://www.desmos.com/calculator/dutgni5rtj
pub fn new(sent: WAmount, received: WAmount) -> Self {
if received == WAmount::ZERO {
pub fn new(sent: Amount, received: Amount) -> Self {
if received == Amount::ZERO {
dbg!(sent, received);
panic!()
}
@@ -29,7 +29,7 @@ impl LiquidityClassification {
panic!("Shouldn't be possible");
}
if sent == WAmount::ZERO {
if sent == Amount::ZERO {
0.0
} else {
let liquidity = sent.to_sat() as f64 / received.to_sat() as f64;

View File

@@ -1,14 +1,25 @@
use std::fmt::Debug;
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use crate::datasets::OHLC;
use super::WNaiveDate;
use super::{Date, Height};
pub trait MapValue:
Clone + Copy + Default + Debug + Serialize + DeserializeOwned + Encode + Decode + Sync + Send
Clone
+ Copy
+ Default
+ Debug
+ Serialize
+ DeserializeOwned
+ Encode
+ Decode
+ Sync
+ Send
+ Allocative
{
}
@@ -18,5 +29,6 @@ impl MapValue for u64 {}
impl MapValue for usize {}
impl MapValue for f32 {}
impl MapValue for f64 {}
impl MapValue for WNaiveDate {}
impl MapValue for Date {}
impl MapValue for OHLC {}
impl MapValue for Height {}

View File

@@ -4,25 +4,31 @@ mod address_realized_data;
mod address_size;
mod address_split;
mod address_type;
mod amount;
mod any_map;
mod bi_map;
mod block_data;
mod block_path;
mod config;
mod counter;
mod date;
mod date_data;
mod date_map;
mod date_map_chunk_id;
mod empty_address_data;
mod generic_map;
mod height;
mod height_map;
mod height_map_chunk_id;
mod liquidity;
mod map_value;
mod partial_txout_data;
mod price;
mod sent_data;
mod serialized_btreemap;
mod serialized_vec;
mod tx_data;
mod txout_index;
mod wamount;
mod wnaivedate;
pub use address::*;
pub use address_data::*;
@@ -30,22 +36,28 @@ pub use address_realized_data::*;
pub use address_size::*;
pub use address_split::*;
pub use address_type::*;
pub use amount::*;
pub use any_map::*;
pub use bi_map::*;
pub use block_data::*;
pub use block_path::*;
pub use config::*;
pub use counter::*;
pub use date::*;
pub use date_data::*;
pub use date_map::*;
pub use date_map_chunk_id::*;
pub use empty_address_data::*;
pub use generic_map::*;
pub use height::*;
pub use height_map::*;
pub use height_map_chunk_id::*;
pub use liquidity::*;
pub use map_value::*;
pub use partial_txout_data::*;
pub use price::*;
pub use sent_data::*;
pub use serialized_btreemap::*;
pub use serialized_vec::*;
pub use tx_data::*;
pub use txout_index::*;
pub use wamount::*;
pub use wnaivedate::*;

View File

@@ -1,14 +1,14 @@
use super::{Address, WAmount};
use super::{Address, Amount};
#[derive(Debug)]
pub struct PartialTxoutData {
pub amount: WAmount,
pub amount: Amount,
pub address: Option<Address>,
pub address_index_opt: Option<u32>,
}
impl PartialTxoutData {
pub fn new(address: Option<Address>, amount: WAmount, address_index_opt: Option<u32>) -> Self {
pub fn new(address: Option<Address>, amount: Amount, address_index_opt: Option<u32>) -> Self {
Self {
address,
amount,

View File

@@ -3,7 +3,7 @@ use std::ops::{Add, AddAssign, Div, Mul, Sub, SubAssign};
use allocative::Allocative;
use bincode::{Decode, Encode};
use super::WAmount;
use super::Amount;
#[derive(
Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Encode, Decode, Allocative,
@@ -76,18 +76,18 @@ impl SubAssign for Price {
}
}
impl Mul<WAmount> for Price {
impl Mul<Amount> for Price {
type Output = Self;
fn mul(self, rhs: WAmount) -> Self::Output {
Self((self.to_cent() as f64 * rhs.to_sat() as f64 / WAmount::ONE_BTC_F64).round() as u64)
fn mul(self, rhs: Amount) -> Self::Output {
Self((self.to_cent() as f64 * rhs.to_sat() as f64 / Amount::ONE_BTC_F64).round() as u64)
}
}
impl Div<WAmount> for Price {
impl Div<Amount> for Price {
type Output = Self;
fn div(self, rhs: WAmount) -> Self::Output {
Self((self.to_cent() as f64 * WAmount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64)
fn div(self, rhs: Amount) -> Self::Output {
Self((self.to_cent() as f64 * Amount::ONE_BTC_F64 / rhs.to_sat() as f64).round() as u64)
}
}

View File

@@ -1,13 +1,13 @@
use super::WAmount;
use super::Amount;
#[derive(Default, Debug)]
pub struct SentData {
pub volume: WAmount,
pub volume: Amount,
pub count: u32,
}
impl SentData {
pub fn send(&mut self, amount: WAmount) {
pub fn send(&mut self, amount: Amount) {
self.volume += amount;
self.count += 1;
}

View File

@@ -0,0 +1,51 @@
use std::{collections::BTreeMap, fmt::Debug};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::{MapChunkId, MapKey, MapSerialized, MapValue};
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedBTreeMap<Key, Value>
where
Key: Ord,
{
version: u32,
map: BTreeMap<Key, Value>,
}
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedBTreeMap<Key, Value>
where
Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Value: MapValue,
{
fn new(version: u32) -> Self {
Self {
version,
map: BTreeMap::default(),
}
}
fn get_last_key(&self, _: &ChunkId) -> Option<Key> {
self.map.last_key_value().map(|(k, _)| k.to_owned())
}
fn version(&self) -> u32 {
self.version
}
fn get(&self, key: &Key) -> Option<&Value> {
self.map.get(key)
}
fn last(&self) -> Option<&Value> {
self.map.last_key_value().map(|(_, v)| v)
}
fn extend(&mut self, map: BTreeMap<Key, Value>) {
self.map.extend(map)
}
}

View File

@@ -0,0 +1,59 @@
use std::{cmp::Ordering, collections::BTreeMap, fmt::Debug};
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use super::{MapChunkId, MapKey, MapSerialized, MapValue};
#[derive(Debug, Default, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct SerializedVec<Value> {
version: u32,
map: Vec<Value>,
}
/// `MapSerialized` backing store for dense keys: values are stored
/// positionally, indexed by the key's serialized form.
impl<Key, Value, ChunkId> MapSerialized<Key, Value, ChunkId> for SerializedVec<Value>
where
    Self: Debug + Serialize + DeserializeOwned + Encode + Decode,
    ChunkId: MapChunkId,
    Key: MapKey<ChunkId>,
    Value: MapValue,
{
    /// Creates an empty store tagged with `version`.
    fn new(version: u32) -> Self {
        Self {
            version,
            map: vec![],
        }
    }

    /// Reconstructs the last key from the chunk's base key plus the number
    /// of stored values.
    ///
    /// NOTE(review): unlike `SerializedBTreeMap::get_last_key`, this returns
    /// `Some(base)` even when the vec is empty, and `base + len` rather than
    /// `base + len - 1` for the last stored element — looks like an
    /// off-by-one relative to the BTreeMap impl; confirm against callers
    /// before changing.
    fn get_last_key(&self, chunk_id: &ChunkId) -> Option<Key> {
        Some(Key::from_usize(chunk_id.to_usize() + self.map.len()))
    }

    fn version(&self) -> u32 {
        self.version
    }

    /// Positional lookup by the key's serialized index.
    fn get(&self, serialized_key: &Key) -> Option<&Value> {
        self.map.get(serialized_key.to_usize())
    }

    fn last(&self) -> Option<&Value> {
        self.map.last()
    }

    /// Merges `map` into the dense store. Each key must either overwrite an
    /// existing position or append directly after the current end; a gap
    /// means the caller produced non-contiguous keys, which violates the
    /// dense-storage invariant, so we panic with a diagnostic (previously a
    /// bare `dbg!` + `panic!()`).
    fn extend(&mut self, map: BTreeMap<Key, Value>) {
        map.into_iter().for_each(|(key, value)| {
            let index = key.to_serialized_key().to_usize();
            match self.map.len().cmp(&index) {
                // Key already present: overwrite in place.
                Ordering::Greater => self.map[index] = value,
                // Key is exactly one past the end: append.
                Ordering::Equal => self.map.push(value),
                // Gap between current end and key: broken invariant.
                Ordering::Less => panic!(
                    "SerializedVec::extend: non-contiguous key {index} (current len {})",
                    self.map.len()
                ),
            }
        });
    }
}

View File

@@ -1,76 +0,0 @@
use std::{fmt, str::FromStr};
use allocative::{Allocative, Visitor};
use bincode::{
de::{BorrowDecoder, Decoder},
enc::Encoder,
error::{DecodeError, EncodeError},
BorrowDecode, Decode, Encode,
};
use chrono::{NaiveDate, TimeZone, Utc};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
/// Thin wrapper ("W") around `chrono::NaiveDate` so this crate can
/// implement foreign traits on it (`Encode`, `Decode`, `Allocative`
/// below) despite the orphan rule. `Deref`/`DerefMut` expose the inner
/// `NaiveDate` API directly.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Clone,
    Copy,
    Deref,
    DerefMut,
    Default,
    Serialize,
    Deserialize,
)]
pub struct WNaiveDate(NaiveDate);
impl WNaiveDate {
    /// Wraps an existing `NaiveDate`.
    pub fn wrap(date: NaiveDate) -> Self {
        Self(date)
    }

    /// Builds the UTC calendar date for a unix timestamp in seconds.
    /// Every `u32` timestamp maps to a valid UTC datetime, so the
    /// `unwrap` cannot fire.
    pub fn from_timestamp(timestamp: u32) -> Self {
        let datetime = Utc.timestamp_opt(i64::from(timestamp), 0).unwrap();
        Self::wrap(datetime.date_naive())
    }
}
impl fmt::Display for WNaiveDate {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
impl Encode for WNaiveDate {
    /// Encodes the date as its string form, which the `Decode` impl parses
    /// back with `NaiveDate::from_str`.
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {
        self.to_string().encode(encoder)
    }
}
impl Decode for WNaiveDate {
fn decode<D: Decoder>(decoder: &mut D) -> core::result::Result<Self, DecodeError> {
let str: String = Decode::decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl<'de> BorrowDecode<'de> for WNaiveDate {
fn borrow_decode<D: BorrowDecoder<'de>>(decoder: &mut D) -> Result<Self, DecodeError> {
let str: String = BorrowDecode::borrow_decode(decoder)?;
Ok(Self(NaiveDate::from_str(&str).unwrap()))
}
}
impl Allocative for WNaiveDate {
    fn visit<'a, 'b: 'a>(&self, visitor: &'a mut Visitor<'b>) {
        // The wrapper is plain `Copy` data with no heap allocations, so
        // reporting its inline size is sufficient for memory profiling.
        visitor.visit_simple_sized::<Self>();
    }
}

View File

@@ -2,13 +2,13 @@ use std::{fs, path::PathBuf};
use chrono::Local;
use crate::{databases::Databases, datasets::AllDatasets, states::States};
use crate::{databases::Databases, datasets::AllDatasets, states::States, structs::Height};
pub fn generate_allocation_files(
datasets: &AllDatasets,
databases: &Databases,
states: &States,
last_height: usize,
last_height: Height,
) -> color_eyre::Result<()> {
let mut flamegraph = allocative::FlameGraphBuilder::default();
flamegraph.visit_root(datasets);