brk: first commit

This commit is contained in:
nym21
2025-02-23 01:25:15 +01:00
parent 8c3f519016
commit 19cf34f9d4
266 changed files with 225 additions and 1268 deletions

View File

@@ -0,0 +1,63 @@
use log::info;
use crate::{
parser::{databases::Databases, datasets::Datasets, states::States},
structs::{Config, Date, Exit, Height},
utils::time,
};
/// Everything `export` needs to persist the current processing state to disk.
pub struct ExportedData<'a> {
    pub config: &'a Config,
    /// `None` skips exporting the databases this round (caller passes
    /// `Some` only when the current height is considered safe).
    pub databases: Option<&'a mut Databases>,
    pub datasets: &'a mut Datasets,
    /// Date of the last processed day.
    pub date: Date,
    /// When true, databases are also defragmented while being exported.
    pub defragment: bool,
    /// Used to block process exit while the export is in progress.
    pub exit: Exit,
    /// Last processed height.
    pub height: Height,
    /// `None` skips exporting the states this round.
    pub states: Option<&'a States>,
}
/// Exports datasets — and optionally databases and states — to disk,
/// while holding the exit guard so the process can't be interrupted
/// mid-export. A no-op when an exit is already in progress.
pub fn export(data: ExportedData) -> color_eyre::Result<()> {
    let ExportedData {
        config,
        databases,
        datasets,
        date,
        defragment,
        exit,
        height,
        states,
    } = data;

    // Bail out early if the process is already shutting down.
    if exit.active() {
        info!("Exit in progress, skipping export");
        return Ok(());
    }

    exit.block();

    info!(
        "{}",
        if defragment {
            "Exporting and defragmenting..."
        } else {
            "Exporting..."
        }
    );

    time("Finished export", || -> color_eyre::Result<()> {
        datasets.export(config, height)?;
        if let Some(databases) = databases {
            databases.export(height, date, defragment)?;
        }
        if let Some(states) = states {
            states.export(config)?;
        }
        Ok(())
    })?;

    exit.unblock();

    Ok(())
}

View File

@@ -0,0 +1,228 @@
use std::{collections::BTreeSet, time::Instant};
use brk_parser::bitcoincore_rpc::Client;
use chrono::Datelike;
use export::ExportedData;
use itertools::Itertools;
use log::info;
use parse::ParseData;
use crate::{
parser::{
actions::{export, find_first_inserted_unsafe_height, parse},
databases::Databases,
datasets::{ComputeData, Datasets},
states::{AddressCohortsDurableStates, States, UTXOCohortsDurableStates},
},
structs::{Config, DateData, DisplayInstant, Exit, Height, MapKey, Timestamp},
utils::{generate_allocation_files, time},
};
/// Drives the whole parsing pipeline: streams blocks from the Bitcoin data
/// directory starting at the first height that isn't safely persisted,
/// parses them, and periodically computes datasets and exports everything.
///
/// Loop structure:
/// - `'parsing`: one iteration per chunk of work, ending with a
///   compute + export pass;
/// - `'days`: one iteration per calendar day of blocks;
/// - `'blocks`: one iteration per block; breaks out of `'blocks` at each
///   day boundary, out of `'days` at checkpoints, and out of `'parsing`
///   when the block stream is exhausted.
pub fn iter_blocks(
    config: &Config,
    rpc: &Client,
    approx_block_count: usize,
    exit: Exit,
    databases: &mut Databases,
    datasets: &mut Datasets,
) -> color_eyre::Result<()> {
    let mut states = States::import(config).unwrap_or_default();

    info!("Imported states");

    let first_unsafe_heights = find_first_inserted_unsafe_height(&mut states, databases, datasets, config);

    let mut height = first_unsafe_heights.min();

    info!("Starting parsing at height: {height}");

    // One-block lookahead: lets us know the date of the *next* block, which
    // decides whether the current block is the last one of its day.
    let mut next_block_opt = None;
    let mut blocks_loop_date = None;
    let mut next_date_opt;

    let block_receiver = biter::new(
        &config.path_bitcoindir(),
        Some(height.to_usize()),
        None,
        Client::from(config),
    );

    let mut block_iter = block_receiver.iter();

    'parsing: loop {
        // Heights/dates processed since the last compute + export pass.
        let mut processed_heights = BTreeSet::new();
        let mut processed_dates = BTreeSet::new();

        'days: loop {
            let mut blocks_loop_i = 0;

            if next_block_opt.is_some() {
                blocks_loop_date.take();
            }

            let instant = Instant::now();

            'blocks: loop {
                let current_block_opt = next_block_opt.take().or_else(|| block_iter.next());
                next_block_opt = block_iter.next();

                if let Some((_current_block_height, current_block, _current_block_hash)) = current_block_opt {
                    let timestamp = Timestamp::from(current_block.header.time);

                    let current_block_date = timestamp.to_date();
                    let current_block_height: Height = height + blocks_loop_i;

                    // Sanity check: our locally tracked height must agree
                    // with the height reported by the block stream.
                    if current_block_height.to_usize() != _current_block_height {
                        dbg!(current_block_height, _current_block_height);
                        panic!()
                    }

                    next_date_opt = next_block_opt
                        .as_ref()
                        .map(|(_, next_block, _)| Timestamp::from(next_block.header.time).to_date());

                    // Always run for the first block of the loop
                    if blocks_loop_date.is_none() {
                        blocks_loop_date.replace(current_block_date);

                        // Open a new DateData only when this day wasn't
                        // already started in a previous (resumed) run.
                        if states
                            .date_data_vec
                            .last()
                            .map(|date_data| *date_data.date < *current_block_date)
                            .unwrap_or(true)
                        {
                            states.date_data_vec.push(DateData::new(current_block_date, vec![]));
                        }

                        processed_dates.insert(current_block_date);
                    }

                    let blocks_loop_date = blocks_loop_date.unwrap();

                    if current_block_date > blocks_loop_date {
                        panic!("current block should always have the same date as the current blocks loop");
                    }

                    let is_date_last_block = next_date_opt
                        // Do NOT change `blocks_loop_date` to `current_block_date` !!!
                        .map_or(true, |next_block_date| blocks_loop_date < next_block_date);

                    processed_heights.insert(current_block_height);

                    // Only actually parse heights that aren't already safely inserted.
                    if first_unsafe_heights.inserted <= current_block_height {
                        let compute_addresses =
                            databases.check_if_needs_to_compute_addresses(current_block_height, blocks_loop_date);

                        // Lazily initialize the durable cohort states the
                        // first time they're needed.
                        if states.address_cohorts_durable_states.is_none()
                            && (compute_addresses
                                || datasets
                                    .address
                                    .needs_durable_states(current_block_height, current_block_date))
                        {
                            states.address_cohorts_durable_states = Some(AddressCohortsDurableStates::init(
                                &mut databases.address_index_to_address_data,
                            ));
                        }

                        if states.utxo_cohorts_durable_states.is_none()
                            && datasets
                                .utxo
                                .needs_durable_states(current_block_height, current_block_date)
                        {
                            states.utxo_cohorts_durable_states =
                                Some(UTXOCohortsDurableStates::init(&states.date_data_vec));
                        }

                        parse(ParseData {
                            block: current_block,
                            block_index: blocks_loop_i,
                            compute_addresses,
                            config,
                            databases,
                            datasets,
                            date: blocks_loop_date,
                            first_date_height: height,
                            height: current_block_height,
                            is_date_last_block,
                            rpc,
                            states: &mut states,
                        });
                    }

                    blocks_loop_i += 1;

                    if is_date_last_block {
                        info!(
                            "Processed {current_block_date} ({height} - {current_block_height}) {}",
                            instant.display()
                        );

                        height += blocks_loop_i;

                        // Checkpoint at month boundaries (or at the stream's
                        // end) so a compute + export pass can run.
                        let is_check_point = next_date_opt.as_ref().map_or(true, |date| date.is_first_of_month());

                        if (is_check_point && instant.elapsed().as_secs() >= 1)
                            || height.is_close_to_end(approx_block_count)
                        {
                            break 'days;
                        }

                        break 'blocks;
                    }
                } else {
                    // Stream exhausted. Since a missing next block makes the
                    // previous one "last of its day", this path is only hit
                    // right after an export — nothing is lost by leaving.
                    break 'parsing;
                }
            }
        }

        // Don't remember why -1
        let last_height = height - 1_u32;

        if first_unsafe_heights.computed <= last_height {
            info!("Computing datasets...");

            time("Computed datasets", || {
                let dates = processed_dates.into_iter().collect_vec();
                let heights = processed_heights.into_iter().collect_vec();

                datasets.compute(ComputeData {
                    dates: &dates,
                    heights: &heights,
                })
            });
        }

        if !config.dry_run() {
            let is_safe = height.is_safe(approx_block_count);

            // Defragment only on selected month boundaries, and only when
            // it's safe to also export the databases.
            let defragment = is_safe
                && next_date_opt.is_some_and(|date| {
                    (date.year() >= 2020 && date.is_january() || date.year() >= 2022 && date.is_july())
                        && date.is_first_of_month()
                });

            export(ExportedData {
                config,
                databases: is_safe.then_some(databases),
                datasets,
                date: blocks_loop_date.unwrap(),
                defragment,
                height: last_height,
                states: is_safe.then_some(&states),
                exit: exit.clone(),
            })?;

            if config.record_ram_usage() {
                time("Exporting allocation files", || {
                    generate_allocation_files(datasets, databases, &states, last_height)
                })?;
            }
        } else {
            info!("Skipping export");
        }
    }

    Ok(())
}

View File

@@ -0,0 +1,127 @@
use log::info;
use crate::{
parser::{
databases::Databases,
datasets::{AnyDatasets, Datasets},
states::States,
},
structs::{Config, Height},
};
/// First heights that are NOT yet safely persisted, per kind of work
/// (see `find_first_inserted_unsafe_height`).
#[derive(Default, Debug)]
pub struct Heights {
    // First height whose raw parsed data still needs to be inserted.
    pub inserted: Height,
    // First height whose derived datasets still need to be computed.
    pub computed: Height,
}
impl Heights {
    /// Smaller of the two tracked heights — the overall point from which
    /// processing has to resume.
    pub fn min(&self) -> Height {
        if self.computed < self.inserted {
            self.computed
        } else {
            self.inserted
        }
    }
}
/// Figures out where processing can safely resume.
///
/// Cross-checks the saved states, databases and datasets. When everything
/// is consistent, returns the heights right after the last safely saved
/// ones; on any inconsistency (any early `None` below) it resets the saved
/// data — including address data when the databases are unusable — and
/// returns `Heights::default()` to start over from scratch.
pub fn find_first_inserted_unsafe_height(
    states: &mut States,
    databases: &mut Databases,
    datasets: &mut Datasets,
    config: &Config,
) -> Heights {
    let min_initial_inserted_last_address_height = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_height
        .as_ref()
        .cloned();

    let min_initial_inserted_last_address_date = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_date
        .as_ref()
        .cloned();

    let usable_databases = databases.check_if_usable(
        min_initial_inserted_last_address_height,
        min_initial_inserted_last_address_date,
    );

    states
        .date_data_vec
        .iter()
        .last()
        .map(|date_data| date_data.date)
        .and_then(|last_safe_date| {
            if !usable_databases {
                info!("Unusable databases");
                return None;
            }

            let datasets_min_initial_states = datasets.get_min_initial_states().to_owned();

            let min_datasets_inserted_last_height = datasets_min_initial_states.inserted.last_height;
            let min_datasets_inserted_last_date = datasets_min_initial_states.inserted.last_date;

            info!("min_datasets_inserted_last_height: {:?}", min_datasets_inserted_last_height);
            info!("min_datasets_inserted_last_date: {:?}", min_datasets_inserted_last_date);

            // The datasets must be at least as fresh as the saved states,
            // otherwise the states can't be trusted as a resume point.
            let inserted_last_date_is_older_than_saved_state = min_datasets_inserted_last_date
                .map_or(true, |min_datasets_last_date| min_datasets_last_date < last_safe_date);

            if inserted_last_date_is_older_than_saved_state {
                // dbg!(min_datasets_inserted_last_date , *last_safe_date);
                return None;
            }

            datasets
                .date_metadata
                .last_height
                .get_or_import(&last_safe_date)
                .and_then(|last_safe_height| {
                    // Same freshness check, but on the height axis.
                    let inserted_heights_and_dates_are_out_of_sync = min_datasets_inserted_last_height
                        .map_or(true, |min_datasets_inserted_last_height| {
                            min_datasets_inserted_last_height < last_safe_height
                        });

                    if inserted_heights_and_dates_are_out_of_sync {
                        info!("last_safe_height ({last_safe_height}) > min_datasets_height ({min_datasets_inserted_last_height:?})");

                        None
                    } else {
                        // Resume computing right after the last fully
                        // computed date; zero when nothing was computed yet.
                        let computed = datasets_min_initial_states
                            .computed
                            .last_date
                            .and_then(|last_date| {
                                datasets
                                    .date_metadata
                                    .last_height
                                    .get_or_import(&last_date)
                                    .and_then(|last_date_height| {
                                        if datasets_min_initial_states.computed.last_height.map_or(true, |last_height| {
                                            last_height < last_date_height
                                        }) {
                                            None
                                        } else {
                                            Some(last_date_height + 1_u32)
                                        }
                                    })
                            })
                            .unwrap_or_default();

                        Some(Heights {
                            inserted: last_safe_height + 1_u32,
                            computed,
                        })
                    }
                })
        })
        .unwrap_or_else(|| {
            info!("Starting over...");

            // Also wipe address data when the databases themselves are
            // unusable or the address datasets have no saved state at all.
            let include_addresses = !usable_databases
                || min_initial_inserted_last_address_date.is_none()
                || min_initial_inserted_last_address_height.is_none();

            states.reset(config, include_addresses);

            databases.reset(include_addresses);

            Heights::default()
        })
}

View File

@@ -0,0 +1,9 @@
mod export;
mod iter_blocks;
mod min_height;
mod parse;
pub use export::*;
pub use iter_blocks::*;
pub use min_height::*;
pub use parse::*;

View File

@@ -0,0 +1,945 @@
use std::{collections::BTreeMap, ops::ControlFlow, thread};
use brk_parser::{
bitcoin::{Block, Txid},
bitcoincore_rpc::RpcApi,
};
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
parser::{
databases::{
AddressIndexToAddressData, AddressIndexToEmptyAddressData, AddressToAddressIndex, Databases, TxidToTxData,
TxoutIndexToAddressIndex, TxoutIndexToAmount,
},
datasets::{Datasets, InsertData},
states::{
AddressCohortsInputStates, AddressCohortsOutputStates, AddressCohortsRealizedStates, States,
UTXOCohortsOneShotStates, UTXOCohortsSentStates,
},
},
structs::{
Address, AddressData, AddressRealizedData, Amount, BlockData, BlockPath, Config, Counter, Date,
EmptyAddressData, Height, PartialTxoutData, Price, SentData, Timestamp, TxData, TxoutIndex,
},
};
/// Everything `parse` needs to process a single block.
pub struct ParseData<'a> {
    // pub bitcoin_cli: &'a BitcoinCli,
    /// Raw block to process.
    pub block: Block,
    /// Index of this block within the day currently being processed.
    pub block_index: usize,
    pub config: &'a Config,
    /// Whether per-address data and cohort states are computed for this block.
    pub compute_addresses: bool,
    pub databases: &'a mut Databases,
    pub datasets: &'a mut Datasets,
    /// Date of the day this block belongs to.
    pub date: Date,
    /// Height of the first block of that day.
    pub first_date_height: Height,
    pub height: Height,
    /// True when this is the last block of its day.
    pub is_date_last_block: bool,
    /// RPC client — used as a fallback to check whether a missing input
    /// refers to a 0-sat output.
    pub rpc: &'a biter::bitcoincore_rpc::Client,
    pub states: &'a mut States,
}
/// Parses one block: updates the chain states, databases and datasets with
/// everything extracted from its transactions.
///
/// Pipeline:
/// 1. pre-process all outputs and all inputs of the block on two scoped
///    threads;
/// 2. walk the transactions sequentially, applying receives/sends to the
///    per-block and per-address states;
/// 3. compute the cohort states on scoped threads;
/// 4. hand everything to `datasets.insert`.
pub fn parse(
    ParseData {
        block,
        block_index,
        config,
        compute_addresses,
        databases,
        datasets,
        date,
        first_date_height,
        height,
        is_date_last_block,
        rpc,
        states,
    }: ParseData,
) {
    // log(&format!("{height}"));

    let timestamp = Timestamp::from(block.header.time);

    // If false, expect that the code is flawless
    // or create a 0 value txid database
    let enable_check_if_txout_value_is_zero_in_db: bool = true;

    // Index of the current day (the last pushed DateData).
    let date_index = states.date_data_vec.len() - 1;

    let previous_timestamp = height
        .checked_sub(1)
        .map(Height::new)
        .and_then(|height| datasets.block_metadata.timestamp.get_or_import(&height));

    let block_price = Price::from_dollar(
        datasets
            .price
            .get_height_ohlc(height, timestamp, previous_timestamp, config)
            .unwrap_or_else(|_| panic!("Expect {height} to have a price"))
            .close as f64,
    );

    let date_price = Price::from_dollar(
        datasets
            .price
            .get_date_ohlc(date)
            .unwrap_or_else(|_| panic!("Expect {date} to have a price"))
            .close as f64,
    );

    let difficulty = block.header.difficulty_float();
    let block_size = block.total_size();
    let block_weight = block.weight().to_wu();
    let block_vbytes = block.weight().to_vbytes_floor();

    // Clamp to zero when timestamps go backwards (allowed in Bitcoin).
    let block_interval = previous_timestamp.map_or(Timestamp::ZERO, |previous_timestamp| {
        if previous_timestamp >= timestamp {
            Timestamp::ZERO
        } else {
            timestamp - previous_timestamp
        }
    });

    states
        .date_data_vec
        .last_mut()
        .unwrap()
        .blocks
        .push(BlockData::new(height, block_price, timestamp));

    // Amounts sent this block, grouped by the block the coins came from.
    let mut block_path_to_sent_data: BTreeMap<BlockPath, SentData> = BTreeMap::default();
    // let mut received_data: ReceivedData = ReceivedData::default();

    let mut address_index_to_address_realized_data: BTreeMap<u32, AddressRealizedData> = BTreeMap::default();

    // Per-block aggregates, handed to `datasets.insert` at the end.
    let mut coinbase = Amount::ZERO;
    let mut satblocks_destroyed = Amount::ZERO;
    let mut satdays_destroyed = Amount::ZERO;
    let mut amount_sent = Amount::ZERO;
    let mut transaction_count = 0;
    let mut fees = vec![];
    let mut fees_total = Amount::ZERO;

    // Pre-process the whole block's outputs and inputs in parallel.
    let (
        TxoutsParsingResults {
            op_returns: _op_returns,
            mut partial_txout_data_vec,
            provably_unspendable: _provably_unspendable,
        },
        (mut txid_to_tx_data, mut txout_index_to_amount_and_address_index),
    ) = thread::scope(|scope| {
        let output_handle = scope.spawn(|| {
            let mut txouts_parsing_results = prepare_outputs(
                &block,
                compute_addresses,
                &mut states.address_counters.multisig_addresses,
                &mut states.address_counters.op_return_addresses,
                &mut states.address_counters.push_only_addresses,
                &mut states.address_counters.unknown_addresses,
                &mut states.address_counters.empty_addresses,
                &mut databases.address_to_address_index,
            );

            // Reverse to get in order via pop later
            txouts_parsing_results.partial_txout_data_vec.reverse();

            txouts_parsing_results
        });

        let input_handle = scope.spawn(|| {
            prepare_inputs(
                &block,
                &mut databases.txid_to_tx_data,
                &mut databases.txout_index_to_amount,
                &mut databases.txout_index_to_address_index,
                compute_addresses,
            )
        });

        (output_handle.join().unwrap(), input_handle.join().unwrap())
    });

    // Prefetched address data for every address touched by this block.
    let mut address_index_to_address_data = compute_addresses.then(|| {
        compute_address_index_to_address_data(
            &mut databases.address_index_to_address_data,
            &mut databases.address_index_to_empty_address_data,
            &partial_txout_data_vec,
            &txout_index_to_amount_and_address_index,
            compute_addresses,
        )
    });

    // Sequential walk over the block's transactions.
    block.txdata.iter().enumerate().try_for_each(|(block_tx_index, tx)| {
        let txid = tx.compute_txid();
        let tx_index = databases.txid_to_tx_data.metadata.serial as u32;

        transaction_count += 1;

        // --
        // outputs
        // ---

        let mut utxos = BTreeMap::new();
        let mut spendable_amount = Amount::ZERO;
        let is_coinbase = tx.is_coinbase();

        // Coinbase is always — and only — the first transaction.
        if is_coinbase != (block_tx_index == 0) {
            unreachable!();
        }

        let mut inputs_sum = Amount::ZERO;
        let mut outputs_sum = Amount::ZERO;

        let last_block = states.date_data_vec.last_mut_block().unwrap();

        // Before `input` to cover outputs being used in the same block as inputs
        tx.output
            .iter()
            .enumerate()
            .filter_map(|(vout, tx_out)| {
                if vout > (u16::MAX as usize) {
                    panic!("vout can indeed be bigger than u16::MAX !");
                }

                let amount = Amount::wrap(tx_out.value);

                if is_coinbase {
                    coinbase += amount;
                } else {
                    outputs_sum += amount;
                }

                // `pop` yields outputs in block order thanks to the earlier `reverse`.
                partial_txout_data_vec
                    .pop()
                    .unwrap()
                    // None if not worth parsing (empty/op_return/...)
                    .map(|partial_txout_data| (vout, partial_txout_data))
            })
            .for_each(|(vout, partial_txout_data)| {
                let vout = vout as u16;

                let txout_index = TxoutIndex::new(tx_index, vout);

                let PartialTxoutData {
                    address,
                    address_index_opt,
                    amount,
                } = partial_txout_data;

                spendable_amount += amount;
                last_block.receive(amount);
                utxos.insert(vout, amount);
                databases.txout_index_to_amount.insert_to_ram(txout_index, amount);

                if compute_addresses {
                    let address = address.unwrap();
                    let address_index_to_address_data = address_index_to_address_data.as_mut().unwrap();

                    // Either the address is known (prefetched index or cached
                    // put) or a brand new index is allocated for it.
                    let (address_data, address_index) = {
                        if let Some(address_index) = address_index_opt
                            .or_else(|| databases.address_to_address_index.get_from_ram(&address).cloned())
                        {
                            let address_data = address_index_to_address_data.get_mut(&address_index).unwrap();
                            (address_data, address_index)
                        } else {
                            let address_index = databases.address_to_address_index.metadata.serial as u32;

                            let address_type = address.to_type();

                            if let Some(previous) = databases.address_to_address_index.insert(address, address_index) {
                                dbg!(previous);
                                panic!("address #{address_index} shouldn't be present during put");
                            }

                            // Checked new
                            let address_data = address_index_to_address_data
                                .entry(address_index)
                                .and_modify(|_| {
                                    panic!("Shouldn't exist");
                                })
                                // Will always insert, it's to avoid insert + get
                                .or_insert(AddressData::new(address_type));

                            (address_data, address_index)
                        }
                    };

                    // MUST be before received !
                    let address_realized_data = address_index_to_address_realized_data
                        .entry(address_index)
                        .or_insert_with(|| AddressRealizedData::default(address_data));

                    address_data.receive(amount, block_price);

                    address_realized_data.receive(amount);

                    databases
                        .txout_index_to_address_index
                        .insert_to_ram(txout_index, address_index);
                }
            });

        if !utxos.is_empty() {
            databases.txid_to_tx_data.insert(
                &txid,
                TxData::new(
                    tx_index,
                    BlockPath::new(date_index as u16, block_index as u16),
                    utxos.len() as u16,
                ),
            );
        }

        // ---
        // inputs
        // ---

        if !is_coinbase {
            tx.input.iter().try_for_each(|txin| {
                let outpoint = txin.previous_output;
                let input_txid = outpoint.txid;
                let input_vout = outpoint.vout;

                let remove_tx_data_from_cached_puts = {
                    let mut is_tx_data_from_cached_puts = false;

                    // Look first in the prefetched map, then in the
                    // database's not-yet-flushed (cached) puts.
                    let input_tx_data = txid_to_tx_data.get_mut(&input_txid).unwrap().as_mut().or_else(|| {
                        is_tx_data_from_cached_puts = true;
                        databases.txid_to_tx_data.get_mut_from_ram(&input_txid)
                    });

                    // Can be none because 0 sats inputs happen
                    // https://mempool.space/tx/f329e55c2de9b821356e6f2c4bba923ea7030cad61120f5ced5d4429f5c86fda#vin=27
                    if input_tx_data.is_none() {
                        if !enable_check_if_txout_value_is_zero_in_db
                            || rpc
                                .get_raw_transaction(&input_txid, None)
                                .unwrap()
                                .output
                                .get(input_vout as usize)
                                .unwrap()
                                .value
                                .to_sat()
                                == 0
                        {
                            return ControlFlow::Continue::<()>(());
                        }

                        dbg!((input_txid, txid, tx_index, input_vout));
                        panic!("Txid to be in txid_to_tx_data");
                    }

                    let input_tx_data = input_tx_data.unwrap();

                    let input_tx_index = input_tx_data.index;

                    let input_vout = input_vout as u16;

                    let input_txout_index = TxoutIndex::new(input_tx_index, input_vout);

                    // if input_tx_index == 2516 || input_tx_index == 2490 {
                    // dbg!(input_tx_index, &input_tx_data.utxos);
                    // }

                    // let input_amount = input_tx_data.utxos.remove(&input_vout);

                    let input_amount_and_address_index = databases
                        .txout_index_to_amount
                        .remove(&input_txout_index)
                        .map(|amount| {
                            (
                                amount,
                                databases.txout_index_to_address_index.remove(&input_txout_index),
                            )
                        }) // Remove from cached puts
                        .or_else(|| txout_index_to_amount_and_address_index.remove(&input_txout_index));

                    if input_amount_and_address_index.is_none() {
                        // Same 0-sat fallback as above, for the amount lookup.
                        if !enable_check_if_txout_value_is_zero_in_db
                            || rpc
                                .get_raw_transaction(&input_txid, None)
                                .unwrap()
                                .output
                                .get(input_vout as usize)
                                .unwrap()
                                .value
                                .to_sat()
                                == 0
                        {
                            return ControlFlow::Continue::<()>(());
                        }

                        dbg!((input_txid, tx_index, input_tx_index, input_vout, input_tx_data, txid,));
                        panic!("Txout index to be in txout_index_to_txout_value");
                    }

                    input_tx_data.utxos -= 1;

                    let (input_amount, input_address_index) = input_amount_and_address_index.unwrap();

                    let input_block_path = input_tx_data.block_path;

                    let BlockPath {
                        date_index: input_date_index,
                        block_index: input_block_index,
                    } = input_block_path;

                    let input_date_data =
                        states
                            .date_data_vec
                            .get_mut(input_date_index as usize)
                            .unwrap_or_else(|| {
                                dbg!(height, &input_txid, input_block_path, input_date_index);
                                panic!()
                            });

                    let input_block_data = input_date_data
                        .blocks
                        .get_mut(input_block_index as usize)
                        .unwrap_or_else(|| {
                            dbg!(
                                height,
                                &input_txid,
                                input_block_path,
                                input_date_index,
                                input_block_index,
                            );
                            panic!()
                        });

                    input_block_data.send(input_amount);

                    inputs_sum += input_amount;

                    block_path_to_sent_data
                        .entry(input_block_path)
                        .or_default()
                        .send(input_amount);

                    // Coin-age style aggregates: amount weighted by how many
                    // blocks / days the coins sat unspent.
                    satblocks_destroyed += input_amount * (height - input_block_data.height);

                    satdays_destroyed +=
                        input_amount * date.signed_duration_since(*input_date_data.date).num_days() as u64;

                    if compute_addresses {
                        let input_address_index = input_address_index.unwrap_or_else(|| {
                            dbg!(
                                height,
                                input_amount,
                                &input_tx_data,
                                input_address_index,
                                input_txout_index,
                                txid,
                                input_txid,
                                input_vout
                            );
                            panic!()
                        });

                        let address_index_to_address_data = address_index_to_address_data.as_mut().unwrap();

                        let input_address_data = address_index_to_address_data
                            .get_mut(&input_address_index)
                            .unwrap_or_else(|| {
                                dbg!(input_address_index, input_txout_index, input_txid, input_vout);
                                panic!();
                            });

                        let input_address_realized_data = address_index_to_address_realized_data
                            .entry(input_address_index)
                            .or_insert_with(|| AddressRealizedData::default(input_address_data));

                        let previous_price = input_block_data.price;

                        // MUST be after `or_insert_with`
                        input_address_data
                            .send(input_amount, previous_price)
                            .unwrap_or_else(|_| {
                                dbg!(
                                    input_address_index,
                                    txid,
                                    input_txid,
                                    input_amount,
                                    tx_index,
                                    input_tx_index,
                                    input_vout,
                                    &input_address_data
                                );
                                panic!()
                            });

                        input_address_realized_data.send(
                            input_amount,
                            block_price,
                            previous_price,
                            timestamp,
                            input_block_data.timestamp,
                        );
                    };

                    is_tx_data_from_cached_puts && input_tx_data.is_empty()
                };

                if remove_tx_data_from_cached_puts {
                    // Pre remove tx_datas that are empty and weren't yet added to the database to avoid having it was in there or not (and thus avoid useless operations)
                    databases.txid_to_tx_data.remove_from_ram(&input_txid)
                }

                ControlFlow::Continue(())
            })?;
        }

        amount_sent += inputs_sum;
        let fee = inputs_sum - outputs_sum;
        fees_total += fee;
        fees.push(fee);

        ControlFlow::Continue(())
    });

    // Every prefetched output must have been consumed by the walk above.
    if !partial_txout_data_vec.is_empty() {
        panic!("partial_txout_data_vec should've been fully consumed");
    }

    // Flush the surviving prefetched tx_datas back to the database.
    txid_to_tx_data.into_iter().for_each(|(txid, tx_data)| {
        if let Some(tx_data) = tx_data {
            if tx_data.is_empty() {
                databases.txid_to_tx_data.remove_later_from_disk(txid);
            } else {
                databases.txid_to_tx_data.update(txid, tx_data);
            }
        }
    });

    let mut utxo_cohorts_sent_states = UTXOCohortsSentStates::default();
    let mut utxo_cohorts_one_shot_states = UTXOCohortsOneShotStates::default();
    // let mut utxo_cohorts_received_states = UTXOCohortsReceivedStates::default();
    let mut address_cohorts_input_states = None;
    let mut address_cohorts_one_shot_states = None;
    let mut address_cohorts_output_states = None;
    let mut address_cohorts_realized_states = None;

    // log("Starting heavy work...");

    // Cohort-state computations run on scoped threads; each closure touches
    // disjoint fields of `states` so the captures don't conflict.
    thread::scope(|scope| {
        scope.spawn(|| {
            let previous_last_block_data = states.date_data_vec.second_last_block();

            if datasets.utxo.needs_durable_states(height, date) {
                if let Some(previous_last_block_data) = previous_last_block_data {
                    block_path_to_sent_data.iter().for_each(|(block_path, sent_data)| {
                        let block_data = states.date_data_vec.get_block_data(block_path).unwrap();

                        if block_data.height != height {
                            states.utxo_cohorts_durable_states.as_mut().unwrap().subtract_moved(
                                block_data,
                                sent_data,
                                previous_last_block_data,
                            );
                        }
                    });
                }

                let last_block_data = states.date_data_vec.last_block().unwrap();

                if last_block_data.height != height {
                    unreachable!()
                }

                states
                    .date_data_vec
                    .iter()
                    .flat_map(|date_data| &date_data.blocks)
                    .for_each(|block_data| {
                        states
                            .utxo_cohorts_durable_states
                            .as_mut()
                            .unwrap()
                            // NOTE(review): "udpate" typo comes from the
                            // method's definition elsewhere.
                            .udpate_age_if_needed(block_data, last_block_data, previous_last_block_data);
                    });
            }

            if datasets.utxo.needs_one_shot_states(height, date) {
                utxo_cohorts_one_shot_states = states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .compute_one_shot_states(block_price, if is_date_last_block { Some(date_price) } else { None });
            }
        });

        // scope.spawn(|| {
        // utxo_cohorts_received_states
        // .compute(&states.date_data_vec, block_path_to_received_data);
        // });

        if datasets.utxo.needs_sent_states(height, date) {
            scope.spawn(|| {
                utxo_cohorts_sent_states.compute(
                    &states.date_data_vec,
                    &block_path_to_sent_data,
                    block_price,
                    timestamp,
                );
            });
        }

        if compute_addresses {
            scope.spawn(|| {
                let address_index_to_address_data = address_index_to_address_data.as_ref().unwrap();

                // TODO: Only compute if needed
                address_cohorts_realized_states.replace(AddressCohortsRealizedStates::default());

                // TODO: Only compute if needed
                address_cohorts_input_states.replace(AddressCohortsInputStates::default());

                // TODO: Only compute if needed
                address_cohorts_output_states.replace(AddressCohortsOutputStates::default());

                address_index_to_address_realized_data
                    .iter()
                    .for_each(|(address_index, address_realized_data)| {
                        let current_address_data = address_index_to_address_data.get(address_index).unwrap();

                        states
                            .address_cohorts_durable_states
                            .as_mut()
                            .unwrap()
                            .iterate(address_realized_data, current_address_data)
                            .unwrap_or_else(|report| {
                                dbg!(report.to_string(), address_index);
                                panic!();
                            });

                        if !address_realized_data.initial_address_data.is_empty() {
                            // Realized == previous amount
                            // If a whale sent all its sats to another address at a loss, it's the whale that realized the loss not the now empty adress
                            let liquidity_classification = address_realized_data
                                .initial_address_data
                                .compute_liquidity_classification();

                            address_cohorts_realized_states
                                .as_mut()
                                .unwrap()
                                .iterate_realized(address_realized_data, &liquidity_classification)
                                .unwrap();

                            address_cohorts_input_states
                                .as_mut()
                                .unwrap()
                                .iterate_input(address_realized_data, &liquidity_classification)
                                .unwrap();
                        }

                        address_cohorts_output_states
                            .as_mut()
                            .unwrap()
                            .iterate_output(
                                address_realized_data,
                                &current_address_data.compute_liquidity_classification(),
                            )
                            .unwrap();
                    });

                address_cohorts_one_shot_states.replace(
                    states
                        .address_cohorts_durable_states
                        .as_ref()
                        .unwrap()
                        .compute_one_shot_states(block_price, if is_date_last_block { Some(date_price) } else { None }),
                );
            });
        }
    });

    // Re-insert the prefetched address data into the proper database,
    // depending on whether the address ended up empty.
    if compute_addresses {
        address_index_to_address_data
            .unwrap()
            .into_iter()
            .for_each(|(address_index, address_data)| {
                if address_data.is_empty() {
                    databases
                        .address_index_to_empty_address_data
                        .insert_to_ram(address_index, EmptyAddressData::from_non_empty(&address_data));
                } else {
                    databases
                        .address_index_to_address_data
                        .insert_to_ram(address_index, address_data);
                }
            })
    }

    datasets.insert(InsertData {
        address_cohorts_input_states: &address_cohorts_input_states,
        block_size,
        block_vbytes,
        block_weight,
        address_cohorts_one_shot_states: &address_cohorts_one_shot_states,
        address_cohorts_realized_states: &address_cohorts_realized_states,
        block_interval,
        block_price,
        coinbase,
        compute_addresses,
        databases,
        date,
        date_blocks_range: &(*first_date_height..=*height),
        date_first_height: first_date_height,
        difficulty,
        fees: &fees,
        height,
        is_date_last_block,
        satblocks_destroyed,
        satdays_destroyed,
        amount_sent,
        states,
        timestamp,
        transaction_count,
        utxo_cohorts_one_shot_states: &utxo_cohorts_one_shot_states,
        utxo_cohorts_sent_states: &utxo_cohorts_sent_states,
    });
}
/// Per-block aggregates produced by `prepare_outputs`.
pub struct TxoutsParsingResults {
    // One entry per txout of the block, in block order;
    // `None` when the output isn't worth tracking (zero value).
    partial_txout_data_vec: Vec<Option<PartialTxoutData>>,
    // Total amount locked in provably unspendable outputs (incl. OP_RETURN).
    provably_unspendable: Amount,
    // Number of OP_RETURN outputs in the block.
    op_returns: usize,
}
/// Pre-processes every output of the block, in block order.
///
/// For each txout it records the amount (skipping zero-value outputs with a
/// `None` placeholder), tallies OP_RETURN / provably-unspendable amounts,
/// and — when addresses are computed — extracts the address, opens its
/// database shard, then resolves the known address indexes in parallel.
#[allow(clippy::too_many_arguments)]
fn prepare_outputs(
    block: &Block,
    compute_addresses: bool,
    multisig_addresses: &mut Counter,
    op_return_addresses: &mut Counter,
    push_only_addresses: &mut Counter,
    unknown_addresses: &mut Counter,
    empty_addresses: &mut Counter,
    address_to_address_index: &mut AddressToAddressIndex,
) -> TxoutsParsingResults {
    let mut provably_unspendable = Amount::ZERO;
    let mut op_returns = 0;
    let mut partial_txout_data_vec = Vec::new();

    for txout in block.txdata.iter().flat_map(|tx| &tx.output) {
        let script = &txout.script_pubkey;
        let amount = Amount::wrap(txout.value);

        // 0 sats outputs are possible and allowed !
        // https://mempool.space/tx/2f2442f68e38b980a6c4cec21e71851b0d8a5847d85208331a27321a9967bbd6
        // https://bitcoin.stackexchange.com/questions/104937/transaction-outputs-with-value-0
        if amount == Amount::ZERO {
            partial_txout_data_vec.push(None);
            continue;
        }

        // Op Return
        // https://mempool.space/tx/139c004f477101c468767983536caaeef568613fab9c2ed9237521f5ff530afd
        // Provably unspendable https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
        #[allow(deprecated)]
        if script.is_op_return() {
            // TODO: Count fee paid to write said OP_RETURN, beware of coinbase transactions
            // For coinbase transactions, count miners
            op_returns += 1;
            provably_unspendable += amount;
            // return None;
            // https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
        } else if script.is_provably_unspendable() {
            provably_unspendable += amount;
            // return None;
        }

        let address_opt = compute_addresses.then(|| {
            let extracted = Address::from(
                txout,
                multisig_addresses,
                op_return_addresses,
                push_only_addresses,
                unknown_addresses,
                empty_addresses,
            );

            address_to_address_index.open_db(&extracted);

            extracted
        });

        partial_txout_data_vec.push(Some(PartialTxoutData::new(address_opt, amount, None)));
    }

    // Second pass, parallel: resolve each address to its index if it
    // already has one.
    if compute_addresses {
        partial_txout_data_vec.par_iter_mut().for_each(|slot| {
            if let Some(partial) = slot {
                partial.address_index_opt = address_to_address_index
                    .unsafe_get(partial.address.as_ref().unwrap())
                    .cloned();
            }
        });
    }

    TxoutsParsingResults {
        partial_txout_data_vec,
        provably_unspendable,
        op_returns,
    }
}
/// Prefetches, for every input of the block (coinbase excluded):
/// - the `TxData` of the spent transaction (keyed by txid), and
/// - the amount (plus optionally address index) of the spent output
///   (keyed by txout index).
///
/// Database reads happen in parallel; removal of the fetched entries is
/// deliberately left to the caller.
#[allow(clippy::type_complexity)]
fn prepare_inputs<'a>(
    block: &'a Block,
    txid_to_tx_data_db: &mut TxidToTxData,
    txout_index_to_amount_db: &mut TxoutIndexToAmount,
    txout_index_to_address_index_db: &mut TxoutIndexToAddressIndex,
    compute_addresses: bool,
) -> (
    BTreeMap<&'a Txid, Option<TxData>>,
    BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
) {
    // Collect the distinct spent txids and open their database shards.
    let mut txid_to_tx_data: BTreeMap<&Txid, Option<TxData>> = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .fold(BTreeMap::default(), |mut tree, tx_in| {
            let txid = &tx_in.previous_output.txid;
            txid_to_tx_data_db.open_db(txid);
            tree.entry(txid).or_default();
            tree
        });

    // Fetch in parallel; the collected Vec is in map order, so popping it
    // while walking the values in reverse re-aligns result with entry.
    let mut tx_datas = txid_to_tx_data
        .par_iter()
        .map(|(txid, _)| txid_to_tx_data_db.get(txid))
        .collect::<Vec<_>>();

    txid_to_tx_data.values_mut().rev().for_each(|tx_data_opt| {
        *tx_data_opt = tx_datas.pop().unwrap().cloned();
    });

    let txout_index_to_amount_and_address_index = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .flat_map(|tx_in| {
            let txid = &tx_in.previous_output.txid;

            // Only inputs whose transaction was found above can be resolved
            // to a txout index.
            if let Some(Some(tx_data)) = txid_to_tx_data.get(txid) {
                let txout_index = TxoutIndex::new(tx_data.index, tx_in.previous_output.vout as u16);

                txout_index_to_amount_db.open_db(&txout_index);

                if compute_addresses {
                    txout_index_to_address_index_db.open_db(&txout_index);
                }

                Some(txout_index)
            } else {
                None
            }
        })
        .collect_vec()
        .into_par_iter()
        .flat_map(|txout_index| {
            txout_index_to_amount_db
                .unsafe_get(&txout_index)
                // Will be None if value of utxo is 0
                // https://mempool.space/tx/9d8a0d851c9fb2cdf1c6d9406ce97e19e6911ae3503ab2dd5f38640bacdac996
                // which is used later as input
                .map(|amount| {
                    let address_index =
                        compute_addresses.then(|| *txout_index_to_address_index_db.unsafe_get(&txout_index).unwrap());

                    (txout_index, (*amount, address_index))
                })
        })
        .collect::<BTreeMap<_, _>>();

    // No need to call remove, it's being called later in the parse function
    // To more easily support removing cached puts
    (txid_to_tx_data, txout_index_to_amount_and_address_index)
}
/// Prefetches the `AddressData` of every address touched by the block —
/// receivers (from the prepared outputs) and senders (from the prepared
/// inputs) — into one in-memory map, then removes the fetched entries from
/// the databases so the caller can re-insert the updated versions later.
fn compute_address_index_to_address_data(
    address_index_to_address_data_db: &mut AddressIndexToAddressData,
    address_index_to_empty_address_data_db: &mut AddressIndexToEmptyAddressData,
    partial_txout_data_vec: &[Option<PartialTxoutData>],
    txout_index_to_amount_and_address_index: &BTreeMap<TxoutIndex, (Amount, Option<u32>)>,
    compute_addresses: bool,
) -> BTreeMap<u32, AddressData> {
    if !compute_addresses {
        return BTreeMap::default();
    }

    // Gather every involved address index and open its database shard(s);
    // the bool marks whether the "empty address" db might hold it too.
    let mut address_index_to_address_data = partial_txout_data_vec
        .iter()
        .flatten()
        .flat_map(|partial_txout_data| partial_txout_data.address_index_opt)
        .map(|address_index| (address_index, true))
        .chain(
            txout_index_to_amount_and_address_index
                .values()
                .map(|(_, address_index)| (*address_index.as_ref().unwrap(), false)), // False because we assume non zero inputs values
        )
        .map(|(address_index, open_empty)| {
            address_index_to_address_data_db.open_db(&address_index);
            if open_empty {
                address_index_to_empty_address_data_db.open_db(&address_index);
            }
            (address_index, AddressData::default())
        })
        .collect::<BTreeMap<_, _>>();

    // Fill each placeholder, checking in order: non-empty ram cache,
    // empty ram cache, non-empty disk, empty disk (must exist somewhere).
    address_index_to_address_data
        .par_iter_mut()
        .for_each(|(address_index, address_data)| {
            if let Some(_address_data) = address_index_to_address_data_db.get_from_ram(address_index) {
                _address_data.clone_into(address_data);
            } else if let Some(empty_address_data) = address_index_to_empty_address_data_db.get_from_ram(address_index)
            {
                *address_data = AddressData::from_empty(empty_address_data);
            } else if let Some(_address_data) = address_index_to_address_data_db.get_from_disk(address_index) {
                _address_data.clone_into(address_data);
            } else {
                let empty_address_data = address_index_to_empty_address_data_db
                    .get_from_disk(address_index)
                    .unwrap();
                *address_data = AddressData::from_empty(empty_address_data);
            }
        });

    // Parallel unsafe_get + Linear remove = Parallel-ish take
    address_index_to_address_data
        .iter()
        .for_each(|(address_index, address_data)| {
            if address_data.is_empty() {
                address_index_to_empty_address_data_db.remove(address_index);
            } else {
                address_index_to_address_data_db.remove(address_index);
            }
        });

    address_index_to_address_data
}

View File

@@ -0,0 +1,60 @@
use std::{fs, io, path::Path};
use log::info;
use snkrj::AnyDatabase;
use crate::structs::{Config, Date, Height};
use super::Metadata;
/// Shared behavior for a group of `snkrj` databases living under one directory.
///
/// Implementors own a set of sharded databases plus a [`Metadata`] record that
/// tracks how far (height/date) the group has been exported.
pub trait AnyDatabaseGroup
where
    Self: Sized,
{
    /// Imports the group from disk and ensures its directory exists.
    fn init(config: &Config) -> Self {
        let s = Self::import(config);
        s.create_dir_all().unwrap();
        s
    }

    /// Builds the group from its configured path without touching the filesystem.
    fn import(config: &Config) -> Self;

    /// Moves every open database out of `self`, boxed so they can be exported
    /// uniformly (and in parallel).
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>>;

    /// Opens every database found in the group's directory.
    fn open_all(&mut self);

    fn metadata(&mut self) -> &mut Metadata;

    /// Persists the group's metadata, recording the latest exported height/date.
    fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        self.metadata().export(height, date)
    }

    // `io::Result<()>` is the same type as the previous
    // `color_eyre::Result<(), std::io::Error>` spelling (both aliases are
    // transparent), just written consistently.
    fn create_dir_all(&self) -> io::Result<()> {
        fs::create_dir_all(self.path())
    }

    fn remove_dir_all(&self) -> io::Result<()> {
        fs::remove_dir_all(self.path())
    }

    /// Wipes the group from disk (metadata included) and recreates an empty
    /// directory.
    fn reset(&mut self) -> io::Result<()> {
        info!(
            "Reset {}",
            self.path()
                .components()
                .last()
                .unwrap()
                .as_os_str()
                .to_str()
                .unwrap()
        );
        self.reset_metadata();
        self.remove_dir_all()?;
        self.create_dir_all()?;
        Ok(())
    }

    fn reset_metadata(&mut self);

    /// Root directory of the group.
    fn path(&self) -> &Path;
}

View File

@@ -0,0 +1,173 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use snkrj::{AnyDatabase, Database as _Database};
use crate::{
parser::states::AddressCohortsDurableStates,
structs::{AddressData, Config},
};
use super::{AnyDatabaseGroup, Metadata};
type Key = u32;
type Value = AddressData;
type Database = _Database<Key, Value>;

/// Address index -> full address state, sharded into fixed-size `snkrj`
/// databases (see [`ADDRESS_INDEX_DB_MAX_SIZE`]).
#[derive(Allocative)]
pub struct AddressIndexToAddressData {
    path: PathBuf,
    pub metadata: Metadata,
    /// Shard index (`address_index / ADDRESS_INDEX_DB_MAX_SIZE`) -> database.
    #[allocative(skip)]
    pub map: BTreeMap<usize, Database>,
}

impl Deref for AddressIndexToAddressData {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for AddressIndexToAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}

/// Number of keys per shard; also used to name the shard folders
/// ("0..250000", "250000..500000", ...).
pub const ADDRESS_INDEX_DB_MAX_SIZE: usize = 250_000;
impl AddressIndexToAddressData {
    /// Inserts into the shard's RAM layer, counting the insert in the metadata.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }

    /// Removes `key` from its shard, counting the removal in the metadata.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    ///
    /// Panics if the shard holding `key` hasn't been opened first.
    pub fn get_from_ram(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get_from_ram(key)
    }

    /// Disk-layer counterpart of [`Self::get_from_ram`]; same open-shard requirement.
    pub fn get_from_disk(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get_from_disk(key)
    }

    /// Returns the shard holding `key`, opening it (folder named
    /// "start..end") if it isn't open yet.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        let path = self.path().to_owned();
        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * ADDRESS_INDEX_DB_MAX_SIZE,
                (db_index + 1) * ADDRESS_INDEX_DB_MAX_SIZE
            );
            let path = path.join(db_name);
            Database::open(path).unwrap()
        })
    }

    /// Misspelled name kept for backward compatibility with existing callers;
    /// prefer [`Self::compute_address_cohorts_durable_states`].
    pub fn compute_addres_cohorts_durable_states(&mut self) -> AddressCohortsDurableStates {
        self.compute_address_cohorts_durable_states()
    }

    /// Folds every address stored on disk into per-cohort durable states,
    /// summing the per-shard partial states computed in parallel.
    pub fn compute_address_cohorts_durable_states(&mut self) -> AddressCohortsDurableStates {
        self.open_all();
        // MUST CLEAR MAP, otherwise some weird things are happening later in the export I think
        mem::take(&mut self.map)
            .par_iter()
            .map(|(_, database)| {
                let mut s = AddressCohortsDurableStates::default();
                database
                    .iter_disk()
                    .map(|r| r.unwrap().1)
                    .for_each(|address_data| s.increment(address_data).unwrap());
                s
            })
            .sum()
    }

    /// Maps an address index to its shard index.
    fn db_index(key: &Key) -> usize {
        *key as usize / ADDRESS_INDEX_DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToAddressData {
    /// Builds the group rooted at `<databases>/address_index_to_address_data`.
    fn import(config: &Config) -> Self {
        let path = config
            .path_databases()
            .join("address_index_to_address_data");
        let metadata = Metadata::import(&path, 1);
        Self {
            path,
            metadata,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Opens every shard found on disk (folders named "start..end").
    fn open_all(&mut self) {
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        entries
            .map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .for_each(|file_name| {
                // The folder name encodes the shard's key range; its start,
                // divided by the shard size inside `open_db`, recovers the index.
                let start = file_name.split("..").next().unwrap().parse::<u32>().unwrap();
                self.open_db(&start);
            });
    }

    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|db| -> Box<dyn AnyDatabase + Send> { Box::new(db) })
            .collect_vec()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,151 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, EmptyAddressData};
use super::{AnyDatabaseGroup, Metadata, ADDRESS_INDEX_DB_MAX_SIZE};
type Key = u32;
type Value = EmptyAddressData;
type Database = _Database<Key, Value>;

/// Address index -> compact state of addresses whose balance dropped to zero,
/// sharded exactly like `AddressIndexToAddressData` (shared
/// `ADDRESS_INDEX_DB_MAX_SIZE`, so an address lives in the same-numbered shard
/// in both groups).
#[derive(Allocative)]
pub struct AddressIndexToEmptyAddressData {
    path: PathBuf,
    pub metadata: Metadata,
    /// Shard index (`address_index / ADDRESS_INDEX_DB_MAX_SIZE`) -> database.
    #[allocative(skip)]
    map: BTreeMap<usize, Database>,
}

impl Deref for AddressIndexToEmptyAddressData {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for AddressIndexToEmptyAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
impl AddressIndexToEmptyAddressData {
    /// Inserts into the shard's RAM layer, counting the insert in the metadata.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }

    /// Removes `key` from its shard, counting the removal in the metadata.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn get_from_ram(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).and_then(|db| db.get_from_ram(key))
    }

    /// Disk-layer lookup. Panics with a diagnostic message if the shard holding
    /// `key` hasn't been opened first.
    pub fn get_from_disk(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index)
            .unwrap_or_else(|| {
                // Replaced a leftover `dbg!` + bare `panic!()` with one message
                // carrying the same debugging information.
                panic!(
                    "Shard {db_index} not open for key {key} (open shards: {:?})",
                    self.map.keys().collect::<Vec<_>>()
                )
            })
            .get_from_disk(key)
    }

    /// Returns the shard holding `key`, opening it (folder named "start..end")
    /// if it isn't open yet.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        let path = self.path.to_owned();
        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * ADDRESS_INDEX_DB_MAX_SIZE,
                (db_index + 1) * ADDRESS_INDEX_DB_MAX_SIZE
            );
            let path = path.join(db_name);
            Database::open(path).unwrap()
        })
    }

    /// Maps an address index to its shard index.
    fn db_index(key: &Key) -> usize {
        *key as usize / ADDRESS_INDEX_DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToEmptyAddressData {
    /// Builds the group rooted at `<databases>/address_index_to_empty_address_data`.
    fn import(config: &Config) -> Self {
        let path = config
            .path_databases()
            .join("address_index_to_empty_address_data");
        let metadata = Metadata::import(&path, 1);
        Self {
            path,
            metadata,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Opens every shard found on disk (folders named "start..end").
    fn open_all(&mut self) {
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        entries
            .map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .for_each(|file_name| {
                // The folder name encodes the shard's key range; its start,
                // divided by the shard size inside `open_db`, recovers the index.
                let start = file_name.split("..").next().unwrap().parse::<u32>().unwrap();
                self.open_db(&start);
            });
    }

    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|db| -> Box<dyn AnyDatabase + Send> { Box::new(db) })
            .collect_vec()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,445 @@
use std::{
collections::BTreeMap,
fs, mem,
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database};
use crate::structs::{Address, Config, U8x19, U8x31};
use super::{AnyDatabaseGroup, Metadata};
type Value = u32;
type U8x19Database = Database<U8x19, Value>;
type U8x31Database = Database<U8x31, Value>;
type U32Database = Database<u32, Value>;

// One alias per script type. For the prefixed kinds the key is the address
// bytes minus the leading `u16` prefix (used to shard the databases); the
// catch-all kinds use a raw `u32` payload.
type P2PKDatabase = U8x19Database;
type P2PKHDatabase = U8x19Database;
type P2SHDatabase = U8x19Database;
type P2WPKHDatabase = U8x19Database;
type P2WSHDatabase = U8x31Database;
type P2TRDatabase = U8x31Database;
type UnknownDatabase = U32Database;
type OpReturnDatabase = U32Database;
type PushOnlyDatabase = U32Database;
type EmptyDatabase = U32Database;
type MultisigDatabase = U32Database;

/// Address -> address index, split per script type; the common types are
/// further sharded by a `u16` prefix of the address bytes, while the rarer
/// catch-all types each live in a single lazily-opened database.
#[derive(Allocative)]
pub struct AddressToAddressIndex {
    path: PathBuf,
    pub metadata: Metadata,
    #[allocative(skip)]
    p2pk: BTreeMap<u16, P2PKDatabase>,
    #[allocative(skip)]
    p2pkh: BTreeMap<u16, P2PKHDatabase>,
    #[allocative(skip)]
    p2sh: BTreeMap<u16, P2SHDatabase>,
    #[allocative(skip)]
    p2wpkh: BTreeMap<u16, P2WPKHDatabase>,
    #[allocative(skip)]
    p2wsh: BTreeMap<u16, P2WSHDatabase>,
    #[allocative(skip)]
    p2tr: BTreeMap<u16, P2TRDatabase>,
    #[allocative(skip)]
    op_return: Option<OpReturnDatabase>,
    #[allocative(skip)]
    push_only: Option<PushOnlyDatabase>,
    #[allocative(skip)]
    unknown: Option<UnknownDatabase>,
    #[allocative(skip)]
    empty: Option<EmptyDatabase>,
    #[allocative(skip)]
    multisig: Option<MultisigDatabase>,
}
impl AddressToAddressIndex {
pub fn open_db(&mut self, address: &Address) {
match address {
Address::Empty(_) => {
self.open_empty();
}
Address::Unknown(_) => {
self.open_unknown();
}
Address::OpReturn(_) => {
self.open_op_return();
}
Address::PushOnly(_) => {
self.open_push_only();
}
Address::MultiSig(_) => {
self.open_multisig();
}
Address::P2PK((prefix, _)) => {
self.open_p2pk(*prefix);
}
Address::P2PKH((prefix, _)) => {
self.open_p2pkh(*prefix);
}
Address::P2SH((prefix, _)) => {
self.open_p2sh(*prefix);
}
Address::P2WPKH((prefix, _)) => {
self.open_p2wpkh(*prefix);
}
Address::P2WSH((prefix, _)) => {
self.open_p2wsh(*prefix);
}
Address::P2TR((prefix, _)) => {
self.open_p2tr(*prefix);
}
}
}
/// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
/// Though it makes it easy to use with rayon
pub fn unsafe_get(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get(key),
}
}
pub fn get_from_ram(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get_from_ram(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get_from_ram(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get_from_ram(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get_from_ram(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get_from_ram(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get_from_ram(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get_from_ram(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get_from_ram(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get_from_ram(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get_from_ram(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get_from_ram(key),
}
}
pub fn insert(&mut self, address: Address, value: Value) -> Option<Value> {
self.metadata.called_insert();
match address {
Address::Empty(key) => self.open_empty().insert(key, value),
Address::Unknown(key) => self.open_unknown().insert(key, value),
Address::OpReturn(key) => self.open_op_return().insert(key, value),
Address::PushOnly(key) => self.open_push_only().insert(key, value),
Address::MultiSig(key) => self.open_multisig().insert(key, value),
Address::P2PK((prefix, rest)) => self.open_p2pk(prefix).insert(rest, value),
Address::P2PKH((prefix, rest)) => self.open_p2pkh(prefix).insert(rest, value),
Address::P2SH((prefix, rest)) => self.open_p2sh(prefix).insert(rest, value),
Address::P2WPKH((prefix, rest)) => self.open_p2wpkh(prefix).insert(rest, value),
Address::P2WSH((prefix, rest)) => self.open_p2wsh(prefix).insert(rest, value),
Address::P2TR((prefix, rest)) => self.open_p2tr(prefix).insert(rest, value),
}
}
fn path_to_group_prefixes(path: &Path) -> Vec<u16> {
let folder = fs::read_dir(path);
if folder.is_err() {
return vec![];
}
folder
.unwrap()
.map(|entry| {
entry
.unwrap()
.path()
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_owned()
.parse::<u16>()
.unwrap()
})
.collect_vec()
}
fn path_p2pk(&self) -> PathBuf {
self.path().join("p2pk")
}
pub fn open_p2pk(&mut self, prefix: u16) -> &mut P2PKDatabase {
let path = self.path_p2pk();
self.p2pk.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2pk(&mut self) {
let path = self.path_p2pk();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2pk.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2pkh(&self) -> PathBuf {
self.path().join("p2pkh")
}
pub fn open_p2pkh(&mut self, prefix: u16) -> &mut P2PKHDatabase {
let path = self.path_p2pkh();
self.p2pkh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2pkh(&mut self) {
let path = self.path_p2pkh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2pkh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2sh(&self) -> PathBuf {
self.path().join("p2sh")
}
pub fn open_p2sh(&mut self, prefix: u16) -> &mut P2SHDatabase {
let path = self.path_p2sh();
self.p2sh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2sh(&mut self) {
let path = self.path_p2sh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2sh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2wpkh(&self) -> PathBuf {
self.path().join("p2wpkh")
}
pub fn open_p2wpkh(&mut self, prefix: u16) -> &mut P2WPKHDatabase {
let path = self.path_p2wpkh();
self.p2wpkh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2wpkh(&mut self) {
let path = self.path_p2wpkh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2wpkh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2wsh(&self) -> PathBuf {
self.path().join("p2wsh")
}
pub fn open_p2wsh(&mut self, prefix: u16) -> &mut P2WSHDatabase {
let path = self.path_p2wsh();
self.p2wsh.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2wsh(&mut self) {
let path = self.path_p2wsh();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2wsh.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
fn path_p2tr(&self) -> PathBuf {
self.path().join("p2tr")
}
pub fn open_p2tr(&mut self, prefix: u16) -> &mut P2TRDatabase {
let path = self.path_p2tr();
self.p2tr.entry(prefix).or_insert_with(|| {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
})
}
fn open_all_p2tr(&mut self) {
let path = self.path_p2tr();
Self::path_to_group_prefixes(&path)
.into_iter()
.for_each(|prefix| {
self.p2tr.insert(prefix, {
let path = path.join(prefix.to_string());
Database::open(path).unwrap()
});
});
}
pub fn open_unknown(&mut self) -> &mut UnknownDatabase {
self.unknown
.get_or_insert_with(|| Database::open(self.path.join("unknown")).unwrap())
}
pub fn open_op_return(&mut self) -> &mut UnknownDatabase {
self.op_return
.get_or_insert_with(|| Database::open(self.path.join("op_return")).unwrap())
}
pub fn open_push_only(&mut self) -> &mut UnknownDatabase {
self.push_only
.get_or_insert_with(|| Database::open(self.path.join("push_only")).unwrap())
}
pub fn open_empty(&mut self) -> &mut UnknownDatabase {
self.empty
.get_or_insert_with(|| Database::open(self.path.join("empty")).unwrap())
}
pub fn open_multisig(&mut self) -> &mut MultisigDatabase {
self.multisig
.get_or_insert_with(|| Database::open(self.path.join("multisig")).unwrap())
}
}
impl AnyDatabaseGroup for AddressToAddressIndex {
    /// Builds the group rooted at `<databases>/address_to_address_index`.
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("address_to_address_index");
        Self {
            metadata: Metadata::import(&path, 1),
            path,
            p2pk: BTreeMap::default(),
            p2pkh: BTreeMap::default(),
            p2sh: BTreeMap::default(),
            p2wpkh: BTreeMap::default(),
            p2wsh: BTreeMap::default(),
            p2tr: BTreeMap::default(),
            op_return: None,
            push_only: None,
            unknown: None,
            empty: None,
            multisig: None,
        }
    }

    /// Creates every per-script-type folder. Errors are now propagated with `?`
    /// instead of unwrapping the first five calls and only returning the last.
    fn create_dir_all(&self) -> color_eyre::Result<(), std::io::Error> {
        fs::create_dir_all(self.path_p2pk())?;
        fs::create_dir_all(self.path_p2pkh())?;
        fs::create_dir_all(self.path_p2sh())?;
        fs::create_dir_all(self.path_p2wpkh())?;
        fs::create_dir_all(self.path_p2wsh())?;
        fs::create_dir_all(self.path_p2tr())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset()
    }

    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        mem::take(&mut self.p2pk)
            .into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .chain(
                mem::take(&mut self.p2pkh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2sh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2wpkh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2wsh)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                mem::take(&mut self.p2tr)
                    .into_values()
                    .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .chain(
                [
                    self.unknown.take(),
                    self.op_return.take(),
                    self.push_only.take(),
                    self.empty.take(),
                    self.multisig.take(),
                ]
                .into_iter()
                .flatten()
                .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>),
            )
            .collect_vec()
    }

    // NOTE(review): only the prefixed groups are opened here; the singleton
    // databases (unknown/op_return/push_only/empty/multisig) are left closed —
    // presumably intentional since they open lazily on access, but confirm.
    fn open_all(&mut self) {
        self.open_all_p2pk();
        self.open_all_p2pkh();
        self.open_all_p2wpkh();
        self.open_all_p2wsh();
        self.open_all_p2sh();
        self.open_all_p2tr();
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,127 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use color_eyre::eyre::eyre;
use serde::{Deserialize, Serialize};
use std::{
fmt::Debug,
fs, io,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use crate::{
io::Serialization,
structs::{Counter, Date, Height},
};
/// Per-database-group bookkeeping: the serializable [`MetadataData`] payload
/// plus the folder it is persisted in. Derefs to the payload for convenience.
#[derive(Default, Debug, Encode, Decode, Allocative)]
pub struct Metadata {
    path: PathBuf,
    data: MetadataData,
}

impl Deref for Metadata {
    type Target = MetadataData;
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl DerefMut for Metadata {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.data
    }
}
impl Metadata {
    /// Loads the payload stored under `path` (defaulting on miss/version
    /// mismatch) and remembers `path` for later exports.
    pub fn import(path: &Path, version: u16) -> Self {
        Self {
            data: MetadataData::import(path, version),
            path: path.to_owned(),
        }
    }

    /// Advances the recorded last height/date (monotonically) and persists.
    pub fn export(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        // NOTE(review): with `unwrap_or_default()`, a `None` last_height is
        // treated as the default height, so a `height` equal to the default is
        // not recorded — presumably harmless for block 0; confirm.
        if self.last_height.unwrap_or_default() < height {
            self.last_height.replace(height);
        }
        if self.last_date.unwrap_or_default() < date {
            self.last_date.replace(date);
        }
        self.data.export(&self.path)
    }

    /// Clears counters and best-effort deletes the on-disk record.
    pub fn reset(&mut self) {
        let _ = self.data.reset(&self.path);
    }

    /// Bumps the insertion serial and the live-entry counter.
    pub fn called_insert(&mut self) {
        self.serial += 1;
        self.len.increment();
    }

    pub fn called_remove(&mut self) {
        self.len.decrement();
    }

    /// True when both groups were last exported at the same height AND date.
    pub fn check_if_in_sync(&self, other: &Self) -> bool {
        self.last_date == other.last_date && self.last_height == other.last_height
    }

    /// True when `self` is at least as far along as `other` ("farer" = farther).
    /// `None` compares as earliest, so an un-exported `self` is never "farther".
    pub fn check_farer_or_in_sync(&self, other: &Self) -> bool {
        self.last_date >= other.last_date && self.last_height >= other.last_height
    }
}
/// Serializable metadata payload for a database group.
#[derive(Default, Debug, Encode, Decode, Serialize, Deserialize, Allocative)]
pub struct MetadataData {
    /// Layout version; an on-disk mismatch makes `import` fall back to defaults.
    version: u16,
    /// Monotonic insert counter (never decremented).
    pub serial: usize,
    /// Live entry count (incremented on insert, decremented on remove).
    pub len: Counter,
    /// Last exported block height, if any.
    pub last_height: Option<Height>,
    /// Last exported date, if any.
    pub last_date: Option<Date>,
}
impl MetadataData {
    /// Location of the serialized record inside the group's folder.
    fn full_path(folder_path: &Path) -> PathBuf {
        folder_path.join("metadata")
    }

    /// Imports the record from disk, falling back to a fresh default (stamped
    /// with the requested `version`) when missing or version-mismatched.
    pub fn import(path: &Path, version: u16) -> Self {
        let mut s = Self::_import(path, version).unwrap_or_default();
        s.version = version;
        s
    }

    fn _import(path: &Path, version: u16) -> color_eyre::Result<Self> {
        fs::create_dir_all(path)?;
        let s: MetadataData = Serialization::Binary.import(&Self::full_path(path))?;
        if s.version != version {
            // Stale on-disk layout: force the caller back to defaults.
            return Err(eyre!("Bad version"));
        }
        Ok(s)
    }

    /// Serializes the record into `<path>/metadata`.
    pub fn export(&self, path: &Path) -> color_eyre::Result<()> {
        // `full_path` already yields a `PathBuf`; the previous `Path::new(&...)`
        // wrapper was redundant.
        Serialization::Binary.export(&Self::full_path(path), self)
    }

    /// Clears the in-memory counters and deletes the on-disk record.
    pub fn reset(&mut self, path: &Path) -> color_eyre::Result<(), io::Error> {
        self.clear();
        fs::remove_file(Self::full_path(path))
    }

    fn clear(&mut self) {
        self.serial = 0;
        self.len.reset();
        self.last_height = None;
        self.last_date = None;
    }
}

View File

@@ -0,0 +1,175 @@
use allocative::Allocative;
mod _trait;
mod address_index_to_address_data;
mod address_index_to_empty_address_data;
mod address_to_address_index;
mod metadata;
mod txid_to_tx_data;
mod txout_index_to_address_index;
mod txout_index_to_amount;
use _trait::*;
pub use address_index_to_address_data::*;
pub use address_index_to_empty_address_data::*;
pub use address_to_address_index::*;
use itertools::Itertools;
use log::info;
use metadata::*;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use snkrj::AnyDatabase;
pub use txid_to_tx_data::*;
pub use txout_index_to_address_index::*;
pub use txout_index_to_amount::*;
use crate::structs::{Config, Date, Height};
/// The full set of on-disk key/value database groups used while parsing:
/// two transaction-level groups (`txid_to_tx_data`, `txout_index_to_amount`)
/// and four address-level groups (only populated when address computation is
/// enabled).
#[derive(Allocative)]
pub struct Databases {
    pub address_index_to_address_data: AddressIndexToAddressData,
    pub address_index_to_empty_address_data: AddressIndexToEmptyAddressData,
    pub address_to_address_index: AddressToAddressIndex,
    pub txid_to_tx_data: TxidToTxData,
    pub txout_index_to_address_index: TxoutIndexToAddressIndex,
    pub txout_index_to_amount: TxoutIndexToAmount,
}
impl Databases {
    /// Initializes every group from `config` (creating directories as needed).
    pub fn import(config: &Config) -> Self {
        let address_index_to_address_data = AddressIndexToAddressData::init(config);
        let address_index_to_empty_address_data = AddressIndexToEmptyAddressData::init(config);
        let address_to_address_index = AddressToAddressIndex::init(config);
        let txid_to_tx_data = TxidToTxData::init(config);
        let txout_index_to_address_index = TxoutIndexToAddressIndex::init(config);
        let txout_index_to_amount = TxoutIndexToAmount::init(config);
        info!("Imported databases");
        Self {
            address_index_to_address_data,
            address_index_to_empty_address_data,
            address_to_address_index,
            txid_to_tx_data,
            txout_index_to_address_index,
            txout_index_to_amount,
        }
    }

    /// Moves every open database out of every group into one boxed list,
    /// ready for parallel export.
    pub fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        self.txid_to_tx_data
            .drain_to_vec()
            .into_iter()
            .chain(self.txout_index_to_amount.drain_to_vec())
            .chain(self.address_to_address_index.drain_to_vec())
            .chain(self.address_index_to_address_data.drain_to_vec())
            .chain(self.address_index_to_empty_address_data.drain_to_vec())
            .chain(self.txout_index_to_address_index.drain_to_vec())
            .collect_vec()
    }

    /// Stamps every group's metadata with the exported height/date.
    fn export_metadata(&mut self, height: Height, date: Date) -> color_eyre::Result<()> {
        self.txid_to_tx_data.export_metadata(height, date)?;
        self.txout_index_to_amount.export_metadata(height, date)?;
        self.address_index_to_address_data
            .export_metadata(height, date)?;
        self.address_index_to_empty_address_data
            .export_metadata(height, date)?;
        self.address_to_address_index
            .export_metadata(height, date)?;
        self.txout_index_to_address_index
            .export_metadata(height, date)?;
        Ok(())
    }

    /// Exports metadata, then all drained databases in parallel, optionally
    /// defragmenting them on the way out.
    pub fn export(
        &mut self,
        height: Height,
        date: Date,
        defragment: bool,
    ) -> color_eyre::Result<()> {
        self.export_metadata(height, date)?;
        self.drain_to_vec()
            .into_par_iter()
            .try_for_each(|s| AnyDatabase::boxed_export(s, defragment))?;
        Ok(())
    }

    /// Wipes the transaction-level groups, plus the address-level ones when
    /// `include_addresses` is set. Failures are deliberately ignored
    /// (best-effort cleanup).
    pub fn reset(&mut self, include_addresses: bool) {
        if include_addresses {
            let _ = self.address_index_to_address_data.reset();
            let _ = self.address_index_to_empty_address_data.reset();
            let _ = self.address_to_address_index.reset();
            let _ = self.txout_index_to_address_index.reset();
        }
        let _ = self.txid_to_tx_data.reset();
        let _ = self.txout_index_to_amount.reset();
    }

    /// True when the address databases lag behind `height`/`date` and thus
    /// need to be recomputed. `None` (never exported) always counts as lagging.
    pub fn check_if_needs_to_compute_addresses(&self, height: Height, date: Date) -> bool {
        let check_height = |last_height: Option<Height>| {
            last_height.map_or(true, |last_height| last_height < height)
        };
        let check_date =
            |last_date: Option<Date>| last_date.map_or(true, |last_date| last_date < date);
        let check_metadata = |metadata: &Metadata| {
            check_height(metadata.last_height) || check_date(metadata.last_date)
        };
        // We only need to check one as we previously checked that they're all in sync
        check_metadata(&self.address_to_address_index.metadata)
    }

    /// Validates the cross-group invariants before reuse:
    /// 1. the two tx-level groups agree with each other,
    /// 2. the four address-level groups agree with each other,
    /// 3. the address groups are at least as far along as the tx groups,
    /// 4. the caller's datasets (`last_address_*`) are not behind the databases.
    pub fn check_if_usable(
        &self,
        last_address_height: Option<Height>,
        last_address_date: Option<Date>,
    ) -> bool {
        let are_tx_databases_in_sync = self
            .txout_index_to_amount
            .metadata
            .check_if_in_sync(&self.txid_to_tx_data.metadata);
        if !are_tx_databases_in_sync {
            return false;
        }
        let are_address_databases_in_sync = self
            .address_to_address_index
            .metadata
            .check_if_in_sync(&self.address_index_to_empty_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.address_index_to_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.txout_index_to_address_index.metadata);
        if !are_address_databases_in_sync {
            return false;
        }
        let are_address_databases_farer_or_in_sync_with_tx_database = self
            .address_to_address_index
            .metadata
            .check_farer_or_in_sync(&self.txid_to_tx_data.metadata);
        if !are_address_databases_farer_or_in_sync_with_tx_database {
            return false;
        }
        last_address_height >= self.address_to_address_index.metadata.last_height
            && last_address_date >= self.address_to_address_index.metadata.last_date
    }
}

View File

@@ -0,0 +1,156 @@
use std::{
collections::BTreeMap,
fs, mem,
path::{Path, PathBuf},
};
use allocative::Allocative;
use brk_parser::bitcoin::Txid;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, TxData, U8x31};
use super::{AnyDatabaseGroup, Metadata};
type Key = U8x31;
type Value = TxData;
type Database = _Database<Key, Value>;

/// Txid -> transaction data, sharded by 13 bits of the txid.
///
/// The shard id keeps byte 0 (all 8 bits) plus the top 5 bits of byte 1, while
/// the stored key keeps bytes 1.. (31 bytes) — together they reconstruct the
/// whole txid, so nothing is lost by truncating the key.
#[derive(Allocative)]
pub struct TxidToTxData {
    path: PathBuf,
    pub metadata: Metadata,
    /// Shard id (see `db_index`) -> database.
    #[allocative(skip)]
    map: BTreeMap<u16, Database>,
}
impl TxidToTxData {
    /// Inserts the tx data under the truncated txid key, counting the insert.
    pub fn insert(&mut self, txid: &Txid, tx_index: Value) -> Option<Value> {
        self.metadata.called_insert();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).insert(txid_key, tx_index)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
    /// Though it makes it easy to use with rayon
    ///
    /// Panics if the shard holding `txid` hasn't been opened first.
    pub fn get(&self, txid: &Txid) -> Option<&Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);
        self.map.get(&db_index).unwrap().get(&txid_key)
    }

    /// RAM-layer mutable lookup; same open-shard requirement as `get`.
    pub fn get_mut_from_ram(&mut self, txid: &Txid) -> Option<&mut Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);
        self.map.get_mut(&db_index).unwrap().get_mut_from_ram(&txid_key)
    }

    /// Schedules the on-disk entry for deletion at the next export.
    pub fn remove_later_from_disk(&mut self, txid: &Txid) {
        self.metadata.called_remove();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).remove_later_from_disk(&txid_key);
    }

    /// Drops the entry from the RAM layer only.
    pub fn remove_from_ram(&mut self, txid: &Txid) {
        self.metadata.called_remove();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).remove_from_ram(&txid_key);
    }

    /// Overwrites the stored value without touching the metadata counters.
    pub fn update(&mut self, txid: &Txid, tx_data: TxData) {
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).update(txid_key, tx_data);
    }

    #[inline(always)]
    pub fn open_db(&mut self, txid: &Txid) -> &mut Database {
        let db_index = Self::db_index(txid);
        self._open_db(db_index)
    }

    #[inline(always)]
    fn _open_db(&mut self, db_index: u16) -> &mut Database {
        let path = self.path.to_owned();
        self.map.entry(db_index).or_insert_with(|| {
            let path = path.join(db_index.to_string());
            Database::open(path).unwrap()
        })
    }

    /// Drops byte 0 of the txid; that byte is fully encoded in the shard id.
    fn txid_to_key(txid: &Txid) -> U8x31 {
        U8x31::from(&txid[1..])
    }

    /// 13-bit shard id: byte 0 (<< 5) plus the top 5 bits of byte 1,
    /// i.e. up to 8192 shards.
    fn db_index(txid: &Txid) -> u16 {
        ((txid[0] as u16) << 5) + ((txid[1] as u16) >> 3)
    }
}
impl AnyDatabaseGroup for TxidToTxData {
    /// Builds the group rooted at `<databases>/txid_to_tx_data` (layout v2).
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txid_to_tx_data");
        Self {
            metadata: Metadata::import(&path, 2),
            path,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Opens every shard whose folder name parses as a `u16` shard id;
    /// non-numeric entries (e.g. the metadata file) are silently skipped.
    fn open_all(&mut self) {
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        entries
            .filter_map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .parse::<u16>()
                    .ok()
            })
            .for_each(|db_index| {
                self._open_db(db_index);
            });
    }

    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        let map = mem::take(&mut self.map);
        map.into_values()
            .map(|db| -> Box<dyn AnyDatabase + Send> { Box::new(db) })
            .collect_vec()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,148 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Config, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata};
type Key = TxoutIndex;
type Value = u32;
type Database = _Database<Key, Value>;

/// Txout index -> index of the address that owns the output, sharded by
/// contiguous txout-index ranges of `DB_MAX_SIZE` keys.
#[derive(Allocative)]
pub struct TxoutIndexToAddressIndex {
    path: PathBuf,
    pub metadata: Metadata,
    /// Shard index (`txout_index / DB_MAX_SIZE`) -> database.
    #[allocative(skip)]
    map: BTreeMap<usize, Database>,
}

impl Deref for TxoutIndexToAddressIndex {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for TxoutIndexToAddressIndex {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}

/// Number of txout indexes per shard; also used to name the shard folders.
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAddressIndex {
    /// Inserts into the shard's RAM layer, counting the insert in the metadata.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }

    /// Removes `key` from its shard, counting the removal in the metadata.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get(key)
    }

    /// Returns the shard holding `key`, opening it (folder named "start..end")
    /// if it isn't open yet.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        let path = self.path.to_owned();
        self.entry(db_index).or_insert_with(|| {
            // NOTE(review): `db_index * DB_MAX_SIZE` (10e9 per shard) would
            // overflow `usize` on 32-bit targets — presumably 64-bit only; confirm.
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );
            let path = path.join(db_name);
            Database::open(path).unwrap()
        })
    }

    /// Maps a txout index to its shard index.
    fn db_index(key: &Key) -> usize {
        key.as_u64() as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAddressIndex {
    /// Builds the group rooted at `<databases>/txout_index_to_address_index`.
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txout_index_to_address_index");
        Self {
            metadata: Metadata::import(&path, 1),
            path,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Opens every shard found on disk (folders named "start..end"); the range
    /// start is parsed back into a `TxoutIndex` (via `From<u64>`) whose shard
    /// `open_db` then recovers.
    fn open_all(&mut self) {
        let folder = fs::read_dir(&self.path);
        if folder.is_err() {
            return;
        }
        folder
            .unwrap()
            .map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .for_each(|path| {
                self.open_db(
                    &path
                        .split("..")
                        .next()
                        .unwrap()
                        .parse::<u64>()
                        .unwrap()
                        .into(),
                );
            });
    }

    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        mem::take(&mut self.map)
            .into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .collect_vec()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,148 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
path::{Path, PathBuf},
};
use allocative::Allocative;
use itertools::Itertools;
use snkrj::{AnyDatabase, Database as _Database};
use crate::structs::{Amount, Config, TxoutIndex};
use super::{AnyDatabaseGroup, Metadata};
// One entry per transaction output: the key is its global txout index, the
// value is the output's amount in satoshis (per the `Amount` type).
type Key = TxoutIndex;
type Value = Amount;
type Database = _Database<Key, Value>;
/// Sharded on-disk map from txout index to the output's `Amount`.
/// Shards are lazily opened databases keyed by `txout_index / DB_MAX_SIZE`.
#[derive(Allocative)]
pub struct TxoutIndexToAmount {
    // Root folder; each shard lives in a sub-folder named "<start>..<end>".
    path: PathBuf,
    // Counters/state updated on every insert and remove (see `called_insert`).
    pub metadata: Metadata,
    #[allocative(skip)]
    map: BTreeMap<usize, Database>,
}
// Expose the shard map directly so `BTreeMap` methods (`get`, `entry`, …) can
// be called on the group itself.
impl Deref for TxoutIndexToAmount {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}
// Mutable counterpart of the `Deref` impl above.
impl DerefMut for TxoutIndexToAmount {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAmount {
    /// Inserts the pair into the RAM layer of the shard owning `key`,
    /// opening the shard if needed. Returns the previous value, if any.
    pub fn insert_to_ram(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).insert_to_ram(key, value)
    }

    /// Removes `key` from the shard owning it (opening it if needed) and
    /// returns the removed value, if any.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    ///
    /// # Panics
    /// Panics if the shard owning `key` hasn't been opened yet.
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get(key)
    }

    /// Returns the shard owning `key`, opening it on first access.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        // Borrow the path instead of cloning it on every call: `path` and
        // `map` are disjoint fields, so both borrows can coexist.
        let path = &self.path;
        self.map.entry(db_index).or_insert_with(|| {
            // Shard folders are named "<start>..<end>" (txout index range).
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );
            Database::open(path.join(db_name)).unwrap()
        })
    }

    /// Maps a key to the index of the shard that owns it.
    fn db_index(key: &Key) -> usize {
        key.as_u64() as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAmount {
    /// Creates the group rooted at `<databases>/txout_index_to_amount`
    /// with no shard opened yet.
    fn import(config: &Config) -> Self {
        let path = config.path_databases().join("txout_index_to_amount");
        Self {
            metadata: Metadata::import(&path, 1),
            path,
            map: BTreeMap::default(),
        }
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    /// Opens every shard already present on disk. Shard folders are named
    /// "<start>..<end>"; the lower bound alone identifies the shard.
    fn open_all(&mut self) {
        // Nothing to open if the root folder doesn't exist yet.
        let entries = match fs::read_dir(&self.path) {
            Ok(entries) => entries,
            Err(_) => return,
        };
        for entry in entries {
            let path = entry.unwrap().path();
            let file_name = path.file_name().unwrap().to_str().unwrap();
            // `split_once` both filters out non-shard entries (no "..") and
            // extracts the range's lower bound in one step.
            if let Some((start, _)) = file_name.split_once("..") {
                self.open_db(&start.parse::<u64>().unwrap().into());
            }
        }
    }

    /// Takes every opened shard out of the group (leaving it empty) as
    /// type-erased databases, e.g. to export them on another thread.
    fn drain_to_vec(&mut self) -> Vec<Box<dyn AnyDatabase + Send>> {
        mem::take(&mut self.map)
            .into_values()
            .map(|db| Box::new(db) as Box<dyn AnyDatabase + Send>)
            .collect_vec()
    }

    fn metadata(&mut self) -> &mut Metadata {
        &mut self.metadata
    }

    fn path(&self) -> &Path {
        &self.path
    }
}

View File

@@ -0,0 +1,887 @@
use itertools::Itertools;
use rayon::prelude::*;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{
cohort_metadata::AddressCohortMetadataDataset, ComputeData, DateRecapDataset, RatioDataset,
SubDataset,
},
structs::{
AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, BiMap, Date, DateMap, Height, HeightMap,
MapKind, Timestamp, OHLC,
},
};
use super::{AnyDatasetGroup, MinInitialStates};
pub trait AnyDataset: Iterable {
fn get_min_initial_states(&self) -> &MinInitialStates;
fn needs_insert(&self, height: Height, date: Date) -> bool {
self.needs_insert_height(height) || self.needs_insert_date(date)
}
#[inline(always)]
fn needs_insert_height(&self, height: Height) -> bool {
!self.to_all_inserted_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.inserted
.first_unsafe_height
.unwrap_or(Height::ZERO)
<= height
}
#[inline(always)]
fn needs_insert_date(&self, date: Date) -> bool {
!self.to_all_inserted_date_map_vec().is_empty()
&& self
.get_min_initial_states()
.inserted
.first_unsafe_date
.map_or(true, |min_initial_first_unsafe_date| {
min_initial_first_unsafe_date <= date
})
}
fn to_kind_bi_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<BiMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<BiMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyBiMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<RatioDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
MapKind::Computed => dataset.to_computed_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_bi_map_vec(),
MapKind::Computed => dataset.to_computed_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_bi_map_vec(kind));
});
}
});
v
}
fn to_kind_mut_bi_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyBiMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<BiMap<u8>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u16>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u32>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<u64>>() => {
if let Some(map) = any.downcast_mut::<BiMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<usize>>() => {
if let Some(map) = any.downcast_mut::<BiMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<f32>>() => {
if let Some(map) = any.downcast_mut::<BiMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<f64>>() => {
if let Some(map) = any.downcast_mut::<BiMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<BiMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Date>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Height>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<BiMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<BiMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyBiMap);
}
}
}
any if any.is::<RatioDataset>() => {
if let Some(dataset) = any.downcast_mut::<RatioDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_bi_map_vec(),
MapKind::Computed => dataset.to_computed_mut_bi_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_bi_map_vec(kind));
});
}
}
_ => {}
});
v
}
fn to_kind_date_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<DateMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<DateMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u32>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<u64>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<DateRecapDataset<f32>>() {
dataset.as_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &(dyn AnyDateMap + Send + Sync))
}
});
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_date_map_vec(kind));
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_date_map_vec(),
MapKind::Computed => dataset.to_computed_date_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
});
v
}
fn to_kind_mut_date_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyDateMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<DateMap<u8>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u16>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u32>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<u64>>() => {
if let Some(map) = any.downcast_mut::<DateMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<usize>>() => {
if let Some(map) = any.downcast_mut::<DateMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<f32>>() => {
if let Some(map) = any.downcast_mut::<DateMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<f64>>() => {
if let Some(map) = any.downcast_mut::<DateMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<DateMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Date>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Height>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<DateMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
}
}
any if any.is::<DateRecapDataset<u32>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u32>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<DateRecapDataset<u64>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<u64>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<DateRecapDataset<f32>>() => {
if let Some(dataset) = any.downcast_mut::<DateRecapDataset<f32>>() {
dataset.as_mut_vec().into_iter().for_each(|map| {
if map.kind() == kind {
v.push(map as &mut dyn AnyDateMap);
}
});
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_date_map_vec(kind));
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_date_map_vec(),
MapKind::Computed => dataset.to_computed_mut_date_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
_ => {}
});
v
}
fn to_kind_height_map_vec(&self, kind: MapKind) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut v = vec![];
self.iter().for_each(|(_, any)| {
if let Some(map) = any.downcast_ref::<HeightMap<u8>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u16>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<u64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<usize>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<f32>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<f64>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Date>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Height>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(map) = any.downcast_ref::<HeightMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &(dyn AnyHeightMap + Send + Sync))
}
} else if let Some(dataset) = any.downcast_ref::<SubDataset>() {
dataset.as_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_height_map_vec(kind));
});
} else if let Some(dataset) = any.downcast_ref::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_height_map_vec(),
MapKind::Computed => dataset.to_computed_height_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
});
v
}
fn to_kind_mut_height_map_vec(&mut self, kind: MapKind) -> Vec<&mut dyn AnyHeightMap> {
let mut v = vec![];
self.iter_mut().for_each(|(_, any)| match any {
any if any.is::<HeightMap<u8>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u8>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u16>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u16>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u32>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<u64>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<u64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<usize>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<usize>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<f32>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<f32>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<f64>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<f64>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<OHLC>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<OHLC>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Date>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Date>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Height>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Height>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<HeightMap<Timestamp>>() => {
if let Some(map) = any.downcast_mut::<HeightMap<Timestamp>>() {
if map.kind() == kind {
v.push(map as &mut dyn AnyHeightMap);
}
}
}
any if any.is::<SubDataset>() => {
if let Some(dataset) = any.downcast_mut::<SubDataset>() {
dataset.as_mut_vec().into_iter().for_each(|dataset| {
v.append(&mut dataset.to_kind_mut_height_map_vec(kind));
});
}
}
any if any.is::<AddressCohortMetadataDataset>() => {
if let Some(dataset) = any.downcast_mut::<AddressCohortMetadataDataset>() {
match kind {
MapKind::Inserted => dataset.to_inserted_mut_height_map_vec(),
MapKind::Computed => dataset.to_computed_mut_height_map_vec(),
}
.into_iter()
.for_each(|map| {
v.push(map);
});
}
}
_ => {}
});
v
}
fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
self.to_kind_bi_map_vec(MapKind::Inserted)
}
fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
self.to_kind_height_map_vec(MapKind::Inserted)
}
fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
self.to_kind_date_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
self.to_kind_mut_bi_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
self.to_kind_mut_height_map_vec(MapKind::Inserted)
}
fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
self.to_kind_mut_date_map_vec(MapKind::Inserted)
}
fn to_all_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut vec = self.to_inserted_height_map_vec();
vec.append(
&mut self
.to_inserted_bi_map_vec()
.iter()
.map(|bi| bi.get_height())
.collect_vec(),
);
vec
}
fn to_all_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut vec = self.to_inserted_date_map_vec();
vec.append(
&mut self
.to_inserted_bi_map_vec()
.iter()
.map(|bi| bi.get_date())
.collect_vec(),
);
vec
}
fn to_all_inserted_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let heights = self
.to_all_inserted_height_map_vec()
.into_iter()
.map(|d| d.as_any_map());
let dates = self
.to_all_inserted_date_map_vec()
.into_iter()
.map(|d| d.as_any_map());
heights.chain(dates).collect_vec()
}
#[inline(always)]
fn should_compute(&self, compute_data: &ComputeData) -> bool {
compute_data
.heights
.last()
.map_or(false, |height| self.should_compute_height(*height))
|| compute_data
.dates
.last()
.map_or(false, |date| self.should_compute_date(*date))
}
#[inline(always)]
fn should_compute_height(&self, height: Height) -> bool {
!self.to_all_computed_height_map_vec().is_empty()
&& self
.get_min_initial_states()
.computed
.first_unsafe_height
.unwrap_or(Height::ZERO)
<= height
}
#[inline(always)]
fn should_compute_date(&self, date: Date) -> bool {
!self.to_all_computed_date_map_vec().is_empty()
&& self
.get_min_initial_states()
.computed
.first_unsafe_date
.map_or(true, |min_initial_first_unsafe_date| {
min_initial_first_unsafe_date <= date
})
}
fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
self.to_kind_bi_map_vec(MapKind::Computed)
}
fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
self.to_kind_height_map_vec(MapKind::Computed)
}
fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
self.to_kind_date_map_vec(MapKind::Computed)
}
fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
self.to_kind_mut_bi_map_vec(MapKind::Computed)
}
fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
self.to_kind_mut_height_map_vec(MapKind::Computed)
}
fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
self.to_kind_mut_date_map_vec(MapKind::Computed)
}
fn to_all_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
let mut vec = self.to_computed_height_map_vec();
vec.append(
&mut self
.to_computed_bi_map_vec()
.iter()
.map(|bi| bi.get_height())
.collect_vec(),
);
vec
}
fn to_all_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
let mut vec = self.to_computed_date_map_vec();
vec.append(
&mut self
.to_computed_bi_map_vec()
.iter()
.map(|bi| bi.get_date())
.collect_vec(),
);
vec
}
fn to_all_computed_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let heights = self
.to_all_computed_height_map_vec()
.into_iter()
.map(|d| d.as_any_map());
let dates = self
.to_all_computed_date_map_vec()
.into_iter()
.map(|d| d.as_any_map());
heights.chain(dates).collect_vec()
}
fn to_all_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
let mut inserted = self.to_all_inserted_map_vec();
inserted.append(&mut self.to_all_computed_map_vec());
inserted
}
// #[inline(always)]
// fn is_empty(&self) -> bool {
// self.to_any_map_vec().is_empty()
// }
fn pre_export(&mut self) {
self.to_inserted_mut_height_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_inserted_mut_date_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.pre_export())
});
self.to_computed_mut_height_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_computed_mut_date_map_vec()
.into_iter()
.for_each(|map| map.pre_export());
self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.pre_export())
});
}
fn export(&self) -> color_eyre::Result<()> {
self.to_all_map_vec()
.into_par_iter()
.try_for_each(|map| -> color_eyre::Result<()> { map.export() })
}
fn post_export(&mut self) {
self.to_inserted_mut_height_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_inserted_mut_date_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.post_export())
});
self.to_computed_mut_height_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_computed_mut_date_map_vec()
.into_iter()
.for_each(|map| map.post_export());
self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
d.as_any_mut_map()
.into_iter()
.for_each(|map| map.post_export())
});
}
fn reset_computed(&self) {
self.to_all_computed_date_map_vec()
.iter()
.for_each(|map| map.delete_files());
self.to_all_computed_height_map_vec()
.iter()
.for_each(|map| map.delete_files());
}
}

View File

@@ -0,0 +1,7 @@
use super::AnyDataset;
/// A fixed collection of datasets that can be visited immutably or mutably.
pub trait AnyDatasetGroup {
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,9 @@
use super::{AnyDataset, MinInitialStates};
/// A top-level container of datasets with its own aggregated initial state.
pub trait AnyDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates;
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,279 @@
use allocative::Allocative;
use crate::structs::{AnyDateMap, AnyHeightMap, Config, Date, Height};
use super::{AnyDataset, AnyDatasets};
/// Import-time state of a dataset (or group of datasets), split between maps
/// whose values are inserted from parsed blocks and maps whose values are
/// computed afterwards.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialStates {
    pub inserted: MinInitialState,
    pub computed: MinInitialState,
}
impl MinInitialStates {
    /// Replaces `self` with `other`. Both fields were overwritten anyway, so
    /// this is a plain copy (the struct is `Copy`).
    pub fn consume(&mut self, other: Self) {
        *self = other;
    }

    /// Computes both the inserted and the computed state of a single dataset.
    pub fn compute_from_dataset(dataset: &dyn AnyDataset, config: &Config) -> Self {
        Self {
            inserted: MinInitialState::compute_from_dataset(dataset, Mode::Inserted, config),
            computed: MinInitialState::compute_from_dataset(dataset, Mode::Computed, config),
        }
    }

    /// Computes both the inserted and the computed state of a dataset group.
    pub fn compute_from_datasets(datasets: &dyn AnyDatasets, config: &Config) -> Self {
        Self {
            inserted: MinInitialState::compute_from_datasets(datasets, Mode::Inserted, config),
            computed: MinInitialState::compute_from_datasets(datasets, Mode::Computed, config),
        }
    }

    /// Smallest of the two last heights. Note that `None` (nothing processed
    /// yet) compares below any `Some`, so one empty side yields `None`.
    pub fn min_last_height(&self) -> Option<Height> {
        self.computed.last_height.min(self.inserted.last_height)
    }
}
/// Minimum (i.e. most conservative) state over a set of maps: the first
/// date/height that is not yet safe to trust and the last one present on disk.
/// `None` means no state exists yet for that axis.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialState {
    pub first_unsafe_date: Option<Date>,
    pub first_unsafe_height: Option<Height>,
    pub last_date: Option<Date>,
    pub last_height: Option<Height>,
}
// Which family of maps a computation should look at (see `MapKind`).
enum Mode {
    Inserted,
    Computed,
}
impl MinInitialState {
    /// Aggregates the minimum initial state over every dataset of a group for
    /// one mode (inserted or computed maps).
    fn compute_from_datasets(datasets: &dyn AnyDatasets, mode: Mode, config: &Config) -> Self {
        // Select the mode-specific accessors once instead of duplicating the
        // whole aggregation per mode — the two arms only differed in which
        // field / map family they read.
        let state: fn(&(dyn AnyDataset + Sync + Send)) -> &MinInitialState;
        let contains_date_maps: fn(&&(dyn AnyDataset + Sync + Send)) -> bool;
        let contains_height_maps: fn(&&(dyn AnyDataset + Sync + Send)) -> bool;
        match mode {
            Mode::Inserted => {
                state = |dataset| &dataset.get_min_initial_states().inserted;
                contains_date_maps = |dataset| !dataset.to_all_inserted_date_map_vec().is_empty();
                contains_height_maps =
                    |dataset| !dataset.to_all_inserted_height_map_vec().is_empty();
            }
            Mode::Computed => {
                // A forced recompute means no previous computed state is
                // usable.
                if config.recompute_computed() {
                    // datasets.reset_computed();
                    return Self::default();
                }
                state = |dataset| &dataset.get_min_initial_states().computed;
                contains_date_maps = |dataset| !dataset.to_all_computed_date_map_vec().is_empty();
                contains_height_maps =
                    |dataset| !dataset.to_all_computed_height_map_vec().is_empty();
            }
        }
        Self {
            first_unsafe_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
                state(dataset).first_unsafe_date
            }),
            first_unsafe_height: Self::min_datasets_height(
                datasets,
                contains_height_maps,
                |dataset| state(dataset).first_unsafe_height,
            ),
            last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
                state(dataset).last_date
            }),
            last_height: Self::min_datasets_height(datasets, contains_height_maps, |dataset| {
                state(dataset).last_height
            }),
        }
    }

    /// Minimum of `map(dataset)` over the datasets that contain date maps for
    /// the current mode.
    fn min_datasets_date(
        datasets: &dyn AnyDatasets,
        is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
        map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Date>,
    ) -> Option<Date> {
        Self::min_date(
            datasets
                .to_any_dataset_vec()
                .into_iter()
                .filter(is_not_empty)
                .map(map),
        )
    }

    /// Minimum of `map(dataset)` over the datasets that contain height maps
    /// for the current mode.
    fn min_datasets_height(
        datasets: &dyn AnyDatasets,
        is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
        map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<Height>,
    ) -> Option<Height> {
        Self::min_height(
            datasets
                .to_any_dataset_vec()
                .into_iter()
                .filter(is_not_empty)
                .map(map),
        )
    }

    /// Computes the minimum initial state of a single dataset for one mode by
    /// scanning that mode's maps directly.
    fn compute_from_dataset(dataset: &dyn AnyDataset, mode: Mode, config: &Config) -> Self {
        let (date_vec, height_vec) = match mode {
            Mode::Inserted => (
                dataset.to_all_inserted_date_map_vec(),
                dataset.to_all_inserted_height_map_vec(),
            ),
            Mode::Computed => {
                // A forced recompute wipes the computed maps and starts over.
                if config.recompute_computed() {
                    dataset.reset_computed();
                    return Self::default();
                }
                (
                    dataset.to_all_computed_date_map_vec(),
                    dataset.to_all_computed_height_map_vec(),
                )
            }
        };
        Self {
            first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(&date_vec),
            first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
                &height_vec,
            ),
            last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
            last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
        }
    }

    #[inline(always)]
    fn compute_min_initial_last_date_from_dataset(
        arr: &[&(dyn AnyDateMap + Sync + Send)],
    ) -> Option<Date> {
        Self::min_date(arr.iter().map(|map| map.get_initial_last_date()))
    }

    #[inline(always)]
    fn compute_min_initial_last_height_from_dataset(
        arr: &[&(dyn AnyHeightMap + Sync + Send)],
    ) -> Option<Height> {
        Self::min_height(arr.iter().map(|map| map.get_initial_last_height()))
    }

    #[inline(always)]
    fn compute_min_initial_first_unsafe_date_from_dataset(
        arr: &[&(dyn AnyDateMap + Sync + Send)],
    ) -> Option<Date> {
        Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date()))
    }

    #[inline(always)]
    fn compute_min_initial_first_unsafe_height_from_dataset(
        arr: &[&(dyn AnyHeightMap + Sync + Send)],
    ) -> Option<Height> {
        Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height()))
    }

    /// `None < Some(_)`, so a single map without state drags the minimum to
    /// `None`.
    #[inline(always)]
    fn min_date(iter: impl Iterator<Item = Option<Date>>) -> Option<Date> {
        iter.min().flatten()
    }

    /// `None < Some(_)`, so a single map without state drags the minimum to
    /// `None`.
    #[inline(always)]
    fn min_height(iter: impl Iterator<Item = Option<Height>>) -> Option<Height> {
        iter.min().flatten()
    }
}

View File

@@ -0,0 +1,9 @@
mod any_dataset;
mod any_dataset_group;
mod any_datasets;
mod min_initial_state;
pub use any_dataset::*;
pub use any_dataset_group::*;
pub use any_datasets::*;
pub use min_initial_state::*;

View File

@@ -0,0 +1,74 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Global (all-addresses) metadata dataset: counts of created, empty, and
/// newly created addresses, per height and per date.
#[derive(Allocative, Iterable)]
pub struct AllAddressesMetadataDataset {
    min_initial_states: MinInitialStates,
    // NOTE(review): field was misspelled `created_addreses`; renamed to match
    // its on-disk map name ("created_addresses"). Private field, used only
    // within this type.
    created_addresses: BiMap<u32>,
    empty_addresses: BiMap<u32>,
    new_addresses: BiMap<u32>,
}
impl AllAddressesMetadataDataset {
    /// Imports the dataset's maps from `path` and seeds its minimal initial
    /// state from what is already on disk.
    pub fn import(path: &MapPath, config: &Config) -> color_eyre::Result<Self> {
        let f = |s: &str| path.join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            created_addresses: BiMap::new_bin(1, MapKind::Inserted, &f("created_addresses")),
            empty_addresses: BiMap::new_bin(1, MapKind::Inserted, &f("empty_addresses")),
            // Computed
            new_addresses: BiMap::new_bin(1, MapKind::Computed, &f("new_addresses")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Records, at `height`, counter lengths taken from the databases'
    /// metadata (per the map names: total created / currently empty
    /// addresses); mirrors the values into the date maps on a date's last
    /// block.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            databases,
            height,
            date,
            is_date_last_block,
            ..
        } = insert_data;
        let created_addresses = self
            .created_addresses
            .height
            .insert(height, *databases.address_to_address_index.metadata.len);
        let empty_addresses = self.empty_addresses.height.insert(
            height,
            *databases.address_index_to_empty_address_data.metadata.len,
        );
        if is_date_last_block {
            self.created_addresses.date.insert(date, created_addresses);
            self.empty_addresses.date.insert(date, empty_addresses);
        }
    }
    /// Derives `new_addresses` as the 1-step net change of
    /// `created_addresses`.
    pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
        self.new_addresses
            .multi_insert_net_change(heights, dates, &mut self.created_addresses, 1)
    }
}
impl AnyDataset for AllAddressesMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,381 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::{AddressCohortId, DurableStates},
},
structs::{AddressSplit, BiMap, Config, Date, Height, MapPath},
};
use super::cohort_metadata::AddressCohortMetadataDataset;
/// Datasets for a single address cohort, selected by `split`.
#[derive(Allocative, Iterable)]
pub struct CohortDataset {
    // Minimal on-disk state (last/first-unsafe heights and dates) aggregated
    // over this cohort's maps.
    min_initial_states: MinInitialStates,
    // Which slice of the address space this cohort covers.
    split: AddressSplit,
    // Per-cohort metadata maps (address count).
    metadata: AddressCohortMetadataDataset,
    subs: SubDataset,
}
impl CohortDataset {
pub fn import(
path: &MapPath,
id: AddressCohortId,
config: &Config,
) -> color_eyre::Result<Self> {
let name = id.as_name().map(|s| s.to_owned());
let split = id.as_split();
let mut s = Self {
min_initial_states: MinInitialStates::default(),
split,
metadata: AddressCohortMetadataDataset::import(path, &name, config)?,
subs: SubDataset::import(path, &name, config)?,
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn sub_datasets_vec(&self) -> Vec<&SubDataset> {
vec![&self.subs]
}
pub fn needs_insert_metadata(&self, height: Height, date: Date) -> bool {
self.metadata.needs_insert(height, date)
}
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.utxo.needs_insert(height, date))
}
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.capitalization.needs_insert(height, date))
}
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.supply.needs_insert(height, date))
}
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.price_paid.needs_insert(height, date))
}
pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.realized.needs_insert(height, date))
}
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.unrealized.needs_insert(height, date))
}
pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.input.needs_insert(height, date))
}
// fn needs_insert_output(&self, insert_data: &InsertData) -> bool {
// self.sub_datasets_vec()
// .iter()
// .any(|sub| sub.output.needs_insert(height, date))
// }
pub fn insert_realized_data(&mut self, insert_data: &InsertData) {
let realized_state = insert_data
.address_cohorts_realized_states
.as_ref()
.unwrap()
.get(&self.split)
.unwrap();
self.subs.realized.insert(insert_data, realized_state);
}
fn insert_metadata(&mut self, insert_data: &InsertData) {
let address_count = insert_data
.states
.address_cohorts_durable_states
.as_ref()
.unwrap()
.get(&self.split)
.unwrap()
.address_count;
self.metadata.insert(insert_data, address_count);
}
fn insert_supply_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
self.subs
.supply
.insert(insert_data, &durable_states.supply_state);
}
fn insert_utxo_data(&mut self, insert_data: &InsertData, durable_states: &DurableStates) {
self.subs
.utxo
.insert(insert_data, &durable_states.utxo_state);
}
fn insert_capitalization_data(
&mut self,
insert_data: &InsertData,
durable_states: &DurableStates,
) {
self.subs
.capitalization
.insert(insert_data, &durable_states.capitalization_state);
}
fn insert_unrealized_data(&mut self, insert_data: &InsertData) {
let states = insert_data
.address_cohorts_one_shot_states
.as_ref()
.unwrap()
.get(&self.split)
.unwrap();
self.subs.unrealized.insert(
insert_data,
&states.unrealized_block_state,
&states.unrealized_date_state,
);
}
fn insert_price_paid_data(&mut self, insert_data: &InsertData) {
let states = insert_data
.address_cohorts_one_shot_states
.as_ref()
.unwrap()
.get(&self.split)
.unwrap();
self.subs
.price_paid
.insert(insert_data, &states.price_paid_state);
}
fn insert_input_data(&mut self, insert_data: &InsertData) {
let state = insert_data
.address_cohorts_input_states
.as_ref()
.unwrap()
.get(&self.split)
.unwrap();
self.subs.input.insert(insert_data, state);
}
// fn insert_output_data(&mut self, insert_data: &InsertData) {
// let state = insert_data
// .address_cohorts_output_states
// .as_ref()
// .unwrap()
// .get(&self.split)
// .unwrap();
// self.output.insert(insert_data, &state.all);
// self.illiquid.output.insert(insert_data, &state.illiquid);
// self.liquid.output.insert(insert_data, &state.liquid);
// self.highly_liquid
// .output
// .insert(insert_data, &state.highly_liquid);
// }
    /// Inserts this cohort's per-block data, dispatching to each sub-dataset
    /// that still needs the current (height, date).
    ///
    /// No-op when address computation is disabled for this pass, or when no
    /// durable state exists for this cohort's split.
    pub fn insert(&mut self, insert_data: &InsertData) {
        if !insert_data.compute_addresses {
            return;
        }
        // `address_cohorts_durable_states` is expected to be populated
        // whenever `compute_addresses` is set (unwrap below).
        let address_cohort_durable_states = insert_data
            .states
            .address_cohorts_durable_states
            .as_ref()
            .unwrap()
            .get(&self.split);
        if address_cohort_durable_states.is_none() {
            return; // TODO: Check if should panic instead
        }
        let address_cohort_durable_states = address_cohort_durable_states.unwrap();
        // Each branch is guarded so already-present (height, date) entries
        // are not re-inserted.
        if self.needs_insert_metadata(insert_data.height, insert_data.date) {
            self.insert_metadata(insert_data);
        }
        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.insert_utxo_data(insert_data, &address_cohort_durable_states.durable_states);
        }
        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.insert_capitalization_data(
                insert_data,
                &address_cohort_durable_states.durable_states,
            );
        }
        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.insert_supply_data(insert_data, &address_cohort_durable_states.durable_states);
        }
        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.insert_realized_data(insert_data);
        }
        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.insert_unrealized_data(insert_data);
        }
        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.insert_price_paid_data(insert_data);
        }
        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.insert_input_data(insert_data);
        }
        // if self.needs_insert_output(insert_data) {
        //     self.insert_output_data(insert_data);
        // }
    }
// pub fn should_compute_metadata(&self, compute_data: &ComputeData) -> bool {
// self.metadata.should_compute(compute_data)
// }
// pub fn should_compute_utxo(&self, compute_data: &ComputeData) -> bool {
// self.sub_datasets_vec()
// .iter()
// .any(|sub| sub.utxo.should_compute(compute_data))
// }
pub fn should_compute_supply(&self, compute_data: &ComputeData) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.supply.should_compute(compute_data))
}
pub fn should_compute_capitalization(&self, compute_data: &ComputeData) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.capitalization.should_compute(compute_data))
}
fn should_compute_realized(&self, compute_data: &ComputeData) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.realized.should_compute(compute_data))
}
fn should_compute_unrealized(&self, compute_data: &ComputeData) -> bool {
self.sub_datasets_vec()
.iter()
.any(|sub| sub.unrealized.should_compute(compute_data))
}
// fn should_compute_input(&self, compute_data: &ComputeData) -> bool {
// self.sub_datasets_vec()
// .iter()
// .any(|sub| sub.input.should_compute(compute_data))
// }
// fn should_compute_output(&self, compute_data: &ComputeData) -> bool {
// self.sub_datasets_vec()
// .iter()
// .any(|sub| sub.output.should_compute(compute_data))
// }
fn compute_supply_data(
&mut self,
compute_data: &ComputeData,
circulating_supply: &mut BiMap<f64>,
) {
self.subs.supply.compute(compute_data, circulating_supply);
}
fn compute_unrealized_data(
&mut self,
compute_data: &ComputeData,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,
) {
self.subs.unrealized.compute(
compute_data,
&mut self.subs.supply.supply,
circulating_supply,
market_cap,
);
}
fn compute_realized_data(&mut self, compute_data: &ComputeData, market_cap: &mut BiMap<f32>) {
self.subs.realized.compute(compute_data, market_cap);
}
fn compute_capitalization_data(&mut self, compute_data: &ComputeData, closes: &mut BiMap<f32>) {
self.subs
.capitalization
.compute(compute_data, closes, &mut self.subs.supply.supply);
}
// fn compute_output_data(&mut self, compute_data: &ComputeData) {
// self.all
// .output
// .compute(compute_data, &mut self.supply.total);
// }
    /// Computes this cohort's derived datasets for the given compute window.
    ///
    /// Ordering matters: `compute_capitalization_data` reads the supply maps
    /// written by `compute_supply_data`, so it runs after supply.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.should_compute_supply(compute_data) {
            self.compute_supply_data(compute_data, circulating_supply);
        }
        if self.should_compute_unrealized(compute_data) {
            self.compute_unrealized_data(compute_data, circulating_supply, market_cap);
        }
        if self.should_compute_realized(compute_data) {
            self.compute_realized_data(compute_data, market_cap);
        }
        // MUST BE after compute_supply
        if self.should_compute_capitalization(compute_data) {
            self.compute_capitalization_data(compute_data, closes);
        }
        // if self.should_compute_output(compute_data) {
        //     self.compute_output_data(compute_data);
        // }
    }
}
impl AnyDataset for CohortDataset {
    // Exposes the cohort's aggregated minimal initial state to generic
    // dataset traversals.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,70 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, InsertData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Per-cohort metadata maps (currently just the address count).
#[derive(Allocative, Iterable)]
pub struct AddressCohortMetadataDataset {
    min_initial_states: MinInitialStates,
    address_count: BiMap<f64>,
    // pub output: OutputSubDataset,
    // Sending addresses
    // Receiving addresses
    // Active addresses (Unique(Sending + Receiving))
}
impl AddressCohortMetadataDataset {
    /// Imports the cohort's metadata maps from `path`, nesting them under
    /// `name` when the cohort is named, then seeds the minimal initial state.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let f = |s: &str| match name {
            Some(name) => path.join(&format!("{name}/{s}")),
            None => path.join(s),
        };
        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            address_count: BiMap::new_bin(1, MapKind::Inserted, &f("address_count")),
            // output: OutputSubDataset::import(parent_path)?,
        };
        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));
        Ok(dataset)
    }
    /// Records the cohort's address count at `height`, mirroring it into the
    /// date map on a date's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        address_count: f64,
    ) {
        self.address_count.height.insert(height, address_count);
        if is_date_last_block {
            self.address_count.date.insert(date, address_count);
        }
    }
}
impl AnyDataset for AddressCohortMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,176 @@
mod all_metadata;
mod cohort;
pub mod cohort_metadata;
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
parser::states::SplitByAddressCohort,
structs::{BiMap, Config, Date, Height},
};
use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset};
use super::{AnyDataset, AnyDatasets, ComputeData, InsertData, MinInitialStates};
/// All address-level datasets: the global metadata dataset plus one
/// `CohortDataset` per address cohort.
#[derive(Allocative)]
pub struct AddressDatasets {
    // Aggregated minimal initial state across metadata + all cohorts.
    min_initial_states: MinInitialStates,
    // Global (all-addresses) metadata dataset.
    metadata: AllAddressesMetadataDataset,
    pub cohorts: SplitByAddressCohort<CohortDataset>,
}
impl AddressDatasets {
    /// Imports every cohort dataset in parallel (rayon), then the global
    /// metadata dataset, and finally aggregates the minimal initial states.
    ///
    /// Cohorts are first imported into an `Option` placeholder struct so the
    /// parallel results can be slotted back by id; `cohorts.unwrap()` then
    /// converts it to the non-optional form (project method — presumably
    /// expects every slot filled, which the loop above guarantees; confirm).
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByAddressCohort::<Option<CohortDataset>>::default();
        let path_dataset = config.path_datasets();
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, CohortDataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                cohorts.get_mut_from_id(&id).replace(dataset?);
                Ok(())
            })?;
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            metadata: AllAddressesMetadataDataset::import(&path_dataset, config)?,
            cohorts: cohorts.unwrap(),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));
        Ok(s)
    }
pub fn insert(&mut self, insert_data: &InsertData) {
self.metadata.insert(insert_data);
self.cohorts
.as_mut_vec()
.into_iter()
.for_each(|(cohort, _)| cohort.insert(insert_data))
}
pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
let needs_insert_utxo = self.needs_insert_utxo(height, date);
let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
let needs_insert_supply = self.needs_insert_supply(height, date);
let needs_one_shot_states = self.needs_one_shot_states(height, date);
needs_insert_utxo
|| needs_insert_capitalization
|| needs_insert_supply
|| needs_one_shot_states
}
pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
}
// pub fn needs_sent_states(&self, height: Height, date: WNaiveDate) -> bool {
// self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
// }
pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
}
pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
}
pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_supply(height, date))
}
pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
}
// pub fn needs_insert_realized(&self, height: Height, date: WNaiveDate) -> bool {
// self.cohorts
// .as_vec()
// .iter()
// .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
// }
pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
self.cohorts
.as_vec()
.iter()
.any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
}
// pub fn needs_insert_input(&self, height: Height, date: WNaiveDate) -> bool {
// self.cohorts
// .as_vec()
// .iter()
// .any(|(dataset, _)| dataset.needs_insert_input(height, date))
// }
pub fn compute(
&mut self,
compute_data: &ComputeData,
closes: &mut BiMap<f32>,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,
) {
self.metadata.compute(compute_data);
self.cohorts
.as_mut_vec()
.into_iter()
.for_each(|(cohort, _)| {
cohort.compute(compute_data, closes, circulating_supply, market_cap)
})
}
}
impl AnyDatasets for AddressDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    /// All datasets (each cohort, then the global metadata) as trait objects.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        self.cohorts
            .as_vec()
            .into_iter()
            .map(|(d, _)| d as &(dyn AnyDataset + Send + Sync))
            // `iter::once` avoids allocating a one-element Vec just to chain.
            .chain(std::iter::once(
                &self.metadata as &(dyn AnyDataset + Send + Sync),
            ))
            .collect_vec()
    }
    /// Mutable variant of [`Self::to_any_dataset_vec`].
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .map(|(d, _)| d as &mut dyn AnyDataset)
            .chain(std::iter::once(&mut self.metadata as &mut dyn AnyDataset))
            .collect_vec()
    }
}

View File

@@ -0,0 +1,51 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, Date, HeightMap, MapKind, Timestamp},
};
use super::{InsertData, MinInitialStates};
/// Per-block calendar date and timestamp maps.
#[derive(Allocative, Iterable)]
pub struct BlockMetadataDataset {
    min_initial_states: MinInitialStates,
    pub date: HeightMap<Date>,
    pub timestamp: HeightMap<Timestamp>,
}
impl BlockMetadataDataset {
    /// Imports the block-metadata maps and seeds the minimal initial state.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let f = |name: &str| config.path_datasets().join(name);
        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            date: HeightMap::new_bin(1, MapKind::Inserted, &f("date")),
            timestamp: HeightMap::new_bin(1, MapKind::Inserted, &f("timestamp")),
        };
        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));
        Ok(dataset)
    }
    /// Stores the block's timestamp and its derived calendar date at
    /// `height`.
    pub fn insert(
        &mut self,
        &InsertData {
            height, timestamp, ..
        }: &InsertData,
    ) {
        self.timestamp.insert(height, timestamp);
        self.date.insert(height, timestamp.to_date());
    }
}
impl AnyDataset for BlockMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,65 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, HeightMap, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Coindays destroyed per block, plus a per-date one-day sum.
#[derive(Allocative, Iterable)]
pub struct CoindaysDataset {
    min_initial_states: MinInitialStates,
    pub coindays_destroyed: HeightMap<f32>,
    pub coindays_destroyed_1d_sum: DateMap<f32>,
}
impl CoindaysDataset {
    /// Imports the coindays maps and seeds the minimal initial state.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let f = |name: &str| config.path_datasets().join(name);
        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            coindays_destroyed: HeightMap::new_bin(1, MapKind::Inserted, &f("coindays_destroyed")),
            coindays_destroyed_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("coindays_destroyed_1d_sum"),
            ),
        };
        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset, config));
        Ok(dataset)
    }
    /// Converts the block's satdays destroyed to BTC-days and records it at
    /// `height`; on a date's last block, also records the day's sum.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            satdays_destroyed,
            date_blocks_range,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
    ) {
        let destroyed = satdays_destroyed.to_btc() as f32;
        self.coindays_destroyed.insert(height, destroyed);
        if is_date_last_block {
            let day_sum = self.coindays_destroyed.sum_range(date_blocks_range);
            self.coindays_destroyed_1d_sum.insert(date, day_sum);
        }
    }
}
impl AnyDataset for CoindaysDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,659 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
structs::{BiMap, Config, DateMap, Height, HeightMap, MapKind},
utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates, RatioDataset};
#[derive(Allocative, Iterable)]
pub struct CointimeDataset {
min_initial_states: MinInitialStates,
// Inserted
pub coinblocks_destroyed: HeightMap<f32>,
pub coinblocks_destroyed_1d_sum: DateMap<f32>,
// Computed
pub active_cap: BiMap<f32>,
pub active_price: BiMap<f32>,
pub active_price_ratio: RatioDataset,
pub active_supply: BiMap<f32>,
pub active_supply_3m_net_change: BiMap<f32>,
pub active_supply_net_change: BiMap<f32>,
pub activity_to_vaultedness_ratio: BiMap<f32>,
pub coinblocks_created: HeightMap<f32>,
pub coinblocks_created_1d_sum: DateMap<f32>,
pub coinblocks_stored: HeightMap<f32>,
pub coinblocks_stored_1d_sum: DateMap<f32>,
pub cointime_adjusted_velocity: DateMap<f32>,
pub cointime_adjusted_inflation_rate: DateMap<f32>,
pub cointime_adjusted_yearly_inflation_rate: DateMap<f32>,
pub cointime_cap: BiMap<f32>,
pub cointime_price: BiMap<f32>,
pub cointime_price_ratio: RatioDataset,
pub cointime_value_created: HeightMap<f32>,
pub cointime_value_created_1d_sum: DateMap<f32>,
pub cointime_value_destroyed: HeightMap<f32>,
pub cointime_value_destroyed_1d_sum: DateMap<f32>,
pub cointime_value_stored: HeightMap<f32>,
pub cointime_value_stored_1d_sum: DateMap<f32>,
pub concurrent_liveliness: DateMap<f32>,
pub concurrent_liveliness_2w_median: DateMap<f32>,
pub cumulative_coinblocks_created: BiMap<f32>,
pub cumulative_coinblocks_destroyed: BiMap<f32>,
pub cumulative_coinblocks_stored: BiMap<f32>,
pub investor_cap: BiMap<f32>,
pub investorness: BiMap<f32>,
pub liveliness: BiMap<f32>,
pub liveliness_net_change: BiMap<f32>,
pub liveliness_net_change_2w_median: BiMap<f32>,
pub producerness: BiMap<f32>,
pub thermo_cap: BiMap<f32>,
pub thermo_cap_to_investor_cap_ratio: BiMap<f32>,
pub total_cointime_value_created: BiMap<f32>,
pub total_cointime_value_destroyed: BiMap<f32>,
pub total_cointime_value_stored: BiMap<f32>,
pub true_market_deviation: BiMap<f32>,
pub true_market_mean: BiMap<f32>,
pub true_market_mean_ratio: RatioDataset,
pub true_market_net_unrealized_profit_and_loss: BiMap<f32>,
pub vaulted_cap: BiMap<f32>,
pub vaulted_price: BiMap<f32>,
pub vaulted_price_ratio: RatioDataset,
pub vaulted_supply: BiMap<f32>,
pub vaulted_supply_net_change: BiMap<f32>,
pub vaulted_supply_3m_net_change: BiMap<f32>,
pub vaultedness: BiMap<f32>,
pub vaulting_rate: BiMap<f32>,
}
impl CointimeDataset {
pub fn import(config: &Config) -> color_eyre::Result<Self> {
let path_dataset = config.path_datasets();
let f = |s: &str| path_dataset.join(s);
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// Inserted
coinblocks_destroyed: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("coinblocks_destroyed"),
),
coinblocks_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("coinblocks_destroyed_1d_sum"),
),
// Computed
active_cap: BiMap::new_bin(1, MapKind::Computed, &f("active_cap")),
active_price: BiMap::new_bin(1, MapKind::Computed, &f("active_price")),
active_price_ratio: RatioDataset::import(&path_dataset, "active_price", config)?,
active_supply: BiMap::new_bin(1, MapKind::Computed, &f("active_supply")),
active_supply_3m_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("active_supply_3m_net_change"),
),
active_supply_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("active_supply_net_change"),
),
activity_to_vaultedness_ratio: BiMap::new_bin(
2,
MapKind::Computed,
&f("activity_to_vaultedness_ratio"),
),
coinblocks_created: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_created")),
coinblocks_created_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("coinblocks_created_1d_sum"),
),
coinblocks_stored: HeightMap::new_bin(1, MapKind::Computed, &f("coinblocks_stored")),
coinblocks_stored_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("coinblocks_stored_1d_sum"),
),
cointime_adjusted_velocity: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_adjusted_velocity"),
),
cointime_adjusted_inflation_rate: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_adjusted_inflation_rate"),
),
cointime_adjusted_yearly_inflation_rate: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_adjusted_yearly_inflation_rate"),
),
cointime_cap: BiMap::new_bin(1, MapKind::Computed, &f("cointime_cap")),
cointime_price: BiMap::new_bin(1, MapKind::Computed, &f("cointime_price")),
cointime_price_ratio: RatioDataset::import(&path_dataset, "cointime_price", config)?,
cointime_value_created: HeightMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_created"),
),
cointime_value_created_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_created_1d_sum"),
),
cointime_value_destroyed: HeightMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_destroyed"),
),
cointime_value_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_destroyed_1d_sum"),
),
cointime_value_stored: HeightMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_stored"),
),
cointime_value_stored_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("cointime_value_stored_1d_sum"),
),
concurrent_liveliness: DateMap::new_bin(
1,
MapKind::Computed,
&f("concurrent_liveliness"),
),
concurrent_liveliness_2w_median: DateMap::new_bin(
2,
MapKind::Computed,
&f("concurrent_liveliness_2w_median"),
),
cumulative_coinblocks_created: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_coinblocks_created"),
),
cumulative_coinblocks_destroyed: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_coinblocks_destroyed"),
),
cumulative_coinblocks_stored: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_coinblocks_stored"),
),
investor_cap: BiMap::new_bin(1, MapKind::Computed, &f("investor_cap")),
investorness: BiMap::new_bin(1, MapKind::Computed, &f("investorness")),
liveliness: BiMap::new_bin(1, MapKind::Computed, &f("liveliness")),
liveliness_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("liveliness_net_change"),
),
liveliness_net_change_2w_median: BiMap::new_bin(
3,
MapKind::Computed,
&f("liveliness_net_change_2w_median"),
),
producerness: BiMap::new_bin(1, MapKind::Computed, &f("producerness")),
thermo_cap: BiMap::new_bin(1, MapKind::Computed, &f("thermo_cap")),
thermo_cap_to_investor_cap_ratio: BiMap::new_bin(
2,
MapKind::Computed,
&f("thermo_cap_to_investor_cap_ratio"),
),
total_cointime_value_created: BiMap::new_bin(
1,
MapKind::Computed,
&f("total_cointime_value_created"),
),
total_cointime_value_destroyed: BiMap::new_bin(
1,
MapKind::Computed,
&f("total_cointime_value_destroyed"),
),
total_cointime_value_stored: BiMap::new_bin(
1,
MapKind::Computed,
&f("total_cointime_value_stored"),
),
true_market_deviation: BiMap::new_bin(
1,
MapKind::Computed,
&f("true_market_deviation"),
),
true_market_mean: BiMap::new_bin(1, MapKind::Computed, &f("true_market_mean")),
true_market_mean_ratio: RatioDataset::import(
&path_dataset,
"true_market_mean",
config,
)?,
true_market_net_unrealized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("true_market_net_unrealized_profit_and_loss"),
),
vaulted_cap: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_cap")),
vaulted_price: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_price")),
vaulted_price_ratio: RatioDataset::import(&path_dataset, "vaulted_price", config)?,
vaulted_supply: BiMap::new_bin(1, MapKind::Computed, &f("vaulted_supply")),
vaulted_supply_3m_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("vaulted_supply_3m_net_change"),
),
vaulted_supply_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("vaulted_supply_net_change"),
),
vaultedness: BiMap::new_bin(1, MapKind::Computed, &f("vaultedness")),
vaulting_rate: BiMap::new_bin(1, MapKind::Computed, &f("vaulting_rate")),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
date,
satblocks_destroyed,
date_blocks_range,
is_date_last_block,
..
}: &InsertData,
) {
self.coinblocks_destroyed
.insert(height, satblocks_destroyed.to_btc() as f32);
if is_date_last_block {
self.coinblocks_destroyed_1d_sum
.insert(date, self.coinblocks_destroyed.sum_range(date_blocks_range));
}
}
#[allow(clippy::too_many_arguments)]
pub fn compute(
&mut self,
compute_data: &ComputeData,
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
closes: &mut BiMap<f32>,
circulating_supply: &mut BiMap<f64>,
realized_cap: &mut BiMap<f32>,
realized_price: &mut BiMap<f32>,
inflation_rate: &mut DateMap<f64>,
yearly_inflation_rate: &mut DateMap<f64>,
annualized_transaction_volume: &mut DateMap<f32>,
cumulative_subsidy_in_dollars: &mut BiMap<f32>,
) {
let &ComputeData { heights, dates, .. } = compute_data;
self.cumulative_coinblocks_destroyed
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_destroyed);
self.cumulative_coinblocks_destroyed
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_destroyed_1d_sum);
self.coinblocks_created.multi_insert_simple_transform(
heights,
&mut circulating_supply.height,
|circulating_supply, _| circulating_supply as f32,
);
self.coinblocks_created_1d_sum.multi_insert_sum_range(
dates,
&self.coinblocks_created,
first_height,
last_height,
);
self.cumulative_coinblocks_created
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_created);
self.cumulative_coinblocks_created
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_created_1d_sum);
self.coinblocks_stored.multi_insert_subtract(
heights,
&mut self.coinblocks_created,
&mut self.coinblocks_destroyed,
);
self.coinblocks_stored_1d_sum.multi_insert_sum_range(
dates,
&self.coinblocks_stored,
first_height,
last_height,
);
self.cumulative_coinblocks_stored
.height
.multi_insert_cumulative(heights, &mut self.coinblocks_stored);
self.cumulative_coinblocks_stored
.date
.multi_insert_cumulative(dates, &mut self.coinblocks_stored_1d_sum);
self.liveliness.multi_insert_divide(
heights,
dates,
&mut self.cumulative_coinblocks_destroyed,
&mut self.cumulative_coinblocks_created,
);
self.vaultedness.multi_insert_simple_transform(
heights,
dates,
&mut self.liveliness,
&|liveliness| 1.0 - liveliness,
);
self.activity_to_vaultedness_ratio.multi_insert_divide(
heights,
dates,
&mut self.liveliness,
&mut self.vaultedness,
);
self.concurrent_liveliness.multi_insert_divide(
dates,
&mut self.coinblocks_destroyed_1d_sum,
&mut self.coinblocks_created_1d_sum,
);
self.concurrent_liveliness_2w_median.multi_insert_median(
dates,
&mut self.concurrent_liveliness,
Some(TWO_WEEK_IN_DAYS),
);
self.liveliness_net_change.multi_insert_net_change(
heights,
dates,
&mut self.liveliness,
ONE_DAY_IN_DAYS,
);
self.liveliness_net_change_2w_median
.multi_insert_net_change(heights, dates, &mut self.liveliness, TWO_WEEK_IN_DAYS);
self.vaulted_supply.multi_insert_multiply(
heights,
dates,
&mut self.vaultedness,
circulating_supply,
);
self.vaulted_supply_net_change.multi_insert_net_change(
heights,
dates,
&mut self.vaulted_supply,
ONE_DAY_IN_DAYS,
);
self.vaulted_supply_3m_net_change.multi_insert_net_change(
heights,
dates,
&mut self.vaulted_supply,
THREE_MONTHS_IN_DAYS,
);
self.vaulting_rate.multi_insert_simple_transform(
heights,
dates,
&mut self.vaulted_supply,
&|vaulted_supply| vaulted_supply * ONE_YEAR_IN_DAYS as f32,
);
self.active_supply.multi_insert_multiply(
heights,
dates,
&mut self.liveliness,
circulating_supply,
);
self.active_supply_net_change.multi_insert_net_change(
heights,
dates,
&mut self.active_supply,
ONE_DAY_IN_DAYS,
);
self.active_supply_3m_net_change.multi_insert_net_change(
heights,
dates,
&mut self.active_supply,
THREE_MONTHS_IN_DAYS,
);
// TODO: Do these
// let min_vaulted_supply = ;
// let max_active_supply = ;
self.cointime_adjusted_inflation_rate.multi_insert_multiply(
dates,
&mut self.activity_to_vaultedness_ratio.date,
inflation_rate,
);
self.cointime_adjusted_yearly_inflation_rate
.multi_insert_multiply(
dates,
&mut self.activity_to_vaultedness_ratio.date,
yearly_inflation_rate,
);
self.cointime_adjusted_velocity.multi_insert_divide(
dates,
annualized_transaction_volume,
&mut self.active_supply.date,
);
// TODO:
// const activeSupplyChangeFromTransactions90dChange =
// createNetChangeLazyDataset(activeSupplyChangeFromTransactions, 90);
// const activeSupplyChangeFromIssuance = createMultipliedLazyDataset(
// lastSubsidy,
// liveliness,
// );
self.thermo_cap.multi_insert_simple_transform(
heights,
dates,
cumulative_subsidy_in_dollars,
&|cumulative_subsidy_in_dollars| cumulative_subsidy_in_dollars,
);
self.investor_cap
.multi_insert_subtract(heights, dates, realized_cap, &mut self.thermo_cap);
self.thermo_cap_to_investor_cap_ratio
.multi_insert_percentage(heights, dates, &mut self.thermo_cap, &mut self.investor_cap);
// TODO:
// const activeSupplyChangeFromIssuance90dChange = createNetChangeLazyDataset(
// activeSupplyChangeFromIssuance,
// 90,
// );
self.active_price
.multi_insert_divide(heights, dates, realized_price, &mut self.liveliness);
self.active_cap.height.multi_insert_multiply(
heights,
&mut self.active_supply.height,
&mut closes.height,
);
self.active_cap.date.multi_insert_multiply(
dates,
&mut self.active_supply.date,
&mut closes.date,
);
self.vaulted_price.multi_insert_divide(
heights,
dates,
realized_price,
&mut self.vaultedness,
);
self.vaulted_cap.height.multi_insert_multiply(
heights,
&mut self.vaulted_supply.height,
&mut closes.height,
);
self.vaulted_cap.date.multi_insert_multiply(
dates,
&mut self.vaulted_supply.date,
&mut closes.date,
);
self.true_market_mean.multi_insert_divide(
heights,
dates,
&mut self.investor_cap,
&mut self.active_supply,
);
self.true_market_deviation.multi_insert_divide(
heights,
dates,
&mut self.active_cap,
&mut self.investor_cap,
);
self.true_market_net_unrealized_profit_and_loss
.height
.multi_insert_complex_transform(
heights,
&mut self.active_cap.height,
|(active_cap, height, ..)| {
let investor_cap = self.investor_cap.height.get(height).unwrap();
(active_cap - investor_cap) / active_cap
},
);
self.true_market_net_unrealized_profit_and_loss
.date
.multi_insert_complex_transform(
dates,
&mut self.active_cap.date,
|(active_cap, date, _, _)| {
let investor_cap = self.investor_cap.date.get(date).unwrap();
(active_cap - investor_cap) / active_cap
},
);
self.investorness
.multi_insert_divide(heights, dates, &mut self.investor_cap, realized_cap);
self.producerness
.multi_insert_divide(heights, dates, &mut self.thermo_cap, realized_cap);
self.cointime_value_destroyed.multi_insert_multiply(
heights,
&mut self.coinblocks_destroyed,
&mut closes.height,
);
self.cointime_value_destroyed_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_destroyed_1d_sum,
&mut closes.date,
);
self.cointime_value_created.multi_insert_multiply(
heights,
&mut self.coinblocks_created,
&mut closes.height,
);
self.cointime_value_created_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_created_1d_sum,
&mut closes.date,
);
self.cointime_value_stored.multi_insert_multiply(
heights,
&mut self.coinblocks_stored,
&mut closes.height,
);
self.cointime_value_stored_1d_sum.multi_insert_multiply(
dates,
&mut self.coinblocks_stored_1d_sum,
&mut closes.date,
);
self.total_cointime_value_created
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_created);
self.total_cointime_value_created
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_created_1d_sum);
self.total_cointime_value_destroyed
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_destroyed);
self.total_cointime_value_destroyed
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_destroyed_1d_sum);
self.total_cointime_value_stored
.height
.multi_insert_cumulative(heights, &mut self.cointime_value_stored);
self.total_cointime_value_stored
.date
.multi_insert_cumulative(dates, &mut self.cointime_value_stored_1d_sum);
self.cointime_price.multi_insert_divide(
heights,
dates,
&mut self.total_cointime_value_destroyed,
&mut self.cumulative_coinblocks_stored,
);
self.cointime_cap.multi_insert_multiply(
heights,
dates,
&mut self.cointime_price,
circulating_supply,
);
self.active_price_ratio
.compute(compute_data, closes, &mut self.active_price);
self.cointime_price_ratio
.compute(compute_data, closes, &mut self.cointime_price);
self.true_market_mean_ratio
.compute(compute_data, closes, &mut self.true_market_mean);
self.vaulted_price_ratio
.compute(compute_data, closes, &mut self.vaulted_price);
}
}
// Hooks `CointimeDataset` into the generic dataset machinery.
impl AnyDataset for CointimeDataset {
    // Exposes the minimum initial states derived at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,50 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::structs::{BiMap, Config, MapKind};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Constant-valued series (0, 1, 50, 100) computed for every height and date,
/// usable as fixed reference lines (e.g. percentage baselines) on charts.
#[derive(Allocative, Iterable)]
pub struct ConstantDataset {
    // Minimum states required before this dataset can resume computing.
    min_initial_states: MinInitialStates,
    pub _0: BiMap<u16>,
    pub _1: BiMap<u16>,
    pub _50: BiMap<u16>,
    pub _100: BiMap<u16>,
}
impl ConstantDataset {
    /// Opens (or creates) the binary maps backing each constant series and
    /// derives the minimum initial states from the freshly built dataset.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        // Resolve a dataset name to its on-disk path.
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Computed
            _0: BiMap::new_bin(1, MapKind::Computed, &path("0")),
            _1: BiMap::new_bin(1, MapKind::Computed, &path("1")),
            _50: BiMap::new_bin(1, MapKind::Computed, &path("50")),
            _100: BiMap::new_bin(1, MapKind::Computed, &path("100")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset, config);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Writes each constant value at every requested height and date.
    pub fn compute(&mut self, &ComputeData { heights, dates, .. }: &ComputeData) {
        let Self {
            _0, _1, _50, _100, ..
        } = self;
        _0.multi_insert_const(heights, dates, 0);
        _1.multi_insert_const(heights, dates, 1);
        _50.multi_insert_const(heights, dates, 50);
        _100.multi_insert_const(heights, dates, 100);
    }
}
// Hooks `ConstantDataset` into the generic dataset machinery.
impl AnyDataset for ConstantDataset {
    // Exposes the minimum initial states derived at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,56 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, DateMap, Height, MapKind},
};
use super::{InsertData, MinInitialStates};
/// Per-date block-range metadata: for each date, the first and last block
/// height belonging to that day.
#[derive(Allocative, Iterable)]
pub struct DateMetadataDataset {
    // Minimum states required before this dataset can resume computing.
    min_initial_states: MinInitialStates,
    pub first_height: DateMap<Height>,
    pub last_height: DateMap<Height>,
}
impl DateMetadataDataset {
    /// Opens the per-date height-range maps and seeds the minimum initial states.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        // Resolve a dataset name to its on-disk path.
        let path = |name: &str| config.path_datasets().join(name);

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            // Inserted
            first_height: DateMap::new_bin(1, MapKind::Inserted, &path("first_height")),
            last_height: DateMap::new_bin(1, MapKind::Inserted, &path("last_height")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset, config);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Records, for `date`, the first block height of the day and the height of
    /// the block currently being processed (the last one seen so far).
    pub fn insert(
        &mut self,
        &InsertData {
            date,
            date_first_height,
            height,
            ..
        }: &InsertData,
    ) {
        self.first_height.insert(date, date_first_height);
        self.last_height.insert(date, height);
    }
}
// Hooks `DateMetadataDataset` into the generic dataset machinery.
impl AnyDataset for DateMetadataDataset {
    // Exposes the minimum initial states derived at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,760 @@
use allocative::Allocative;
use itertools::Itertools;
use ordered_float::OrderedFloat;
use struct_iterable::Iterable;
use crate::{
parser::datasets::AnyDataset,
structs::{Amount, BiMap, Config, DateMap, Height, HeightMap, MapKey, MapKind},
utils::{
BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS,
TARGET_BLOCKS_PER_DAY,
},
};
use super::{
ComputeData, DateRecapDataset, InsertData, MinInitialStates, RecapDataset, RecapOptions,
};
/// Mining-related series: coinbase, fees, subsidy (raw and in dollars),
/// difficulty, block size/weight/interval, plus derived aggregates such as
/// hash rate, hash price, inflation rates and the Puell Multiple.
#[derive(Allocative, Iterable)]
pub struct MiningDataset {
    // Minimum states required before this dataset can resume computing.
    min_initial_states: MinInitialStates,
    // Inserted
    pub blocks_mined: DateMap<usize>,
    pub total_blocks_mined: DateMap<usize>,
    pub coinbase: HeightMap<f64>,
    pub coinbase_1d_sum: DateMap<f64>,
    pub coinbase_in_dollars: HeightMap<f32>,
    pub coinbase_in_dollars_1d_sum: DateMap<f32>,
    pub fees: HeightMap<f64>,
    pub fees_1d_sum: DateMap<f64>,
    pub fees_in_dollars: HeightMap<f32>,
    pub fees_in_dollars_1d_sum: DateMap<f32>,
    // Raw
    // pub average_fee_paid: BiMap<f32>,
    // pub max_fee_paid: BiMap<f32>,
    // pub _90th_percentile_fee_paid: BiMap<f32>,
    // pub _75th_percentile_fee_paid: BiMap<f32>,
    // pub median_fee_paid: BiMap<f32>,
    // pub _25th_percentile_fee_paid: BiMap<f32>,
    // pub _10th_percentile_fee_paid: BiMap<f32>,
    // pub min_fee_paid: BiMap<f32>,
    // sat/vB
    // pub average_fee_price: BiMap<f32>,
    // pub max_fee_price: BiMap<f32>,
    // pub _90th_percentile_fee_price: BiMap<f32>,
    // pub _75th_percentile_fee_price: BiMap<f32>,
    // pub median_fee_price: BiMap<f32>,
    // pub _25th_percentile_fee_price: BiMap<f32>,
    // pub _10th_percentile_fee_price: BiMap<f32>,
    // pub min_fee_price: BiMap<f32>,
    // -
    pub subsidy: HeightMap<f64>,
    pub subsidy_1d_sum: DateMap<f64>,
    pub subsidy_in_dollars: HeightMap<f32>,
    pub subsidy_in_dollars_1d_sum: DateMap<f32>,
    // "last_*" series hold the value of the date's final block.
    pub last_coinbase: DateMap<f64>,
    pub last_coinbase_in_dollars: DateMap<f32>,
    pub last_fees: DateMap<f64>,
    pub last_fees_in_dollars: DateMap<f32>,
    pub last_subsidy: DateMap<f64>,
    pub last_subsidy_in_dollars: DateMap<f32>,
    pub difficulty: BiMap<f64>,
    pub block_size: HeightMap<f32>,   // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_vbytes: HeightMap<u64>,
    pub block_interval: HeightMap<u32>, // in s
    // Computed
    pub annualized_issuance: DateMap<f64>, // Same as subsidy_1y_sum
    pub blocks_mined_1d_target: DateMap<usize>,
    pub blocks_mined_1m_sma: DateMap<f32>,
    pub blocks_mined_1m_sum: DateMap<usize>,
    pub blocks_mined_1m_target: DateMap<usize>,
    pub blocks_mined_1w_sma: DateMap<f32>,
    pub blocks_mined_1w_sum: DateMap<usize>,
    pub blocks_mined_1w_target: DateMap<usize>,
    pub blocks_mined_1y_sum: DateMap<usize>,
    pub blocks_mined_1y_target: DateMap<usize>,
    pub cumulative_block_size: BiMap<f32>,
    pub cumulative_block_size_gigabytes: BiMap<f32>,
    pub subsidy_1y_sum: DateMap<f64>,
    pub subsidy_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_subsidy: BiMap<f64>,
    pub cumulative_subsidy_in_dollars: BiMap<f32>,
    pub coinbase_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1d_sum_1y_sma: DateMap<f32>,
    pub cumulative_coinbase: BiMap<f64>,
    pub cumulative_coinbase_in_dollars: BiMap<f32>,
    pub fees_1y_sum: DateMap<f64>,
    pub fees_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_fees: BiMap<f64>,
    pub cumulative_fees_in_dollars: BiMap<f32>,
    pub inflation_rate: DateMap<f64>,
    pub yearly_inflation_rate: DateMap<f64>,
    pub subsidy_to_coinbase_ratio: HeightMap<f64>,
    pub subsidy_to_coinbase_1d_ratio: DateMap<f64>,
    pub fees_to_coinbase_ratio: HeightMap<f64>,
    pub fees_to_coinbase_1d_ratio: DateMap<f64>,
    pub hash_rate: DateMap<f64>,
    pub hash_rate_1w_sma: DateMap<f32>,
    pub hash_rate_1m_sma: DateMap<f32>,
    pub hash_rate_2m_sma: DateMap<f32>,
    pub hash_price: DateMap<f64>,
    pub hash_price_min: DateMap<f64>,
    pub hash_price_rebound: DateMap<f64>,
    pub difficulty_adjustment: DateMap<f64>,
    // Daily recap (min/max/percentiles/…) datasets for per-block series.
    pub block_size_recap: DateRecapDataset<f32>,   // in MB
    pub block_weight_recap: DateRecapDataset<f32>, // in MB
    pub block_vbytes_recap: DateRecapDataset<u64>,
    pub block_interval_recap: DateRecapDataset<u32>, // in s
    pub puell_multiple: DateMap<f32>,
    // pub hash_price_in_dollars: DateMap<f64>,
    // pub hash_price_30d_volatility: BiMap<f32>,
    // difficulty_adjustment
    // next_difficulty_adjustment
    // op return fees
    // inscriptions fees
    // until adjustement
    // until halving in days
    // until halving in blocks
}
impl MiningDataset {
    /// Opens (or creates) every binary map backing this dataset, then derives
    /// the minimum initial states from the freshly imported maps.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        // Resolve a dataset name to its on-disk path.
        let f = |s: &str| config.path_datasets().join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            total_blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("total_blocks_mined")),
            blocks_mined: DateMap::new_bin(1, MapKind::Inserted, &f("blocks_mined")),
            coinbase: HeightMap::new_bin(1, MapKind::Inserted, &f("coinbase")),
            coinbase_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("coinbase_1d_sum")),
            coinbase_in_dollars: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("coinbase_in_dollars"),
            ),
            coinbase_in_dollars_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("coinbase_in_dollars_1d_sum"),
            ),
            fees: HeightMap::new_bin(1, MapKind::Inserted, &f("fees")),
            fees_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("fees_1d_sum")),
            fees_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("fees_in_dollars")),
            fees_in_dollars_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("fees_in_dollars_1d_sum"),
            ),
            subsidy: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy")),
            subsidy_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("subsidy_1d_sum")),
            subsidy_in_dollars: HeightMap::new_bin(1, MapKind::Inserted, &f("subsidy_in_dollars")),
            subsidy_in_dollars_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("subsidy_in_dollars_1d_sum"),
            ),
            last_subsidy: DateMap::new_bin(1, MapKind::Inserted, &f("last_subsidy")),
            last_subsidy_in_dollars: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("last_subsidy_in_dollars"),
            ),
            last_coinbase: DateMap::new_bin(1, MapKind::Inserted, &f("last_coinbase")),
            last_coinbase_in_dollars: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("last_coinbase_in_dollars"),
            ),
            last_fees: DateMap::new_bin(1, MapKind::Inserted, &f("last_fees")),
            last_fees_in_dollars: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("last_fees_in_dollars"),
            ),
            difficulty: BiMap::new_bin(1, MapKind::Inserted, &f("difficulty")),
            block_size: HeightMap::new_bin(1, MapKind::Inserted, &f("block_size")),
            block_weight: HeightMap::new_bin(1, MapKind::Inserted, &f("block_weight")),
            block_vbytes: HeightMap::new_bin(1, MapKind::Inserted, &f("block_vbytes")),
            // NOTE(review): first argument 2 (vs 1 elsewhere) — presumably a
            // serialization-version bump for this map; confirm.
            block_interval: HeightMap::new_bin(2, MapKind::Inserted, &f("block_interval")),
            // ---
            // Computed
            // ---
            coinbase_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("coinbase_1y_sum")),
            coinbase_in_dollars_1y_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("coinbase_in_dollars_1y_sum"),
            ),
            coinbase_in_dollars_1d_sum_1y_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("coinbase_in_dollars_1d_sum_1y_sma"),
            ),
            cumulative_coinbase: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_coinbase")),
            cumulative_coinbase_in_dollars: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_coinbase_in_dollars"),
            ),
            fees_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("fees_1y_sum")),
            fees_in_dollars_1y_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("fees_in_dollars_1y_sum"),
            ),
            cumulative_fees: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_fees")),
            cumulative_fees_in_dollars: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_fees_in_dollars"),
            ),
            subsidy_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("subsidy_1y_sum")),
            subsidy_in_dollars_1y_sum: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("subsidy_in_dollars_1y_sum"),
            ),
            cumulative_subsidy: BiMap::new_bin(1, MapKind::Computed, &f("cumulative_subsidy")),
            cumulative_subsidy_in_dollars: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_subsidy_in_dollars"),
            ),
            subsidy_to_coinbase_ratio: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("subsidy_to_coinbase_ratio"),
            ),
            subsidy_to_coinbase_1d_ratio: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("subsidy_to_coinbase_1d_ratio"),
            ),
            fees_to_coinbase_ratio: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("fees_to_coinbase_ratio"),
            ),
            fees_to_coinbase_1d_ratio: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("fees_to_coinbase_1d_ratio"),
            ),
            annualized_issuance: DateMap::new_bin(1, MapKind::Computed, &f("annualized_issuance")),
            // NOTE(review): version 2 here as well — presumably a format bump; confirm.
            inflation_rate: DateMap::new_bin(2, MapKind::Computed, &f("inflation_rate")),
            yearly_inflation_rate: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("yearly_inflation_rate"),
            ),
            blocks_mined_1d_target: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("blocks_mined_1d_target"),
            ),
            blocks_mined_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sma")),
            blocks_mined_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sma")),
            blocks_mined_1w_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1w_sum")),
            blocks_mined_1m_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1m_sum")),
            blocks_mined_1y_sum: DateMap::new_bin(1, MapKind::Computed, &f("blocks_mined_1y_sum")),
            blocks_mined_1w_target: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("blocks_mined_1w_target"),
            ),
            blocks_mined_1m_target: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("blocks_mined_1m_target"),
            ),
            blocks_mined_1y_target: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("blocks_mined_1y_target"),
            ),
            difficulty_adjustment: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("difficulty_adjustment"),
            ),
            block_size_recap: RecapDataset::import(
                &f("block_size_1d"),
                RecapOptions::default()
                    .add_sum()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            cumulative_block_size: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_block_size"),
            ),
            cumulative_block_size_gigabytes: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("cumulative_block_size_gigabytes"),
            ),
            block_weight_recap: RecapDataset::import(
                &f("block_weight_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            block_vbytes_recap: RecapDataset::import(
                &f("block_vbytes_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            block_interval_recap: RecapDataset::import(
                &f("block_interval_1d"),
                RecapOptions::default()
                    .add_average()
                    .add_max()
                    .add_90p()
                    .add_75p()
                    .add_median()
                    .add_25p()
                    .add_10p()
                    .add_min(),
            )?,
            hash_rate: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate")),
            hash_rate_1w_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1w_sma")),
            hash_rate_1m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_1m_sma")),
            hash_rate_2m_sma: DateMap::new_bin(1, MapKind::Computed, &f("hash_rate_2m_sma")),
            hash_price: DateMap::new_bin(1, MapKind::Computed, &f("hash_price")),
            hash_price_min: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_min")),
            hash_price_rebound: DateMap::new_bin(1, MapKind::Computed, &f("hash_price_rebound")),
            puell_multiple: DateMap::new_bin(1, MapKind::Computed, &f("puell_multiple")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
pub fn insert(
&mut self,
&InsertData {
date_first_height,
height,
coinbase,
fees,
date_blocks_range,
is_date_last_block,
block_price,
date,
difficulty,
block_size,
block_vbytes,
block_weight,
block_interval,
..
}: &InsertData,
) {
self.coinbase.insert(height, coinbase.to_btc());
let coinbase_in_dollars = self
.coinbase_in_dollars
.insert(height, (block_price * coinbase).to_dollar() as f32);
let sumed_fees = Amount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum());
self.fees.insert(height, sumed_fees.to_btc());
let sumed_fees_in_dollars = self
.fees_in_dollars
.insert(height, (block_price * sumed_fees).to_dollar() as f32);
let subsidy = coinbase - sumed_fees;
self.subsidy.insert(height, subsidy.to_btc());
let subsidy_in_dollars = self
.subsidy_in_dollars
.insert(height, (block_price * subsidy).to_dollar() as f32);
self.difficulty.height.insert(height, difficulty);
self.block_size
.insert(height, block_size as f32 / BYTES_IN_MB as f32);
self.block_weight
.insert(height, block_weight as f32 / BYTES_IN_MB as f32);
self.block_vbytes.insert(height, block_vbytes);
self.block_interval.insert(height, *block_interval);
if is_date_last_block {
self.coinbase_1d_sum
.insert(date, self.coinbase.sum_range(date_blocks_range));
self.coinbase_in_dollars_1d_sum
.insert(date, self.coinbase_in_dollars.sum_range(date_blocks_range));
self.fees_1d_sum
.insert(date, self.fees.sum_range(date_blocks_range));
self.fees_in_dollars_1d_sum
.insert(date, self.fees_in_dollars.sum_range(date_blocks_range));
self.subsidy_1d_sum
.insert(date, self.subsidy.sum_range(date_blocks_range));
self.subsidy_in_dollars_1d_sum
.insert(date, self.subsidy_in_dollars.sum_range(date_blocks_range));
self.last_coinbase.insert(date, coinbase.to_btc());
self.last_coinbase_in_dollars
.insert(date, coinbase_in_dollars);
self.last_subsidy.insert(date, subsidy.to_btc());
self.last_subsidy_in_dollars
.insert(date, subsidy_in_dollars);
self.last_fees.insert(date, sumed_fees.to_btc());
self.last_fees_in_dollars
.insert(date, sumed_fees_in_dollars);
let total_blocks_mined = self.total_blocks_mined.insert(date, height.to_usize() + 1);
self.blocks_mined
.insert(date, total_blocks_mined - date_first_height.to_usize());
self.difficulty.date.insert(date, difficulty);
}
}
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
first_height: &mut DateMap<Height>,
last_height: &mut DateMap<Height>,
) {
self.blocks_mined_1w_sum.multi_insert_last_x_sum(
dates,
&mut self.blocks_mined,
ONE_WEEK_IN_DAYS,
);
self.blocks_mined_1m_sum.multi_insert_last_x_sum(
dates,
&mut self.blocks_mined,
ONE_MONTH_IN_DAYS,
);
self.blocks_mined_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.blocks_mined,
ONE_YEAR_IN_DAYS,
);
self.subsidy_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.subsidy_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.subsidy_in_dollars_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.subsidy_in_dollars_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.cumulative_subsidy
.height
.multi_insert_cumulative(heights, &mut self.subsidy);
self.cumulative_subsidy
.date
.multi_insert_cumulative(dates, &mut self.subsidy_1d_sum);
self.cumulative_subsidy_in_dollars
.height
.multi_insert_cumulative(heights, &mut self.subsidy_in_dollars);
self.cumulative_subsidy_in_dollars
.date
.multi_insert_cumulative(dates, &mut self.subsidy_in_dollars_1d_sum);
self.fees_1y_sum
.multi_insert_last_x_sum(dates, &mut self.fees_1d_sum, ONE_YEAR_IN_DAYS);
self.fees_in_dollars_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.fees_in_dollars_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.cumulative_fees
.height
.multi_insert_cumulative(heights, &mut self.fees);
self.cumulative_fees
.date
.multi_insert_cumulative(dates, &mut self.fees_1d_sum);
self.cumulative_fees_in_dollars
.height
.multi_insert_cumulative(heights, &mut self.fees_in_dollars);
self.cumulative_fees_in_dollars
.date
.multi_insert_cumulative(dates, &mut self.fees_in_dollars_1d_sum);
self.coinbase_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.coinbase_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.coinbase_in_dollars_1y_sum.multi_insert_last_x_sum(
dates,
&mut self.coinbase_in_dollars_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.coinbase_in_dollars_1d_sum_1y_sma
.multi_insert_simple_average(
dates,
&mut self.coinbase_in_dollars_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.cumulative_coinbase
.height
.multi_insert_cumulative(heights, &mut self.coinbase);
self.cumulative_coinbase
.date
.multi_insert_cumulative(dates, &mut self.coinbase_1d_sum);
self.cumulative_coinbase_in_dollars
.height
.multi_insert_cumulative(heights, &mut self.coinbase_in_dollars);
self.cumulative_coinbase_in_dollars
.date
.multi_insert_cumulative(dates, &mut self.coinbase_in_dollars_1d_sum);
self.subsidy_to_coinbase_ratio.multi_insert_percentage(
heights,
&mut self.subsidy,
&mut self.coinbase,
);
self.subsidy_to_coinbase_1d_ratio.multi_insert_percentage(
dates,
&mut self.subsidy_1d_sum,
&mut self.coinbase_1d_sum,
);
self.fees_to_coinbase_ratio.multi_insert_percentage(
heights,
&mut self.fees,
&mut self.coinbase,
);
self.fees_to_coinbase_1d_ratio.multi_insert_percentage(
dates,
&mut self.fees_1d_sum,
&mut self.coinbase_1d_sum,
);
self.annualized_issuance.multi_insert_last_x_sum(
dates,
&mut self.subsidy_1d_sum,
ONE_YEAR_IN_DAYS,
);
self.inflation_rate.multi_insert_simple_transform(
dates,
&mut self.subsidy_1d_sum,
|subsidy_1d_sum, date| {
subsidy_1d_sum * ONE_YEAR_IN_DAYS as f64
/ self.cumulative_subsidy.date.get_or_import(date).unwrap()
* 100.0
},
);
self.yearly_inflation_rate.multi_insert_percentage(
dates,
&mut self.annualized_issuance,
&mut self.cumulative_subsidy.date,
);
self.blocks_mined_1d_target
.multi_insert_const(dates, TARGET_BLOCKS_PER_DAY);
self.blocks_mined_1w_target
.multi_insert_const(dates, ONE_WEEK_IN_DAYS * TARGET_BLOCKS_PER_DAY);
self.blocks_mined_1m_target
.multi_insert_const(dates, ONE_MONTH_IN_DAYS * TARGET_BLOCKS_PER_DAY);
self.blocks_mined_1y_target
.multi_insert_const(dates, ONE_YEAR_IN_DAYS * TARGET_BLOCKS_PER_DAY);
self.blocks_mined_1w_sma.multi_insert_simple_average(
dates,
&mut self.blocks_mined,
ONE_WEEK_IN_DAYS,
);
self.blocks_mined_1m_sma.multi_insert_simple_average(
dates,
&mut self.blocks_mined,
ONE_MONTH_IN_DAYS,
);
self.cumulative_block_size
.height
.multi_insert_cumulative(heights, &mut self.block_size);
self.cumulative_block_size.date.multi_insert_last(
dates,
&mut self.cumulative_block_size.height,
last_height,
);
self.cumulative_block_size
.height
.multi_insert_cumulative(heights, &mut self.block_size);
self.cumulative_block_size_gigabytes
.multi_insert_simple_transform(heights, dates, &mut self.cumulative_block_size, &|v| {
v / 1000.0
});
// https://hashrateindex.com/blog/what-is-bitcoins-hashrate/
self.hash_rate.multi_insert(dates, |date| {
let blocks_mined = self.blocks_mined.get_or_import(date).unwrap();
let difficulty = self.difficulty.date.get_or_import(date).unwrap();
(blocks_mined as f64 / (date.get_day_completion() * TARGET_BLOCKS_PER_DAY as f64)
* difficulty
* 2.0_f64.powi(32))
/ 600.0
/ 1_000_000_000_000_000_000.0
});
self.hash_rate_1w_sma.multi_insert_simple_average(
dates,
&mut self.hash_rate,
ONE_WEEK_IN_DAYS,
);
self.hash_rate_1m_sma.multi_insert_simple_average(
dates,
&mut self.hash_rate,
ONE_MONTH_IN_DAYS,
);
self.hash_rate_2m_sma.multi_insert_simple_average(
dates,
&mut self.hash_rate,
2 * ONE_MONTH_IN_DAYS,
);
self.hash_price.multi_insert(dates, |date| {
let coinbase_in_dollars = self.coinbase_in_dollars_1d_sum.get_or_import(date).unwrap();
let hash_rate = self.hash_rate.get_or_import(date).unwrap();
coinbase_in_dollars as f64 / hash_rate / 1_000.0
});
self.hash_price_min
.multi_insert_min(dates, &mut self.hash_price, 0.0);
self.hash_price_rebound.multi_insert_percentage(
dates,
&mut self.hash_price,
&mut self.hash_price_min,
);
self.puell_multiple.multi_insert_divide(
dates,
&mut self.coinbase_in_dollars_1d_sum,
&mut self.coinbase_in_dollars_1d_sum_1y_sma,
);
self.puell_multiple.multi_insert_divide(
dates,
&mut self.coinbase_in_dollars_1d_sum,
&mut self.coinbase_in_dollars_1d_sum_1y_sma,
);
self.difficulty_adjustment.multi_insert_percentage_change(
dates,
&mut self.difficulty.date,
ONE_DAY_IN_DAYS,
);
dates.iter().for_each(|date| {
let first = first_height.get_or_import(date).unwrap();
let last = last_height.get_or_import(date).unwrap();
self.block_size_recap.compute(
*date,
&mut self
.block_size
.get_or_import_range_inclusive(first, last)
.into_iter()
.map(OrderedFloat)
.collect_vec(),
);
self.block_weight_recap.compute(
*date,
&mut self
.block_weight
.get_or_import_range_inclusive(first, last)
.into_iter()
.map(OrderedFloat)
.collect_vec(),
);
self.block_vbytes_recap.compute(
*date,
&mut self.block_vbytes.get_or_import_range_inclusive(first, last),
);
self.block_interval_recap.compute(
*date,
&mut self
.block_interval
.get_or_import_range_inclusive(first, last),
);
})
}
}
// Hooks `MiningDataset` into the generic dataset machinery.
impl AnyDataset for MiningDataset {
    // Exposes the minimum initial states derived at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

347
_src/parser/datasets/mod.rs Normal file
View File

@@ -0,0 +1,347 @@
use std::{collections::BTreeMap, ops::RangeInclusive};
use allocative::Allocative;
use itertools::Itertools;
use log::info;
use rayon::prelude::*;
mod _traits;
mod address;
mod block_metadata;
mod coindays;
mod cointime;
mod constant;
mod date_metadata;
mod mining;
mod price;
mod subs;
mod transaction;
mod utxo;
pub use _traits::*;
pub use address::*;
pub use block_metadata::*;
pub use coindays::*;
pub use cointime::*;
pub use constant::*;
pub use date_metadata::*;
pub use mining::*;
pub use price::*;
use serde_json::Value;
pub use subs::*;
pub use transaction::*;
pub use utxo::*;
use crate::{
io::Json,
parser::{
databases::Databases,
states::{
AddressCohortsInputStates,
AddressCohortsOneShotStates,
AddressCohortsRealizedStates,
States,
UTXOCohortsOneShotStates,
// UTXOCohortsReceivedStates,
UTXOCohortsSentStates,
},
},
structs::{Amount, Config, Date, Height, Price, Timestamp},
};
/// Everything a dataset might need when ingesting a single block: the block's
/// own values, the day it belongs to, and references to the shared parser
/// states/databases.
pub struct InsertData<'a> {
    pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>,
    pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>,
    pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>,
    pub amount_sent: Amount,
    pub block_interval: Timestamp,
    pub block_price: Price,
    pub block_size: usize,
    pub block_vbytes: u64,
    pub block_weight: u64,
    pub coinbase: Amount,
    // When false, address datasets are skipped entirely.
    pub compute_addresses: bool,
    pub databases: &'a Databases,
    pub date: Date,
    // Height range of all blocks mined on `date`.
    pub date_blocks_range: &'a RangeInclusive<u32>,
    pub date_first_height: Height,
    pub difficulty: f64,
    pub fees: &'a Vec<Amount>,
    pub height: Height,
    // True only for the final block of `date`; gates date-indexed inserts.
    pub is_date_last_block: bool,
    pub satblocks_destroyed: Amount,
    pub satdays_destroyed: Amount,
    pub states: &'a States,
    pub timestamp: Timestamp,
    pub transaction_count: usize,
    pub utxo_cohorts_one_shot_states: &'a UTXOCohortsOneShotStates,
    // pub utxo_cohorts_received_states: &'a UTXOCohortsReceivedStates,
    pub utxo_cohorts_sent_states: &'a UTXOCohortsSentStates,
}
/// The heights and dates for which computed (derived) series must be produced.
pub struct ComputeData<'a> {
    pub heights: &'a [Height],
    pub dates: &'a [Date],
}
/// Aggregate of every dataset the parser maintains; fans out inserts,
/// computes and exports to each sub-dataset.
#[derive(Allocative)]
pub struct Datasets {
    // Aggregate minimum states across all sub-datasets.
    min_initial_states: MinInitialStates,
    pub constant: ConstantDataset,
    pub address: AddressDatasets,
    pub block_metadata: BlockMetadataDataset,
    pub coindays: CoindaysDataset,
    pub cointime: CointimeDataset,
    pub date_metadata: DateMetadataDataset,
    pub mining: MiningDataset,
    pub price: PriceDatasets,
    pub transaction: TransactionDataset,
    pub utxo: UTXODatasets,
}
impl Datasets {
pub fn import(config: &Config) -> color_eyre::Result<Self> {
let price = PriceDatasets::import(config)?;
let constant = ConstantDataset::import(config)?;
let date_metadata = DateMetadataDataset::import(config)?;
let cointime = CointimeDataset::import(config)?;
let coindays = CoindaysDataset::import(config)?;
let mining = MiningDataset::import(config)?;
let block_metadata = BlockMetadataDataset::import(config)?;
let transaction = TransactionDataset::import(config)?;
let address = AddressDatasets::import(config)?;
let utxo = UTXODatasets::import(config)?;
let mut s = Self {
min_initial_states: MinInitialStates::default(),
address,
block_metadata,
cointime,
coindays,
constant,
date_metadata,
price,
mining,
transaction,
utxo,
};
s.set_initial_states(config);
info!("Imported datasets");
Ok(s)
}
    // Recomputes the aggregate minimum initial states from all sub-datasets.
    fn set_initial_states(&mut self, config: &Config) {
        self.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(self, config));
    }
    /// Routes one block's data to every sub-dataset that still needs it
    /// (as reported by each dataset's `needs_insert`).
    pub fn insert(&mut self, insert_data: InsertData) {
        // Address datasets are optional — only fed when address computation
        // is enabled for this run.
        if insert_data.compute_addresses {
            self.address.insert(&insert_data);
        }
        // UTXO datasets are always fed, without a `needs_insert` gate.
        self.utxo.insert(&insert_data);
        if self
            .block_metadata
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.block_metadata.insert(&insert_data);
        }
        if self
            .date_metadata
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.date_metadata.insert(&insert_data);
        }
        if self
            .coindays
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.coindays.insert(&insert_data);
        }
        if self
            .mining
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.mining.insert(&insert_data);
        }
        if self
            .transaction
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.transaction.insert(&insert_data);
        }
        if self
            .cointime
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.cointime.insert(&insert_data);
        }
    }
    /// Runs the derived-series computations of each sub-dataset, feeding the
    /// outputs of earlier datasets (mining, price, metadata) into later ones.
    /// Ordering matters: e.g. cointime consumes series produced by mining,
    /// price and address datasets.
    pub fn compute(&mut self, compute_data: ComputeData) {
        if self.constant.should_compute(&compute_data) {
            self.constant.compute(&compute_data);
        }
        if self.mining.should_compute(&compute_data) {
            self.mining.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
            );
        }
        // No compute needed for now
        self.price
            .compute(&compute_data, &mut self.mining.cumulative_subsidy);
        // Address and UTXO datasets are computed unconditionally (no
        // `should_compute` gate), unlike the other datasets here.
        self.address.compute(
            &compute_data,
            &mut self.price.close,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );
        self.utxo.compute(
            &compute_data,
            &mut self.price.close,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );
        if self.transaction.should_compute(&compute_data) {
            self.transaction.compute(
                &compute_data,
                &mut self.mining.cumulative_subsidy,
                &mut self.mining.block_interval,
            );
        }
        if self.cointime.should_compute(&compute_data) {
            self.cointime.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
                &mut self.price.close,
                &mut self.mining.cumulative_subsidy,
                &mut self.address.cohorts.all.subs.capitalization.realized_cap,
                &mut self.address.cohorts.all.subs.capitalization.realized_price,
                &mut self.mining.inflation_rate,
                &mut self.mining.yearly_inflation_rate,
                &mut self.transaction.annualized_volume,
                &mut self.mining.cumulative_subsidy_in_dollars,
            );
        }
    }
    /// Persists every dataset to disk.
    ///
    /// `pre_export`/`post_export` hooks run serially; the exports themselves
    /// run on a parallel iterator. When no dataset is lagging behind
    /// `height`, the last value of every exported map is additionally
    /// written to a single JSON summary file.
    pub fn export(&mut self, config: &Config, height: Height) -> color_eyre::Result<()> {
        // "New" = the minimum last exported height has caught up to `height`
        // (or nothing was exported yet) — NOTE(review): confirm against
        // `min_last_height` semantics.
        let is_new = self
            .min_initial_states
            .min_last_height()
            .map_or(true, |last| last <= height);
        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| dataset.pre_export());
        // Per-dataset exports are independent, hence the parallel iterator.
        self.to_any_dataset_vec()
            .into_par_iter()
            .try_for_each(|dataset| -> color_eyre::Result<()> { dataset.export() })?;
        let mut path_to_last: BTreeMap<String, Value> = BTreeMap::default();
        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| {
                dataset.post_export();
                if is_new {
                    // Collect each map's latest value for the summary file.
                    dataset.to_all_map_vec().iter().for_each(|map| {
                        if map.path_last().is_some() {
                            if let Some(last_value) = map.last_value() {
                                path_to_last.insert(map.id(config), last_value);
                            }
                        }
                    });
                }
            });
        if is_new {
            Json::export(&config.path_datasets_last_values(), &path_to_last)?;
        }
        // Refresh the minimal initial states now that files changed on disk.
        self.set_initial_states(config);
        Ok(())
    }
}
impl AnyDatasets for Datasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Flat list of every dataset, in a stable order: price, constant,
    /// address group, utxo group, then the remaining standalone datasets.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> =
            vec![&self.price, &self.constant];

        datasets.extend(self.address.to_any_dataset_vec());
        datasets.extend(self.utxo.to_any_dataset_vec());
        datasets.extend([
            &self.mining as &(dyn AnyDataset + Send + Sync),
            &self.transaction,
            &self.block_metadata,
            &self.date_metadata,
            &self.cointime,
            &self.coindays,
        ]);

        datasets
    }

    /// Mutable counterpart of `to_any_dataset_vec`, same ordering.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> =
            vec![&mut self.price, &mut self.constant];

        datasets.extend(self.address.to_mut_any_dataset_vec());
        datasets.extend(self.utxo.to_mut_any_dataset_vec());
        datasets.extend([
            &mut self.mining as &mut dyn AnyDataset,
            &mut self.transaction,
            &mut self.block_metadata,
            &mut self.date_metadata,
            &mut self.cointime,
            &mut self.coindays,
        ]);

        datasets
    }
}

View File

@@ -0,0 +1,733 @@
use std::collections::BTreeMap;
use allocative::Allocative;
use chrono::Days;
use color_eyre::eyre::Error;
use struct_iterable::Iterable;
use crate::{
parser::price::{Binance, Kibo, Kraken},
structs::{
Amount, BiMap, Config, Date, DateMap, DateMapChunkId, Height, HeightMapChunkId, MapKey,
MapKind, Timestamp, OHLC,
},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{AnyDataset, ComputeData, MinInitialStates, RatioDataset};
/// All price-related maps: raw OHLC, SMAs and their ratios, total-return
/// series and all-time-high derived series.
///
/// NOTE: field order may matter to the `Iterable` derive — do not reorder.
#[derive(Allocative, Iterable)]
pub struct PriceDatasets {
    min_initial_states: MinInitialStates,
    // In-memory caches of fetched price sources (not persisted maps).
    kraken_daily: Option<BTreeMap<Date, OHLC>>,
    kraken_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_daily: Option<BTreeMap<Date, OHLC>>,
    binance_har: Option<BTreeMap<u32, OHLC>>,
    kibo_by_height: BTreeMap<HeightMapChunkId, Vec<OHLC>>,
    kibo_by_date: BTreeMap<DateMapChunkId, BTreeMap<Date, OHLC>>,
    // Inserted: raw candle per height/date.
    pub ohlc: BiMap<OHLC>,
    // Computed: OHLC components split out as individual maps.
    pub open: BiMap<f32>,
    pub high: BiMap<f32>,
    pub low: BiMap<f32>,
    pub close: BiMap<f32>,
    pub market_cap: BiMap<f32>,
    // Computed: simple moving averages and close/SMA ratio datasets.
    pub price_1w_sma: BiMap<f32>,
    pub price_1w_sma_ratio: RatioDataset,
    pub price_1m_sma: BiMap<f32>,
    pub price_1m_sma_ratio: RatioDataset,
    pub price_1y_sma: BiMap<f32>,
    pub price_1y_sma_ratio: RatioDataset,
    pub price_2y_sma: BiMap<f32>,
    pub price_2y_sma_ratio: RatioDataset,
    pub price_4y_sma: BiMap<f32>,
    pub price_4y_sma_ratio: RatioDataset,
    pub price_8d_sma: BiMap<f32>,
    pub price_8d_sma_ratio: RatioDataset,
    pub price_13d_sma: BiMap<f32>,
    pub price_13d_sma_ratio: RatioDataset,
    pub price_21d_sma: BiMap<f32>,
    pub price_21d_sma_ratio: RatioDataset,
    pub price_34d_sma: BiMap<f32>,
    pub price_34d_sma_ratio: RatioDataset,
    pub price_55d_sma: BiMap<f32>,
    pub price_55d_sma_ratio: RatioDataset,
    pub price_89d_sma: BiMap<f32>,
    pub price_89d_sma_ratio: RatioDataset,
    pub price_144d_sma: BiMap<f32>,
    pub price_144d_sma_ratio: RatioDataset,
    pub price_200w_sma: BiMap<f32>,
    pub price_200w_sma_ratio: RatioDataset,
    // Computed: percentage change of close over various lookbacks.
    pub price_1d_total_return: DateMap<f32>,
    pub price_1m_total_return: DateMap<f32>,
    pub price_6m_total_return: DateMap<f32>,
    pub price_1y_total_return: DateMap<f32>,
    pub price_2y_total_return: DateMap<f32>,
    pub price_3y_total_return: DateMap<f32>,
    pub price_4y_total_return: DateMap<f32>,
    pub price_6y_total_return: DateMap<f32>,
    pub price_8y_total_return: DateMap<f32>,
    pub price_10y_total_return: DateMap<f32>,
    pub price_4y_compound_return: DateMap<f32>,
    // projection via lowest 4y compound value
    // Computed: all-time-high derived series.
    pub all_time_high: BiMap<f32>,
    pub all_time_high_date: DateMap<Date>,
    pub days_since_all_time_high: DateMap<u32>,
    pub max_days_between_all_time_highs: DateMap<u32>,
    pub max_years_between_all_time_highs: DateMap<f32>,
    pub market_price_to_all_time_high_ratio: BiMap<f32>,
    pub drawdown: BiMap<f32>,
    pub sats_per_dollar: BiMap<f32>,
    // volatility
}
impl PriceDatasets {
    /// Imports (or creates) every price map under the configured datasets
    /// path, then computes the minimal initial states from what exists on
    /// disk.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let path_dataset = config.path_datasets();
        // Resolve a map file path relative to the datasets directory.
        let f = |s: &str| path_dataset.join(s);
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // Source caches start empty and are fetched lazily on demand.
            binance_1mn: None,
            binance_daily: None,
            binance_har: None,
            kraken_1mn: None,
            kraken_daily: None,
            kibo_by_height: BTreeMap::default(),
            kibo_by_date: BTreeMap::default(),
            // ---
            // Inserted
            // ---
            ohlc: BiMap::new_json(1, MapKind::Inserted, &config.path_price()),
            // ---
            // Computed
            // ---
            open: BiMap::new_bin(1, MapKind::Computed, &f("open")),
            high: BiMap::new_bin(1, MapKind::Computed, &f("high")),
            low: BiMap::new_bin(1, MapKind::Computed, &f("low")),
            close: BiMap::new_bin(1, MapKind::Computed, &f("close")),
            market_cap: BiMap::new_bin(1, MapKind::Computed, &f("market_cap")),
            price_1w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1w_sma")),
            price_1w_sma_ratio: RatioDataset::import(&path_dataset, "price_1w_sma", config)?,
            price_1m_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1m_sma")),
            price_1m_sma_ratio: RatioDataset::import(&path_dataset, "price_1m_sma", config)?,
            price_1y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_1y_sma")),
            price_1y_sma_ratio: RatioDataset::import(&path_dataset, "price_1y_sma", config)?,
            price_2y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_2y_sma")),
            price_2y_sma_ratio: RatioDataset::import(&path_dataset, "price_2y_sma", config)?,
            price_4y_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_4y_sma")),
            price_4y_sma_ratio: RatioDataset::import(&path_dataset, "price_4y_sma", config)?,
            price_8d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_8d_sma")),
            price_8d_sma_ratio: RatioDataset::import(&path_dataset, "price_8d_sma", config)?,
            price_13d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_13d_sma")),
            price_13d_sma_ratio: RatioDataset::import(&path_dataset, "price_13d_sma", config)?,
            price_21d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_21d_sma")),
            price_21d_sma_ratio: RatioDataset::import(&path_dataset, "price_21d_sma", config)?,
            price_34d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_34d_sma")),
            price_34d_sma_ratio: RatioDataset::import(&path_dataset, "price_34d_sma", config)?,
            price_55d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_55d_sma")),
            price_55d_sma_ratio: RatioDataset::import(&path_dataset, "price_55d_sma", config)?,
            price_89d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_89d_sma")),
            price_89d_sma_ratio: RatioDataset::import(&path_dataset, "price_89d_sma", config)?,
            price_144d_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_144d_sma")),
            price_144d_sma_ratio: RatioDataset::import(&path_dataset, "price_144d_sma", config)?,
            price_200w_sma: BiMap::new_bin(1, MapKind::Computed, &f("price_200w_sma")),
            price_200w_sma_ratio: RatioDataset::import(&path_dataset, "price_200w_sma", config)?,
            price_1d_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1d_total_return"),
            ),
            price_1m_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1m_total_return"),
            ),
            price_6m_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_6m_total_return"),
            ),
            price_1y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_1y_total_return"),
            ),
            price_2y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_2y_total_return"),
            ),
            price_3y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_3y_total_return"),
            ),
            price_4y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_4y_total_return"),
            ),
            price_6y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_6y_total_return"),
            ),
            price_8y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_8y_total_return"),
            ),
            price_10y_total_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_10y_total_return"),
            ),
            price_4y_compound_return: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("price_4y_compound_return"),
            ),
            all_time_high: BiMap::new_bin(1, MapKind::Computed, &f("all_time_high")),
            all_time_high_date: DateMap::new_bin(1, MapKind::Computed, &f("all_time_high_date")),
            days_since_all_time_high: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("days_since_all_time_high"),
            ),
            max_days_between_all_time_highs: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("max_days_between_all_time_highs"),
            ),
            // NOTE(review): version 2 while sibling maps use 1 —
            // presumably a deliberate format bump; confirm.
            max_years_between_all_time_highs: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("max_years_between_all_time_highs"),
            ),
            market_price_to_all_time_high_ratio: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("market_price_to_all_time_high_ratio"),
            ),
            drawdown: BiMap::new_bin(1, MapKind::Computed, &f("drawdown")),
            sats_per_dollar: BiMap::new_bin(1, MapKind::Computed, &f("sats_per_dollar")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
    /// Computes every derived price series for the given height/date ranges.
    ///
    /// `circulating_supply` is borrowed from the mining dataset and is used
    /// only for market cap (close * supply).
    pub fn compute(&mut self, compute_data: &ComputeData, circulating_supply: &mut BiMap<f64>) {
        let &ComputeData { dates, heights, .. } = compute_data;
        // --- OHLC components split into individual maps ---
        self.open
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.open);
        self.high
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.high);
        self.low
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.low);
        self.close
            .multi_insert_simple_transform(heights, dates, &mut self.ohlc, &|ohlc| ohlc.close);
        self.market_cap
            .multi_insert_multiply(heights, dates, &mut self.close, circulating_supply);
        // --- Simple moving averages over close (windows in days) ---
        self.price_1w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_WEEK_IN_DAYS,
        );
        self.price_1m_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_MONTH_IN_DAYS,
        );
        self.price_1y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            ONE_YEAR_IN_DAYS,
        );
        self.price_2y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            2 * ONE_YEAR_IN_DAYS,
        );
        self.price_4y_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            4 * ONE_YEAR_IN_DAYS,
        );
        // Fibonacci-day SMAs.
        self.price_8d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 8);
        self.price_13d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 13);
        self.price_21d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 21);
        self.price_34d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 34);
        self.price_55d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 55);
        self.price_89d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 89);
        self.price_144d_sma
            .multi_insert_simple_average(heights, dates, &mut self.close, 144);
        self.price_200w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.close,
            200 * ONE_WEEK_IN_DAYS,
        );
        // --- Total returns: close percentage change over each lookback ---
        self.price_1d_total_return
            .multi_insert_percentage_change(dates, &mut self.close.date, 1);
        self.price_1m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            ONE_MONTH_IN_DAYS,
        );
        self.price_6m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            6 * ONE_MONTH_IN_DAYS,
        );
        self.price_1y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            ONE_YEAR_IN_DAYS,
        );
        self.price_2y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            2 * ONE_YEAR_IN_DAYS,
        );
        self.price_3y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            3 * ONE_YEAR_IN_DAYS,
        );
        self.price_4y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            4 * ONE_YEAR_IN_DAYS,
        );
        self.price_6y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            6 * ONE_YEAR_IN_DAYS,
        );
        self.price_8y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            8 * ONE_YEAR_IN_DAYS,
        );
        self.price_10y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.close.date,
            10 * ONE_YEAR_IN_DAYS,
        );
        // Annualized 4-year compound return, in percent (CAGR over 4 years).
        self.price_4y_compound_return
            .multi_insert_complex_transform(
                dates,
                &mut self.close.date,
                |(last_value, date, closes, _)| {
                    let previous_value = date
                        .checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64))
                        .and_then(|date| closes.get_or_import(&Date::wrap(date)))
                        .unwrap_or_default();
                    (((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0
                },
            );
        // --- close / SMA ratio datasets ---
        self.price_1w_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1w_sma);
        self.price_1m_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1m_sma);
        self.price_1y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_1y_sma);
        self.price_2y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_2y_sma);
        self.price_4y_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_4y_sma);
        self.price_8d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_8d_sma);
        self.price_13d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_13d_sma);
        self.price_21d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_21d_sma);
        self.price_34d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_34d_sma);
        self.price_55d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_55d_sma);
        self.price_89d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_89d_sma);
        self.price_144d_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_144d_sma);
        self.price_200w_sma_ratio
            .compute(compute_data, &mut self.close, &mut self.price_200w_sma);
        // --- All-time-high derived series ---
        self.all_time_high
            .multi_insert_max(heights, dates, &mut self.high);
        self.market_price_to_all_time_high_ratio
            .multi_insert_percentage(heights, dates, &mut self.close, &mut self.all_time_high);
        // Carry forward the previous ATH date unless today's high IS the ATH.
        self.all_time_high_date.multi_insert_complex_transform(
            dates,
            &mut self.all_time_high.date,
            |(value, date, _, map)| {
                let high = self.high.date.get_or_import(date).unwrap();
                let is_ath = high == value;
                if is_ath {
                    *date
                } else {
                    let previous_date = date.checked_sub(1).unwrap();
                    *map.get_or_import(&previous_date).as_ref().unwrap_or(date)
                }
            },
        );
        self.days_since_all_time_high.multi_insert_simple_transform(
            dates,
            &mut self.all_time_high_date,
            |value, key| key.difference_in_days_between(value),
        );
        self.max_days_between_all_time_highs
            .multi_insert_max(dates, &mut self.days_since_all_time_high);
        self.max_years_between_all_time_highs
            .multi_insert_simple_transform(
                dates,
                &mut self.max_days_between_all_time_highs,
                |days, _| (days as f64 / ONE_YEAR_IN_DAYS as f64) as f32,
            );
        // Drawdown as a negative percentage from the ATH.
        self.drawdown.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.market_price_to_all_time_high_ratio,
            &|v| -(100.0 - v),
        );
        self.sats_per_dollar.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.close,
            &|price| Amount::ONE_BTC_F32 / price,
        );
    }
pub fn get_date_ohlc(&mut self, date: Date) -> color_eyre::Result<OHLC> {
if self.ohlc.date.is_key_safe(date) {
Ok(self.ohlc.date.get_or_import(&date).unwrap().to_owned())
} else {
let ohlc = self
.get_from_daily_kraken(&date)
.or_else(|_| self.get_from_daily_binance(&date))
.or_else(|_| self.get_from_date_kibo(&date))?;
self.ohlc.date.insert(date, ohlc);
Ok(ohlc)
}
}
fn get_from_date_kibo(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
let chunk_id = date.to_chunk_id();
#[allow(clippy::map_entry)]
if !self.kibo_by_date.contains_key(&chunk_id)
|| self
.kibo_by_date
.get(&chunk_id)
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.kibo_by_date
.insert(chunk_id, Kibo::fetch_date_prices(chunk_id)?);
}
self.kibo_by_date
.get(&chunk_id)
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date in satonomics"))
}
fn get_from_daily_kraken(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
if self.kraken_daily.is_none()
|| self
.kraken_daily
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.kraken_daily.replace(Kraken::fetch_daily_prices()?);
}
self.kraken_daily
.as_ref()
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date"))
}
fn get_from_daily_binance(&mut self, date: &Date) -> color_eyre::Result<OHLC> {
if self.binance_daily.is_none()
|| self
.binance_daily
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
< date
{
self.binance_daily.replace(Binance::fetch_daily_prices()?);
}
self.binance_daily
.as_ref()
.unwrap()
.get(date)
.cloned()
.ok_or(Error::msg("Couldn't find date"))
}
    /// Returns the OHLC for a block, reading the local map first and
    /// otherwise failing over through: Kraken 1mn → Binance 1mn →
    /// Binance HAR file → Kibo; panics with recovery instructions when
    /// every source misses.
    pub fn get_height_ohlc(
        &mut self,
        height: Height,
        timestamp: Timestamp,
        previous_timestamp: Option<Timestamp>,
        config: &Config,
    ) -> color_eyre::Result<OHLC> {
        if let Some(ohlc) = self.ohlc.height.get_or_import(&height) {
            return Ok(ohlc);
        }
        let timestamp = timestamp.to_floored_seconds();
        // Only the very first block may legitimately lack a predecessor.
        if previous_timestamp.is_none() && !height.is_first() {
            panic!("Shouldn't be possible");
        }
        let previous_timestamp = previous_timestamp.map(|t| t.to_floored_seconds());
        let ohlc = self
            .get_from_1mn_kraken(timestamp, previous_timestamp)
            .unwrap_or_else(|_| {
                self.get_from_1mn_binance(timestamp, previous_timestamp)
                    .unwrap_or_else(|_| {
                        self.get_from_har_binance(timestamp, previous_timestamp, config)
                            .unwrap_or_else(|_| {
                                self.get_from_height_kibo(&height).unwrap_or_else(|_| {
                                    let date = timestamp.to_date();
                                    panic!(
                                        "Can't find the price for: height: {height} - date: {date}
1mn APIs are limited to the last 16 hours for Binance's and the last 10 hours for Kraken's
How to fix this:
1. Go to https://www.binance.com/en/trade/BTC_USDT?type=spot
2. Select 1mn interval
3. Open the inspector/dev tools
4. Go to the Network Tab
5. Filter URLs by 'uiKlines'
6. Go back to the chart and scroll until you pass the date mentioned few lines ago
7. Go back to the dev tools
8. Export to a har file (if there is no explicit button, click on the cog button)
9. Move the file to 'parser/imports/binance.har'
"
                                    )
                                })
                            })
                    })
            });
        self.ohlc.height.insert(height, ohlc);
        Ok(ohlc)
    }
fn get_from_height_kibo(&mut self, height: &Height) -> color_eyre::Result<OHLC> {
let chunk_id = height.to_chunk_id();
#[allow(clippy::map_entry)]
if !self.kibo_by_height.contains_key(&chunk_id)
|| ((chunk_id.to_usize() + self.kibo_by_height.get(&chunk_id).unwrap().len())
<= height.to_usize())
{
self.kibo_by_height
.insert(chunk_id, Kibo::fetch_height_prices(chunk_id)?);
}
self.kibo_by_height
.get(&chunk_id)
.unwrap()
.get(height.to_serialized_key().to_usize())
.cloned()
.ok_or(Error::msg("Couldn't find height in kibo"))
}
fn get_from_1mn_kraken(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
if self.kraken_1mn.is_none()
|| self
.kraken_1mn
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
<= &timestamp
{
self.kraken_1mn.replace(Kraken::fetch_1mn_prices()?);
}
Self::find_height_ohlc(&self.kraken_1mn, timestamp, previous_timestamp, "kraken 1m")
}
fn get_from_1mn_binance(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
) -> color_eyre::Result<OHLC> {
if self.binance_1mn.is_none()
|| self
.binance_1mn
.as_ref()
.unwrap()
.last_key_value()
.unwrap()
.0
<= &timestamp
{
self.binance_1mn.replace(Binance::fetch_1mn_prices()?);
}
Self::find_height_ohlc(
&self.binance_1mn,
timestamp,
previous_timestamp,
"binance 1m",
)
}
fn get_from_har_binance(
&mut self,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
config: &Config,
) -> color_eyre::Result<OHLC> {
if self.binance_har.is_none() {
self.binance_har
.replace(Binance::read_har_file(config).unwrap_or_default());
}
Self::find_height_ohlc(
&self.binance_har,
timestamp,
previous_timestamp,
"binance har",
)
}
fn find_height_ohlc(
tree: &Option<BTreeMap<u32, OHLC>>,
timestamp: Timestamp,
previous_timestamp: Option<Timestamp>,
name: &str,
) -> color_eyre::Result<OHLC> {
let tree = tree.as_ref().unwrap();
let err = Error::msg(format!("Couldn't find timestamp in {name}"));
let previous_ohlc = previous_timestamp
.map_or(Some(OHLC::default()), |previous_timestamp| {
tree.get(&previous_timestamp).cloned()
});
let last_ohlc = tree.get(&timestamp);
if previous_ohlc.is_none() || last_ohlc.is_none() {
return Err(err);
}
let previous_ohlc = previous_ohlc.unwrap();
let mut final_ohlc = OHLC {
open: previous_ohlc.close,
high: previous_ohlc.close,
low: previous_ohlc.close,
close: previous_ohlc.close,
};
let start = previous_timestamp.unwrap_or_default();
let end = timestamp;
// Otherwise it's a re-org
if start < end {
tree.range(&*start..=&*end).skip(1).for_each(|(_, ohlc)| {
if ohlc.high > final_ohlc.high {
final_ohlc.high = ohlc.high
}
if ohlc.low < final_ohlc.low {
final_ohlc.low = ohlc.low
}
final_ohlc.close = ohlc.close;
});
}
Ok(final_ohlc)
}
}
impl AnyDataset for PriceDatasets {
    /// Exposes the minimal initial states computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,123 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::CapitalizationState,
},
structs::{BiMap, Config, MapKind, MapPath},
utils::ONE_MONTH_IN_DAYS,
};
use super::RatioDataset;
/// Realized-capitalization maps for one cohort: the inserted realized cap
/// plus derived realized price, its 1-month net change, and the
/// price/realized-price ratio dataset.
#[derive(Allocative, Iterable)]
pub struct CapitalizationDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub realized_cap: BiMap<f32>,
    // Computed
    pub realized_price: BiMap<f32>,
    realized_cap_1m_net_change: BiMap<f32>,
    realized_price_ratio: RatioDataset,
}
impl CapitalizationDataset {
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
realized_cap: BiMap::new_bin(1, MapKind::Inserted, &f("realized_cap")),
// ---
// Computed
// ---
realized_cap_1m_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("realized_cap_1m_net_change"),
),
realized_price: BiMap::new_bin(1, MapKind::Computed, &f("realized_price")),
realized_price_ratio: RatioDataset::import(
path,
&format!(
"{}realized_price",
name.as_ref().map_or("".to_owned(), |n| format!("{n}-"))
),
config,
)?,
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
date,
is_date_last_block,
..
}: &InsertData,
state: &CapitalizationState,
) {
let realized_cap = self
.realized_cap
.height
.insert(height, state.realized_cap().to_dollar() as f32);
if is_date_last_block {
self.realized_cap.date.insert(date, realized_cap);
}
}
    /// Derives realized price (realized cap / cohort supply), its 1-month
    /// net change and the close/realized-price ratio from the inserted
    /// realized cap.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        cohort_supply: &mut BiMap<f64>,
    ) {
        let &ComputeData { heights, dates, .. } = compute_data;
        self.realized_price.multi_insert_divide(
            heights,
            dates,
            &mut self.realized_cap,
            cohort_supply,
        );
        self.realized_cap_1m_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.realized_cap,
            ONE_MONTH_IN_DAYS,
        );
        self.realized_price_ratio
            .compute(compute_data, closes, &mut self.realized_price);
    }
}
impl AnyDataset for CapitalizationDataset {
    /// Exposes the minimal initial states computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,86 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::InputState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath},
};
/// Per-cohort transaction-input maps: input count, per-height volume and
/// its daily sum.
#[derive(Allocative, Iterable)]
pub struct InputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<u64>,
    pub volume: HeightMap<f64>,
    pub volume_1d_sum: DateMap<f64>,
    // Computed
    // add inputs_per_second
}
impl InputSubDataset {
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
count: BiMap::new_bin(1, MapKind::Inserted, &f("input_count")),
volume: HeightMap::new_bin(1, MapKind::Inserted, &f("input_volume")),
volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("input_volume_1d_sum")),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
pub fn insert(
&mut self,
&InsertData {
height,
date,
is_date_last_block,
date_blocks_range,
..
}: &InsertData,
state: &InputState,
) {
let count = self
.count
.height
.insert(height, state.count().round() as u64);
self.volume.insert(height, state.volume().to_btc());
if is_date_last_block {
self.count.date.insert(date, count);
self.volume_1d_sum
.insert(date, self.volume.sum_range(date_blocks_range));
}
}
}
impl AnyDataset for InputSubDataset {
    /// Exposes the minimal initial states computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,90 @@
use allocative::Allocative;
mod capitalization;
mod input;
mod price_paid;
mod ratio;
mod realized;
mod recap;
mod supply;
mod unrealized;
mod utxo;
pub use capitalization::*;
pub use input::*;
pub use price_paid::*;
pub use ratio::*;
pub use realized::*;
pub use recap::*;
use struct_iterable::Iterable;
pub use supply::*;
pub use unrealized::*;
pub use utxo::*;
use crate::{
parser::datasets::AnyDataset,
structs::{Config, MapPath},
};
use super::AnyDatasetGroup;
/// Bundle of every per-cohort sub-dataset (capitalization, inputs, price
/// paid, realized, supply, unrealized, utxo).
#[derive(Allocative, Iterable)]
pub struct SubDataset {
    pub capitalization: CapitalizationDataset,
    pub input: InputSubDataset,
    // pub output: OutputSubDataset,
    pub price_paid: PricePaidSubDataset,
    pub realized: RealizedSubDataset,
    pub supply: SupplySubDataset,
    pub unrealized: UnrealizedSubDataset,
    pub utxo: UTXOSubDataset,
}
impl SubDataset {
    /// Imports every sub-dataset under `parent_path`, optionally nested
    /// inside a `name` sub-directory.
    pub fn import(
        parent_path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        Ok(Self {
            capitalization: CapitalizationDataset::import(parent_path, name, config)?,
            input: InputSubDataset::import(parent_path, name, config)?,
            // output: OutputSubDataset::import(parent_path)?,
            price_paid: PricePaidSubDataset::import(parent_path, name, config)?,
            realized: RealizedSubDataset::import(parent_path, name, config)?,
            supply: SupplySubDataset::import(parent_path, name, config)?,
            unrealized: UnrealizedSubDataset::import(parent_path, name, config)?,
            utxo: UTXOSubDataset::import(parent_path, name, config)?,
        })
    }
}
impl AnyDatasetGroup for SubDataset {
    /// Flat list of the sub-datasets (the output sub-dataset is currently
    /// disabled).
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let Self {
            capitalization,
            input,
            price_paid,
            realized,
            supply,
            unrealized,
            utxo,
        } = self;

        vec![
            capitalization as &(dyn AnyDataset + Send + Sync),
            price_paid,
            realized,
            supply,
            unrealized,
            utxo,
            input,
        ]
    }

    /// Mutable counterpart of `as_vec`, same ordering.
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let Self {
            capitalization,
            input,
            price_paid,
            realized,
            supply,
            unrealized,
            utxo,
        } = self;

        vec![
            capitalization as &mut dyn AnyDataset,
            price_paid,
            realized,
            supply,
            unrealized,
            utxo,
            input,
        ]
    }
}

View File

@@ -0,0 +1,87 @@
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::OutputState,
structs::{AnyBiMap, BiMap},
utils::ONE_YEAR_IN_DAYS,
};
// NOTE(review): this file appears to be dead code — `mod output` is
// commented out of the module tree and the APIs used here (2-arg
// `BiMap::new_bin`, 1-arg `compute_from_dataset`) predate the
// MapKind/config-taking versions used by the sibling files. Confirm before
// re-enabling.
/// Per-cohort transaction-output maps: count, volume and derived
/// annualized volume / velocity.
pub struct OutputSubDataset {
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<f32>,
    pub volume: BiMap<f32>,
    // Computed
    pub annualized_volume: BiMap<f32>,
    pub velocity: BiMap<f32>,
    // add outputs_per_second
}
impl OutputSubDataset {
    /// Imports (or creates) the output maps under `parent_path`.
    ///
    /// NOTE(review): uses an older map API (string paths, 2-arg `new_bin`,
    /// 1-arg `compute_from_dataset`) than the sibling sub-datasets —
    /// presumably dead code; confirm before re-enabling the module.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            count: BiMap::new_bin(1, &f("output_count")),
            volume: BiMap::new_bin(1, &f("output_volume")),
            annualized_volume: BiMap::new_bin(1, &f("annualized_output_volume")),
            velocity: BiMap::new_bin(1, &f("output_velocity")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts the block's output count and volume; on the date's last
    /// block also mirrors the count and records the day's summed volume.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        state: &OutputState,
    ) {
        let count = self.count.height.insert(height, state.count);
        self.volume.height.insert(height, state.volume);
        if is_date_last_block {
            self.count.date.insert(date, count);
            self.volume.date_insert_sum_range(date, date_blocks_range);
        }
    }
    /// Derives annualized volume (trailing 1-year sum) and velocity
    /// (annualized volume / cohort supply).
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        cohort_supply: &mut BiMap<f32>,
    ) {
        self.annualized_volume.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.volume,
            ONE_YEAR_IN_DAYS,
        );
        self.velocity.multi_insert_divide(
            heights,
            dates,
            &mut self.annualized_volume,
            cohort_supply,
        );
    }
}
impl AnyDataset for OutputSubDataset {
    /// Exposes the minimal initial states computed at import time.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,266 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::PricePaidState,
},
structs::{BiMap, Config, Date, Height, MapKind, MapPath},
};
/// Per-cohort price-paid percentiles (5th through 95th in 5-point steps;
/// the 50th percentile is `pp_median`).
#[derive(Allocative, Iterable)]
pub struct PricePaidSubDataset {
    min_initial_states: MinInitialStates,
    pp_median: BiMap<f32>,
    pp_95p: BiMap<f32>,
    pp_90p: BiMap<f32>,
    pp_85p: BiMap<f32>,
    pp_80p: BiMap<f32>,
    pp_75p: BiMap<f32>,
    pp_70p: BiMap<f32>,
    pp_65p: BiMap<f32>,
    pp_60p: BiMap<f32>,
    pp_55p: BiMap<f32>,
    pp_45p: BiMap<f32>,
    pp_40p: BiMap<f32>,
    pp_35p: BiMap<f32>,
    pp_30p: BiMap<f32>,
    pp_25p: BiMap<f32>,
    pp_20p: BiMap<f32>,
    pp_15p: BiMap<f32>,
    pp_10p: BiMap<f32>,
    pp_05p: BiMap<f32>,
}
impl PricePaidSubDataset {
    /// Imports (or creates) every percentile map under `path`, optionally
    /// nested inside a `name` sub-directory.
    pub fn import(
        path: &MapPath,
        name: &Option<String>,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        // Resolve a map file path, nesting under the cohort name if any.
        let f = |s: &str| {
            if let Some(name) = name {
                path.join(&format!("{name}/{s}"))
            } else {
                path.join(s)
            }
        };
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            // ---
            // Inserted
            // ---
            pp_median: BiMap::new_bin(1, MapKind::Inserted, &f("median_price_paid")),
            pp_95p: BiMap::new_bin(1, MapKind::Inserted, &f("95p_price_paid")),
            pp_90p: BiMap::new_bin(1, MapKind::Inserted, &f("90p_price_paid")),
            pp_85p: BiMap::new_bin(1, MapKind::Inserted, &f("85p_price_paid")),
            pp_80p: BiMap::new_bin(1, MapKind::Inserted, &f("80p_price_paid")),
            pp_75p: BiMap::new_bin(1, MapKind::Inserted, &f("75p_price_paid")),
            pp_70p: BiMap::new_bin(1, MapKind::Inserted, &f("70p_price_paid")),
            pp_65p: BiMap::new_bin(1, MapKind::Inserted, &f("65p_price_paid")),
            pp_60p: BiMap::new_bin(1, MapKind::Inserted, &f("60p_price_paid")),
            pp_55p: BiMap::new_bin(1, MapKind::Inserted, &f("55p_price_paid")),
            pp_45p: BiMap::new_bin(1, MapKind::Inserted, &f("45p_price_paid")),
            pp_40p: BiMap::new_bin(1, MapKind::Inserted, &f("40p_price_paid")),
            pp_35p: BiMap::new_bin(1, MapKind::Inserted, &f("35p_price_paid")),
            pp_30p: BiMap::new_bin(1, MapKind::Inserted, &f("30p_price_paid")),
            pp_25p: BiMap::new_bin(1, MapKind::Inserted, &f("25p_price_paid")),
            pp_20p: BiMap::new_bin(1, MapKind::Inserted, &f("20p_price_paid")),
            pp_15p: BiMap::new_bin(1, MapKind::Inserted, &f("15p_price_paid")),
            pp_10p: BiMap::new_bin(1, MapKind::Inserted, &f("10p_price_paid")),
            pp_05p: BiMap::new_bin(1, MapKind::Inserted, &f("05p_price_paid")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));
        Ok(s)
    }
/// Records the price-paid percentiles captured in `state` for one block.
///
/// Each percentile is written to its height map (converted to dollars) and,
/// when this is the last block of the date, mirrored into the date maps.
/// If the state holds no percentiles at all, default values are inserted.
pub fn insert(
&mut self,
&InsertData {
height,
is_date_last_block,
date,
..
}: &InsertData,
state: &PricePaidState,
) {
// Snapshot every percentile before writing anything.
let pp_05p = state.pp_05p();
let pp_10p = state.pp_10p();
let pp_15p = state.pp_15p();
let pp_20p = state.pp_20p();
let pp_25p = state.pp_25p();
let pp_30p = state.pp_30p();
let pp_35p = state.pp_35p();
let pp_40p = state.pp_40p();
let pp_45p = state.pp_45p();
let pp_median = state.pp_median();
let pp_55p = state.pp_55p();
let pp_60p = state.pp_60p();
let pp_65p = state.pp_65p();
let pp_70p = state.pp_70p();
let pp_75p = state.pp_75p();
let pp_80p = state.pp_80p();
let pp_85p = state.pp_85p();
let pp_90p = state.pp_90p();
let pp_95p = state.pp_95p();
// Check if iter was empty
// (the code assumes that when the 5th percentile is absent, all are —
// the unwraps below rely on it).
if pp_05p.is_none() {
self.insert_height_default(height);
if is_date_last_block {
self.insert_date_default(date);
}
return;
}
// Insert the height values, keeping the dollar-converted f32 that each
// map returns so it can be reused for the date maps below.
let pp_05p = self
.pp_05p
.height
.insert(height, pp_05p.unwrap().to_dollar() as f32);
let pp_10p = self
.pp_10p
.height
.insert(height, pp_10p.unwrap().to_dollar() as f32);
let pp_15p = self
.pp_15p
.height
.insert(height, pp_15p.unwrap().to_dollar() as f32);
let pp_20p = self
.pp_20p
.height
.insert(height, pp_20p.unwrap().to_dollar() as f32);
let pp_25p = self
.pp_25p
.height
.insert(height, pp_25p.unwrap().to_dollar() as f32);
let pp_30p = self
.pp_30p
.height
.insert(height, pp_30p.unwrap().to_dollar() as f32);
let pp_35p = self
.pp_35p
.height
.insert(height, pp_35p.unwrap().to_dollar() as f32);
let pp_40p = self
.pp_40p
.height
.insert(height, pp_40p.unwrap().to_dollar() as f32);
let pp_45p = self
.pp_45p
.height
.insert(height, pp_45p.unwrap().to_dollar() as f32);
let pp_median = self
.pp_median
.height
.insert(height, pp_median.unwrap().to_dollar() as f32);
let pp_55p = self
.pp_55p
.height
.insert(height, pp_55p.unwrap().to_dollar() as f32);
let pp_60p = self
.pp_60p
.height
.insert(height, pp_60p.unwrap().to_dollar() as f32);
let pp_65p = self
.pp_65p
.height
.insert(height, pp_65p.unwrap().to_dollar() as f32);
let pp_70p = self
.pp_70p
.height
.insert(height, pp_70p.unwrap().to_dollar() as f32);
let pp_75p = self
.pp_75p
.height
.insert(height, pp_75p.unwrap().to_dollar() as f32);
let pp_80p = self
.pp_80p
.height
.insert(height, pp_80p.unwrap().to_dollar() as f32);
let pp_85p = self
.pp_85p
.height
.insert(height, pp_85p.unwrap().to_dollar() as f32);
let pp_90p = self
.pp_90p
.height
.insert(height, pp_90p.unwrap().to_dollar() as f32);
let pp_95p = self
.pp_95p
.height
.insert(height, pp_95p.unwrap().to_dollar() as f32);
// Mirror the block values into the date maps on the date's last block.
if is_date_last_block {
self.pp_05p.date.insert(date, pp_05p);
self.pp_10p.date.insert(date, pp_10p);
self.pp_15p.date.insert(date, pp_15p);
self.pp_20p.date.insert(date, pp_20p);
self.pp_25p.date.insert(date, pp_25p);
self.pp_30p.date.insert(date, pp_30p);
self.pp_35p.date.insert(date, pp_35p);
self.pp_40p.date.insert(date, pp_40p);
self.pp_45p.date.insert(date, pp_45p);
self.pp_median.date.insert(date, pp_median);
self.pp_55p.date.insert(date, pp_55p);
self.pp_60p.date.insert(date, pp_60p);
self.pp_65p.date.insert(date, pp_65p);
self.pp_70p.date.insert(date, pp_70p);
self.pp_75p.date.insert(date, pp_75p);
self.pp_80p.date.insert(date, pp_80p);
self.pp_85p.date.insert(date, pp_85p);
self.pp_90p.date.insert(date, pp_90p);
self.pp_95p.date.insert(date, pp_95p);
}
}
/// Writes the default value into every percentile height map at `height`.
fn insert_height_default(&mut self, height: Height) {
    for map in self.inserted_as_mut_vec() {
        map.height.insert_default(height);
    }
}
/// Writes the default value into every percentile date map at `date`.
fn insert_date_default(&mut self, date: Date) {
    for map in self.inserted_as_mut_vec() {
        map.date.insert_default(date);
    }
}
/// Returns mutable references to every inserted percentile map, so callers
/// can apply an operation (e.g. default insertion) across all of them.
pub fn inserted_as_mut_vec(&mut self) -> Vec<&mut BiMap<f32>> {
vec![
&mut self.pp_95p,
&mut self.pp_90p,
&mut self.pp_85p,
&mut self.pp_80p,
&mut self.pp_75p,
&mut self.pp_70p,
&mut self.pp_65p,
&mut self.pp_60p,
&mut self.pp_55p,
&mut self.pp_median,
&mut self.pp_45p,
&mut self.pp_40p,
&mut self.pp_35p,
&mut self.pp_30p,
&mut self.pp_25p,
&mut self.pp_20p,
&mut self.pp_15p,
&mut self.pp_10p,
&mut self.pp_05p,
]
}
}
impl AnyDataset for PricePaidSubDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,171 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::{AnyDataset, ComputeData, MinInitialStates},
structs::{BiMap, Config, MapKind, MapPath},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
/// Market-price-to-other-price ratio series with SMAs, a momentum
/// oscillator, extreme-percentile bands, and the implied price levels.
#[derive(Allocative, Iterable)]
pub struct RatioDataset {
min_initial_states: MinInitialStates,
// market price divided by the other price series (see `compute`)
ratio: BiMap<f32>,
// simple moving averages of the ratio
ratio_1w_sma: BiMap<f32>,
ratio_1m_sma: BiMap<f32>,
ratio_1y_sma: BiMap<f32>,
// ratio relative to its 1y SMA, minus one
ratio_1y_sma_momentum_oscillator: BiMap<f32>,
// extreme percentiles of the ratio's history
ratio_99p: BiMap<f32>,
ratio_99_5p: BiMap<f32>,
ratio_99_9p: BiMap<f32>,
ratio_1p: BiMap<f32>,
ratio_0_5p: BiMap<f32>,
ratio_0_1p: BiMap<f32>,
// prices implied by the percentile bands (other price * ratio percentile)
price_99p: BiMap<f32>,
price_99_5p: BiMap<f32>,
price_99_9p: BiMap<f32>,
price_1p: BiMap<f32>,
price_0_5p: BiMap<f32>,
price_0_1p: BiMap<f32>,
}
impl RatioDataset {
/// Imports the ratio maps from disk; `name` identifies the other price
/// series and is baked into every file name.
pub fn import(path: &MapPath, name: &str, config: &Config) -> color_eyre::Result<Self> {
// Ratio maps live under "market_price_to_<name>_*", price bands under "<name>_*".
let f_ratio = |s: &str| path.join(&format!("market_price_to_{name}_{s}"));
let f_price = |s: &str| path.join(&format!("{name}_{s}"));
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Computed
// ---
ratio: BiMap::new_bin(1, MapKind::Computed, &f_ratio("ratio")),
ratio_1w_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1w_sma")),
ratio_1m_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1m_sma")),
ratio_1y_sma: BiMap::new_bin(2, MapKind::Computed, &f_ratio("ratio_1y_sma")),
ratio_1y_sma_momentum_oscillator: BiMap::new_bin(
2,
MapKind::Computed,
&f_ratio("ratio_1y_sma_momentum_oscillator"),
),
ratio_99p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99p")),
ratio_99_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_5p")),
ratio_99_9p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_99_9p")),
ratio_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_1p")),
ratio_0_5p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_5p")),
ratio_0_1p: BiMap::new_bin(3, MapKind::Computed, &f_ratio("ratio_0_1p")),
price_99p: BiMap::new_bin(4, MapKind::Computed, &f_price("99p")),
price_99_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_5p")),
price_99_9p: BiMap::new_bin(4, MapKind::Computed, &f_price("99_9p")),
price_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("1p")),
price_0_5p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_5p")),
price_0_1p: BiMap::new_bin(4, MapKind::Computed, &f_price("0_1p")),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Computes every derived ratio series for the given heights and dates.
///
/// Derives, in order: the raw market-price / other-price ratio, its
/// 1w/1m/1y SMAs, a momentum oscillator against the 1y SMA, the extreme
/// percentile bands, and the price levels those bands imply.
pub fn compute(
    &mut self,
    &ComputeData { heights, dates, .. }: &ComputeData,
    market_price: &mut BiMap<f32>,
    other_price: &mut BiMap<f32>,
) {
    self.ratio.height.multi_insert_divide(
        heights,
        &mut market_price.height,
        &mut other_price.height,
    );
    self.ratio
        .date
        .multi_insert_divide(dates, &mut market_price.date, &mut other_price.date);
    self.ratio_1w_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_WEEK_IN_DAYS,
    );
    // Fix: the 1m SMA pass was previously duplicated verbatim; one pass suffices.
    self.ratio_1m_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_MONTH_IN_DAYS,
    );
    self.ratio_1y_sma.multi_insert_simple_average(
        heights,
        dates,
        &mut self.ratio,
        ONE_YEAR_IN_DAYS,
    );
    // Momentum: how far the current ratio sits above/below its 1y average.
    self.ratio_1y_sma_momentum_oscillator
        .height
        .multi_insert_complex_transform(
            heights,
            &mut self.ratio.height,
            |(ratio, height, ..)| {
                (ratio / self.ratio_1y_sma.height.get_or_import(height).unwrap()) - 1.0
            },
        );
    self.ratio_1y_sma_momentum_oscillator
        .date
        .multi_insert_complex_transform(dates, &mut self.ratio.date, |(ratio, date, _, _)| {
            (ratio / self.ratio_1y_sma.date.get_or_import(date).unwrap()) - 1.0
        });
    self.ratio.multi_insert_percentile(
        heights,
        dates,
        vec![
            (&mut self.ratio_99p, 0.99),
            (&mut self.ratio_99_5p, 0.995),
            (&mut self.ratio_99_9p, 0.999),
            // Fix: the 1st percentile previously used 0.1 (i.e. the 10th
            // percentile), inconsistent with every sibling band.
            (&mut self.ratio_1p, 0.01),
            (&mut self.ratio_0_5p, 0.005),
            (&mut self.ratio_0_1p, 0.001),
        ],
        None,
    );
    // Convert the ratio bands back into price levels.
    self.price_99p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99p);
    self.price_99_5p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_5p);
    self.price_99_9p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_99_9p);
    self.price_1p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_1p);
    self.price_0_5p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_5p);
    self.price_0_1p
        .multi_insert_multiply(heights, dates, other_price, &mut self.ratio_0_1p);
}
}
impl AnyDataset for RatioDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,387 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::RealizedState,
},
structs::{BiMap, Config, DateMap, HeightMap, MapKind, MapPath, Price},
utils::ONE_MONTH_IN_DAYS,
};
/// Realized profit/loss series for a cohort: per-block figures, daily sums,
/// SOPR variants, cumulative/net series, and derived ratios.
#[derive(Allocative, Iterable)]
pub struct RealizedSubDataset {
min_initial_states: MinInitialStates,
// inserted, per block (see `insert`)
realized_profit: HeightMap<f32>,
realized_loss: HeightMap<f32>,
value_created: HeightMap<f32>,
adjusted_value_created: HeightMap<f32>,
value_destroyed: HeightMap<f32>,
adjusted_value_destroyed: HeightMap<f32>,
// inserted, per day (sums over the day's blocks)
realized_profit_1d_sum: DateMap<f32>,
realized_loss_1d_sum: DateMap<f32>,
value_created_1d_sum: DateMap<f32>,
adjusted_value_created_1d_sum: DateMap<f32>,
value_destroyed_1d_sum: DateMap<f32>,
adjusted_value_destroyed_1d_sum: DateMap<f32>,
// value created / value destroyed
spent_output_profit_ratio: BiMap<f32>,
adjusted_spent_output_profit_ratio: BiMap<f32>,
// computed (see `compute`)
negative_realized_loss: HeightMap<f32>,
negative_realized_loss_1d_sum: DateMap<f32>,
net_realized_profit_and_loss: HeightMap<f32>,
net_realized_profit_and_loss_1d_sum: DateMap<f32>,
net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap<f32>,
cumulative_realized_profit: BiMap<f32>,
cumulative_realized_loss: BiMap<f32>,
cumulative_net_realized_profit_and_loss: BiMap<f32>,
cumulative_net_realized_profit_and_loss_1m_net_change: BiMap<f32>,
realized_value: HeightMap<f32>,
realized_value_1d_sum: DateMap<f32>,
sell_side_risk_ratio: DateMap<f32>,
realized_profit_to_loss_ratio: HeightMap<f32>,
realized_profit_to_loss_1d_sum_ratio: DateMap<f32>,
}
impl RealizedSubDataset {
/// Imports the realized-profit/loss maps from disk, optionally namespaced
/// under `name`, then derives the dataset's minimum initial states.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Resolve a map path, nesting it under `name` when one is given.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
realized_profit: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_profit")),
realized_loss: HeightMap::new_bin(1, MapKind::Inserted, &f("realized_loss")),
value_created: HeightMap::new_bin(1, MapKind::Inserted, &f("value_created")),
adjusted_value_created: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_created"),
),
value_destroyed: HeightMap::new_bin(1, MapKind::Inserted, &f("value_destroyed")),
adjusted_value_destroyed: HeightMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_destroyed"),
),
realized_profit_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("realized_profit_1d_sum"),
),
realized_loss_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("realized_loss_1d_sum"),
),
value_created_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("value_created_1d_sum"),
),
adjusted_value_created_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_created_1d_sum"),
),
value_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("value_destroyed_1d_sum"),
),
adjusted_value_destroyed_1d_sum: DateMap::new_bin(
1,
MapKind::Inserted,
&f("adjusted_value_destroyed_1d_sum"),
),
spent_output_profit_ratio: BiMap::new_bin(
2,
MapKind::Inserted,
&f("spent_output_profit_ratio"),
),
adjusted_spent_output_profit_ratio: BiMap::new_bin(
2,
MapKind::Inserted,
&f("adjusted_spent_output_profit_ratio"),
),
// ---
// Computed
// ---
negative_realized_loss: HeightMap::new_bin(
2,
MapKind::Computed,
&f("negative_realized_loss"),
),
negative_realized_loss_1d_sum: DateMap::new_bin(
2,
MapKind::Computed,
&f("negative_realized_loss_1d_sum"),
),
net_realized_profit_and_loss: HeightMap::new_bin(
1,
MapKind::Computed,
&f("net_realized_profit_and_loss"),
),
net_realized_profit_and_loss_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("net_realized_profit_and_loss_1d_sum"),
),
net_realized_profit_and_loss_1d_sum_to_market_cap_ratio: DateMap::new_bin(
2,
MapKind::Computed,
// NOTE(review): file name lacks the `_1d_sum` part of the field name —
// presumably deliberate to keep the exported path short; confirm.
&f("net_realized_profit_and_loss_to_market_cap_ratio"),
),
cumulative_realized_profit: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_realized_profit"),
),
cumulative_realized_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_realized_loss"),
),
cumulative_net_realized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_net_realized_profit_and_loss"),
),
cumulative_net_realized_profit_and_loss_1m_net_change: BiMap::new_bin(
1,
MapKind::Computed,
&f("cumulative_net_realized_profit_and_loss_1m_net_change"),
),
realized_value: HeightMap::new_bin(1, MapKind::Computed, &f("realized_value")),
realized_value_1d_sum: DateMap::new_bin(
1,
MapKind::Computed,
&f("realized_value_1d_sum"),
),
sell_side_risk_ratio: DateMap::new_bin(
1,
MapKind::Computed,
&f("sell_side_risk_ratio"),
),
realized_profit_to_loss_ratio: HeightMap::new_bin(
1,
MapKind::Computed,
&f("realized_profit_to_loss_ratio"),
),
realized_profit_to_loss_1d_sum_ratio: DateMap::new_bin(
1,
MapKind::Computed,
&f("realized_profit_to_loss_1d_sum_ratio"),
),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Records one block's realized profit/loss figures at `height` and, on the
/// last block of a date, folds the block-level series into their daily sums.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        date_blocks_range,
        ..
    }: &InsertData,
    height_state: &RealizedState,
) {
    self.realized_profit
        .insert(height, height_state.realized_profit().to_dollar() as f32);
    self.realized_loss
        .insert(height, height_state.realized_loss().to_dollar() as f32);
    self.value_created
        .insert(height, height_state.value_created().to_dollar() as f32);
    self.adjusted_value_created.insert(
        height,
        height_state.adjusted_value_created().to_dollar() as f32,
    );
    self.value_destroyed
        .insert(height, height_state.value_destroyed().to_dollar() as f32);
    self.adjusted_value_destroyed.insert(
        height,
        height_state.adjusted_value_destroyed().to_dollar() as f32,
    );
    // SOPR = value created / value destroyed, defaulting to 1.0 (break-even)
    // when nothing was destroyed, to avoid a division by zero.
    self.spent_output_profit_ratio.height.insert(height, {
        if height_state.value_destroyed() > Price::ZERO {
            (height_state.value_created().to_cent() as f64
                / height_state.value_destroyed().to_cent() as f64) as f32
        } else {
            1.0
        }
    });
    self.adjusted_spent_output_profit_ratio
        .height
        .insert(height, {
            if height_state.adjusted_value_destroyed() > Price::ZERO {
                (height_state.adjusted_value_created().to_cent() as f64
                    / height_state.adjusted_value_destroyed().to_cent() as f64)
                    as f32
            } else {
                1.0
            }
        });
    if is_date_last_block {
        self.realized_profit_1d_sum
            .insert(date, self.realized_profit.sum_range(date_blocks_range));
        self.realized_loss_1d_sum
            .insert(date, self.realized_loss.sum_range(date_blocks_range));
        let value_created_1d_sum = self
            .value_created_1d_sum
            .insert(date, self.value_created.sum_range(date_blocks_range));
        let adjusted_value_created_1d_sum = self.adjusted_value_created_1d_sum.insert(
            date,
            self.adjusted_value_created.sum_range(date_blocks_range),
        );
        let value_destroyed_1d_sum = self
            .value_destroyed_1d_sum
            .insert(date, self.value_destroyed.sum_range(date_blocks_range));
        let adjusted_value_destroyed_1d_sum = self.adjusted_value_destroyed_1d_sum.insert(
            date,
            self.adjusted_value_destroyed.sum_range(date_blocks_range),
        );
        // Fix: mirror the height-level zero guard. Previously a day with zero
        // destroyed value stored an infinite/NaN daily SOPR.
        self.spent_output_profit_ratio.date.insert(
            date,
            if value_destroyed_1d_sum > 0.0 {
                value_created_1d_sum / value_destroyed_1d_sum
            } else {
                1.0
            },
        );
        self.adjusted_spent_output_profit_ratio.date.insert(
            date,
            if adjusted_value_destroyed_1d_sum > 0.0 {
                adjusted_value_created_1d_sum / adjusted_value_destroyed_1d_sum
            } else {
                1.0
            },
        );
    }
}
/// Computes every derived realized-profit/loss series for the given heights
/// and dates. Order matters: later series read the ones inserted above.
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
market_cap: &mut BiMap<f32>,
) {
// Negated loss, used for charts that plot loss below zero.
self.negative_realized_loss.multi_insert_simple_transform(
heights,
&mut self.realized_loss,
|v, _| v * -1.0,
);
self.negative_realized_loss_1d_sum
.multi_insert_simple_transform(dates, &mut self.realized_loss_1d_sum, |v, _| v * -1.0);
// Net = profit - loss.
self.net_realized_profit_and_loss.multi_insert_subtract(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.net_realized_profit_and_loss_1d_sum
.multi_insert_subtract(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
self.net_realized_profit_and_loss_1d_sum_to_market_cap_ratio
.multi_insert_percentage(
dates,
&mut self.net_realized_profit_and_loss_1d_sum,
&mut market_cap.date,
);
// Running totals.
self.cumulative_realized_profit
.height
.multi_insert_cumulative(heights, &mut self.realized_profit);
self.cumulative_realized_profit
.date
.multi_insert_cumulative(dates, &mut self.realized_profit_1d_sum);
self.cumulative_realized_loss
.height
.multi_insert_cumulative(heights, &mut self.realized_loss);
self.cumulative_realized_loss
.date
.multi_insert_cumulative(dates, &mut self.realized_loss_1d_sum);
self.cumulative_net_realized_profit_and_loss
.height
.multi_insert_cumulative(heights, &mut self.net_realized_profit_and_loss);
self.cumulative_net_realized_profit_and_loss
.date
.multi_insert_cumulative(dates, &mut self.net_realized_profit_and_loss_1d_sum);
self.cumulative_net_realized_profit_and_loss_1m_net_change
.multi_insert_net_change(
heights,
dates,
&mut self.cumulative_net_realized_profit_and_loss,
ONE_MONTH_IN_DAYS,
);
// Realized value = profit + loss (total value moved at a gain or loss).
self.realized_value.multi_insert_add(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.realized_value_1d_sum.multi_insert_add(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
self.sell_side_risk_ratio.multi_insert_percentage(
dates,
&mut self.realized_value_1d_sum,
&mut market_cap.date,
);
self.realized_profit_to_loss_ratio.multi_insert_divide(
heights,
&mut self.realized_profit,
&mut self.realized_loss,
);
self.realized_profit_to_loss_1d_sum_ratio
.multi_insert_divide(
dates,
&mut self.realized_profit_1d_sum,
&mut self.realized_loss_1d_sum,
);
}
}
impl AnyDataset for RealizedSubDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,277 @@
use std::{iter::Sum, ops::Add};
use allocative::Allocative;
use crate::{
structs::{
Date, DateMapChunkId, GenericMap, MapChunkId, MapKey, MapKind, MapPath, MapSerialized,
MapValue, SerializedDateMap,
},
utils::{get_percentile, LossyFrom},
};
/// Date-keyed specialization of [`RecapDataset`].
pub type DateRecapDataset<T> = RecapDataset<Date, T, DateMapChunkId, SerializedDateMap<T>>;
/// A set of summary-statistic maps; each map is only present when it was
/// enabled through `RecapOptions` at import time.
#[derive(Allocative)]
pub struct RecapDataset<Key, Value, ChunkId, Serialized> {
average: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
sum: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
max: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_90p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_75p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
median: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_25p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
_10p: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
min: Option<GenericMap<Key, Value, ChunkId, Serialized>>,
}
/// Builder-style flags selecting which statistics a `RecapDataset` keeps.
/// All flags default to `false`; enable them via the `add_*` methods.
#[derive(Default)]
pub struct RecapOptions {
average: bool,
sum: bool,
max: bool,
_90p: bool,
_75p: bool,
median: bool,
_25p: bool,
_10p: bool,
min: bool,
}
impl RecapOptions {
    /// Enables the minimum map.
    pub fn add_min(self) -> Self {
        Self { min: true, ..self }
    }
    /// Enables the maximum map.
    pub fn add_max(self) -> Self {
        Self { max: true, ..self }
    }
    /// Enables the median map.
    pub fn add_median(self) -> Self {
        Self { median: true, ..self }
    }
    /// Enables the average map.
    pub fn add_average(self) -> Self {
        Self { average: true, ..self }
    }
    /// Enables the sum map.
    #[allow(unused)]
    pub fn add_sum(self) -> Self {
        Self { sum: true, ..self }
    }
    /// Enables the 90th-percentile map.
    pub fn add_90p(self) -> Self {
        Self { _90p: true, ..self }
    }
    /// Enables the 75th-percentile map.
    pub fn add_75p(self) -> Self {
        Self { _75p: true, ..self }
    }
    /// Enables the 25th-percentile map.
    pub fn add_25p(self) -> Self {
        Self { _25p: true, ..self }
    }
    /// Enables the 10th-percentile map.
    pub fn add_10p(self) -> Self {
        Self { _10p: true, ..self }
    }
}
impl<Key, Value, ChunkId, Serialized> RecapDataset<Key, Value, ChunkId, Serialized>
where
Value: MapValue,
ChunkId: MapChunkId,
Key: MapKey<ChunkId>,
Serialized: MapSerialized<Key, Value, ChunkId>,
{
/// Imports the recap dataset, creating one computed map per statistic
/// enabled in `options` (disabled ones stay `None`).
pub fn import(path: &MapPath, options: RecapOptions) -> color_eyre::Result<Self> {
    // Build a map only when its flag is set, rooted at `path`.
    let make = |enabled: bool, file_name: &str| {
        enabled.then(|| GenericMap::new_bin(1, MapKind::Computed, &path.join(file_name)))
    };
    Ok(Self {
        min: make(options.min, "min"),
        max: make(options.max, "max"),
        median: make(options.median, "median"),
        average: make(options.average, "average"),
        sum: make(options.sum, "sum"),
        _90p: make(options._90p, "90p"),
        _75p: make(options._75p, "75p"),
        _25p: make(options._25p, "25p"),
        _10p: make(options._10p, "10p"),
    })
}
/// Computes the enabled statistics of `values` and inserts them at `key`.
///
/// Sorts `values` in place when any order statistic (min/max/median/
/// percentiles) is enabled.
/// NOTE(review): assumes `values` is non-empty — `min`/`max` unwrap
/// `first`/`last`, and `average` divides by the length; callers must
/// guarantee this. Confirm before passing possibly-empty slices.
pub fn compute<'a, Value2>(&mut self, key: Key, values: &'a mut [Value2])
where
Value: LossyFrom<f32> + LossyFrom<Value2>,
Value2: Sum<&'a Value2> + Ord + Add<Output = Value2> + Clone + Copy + LossyFrom<f32>,
f32: LossyFrom<Value> + LossyFrom<Value2>,
{
if self.max.is_some()
|| self._90p.is_some()
|| self._75p.is_some()
|| self.median.is_some()
|| self._25p.is_some()
|| self._10p.is_some()
|| self.min.is_some()
{
// Order statistics need the slice sorted; stability is irrelevant here.
values.sort_unstable();
if let Some(max) = self.max.as_mut() {
max.insert_computed(key, Value::lossy_from(*values.last().unwrap()));
}
if let Some(_90p) = self._90p.as_mut() {
_90p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.90)));
}
if let Some(_75p) = self._75p.as_mut() {
_75p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.75)));
}
if let Some(median) = self.median.as_mut() {
median.insert_computed(key, Value::lossy_from(get_percentile(values, 0.50)));
}
if let Some(_25p) = self._25p.as_mut() {
_25p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.25)));
}
if let Some(_10p) = self._10p.as_mut() {
_10p.insert_computed(key, Value::lossy_from(get_percentile(values, 0.10)));
}
if let Some(min) = self.min.as_mut() {
min.insert_computed(key, Value::lossy_from(*values.first().unwrap()));
}
}
if self.sum.is_some() || self.average.is_some() {
// The sum is computed once and shared by both statistics.
let sum = Value::lossy_from(values.iter().sum::<Value2>());
if let Some(sum_map) = self.sum.as_mut() {
sum_map.insert_computed(key, sum);
}
if let Some(average) = self.average.as_mut() {
let len = values.len() as f32;
average.insert_computed(key, Value::lossy_from(f32::lossy_from(sum) / len));
}
}
}
/// Returns references to every enabled map, in a fixed order
/// (min, max, median, average, sum, 90p, 75p, 25p, 10p).
pub fn as_vec(&self) -> Vec<&GenericMap<Key, Value, ChunkId, Serialized>> {
    [
        self.min.as_ref(),
        self.max.as_ref(),
        self.median.as_ref(),
        self.average.as_ref(),
        self.sum.as_ref(),
        self._90p.as_ref(),
        self._75p.as_ref(),
        self._25p.as_ref(),
        self._10p.as_ref(),
    ]
    .into_iter()
    .flatten()
    .collect()
}
/// Returns mutable references to every enabled map, in the same fixed
/// order as [`Self::as_vec`]: min, max, median, average, sum, 90p, 75p, 25p, 10p.
pub fn as_mut_vec(&mut self) -> Vec<&mut GenericMap<Key, Value, ChunkId, Serialized>> {
    [
        self.min.as_mut(),
        self.max.as_mut(),
        self.median.as_mut(),
        self.average.as_mut(),
        self.sum.as_mut(),
        self._90p.as_mut(),
        self._75p.as_mut(),
        self._25p.as_mut(),
        self._10p.as_mut(),
    ]
    .into_iter()
    .flatten()
    .collect()
}
}

View File

@@ -0,0 +1,109 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::SupplyState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// A cohort's supply (in BTC) plus ratios against the circulating supply.
#[derive(Allocative, Iterable)]
pub struct SupplySubDataset {
min_initial_states: MinInitialStates,
// inserted per block, mirrored per day (see `insert`)
pub supply: BiMap<f64>,
// computed (see `compute`)
pub supply_to_circulating_supply_ratio: BiMap<f64>,
pub halved_supply: BiMap<f64>,
pub halved_supply_to_circulating_supply_ratio: BiMap<f64>,
}
impl SupplySubDataset {
/// Imports the supply maps from disk, optionally namespaced under `name`,
/// then derives the dataset's minimum initial states.
pub fn import(
    path: &MapPath,
    name: &Option<String>,
    config: &Config,
) -> color_eyre::Result<Self> {
    // Resolve a map path, nesting it under `name` when one is given.
    let file = |suffix: &str| match name {
        Some(prefix) => path.join(&format!("{prefix}/{suffix}")),
        None => path.join(suffix),
    };
    let mut dataset = Self {
        min_initial_states: MinInitialStates::default(),
        // ---
        // Inserted
        // ---
        supply: BiMap::new_bin(1, MapKind::Inserted, &file("supply")),
        // ---
        // Computed
        // ---
        supply_to_circulating_supply_ratio: BiMap::new_bin(
            1,
            MapKind::Computed,
            &file("supply_to_circulating_supply_ratio"),
        ),
        halved_supply: BiMap::new_bin(1, MapKind::Computed, &file("halved_supply")),
        halved_supply_to_circulating_supply_ratio: BiMap::new_bin(
            1,
            MapKind::Computed,
            &file("halved_supply_to_circulating_supply_ratio"),
        ),
    };
    dataset
        .min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&dataset, config));
    Ok(dataset)
}
/// Records the cohort's supply (in BTC) at `height`, mirroring the value
/// into the date map on the last block of the date.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        ..
    }: &InsertData,
    state: &SupplyState,
) {
    let supply_in_btc = state.supply().to_btc();
    let inserted = self.supply.height.insert(height, supply_in_btc);
    if is_date_last_block {
        self.supply.date.insert(date, inserted);
    }
}
/// Computes the supply ratios derived from the inserted supply series.
/// (Removed a stale `#[allow(unused_variables)]`: every parameter is used.)
pub fn compute(
    &mut self,
    &ComputeData { heights, dates, .. }: &ComputeData,
    circulating_supply: &mut BiMap<f64>,
) {
    self.supply_to_circulating_supply_ratio
        .multi_insert_percentage(heights, dates, &mut self.supply, circulating_supply);
    self.halved_supply
        .multi_insert_simple_transform(heights, dates, &mut self.supply, &|v| v / 2.0);
    // (supply / 2) / circulating == (supply / circulating) / 2, so the
    // halved ratio is derived from the full ratio instead of recomputed.
    self.halved_supply_to_circulating_supply_ratio
        .multi_insert_simple_transform(
            heights,
            dates,
            &mut self.supply_to_circulating_supply_ratio,
            &|v| v / 2.0,
        );
}
}
impl AnyDataset for SupplySubDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,199 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::UnrealizedState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Unrealized profit/loss of a cohort: supply in profit/loss and the
/// associated dollar amounts, plus derived net and ratio series.
#[derive(Allocative, Iterable)]
pub struct UnrealizedSubDataset {
min_initial_states: MinInitialStates,
// inserted (see `insert`)
supply_in_profit: BiMap<f64>,
unrealized_profit: BiMap<f32>,
unrealized_loss: BiMap<f32>,
// computed (see `compute`)
supply_in_loss: BiMap<f64>,
negative_unrealized_loss: BiMap<f32>,
net_unrealized_profit_and_loss: BiMap<f32>,
net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap<f32>,
supply_in_profit_to_own_supply_ratio: BiMap<f64>,
supply_in_profit_to_circulating_supply_ratio: BiMap<f64>,
supply_in_loss_to_own_supply_ratio: BiMap<f64>,
supply_in_loss_to_circulating_supply_ratio: BiMap<f64>,
}
impl UnrealizedSubDataset {
/// Imports the unrealized profit/loss maps from disk, optionally namespaced
/// under `name`, then derives the dataset's minimum initial states.
pub fn import(
path: &MapPath,
name: &Option<String>,
config: &Config,
) -> color_eyre::Result<Self> {
// Resolve a map path, nesting it under `name` when one is given.
let f = |s: &str| {
if let Some(name) = name {
path.join(&format!("{name}/{s}"))
} else {
path.join(s)
}
};
let mut s = Self {
min_initial_states: MinInitialStates::default(),
// ---
// Inserted
// ---
supply_in_profit: BiMap::new_bin(1, MapKind::Inserted, &f("supply_in_profit")),
unrealized_profit: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_profit")),
unrealized_loss: BiMap::new_bin(1, MapKind::Inserted, &f("unrealized_loss")),
// ---
// Computed
// ---
supply_in_loss: BiMap::new_bin(1, MapKind::Computed, &f("supply_in_loss")),
negative_unrealized_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("negative_unrealized_loss"),
),
net_unrealized_profit_and_loss: BiMap::new_bin(
1,
MapKind::Computed,
&f("net_unrealized_profit_and_loss"),
),
net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap::new_bin(
2,
MapKind::Computed,
&f("net_unrealized_profit_and_loss_to_market_cap_ratio"),
),
supply_in_profit_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_own_supply_ratio"),
),
supply_in_profit_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_profit_to_circulating_supply_ratio"),
),
supply_in_loss_to_own_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_own_supply_ratio"),
),
supply_in_loss_to_circulating_supply_ratio: BiMap::new_bin(
1,
MapKind::Computed,
&f("supply_in_loss_to_circulating_supply_ratio"),
),
};
s.min_initial_states
.consume(MinInitialStates::compute_from_dataset(&s, config));
Ok(s)
}
/// Records the block-level unrealized figures at `height`; on the last
/// block of a date, the date-level figures (which must then be present)
/// are written into the date maps.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        date,
        is_date_last_block,
        ..
    }: &InsertData,
    block_state: &UnrealizedState,
    date_state: &Option<UnrealizedState>,
) {
    let supply = block_state.supply_in_profit().to_btc();
    let profit = block_state.unrealized_profit().to_dollar() as f32;
    let loss = block_state.unrealized_loss().to_dollar() as f32;
    self.supply_in_profit.height.insert(height, supply);
    self.unrealized_profit.height.insert(height, profit);
    self.unrealized_loss.height.insert(height, loss);
    if is_date_last_block {
        // A date-level state is expected whenever the date closes.
        let day = date_state.as_ref().unwrap();
        self.supply_in_profit
            .date
            .insert(date, day.supply_in_profit().to_btc());
        self.unrealized_profit
            .date
            .insert(date, day.unrealized_profit().to_dollar() as f32);
        self.unrealized_loss
            .date
            .insert(date, day.unrealized_loss().to_dollar() as f32);
    }
}
/// Computes the derived unrealized series: supply in loss (own supply minus
/// supply in profit), net figures, and the supply ratios.
pub fn compute(
&mut self,
&ComputeData { heights, dates, .. }: &ComputeData,
own_supply: &mut BiMap<f64>,
circulating_supply: &mut BiMap<f64>,
market_cap: &mut BiMap<f32>,
) {
// Anything not in profit is in loss.
self.supply_in_loss.multi_insert_subtract(
heights,
dates,
own_supply,
&mut self.supply_in_profit,
);
self.negative_unrealized_loss.multi_insert_simple_transform(
heights,
dates,
&mut self.unrealized_loss,
&|v| v * -1.0,
);
self.net_unrealized_profit_and_loss.multi_insert_subtract(
heights,
dates,
&mut self.unrealized_profit,
&mut self.unrealized_loss,
);
self.net_unrealized_profit_and_loss_to_market_cap_ratio
.multi_insert_percentage(
heights,
dates,
&mut self.net_unrealized_profit_and_loss,
market_cap,
);
self.supply_in_profit_to_own_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_profit, own_supply);
self.supply_in_profit_to_circulating_supply_ratio
.multi_insert_percentage(
heights,
dates,
&mut self.supply_in_profit,
circulating_supply,
);
self.supply_in_loss_to_own_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_loss, own_supply);
self.supply_in_loss_to_circulating_supply_ratio
.multi_insert_percentage(heights, dates, &mut self.supply_in_loss, circulating_supply);
}
}
impl AnyDataset for UnrealizedSubDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,70 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::UTXOState,
},
structs::{BiMap, Config, MapKind, MapPath},
};
/// Tracks a cohort's UTXO count per block (height) and per day (date).
#[derive(Allocative, Iterable)]
pub struct UTXOSubDataset {
min_initial_states: MinInitialStates,
// number of unspent outputs
count: BiMap<f64>,
}
impl UTXOSubDataset {
/// Imports the UTXO-count map from disk, optionally namespaced under
/// `name`, then derives the dataset's minimum initial states.
pub fn import(
    path: &MapPath,
    name: &Option<String>,
    config: &Config,
) -> color_eyre::Result<Self> {
    // Resolve a map path, nesting it under `name` when one is given.
    let file = |suffix: &str| match name {
        Some(prefix) => path.join(&format!("{prefix}/{suffix}")),
        None => path.join(suffix),
    };
    let mut dataset = Self {
        min_initial_states: MinInitialStates::default(),
        // ---
        // Inserted
        // ---
        count: BiMap::new_bin(1, MapKind::Inserted, &file("utxo_count")),
    };
    dataset
        .min_initial_states
        .consume(MinInitialStates::compute_from_dataset(&dataset, config));
    Ok(dataset)
}
/// Records the cohort's UTXO count at `height`, mirroring the value into
/// the date map on the last block of the date.
pub fn insert(
    &mut self,
    &InsertData {
        height,
        is_date_last_block,
        date,
        ..
    }: &InsertData,
    state: &UTXOState,
) {
    let utxo_count = self.count.height.insert(height, state.count());
    if is_date_last_block {
        self.count.date.insert(date, utxo_count);
    }
}
}
impl AnyDataset for UTXOSubDataset {
/// Accessor for the minimum initial states computed at import time.
fn get_min_initial_states(&self) -> &MinInitialStates {
&self.min_initial_states
}
}

View File

@@ -0,0 +1,325 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::datasets::InsertData,
structs::{BiMap, Config, DateMap, HeightMap, MapKind},
utils::{
ONE_DAY_IN_S, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS, TARGET_BLOCKS_PER_DAY,
},
};
use super::{AnyDataset, ComputeData, MinInitialStates};
#[derive(Allocative, Iterable)]
pub struct TransactionDataset {
    // Minimum heights/dates from which this dataset still needs inserts/computes.
    min_initial_states: MinInitialStates,

    // Inserted per block / summed per date.
    pub count: HeightMap<usize>,
    pub count_1d_sum: DateMap<usize>,
    // Volume in BTC (from `amount_sent.to_btc()`).
    pub volume: HeightMap<f64>,
    pub volume_1d_sum: DateMap<f64>,
    // Volume valued at the block's price, in dollars.
    pub volume_in_dollars: HeightMap<f32>,
    pub volume_in_dollars_1d_sum: DateMap<f32>,

    // Average sent
    // Average sent in dollars
    // Median sent
    // Median sent in dollars
    // Min
    // Max
    // 10th 25th 75th 90th percentiles
    // type
    // version

    // Computed: weekly/monthly simple moving averages of the inserted maps.
    pub count_1w_sma: HeightMap<f32>,
    pub count_1d_sum_1w_sma: DateMap<f32>,
    pub count_1m_sma: HeightMap<f32>,
    pub count_1d_sum_1m_sma: DateMap<f32>,
    pub volume_1w_sma: HeightMap<f32>,
    pub volume_1d_sum_1w_sma: DateMap<f32>,
    pub volume_1m_sma: HeightMap<f32>,
    pub volume_1d_sum_1m_sma: DateMap<f32>,
    pub volume_in_dollars_1w_sma: HeightMap<f32>,
    pub volume_in_dollars_1d_sum_1w_sma: DateMap<f32>,
    pub volume_in_dollars_1m_sma: HeightMap<f32>,
    pub volume_in_dollars_1d_sum_1m_sma: DateMap<f32>,
    // Computed: trailing one-year sums and derived ratios.
    pub annualized_volume: DateMap<f32>,
    pub annualized_volume_in_dollars: DateMap<f32>,
    pub velocity: DateMap<f32>,
    pub transactions_per_second: BiMap<f32>,
    pub transactions_per_second_1w_sma: BiMap<f32>,
    pub transactions_per_second_1m_sma: BiMap<f32>,
}
impl TransactionDataset {
    /// Imports every transaction map from the datasets directory and records
    /// the minimum initial states required before inserting/computing.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let f = |s: &str| config.path_datasets().join(s);

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            // ---
            // Inserted
            // ---
            count: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_count")),
            count_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_count_1d_sum")),
            volume: HeightMap::new_bin(1, MapKind::Inserted, &f("transaction_volume")),
            volume_1d_sum: DateMap::new_bin(1, MapKind::Inserted, &f("transaction_volume_1d_sum")),
            volume_in_dollars: HeightMap::new_bin(
                1,
                MapKind::Inserted,
                &f("transaction_volume_in_dollars"),
            ),
            volume_in_dollars_1d_sum: DateMap::new_bin(
                1,
                MapKind::Inserted,
                &f("transaction_volume_in_dollars_1d_sum"),
            ),

            // ---
            // Computed
            // ---
            // NOTE(review): this header previously said "Inserted", but every
            // map below is constructed with `MapKind::Computed`.
            count_1w_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1w_sma")),
            count_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_count_1d_sum_1w_sma"),
            ),
            count_1m_sma: HeightMap::new_bin(1, MapKind::Computed, &f("transaction_count_1m_sma")),
            count_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_count_1d_sum_1m_sma"),
            ),
            volume_1w_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1w_sma"),
            ),
            volume_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1d_sum_1w_sma"),
            ),
            volume_1m_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1m_sma"),
            ),
            volume_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_1d_sum_1m_sma"),
            ),
            volume_in_dollars_1w_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1w_sma"),
            ),
            volume_in_dollars_1d_sum_1w_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1d_sum_1w_sma"),
            ),
            volume_in_dollars_1m_sma: HeightMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1m_sma"),
            ),
            volume_in_dollars_1d_sum_1m_sma: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("transaction_volume_in_dollars_1d_sum_1m_sma"),
            ),
            annualized_volume: DateMap::new_bin(
                1,
                MapKind::Computed,
                &f("annualized_transaction_volume"),
            ),
            // NOTE(review): version 2 while siblings are version 1 — presumably a
            // deliberate format bump for this map; confirm.
            annualized_volume_in_dollars: DateMap::new_bin(
                2,
                MapKind::Computed,
                &f("annualized_transaction_volume_in_dollars"),
            ),
            velocity: DateMap::new_bin(1, MapKind::Computed, &f("transaction_velocity")),
            transactions_per_second: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second"),
            ),
            transactions_per_second_1w_sma: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second_1w_sma"),
            ),
            transactions_per_second_1m_sma: BiMap::new_bin(
                1,
                MapKind::Computed,
                &f("transactions_per_second_1m_sma"),
            ),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));

        Ok(s)
    }

    /// Inserts the block's transaction count and volumes; on a date's last
    /// block, also sums that date's blocks into the daily maps.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            amount_sent,
            transaction_count,
            is_date_last_block,
            date_blocks_range,
            block_price,
            ..
        }: &InsertData,
    ) {
        self.count.insert(height, transaction_count);

        self.volume.insert(height, amount_sent.to_btc());

        // Dollar volume is the BTC amount valued at this block's price.
        self.volume_in_dollars
            .insert(height, (block_price * amount_sent).to_dollar() as f32);

        if is_date_last_block {
            self.count_1d_sum
                .insert(date, self.count.sum_range(date_blocks_range));

            self.volume_1d_sum
                .insert(date, self.volume.sum_range(date_blocks_range));

            self.volume_in_dollars_1d_sum
                .insert(date, self.volume_in_dollars.sum_range(date_blocks_range));
        }
    }

    /// Derives the computed series (moving averages, annualized sums,
    /// velocity, transactions per second) from the inserted maps.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates, .. }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        block_interval: &mut HeightMap<u32>,
    ) {
        // Height-based SMAs use a block-count window, date-based SMAs a day window.
        self.count_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.count,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.count_1d_sum_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.count_1d_sum,
            ONE_WEEK_IN_DAYS,
        );
        self.count_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.count,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.count_1d_sum_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.count_1d_sum,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.volume,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.volume_1d_sum_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.volume_1d_sum,
            ONE_WEEK_IN_DAYS,
        );
        self.volume_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.volume,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.volume_1d_sum_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.volume_1d_sum,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_in_dollars_1w_sma.multi_insert_simple_average(
            heights,
            &mut self.volume_in_dollars,
            TARGET_BLOCKS_PER_DAY * ONE_WEEK_IN_DAYS,
        );
        self.volume_in_dollars_1d_sum_1w_sma
            .multi_insert_simple_average(
                dates,
                &mut self.volume_in_dollars_1d_sum,
                ONE_WEEK_IN_DAYS,
            );
        self.volume_in_dollars_1m_sma.multi_insert_simple_average(
            heights,
            &mut self.volume_in_dollars,
            TARGET_BLOCKS_PER_DAY * ONE_MONTH_IN_DAYS,
        );
        self.volume_in_dollars_1d_sum_1m_sma
            .multi_insert_simple_average(
                dates,
                &mut self.volume_in_dollars_1d_sum,
                ONE_MONTH_IN_DAYS,
            );

        // Trailing one-year sums of the daily volumes.
        self.annualized_volume.multi_insert_last_x_sum(
            dates,
            &mut self.volume_1d_sum,
            ONE_YEAR_IN_DAYS,
        );
        self.annualized_volume_in_dollars.multi_insert_last_x_sum(
            dates,
            &mut self.volume_in_dollars_1d_sum,
            ONE_YEAR_IN_DAYS,
        );

        // Velocity = annualized volume / circulating supply.
        self.velocity.multi_insert_divide(
            dates,
            &mut self.annualized_volume,
            &mut circulating_supply.date,
        );

        // Per-height TPS = block transaction count / block interval (seconds).
        self.transactions_per_second.height.multi_insert_divide(
            heights,
            &mut self.count,
            block_interval,
        );
        // Per-date TPS scales by how much of the day has elapsed.
        self.transactions_per_second
            .date
            .multi_insert_simple_transform(dates, &mut self.count_1d_sum, |count, date| {
                count as f32 / (date.get_day_completion() as f32 * ONE_DAY_IN_S as f32)
            });
        self.transactions_per_second_1w_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_WEEK_IN_DAYS,
            );
        self.transactions_per_second_1m_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_MONTH_IN_DAYS,
            );
    }
}
impl AnyDataset for TransactionDataset {
    // Accessor required by `AnyDataset`; exposes the minimum states computed at import.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,199 @@
use allocative::Allocative;
use struct_iterable::Iterable;
use crate::{
parser::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates, SubDataset},
states::UTXOCohortId,
},
structs::{BiMap, Config, Date, Height, MapPath},
};
#[derive(Allocative, Iterable)]
pub struct UTXODataset {
    // Which UTXO cohort this dataset tracks.
    id: UTXOCohortId,
    // Minimum heights/dates from which this dataset still needs inserts/computes.
    min_initial_states: MinInitialStates,
    // Shared per-cohort sub-datasets (supply, utxo, capitalization, ...).
    pub subs: SubDataset,
}
impl UTXODataset {
    /// Imports the cohort's sub-datasets, namespaced under the cohort's name.
    pub fn import(
        parent_path: &MapPath,
        id: UTXOCohortId,
        config: &Config,
    ) -> color_eyre::Result<Self> {
        let name = id.name().to_owned();

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            id,
            subs: SubDataset::import(parent_path, &Some(name), config)?,
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s, config));

        Ok(s)
    }

    /// Inserts the block's data into each sub-dataset that still needs it at
    /// this height/date.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            states,
            utxo_cohorts_one_shot_states,
            // utxo_cohorts_received_states,
            utxo_cohorts_sent_states,
            ..
        } = insert_data;

        // NOTE(review): the `unwrap`s below assume the durable states were
        // initialized whenever any of these inserts is needed — confirm that
        // the caller guarantees this (see `needs_durable_states`).
        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.subs.supply.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .supply_state,
            );
        }

        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.subs.utxo.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .utxo_state,
            );
        }

        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.subs.capitalization.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .as_ref()
                    .unwrap()
                    .get(&self.id)
                    .durable_states
                    .capitalization_state,
            );
        }

        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.subs.unrealized.insert(
                insert_data,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_block_state,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_date_state,
            );
        }

        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.subs.price_paid.insert(
                insert_data,
                &utxo_cohorts_one_shot_states.get(&self.id).price_paid_state,
            );
        }

        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.subs.realized.insert(
                insert_data,
                &utxo_cohorts_sent_states.get(&self.id).realized,
            );
        }

        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.subs
                .input
                .insert(insert_data, &utxo_cohorts_sent_states.get(&self.id).input);
        }

        // TODO: move output from common to address
        // if self.subs.output.needs_insert(insert_data) {
        //     self.subs
        //         .output
        //         .insert(insert_data, utxo_cohorts_received_states.get(&self.id));
        // }
    }

    // The `needs_insert_*` helpers delegate to the matching sub-dataset,
    // letting callers skip work that is already persisted.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.subs.utxo.needs_insert(height, date)
    }

    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.subs.capitalization.needs_insert(height, date)
    }

    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.subs.supply.needs_insert(height, date)
    }

    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.subs.price_paid.needs_insert(height, date)
    }

    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.subs.realized.needs_insert(height, date)
    }

    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.subs.unrealized.needs_insert(height, date)
    }

    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.subs.input.needs_insert(height, date)
    }

    /// Runs each sub-dataset's compute pass when it still has work to do.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.subs.supply.should_compute(compute_data) {
            self.subs.supply.compute(compute_data, circulating_supply);
        }

        if self.subs.unrealized.should_compute(compute_data) {
            self.subs.unrealized.compute(
                compute_data,
                &mut self.subs.supply.supply,
                circulating_supply,
                market_cap,
            );
        }

        if self.subs.realized.should_compute(compute_data) {
            self.subs.realized.compute(compute_data, market_cap);
        }

        if self.subs.capitalization.should_compute(compute_data) {
            self.subs
                .capitalization
                .compute(compute_data, closes, &mut self.subs.supply.supply);
        }

        // if self.subs.output.should_compute(compute_data) {
        //     self.subs
        //         .output
        //         .compute(compute_data, &mut self.subs.supply.total);
        // }
    }
}
impl AnyDataset for UTXODataset {
    // Accessor required by `AnyDataset`; exposes the minimum states computed at import.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,164 @@
mod dataset;
use allocative::Allocative;
use dataset::*;
use rayon::prelude::*;
use itertools::Itertools;
use crate::{
parser::datasets::AnyDatasets,
parser::states::{SplitByUTXOCohort, UTXOCohortId},
structs::{BiMap, Config, Date, Height},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
#[derive(Allocative)]
pub struct UTXODatasets {
    // Minimum states aggregated over every cohort dataset.
    min_initial_states: MinInitialStates,
    // One `UTXODataset` per UTXO cohort.
    cohorts: SplitByUTXOCohort<UTXODataset>,
}
impl UTXODatasets {
    /// Imports every cohort dataset in parallel, then aggregates their
    /// minimum initial states.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByUTXOCohort::<Option<UTXODataset>>::default();

        let path_dataset = config.path_datasets();

        // Import in parallel, then fill the cohort slots sequentially so the
        // first error aborts the whole import.
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, UTXODataset::import(&path_dataset, id, config)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                cohorts.get_mut(&id).replace(dataset?);
                Ok(())
            })?;

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            cohorts: cohorts.unwrap(),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s, config));

        Ok(s)
    }

    /// Inserts the block's data into every cohort dataset.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| cohort.insert(insert_data))
    }

    /// True when any cohort needs the durable states for this height/date.
    pub fn needs_durable_states(&self, height: Height, date: Date) -> bool {
        let needs_insert_utxo = self.needs_insert_utxo(height, date);
        let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
        let needs_insert_supply = self.needs_insert_supply(height, date);
        let needs_one_shot_states = self.needs_one_shot_states(height, date);

        needs_insert_utxo
            || needs_insert_capitalization
            || needs_insert_supply
            || needs_one_shot_states
    }

    /// True when any cohort needs the per-block one-shot states.
    pub fn needs_one_shot_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }

    /// True when any cohort needs the "sent" states (inputs / realized).
    pub fn needs_sent_states(&self, height: Height, date: Date) -> bool {
        self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    }

    // Each `needs_insert_*` is true when at least one cohort still needs it.
    pub fn needs_insert_utxo(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
    }

    pub fn needs_insert_capitalization(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
    }

    pub fn needs_insert_supply(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_supply(height, date))
    }

    pub fn needs_insert_price_paid(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
    }

    pub fn needs_insert_realized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
    }

    pub fn needs_insert_unrealized(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
    }

    pub fn needs_insert_input(&self, height: Height, date: Date) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_input(height, date))
    }

    /// Runs every cohort's compute pass.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| {
                cohort.compute(compute_data, closes, circulating_supply, market_cap)
            })
    }

    fn as_vec(&self) -> Vec<(&UTXODataset, UTXOCohortId)> {
        self.cohorts.as_vec()
    }

    fn as_mut_vec(&mut self) -> Vec<(&mut UTXODataset, UTXOCohortId)> {
        self.cohorts.as_mut_vec()
    }
}
impl AnyDatasets for UTXODatasets {
    /// Minimum initial states aggregated over every cohort dataset.
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Erases each cohort dataset to a shared `AnyDataset` trait object.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .map(|(dataset, _)| -> &(dyn AnyDataset + Send + Sync) { dataset })
            .collect()
    }

    /// Mutable counterpart of `to_any_dataset_vec`.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        self.as_mut_vec()
            .into_iter()
            .map(|(dataset, _)| -> &mut dyn AnyDataset { dataset })
            .collect()
    }
}

42
_src/parser/mod.rs Normal file
View File

@@ -0,0 +1,42 @@
use std::{thread::sleep, time::Duration};
use brk_parser::bitcoincore_rpc::{Client, RpcApi};
mod actions;
mod databases;
mod datasets;
mod price;
mod states;
pub use actions::*;
pub use databases::*;
pub use datasets::*;
use log::info;
pub use states::*;
use crate::structs::{Config, Exit};
/// Main parser loop: imports the databases/datasets, parses every known
/// block, then waits for new blocks — forever.
///
/// # Errors
/// Returns an error when the RPC node is unreachable or when importing or
/// parsing fails (previously, RPC failures panicked via `unwrap`).
pub fn main(config: &Config, rpc: &Client, exit: &Exit) -> color_eyre::Result<()> {
    loop {
        // Propagate RPC failures instead of panicking inside a fallible fn.
        let block_count = rpc.get_blockchain_info()?.blocks as usize;

        info!("{block_count} blocks found.");

        let mut databases = Databases::import(config);
        let mut datasets = Datasets::import(config)?;

        iter_blocks(config, rpc, block_count, exit.clone(), &mut databases, &mut datasets)?;

        // Optional pause between full passes, configured by the user.
        if let Some(delay) = config.delay() {
            sleep(Duration::from_secs(delay))
        }

        info!("Waiting for a new block...");

        // Poll the node once per second until the chain tip advances.
        while block_count == rpc.get_blockchain_info()?.blocks as usize {
            sleep(Duration::from_secs(1))
        }
    }
}

View File

@@ -0,0 +1,213 @@
#![allow(dead_code)]
use std::{collections::BTreeMap, fs};
use color_eyre::eyre::ContextCompat;
use itertools::Itertools;
use log::info;
use serde_json::Value;
use crate::{
io::Json,
structs::{Config, Date, Timestamp, OHLC},
utils::retry,
};
/// Namespace for reading/fetching BTC OHLC price data from Binance.
pub struct Binance;
impl Binance {
pub fn read_har_file(config: &Config) -> color_eyre::Result<BTreeMap<u32, OHLC>> {
info!("binance: read har file");
let path = config.path_inputs();
fs::create_dir_all(&path)?;
let path_binance_har = path.join("binance.har");
let json: BTreeMap<String, Value> = Json::import(&path_binance_har).unwrap_or_default();
Ok(json
.get("log")
.context("Expect object to have log attribute")?
.as_object()
.context("Expect to be an object")?
.get("entries")
.context("Expect object to have entries")?
.as_array()
.context("Expect to be an array")?
.iter()
.filter(|entry| {
entry
.as_object()
.unwrap()
.get("request")
.unwrap()
.as_object()
.unwrap()
.get("url")
.unwrap()
.as_str()
.unwrap()
.contains("/uiKlines")
})
.flat_map(|entry| {
let response = entry
.as_object()
.unwrap()
.get("response")
.unwrap()
.as_object()
.unwrap();
let content = response.get("content").unwrap().as_object().unwrap();
let text = content.get("text");
if text.is_none() {
return vec![];
}
let text = text.unwrap().as_str().unwrap();
let arrays: Value = serde_json::from_str(text).unwrap();
arrays
.as_array()
.unwrap()
.iter()
.map(|array| {
let array = array.as_array().unwrap();
let timestamp = (array.first().unwrap().as_u64().unwrap() / 1000) as u32;
let get_f32 = |index: usize| {
array
.get(index)
.unwrap()
.as_str()
.unwrap()
.parse::<f32>()
.unwrap()
};
(
timestamp,
OHLC {
open: get_f32(1),
high: get_f32(2),
low: get_f32(3),
close: get_f32(4),
},
)
})
.collect_vec()
})
.collect::<BTreeMap<_, _>>())
}
pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
info!("binance: fetch 1mn");
retry(
|_| {
let body: Value = reqwest::blocking::get(
"https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1m&limit=1000",
)?
.json()?;
Ok(body
.as_array()
.context("Expect to be an array")?
.iter()
.map(|value| -> color_eyre::Result<_> {
// [timestamp, open, high, low, close, volume, ...]
let array = value.as_array().context("Expect to be array")?;
let timestamp = (array
.first()
.context("Expect to have first")?
.as_u64()
.context("Expect to be convertible to u64")?
/ 1_000) as u32;
let get_f32 = |index: usize| -> color_eyre::Result<f32> {
Ok(array
.get(index)
.context("Expect to have index")?
.as_str()
.context("Expect to have &str")?
.parse::<f32>()?)
};
Ok((
timestamp,
OHLC {
open: get_f32(1)?,
high: get_f32(2)?,
low: get_f32(3)?,
close: get_f32(4)?,
},
))
})
.collect::<Result<BTreeMap<_, _>, _>>()?)
},
30,
10,
)
}
pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
info!("binance: fetch 1d");
retry(
|_| {
let body: Value = reqwest::blocking::get(
"https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1d",
)?
.json()?;
Ok(body
.as_array()
.context("Expect to be an array")?
.iter()
.map(|value| -> color_eyre::Result<_> {
// [timestamp, open, high, low, close, volume, ...]
let array = value.as_array().context("Expect to be array")?;
let date = Timestamp::from(
(array
.first()
.context("Expect to have first")?
.as_u64()
.context("Expect to be convertible to u64")?
/ 1_000) as u32,
)
.to_date();
let get_f32 = |index: usize| -> color_eyre::Result<f32> {
Ok(array
.get(index)
.context("Expect to have index")?
.as_str()
.context("Expect to have &str")?
.parse::<f32>()?)
};
Ok((
date,
OHLC {
open: get_f32(1)?,
high: get_f32(2)?,
low: get_f32(3)?,
close: get_f32(4)?,
},
))
})
.collect::<Result<BTreeMap<_, _>, _>>()?)
},
30,
10,
)
}
}

118
_src/parser/price/kibo.rs Normal file
View File

@@ -0,0 +1,118 @@
use std::{collections::BTreeMap, str::FromStr};
use chrono::NaiveDate;
use color_eyre::eyre::ContextCompat;
use log::info;
use serde_json::Value;
use crate::{
structs::{Date, DateMapChunkId, HeightMapChunkId, MapChunkId, OHLC},
utils::retry,
};
/// Namespace for fetching historical BTC prices from the kibo.money API.
pub struct Kibo;

const KIBO_OFFICIAL_URL: &str = "https://kibo.money/api";
const KIBO_OFFICIAL_BACKUP_URL: &str = "https://backup.kibo.money/api";
// Total retry attempts; the second half of attempts target the backup host.
const RETRIES: usize = 10;
impl Kibo {
    // The first half of retry attempts hit the official API; later attempts
    // fall back to the backup host.
    fn get_base_url(try_index: usize) -> &'static str {
        if try_index < RETRIES / 2 {
            KIBO_OFFICIAL_URL
        } else {
            KIBO_OFFICIAL_BACKUP_URL
        }
    }

    /// Fetches one chunk of per-block-height OHLC prices.
    pub fn fetch_height_prices(chunk_id: HeightMapChunkId) -> color_eyre::Result<Vec<OHLC>> {
        info!("kibo: fetch height prices");

        retry(
            |try_index| {
                let base_url = Self::get_base_url(try_index);

                let body: Value = reqwest::blocking::get(format!(
                    "{base_url}/height-to-price?chunk={}",
                    chunk_id.to_usize()
                ))?
                .json()?;

                // Response shape: { dataset: { map: [ohlc, ...] } }.
                let vec = body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("dataset")
                    .context("Expect object to have dataset")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("map")
                    .context("Expect to have map")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(Self::value_to_ohlc)
                    .collect::<Result<Vec<_>, _>>()?;

                Ok(vec)
            },
            30,
            RETRIES,
        )
    }

    /// Fetches one chunk of per-date OHLC prices, keyed by date.
    pub fn fetch_date_prices(chunk_id: DateMapChunkId) -> color_eyre::Result<BTreeMap<Date, OHLC>> {
        info!("kibo: fetch date prices");

        retry(
            |try_index| {
                let base_url = Self::get_base_url(try_index);

                let body: Value = reqwest::blocking::get(format!(
                    "{base_url}/date-to-price?chunk={}",
                    chunk_id.to_usize()
                ))?
                .json()?;

                // Response shape: { dataset: { map: { "YYYY-MM-DD": ohlc } } }.
                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("dataset")
                    .context("Expect object to have dataset")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("map")
                    .context("Expect to have map")?
                    .as_object()
                    .context("Expect to be an object")?
                    .iter()
                    .map(|(serialized_date, value)| -> color_eyre::Result<_> {
                        let date = Date::wrap(NaiveDate::from_str(serialized_date)?);

                        Ok((date, Self::value_to_ohlc(value)?))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            RETRIES,
        )
    }

    // Converts a JSON object with open/high/low/close number fields into an OHLC.
    fn value_to_ohlc(value: &Value) -> color_eyre::Result<OHLC> {
        let ohlc = value.as_object().context("Expect as_object to work")?;

        let get_value = |key: &str| -> color_eyre::Result<f32> {
            Ok(ohlc
                .get(key)
                .context("Expect get key to work")?
                .as_f64()
                .context("Expect as_f64 to work")? as f32)
        };

        Ok(OHLC {
            open: get_value("open")?,
            high: get_value("high")?,
            low: get_value("low")?,
            close: get_value("close")?,
        })
    }
}

133
_src/parser/price/kraken.rs Normal file
View File

@@ -0,0 +1,133 @@
use std::collections::BTreeMap;
use color_eyre::eyre::ContextCompat;
use log::info;
use serde_json::Value;
use crate::{
structs::{Date, Timestamp, OHLC},
utils::retry,
};
/// Namespace for fetching BTC/USD OHLC price data from Kraken's public API.
pub struct Kraken;
impl Kraken {
    /// Fetches the most recent one-minute OHLC candles for BTC/USD, keyed by
    /// unix timestamp (seconds).
    ///
    /// # Errors
    /// Fails after exhausting retries or when the response shape is
    /// unexpected — every shape check now returns an error (the timestamp
    /// check previously used `expect`, panicking on a malformed candle,
    /// inconsistent with `fetch_daily_prices`).
    pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        info!("kraken: fetch 1mn");

        retry(
            |_| {
                let body: Value = reqwest::blocking::get(
                    "https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1",
                )?
                .json()?;

                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("result")
                    .context("Expect object to have result")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("XXBTZUSD")
                    .context("Expect to have XXBTZUSD")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| -> color_eyre::Result<_> {
                        // [timestamp, open, high, low, close, ...] — numbers as strings.
                        let array = value.as_array().context("Expect as_array to work")?;

                        // Was `.expect(...)`: propagate instead of panicking.
                        let timestamp = array
                            .first()
                            .context("Expect first to work")?
                            .as_u64()
                            .context("Expect as_u64 to work")?
                            as u32;

                        let get_f32 = |index: usize| -> color_eyre::Result<f32> {
                            Ok(array
                                .get(index)
                                .context("Expect get index to work")?
                                .as_str()
                                .context("Expect as_str to work")?
                                .parse::<f32>()?)
                        };

                        Ok((
                            timestamp,
                            OHLC {
                                open: get_f32(1)?,
                                high: get_f32(2)?,
                                low: get_f32(3)?,
                                close: get_f32(4)?,
                            },
                        ))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            10,
        )
    }

    /// Fetches daily OHLC candles for BTC/USD, keyed by date.
    pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<Date, OHLC>> {
        info!("fetch kraken daily");

        retry(
            |_| {
                let body: Value = reqwest::blocking::get(
                    "https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1440",
                )?
                .json()?;

                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("result")
                    .context("Expect object to have result")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("XXBTZUSD")
                    .context("Expect to have XXBTZUSD")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| -> color_eyre::Result<_> {
                        let array = value.as_array().context("Expect as_array to work")?;

                        // Kraken timestamps are in seconds; convert to a date.
                        let date = Timestamp::from(
                            array
                                .first()
                                .context("Expect first to work")?
                                .as_u64()
                                .context("Expect as_u64 to work")?
                                as u32,
                        )
                        .to_date();

                        let get_f32 = |index: usize| -> color_eyre::Result<f32> {
                            Ok(array
                                .get(index)
                                .context("Expect get index to work")?
                                .as_str()
                                .context("Expect as_str to work")?
                                .parse::<f32>()?)
                        };

                        Ok((
                            date,
                            OHLC {
                                open: get_f32(1)?,
                                high: get_f32(2)?,
                                low: get_f32(3)?,
                                close: get_f32(4)?,
                            },
                        ))
                    })
                    .collect::<Result<BTreeMap<_, _>, _>>()?)
            },
            30,
            10,
        )
    }
}

7
_src/parser/price/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
mod binance;
mod kibo;
mod kraken;
pub use binance::*;
pub use kibo::*;
pub use kraken::*;

View File

@@ -0,0 +1,36 @@
use std::{
fmt::Debug,
fs, io,
path::{Path, PathBuf},
};
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use crate::{io::Serialization, structs::Config};
/// A serializable piece of parser state persisted to the states directory
/// between runs.
pub trait AnyState
where
    Self: Debug + Encode + Decode + Serialize + DeserializeOwned,
{
    /// File name of this state inside the states directory.
    fn name<'a>() -> &'a str;

    /// Full path of the serialized state file.
    fn path(config: &Config) -> PathBuf {
        config.path_states().join(Self::name())
    }

    /// Clears the in-memory state and deletes its file from disk.
    // NOTE(review): the in-memory state is cleared even when the file removal
    // fails — confirm callers accept that partial reset.
    fn reset(&mut self, config: &Config) -> color_eyre::Result<(), io::Error> {
        self.clear();

        fs::remove_file(Self::path(config))
    }

    /// Loads the state from its binary file.
    fn import(config: &Config) -> color_eyre::Result<Self> {
        Serialization::Binary.import(&Self::path(config))
    }

    /// Writes the state to its binary file.
    fn export(&self, config: &Config) -> color_eyre::Result<()> {
        Serialization::Binary.export(Path::new(&Self::path(config)), self)
    }

    /// Resets the in-memory state to its empty form.
    fn clear(&mut self);
}

View File

@@ -0,0 +1,121 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::{
parser::states::{DurableStates, IsZero, OneShotStates, PriceToValue, UnrealizedState},
structs::{Amount, Price},
};
#[derive(Default, Debug, Allocative)]
pub struct AddressCohortDurableStates {
    // Number of addresses in the cohort (f64: liquidity splits are fractional).
    pub address_count: f64,
    // Shared supply/utxo/capitalization accounting for the cohort.
    pub durable_states: DurableStates,
    // Distribution of the cohort's coins by the price they were acquired at.
    pub price_to_amount: PriceToValue<Amount>,
}
impl AddressCohortDurableStates {
    /// Adds an address's contribution to the cohort's accounting.
    #[allow(clippy::too_many_arguments)]
    pub fn increment(
        &mut self,
        address_count: f64,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
    ) -> color_eyre::Result<()> {
        self.address_count += address_count;

        self._crement(amount, utxo_count, realized_cap, mean_price_paid, true)
    }

    /// Removes an address's contribution from the cohort's accounting.
    #[allow(clippy::too_many_arguments)]
    pub fn decrement(
        &mut self,
        address_count: f64,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
    ) -> color_eyre::Result<()> {
        self.address_count -= address_count;

        self._crement(amount, utxo_count, realized_cap, mean_price_paid, false)
    }

    // Shared body of increment/decrement: updates the durable states, then the
    // price-paid distribution (keyed by the address's mean price paid).
    #[allow(clippy::too_many_arguments)]
    pub fn _crement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
        mean_price_paid: Price,
        increment: bool,
    ) -> color_eyre::Result<()> {
        if increment {
            self.durable_states
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.durable_states
                .decrement(amount, utxo_count, realized_cap)
        }
        .inspect_err(|report| {
            // Dump the failure before propagating to ease debugging.
            dbg!(report);
        })?;

        // Zero amounts carry no price information, so skip the distribution.
        if !amount.is_zero()? {
            if increment {
                self.price_to_amount.increment(mean_price_paid, amount);
            } else {
                self.price_to_amount
                    .decrement(mean_price_paid, amount)
                    .inspect_err(|report| {
                        dbg!(report, "cents_to_split_amount decrement",);
                    })?;
            }
        }

        Ok(())
    }

    /// Walks the price-paid distribution once to derive the cohort's one-shot
    /// states (price paid percentiles, unrealized PnL per block and — when a
    /// date price is given — per date).
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> OneShotStates {
        let mut one_shot_states = OneShotStates::default();

        // Only allocate a date state when this block closes a date.
        if date_price.is_some() {
            one_shot_states
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }

        let one_shot_states_ref = &mut one_shot_states;

        let supply = self.durable_states.supply_state.supply();

        self.price_to_amount.iterate(supply, |price_paid, amount| {
            one_shot_states_ref
                .price_paid_state
                .iterate(price_paid, amount, supply);

            one_shot_states_ref
                .unrealized_block_state
                .iterate(price_paid, block_price, amount);

            if let Some(unrealized_date_state) = one_shot_states_ref.unrealized_date_state.as_mut()
            {
                unrealized_date_state.iterate(price_paid, date_price.unwrap(), amount);
            }
        });

        one_shot_states
    }
}
// Merges two cohort states field-wise (used when combining partial results).
impl AddAssign for AddressCohortDurableStates {
    fn add_assign(&mut self, rhs: Self) {
        self.address_count += rhs.address_count;
        self.durable_states += rhs.durable_states;
        self.price_to_amount += rhs.price_to_amount;
    }
}

View File

@@ -0,0 +1,80 @@
use crate::structs::{AddressLiquidity, AddressSize, AddressSplit, AddressType};
/// Identifier of an address cohort: the aggregate (`All`), a liquidity bucket,
/// a size bucket, or an address script type.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub enum AddressCohortId {
    All,
    // Liquidity buckets.
    Illiquid,
    Liquid,
    HighlyLiquid,
    // Size buckets, from smallest to largest balance.
    Plankton,
    Shrimp,
    Crab,
    Fish,
    Shark,
    Whale,
    Humpback,
    Megalodon,
    // Address script types.
    P2PK,
    P2PKH,
    P2SH,
    P2WPKH,
    P2WSH,
    P2TR,
}
impl AddressCohortId {
pub fn as_name(&self) -> Option<&str> {
match self {
Self::All => None,
Self::Illiquid => Some("illiquid"),
Self::Liquid => Some("liquid"),
Self::HighlyLiquid => Some("highly_liquid"),
Self::Plankton => Some("plankton"),
Self::Shrimp => Some("shrimp"),
Self::Crab => Some("crab"),
Self::Fish => Some("fish"),
Self::Shark => Some("shark"),
Self::Whale => Some("whale"),
Self::Humpback => Some("humpback"),
Self::Megalodon => Some("megalodon"),
Self::P2PK => Some("p2pk"),
Self::P2PKH => Some("p2pkh"),
Self::P2SH => Some("p2sh"),
Self::P2WPKH => Some("p2wpkh"),
Self::P2WSH => Some("p2wsh"),
Self::P2TR => Some("p2tr"),
}
}
pub fn as_split(&self) -> AddressSplit {
match self {
Self::All => AddressSplit::All,
Self::Illiquid => AddressSplit::Liquidity(AddressLiquidity::Illiquid),
Self::Liquid => AddressSplit::Liquidity(AddressLiquidity::Liquid),
Self::HighlyLiquid => AddressSplit::Liquidity(AddressLiquidity::HighlyLiquid),
Self::Plankton => AddressSplit::Size(AddressSize::Plankton),
Self::Shrimp => AddressSplit::Size(AddressSize::Shrimp),
Self::Crab => AddressSplit::Size(AddressSize::Crab),
Self::Fish => AddressSplit::Size(AddressSize::Fish),
Self::Shark => AddressSplit::Size(AddressSize::Shark),
Self::Whale => AddressSplit::Size(AddressSize::Whale),
Self::Humpback => AddressSplit::Size(AddressSize::Humpback),
Self::Megalodon => AddressSplit::Size(AddressSize::Megalodon),
Self::P2PK => AddressSplit::Type(AddressType::P2PK),
Self::P2PKH => AddressSplit::Type(AddressType::P2PKH),
Self::P2SH => AddressSplit::Type(AddressType::P2SH),
Self::P2WPKH => AddressSplit::Type(AddressType::P2WPKH),
Self::P2WSH => AddressSplit::Type(AddressType::P2WSH),
Self::P2TR => AddressSplit::Type(AddressType::P2TR),
}
}
}

View File

@@ -0,0 +1,156 @@
use std::iter::Sum;
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use crate::{
parser::databases::AddressIndexToAddressData,
structs::{AddressData, AddressRealizedData, Amount, Price},
};
use super::{AddressCohortDurableStates, AddressCohortsOneShotStates, SplitByAddressCohort};
/// Durable accounting states for every address cohort, addressable per cohort
/// via the inner `SplitByAddressCohort` (exposed through `Deref`).
#[derive(Default, Deref, DerefMut, Allocative)]
pub struct AddressCohortsDurableStates(SplitByAddressCohort<AddressCohortDurableStates>);
impl AddressCohortsDurableStates {
    /// Rebuilds every cohort's durable state from the full
    /// address-index-to-address-data database.
    pub fn init(address_index_to_address_data: &mut AddressIndexToAddressData) -> Self {
        // NOTE(review): "addres" is a typo in the upstream method name — kept
        // as-is since it is defined elsewhere.
        address_index_to_address_data.compute_addres_cohorts_durable_states()
    }

    /// Applies one address state transition: removes the address's previous
    /// (initial) state from the cohorts it used to belong to, then adds its
    /// current state to the cohorts it now belongs to.
    pub fn iterate(
        &mut self,
        address_realized_data: &AddressRealizedData,
        current_address_data: &AddressData,
    ) -> color_eyre::Result<()> {
        self.decrement(&address_realized_data.initial_address_data)
            .inspect_err(|report| {
                // Dump full context before bubbling the error up.
                dbg!(report);
                dbg!(address_realized_data, current_address_data);
                dbg!("decrement initial address_data");
            })?;

        self.increment(current_address_data).inspect_err(|report| {
            dbg!(report);
            dbg!(address_realized_data, current_address_data);
            dbg!("increment address_data");
        })?;

        Ok(())
    }

    /// Should always increment using current address data state
    pub fn increment(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, true)
    }

    /// Should always decrement using initial address data state
    fn decrement(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, false)
    }

    /// Shared increment/decrement implementation: distributes the address's
    /// totals (count, amount, utxo count, realized cap, cost basis) to every
    /// bucket it belongs to; liquidity buckets get proportional shares.
    fn _crement(&mut self, address_data: &AddressData, increment: bool) -> color_eyre::Result<()> {
        // No need to either insert or remove if empty
        if address_data.is_empty() {
            return Ok(());
        }

        let amount = address_data.amount;
        let utxo_count = address_data.outputs_len as f64;
        let realized_cap = address_data.realized_cap;
        // Average cost basis per coin. Assumes `is_empty()` above rules out a
        // zero amount (division by zero) — TODO confirm.
        let mean_price_paid = address_data.realized_cap / amount;

        let liquidity_classification = address_data.compute_liquidity_classification();

        // Pre-split each total across the three liquidity classes.
        let split_address_count = liquidity_classification.split(1.0);
        let split_sat_amount = liquidity_classification.split(amount.to_sat() as f64);
        let split_utxo_count = liquidity_classification.split(utxo_count);
        let split_realized_cap = liquidity_classification.split(realized_cap.to_dollar());

        self.0.iterate(
            address_data,
            |state| {
                // Unsplit it must be one
                let address_count = 1.0;

                if increment {
                    state.increment(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                } else {
                    state.decrement(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                }
            },
            |liquidity, state| {
                // Each liquidity bucket receives its proportional share.
                let address_count = split_address_count.from(liquidity);
                let amount = Amount::from_sat(split_sat_amount.from(liquidity).floor() as u64);
                let utxo_count = split_utxo_count.from(liquidity);
                let realized_cap = Price::from_dollar(split_realized_cap.from(liquidity));

                if increment {
                    state.increment(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                } else {
                    state.decrement(
                        address_count,
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                    )
                }
            },
        )?;

        Ok(())
    }

    /// Recomputes the per-block one-shot states (price-paid percentiles and
    /// unrealized PnL at `block_price`, plus at `date_price` when provided)
    /// for every cohort in parallel, then gathers them by cohort id.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> AddressCohortsOneShotStates {
        let mut one_shot_states = AddressCohortsOneShotStates::default();

        self.as_vec()
            .into_par_iter()
            .map(|(states, address_cohort_id)| {
                (
                    address_cohort_id,
                    states.compute_one_shot_states(block_price, date_price),
                )
            })
            .collect::<Vec<_>>()
            .into_iter()
            .for_each(|(address_cohort_id, states)| {
                *one_shot_states.get_mut_from_id(&address_cohort_id) = states;
            });

        one_shot_states
    }
}
/// Sums a sequence of cohort-state snapshots by merging each one, bucket by
/// bucket, into an initially-default accumulator (delegates to
/// `SplitByAddressCohort`'s `AddAssign`).
impl Sum for AddressCohortsDurableStates {
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let mut total = Self::default();
        for states in iter {
            total.0 += states.0;
        }
        total
    }
}

View File

@@ -0,0 +1,45 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::InputState,
structs::{AddressRealizedData, Amount, LiquidityClassification},
};
use super::SplitByAddressCohort;
/// Per-cohort accumulators for spend-side (input) activity of a block:
/// UTXOs destroyed and volume sent.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsInputStates(SplitByAddressCohort<InputState>);
impl AddressCohortsInputStates {
    /// Folds one address's realized spend activity (UTXOs destroyed and sats
    /// sent) into every cohort bucket the address belongs to.
    ///
    /// The three liquidity buckets receive fractional shares of the totals
    /// according to `liquidity_classification`; every other bucket receives
    /// the raw values.
    pub fn iterate_input(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let destroyed = realized_data.utxos_destroyed as f64;
        let sent = realized_data.sent;

        // Pre-split the totals across the three liquidity classes.
        let destroyed_split = liquidity_classification.split(destroyed);
        let sent_split = liquidity_classification.split(sent.to_sat() as f64);

        self.iterate(
            &realized_data.initial_address_data,
            move |state: &mut InputState| {
                state.iterate(destroyed, sent);
                Ok(())
            },
            move |liquidity, state: &mut InputState| {
                state.iterate(
                    destroyed_split.from(liquidity),
                    // Shares are fractional sats; round back to whole sats.
                    Amount::from_sat(sent_split.from(liquidity).round() as u64),
                );
                Ok(())
            },
        )
    }
}

View File

@@ -0,0 +1,8 @@
use derive_deref::{Deref, DerefMut};
use crate::parser::states::OneShotStates;
use super::SplitByAddressCohort;
/// Per-cohort one-shot (recomputed from scratch each block) states:
/// price-paid percentiles and unrealized PnL.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOneShotStates(pub SplitByAddressCohort<OneShotStates>);

View File

@@ -0,0 +1,45 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::OutputState,
structs::{AddressRealizedData, Amount, LiquidityClassification},
};
use super::SplitByAddressCohort;
/// Per-cohort accumulators for receive-side (output) activity of a block:
/// UTXOs created and volume received.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOutputStates(SplitByAddressCohort<OutputState>);
impl AddressCohortsOutputStates {
    /// Folds one address's realized receive activity (UTXOs created and sats
    /// received) into every cohort bucket the address belongs to.
    ///
    /// The three liquidity buckets receive fractional shares of the totals
    /// according to `liquidity_classification`; every other bucket receives
    /// the raw values.
    pub fn iterate_output(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let created = realized_data.utxos_created as f64;
        let received = realized_data.received;

        // Pre-split the totals across the three liquidity classes.
        let created_split = liquidity_classification.split(created);
        let received_split = liquidity_classification.split(received.to_sat() as f64);

        self.iterate(
            &realized_data.initial_address_data,
            move |state: &mut OutputState| {
                state.iterate(created, received);
                Ok(())
            },
            move |liquidity, state: &mut OutputState| {
                state.iterate(
                    created_split.from(liquidity),
                    // Shares are fractional sats; round back to whole sats.
                    Amount::from_sat(received_split.from(liquidity).round() as u64),
                );
                Ok(())
            },
        )
    }
}

View File

@@ -0,0 +1,68 @@
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::RealizedState,
structs::{AddressRealizedData, LiquidityClassification, Price},
};
use super::SplitByAddressCohort;
/// Per-cohort accumulators for realized profit/loss and (adjusted) value
/// created/destroyed during a block.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsRealizedStates(SplitByAddressCohort<RealizedState>);
impl AddressCohortsRealizedStates {
    /// Folds one address's realized PnL figures into every cohort bucket it
    /// belongs to. Liquidity buckets receive proportional shares (split in
    /// cents, then converted back to `Price`); other buckets get raw values.
    pub fn iterate_realized(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let realized_profit = realized_data.profit;
        let realized_loss = realized_data.loss;
        let value_created = realized_data.value_created;
        let adjusted_value_created = realized_data.adjusted_value_created;
        let value_destroyed = realized_data.value_destroyed;
        let adjusted_value_destroyed = realized_data.adjusted_value_destroyed;

        // Used for the "all"/type/size buckets: raw, unsplit values.
        let normal_iteration = move |state: &mut RealizedState| -> color_eyre::Result<()> {
            state.iterate(
                realized_profit,
                realized_loss,
                value_created,
                adjusted_value_created,
                value_destroyed,
                adjusted_value_destroyed,
            );
            Ok(())
        };

        // Splits are computed in cents (f64) so the three liquidity shares
        // can be fractional before being truncated back to whole cents.
        let split_realized_profit =
            liquidity_classification.split(realized_profit.to_cent() as f64);
        let split_realized_loss = liquidity_classification.split(realized_loss.to_cent() as f64);
        let split_value_created = liquidity_classification.split(value_created.to_cent() as f64);
        let split_adjusted_value_created =
            liquidity_classification.split(adjusted_value_created.to_cent() as f64);
        let split_value_destroyed =
            liquidity_classification.split(value_destroyed.to_cent() as f64);
        let split_adjusted_value_destroyed =
            liquidity_classification.split(adjusted_value_destroyed.to_cent() as f64);

        // Used for the three liquidity buckets: proportional shares.
        let liquified_iteration =
            move |liquidity, state: &mut RealizedState| -> color_eyre::Result<()> {
                state.iterate(
                    Price::from_cent(split_realized_profit.from(liquidity) as u64),
                    Price::from_cent(split_realized_loss.from(liquidity) as u64),
                    Price::from_cent(split_value_created.from(liquidity) as u64),
                    Price::from_cent(split_adjusted_value_created.from(liquidity) as u64),
                    Price::from_cent(split_value_destroyed.from(liquidity) as u64),
                    Price::from_cent(split_adjusted_value_destroyed.from(liquidity) as u64),
                );
                Ok(())
            };

        self.iterate(
            &realized_data.initial_address_data,
            normal_iteration,
            liquified_iteration,
        )
    }
}

View File

@@ -0,0 +1,17 @@
mod cohort_durable_states;
mod cohort_id;
mod cohorts_durable_states;
mod cohorts_input_states;
mod cohorts_one_shot_states;
mod cohorts_output_states;
mod cohorts_realized_states;
mod split_by_address_cohort;
pub use cohort_durable_states::*;
pub use cohort_id::*;
pub use cohorts_durable_states::*;
pub use cohorts_input_states::*;
pub use cohorts_one_shot_states::*;
pub use cohorts_output_states::*;
pub use cohorts_realized_states::*;
pub use split_by_address_cohort::*;

View File

@@ -0,0 +1,273 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::{AddressData, AddressLiquidity, AddressSize, AddressSplit, AddressType};
use super::AddressCohortId;
/// One value of `T` per address cohort bucket: the unsplit total (`all`),
/// the three liquidity classes, the eight balance-size classes, and the six
/// tracked script types.
#[derive(Default, Allocative)]
pub struct SplitByAddressCohort<T> {
    pub all: T,
    pub illiquid: T,
    pub liquid: T,
    pub highly_liquid: T,
    pub plankton: T,
    pub shrimp: T,
    pub crab: T,
    pub fish: T,
    pub shark: T,
    pub whale: T,
    pub humpback: T,
    pub megalodon: T,
    pub p2pk: T,
    pub p2pkh: T,
    pub p2sh: T,
    pub p2wpkh: T,
    pub p2wsh: T,
    pub p2tr: T,
}
impl<T> SplitByAddressCohort<T> {
    /// Returns the bucket selected by `split`, or `None` for address kinds
    /// that have no dedicated cohort (multisig, op_return, ...).
    pub fn get(&self, split: &AddressSplit) -> Option<&T> {
        match &split {
            AddressSplit::All => Some(&self.all),
            AddressSplit::Liquidity(address_liquidity) => match address_liquidity {
                AddressLiquidity::Illiquid => Some(&self.illiquid),
                AddressLiquidity::Liquid => Some(&self.liquid),
                AddressLiquidity::HighlyLiquid => Some(&self.highly_liquid),
            },
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&self.p2pk),
                AddressType::P2PKH => Some(&self.p2pkh),
                AddressType::P2SH => Some(&self.p2sh),
                AddressType::P2WPKH => Some(&self.p2wpkh),
                AddressType::P2WSH => Some(&self.p2wsh),
                AddressType::P2TR => Some(&self.p2tr),
                // These script kinds are not tracked as cohorts.
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&self.plankton),
                AddressSize::Shrimp => Some(&self.shrimp),
                AddressSize::Crab => Some(&self.crab),
                AddressSize::Fish => Some(&self.fish),
                AddressSize::Shark => Some(&self.shark),
                AddressSize::Whale => Some(&self.whale),
                AddressSize::Humpback => Some(&self.humpback),
                AddressSize::Megalodon => Some(&self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }

    /// Visits every bucket `address_data` belongs to: `normal_iteration` is
    /// applied to the `all` bucket and (when tracked) the type and size
    /// buckets; `liquified_iteration` is applied once per liquidity class so
    /// callers can pass proportional shares.
    pub fn iterate(
        &mut self,
        address_data: &AddressData,
        normal_iteration: impl Fn(&mut T) -> color_eyre::Result<()>,
        liquified_iteration: impl Fn(AddressLiquidity, &mut T) -> color_eyre::Result<()>,
    ) -> color_eyre::Result<()> {
        normal_iteration(self.get_mut_from_split(&AddressSplit::All).unwrap())?;

        let mut _liquified_iteration = |address_liquidity| {
            liquified_iteration(
                address_liquidity,
                self.get_mut_from_split(&AddressSplit::Liquidity(address_liquidity))
                    .unwrap(),
            )
        };

        _liquified_iteration(AddressLiquidity::Illiquid)?;
        _liquified_iteration(AddressLiquidity::Liquid)?;
        _liquified_iteration(AddressLiquidity::HighlyLiquid)?;

        // Untracked script kinds / empty size return None and are skipped.
        if let Some(state) = self.get_mut_from_split(&AddressSplit::Type(address_data.address_type))
        {
            normal_iteration(state)?;
        }

        if let Some(state) = self.get_mut_from_split(&AddressSplit::Size(AddressSize::from_amount(
            address_data.amount,
        ))) {
            normal_iteration(state)?;
        }

        Ok(())
    }

    /// Mutable counterpart of `get` (same bucket selection rules).
    fn get_mut_from_split(&mut self, split: &AddressSplit) -> Option<&mut T> {
        match &split {
            AddressSplit::All => Some(&mut self.all),
            AddressSplit::Liquidity(address_liquidity) => match address_liquidity {
                AddressLiquidity::Illiquid => Some(&mut self.illiquid),
                AddressLiquidity::Liquid => Some(&mut self.liquid),
                AddressLiquidity::HighlyLiquid => Some(&mut self.highly_liquid),
            },
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&mut self.p2pk),
                AddressType::P2PKH => Some(&mut self.p2pkh),
                AddressType::P2SH => Some(&mut self.p2sh),
                AddressType::P2WPKH => Some(&mut self.p2wpkh),
                AddressType::P2WSH => Some(&mut self.p2wsh),
                AddressType::P2TR => Some(&mut self.p2tr),
                // These script kinds are not tracked as cohorts.
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&mut self.plankton),
                AddressSize::Shrimp => Some(&mut self.shrimp),
                AddressSize::Crab => Some(&mut self.crab),
                AddressSize::Fish => Some(&mut self.fish),
                AddressSize::Shark => Some(&mut self.shark),
                AddressSize::Whale => Some(&mut self.whale),
                AddressSize::Humpback => Some(&mut self.humpback),
                AddressSize::Megalodon => Some(&mut self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }

    /// Bucket lookup by cohort id — total mapping, never fails.
    pub fn get_mut_from_id(&mut self, id: &AddressCohortId) -> &mut T {
        match id {
            AddressCohortId::All => &mut self.all,
            AddressCohortId::Illiquid => &mut self.illiquid,
            AddressCohortId::Liquid => &mut self.liquid,
            AddressCohortId::HighlyLiquid => &mut self.highly_liquid,
            AddressCohortId::Plankton => &mut self.plankton,
            AddressCohortId::Shrimp => &mut self.shrimp,
            AddressCohortId::Crab => &mut self.crab,
            AddressCohortId::Fish => &mut self.fish,
            AddressCohortId::Shark => &mut self.shark,
            AddressCohortId::Whale => &mut self.whale,
            AddressCohortId::Humpback => &mut self.humpback,
            AddressCohortId::Megalodon => &mut self.megalodon,
            AddressCohortId::P2PK => &mut self.p2pk,
            AddressCohortId::P2PKH => &mut self.p2pkh,
            AddressCohortId::P2SH => &mut self.p2sh,
            AddressCohortId::P2WPKH => &mut self.p2wpkh,
            AddressCohortId::P2WSH => &mut self.p2wsh,
            AddressCohortId::P2TR => &mut self.p2tr,
        }
    }

    /// Every bucket paired with its cohort id (shared references).
    pub fn as_vec(&self) -> Vec<(&T, AddressCohortId)> {
        vec![
            (&self.all, AddressCohortId::All),
            (&self.illiquid, AddressCohortId::Illiquid),
            (&self.liquid, AddressCohortId::Liquid),
            (&self.highly_liquid, AddressCohortId::HighlyLiquid),
            (&self.plankton, AddressCohortId::Plankton),
            (&self.shrimp, AddressCohortId::Shrimp),
            (&self.crab, AddressCohortId::Crab),
            (&self.fish, AddressCohortId::Fish),
            (&self.shark, AddressCohortId::Shark),
            (&self.whale, AddressCohortId::Whale),
            (&self.humpback, AddressCohortId::Humpback),
            (&self.megalodon, AddressCohortId::Megalodon),
            (&self.p2pk, AddressCohortId::P2PK),
            (&self.p2pkh, AddressCohortId::P2PKH),
            (&self.p2sh, AddressCohortId::P2SH),
            (&self.p2wpkh, AddressCohortId::P2WPKH),
            (&self.p2wsh, AddressCohortId::P2WSH),
            (&self.p2tr, AddressCohortId::P2TR),
        ]
    }

    /// Every bucket paired with its cohort id (mutable references).
    pub fn as_mut_vec(&mut self) -> Vec<(&mut T, AddressCohortId)> {
        vec![
            (&mut self.all, AddressCohortId::All),
            (&mut self.illiquid, AddressCohortId::Illiquid),
            (&mut self.liquid, AddressCohortId::Liquid),
            (&mut self.highly_liquid, AddressCohortId::HighlyLiquid),
            (&mut self.plankton, AddressCohortId::Plankton),
            (&mut self.shrimp, AddressCohortId::Shrimp),
            (&mut self.crab, AddressCohortId::Crab),
            (&mut self.fish, AddressCohortId::Fish),
            (&mut self.shark, AddressCohortId::Shark),
            (&mut self.whale, AddressCohortId::Whale),
            (&mut self.humpback, AddressCohortId::Humpback),
            (&mut self.megalodon, AddressCohortId::Megalodon),
            (&mut self.p2pk, AddressCohortId::P2PK),
            (&mut self.p2pkh, AddressCohortId::P2PKH),
            (&mut self.p2sh, AddressCohortId::P2SH),
            (&mut self.p2wpkh, AddressCohortId::P2WPKH),
            (&mut self.p2wsh, AddressCohortId::P2WSH),
            (&mut self.p2tr, AddressCohortId::P2TR),
        ]
    }
}
/// Bucket-wise merge of two splits (used to sum states across threads).
impl<T> AddAssign for SplitByAddressCohort<T>
where
    T: AddAssign,
{
    fn add_assign(&mut self, rhs: Self) {
        self.all += rhs.all;
        self.illiquid += rhs.illiquid;
        self.liquid += rhs.liquid;
        self.highly_liquid += rhs.highly_liquid;
        self.plankton += rhs.plankton;
        self.shrimp += rhs.shrimp;
        self.crab += rhs.crab;
        self.fish += rhs.fish;
        self.shark += rhs.shark;
        self.whale += rhs.whale;
        self.humpback += rhs.humpback;
        self.megalodon += rhs.megalodon;
        self.p2pk += rhs.p2pk;
        self.p2pkh += rhs.p2pkh;
        self.p2sh += rhs.p2sh;
        self.p2wpkh += rhs.p2wpkh;
        self.p2wsh += rhs.p2wsh;
        self.p2tr += rhs.p2tr;
    }
}
impl<T> SplitByAddressCohort<Option<T>> {
    /// Unwraps every bucket.
    ///
    /// # Panics
    /// Panics if any bucket is `None` — callers must have filled all of them.
    pub fn unwrap(self) -> SplitByAddressCohort<T> {
        SplitByAddressCohort {
            all: self.all.unwrap(),
            illiquid: self.illiquid.unwrap(),
            liquid: self.liquid.unwrap(),
            highly_liquid: self.highly_liquid.unwrap(),
            plankton: self.plankton.unwrap(),
            shrimp: self.shrimp.unwrap(),
            crab: self.crab.unwrap(),
            fish: self.fish.unwrap(),
            shark: self.shark.unwrap(),
            whale: self.whale.unwrap(),
            humpback: self.humpback.unwrap(),
            megalodon: self.megalodon.unwrap(),
            p2pk: self.p2pk.unwrap(),
            p2pkh: self.p2pkh.unwrap(),
            p2sh: self.p2sh.unwrap(),
            p2wpkh: self.p2wpkh.unwrap(),
            p2wsh: self.p2wsh.unwrap(),
            p2tr: self.p2tr.unwrap(),
        }
    }
}

View File

@@ -0,0 +1,30 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::Price;
/// Running realized capitalization (sum of price paid for the held supply)
/// of a cohort.
#[derive(Debug, Default, Allocative)]
pub struct CapitalizationState {
    realized_cap: Price,
}

impl CapitalizationState {
    /// Current realized cap.
    pub fn realized_cap(&self) -> Price {
        self.realized_cap
    }

    /// Adds newly acquired value at its acquisition price.
    pub fn increment(&mut self, realized_cap: Price) {
        self.realized_cap += realized_cap;
    }

    /// Removes spent value at its original acquisition price.
    pub fn decrement(&mut self, realized_cap: Price) {
        self.realized_cap -= realized_cap;
    }
}

/// Merge two states (used when summing partial results).
impl AddAssign for CapitalizationState {
    fn add_assign(&mut self, rhs: Self) {
        self.realized_cap += rhs.realized_cap;
    }
}

View File

@@ -0,0 +1,50 @@
use std::ops::AddAssign;
use allocative::Allocative;
use crate::structs::{Amount, Price};
use super::{CapitalizationState, SupplyState, UTXOState};
/// The three long-lived aggregates maintained per cohort: realized cap,
/// supply, and UTXO count.
#[derive(Default, Debug, Allocative)]
pub struct DurableStates {
    pub capitalization_state: CapitalizationState,
    pub supply_state: SupplyState,
    pub utxo_state: UTXOState,
}

impl DurableStates {
    /// Adds a batch to all three aggregates.
    ///
    /// Always returns `Ok` today; the `Result` keeps the signature symmetric
    /// with `decrement`, which can fail.
    pub fn increment(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        self.utxo_state.increment(utxo_count);
        self.capitalization_state.increment(realized_cap);
        self.supply_state.increment(amount);

        Ok(())
    }

    /// Removes a batch from all three aggregates.
    ///
    /// Errors if removing more supply or UTXOs than are currently tracked
    /// (a broken accounting invariant).
    pub fn decrement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        self.utxo_state.decrement(utxo_count)?;
        self.capitalization_state.decrement(realized_cap);
        self.supply_state.decrement(amount)?;

        Ok(())
    }
}

/// Merge two states component-wise.
impl AddAssign for DurableStates {
    fn add_assign(&mut self, rhs: Self) {
        self.capitalization_state += rhs.capitalization_state;
        self.supply_state += rhs.supply_state;
        self.utxo_state += rhs.utxo_state;
    }
}

View File

@@ -0,0 +1,22 @@
use crate::structs::Amount;
/// Running totals of transaction inputs for a cohort: number of UTXOs spent
/// and total volume sent.
#[derive(Debug, Default)]
pub struct InputState {
    // f64 because liquidity splits contribute fractional counts.
    count: f64,
    volume: Amount,
}

impl InputState {
    /// Inputs (spent UTXOs) accumulated so far.
    pub fn count(&self) -> f64 {
        self.count
    }

    /// Volume sent so far.
    pub fn volume(&self) -> Amount {
        self.volume
    }

    /// Accumulates one batch of spent inputs.
    pub fn iterate(&mut self, count: f64, volume: Amount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -0,0 +1,23 @@
mod capitalization_state;
mod durable_states;
mod input_state;
mod one_shot_states;
mod output_state;
mod price_paid_state;
mod price_to_value;
mod realized_state;
mod supply_state;
mod unrealized_state;
mod utxo_state;
pub use capitalization_state::*;
pub use durable_states::*;
pub use input_state::*;
pub use one_shot_states::*;
pub use output_state::*;
pub use price_paid_state::*;
pub use price_to_value::*;
pub use realized_state::*;
pub use supply_state::*;
pub use unrealized_state::*;
pub use utxo_state::*;

View File

@@ -0,0 +1,9 @@
use super::{PricePaidState, UnrealizedState};
/// States recomputed from scratch each block (as opposed to durable ones):
/// price-paid percentiles, unrealized PnL at the block price, and — when a
/// day boundary is crossed — unrealized PnL at the date price.
#[derive(Default)]
pub struct OneShotStates {
    pub price_paid_state: PricePaidState,
    pub unrealized_block_state: UnrealizedState,
    // `Some` only when a date price was supplied for this pass.
    pub unrealized_date_state: Option<UnrealizedState>,
}

View File

@@ -0,0 +1,22 @@
use crate::structs::Amount;
/// Running totals of transaction outputs for a cohort: number of UTXOs
/// created and total volume received.
#[derive(Debug, Default)]
pub struct OutputState {
    // f64 because liquidity splits contribute fractional counts.
    count: f64,
    volume: Amount,
}

impl OutputState {
    /// Outputs (created UTXOs) accumulated so far.
    ///
    /// Restored (was commented out) for parity with `InputState::count`.
    pub fn count(&self) -> f64 {
        self.count
    }

    /// Volume received so far.
    ///
    /// Restored (was commented out) for parity with `InputState::volume`.
    pub fn volume(&self) -> Amount {
        self.volume
    }

    /// Accumulates one batch of created outputs.
    pub fn iterate(&mut self, count: f64, volume: Amount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -0,0 +1,286 @@
use crate::structs::{Amount, Price};
/// Accumulator for price-paid percentiles: the price below which N% of a
/// cohort's supply was acquired. Each slot is `None` until `iterate` has
/// walked past that share of the supply (in ascending price order).
#[derive(Default, Debug)]
pub struct PricePaidState {
    pp_05p: Option<Price>,
    pp_10p: Option<Price>,
    pp_15p: Option<Price>,
    pp_20p: Option<Price>,
    pp_25p: Option<Price>,
    pp_30p: Option<Price>,
    pp_35p: Option<Price>,
    pp_40p: Option<Price>,
    pp_45p: Option<Price>,
    pp_median: Option<Price>,
    pp_55p: Option<Price>,
    pp_60p: Option<Price>,
    pp_65p: Option<Price>,
    pp_70p: Option<Price>,
    pp_75p: Option<Price>,
    pp_80p: Option<Price>,
    pp_85p: Option<Price>,
    pp_90p: Option<Price>,
    pp_95p: Option<Price>,
    // Supply processed so far across all calls to `iterate`.
    processed_amount: Amount,
}
impl PricePaidState {
pub fn pp_05p(&self) -> Option<Price> {
self.pp_05p
}
pub fn pp_10p(&self) -> Option<Price> {
self.pp_10p
}
pub fn pp_15p(&self) -> Option<Price> {
self.pp_15p
}
pub fn pp_20p(&self) -> Option<Price> {
self.pp_20p
}
pub fn pp_25p(&self) -> Option<Price> {
self.pp_25p
}
pub fn pp_30p(&self) -> Option<Price> {
self.pp_30p
}
pub fn pp_35p(&self) -> Option<Price> {
self.pp_35p
}
pub fn pp_40p(&self) -> Option<Price> {
self.pp_40p
}
pub fn pp_45p(&self) -> Option<Price> {
self.pp_45p
}
pub fn pp_median(&self) -> Option<Price> {
self.pp_median
}
pub fn pp_55p(&self) -> Option<Price> {
self.pp_55p
}
pub fn pp_60p(&self) -> Option<Price> {
self.pp_60p
}
pub fn pp_65p(&self) -> Option<Price> {
self.pp_65p
}
pub fn pp_70p(&self) -> Option<Price> {
self.pp_70p
}
pub fn pp_75p(&self) -> Option<Price> {
self.pp_75p
}
pub fn pp_80p(&self) -> Option<Price> {
self.pp_80p
}
pub fn pp_85p(&self) -> Option<Price> {
self.pp_85p
}
pub fn pp_90p(&self) -> Option<Price> {
self.pp_90p
}
pub fn pp_95p(&self) -> Option<Price> {
self.pp_95p
}
pub fn iterate(&mut self, price: Price, amount: Amount, supply: Amount) {
let PricePaidState {
processed_amount: processed_supply,
pp_05p,
pp_10p,
pp_15p,
pp_20p,
pp_25p,
pp_30p,
pp_35p,
pp_40p,
pp_45p,
pp_median,
pp_55p,
pp_60p,
pp_65p,
pp_70p,
pp_75p,
pp_80p,
pp_85p,
pp_90p,
pp_95p,
} = self;
*processed_supply += amount;
if pp_95p.is_some() {
return;
}
let processed_sat_amount = processed_supply.to_sat();
let total_sat_supply = supply.to_sat();
if processed_sat_amount >= total_sat_supply * 95 / 100 {
pp_95p.replace(price);
}
if pp_90p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 90 / 100 {
pp_90p.replace(price);
}
if pp_85p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 85 / 100 {
pp_85p.replace(price);
}
if pp_80p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 80 / 100 {
pp_80p.replace(price);
}
if pp_75p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 75 / 100 {
pp_75p.replace(price);
}
if pp_70p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 70 / 100 {
pp_70p.replace(price);
}
if pp_65p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 65 / 100 {
pp_65p.replace(price);
}
if pp_60p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 60 / 100 {
pp_60p.replace(price);
}
if pp_55p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 55 / 100 {
pp_55p.replace(price);
}
if pp_median.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 2 {
pp_median.replace(price);
}
if pp_45p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 45 / 100 {
pp_45p.replace(price);
}
if pp_40p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 40 / 100 {
pp_40p.replace(price);
}
if pp_35p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 35 / 100 {
pp_35p.replace(price);
}
if pp_30p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 30 / 100 {
pp_30p.replace(price);
}
if pp_25p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 4 {
pp_25p.replace(price);
}
if pp_20p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 5 {
pp_20p.replace(price);
}
if pp_15p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 15 / 100 {
pp_15p.replace(price);
}
if pp_10p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 10 {
pp_10p.replace(price);
}
if pp_05p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 20 {
pp_05p.replace(price);
}
}
}

View File

@@ -0,0 +1,113 @@
use std::{
collections::BTreeMap,
fmt::Debug,
ops::{AddAssign, SubAssign},
};
use allocative::Allocative;
use color_eyre::eyre::eyre;
use derive_deref::{Deref, DerefMut};
use crate::structs::{Amount, Price};
/// Histogram keyed by price in whole cents: maps a (rounded) acquisition
/// price to the total value `T` acquired at that price.
#[derive(Deref, DerefMut, Default, Debug, Allocative)]
pub struct PriceToValue<T>(BTreeMap<u32, T>);
impl<T> PriceToValue<T>
where
    // `Default` appeared twice in the original bound list; deduplicated.
    T: Default + Debug + AddAssign + SubAssign + CanSubtract + Copy + Clone + PartialEq + IsZero,
{
    /// Adds `value` to the bucket for `price` (keyed by whole cents),
    /// creating the bucket if it does not exist yet.
    pub fn increment(&mut self, price: Price, value: T) {
        *self.entry(price.to_cent() as u32).or_default() += value;
    }

    /// Subtracts `value` from the bucket for `price`, removing the bucket
    /// once it reaches zero.
    ///
    /// Errors (after dumping context) if the bucket is missing or holds less
    /// than `value` — removing more than was added is an accounting bug.
    pub fn decrement(&mut self, price: Price, value: T) -> color_eyre::Result<()> {
        let cent = price.to_cent() as u32;

        let delete = {
            let self_value = self.get_mut(&cent);

            if self_value.is_none() {
                dbg!(&self.0, price, value);
                return Err(eyre!("self_value is none"));
            }

            let self_value = self_value.unwrap();

            if !self_value.can_subtract(&value) {
                dbg!(*self_value, &self.0, price, value);
                return Err(eyre!("self value < value"));
            }

            *self_value -= value;

            self_value.is_zero()?
        };

        if delete {
            self.remove(&cent).unwrap();
        }

        Ok(())
    }

    /// Walks the buckets in ascending price order, calling `iterate` with
    /// each `(price, value)` pair.
    ///
    /// # Panics
    /// Panics if the summed bucket values do not equal `supply` — the
    /// histogram must account for the cohort's entire supply.
    pub fn iterate(&self, supply: T, mut iterate: impl FnMut(Price, T)) {
        let mut processed = T::default();

        self.iter().for_each(|(cent, value)| {
            let value = *value;
            processed += value;
            iterate(Price::from_cent(*cent as u64), value)
        });

        if processed != supply {
            dbg!(processed, supply);
            panic!("processed_amount isn't equal to supply")
        }
    }
}
impl<T> AddAssign for PriceToValue<T>
where
    T: AddAssign + Copy,
{
    /// Merges `rhs` into `self`, summing values that share a price bucket
    /// and inserting buckets `self` did not have yet.
    fn add_assign(&mut self, rhs: Self) {
        for (cent, value) in rhs.0 {
            match self.0.get_mut(&cent) {
                Some(existing) => *existing += value,
                None => {
                    self.0.insert(cent, value);
                }
            }
        }
    }
}
/// Checked-subtraction predicate: `true` when `self -= other` cannot underflow.
pub trait CanSubtract {
    fn can_subtract(&self, other: &Self) -> bool;
}

impl CanSubtract for Amount {
    fn can_subtract(&self, other: &Self) -> bool {
        self >= other
    }
}

/// Zero test used to garbage-collect empty histogram buckets. Returns a
/// `Result` so fallible implementations (if any) can surface errors.
pub trait IsZero {
    fn is_zero(&self) -> color_eyre::Result<bool>;
}

impl IsZero for Amount {
    fn is_zero(&self) -> color_eyre::Result<bool> {
        Ok(*self == Amount::ZERO)
    }
}

View File

@@ -0,0 +1,54 @@
use crate::structs::Price;
/// Per-block realized profit/loss figures and (fee-adjusted) on-chain value
/// created/destroyed for a cohort.
#[derive(Debug, Default)]
pub struct RealizedState {
    realized_profit: Price,
    realized_loss: Price,
    value_created: Price,
    adjusted_value_created: Price,
    value_destroyed: Price,
    adjusted_value_destroyed: Price,
}

impl RealizedState {
    /// Profit realized by spends so far.
    pub fn realized_profit(&self) -> Price {
        self.realized_profit
    }

    /// Loss realized by spends so far.
    pub fn realized_loss(&self) -> Price {
        self.realized_loss
    }

    /// Value created so far.
    pub fn value_created(&self) -> Price {
        self.value_created
    }

    /// Adjusted value created so far.
    pub fn adjusted_value_created(&self) -> Price {
        self.adjusted_value_created
    }

    /// Value destroyed so far.
    pub fn value_destroyed(&self) -> Price {
        self.value_destroyed
    }

    /// Adjusted value destroyed so far.
    pub fn adjusted_value_destroyed(&self) -> Price {
        self.adjusted_value_destroyed
    }

    /// Accumulates one batch of realized figures into the running totals.
    pub fn iterate(
        &mut self,
        realized_profit: Price,
        realized_loss: Price,
        value_created: Price,
        adjusted_value_created: Price,
        value_destroyed: Price,
        adjusted_value_destroyed: Price,
    ) {
        self.realized_profit += realized_profit;
        self.realized_loss += realized_loss;
        self.value_created += value_created;
        self.adjusted_value_created += adjusted_value_created;
        self.value_destroyed += value_destroyed;
        self.adjusted_value_destroyed += adjusted_value_destroyed;
    }
}

View File

@@ -0,0 +1,39 @@
use std::ops::AddAssign;
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::Amount;
/// Running total of coin supply held by a cohort.
#[derive(Debug, Default, Allocative)]
pub struct SupplyState {
    supply: Amount,
}

impl SupplyState {
    /// Current supply held.
    pub fn supply(&self) -> Amount {
        self.supply
    }

    /// Adds `amount` to the tracked supply.
    pub fn increment(&mut self, amount: Amount) {
        self.supply += amount;
    }

    /// Removes `amount` from the tracked supply.
    ///
    /// Errors (instead of underflowing) when more is removed than is held,
    /// which indicates a broken accounting invariant upstream.
    pub fn decrement(&mut self, amount: Amount) -> color_eyre::Result<()> {
        if self.supply < amount {
            dbg!(self.supply, amount);
            // Fixed message: previously read "supply smaller than supply".
            return Err(eyre!("supply smaller than amount"));
        }

        self.supply -= amount;

        Ok(())
    }
}

/// Merge two states (used when summing partial results).
impl AddAssign for SupplyState {
    fn add_assign(&mut self, rhs: Self) {
        self.supply += rhs.supply;
    }
}

View File

@@ -0,0 +1,50 @@
use std::{cmp::Ordering, ops::Add};
use crate::structs::{Amount, Price};
/// Unrealized profit/loss of a cohort at a given evaluation price, plus the
/// share of supply currently in profit.
#[derive(Debug, Default)]
pub struct UnrealizedState {
    supply_in_profit: Amount,
    unrealized_profit: Price,
    unrealized_loss: Price,
}

impl UnrealizedState {
    /// Supply whose acquisition price is strictly below the evaluation price.
    pub fn supply_in_profit(&self) -> Amount {
        self.supply_in_profit
    }

    /// Total paper profit at the evaluation price.
    pub fn unrealized_profit(&self) -> Price {
        self.unrealized_profit
    }

    /// Total paper loss at the evaluation price.
    pub fn unrealized_loss(&self) -> Price {
        self.unrealized_loss
    }

    /// Folds one `(acquisition price, amount)` slice into the totals at the
    /// evaluation price `price_now`. Supply bought exactly at `price_now`
    /// counts as neither profit nor loss (and not as supply in profit).
    #[inline]
    pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: Amount) {
        match price_then.cmp(&price_now) {
            Ordering::Less => {
                self.unrealized_profit += (price_now - price_then) * amount;
                self.supply_in_profit += amount;
            }
            Ordering::Greater => {
                self.unrealized_loss += (price_then - price_now) * amount;
            }
            Ordering::Equal => {}
        }
    }
}

/// Component-wise sum (used when merging partial results).
impl Add<UnrealizedState> for UnrealizedState {
    type Output = UnrealizedState;
    fn add(self, other: UnrealizedState) -> UnrealizedState {
        UnrealizedState {
            supply_in_profit: self.supply_in_profit + other.supply_in_profit,
            unrealized_profit: self.unrealized_profit + other.unrealized_profit,
            unrealized_loss: self.unrealized_loss + other.unrealized_loss,
        }
    }
}

View File

@@ -0,0 +1,37 @@
use std::ops::AddAssign;
use allocative::Allocative;
use color_eyre::eyre::eyre;
/// Running UTXO count of a cohort. Stored as f64 because liquidity splits
/// contribute fractional counts.
#[derive(Debug, Default, Allocative)]
pub struct UTXOState {
    count: f64,
}

impl UTXOState {
    /// Current UTXO count.
    pub fn count(&self) -> f64 {
        self.count
    }

    /// Adds newly created UTXOs to the count.
    pub fn increment(&mut self, utxo_count: f64) {
        self.count += utxo_count;
    }

    /// Removes spent UTXOs from the count.
    ///
    /// Errors (instead of going negative) when removing more than is
    /// tracked — a broken accounting invariant upstream.
    pub fn decrement(&mut self, utxo_count: f64) -> color_eyre::Result<()> {
        if self.count < utxo_count {
            dbg!(self.count, utxo_count);
            return Err(eyre!("self.count smaller than utxo_count"));
        }

        self.count -= utxo_count;

        Ok(())
    }
}

/// Merge two states (used when summing partial results).
impl AddAssign for UTXOState {
    fn add_assign(&mut self, rhs: Self) {
        self.count += rhs.count;
    }
}

View File

@@ -0,0 +1,7 @@
mod address;
mod any;
mod utxo;
pub use address::*;
pub use any::*;
pub use utxo::*;

View File

@@ -0,0 +1,109 @@
use allocative::Allocative;
use crate::{
parser::states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{Amount, Price},
};
/// Durable accounting state for one UTXO cohort: aggregate supply / UTXO /
/// realized-cap totals plus a price-paid histogram used to derive
/// percentiles and unrealized PnL.
#[derive(Default, Debug, Allocative)]
pub struct UTXOCohortDurableStates {
    pub durable_states: DurableStates,
    // Histogram: rounded price paid (keyed in cents) -> amount acquired there.
    pub price_to_amount: PriceToValue<Amount>,
}

impl UTXOCohortDurableStates {
    /// Adds `amount` / `utxo_count` acquired at `price` to the cohort.
    pub fn increment(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, true)
    }

    /// Removes `amount` / `utxo_count` originally acquired at `price`.
    pub fn decrement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, false)
    }

    /// Shared increment/decrement implementation: updates the aggregate
    /// durable states and the price-paid histogram together so they cannot
    /// drift apart.
    /// NOTE(review): `pub` despite the underscore prefix — possibly called
    /// from elsewhere; confirm before narrowing visibility.
    pub fn _crement(
        &mut self,
        amount: Amount,
        utxo_count: f64,
        price: Price,
        increment: bool,
    ) -> color_eyre::Result<()> {
        // Value at acquisition = price paid * amount.
        let realized_cap = price * amount;

        if increment {
            self.durable_states
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.durable_states
                .decrement(amount, utxo_count, realized_cap)
        }
        .inspect_err(|report| {
            dbg!(report, "split all failed", amount, utxo_count);
        })?;

        // Histogram keys are rounded to significant digits to bound its size.
        let rounded_price = price.to_significant();

        if increment {
            self.price_to_amount.increment(rounded_price, amount);
        } else {
            self.price_to_amount
                .decrement(rounded_price, amount)
                .inspect_err(|report| {
                    dbg!(
                        report,
                        "cents_to_amount decrement failed",
                        rounded_price,
                        price,
                        amount,
                        utxo_count
                    );
                })?;
        }

        Ok(())
    }

    /// Derives the per-block one-shot states (price-paid percentiles and
    /// unrealized PnL at `block_price`, plus at `date_price` when supplied)
    /// by walking the price-paid histogram in ascending price order.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> OneShotStates {
        let mut one_shot_states = OneShotStates::default();

        // Only allocate a date state when a date price was supplied.
        if date_price.is_some() {
            one_shot_states
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }

        let supply = self.durable_states.supply_state.supply();

        let one_shot_states_ref = &mut one_shot_states;

        self.price_to_amount.iterate(supply, |price_paid, amount| {
            one_shot_states_ref
                .price_paid_state
                .iterate(price_paid, amount, supply);

            one_shot_states_ref
                .unrealized_block_state
                .iterate(price_paid, block_price, amount);

            if let Some(unrealized_date_state) = one_shot_states_ref.unrealized_date_state.as_mut()
            {
                unrealized_date_state.iterate(price_paid, date_price.unwrap(), amount);
            }
        });

        one_shot_states
    }
}

View File

@@ -0,0 +1,34 @@
use crate::structs::{Epoch, Height};
/// Selects UTXOs for a cohort either by age in days (exclusive upper bound,
/// inclusive lower bound) or by the halving epoch they were created in.
pub enum UTXOFilter {
    // Age strictly below `to` days.
    To(u32),
    // `from` days (inclusive) up to `to` days (exclusive).
    FromTo { from: u32, to: u32 },
    // Age of at least `from` days.
    From(u32),
    // Created during the given halving epoch.
    Epoch(Epoch),
}
impl UTXOCheck for UTXOFilter {
    /// Returns whether a UTXO with the given age (and creation `height`, for
    /// epoch filters) belongs to this cohort.
    ///
    /// The age-based arms previously duplicated `check_days_old` verbatim;
    /// they now delegate to it.
    fn check(&self, days_old: &u32, height: &Height) -> bool {
        match self {
            // Only epoch filters look at the creation height.
            UTXOFilter::Epoch(epoch) => *epoch == height.into(),
            UTXOFilter::From(_) | UTXOFilter::To(_) | UTXOFilter::FromTo { .. } => {
                self.check_days_old(days_old)
            }
        }
    }

    /// Age-only check.
    ///
    /// # Panics
    /// Must not be called on `Epoch` filters.
    fn check_days_old(&self, days_old: &u32) -> bool {
        match self {
            UTXOFilter::From(from) => from <= days_old,
            UTXOFilter::To(to) => to > days_old,
            UTXOFilter::FromTo { from, to } => from <= days_old && to > days_old,
            UTXOFilter::Epoch(_) => unreachable!(),
        }
    }
}
/// Predicate over a UTXO's age and, for epoch-based filters, its creation height.
pub trait UTXOCheck {
    /// Returns `true` when the UTXO matches, using both its age and its height.
    fn check(&self, days_old: &u32, height: &Height) -> bool;
    /// Age-only variant; implementations may panic for height-based filters.
    fn check_days_old(&self, days_old: &u32) -> bool;
}

View File

@@ -0,0 +1,75 @@
use crate::structs::Epoch;
use super::{SplitByUTXOCohort, UTXOFilter};
/// Static table mapping every UTXO cohort to its membership filter.
/// Ages are expressed in days (months approximated as 30 days, years as 365).
pub const UTXO_FILTERS: SplitByUTXOCohort<UTXOFilter> = SplitByUTXOCohort {
    // Cumulative "younger than" cohorts.
    up_to_1d: UTXOFilter::To(1),
    up_to_1w: UTXOFilter::To(7),
    up_to_1m: UTXOFilter::To(30),
    up_to_2m: UTXOFilter::To(2 * 30),
    up_to_3m: UTXOFilter::To(3 * 30),
    up_to_4m: UTXOFilter::To(4 * 30),
    up_to_5m: UTXOFilter::To(5 * 30),
    up_to_6m: UTXOFilter::To(6 * 30),
    up_to_1y: UTXOFilter::To(365),
    up_to_2y: UTXOFilter::To(2 * 365),
    up_to_3y: UTXOFilter::To(3 * 365),
    up_to_5y: UTXOFilter::To(5 * 365),
    up_to_7y: UTXOFilter::To(7 * 365),
    up_to_10y: UTXOFilter::To(10 * 365),
    up_to_15y: UTXOFilter::To(15 * 365),
    // Mutually-exclusive age bands.
    from_1d_to_1w: UTXOFilter::FromTo { from: 1, to: 7 },
    from_1w_to_1m: UTXOFilter::FromTo { from: 7, to: 30 },
    from_1m_to_3m: UTXOFilter::FromTo {
        from: 30,
        to: 3 * 30,
    },
    from_3m_to_6m: UTXOFilter::FromTo {
        from: 3 * 30,
        to: 6 * 30,
    },
    from_6m_to_1y: UTXOFilter::FromTo {
        from: 6 * 30,
        to: 365,
    },
    from_1y_to_2y: UTXOFilter::FromTo {
        from: 365,
        to: 2 * 365,
    },
    from_2y_to_3y: UTXOFilter::FromTo {
        from: 2 * 365,
        to: 3 * 365,
    },
    from_3y_to_5y: UTXOFilter::FromTo {
        from: 3 * 365,
        to: 5 * 365,
    },
    from_5y_to_7y: UTXOFilter::FromTo {
        from: 5 * 365,
        to: 7 * 365,
    },
    from_7y_to_10y: UTXOFilter::FromTo {
        from: 7 * 365,
        to: 10 * 365,
    },
    from_10y_to_15y: UTXOFilter::FromTo {
        from: 10 * 365,
        to: 15 * 365,
    },
    // Cumulative "older than" cohorts.
    from_1y: UTXOFilter::From(365),
    from_2y: UTXOFilter::From(2 * 365),
    from_4y: UTXOFilter::From(4 * 365),
    from_10y: UTXOFilter::From(10 * 365),
    from_15y: UTXOFilter::From(15 * 365),
    // Halving-epoch cohorts (height-based, age-independent).
    epoch_1: UTXOFilter::Epoch(Epoch(1)),
    epoch_2: UTXOFilter::Epoch(Epoch(2)),
    epoch_3: UTXOFilter::Epoch(Epoch(3)),
    epoch_4: UTXOFilter::Epoch(Epoch(4)),
    epoch_5: UTXOFilter::Epoch(Epoch(5)),
    // Short/long-term holders split at 155 days; `sth`/`lth` are complementary.
    sth: UTXOFilter::To(155),
    lth: UTXOFilter::From(155),
};

View File

@@ -0,0 +1,97 @@
use allocative::Allocative;
/// Identifier for every UTXO cohort tracked by the parser.
/// Mirrors the fields of `SplitByUTXOCohort` one-to-one.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub enum UTXOCohortId {
    // Cumulative "younger than" cohorts.
    #[default]
    UpTo1d,
    UpTo1w,
    UpTo1m,
    UpTo2m,
    UpTo3m,
    UpTo4m,
    UpTo5m,
    UpTo6m,
    UpTo1y,
    UpTo2y,
    UpTo3y,
    UpTo5y,
    UpTo7y,
    UpTo10y,
    UpTo15y,
    // Mutually-exclusive age bands.
    From1dTo1w,
    From1wTo1m,
    From1mTo3m,
    From3mTo6m,
    From6mTo1y,
    From1yTo2y,
    From2yTo3y,
    From3yTo5y,
    From5yTo7y,
    From7yTo10y,
    From10yTo15y,
    // Cumulative "older than" cohorts.
    From1y,
    From2y,
    From4y,
    From10y,
    From15y,
    // Halving-epoch cohorts.
    Epoch1,
    Epoch2,
    Epoch3,
    Epoch4,
    Epoch5,
    // Short/long-term holders (155-day split).
    ShortTermHolders,
    LongTermHolders,
}
impl UTXOCohortId {
    /// Stable snake_case identifier for this cohort, used for dataset naming.
    ///
    /// Returns `&'static str` since every arm is a string literal; this is
    /// backward-compatible with the previous elided-lifetime signature.
    pub fn name(&self) -> &'static str {
        match self {
            Self::UpTo1d => "up_to_1d",
            Self::UpTo1w => "up_to_1w",
            Self::UpTo1m => "up_to_1m",
            Self::UpTo2m => "up_to_2m",
            Self::UpTo3m => "up_to_3m",
            Self::UpTo4m => "up_to_4m",
            Self::UpTo5m => "up_to_5m",
            Self::UpTo6m => "up_to_6m",
            Self::UpTo1y => "up_to_1y",
            Self::UpTo2y => "up_to_2y",
            Self::UpTo3y => "up_to_3y",
            Self::UpTo5y => "up_to_5y",
            Self::UpTo7y => "up_to_7y",
            Self::UpTo10y => "up_to_10y",
            Self::UpTo15y => "up_to_15y",
            Self::From1dTo1w => "from_1d_to_1w",
            Self::From1wTo1m => "from_1w_to_1m",
            Self::From1mTo3m => "from_1m_to_3m",
            Self::From3mTo6m => "from_3m_to_6m",
            Self::From6mTo1y => "from_6m_to_1y",
            Self::From1yTo2y => "from_1y_to_2y",
            Self::From2yTo3y => "from_2y_to_3y",
            Self::From3yTo5y => "from_3y_to_5y",
            Self::From5yTo7y => "from_5y_to_7y",
            Self::From7yTo10y => "from_7y_to_10y",
            Self::From10yTo15y => "from_10y_to_15y",
            Self::From1y => "from_1y",
            Self::From2y => "from_2y",
            Self::From4y => "from_4y",
            Self::From10y => "from_10y",
            Self::From15y => "from_15y",
            Self::Epoch1 => "epoch_1",
            Self::Epoch2 => "epoch_2",
            Self::Epoch3 => "epoch_3",
            Self::Epoch4 => "epoch_4",
            Self::Epoch5 => "epoch_5",
            Self::ShortTermHolders => "sth",
            Self::LongTermHolders => "lth",
        }
    }
}

View File

@@ -0,0 +1,152 @@
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use crate::{
parser::states::DateDataVec,
structs::{Amount, BlockData, Price, SentData, Timestamp},
};
use super::{SplitByUTXOCohort, UTXOCohortDurableStates, UTXOCohortsOneShotStates};
/// Durable (incrementally-maintained) states for every UTXO cohort.
#[derive(Default, Deref, DerefMut, Allocative)]
pub struct UTXOCohortsDurableStates(SplitByUTXOCohort<UTXOCohortDurableStates>);
impl UTXOCohortsDurableStates {
    /// Rebuilds all cohort durable states from scratch by replaying every
    /// block in `date_data_vec`, aging each block's UTXOs relative to the
    /// most recent block (the chain tip as known by this state).
    pub fn init(date_data_vec: &DateDataVec) -> Self {
        let mut s = Self::default();

        if let Some(last_block_data) = date_data_vec.last_block() {
            date_data_vec.iter().for_each(|date_data| {
                date_data.blocks.iter().for_each(|block_data| {
                    let amount = block_data.amount;
                    let utxo_count = block_data.utxos as f64;

                    // No need to either insert or remove if 0
                    if amount == Amount::ZERO {
                        return;
                    }

                    // Age of this block's UTXOs relative to the tip.
                    let increment_days_old = Timestamp::difference_in_days_between(
                        block_data.timestamp,
                        last_block_data.timestamp,
                    );

                    s.initial_filtered_apply(&increment_days_old, &block_data.height, |state| {
                        state
                            .increment(amount, utxo_count, block_data.price)
                            .unwrap();
                    });
                })
            });
        }

        s
    }

    /// Moves a block's remaining UTXOs between age cohorts when their age
    /// (relative to the tip) changed since the previous tip.
    // NOTE(review): method name has a typo ("udpate" -> "update"); renaming
    // would break callers, so it is only flagged here.
    pub fn udpate_age_if_needed(
        &mut self,
        block_data: &BlockData,
        last_block_data: &BlockData,
        previous_last_block_data: Option<&BlockData>,
    ) {
        let height = block_data.height;
        let amount = block_data.amount;
        let utxo_count = block_data.utxos as f64;
        let price = block_data.price;

        // No need to either insert or remove if 0
        if amount == Amount::ZERO {
            return;
        }

        if block_data.height == last_block_data.height {
            // Brand-new tip block: insert at age 0, nothing to remove.
            self.initial_filtered_apply(&0, &height, |state| {
                state.increment(amount, utxo_count, price).unwrap();
            })
        } else {
            // Age relative to the new tip vs. relative to the previous tip.
            let increment_days_old = Timestamp::difference_in_days_between(
                block_data.timestamp,
                last_block_data.timestamp,
            );

            let decrement_days_old = Timestamp::difference_in_days_between(
                block_data.timestamp,
                previous_last_block_data
                    .unwrap_or_else(|| {
                        // A non-tip block implies a previous tip must exist.
                        dbg!(block_data, last_block_data, previous_last_block_data);
                        panic!()
                    })
                    .timestamp,
            );

            // Same age bucket on both sides: no cohort crossed, nothing to do.
            if increment_days_old == decrement_days_old {
                return;
            }

            self.duo_filtered_apply(
                &increment_days_old,
                &decrement_days_old,
                |state| {
                    state.increment(amount, utxo_count, price).unwrap();
                },
                |state| {
                    state.decrement(amount, utxo_count, price).unwrap();
                },
            );
        }
    }

    /// Removes the coins spent in the current block (`sent_data`) from the
    /// cohorts they belonged to, aged against the previous tip.
    pub fn subtract_moved(
        &mut self,
        block_data: &BlockData,
        sent_data: &SentData,
        previous_last_block_data: &BlockData,
    ) {
        let amount = sent_data.volume;
        let utxo_count = sent_data.count as f64;

        // No need to either insert or remove if 0
        if amount == Amount::ZERO {
            return;
        }

        let days_old = Timestamp::difference_in_days_between(
            block_data.timestamp,
            previous_last_block_data.timestamp,
        );

        self.initial_filtered_apply(&days_old, &block_data.height, |state| {
            state
                .decrement(amount, utxo_count, block_data.price)
                .unwrap_or_else(|report| {
                    // Decrement below zero means corrupt state: dump and abort.
                    dbg!(
                        report.to_string(),
                        block_data,
                        sent_data,
                        previous_last_block_data
                    );
                    panic!()
                });
        })
    }

    /// Computes every cohort's one-shot states in parallel (rayon), then
    /// gathers them back into a single `UTXOCohortsOneShotStates`.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> UTXOCohortsOneShotStates {
        let mut one_shot_states = UTXOCohortsOneShotStates::default();

        self.as_vec()
            .into_par_iter()
            .map(|(states, id)| (states.compute_one_shot_states(block_price, date_price), id))
            .collect::<Vec<_>>()
            .into_iter()
            .for_each(|(states, id)| {
                *one_shot_states.get_mut(&id) = states;
            });

        one_shot_states
    }
}

View File

@@ -0,0 +1,8 @@
use derive_deref::{Deref, DerefMut};
use crate::parser::states::OneShotStates;
use super::SplitByUTXOCohort;
/// One-shot (recomputed per block) states for every UTXO cohort.
#[derive(Deref, DerefMut, Default)]
pub struct UTXOCohortsOneShotStates(pub SplitByUTXOCohort<OneShotStates>);

View File

@@ -0,0 +1,86 @@
use std::{cmp::Ordering, collections::BTreeMap};
use derive_deref::{Deref, DerefMut};
use crate::{
parser::states::{DateDataVec, InputState, RealizedState},
structs::{BlockPath, Price, SentData, Timestamp},
};
use super::SplitByUTXOCohort;
/// Per-cohort accumulator for coins sent (spent) in the current block.
#[derive(Default, Debug)]
pub struct SentState {
    // Volume/count of spent inputs attributed to the cohort.
    pub input: InputState,
    // Realized profit/loss and value created/destroyed by those spends.
    pub realized: RealizedState,
}
/// `SentState` accumulators for every UTXO cohort.
#[derive(Deref, DerefMut, Default)]
pub struct UTXOCohortsSentStates(SplitByUTXOCohort<SentState>);
impl UTXOCohortsSentStates {
    /// Folds every spent output of the current block into the matching
    /// cohorts' input and realized-profit/loss states.
    pub fn compute(
        &mut self,
        date_data_vec: &DateDataVec,
        block_path_to_sent_data: &BTreeMap<BlockPath, SentData>,
        current_price: Price,
        current_timestamp: Timestamp,
    ) {
        // Nothing recorded yet: no tip to age coins against.
        let Some(last_block_data) = date_data_vec.last_block() else {
            return;
        };

        for (block_path, sent_data) in block_path_to_sent_data {
            // Resolve the block the spent coins were created in.
            let origin_date_data = date_data_vec.get_date_data(block_path).unwrap();
            let origin_block_data = origin_date_data.get_block_data(block_path).unwrap();

            let age_in_days = Timestamp::difference_in_days_between(
                origin_block_data.timestamp,
                last_block_data.timestamp,
            );

            let acquired_timestamp = origin_block_data.timestamp;
            let acquired_price = origin_block_data.price;
            let origin_height = origin_block_data.height;
            let sent_amount = sent_data.volume;

            self.initial_filtered_apply(&age_in_days, &origin_height, |state| {
                state.input.iterate(sent_data.count as f64, sent_amount);

                // Cost basis at acquisition vs. proceeds at the current price.
                let cost_basis = acquired_price * sent_amount;
                let proceeds = current_price * sent_amount;

                let realized_profit = if proceeds > cost_basis {
                    proceeds - cost_basis
                } else {
                    Price::ZERO
                };
                let realized_loss = if cost_basis > proceeds {
                    cost_basis - proceeds
                } else {
                    Price::ZERO
                };

                let value_created = proceeds;
                let value_destroyed = cost_basis;

                // Adjusted values only count coins held for more than one hour.
                let (adjusted_value_created, adjusted_value_destroyed) =
                    if acquired_timestamp.older_by_1h_plus_than(current_timestamp) {
                        (value_created, value_destroyed)
                    } else {
                        (Price::ZERO, Price::ZERO)
                    };

                state.realized.iterate(
                    realized_profit,
                    realized_loss,
                    value_created,
                    adjusted_value_created,
                    value_destroyed,
                    adjusted_value_destroyed,
                );
            })
        }
    }
}

View File

@@ -0,0 +1,17 @@
mod cohort_durable_states;
mod cohort_filter;
mod cohort_filters;
mod cohort_id;
mod cohorts_durable_states;
mod cohorts_one_shot_states;
mod cohorts_sent_states;
mod split_by_utxo_cohort;
pub use cohort_durable_states::*;
pub use cohort_filter::*;
pub use cohort_filters::*;
pub use cohort_id::*;
pub use cohorts_durable_states::*;
pub use cohorts_one_shot_states::*;
pub use cohorts_sent_states::*;
pub use split_by_utxo_cohort::*;

View File

@@ -0,0 +1,719 @@
use allocative::Allocative;
use super::{UTXOCheck, UTXOCohortId, UTXO_FILTERS};
use crate::structs::Height;
/// Holds one `T` per UTXO cohort. A given UTXO may belong to several cohorts
/// at once (e.g. `up_to_1w` implies `up_to_1m`, plus its age band, an epoch,
/// and either `sth` or `lth`).
#[derive(Default, Allocative)]
pub struct SplitByUTXOCohort<T> {
    // Short/long-term holders (155-day split).
    pub sth: T,
    pub lth: T,
    // Cumulative "younger than" cohorts.
    pub up_to_1d: T,
    pub up_to_1w: T,
    pub up_to_1m: T,
    pub up_to_2m: T,
    pub up_to_3m: T,
    pub up_to_4m: T,
    pub up_to_5m: T,
    pub up_to_6m: T,
    pub up_to_1y: T,
    pub up_to_2y: T,
    pub up_to_3y: T,
    pub up_to_5y: T,
    pub up_to_7y: T,
    pub up_to_10y: T,
    pub up_to_15y: T,
    // Mutually-exclusive age bands.
    pub from_1d_to_1w: T,
    pub from_1w_to_1m: T,
    pub from_1m_to_3m: T,
    pub from_3m_to_6m: T,
    pub from_6m_to_1y: T,
    pub from_1y_to_2y: T,
    pub from_2y_to_3y: T,
    pub from_3y_to_5y: T,
    pub from_5y_to_7y: T,
    pub from_7y_to_10y: T,
    pub from_10y_to_15y: T,
    // Cumulative "older than" cohorts.
    pub from_1y: T,
    pub from_2y: T,
    pub from_4y: T,
    pub from_10y: T,
    pub from_15y: T,
    // Halving-epoch cohorts.
    pub epoch_1: T,
    pub epoch_2: T,
    pub epoch_3: T,
    pub epoch_4: T,
    pub epoch_5: T,
}
impl<T> SplitByUTXOCohort<T> {
pub fn get(&self, id: &UTXOCohortId) -> &T {
match id {
UTXOCohortId::UpTo1d => &self.up_to_1d,
UTXOCohortId::UpTo1w => &self.up_to_1w,
UTXOCohortId::UpTo1m => &self.up_to_1m,
UTXOCohortId::UpTo2m => &self.up_to_2m,
UTXOCohortId::UpTo3m => &self.up_to_3m,
UTXOCohortId::UpTo4m => &self.up_to_4m,
UTXOCohortId::UpTo5m => &self.up_to_5m,
UTXOCohortId::UpTo6m => &self.up_to_6m,
UTXOCohortId::UpTo1y => &self.up_to_1y,
UTXOCohortId::UpTo2y => &self.up_to_2y,
UTXOCohortId::UpTo3y => &self.up_to_3y,
UTXOCohortId::UpTo5y => &self.up_to_5y,
UTXOCohortId::UpTo7y => &self.up_to_7y,
UTXOCohortId::UpTo10y => &self.up_to_10y,
UTXOCohortId::UpTo15y => &self.up_to_15y,
UTXOCohortId::From1dTo1w => &self.from_1d_to_1w,
UTXOCohortId::From1wTo1m => &self.from_1w_to_1m,
UTXOCohortId::From1mTo3m => &self.from_1m_to_3m,
UTXOCohortId::From3mTo6m => &self.from_3m_to_6m,
UTXOCohortId::From6mTo1y => &self.from_6m_to_1y,
UTXOCohortId::From1yTo2y => &self.from_1y_to_2y,
UTXOCohortId::From2yTo3y => &self.from_2y_to_3y,
UTXOCohortId::From3yTo5y => &self.from_3y_to_5y,
UTXOCohortId::From5yTo7y => &self.from_5y_to_7y,
UTXOCohortId::From7yTo10y => &self.from_7y_to_10y,
UTXOCohortId::From10yTo15y => &self.from_10y_to_15y,
UTXOCohortId::From1y => &self.from_1y,
UTXOCohortId::From2y => &self.from_2y,
UTXOCohortId::From4y => &self.from_4y,
UTXOCohortId::From10y => &self.from_10y,
UTXOCohortId::From15y => &self.from_15y,
UTXOCohortId::Epoch1 => &self.epoch_1,
UTXOCohortId::Epoch2 => &self.epoch_2,
UTXOCohortId::Epoch3 => &self.epoch_3,
UTXOCohortId::Epoch4 => &self.epoch_4,
UTXOCohortId::Epoch5 => &self.epoch_5,
UTXOCohortId::ShortTermHolders => &self.sth,
UTXOCohortId::LongTermHolders => &self.lth,
}
}
pub fn get_mut(&mut self, id: &UTXOCohortId) -> &mut T {
match id {
UTXOCohortId::UpTo1d => &mut self.up_to_1d,
UTXOCohortId::UpTo1w => &mut self.up_to_1w,
UTXOCohortId::UpTo1m => &mut self.up_to_1m,
UTXOCohortId::UpTo2m => &mut self.up_to_2m,
UTXOCohortId::UpTo3m => &mut self.up_to_3m,
UTXOCohortId::UpTo4m => &mut self.up_to_4m,
UTXOCohortId::UpTo5m => &mut self.up_to_5m,
UTXOCohortId::UpTo6m => &mut self.up_to_6m,
UTXOCohortId::UpTo1y => &mut self.up_to_1y,
UTXOCohortId::UpTo2y => &mut self.up_to_2y,
UTXOCohortId::UpTo3y => &mut self.up_to_3y,
UTXOCohortId::UpTo5y => &mut self.up_to_5y,
UTXOCohortId::UpTo7y => &mut self.up_to_7y,
UTXOCohortId::UpTo10y => &mut self.up_to_10y,
UTXOCohortId::UpTo15y => &mut self.up_to_15y,
UTXOCohortId::From1dTo1w => &mut self.from_1d_to_1w,
UTXOCohortId::From1wTo1m => &mut self.from_1w_to_1m,
UTXOCohortId::From1mTo3m => &mut self.from_1m_to_3m,
UTXOCohortId::From3mTo6m => &mut self.from_3m_to_6m,
UTXOCohortId::From6mTo1y => &mut self.from_6m_to_1y,
UTXOCohortId::From1yTo2y => &mut self.from_1y_to_2y,
UTXOCohortId::From2yTo3y => &mut self.from_2y_to_3y,
UTXOCohortId::From3yTo5y => &mut self.from_3y_to_5y,
UTXOCohortId::From5yTo7y => &mut self.from_5y_to_7y,
UTXOCohortId::From7yTo10y => &mut self.from_7y_to_10y,
UTXOCohortId::From10yTo15y => &mut self.from_10y_to_15y,
UTXOCohortId::From1y => &mut self.from_1y,
UTXOCohortId::From2y => &mut self.from_2y,
UTXOCohortId::From4y => &mut self.from_4y,
UTXOCohortId::From10y => &mut self.from_10y,
UTXOCohortId::From15y => &mut self.from_15y,
UTXOCohortId::Epoch1 => &mut self.epoch_1,
UTXOCohortId::Epoch2 => &mut self.epoch_2,
UTXOCohortId::Epoch3 => &mut self.epoch_3,
UTXOCohortId::Epoch4 => &mut self.epoch_4,
UTXOCohortId::Epoch5 => &mut self.epoch_5,
UTXOCohortId::ShortTermHolders => &mut self.sth,
UTXOCohortId::LongTermHolders => &mut self.lth,
}
}
/// Excluding epochs since they're static
pub fn duo_filtered_apply(
&mut self,
current_days_old: &u32,
previous_days_old: &u32,
apply_if_current_only: impl Fn(&mut T),
apply_if_previous_only: impl Fn(&mut T),
) {
let is_up_to_1d = UTXO_FILTERS.up_to_1d.check_days_old(current_days_old);
let was_up_to_1d = UTXO_FILTERS.up_to_1d.check_days_old(previous_days_old);
if is_up_to_1d && !was_up_to_1d {
apply_if_current_only(&mut self.up_to_1d);
} else if was_up_to_1d && !is_up_to_1d {
apply_if_previous_only(&mut self.up_to_1d);
}
let is_up_to_1w = UTXO_FILTERS.up_to_1w.check_days_old(current_days_old);
let was_up_to_1w = UTXO_FILTERS.up_to_1w.check_days_old(previous_days_old);
if is_up_to_1w && !was_up_to_1w {
apply_if_current_only(&mut self.up_to_1w);
} else if was_up_to_1w && !is_up_to_1w {
apply_if_previous_only(&mut self.up_to_1w);
}
let is_up_to_1m = UTXO_FILTERS.up_to_1m.check_days_old(current_days_old);
let was_up_to_1m = UTXO_FILTERS.up_to_1m.check_days_old(previous_days_old);
if is_up_to_1m && !was_up_to_1m {
apply_if_current_only(&mut self.up_to_1m);
} else if was_up_to_1m && !is_up_to_1m {
apply_if_previous_only(&mut self.up_to_1m);
}
let is_up_to_2m = UTXO_FILTERS.up_to_2m.check_days_old(current_days_old);
let was_up_to_2m = UTXO_FILTERS.up_to_2m.check_days_old(previous_days_old);
if is_up_to_2m && !was_up_to_2m {
apply_if_current_only(&mut self.up_to_2m);
} else if was_up_to_2m && !is_up_to_2m {
apply_if_previous_only(&mut self.up_to_2m);
}
let is_up_to_3m = UTXO_FILTERS.up_to_3m.check_days_old(current_days_old);
let was_up_to_3m = UTXO_FILTERS.up_to_3m.check_days_old(previous_days_old);
if is_up_to_3m && !was_up_to_3m {
apply_if_current_only(&mut self.up_to_3m);
} else if was_up_to_3m && !is_up_to_3m {
apply_if_previous_only(&mut self.up_to_3m);
}
let is_up_to_4m = UTXO_FILTERS.up_to_4m.check_days_old(current_days_old);
let was_up_to_4m = UTXO_FILTERS.up_to_4m.check_days_old(previous_days_old);
if is_up_to_4m && !was_up_to_4m {
apply_if_current_only(&mut self.up_to_4m);
} else if was_up_to_4m && !is_up_to_4m {
apply_if_previous_only(&mut self.up_to_4m);
}
let is_up_to_5m = UTXO_FILTERS.up_to_5m.check_days_old(current_days_old);
let was_up_to_5m = UTXO_FILTERS.up_to_5m.check_days_old(previous_days_old);
if is_up_to_5m && !was_up_to_5m {
apply_if_current_only(&mut self.up_to_5m);
} else if was_up_to_5m && !is_up_to_5m {
apply_if_previous_only(&mut self.up_to_5m);
}
let is_up_to_6m = UTXO_FILTERS.up_to_6m.check_days_old(current_days_old);
let was_up_to_6m = UTXO_FILTERS.up_to_6m.check_days_old(previous_days_old);
if is_up_to_6m && !was_up_to_6m {
apply_if_current_only(&mut self.up_to_6m);
} else if was_up_to_6m && !is_up_to_6m {
apply_if_previous_only(&mut self.up_to_6m);
}
let is_up_to_1y = UTXO_FILTERS.up_to_1y.check_days_old(current_days_old);
let was_up_to_1y = UTXO_FILTERS.up_to_1y.check_days_old(previous_days_old);
if is_up_to_1y && !was_up_to_1y {
apply_if_current_only(&mut self.up_to_1y);
} else if was_up_to_1y && !is_up_to_1y {
apply_if_previous_only(&mut self.up_to_1y);
}
let is_up_to_2y = UTXO_FILTERS.up_to_2y.check_days_old(current_days_old);
let was_up_to_2y = UTXO_FILTERS.up_to_2y.check_days_old(previous_days_old);
if is_up_to_2y && !was_up_to_2y {
apply_if_current_only(&mut self.up_to_2y);
} else if was_up_to_2y && !is_up_to_2y {
apply_if_previous_only(&mut self.up_to_2y);
}
let is_up_to_3y = UTXO_FILTERS.up_to_3y.check_days_old(current_days_old);
let was_up_to_3y = UTXO_FILTERS.up_to_3y.check_days_old(previous_days_old);
if is_up_to_3y && !was_up_to_3y {
apply_if_current_only(&mut self.up_to_3y);
} else if was_up_to_3y && !is_up_to_3y {
apply_if_previous_only(&mut self.up_to_3y);
}
let is_up_to_5y = UTXO_FILTERS.up_to_5y.check_days_old(current_days_old);
let was_up_to_5y = UTXO_FILTERS.up_to_5y.check_days_old(previous_days_old);
if is_up_to_5y && !was_up_to_5y {
apply_if_current_only(&mut self.up_to_5y);
} else if was_up_to_5y && !is_up_to_5y {
apply_if_previous_only(&mut self.up_to_5y);
}
let is_up_to_7y = UTXO_FILTERS.up_to_7y.check_days_old(current_days_old);
let was_up_to_7y = UTXO_FILTERS.up_to_7y.check_days_old(previous_days_old);
if is_up_to_7y && !was_up_to_7y {
apply_if_current_only(&mut self.up_to_7y);
} else if was_up_to_7y && !is_up_to_7y {
apply_if_previous_only(&mut self.up_to_7y);
}
let is_up_to_10y = UTXO_FILTERS.up_to_10y.check_days_old(current_days_old);
let was_up_to_10y = UTXO_FILTERS.up_to_10y.check_days_old(previous_days_old);
if is_up_to_10y && !was_up_to_10y {
apply_if_current_only(&mut self.up_to_10y);
} else if was_up_to_10y && !is_up_to_10y {
apply_if_previous_only(&mut self.up_to_10y);
}
let is_up_to_15y = UTXO_FILTERS.up_to_15y.check_days_old(current_days_old);
let was_up_to_15y = UTXO_FILTERS.up_to_15y.check_days_old(previous_days_old);
if is_up_to_15y && !was_up_to_15y {
apply_if_current_only(&mut self.up_to_15y);
} else if was_up_to_15y && !is_up_to_15y {
apply_if_previous_only(&mut self.up_to_15y);
}
let is_from_1d_to_1w = UTXO_FILTERS.from_1d_to_1w.check_days_old(current_days_old);
let was_from_1d_to_1w = UTXO_FILTERS.from_1d_to_1w.check_days_old(previous_days_old);
if is_from_1d_to_1w && !was_from_1d_to_1w {
apply_if_current_only(&mut self.from_1d_to_1w);
} else if was_from_1d_to_1w && !is_from_1d_to_1w {
apply_if_previous_only(&mut self.from_1d_to_1w);
}
let is_from_1w_to_1m = UTXO_FILTERS.from_1w_to_1m.check_days_old(current_days_old);
let was_from_1w_to_1m = UTXO_FILTERS.from_1w_to_1m.check_days_old(previous_days_old);
if is_from_1w_to_1m && !was_from_1w_to_1m {
apply_if_current_only(&mut self.from_1w_to_1m);
} else if was_from_1w_to_1m && !is_from_1w_to_1m {
apply_if_previous_only(&mut self.from_1w_to_1m);
}
let is_from_1m_to_3m = UTXO_FILTERS.from_1m_to_3m.check_days_old(current_days_old);
let was_from_1m_to_3m = UTXO_FILTERS.from_1m_to_3m.check_days_old(previous_days_old);
if is_from_1m_to_3m && !was_from_1m_to_3m {
apply_if_current_only(&mut self.from_1m_to_3m);
} else if was_from_1m_to_3m && !is_from_1m_to_3m {
apply_if_previous_only(&mut self.from_1m_to_3m);
}
let is_from_3m_to_6m = UTXO_FILTERS.from_3m_to_6m.check_days_old(current_days_old);
let was_from_3m_to_6m = UTXO_FILTERS.from_3m_to_6m.check_days_old(previous_days_old);
if is_from_3m_to_6m && !was_from_3m_to_6m {
apply_if_current_only(&mut self.from_3m_to_6m);
} else if was_from_3m_to_6m && !is_from_3m_to_6m {
apply_if_previous_only(&mut self.from_3m_to_6m);
}
let is_from_6m_to_1y = UTXO_FILTERS.from_6m_to_1y.check_days_old(current_days_old);
let was_from_6m_to_1y = UTXO_FILTERS.from_6m_to_1y.check_days_old(previous_days_old);
if is_from_6m_to_1y && !was_from_6m_to_1y {
apply_if_current_only(&mut self.from_6m_to_1y);
} else if was_from_6m_to_1y && !is_from_6m_to_1y {
apply_if_previous_only(&mut self.from_6m_to_1y);
}
let is_from_1y_to_2y = UTXO_FILTERS.from_1y_to_2y.check_days_old(current_days_old);
let was_from_1y_to_2y = UTXO_FILTERS.from_1y_to_2y.check_days_old(previous_days_old);
if is_from_1y_to_2y && !was_from_1y_to_2y {
apply_if_current_only(&mut self.from_1y_to_2y);
} else if was_from_1y_to_2y && !is_from_1y_to_2y {
apply_if_previous_only(&mut self.from_1y_to_2y);
}
let is_from_2y_to_3y = UTXO_FILTERS.from_2y_to_3y.check_days_old(current_days_old);
let was_from_2y_to_3y = UTXO_FILTERS.from_2y_to_3y.check_days_old(previous_days_old);
if is_from_2y_to_3y && !was_from_2y_to_3y {
apply_if_current_only(&mut self.from_2y_to_3y);
} else if was_from_2y_to_3y && !is_from_2y_to_3y {
apply_if_previous_only(&mut self.from_2y_to_3y);
}
let is_from_3y_to_5y = UTXO_FILTERS.from_3y_to_5y.check_days_old(current_days_old);
let was_from_3y_to_5y = UTXO_FILTERS.from_3y_to_5y.check_days_old(previous_days_old);
if is_from_3y_to_5y && !was_from_3y_to_5y {
apply_if_current_only(&mut self.from_3y_to_5y);
} else if was_from_3y_to_5y && !is_from_3y_to_5y {
apply_if_previous_only(&mut self.from_3y_to_5y);
}
let is_from_5y_to_7y = UTXO_FILTERS.from_5y_to_7y.check_days_old(current_days_old);
let was_from_5y_to_7y = UTXO_FILTERS.from_5y_to_7y.check_days_old(previous_days_old);
if is_from_5y_to_7y && !was_from_5y_to_7y {
apply_if_current_only(&mut self.from_5y_to_7y);
} else if was_from_5y_to_7y && !is_from_5y_to_7y {
apply_if_previous_only(&mut self.from_5y_to_7y);
}
let is_from_7y_to_10y = UTXO_FILTERS.from_7y_to_10y.check_days_old(current_days_old);
let was_from_7y_to_10y = UTXO_FILTERS
.from_7y_to_10y
.check_days_old(previous_days_old);
if is_from_7y_to_10y && !was_from_7y_to_10y {
apply_if_current_only(&mut self.from_7y_to_10y);
} else if was_from_7y_to_10y && !is_from_7y_to_10y {
apply_if_previous_only(&mut self.from_7y_to_10y);
}
let is_from_10y_to_15y = UTXO_FILTERS
.from_10y_to_15y
.check_days_old(current_days_old);
let was_from_10y_to_15y = UTXO_FILTERS
.from_10y_to_15y
.check_days_old(previous_days_old);
if is_from_10y_to_15y && !was_from_10y_to_15y {
apply_if_current_only(&mut self.from_10y_to_15y);
} else if was_from_10y_to_15y && !is_from_10y_to_15y {
apply_if_previous_only(&mut self.from_10y_to_15y);
}
let is_from_1y = UTXO_FILTERS.from_1y.check_days_old(current_days_old);
let was_from_1y = UTXO_FILTERS.from_1y.check_days_old(previous_days_old);
if is_from_1y && !was_from_1y {
apply_if_current_only(&mut self.from_1y);
} else if was_from_1y && !is_from_1y {
apply_if_previous_only(&mut self.from_1y);
}
let is_from_2y = UTXO_FILTERS.from_2y.check_days_old(current_days_old);
let was_from_2y = UTXO_FILTERS.from_2y.check_days_old(previous_days_old);
if is_from_2y && !was_from_2y {
apply_if_current_only(&mut self.from_2y);
} else if was_from_2y && !is_from_2y {
apply_if_previous_only(&mut self.from_2y);
}
let is_from_4y = UTXO_FILTERS.from_4y.check_days_old(current_days_old);
let was_from_4y = UTXO_FILTERS.from_4y.check_days_old(previous_days_old);
if is_from_4y && !was_from_4y {
apply_if_current_only(&mut self.from_4y);
} else if was_from_4y && !is_from_4y {
apply_if_previous_only(&mut self.from_4y);
}
let is_from_10y = UTXO_FILTERS.from_10y.check_days_old(current_days_old);
let was_from_10y = UTXO_FILTERS.from_10y.check_days_old(previous_days_old);
if is_from_10y && !was_from_10y {
apply_if_current_only(&mut self.from_10y);
} else if was_from_10y && !is_from_10y {
apply_if_previous_only(&mut self.from_10y);
}
let is_from_15y = UTXO_FILTERS.from_15y.check_days_old(current_days_old);
let was_from_15y = UTXO_FILTERS.from_15y.check_days_old(previous_days_old);
if is_from_15y && !was_from_15y {
apply_if_current_only(&mut self.from_15y);
} else if was_from_15y && !is_from_15y {
apply_if_previous_only(&mut self.from_15y);
}
let is_sth = UTXO_FILTERS.sth.check_days_old(current_days_old);
let was_sth = UTXO_FILTERS.sth.check_days_old(previous_days_old);
if is_sth && !was_sth {
apply_if_current_only(&mut self.sth);
} else if was_sth && !is_sth {
apply_if_previous_only(&mut self.sth);
}
let is_lth = UTXO_FILTERS.lth.check_days_old(current_days_old);
let was_lth = UTXO_FILTERS.lth.check_days_old(previous_days_old);
if is_lth && !was_lth {
if is_sth {
unreachable!()
}
apply_if_current_only(&mut self.lth);
} else if was_lth && !is_lth {
if was_sth {
unreachable!()
}
// unreachable!();
apply_if_previous_only(&mut self.lth);
}
}
/// Includes epochs since it's the initial apply
pub fn initial_filtered_apply(
&mut self,
days_old: &u32,
height: &Height,
apply: impl Fn(&mut T),
) {
if UTXO_FILTERS.up_to_1d.check(days_old, height) {
apply(&mut self.up_to_1d);
} else if UTXO_FILTERS.from_1d_to_1w.check(days_old, height) {
apply(&mut self.from_1d_to_1w);
} else if UTXO_FILTERS.from_1w_to_1m.check(days_old, height) {
apply(&mut self.from_1w_to_1m);
} else if UTXO_FILTERS.from_1m_to_3m.check(days_old, height) {
apply(&mut self.from_1m_to_3m);
} else if UTXO_FILTERS.from_3m_to_6m.check(days_old, height) {
apply(&mut self.from_3m_to_6m);
} else if UTXO_FILTERS.from_6m_to_1y.check(days_old, height) {
apply(&mut self.from_6m_to_1y);
} else if UTXO_FILTERS.from_1y_to_2y.check(days_old, height) {
apply(&mut self.from_1y_to_2y);
} else if UTXO_FILTERS.from_2y_to_3y.check(days_old, height) {
apply(&mut self.from_2y_to_3y);
} else if UTXO_FILTERS.from_3y_to_5y.check(days_old, height) {
apply(&mut self.from_3y_to_5y);
} else if UTXO_FILTERS.from_5y_to_7y.check(days_old, height) {
apply(&mut self.from_5y_to_7y);
} else if UTXO_FILTERS.from_7y_to_10y.check(days_old, height) {
apply(&mut self.from_7y_to_10y);
} else if UTXO_FILTERS.from_10y_to_15y.check(days_old, height) {
apply(&mut self.from_10y_to_15y);
}
if UTXO_FILTERS.epoch_1.check(days_old, height) {
apply(&mut self.epoch_1);
} else if UTXO_FILTERS.epoch_2.check(days_old, height) {
apply(&mut self.epoch_2);
} else if UTXO_FILTERS.epoch_3.check(days_old, height) {
apply(&mut self.epoch_3);
} else if UTXO_FILTERS.epoch_4.check(days_old, height) {
apply(&mut self.epoch_4);
} else if UTXO_FILTERS.epoch_5.check(days_old, height) {
apply(&mut self.epoch_5);
}
if UTXO_FILTERS.sth.check(days_old, height) {
apply(&mut self.sth);
} else if UTXO_FILTERS.lth.check(days_old, height) {
apply(&mut self.lth);
} else {
unreachable!()
}
if UTXO_FILTERS.from_1y.check(days_old, height) {
apply(&mut self.from_1y);
}
if UTXO_FILTERS.from_2y.check(days_old, height) {
apply(&mut self.from_2y);
}
if UTXO_FILTERS.from_4y.check(days_old, height) {
apply(&mut self.from_4y);
}
if UTXO_FILTERS.from_10y.check(days_old, height) {
apply(&mut self.from_10y);
}
if UTXO_FILTERS.from_15y.check(days_old, height) {
apply(&mut self.from_15y);
}
if UTXO_FILTERS.up_to_15y.check(days_old, height) {
apply(&mut self.up_to_15y);
} else {
return;
}
if UTXO_FILTERS.up_to_10y.check(days_old, height) {
apply(&mut self.up_to_10y);
} else {
return;
}
if UTXO_FILTERS.up_to_7y.check(days_old, height) {
apply(&mut self.up_to_7y);
} else {
return;
}
if UTXO_FILTERS.up_to_5y.check(days_old, height) {
apply(&mut self.up_to_5y);
} else {
return;
}
if UTXO_FILTERS.up_to_3y.check(days_old, height) {
apply(&mut self.up_to_3y);
} else {
return;
}
if UTXO_FILTERS.up_to_2y.check(days_old, height) {
apply(&mut self.up_to_2y);
} else {
return;
}
if UTXO_FILTERS.up_to_1y.check(days_old, height) {
apply(&mut self.up_to_1y);
} else {
return;
}
if UTXO_FILTERS.up_to_6m.check(days_old, height) {
apply(&mut self.up_to_6m);
} else {
return;
}
if UTXO_FILTERS.up_to_5m.check(days_old, height) {
apply(&mut self.up_to_5m);
} else {
return;
}
if UTXO_FILTERS.up_to_4m.check(days_old, height) {
apply(&mut self.up_to_4m);
} else {
return;
}
if UTXO_FILTERS.up_to_3m.check(days_old, height) {
apply(&mut self.up_to_3m);
} else {
return;
}
if UTXO_FILTERS.up_to_2m.check(days_old, height) {
apply(&mut self.up_to_2m);
} else {
return;
}
if UTXO_FILTERS.up_to_1m.check(days_old, height) {
apply(&mut self.up_to_1m);
} else {
return;
}
if UTXO_FILTERS.up_to_1w.check(days_old, height) {
apply(&mut self.up_to_1w);
}
}
#[inline(always)]
pub fn as_vec(&self) -> Vec<(&T, UTXOCohortId)> {
vec![
(&self.up_to_1d, UTXOCohortId::UpTo1d),
(&self.up_to_1w, UTXOCohortId::UpTo1w),
(&self.up_to_1m, UTXOCohortId::UpTo1m),
(&self.up_to_2m, UTXOCohortId::UpTo2m),
(&self.up_to_3m, UTXOCohortId::UpTo3m),
(&self.up_to_4m, UTXOCohortId::UpTo4m),
(&self.up_to_5m, UTXOCohortId::UpTo5m),
(&self.up_to_6m, UTXOCohortId::UpTo6m),
(&self.up_to_1y, UTXOCohortId::UpTo1y),
(&self.up_to_2y, UTXOCohortId::UpTo2y),
(&self.up_to_3y, UTXOCohortId::UpTo3y),
(&self.up_to_5y, UTXOCohortId::UpTo5y),
(&self.up_to_7y, UTXOCohortId::UpTo7y),
(&self.up_to_10y, UTXOCohortId::UpTo10y),
(&self.up_to_15y, UTXOCohortId::UpTo15y),
(&self.from_1d_to_1w, UTXOCohortId::From1dTo1w),
(&self.from_1w_to_1m, UTXOCohortId::From1wTo1m),
(&self.from_1m_to_3m, UTXOCohortId::From1mTo3m),
(&self.from_3m_to_6m, UTXOCohortId::From3mTo6m),
(&self.from_6m_to_1y, UTXOCohortId::From6mTo1y),
(&self.from_1y_to_2y, UTXOCohortId::From1yTo2y),
(&self.from_2y_to_3y, UTXOCohortId::From2yTo3y),
(&self.from_3y_to_5y, UTXOCohortId::From3yTo5y),
(&self.from_5y_to_7y, UTXOCohortId::From5yTo7y),
(&self.from_7y_to_10y, UTXOCohortId::From7yTo10y),
(&self.from_10y_to_15y, UTXOCohortId::From10yTo15y),
(&self.from_1y, UTXOCohortId::From1y),
(&self.from_2y, UTXOCohortId::From2y),
(&self.from_4y, UTXOCohortId::From4y),
(&self.from_10y, UTXOCohortId::From10y),
(&self.from_15y, UTXOCohortId::From15y),
(&self.epoch_1, UTXOCohortId::Epoch1),
(&self.epoch_2, UTXOCohortId::Epoch2),
(&self.epoch_3, UTXOCohortId::Epoch3),
(&self.epoch_4, UTXOCohortId::Epoch4),
(&self.epoch_5, UTXOCohortId::Epoch5),
(&self.sth, UTXOCohortId::ShortTermHolders),
(&self.lth, UTXOCohortId::LongTermHolders),
]
}
#[inline(always)]
pub fn as_mut_vec(&mut self) -> Vec<(&mut T, UTXOCohortId)> {
vec![
(&mut self.up_to_1d, UTXOCohortId::UpTo1d),
(&mut self.up_to_1w, UTXOCohortId::UpTo1w),
(&mut self.up_to_1m, UTXOCohortId::UpTo1m),
(&mut self.up_to_2m, UTXOCohortId::UpTo2m),
(&mut self.up_to_3m, UTXOCohortId::UpTo3m),
(&mut self.up_to_4m, UTXOCohortId::UpTo4m),
(&mut self.up_to_5m, UTXOCohortId::UpTo5m),
(&mut self.up_to_6m, UTXOCohortId::UpTo6m),
(&mut self.up_to_1y, UTXOCohortId::UpTo1y),
(&mut self.up_to_2y, UTXOCohortId::UpTo2y),
(&mut self.up_to_3y, UTXOCohortId::UpTo3y),
(&mut self.up_to_5y, UTXOCohortId::UpTo5y),
(&mut self.up_to_7y, UTXOCohortId::UpTo7y),
(&mut self.up_to_10y, UTXOCohortId::UpTo10y),
(&mut self.up_to_15y, UTXOCohortId::UpTo15y),
(&mut self.from_1d_to_1w, UTXOCohortId::From1dTo1w),
(&mut self.from_1w_to_1m, UTXOCohortId::From1wTo1m),
(&mut self.from_1m_to_3m, UTXOCohortId::From1mTo3m),
(&mut self.from_3m_to_6m, UTXOCohortId::From3mTo6m),
(&mut self.from_6m_to_1y, UTXOCohortId::From6mTo1y),
(&mut self.from_1y_to_2y, UTXOCohortId::From1yTo2y),
(&mut self.from_2y_to_3y, UTXOCohortId::From2yTo3y),
(&mut self.from_3y_to_5y, UTXOCohortId::From3yTo5y),
(&mut self.from_5y_to_7y, UTXOCohortId::From5yTo7y),
(&mut self.from_7y_to_10y, UTXOCohortId::From7yTo10y),
(&mut self.from_10y_to_15y, UTXOCohortId::From10yTo15y),
(&mut self.from_1y, UTXOCohortId::From1y),
(&mut self.from_2y, UTXOCohortId::From2y),
(&mut self.from_4y, UTXOCohortId::From4y),
(&mut self.from_10y, UTXOCohortId::From10y),
(&mut self.from_15y, UTXOCohortId::From15y),
(&mut self.epoch_1, UTXOCohortId::Epoch1),
(&mut self.epoch_2, UTXOCohortId::Epoch2),
(&mut self.epoch_3, UTXOCohortId::Epoch3),
(&mut self.epoch_4, UTXOCohortId::Epoch4),
(&mut self.epoch_5, UTXOCohortId::Epoch5),
(&mut self.sth, UTXOCohortId::ShortTermHolders),
(&mut self.lth, UTXOCohortId::LongTermHolders),
]
}
}
impl<T> SplitByUTXOCohort<Option<T>> {
    /// Unwraps every per-cohort `Option`, yielding a fully-populated split.
    ///
    /// # Panics
    /// Panics if any cohort field is `None`.
    pub fn unwrap(self) -> SplitByUTXOCohort<T> {
        SplitByUTXOCohort {
            sth: self.sth.unwrap(),
            lth: self.lth.unwrap(),
            up_to_1d: self.up_to_1d.unwrap(),
            up_to_1w: self.up_to_1w.unwrap(),
            up_to_1m: self.up_to_1m.unwrap(),
            up_to_2m: self.up_to_2m.unwrap(),
            up_to_3m: self.up_to_3m.unwrap(),
            up_to_4m: self.up_to_4m.unwrap(),
            up_to_5m: self.up_to_5m.unwrap(),
            up_to_6m: self.up_to_6m.unwrap(),
            up_to_1y: self.up_to_1y.unwrap(),
            up_to_2y: self.up_to_2y.unwrap(),
            up_to_3y: self.up_to_3y.unwrap(),
            up_to_5y: self.up_to_5y.unwrap(),
            up_to_7y: self.up_to_7y.unwrap(),
            up_to_10y: self.up_to_10y.unwrap(),
            up_to_15y: self.up_to_15y.unwrap(),
            from_1d_to_1w: self.from_1d_to_1w.unwrap(),
            from_1w_to_1m: self.from_1w_to_1m.unwrap(),
            from_1m_to_3m: self.from_1m_to_3m.unwrap(),
            from_3m_to_6m: self.from_3m_to_6m.unwrap(),
            from_6m_to_1y: self.from_6m_to_1y.unwrap(),
            from_1y_to_2y: self.from_1y_to_2y.unwrap(),
            from_2y_to_3y: self.from_2y_to_3y.unwrap(),
            from_3y_to_5y: self.from_3y_to_5y.unwrap(),
            from_5y_to_7y: self.from_5y_to_7y.unwrap(),
            from_7y_to_10y: self.from_7y_to_10y.unwrap(),
            from_10y_to_15y: self.from_10y_to_15y.unwrap(),
            from_1y: self.from_1y.unwrap(),
            from_2y: self.from_2y.unwrap(),
            from_4y: self.from_4y.unwrap(),
            from_10y: self.from_10y.unwrap(),
            from_15y: self.from_15y.unwrap(),
            epoch_1: self.epoch_1.unwrap(),
            epoch_2: self.epoch_2.unwrap(),
            epoch_3: self.epoch_3.unwrap(),
            epoch_4: self.epoch_4.unwrap(),
            epoch_5: self.epoch_5.unwrap(),
        }
    }
}

View File

@@ -0,0 +1,31 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
use crate::structs::Counter;
use super::AnyState;
/// Tallies of address/script categories, one `Counter` per category.
///
/// Persisted via the `AnyState` impl below; see `clear` for reset behavior.
#[derive(Default, Debug, Encode, Decode, Serialize, Deserialize, Allocative)]
pub struct Counters {
    // Presumably counts multisig outputs seen during parsing — TODO confirm
    // against the call sites that increment these counters.
    pub multisig_addresses: Counter,
    pub op_return_addresses: Counter,
    pub push_only_addresses: Counter,
    pub unknown_addresses: Counter,
    pub empty_addresses: Counter,
}
// Intentionally empty: inherent methods may be added later; behavior lives in
// the `AnyState` impl.
impl Counters {}
impl AnyState for Counters {
    // Identifier for this state (presumably its on-disk file name — confirm
    // against `AnyState`'s import/export implementations).
    fn name<'a>() -> &'a str {
        "counters"
    }

    /// Resets every counter back to its initial value.
    fn clear(&mut self) {
        self.multisig_addresses.reset();
        // Fix: `op_return_addresses` was the only field not reset here,
        // which would leave a stale count surviving a clear. NOTE(review):
        // assuming the omission was accidental — every sibling field is reset.
        self.op_return_addresses.reset();
        self.push_only_addresses.reset();
        self.unknown_addresses.reset();
        self.empty_addresses.reset();
    }
}

View File

@@ -0,0 +1,50 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use derive_deref::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use crate::structs::{BlockData, BlockPath, DateData};
use super::AnyState;
/// Newtype around `Vec<DateData>`; `Deref`/`DerefMut` expose the full `Vec`
/// API. Indexed by `BlockPath::date_index` (see `get_date_data`).
#[derive(Default, Deref, DerefMut, Debug, Serialize, Deserialize, Encode, Decode, Allocative)]
pub struct DateDataVec(Vec<DateData>);
impl DateDataVec {
    /// Returns the most recent block, skipping trailing dates whose block
    /// lists are empty.
    pub fn last_block(&self) -> Option<&BlockData> {
        // `next_back()` walks from the rear; `last()` would consume the whole
        // flattened iterator (O(total blocks)). Double-endedness of this exact
        // chain is already relied upon by `second_last_block`'s `.rev()`.
        self.iter().flat_map(|date_data| &date_data.blocks).next_back()
    }

    /// Mutable variant of [`Self::last_block`].
    pub fn last_mut_block(&mut self) -> Option<&mut BlockData> {
        self.iter_mut()
            .flat_map(|date_data| &mut date_data.blocks)
            .next_back()
    }

    /// Returns the block immediately preceding the most recent one, if any
    /// (it may live in an earlier `DateData`).
    pub fn second_last_block(&self) -> Option<&BlockData> {
        self.iter()
            .flat_map(|date_data| &date_data.blocks)
            .rev()
            .nth(1)
    }

    /// Looks up the `DateData` addressed by `block_path.date_index`, or
    /// `None` when the index is out of bounds.
    pub fn get_date_data(&self, block_path: &BlockPath) -> Option<&DateData> {
        self.0.get(block_path.date_index as usize)
    }

    /// Looks up the `BlockData` addressed by `block_path`, or `None` when
    /// either index is out of bounds.
    pub fn get_block_data(&self, block_path: &BlockPath) -> Option<&BlockData> {
        self.0
            .get(block_path.date_index as usize)
            .and_then(|date_data| date_data.blocks.get(block_path.block_index as usize))
    }
}
impl AnyState for DateDataVec {
    // Identifier for this state (presumably its on-disk file name — confirm
    // against `AnyState`'s import/export implementations).
    fn name<'a>() -> &'a str {
        "date_data_vec"
    }

    // Drops all entries; `Vec::clear` keeps the allocated capacity.
    fn clear(&mut self) {
        self.0.clear();
    }
}

64
_src/parser/states/mod.rs Normal file
View File

@@ -0,0 +1,64 @@
use std::{fs, thread};
mod _trait;
mod cohorts_states;
mod counters;
mod date_data_vec;
pub use _trait::*;
use allocative::Allocative;
pub use cohorts_states::*;
use counters::*;
use date_data_vec::*;
use log::info;
use crate::structs::Config;
/// Aggregate of the parser's mutable states.
#[derive(Default, Allocative)]
pub struct States {
    pub address_counters: Counters,
    pub date_data_vec: DateDataVec,
    // Not persisted: `import` always sets these to `None`; they are rebuilt
    // elsewhere when needed.
    pub address_cohorts_durable_states: Option<AddressCohortsDurableStates>,
    pub utxo_cohorts_durable_states: Option<UTXOCohortsDurableStates>,
}
impl States {
    /// Imports the persisted states from `config.path_states()`, creating the
    /// directory if needed.
    ///
    /// The cohort durable states are not persisted and start as `None`.
    pub fn import(config: &Config) -> color_eyre::Result<Self> {
        fs::create_dir_all(config.path_states())?;

        let date_data_vec = DateDataVec::import(config)?;

        let address_counters = Counters::import(config)?;

        Ok(Self {
            address_cohorts_durable_states: None,
            address_counters,
            date_data_vec,
            utxo_cohorts_durable_states: None,
        })
    }

    /// Clears the in-memory states (address-related ones only when
    /// `include_addresses` is true).
    ///
    /// Reset errors are deliberately ignored: this is best-effort cleanup.
    pub fn reset(&mut self, config: &Config, include_addresses: bool) {
        // Fix: log message typo "Reseting" -> "Resetting".
        info!("Resetting all states...");

        let _ = self.date_data_vec.reset(config);
        self.utxo_cohorts_durable_states = None;

        if include_addresses {
            let _ = self.address_counters.reset(config);
            self.address_cohorts_durable_states = None;
        }
    }

    /// Exports the persisted states to disk, running both exports in
    /// parallel.
    ///
    /// # Errors
    ///
    /// Returns the first export error instead of panicking inside the worker
    /// threads (the previous behavior unwrapped inside the spawned closures,
    /// turning recoverable I/O errors into thread panics).
    pub fn export(&self, config: &Config) -> color_eyre::Result<()> {
        let (counters_result, dates_result) = thread::scope(|s| {
            let counters = s.spawn(|| self.address_counters.export(config));
            let dates = s.spawn(|| self.date_data_vec.export(config));
            // `join` only fails if the thread itself panicked, which remains
            // a bug (broken invariant), hence `expect`.
            (
                counters.join().expect("address counters export thread panicked"),
                dates.join().expect("date data vec export thread panicked"),
            )
        });

        counters_result?;
        dates_result?;

        Ok(())
    }
}