git: reset

This commit is contained in:
k
2024-06-23 17:38:53 +02:00
commit a1a576d088
375 changed files with 40952 additions and 0 deletions

View File

@@ -0,0 +1,47 @@
use std::thread;
use crate::{
databases::Databases,
datasets::AllDatasets,
states::States,
structs::WNaiveDate,
utils::{log, time},
};
/// Borrowed handles to everything a single export pass may persist to disk.
///
/// `databases` and `states` are `Option`s: the caller (`iter_blocks`) only
/// passes them when the current height is considered safe to persist,
/// otherwise only the datasets are exported.
pub struct ExportedData<'a> {
    /// Databases to flush; `None` skips the database export for this pass.
    pub databases: Option<&'a mut Databases>,
    /// Datasets are exported on every pass.
    pub datasets: &'a mut AllDatasets,
    /// Date of the last processed block.
    pub date: WNaiveDate,
    /// Height of the last processed block.
    pub height: usize,
    /// Chain states to snapshot; `None` skips the states export.
    pub states: Option<&'a States>,
}
/// Persists datasets, and optionally databases and states, to disk.
///
/// Datasets are saved first on the current thread; databases and states are
/// independent of each other and are saved concurrently in scoped threads.
///
/// # Errors
/// Returns the first error produced by any of the three export steps.
/// (The original version discarded the spawned threads' `Result`s, silently
/// swallowing database/states export failures; the join handles are now kept
/// and their results propagated.)
pub fn export(
    ExportedData {
        databases,
        datasets,
        states,
        height,
        date,
    }: ExportedData,
) -> color_eyre::Result<()> {
    log("Exporting... (Don't close !!)");

    time("Total save time", || -> color_eyre::Result<()> {
        time("Datasets saved", || datasets.export())?;

        thread::scope(|s| -> color_eyre::Result<()> {
            let databases_handle = databases.map(|databases| {
                s.spawn(move || time("Databases saved", || databases.export(height, date)))
            });

            let states_handle =
                states.map(|states| s.spawn(move || time("States saved", || states.export())));

            // `join()` only fails if the thread panicked, in which case we
            // re-panic via `unwrap()`; a normal `Err` is propagated with `?`.
            if let Some(handle) = databases_handle {
                handle.join().unwrap()?;
            }

            if let Some(handle) = states_handle {
                handle.join().unwrap()?;
            }

            Ok(())
        })?;

        Ok(())
    })?;

    Ok(())
}

View File

@@ -0,0 +1,199 @@
use std::{collections::BTreeSet, time::Instant};
use chrono::Datelike;
use export::ExportedData;
use itertools::Itertools;
use parse::ParseData;
use crate::{
actions::{export, find_first_inserted_unsafe_height, parse},
bitcoin::{check_if_height_safe, BitcoinDB, NUMBER_OF_UNSAFE_BLOCKS},
databases::Databases,
datasets::{AllDatasets, ComputeData},
states::States,
structs::{DateData, WNaiveDate},
utils::{generate_allocation_files, log, time},
};
/// Main parsing loop: iterates over every block from the first unsafe height
/// up to `block_count`, grouping work per-day and flushing (compute + export)
/// roughly once per month or when close to the chain tip.
///
/// Loop structure:
/// - `'parsing`: one iteration per "chunk" (≈ a month of blocks); ends when
///   the block iterator is exhausted.
/// - `'days`: one iteration per calendar day within the chunk.
/// - `'blocks`: one iteration per block within the day.
///
/// # Errors
/// Propagates failures from importing datasets and from `export`.
pub fn iter_blocks(bitcoin_db: &BitcoinDB, block_count: usize) -> color_eyre::Result<()> {
    // Debug switches: flip to skip insertion/export or to dump allocation stats.
    let should_insert = true;
    let should_export = true;
    let study_ram_usage = false;

    log("Starting...");

    let mut datasets = AllDatasets::import()?;
    // RAM: 200MB at this point
    log("Imported datasets");

    let mut databases = Databases::import();
    // RAM: 200MB too
    log("Imported databases");

    // Falls back to empty states if the saved ones can't be imported.
    let mut states =
        States::import(&mut databases.address_index_to_address_data, &datasets).unwrap_or_default();
    log("Imported states");

    let first_unsafe_heights =
        find_first_inserted_unsafe_height(&mut states, &mut databases, &mut datasets);

    // Resume from whichever of insert/compute is furthest behind.
    let mut height = first_unsafe_heights.min();

    log(&format!("Starting parsing at height: {height}"));

    let mut block_iter = bitcoin_db.iter_block(height, block_count);

    // One-block lookahead: needed to know whether the current block is the
    // last one of its date (`is_date_last_block`).
    let mut next_block_opt = None;
    // Date currently being processed; `None` forces re-initialization on the
    // first block of each day.
    let mut blocks_loop_date = None;

    'parsing: loop {
        let instant = Instant::now();

        // Heights/dates processed during this chunk, fed to `datasets.compute`.
        let mut processed_heights = BTreeSet::new();
        let mut processed_dates = BTreeSet::new();

        'days: loop {
            let mut blocks_loop_i = 0;

            if next_block_opt.is_some() {
                blocks_loop_date.take();
            }

            'blocks: loop {
                // Use the lookahead block if there is one, then refill it.
                let current_block_opt = next_block_opt.take().or_else(|| block_iter.next());
                next_block_opt = block_iter.next();

                if let Some(current_block) = current_block_opt {
                    let timestamp = current_block.header.time;

                    let current_block_date = WNaiveDate::from_timestamp(timestamp);
                    let current_block_height = height + blocks_loop_i;

                    let next_block_date = next_block_opt
                        .as_ref()
                        .map(|next_block| WNaiveDate::from_timestamp(next_block.header.time));

                    // Always run for the first block of the loop
                    if blocks_loop_date.is_none() {
                        blocks_loop_date.replace(current_block_date);

                        // Open a new DateData only when moving to a later date
                        // (or when the vec is empty).
                        if states
                            .date_data_vec
                            .last()
                            .map(|date_data| *date_data.date < *current_block_date)
                            .unwrap_or(true)
                        {
                            states
                                .date_data_vec
                                .push(DateData::new(current_block_date, vec![]));
                        }

                        log(&format!(
                            "Processing {current_block_date} (height: {height})..."
                        ));
                    }

                    let blocks_loop_date = blocks_loop_date.unwrap();

                    if current_block_date > blocks_loop_date {
                        panic!("current block should always have the same date as the current blocks loop");
                    }

                    let is_date_last_block = next_block_date
                        // Do NOT change `blocks_loop_date` to `current_block_date` !!!
                        .map_or(true, |next_block_date| blocks_loop_date < next_block_date);

                    processed_heights.insert(current_block_height);

                    if should_insert && first_unsafe_heights.inserted <= current_block_height {
                        let compute_addresses = databases.check_if_needs_to_compute_addresses(
                            current_block_height,
                            blocks_loop_date,
                        );

                        parse(ParseData {
                            bitcoin_db,
                            block: current_block,
                            block_index: blocks_loop_i,
                            compute_addresses,
                            databases: &mut databases,
                            datasets: &mut datasets,
                            date: blocks_loop_date,
                            first_date_height: height,
                            height: current_block_height,
                            is_date_last_block,
                            states: &mut states,
                            timestamp,
                        });
                    }

                    blocks_loop_i += 1;

                    if is_date_last_block {
                        processed_dates.insert(blocks_loop_date);

                        height += blocks_loop_i;

                        let is_new_month = next_block_date
                            .map_or(true, |next_block_date| next_block_date.day() == 1);

                        // NOTE(review): `block_count - NUMBER_OF_UNSAFE_BLOCKS * 3`
                        // underflows if `block_count` < 300 — presumably never the
                        // case on mainnet, but worth confirming.
                        let is_close_to_the_end =
                            height > (block_count - (NUMBER_OF_UNSAFE_BLOCKS * 3));

                        if is_new_month || is_close_to_the_end {
                            break 'days;
                        }

                        break 'blocks;
                    }
                } else {
                    break 'parsing;
                }
            }
        }

        // Don't remember why -1
        let last_height = height - 1;

        log(&format!(
            "Parsing month took {} seconds (last height: {last_height})\n",
            instant.elapsed().as_secs_f32(),
        ));

        if first_unsafe_heights.computed <= last_height {
            datasets.compute(ComputeData {
                dates: &processed_dates.into_iter().collect_vec(),
                heights: &processed_heights.into_iter().collect_vec(),
            });
        }

        if should_export {
            // Databases/states are only persisted once the height is outside
            // the re-org window.
            let is_safe = check_if_height_safe(height, block_count);

            export(ExportedData {
                databases: is_safe.then_some(&mut databases),
                datasets: &mut datasets,
                date: blocks_loop_date.unwrap(),
                height: last_height,
                states: is_safe.then_some(&states),
            })?;

            if study_ram_usage {
                time("Exporing allocation files", || {
                    generate_allocation_files(&datasets, &databases, &states, last_height)
                })?;
            }
        } else {
            log("Skipping export");
        }

        println!();
    }

    Ok(())
}

View File

@@ -0,0 +1,127 @@
use crate::{
databases::Databases,
datasets::{AllDatasets, AnyDatasets},
states::States,
utils::log,
};
/// First heights that are NOT yet safely persisted, one per pipeline stage.
/// `default()` (both zero) means "start over from the genesis block".
#[derive(Default, Debug)]
pub struct Heights {
    /// First height whose raw block data still needs to be inserted.
    pub inserted: usize,
    /// First height whose derived datasets still need to be computed.
    pub computed: usize,
}
impl Heights {
    /// The smaller of the two tracked heights — i.e. the height the whole
    /// pipeline must resume from.
    pub fn min(&self) -> usize {
        if self.inserted < self.computed {
            self.inserted
        } else {
            self.computed
        }
    }
}
/// Determines the heights at which inserting and computing must resume,
/// by cross-checking the persisted states, databases and datasets.
///
/// If any of the persisted artifacts are missing or mutually inconsistent,
/// this resets `states` and `databases` (optionally including address data)
/// and returns `Heights::default()` so parsing starts from scratch.
pub fn find_first_inserted_unsafe_height(
    states: &mut States,
    databases: &mut Databases,
    datasets: &mut AllDatasets,
) -> Heights {
    let min_initial_inserted_last_address_height = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_height
        .as_ref()
        .cloned();

    let min_initial_inserted_last_address_date = datasets
        .address
        .get_min_initial_states()
        .inserted
        .last_date
        .as_ref()
        .cloned();

    let usable_databases = databases.check_if_usable(
        min_initial_inserted_last_address_height,
        min_initial_inserted_last_address_date,
    );

    // The last date present in the saved states is the anchor everything
    // else is checked against.
    states
        .date_data_vec
        .iter()
        .last()
        .map(|date_data| date_data.date)
        .and_then(|last_safe_date| {
            if !usable_databases {
                log("Unusable databases");
                return None;
            }

            let datasets_min_initial_states = datasets.get_min_initial_states().to_owned();

            let min_datasets_inserted_last_height = datasets_min_initial_states.inserted.last_height;
            let min_datasets_inserted_last_date = datasets_min_initial_states.inserted.last_date;

            log(&format!("min_datasets_inserted_last_height: {:?}", min_datasets_inserted_last_height));
            log(&format!("min_datasets_inserted_last_date: {:?}", min_datasets_inserted_last_date));

            // Datasets lagging behind the saved states ⇒ can't trust the combo.
            let inserted_last_date_is_older_than_saved_state = min_datasets_inserted_last_date.map_or(true, |min_datasets_last_date| min_datasets_last_date < last_safe_date);

            if inserted_last_date_is_older_than_saved_state {
                dbg!(min_datasets_inserted_last_date , *last_safe_date);
                return None;
            }

            datasets
                .date_metadata
                .last_height
                .get_or_import(&last_safe_date)
                .and_then(|last_safe_height| {
                    // Same consistency check, but on heights instead of dates.
                    let inserted_heights_and_dates_are_out_of_sync = min_datasets_inserted_last_height.map_or(true, |min_datasets_inserted_last_height| min_datasets_inserted_last_height < last_safe_height);

                    if inserted_heights_and_dates_are_out_of_sync {
                        log(&format!("last_safe_height ({last_safe_height}) > min_datasets_height ({min_datasets_inserted_last_height:?})"));

                        None
                    } else {
                        // Computed datasets may lag behind inserted ones; resume
                        // computing right after the last fully computed date.
                        let computed = datasets_min_initial_states.computed.last_date.and_then(
                            |last_date| datasets.date_metadata
                                .last_height
                                .get(&last_date)
                                .and_then(|last_date_height| {
                                    if datasets_min_initial_states.computed.last_height.map_or(true, |last_height| {
                                        last_height < last_date_height
                                    }) {
                                        None
                                    } else {
                                        Some(last_date_height + 1)
                                    }
                                })
                        ).unwrap_or_default();

                        Some(Heights {
                            inserted: last_safe_height + 1,
                            computed,
                        })
                    }
                }
            )
        })
        .unwrap_or_else(|| {
            log("Starting over...");

            // Address data only needs to be wiped when the address-related
            // artifacts themselves are unusable/missing.
            let include_addresses = !usable_databases
                || min_initial_inserted_last_address_date.is_none()
                || min_initial_inserted_last_address_height.is_none();

            // if include_addresses {
            //     dbg!(include_addresses);
            //     panic!("");
            // }

            states.reset(include_addresses);

            databases.reset(include_addresses);

            Heights::default()
        })
}

View File

@@ -0,0 +1,9 @@
mod export;
mod iter_blocks;
mod min_height;
mod parse;
pub use export::*;
pub use iter_blocks::*;
pub use min_height::*;
pub use parse::*;

981
parser/src/actions/parse.rs Normal file
View File

@@ -0,0 +1,981 @@
use std::{collections::BTreeMap, ops::ControlFlow, thread};
use bitcoin::{Block, Txid};
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
bitcoin::BitcoinDB,
databases::{
AddressIndexToAddressData, AddressIndexToEmptyAddressData, AddressToAddressIndex,
Databases, TxidToTxData, TxoutIndexToAddressIndex, TxoutIndexToAmount,
},
datasets::{AllDatasets, InsertData},
states::{
AddressCohortsInputStates, AddressCohortsOutputStates, AddressCohortsRealizedStates,
States, UTXOCohortsOneShotStates, UTXOCohortsSentStates,
},
structs::{
Address, AddressData, AddressRealizedData, BlockData, BlockPath, Counter, EmptyAddressData,
PartialTxoutData, Price, SentData, TxData, TxoutIndex, WAmount, WNaiveDate,
},
};
/// Everything `parse` needs to process a single block.
pub struct ParseData<'a> {
    /// Handle to the raw blockchain, used for fallback txout-value lookups.
    pub bitcoin_db: &'a BitcoinDB,
    /// The block to parse (owned — consumed by this pass).
    pub block: Block,
    /// Index of this block within the current date (0-based).
    pub block_index: usize,
    /// Whether per-address states must be computed for this block.
    pub compute_addresses: bool,
    pub databases: &'a mut Databases,
    pub datasets: &'a mut AllDatasets,
    /// Date the block belongs to.
    pub date: WNaiveDate,
    /// Height of the first block of `date`.
    pub first_date_height: usize,
    /// Height of this block.
    pub height: usize,
    /// True when this is the last block of its date (enables date-level work).
    pub is_date_last_block: bool,
    pub states: &'a mut States,
    /// Block header timestamp (seconds since epoch).
    pub timestamp: u32,
}
/// Parses one block end-to-end:
/// 1. price/metadata lookups,
/// 2. parallel pre-processing of outputs and inputs,
/// 3. sequential walk over every transaction (outputs first, then inputs),
///    updating databases, per-block and per-address running states,
/// 4. parallel computation of cohort states,
/// 5. final `datasets.insert` with everything gathered.
///
/// Statement order is load-bearing throughout (several "MUST be before/after"
/// invariants) — do not reorder.
pub fn parse(
    ParseData {
        bitcoin_db,
        block,
        block_index,
        compute_addresses,
        databases,
        datasets,
        date,
        first_date_height,
        height,
        is_date_last_block,
        states,
        timestamp,
    }: ParseData,
) {
    // If false, expect that the code is flawless
    // or create a 0 value txid database
    let enable_check_if_txout_value_is_zero_in_db: bool = true;

    // Index of the current date in the states vec (a DateData was pushed by
    // the caller before this runs).
    let date_index = states.date_data_vec.len() - 1;

    let previous_timestamp = if height > 0 {
        Some(
            datasets
                .block_metadata
                .timestamp
                .get_or_import(&(height - 1)),
        )
    } else {
        None
    };

    // Price lookups panic on miss: a block without a price is unrecoverable.
    let block_price = Price::from_dollar(
        datasets
            .price
            .get_height_ohlc(height, timestamp, previous_timestamp)
            .unwrap_or_else(|_| panic!("Expect {height} to have a price"))
            .close as f64,
    );

    let date_price = Price::from_dollar(
        datasets
            .price
            .get_date_ohlc(date)
            .unwrap_or_else(|_| panic!("Expect {date} to have a price"))
            .close as f64,
    );

    let difficulty = block.header.difficulty_float();
    let block_size = block.total_size();
    let block_weight = block.weight().to_wu();
    let block_vbytes = block.weight().to_vbytes_floor();
    let block_interval =
        previous_timestamp.map_or(0, |previous_timestamp| timestamp - previous_timestamp);

    // Register this block under the current date.
    states
        .date_data_vec
        .last_mut()
        .unwrap()
        .blocks
        .push(BlockData::new(height as u32, block_price, timestamp));

    let mut block_path_to_sent_data: BTreeMap<BlockPath, SentData> = BTreeMap::default();
    // let mut received_data: ReceivedData = ReceivedData::default();
    let mut address_index_to_address_realized_data: BTreeMap<u32, AddressRealizedData> =
        BTreeMap::default();

    // Per-block accumulators fed to `datasets.insert` at the end.
    let mut coinbase = WAmount::ZERO;
    let mut satblocks_destroyed = WAmount::ZERO;
    let mut satdays_destroyed = WAmount::ZERO;
    let mut amount_sent = WAmount::ZERO;
    let mut transaction_count = 0;
    let mut fees = vec![];
    let mut fees_total = WAmount::ZERO;

    // Outputs and inputs are pre-processed concurrently: they touch disjoint
    // databases.
    let (
        TxoutsParsingResults {
            op_returns: _op_returns,
            mut partial_txout_data_vec,
            provably_unspendable: _provably_unspendable,
        },
        (mut txid_to_tx_data, mut txout_index_to_amount_and_address_index),
    ) = thread::scope(|scope| {
        let output_handle = scope.spawn(|| {
            let mut txouts_parsing_results = pre_process_outputs(
                &block,
                compute_addresses,
                &mut states.address_counters.op_return_addresses,
                &mut states.address_counters.push_only_addresses,
                &mut states.address_counters.unknown_addresses,
                &mut states.address_counters.empty_addresses,
                &mut databases.address_to_address_index,
            );

            // Reverse to get in order via pop later
            txouts_parsing_results.partial_txout_data_vec.reverse();

            txouts_parsing_results
        });

        let input_handle = scope.spawn(|| {
            pre_process_inputs(
                &block,
                &mut databases.txid_to_tx_data,
                &mut databases.txout_index_to_amount,
                &mut databases.txout_index_to_address_index,
                compute_addresses,
            )
        });

        (output_handle.join().unwrap(), input_handle.join().unwrap())
    });

    // Working copy of all address data touched by this block (only when
    // address computation is enabled).
    let mut address_index_to_address_data = compute_addresses.then(|| {
        compute_address_index_to_address_data(
            &mut databases.address_index_to_address_data,
            &mut databases.address_index_to_empty_address_data,
            &partial_txout_data_vec,
            &txout_index_to_amount_and_address_index,
            compute_addresses,
        )
    });

    // Main sequential walk over the block's transactions.
    block
        .txdata
        .iter()
        .enumerate()
        .try_for_each(|(block_tx_index, tx)| {
            let txid = tx.compute_txid();

            let tx_index = databases.txid_to_tx_data.metadata.serial as u32;

            transaction_count += 1;

            // ---
            // outputs
            // ---
            let mut utxos = BTreeMap::new();
            let mut spendable_amount = WAmount::ZERO;

            // The coinbase is, by consensus, the first transaction and only it.
            let is_coinbase = tx.is_coinbase();
            if is_coinbase != (block_tx_index == 0) {
                unreachable!();
            }

            let mut inputs_sum = WAmount::ZERO;
            let mut outputs_sum = WAmount::ZERO;

            let last_block = states.date_data_vec.last_mut_block().unwrap();

            // Before `input` to cover outputs being used in the same block as inputs
            tx.output
                .iter()
                .enumerate()
                .filter_map(|(vout, tx_out)| {
                    if vout > (u16::MAX as usize) {
                        panic!("vout can indeed be bigger than u16::MAX !");
                    }

                    let amount = WAmount::wrap(tx_out.value);

                    if is_coinbase {
                        coinbase += amount;
                    } else {
                        outputs_sum += amount;
                    }

                    partial_txout_data_vec
                        .pop()
                        .unwrap()
                        // None if not worth parsing (empty/op_return/...)
                        .map(|partial_txout_data| (vout, partial_txout_data))
                })
                .for_each(|(vout, partial_txout_data)| {
                    let vout = vout as u16;

                    let txout_index = TxoutIndex::new(tx_index, vout);

                    let PartialTxoutData {
                        address,
                        address_index_opt,
                        amount,
                    } = partial_txout_data;

                    spendable_amount += amount;

                    last_block.receive(amount);

                    utxos.insert(vout, amount);

                    databases
                        .txout_index_to_amount
                        .unsafe_insert(txout_index, amount);

                    if compute_addresses {
                        let address = address.unwrap();

                        let address_index_to_address_data =
                            address_index_to_address_data.as_mut().unwrap();

                        // Either the address is already known (db or cached
                        // puts) or a fresh serial index is allocated for it.
                        let (address_data, address_index) = {
                            if let Some(address_index) = address_index_opt.or_else(|| {
                                databases
                                    .address_to_address_index
                                    .unsafe_get_from_puts(&address)
                                    .cloned()
                            }) {
                                let address_data = address_index_to_address_data
                                    .get_mut(&address_index)
                                    .unwrap();

                                (address_data, address_index)
                            } else {
                                let address_index =
                                    databases.address_to_address_index.metadata.serial as u32;

                                let address_type = address.to_type();

                                if let Some(previous) = databases
                                    .address_to_address_index
                                    .insert(address, address_index)
                                {
                                    dbg!(previous);
                                    panic!(
                                        "address #{address_index} shouldn't be present during put"
                                    );
                                }

                                // Checked new
                                let address_data = address_index_to_address_data
                                    .entry(address_index)
                                    .and_modify(|_| {
                                        panic!("Shouldn't exist");
                                    })
                                    // Will always insert, it's to avoid insert + get
                                    .or_insert(AddressData::new(address_type));

                                (address_data, address_index)
                            }
                        };

                        // MUST be before received !
                        let address_realized_data = address_index_to_address_realized_data
                            .entry(address_index)
                            .or_insert_with(|| AddressRealizedData::default(address_data));

                        address_data.receive(amount, block_price);

                        address_realized_data.receive(amount);

                        databases
                            .txout_index_to_address_index
                            .unsafe_insert(txout_index, address_index);
                    }
                });

            // Only transactions with at least one spendable output are tracked.
            if !utxos.is_empty() {
                databases.txid_to_tx_data.insert(
                    &txid,
                    TxData::new(
                        tx_index,
                        BlockPath::new(date_index as u16, block_index as u16),
                        utxos.len() as u16,
                    ),
                );
            }

            // ---
            // inputs
            // ---
            if !is_coinbase {
                tx.input.iter().try_for_each(|txin| {
                    let outpoint = txin.previous_output;
                    let input_txid = outpoint.txid;
                    let input_vout = outpoint.vout;

                    let remove_tx_data_from_cached_puts = {
                        let mut is_tx_data_from_cached_puts = false;

                        // The spent tx is looked up in this block's snapshot
                        // first, then in the database's cached puts (same-block
                        // spends).
                        let input_tx_data = txid_to_tx_data
                            .get_mut(&input_txid)
                            .unwrap()
                            .as_mut()
                            .or_else(|| {
                                is_tx_data_from_cached_puts = true;

                                databases
                                    .txid_to_tx_data
                                    .unsafe_get_mut_from_puts(&input_txid)
                            });

                        // Can be none because 0 sats inputs happen
                        // https://mempool.space/tx/f329e55c2de9b821356e6f2c4bba923ea7030cad61120f5ced5d4429f5c86fda#vin=27
                        if input_tx_data.is_none() {
                            if !enable_check_if_txout_value_is_zero_in_db
                                || bitcoin_db
                                    .check_if_txout_value_is_zero(&input_txid, input_vout as usize)
                            {
                                return ControlFlow::Continue::<()>(());
                            }

                            dbg!((input_txid, txid, tx_index, input_vout));
                            panic!("Txid to be in txid_to_tx_data");
                        }

                        let input_tx_data = input_tx_data.unwrap();

                        let input_tx_index = input_tx_data.index;

                        let input_vout = input_vout as u16;

                        let input_txout_index = TxoutIndex::new(input_tx_index, input_vout);

                        // if input_tx_index == 2516 || input_tx_index == 2490 {
                        //     dbg!(input_tx_index, &input_tx_data.utxos);
                        // }

                        // let input_amount = input_tx_data.utxos.remove(&input_vout);

                        let input_amount_and_address_index = databases
                            .txout_index_to_amount
                            .remove(&input_txout_index)
                            .map(|amount| {
                                (
                                    amount,
                                    databases
                                        .txout_index_to_address_index
                                        .remove(&input_txout_index),
                                )
                            }) // Remove from cached puts
                            .or_else(|| {
                                txout_index_to_amount_and_address_index.remove(&input_txout_index)
                            });

                        if input_amount_and_address_index.is_none() {
                            if !enable_check_if_txout_value_is_zero_in_db
                                || bitcoin_db
                                    .check_if_txout_value_is_zero(&input_txid, input_vout as usize)
                            {
                                return ControlFlow::Continue::<()>(());
                            }

                            dbg!((
                                input_txid,
                                tx_index,
                                input_tx_index,
                                input_vout,
                                input_tx_data,
                                txid,
                            ));
                            panic!("Txout index to be in txout_index_to_txout_value");
                        }

                        input_tx_data.utxos -= 1;

                        let (input_amount, input_address_index) =
                            input_amount_and_address_index.unwrap();

                        let input_block_path = input_tx_data.block_path;

                        let BlockPath {
                            date_index: input_date_index,
                            block_index: input_block_index,
                        } = input_block_path;

                        let input_date_data = states
                            .date_data_vec
                            .get_mut(input_date_index as usize)
                            .unwrap_or_else(|| {
                                dbg!(height, &input_txid, input_block_path, input_date_index);
                                panic!()
                            });

                        let input_block_data = input_date_data
                            .blocks
                            .get_mut(input_block_index as usize)
                            .unwrap_or_else(|| {
                                dbg!(
                                    height,
                                    &input_txid,
                                    input_block_path,
                                    input_date_index,
                                    input_block_index,
                                );
                                panic!()
                            });

                        input_block_data.send(input_amount);

                        inputs_sum += input_amount;

                        block_path_to_sent_data
                            .entry(input_block_path)
                            .or_default()
                            .send(input_amount);

                        // Coin-age metrics: amount weighted by how long the
                        // coins sat still (in blocks / in days).
                        satblocks_destroyed +=
                            input_amount * (height as u64 - input_block_data.height as u64);

                        satdays_destroyed += input_amount
                            * date.signed_duration_since(*input_date_data.date).num_days() as u64;

                        if compute_addresses {
                            let input_address_index = input_address_index.unwrap_or_else(|| {
                                dbg!(
                                    height,
                                    input_amount,
                                    &input_tx_data,
                                    input_address_index,
                                    input_txout_index,
                                    txid,
                                    input_txid,
                                    input_vout
                                );
                                panic!()
                            });

                            let address_index_to_address_data =
                                address_index_to_address_data.as_mut().unwrap();

                            let input_address_data = address_index_to_address_data
                                .get_mut(&input_address_index)
                                .unwrap_or_else(|| {
                                    dbg!(
                                        input_address_index,
                                        input_txout_index,
                                        input_txid,
                                        input_vout
                                    );
                                    panic!();
                                });

                            let input_address_realized_data =
                                address_index_to_address_realized_data
                                    .entry(input_address_index)
                                    .or_insert_with(|| {
                                        AddressRealizedData::default(input_address_data)
                                    });

                            // MUST be after `or_insert_with`
                            let address_realized_profit_or_loss = input_address_data
                                .send(input_amount, block_price, input_block_data.price)
                                .unwrap_or_else(|_| {
                                    dbg!(
                                        input_address_index,
                                        txid,
                                        input_txid,
                                        input_amount,
                                        tx_index,
                                        input_tx_index,
                                        input_vout,
                                        &input_address_data
                                    );
                                    panic!()
                                });

                            input_address_realized_data
                                .send(input_amount, address_realized_profit_or_loss);
                        };

                        is_tx_data_from_cached_puts && input_tx_data.is_empty()
                    };

                    if remove_tx_data_from_cached_puts {
                        // Pre remove tx_datas that are empty and weren't yet added to the database to avoid having it was in there or not (and thus avoid useless operations)
                        databases.txid_to_tx_data.remove_from_puts(&input_txid)
                    }

                    ControlFlow::Continue(())
                })?;
            }

            amount_sent += inputs_sum;

            // Fee = inputs - outputs (coinbase excluded by the sums above).
            let fee = inputs_sum - outputs_sum;
            fees_total += fee;
            fees.push(fee);

            ControlFlow::Continue(())
        });

    if !partial_txout_data_vec.is_empty() {
        panic!("partial_txout_data_vec should've been fully consumed");
    }

    // Flush updated/emptied tx data snapshots back to the database.
    txid_to_tx_data.into_iter().for_each(|(txid, tx_data)| {
        if let Some(tx_data) = tx_data {
            if tx_data.is_empty() {
                databases.txid_to_tx_data.remove_from_db(txid);
            } else {
                databases.txid_to_tx_data.update(txid, tx_data);
            }
        }
    });

    // if !txin_ordered_tx_datas.is_empty() {
    //     panic!("txin_ordered_tx_indexes should've been fully consumed");
    // }

    let mut utxo_cohorts_sent_states = UTXOCohortsSentStates::default();
    let mut utxo_cohorts_one_shot_states = UTXOCohortsOneShotStates::default();
    // let mut utxo_cohorts_received_states = UTXOCohortsReceivedStates::default();
    let mut address_cohorts_input_states = None;
    let mut address_cohorts_one_shot_states = None;
    let mut address_cohorts_output_states = None;
    let mut address_cohorts_realized_states = None;

    // log("Starting heavy work...");

    // Cohort state computations are independent and run on separate threads.
    thread::scope(|scope| {
        scope.spawn(|| {
            let previous_last_block_data = states.date_data_vec.second_last_block();

            if datasets.utxo.needs_durable_states(height, date) {
                if let Some(previous_last_block_data) = previous_last_block_data {
                    block_path_to_sent_data
                        .iter()
                        .for_each(|(block_path, sent_data)| {
                            let block_data =
                                states.date_data_vec.get_block_data(block_path).unwrap();

                            if block_data.height != height as u32 {
                                states.utxo_cohorts_durable_states.subtract_moved(
                                    block_data,
                                    sent_data,
                                    previous_last_block_data,
                                );
                            }
                        });
                }

                let last_block_data = states.date_data_vec.last_block().unwrap();
                if last_block_data.height != height as u32 {
                    unreachable!()
                }

                // (sic: "udpate" — typo lives in the States method name)
                states
                    .date_data_vec
                    .iter()
                    .flat_map(|date_data| &date_data.blocks)
                    .for_each(|block_data| {
                        states.utxo_cohorts_durable_states.udpate_age_if_needed(
                            block_data,
                            last_block_data,
                            previous_last_block_data,
                        );
                    });
            }

            if datasets.utxo.needs_one_shot_states(height, date) {
                utxo_cohorts_one_shot_states =
                    states.utxo_cohorts_durable_states.compute_one_shot_states(
                        block_price,
                        if is_date_last_block {
                            Some(date_price)
                        } else {
                            None
                        },
                    );
            }
        });

        // scope.spawn(|| {
        //     utxo_cohorts_received_states
        //         .compute(&states.date_data_vec, block_path_to_received_data);
        // });

        if datasets.utxo.needs_sent_states(height, date) {
            scope.spawn(|| {
                utxo_cohorts_sent_states.compute(
                    &states.date_data_vec,
                    &block_path_to_sent_data,
                    block_price,
                );
            });
        }

        if compute_addresses {
            scope.spawn(|| {
                let address_index_to_address_data = address_index_to_address_data.as_ref().unwrap();

                // TODO: Only compute if needed
                address_cohorts_realized_states.replace(AddressCohortsRealizedStates::default());

                // TODO: Only compute if needed
                address_cohorts_input_states.replace(AddressCohortsInputStates::default());

                // TODO: Only compute if needed
                address_cohorts_output_states.replace(AddressCohortsOutputStates::default());

                address_index_to_address_realized_data.iter().for_each(
                    |(address_index, address_realized_data)| {
                        let current_address_data =
                            address_index_to_address_data.get(address_index).unwrap();

                        states
                            .address_cohorts_durable_states
                            .iterate(address_realized_data, current_address_data)
                            .unwrap_or_else(|report| {
                                dbg!(report.to_string(), address_index);
                                panic!();
                            });

                        if !address_realized_data.initial_address_data.is_empty() {
                            // Realized == previous amount
                            // If a whale sent all its sats to another address at a loss, it's the whale that realized the loss not the now empty adress
                            let liquidity_classification = address_realized_data
                                .initial_address_data
                                .compute_liquidity_classification();

                            address_cohorts_realized_states
                                .as_mut()
                                .unwrap()
                                .iterate_realized(address_realized_data, &liquidity_classification)
                                .unwrap();

                            address_cohorts_input_states
                                .as_mut()
                                .unwrap()
                                .iterate_input(address_realized_data, &liquidity_classification)
                                .unwrap();
                        }

                        address_cohorts_output_states
                            .as_mut()
                            .unwrap()
                            .iterate_output(
                                address_realized_data,
                                &current_address_data.compute_liquidity_classification(),
                            )
                            .unwrap();
                    },
                );

                address_cohorts_one_shot_states.replace(
                    states
                        .address_cohorts_durable_states
                        .compute_one_shot_states(
                            block_price,
                            if is_date_last_block {
                                Some(date_price)
                            } else {
                                None
                            },
                        ),
                );
            });
        }
    });

    // Write the (possibly now empty) address data back to the right database.
    if compute_addresses {
        address_index_to_address_data.unwrap().into_iter().for_each(
            |(address_index, address_data)| {
                if address_data.is_empty() {
                    databases.address_index_to_empty_address_data.unsafe_insert(
                        address_index,
                        EmptyAddressData::from_non_empty(&address_data),
                    );
                } else {
                    databases
                        .address_index_to_address_data
                        .unsafe_insert(address_index, address_data);
                }
            },
        )
    }

    // Hand everything computed for this block to the datasets.
    datasets.insert(InsertData {
        address_cohorts_input_states: &address_cohorts_input_states,
        block_size,
        block_vbytes,
        block_weight,
        address_cohorts_one_shot_states: &address_cohorts_one_shot_states,
        address_cohorts_realized_states: &address_cohorts_realized_states,
        block_interval,
        block_price,
        coinbase,
        compute_addresses,
        databases,
        date,
        date_blocks_range: &(first_date_height..=height),
        date_first_height: first_date_height,
        difficulty,
        fees: &fees,
        height,
        is_date_last_block,
        satblocks_destroyed,
        satdays_destroyed,
        amount_sent,
        states,
        timestamp,
        transaction_count,
        utxo_cohorts_one_shot_states: &utxo_cohorts_one_shot_states,
        utxo_cohorts_sent_states: &utxo_cohorts_sent_states,
    });
}
/// Output of `pre_process_outputs` for one block.
pub struct TxoutsParsingResults {
    /// One entry per txout of the block, in txdata order; `None` for outputs
    /// not worth tracking (zero-value). Reversed by the caller for `pop()`.
    partial_txout_data_vec: Vec<Option<PartialTxoutData>>,
    /// Total amount locked in provably unspendable outputs (incl. OP_RETURN).
    provably_unspendable: WAmount,
    /// Number of OP_RETURN outputs seen in the block.
    op_returns: usize,
}
/// First pass over every output of the block.
///
/// Builds one `PartialTxoutData` per txout (or `None` for zero-value
/// outputs), counting OP_RETURN / provably-unspendable amounts along the way.
/// When `compute_addresses` is set, also resolves each output's address and
/// pre-fetches its index from the address database (in parallel).
fn pre_process_outputs(
    block: &Block,
    compute_addresses: bool,
    op_return_addresses: &mut Counter,
    push_only_addresses: &mut Counter,
    unknown_addresses: &mut Counter,
    empty_addresses: &mut Counter,
    address_to_address_index: &mut AddressToAddressIndex,
) -> TxoutsParsingResults {
    let mut provably_unspendable = WAmount::ZERO;
    let mut op_returns = 0;

    let mut partial_txout_data_vec = block
        .txdata
        .iter()
        .flat_map(|tx| &tx.output)
        .map(|txout| {
            let script = &txout.script_pubkey;
            let amount = WAmount::wrap(txout.value);

            // 0 sats outputs are possible and allowed !
            // https://mempool.space/tx/2f2442f68e38b980a6c4cec21e71851b0d8a5847d85208331a27321a9967bbd6
            // https://bitcoin.stackexchange.com/questions/104937/transaction-outputs-with-value-0
            if amount == WAmount::ZERO {
                return None;
            }

            // Op Return
            // https://mempool.space/tx/139c004f477101c468767983536caaeef568613fab9c2ed9237521f5ff530afd
            // Provably unspendable https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
            if script.is_op_return() {
                // TODO: Count fee paid to write said OP_RETURN, beware of coinbase transactions
                // For coinbase transactions, count miners
                op_returns += 1;

                provably_unspendable += amount;

                // return None;
            }
            // https://mempool.space/tx/8a68c461a2473653fe0add786f0ca6ebb99b257286166dfb00707be24716af3a#flow=&vout=0
            else if script.is_provably_unspendable() {
                provably_unspendable += amount;

                // return None;
            }

            let address_opt = compute_addresses.then(|| {
                let address = Address::from(
                    txout,
                    op_return_addresses,
                    push_only_addresses,
                    unknown_addresses,
                    empty_addresses,
                );

                // Open the db shard now so the parallel lookup below is safe.
                address_to_address_index.open_db(&address);

                address
            });

            Some(PartialTxoutData::new(address_opt, amount, None))
        })
        .collect_vec();

    if compute_addresses {
        // Parallel read-only lookups (dbs were opened sequentially above).
        partial_txout_data_vec
            .par_iter_mut()
            .for_each(|partial_tx_out_data| {
                if let Some(partial_tx_out_data) = partial_tx_out_data {
                    let address_index_opt = address_to_address_index
                        .unsafe_get(partial_tx_out_data.address.as_ref().unwrap())
                        .cloned();

                    partial_tx_out_data.address_index_opt = address_index_opt;
                }
            });
    }

    TxoutsParsingResults {
        partial_txout_data_vec,
        provably_unspendable,
        op_returns,
    }
}
/// First pass over every input of the block (coinbase excluded).
///
/// Returns:
/// 1. a map of every spent txid to its `TxData` snapshot (`None` when the
///    txid is unknown to the database — e.g. same-block or zero-value spends),
/// 2. a map of every spent txout index to its amount (and, if
///    `compute_addresses`, its address index).
///
/// Database shards are opened sequentially first so that the actual lookups
/// can run in parallel (`par_iter` / `into_par_iter`).
#[allow(clippy::type_complexity)]
fn pre_process_inputs<'a>(
    block: &'a Block,
    txid_to_tx_data_db: &mut TxidToTxData,
    txout_index_to_amount_db: &mut TxoutIndexToAmount,
    txout_index_to_address_index_db: &mut TxoutIndexToAddressIndex,
    compute_addresses: bool,
) -> (
    BTreeMap<&'a Txid, Option<TxData>>,
    BTreeMap<TxoutIndex, (WAmount, Option<u32>)>,
) {
    let mut txid_to_tx_data: BTreeMap<&Txid, Option<TxData>> = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .fold(BTreeMap::default(), |mut tree, tx_in| {
            let txid = &tx_in.previous_output.txid;

            txid_to_tx_data_db.open_db(txid);

            tree.entry(txid).or_default();

            tree
        });

    // Parallel fetch, then zip results back in (BTreeMap iteration order is
    // deterministic, so a reversed pop matches par_iter order).
    let mut tx_datas = txid_to_tx_data
        .par_iter()
        .map(|(txid, _)| txid_to_tx_data_db.unsafe_get(txid))
        .collect::<Vec<_>>();

    txid_to_tx_data.values_mut().rev().for_each(|tx_data_opt| {
        *tx_data_opt = tx_datas.pop().unwrap().cloned();
    });

    let txout_index_to_amount_and_address_index = block
        .txdata
        .iter()
        .skip(1) // Skip coinbase transaction
        .flat_map(|transaction| &transaction.input)
        .flat_map(|tx_in| {
            let txid = &tx_in.previous_output.txid;

            if let Some(Some(tx_data)) = txid_to_tx_data.get(txid) {
                let txout_index = TxoutIndex::new(tx_data.index, tx_in.previous_output.vout as u16);

                txout_index_to_amount_db.open_db(&txout_index);

                if compute_addresses {
                    txout_index_to_address_index_db.open_db(&txout_index);
                }

                Some(txout_index)
            } else {
                None
            }
        })
        .collect_vec()
        .into_par_iter()
        .flat_map(|txout_index| {
            txout_index_to_amount_db
                .unsafe_get(&txout_index)
                // Will be None if value of utxo is 0
                // https://mempool.space/tx/9d8a0d851c9fb2cdf1c6d9406ce97e19e6911ae3503ab2dd5f38640bacdac996
                // which is used later as input
                .map(|amount| {
                    let address_index = compute_addresses.then(|| {
                        *txout_index_to_address_index_db
                            .unsafe_get(&txout_index)
                            .unwrap()
                    });

                    (txout_index, (*amount, address_index))
                })
        })
        .collect::<BTreeMap<_, _>>();

    // No need to call remove, it's being called later in the parse function
    // To more easily support removing cached puts

    (txid_to_tx_data, txout_index_to_amount_and_address_index)
}
/// Gathers the `AddressData` of every address touched by this block (as
/// output receiver or input spender) into an in-memory map, removing the
/// entries from their backing databases (they are written back by `parse`
/// once the block is processed).
///
/// Lookups happen in parallel; removals are sequential afterwards
/// ("Parallel unsafe_get + Linear remove = Parallel-ish take").
fn compute_address_index_to_address_data(
    address_index_to_address_data_db: &mut AddressIndexToAddressData,
    address_index_to_empty_address_data_db: &mut AddressIndexToEmptyAddressData,
    partial_txout_data_vec: &[Option<PartialTxoutData>],
    txout_index_to_amount_and_address_index: &BTreeMap<TxoutIndex, (WAmount, Option<u32>)>,
    compute_addresses: bool,
) -> BTreeMap<u32, AddressData> {
    if !compute_addresses {
        return BTreeMap::default();
    }

    let mut address_index_to_address_data = partial_txout_data_vec
        .iter()
        .flatten()
        .flat_map(|partial_txout_data| partial_txout_data.address_index_opt)
        // Receivers may currently be empty addresses, so their "empty" db
        // shard must be opened too; spenders can't be empty.
        .map(|address_index| (address_index, true))
        .chain(
            txout_index_to_amount_and_address_index
                .values()
                .map(|(_, address_index)| (*address_index.as_ref().unwrap(), false)), // False because we assume non zero inputs values
        )
        .map(|(address_index, open_empty)| {
            address_index_to_address_data_db.open_db(&address_index);

            if open_empty {
                address_index_to_empty_address_data_db.open_db(&address_index);
            }

            // Placeholder, overwritten by the parallel fill below.
            (address_index, AddressData::default())
        })
        .collect::<BTreeMap<_, _>>();

    // Fill each entry from, in priority order: non-empty cache, empty cache,
    // non-empty db, empty db (the last one must succeed).
    address_index_to_address_data
        .par_iter_mut()
        .for_each(|(address_index, address_data)| {
            if let Some(_address_data) =
                address_index_to_address_data_db.unsafe_get_from_cache(address_index)
            {
                _address_data.clone_into(address_data);
            } else if let Some(empty_address_data) =
                address_index_to_empty_address_data_db.unsafe_get_from_cache(address_index)
            {
                *address_data = AddressData::from_empty(empty_address_data);
            } else if let Some(_address_data) =
                address_index_to_address_data_db.unsafe_get_from_db(address_index)
            {
                _address_data.clone_into(address_data);
            } else {
                let empty_address_data = address_index_to_empty_address_data_db
                    .unsafe_get_from_db(address_index)
                    .unwrap();

                *address_data = AddressData::from_empty(empty_address_data);
            }
        });

    // Parallel unsafe_get + Linear remove = Parallel-ish take
    address_index_to_address_data
        .iter()
        .for_each(|(address_index, address_data)| {
            if address_data.is_empty() {
                address_index_to_empty_address_data_db.remove(address_index);
            } else {
                address_index_to_address_data_db.remove(address_index);
            }
        });

    address_index_to_address_data
}

View File

@@ -0,0 +1,3 @@
mod multisig;
pub use multisig::*;

View File

@@ -0,0 +1,57 @@
//
// Code from bitcoin-explorer now deprecated
//
use bitcoin::{
blockdata::{
opcodes::all,
script::Instruction::{self, Op, PushBytes},
},
Opcode, Script,
};
///
/// Obtain addresses for multisig transactions.
///
pub fn multisig_addresses(script: &Script) -> Vec<Vec<u8>> {
    let ops: Vec<Instruction> = script.instructions().filter_map(|o| o.ok()).collect();
    // A standard multisig script is: OP_m <pk1> ... <pkn> OP_n OP_CHECKMULTISIG,
    // i.e. at least 3 instructions. Malformed or non-multisig scripts do occur
    // in real chain data; the original code panicked on them (usize underflow
    // on `ops.len() - 2`, `unreachable!`, or a huge `with_capacity` from a
    // negative count cast to usize). Return no addresses instead.
    if ops.len() < 3 {
        return Vec::new();
    }
    // The number of public keys is encoded as OP_n, second-to-last instruction.
    let num_keys = match ops.get(ops.len() - 2) {
        Some(Op(op)) => decode_from_op_n(op),
        _ => return Vec::new(),
    };
    if num_keys <= 0 {
        return Vec::new();
    }
    // Public keys are the push instructions right after the leading OP_m.
    let mut public_keys = Vec::with_capacity(num_keys as usize);
    for op in ops.iter().skip(1).take(num_keys as usize) {
        if let PushBytes(data) = op {
            public_keys.push(data.as_bytes().to_vec());
        } else {
            // A non-push where a key was expected: not a standard multisig.
            return Vec::new();
        }
    }
    public_keys
}
///
/// Decode OP_N
///
/// translated from BitcoinJ:
/// [decodeFromOpN()](https://github.com/bitcoinj/bitcoinj/blob/d3d5edbcbdb91b25de4df3b6ed6740d7e2329efc/core/src/main/java/org/bitcoinj/script/Script.java#L515:L524)
///
#[inline]
fn decode_from_op_n(op: &Opcode) -> i32 {
    // OP_0 encodes 0, OP_1NEGATE encodes -1; everything else is decoded as
    // (opcode - OP_1 + 1), matching BitcoinJ's decodeFromOpN().
    if *op == all::OP_PUSHBYTES_0 {
        0
    } else if *op == all::OP_PUSHNUM_NEG1 {
        -1
    } else {
        i32::from(op.to_u8()) + 1 - i32::from(all::OP_PUSHNUM_1.to_u8())
    }
}

View File

@@ -0,0 +1,2 @@
// Number of most-recent blocks treated as re-org-able ("unsafe"); anything
// deeper than this is considered final (see `check_if_height_safe`).
pub const NUMBER_OF_UNSAFE_BLOCKS: usize = 100;
// Expected block cadence: ~10 minutes per block => 144 blocks per day.
pub const TARGET_BLOCKS_PER_DAY: usize = 144;

View File

@@ -0,0 +1,122 @@
use std::{process::Command, thread::sleep, time::Duration};
use color_eyre::eyre::eyre;
use serde_json::Value;
use crate::utils::{log, log_output, retry};
/// Subset of `bitcoin-cli getblockchaininfo` used to track sync progress.
struct BlockchainInfo {
    // Number of block headers the node knows about.
    pub headers: u64,
    // Number of blocks the node has validated so far.
    pub blocks: u64,
}
/// Thin wrapper around the `bitcoind` / `bitcoin-cli` executables,
/// parameterized by the node's `-datadir` path.
pub struct BitcoinDaemon<'a> {
    path: &'a str,
}
impl<'a> BitcoinDaemon<'a> {
    /// `bitcoin_dir_path` is used verbatim as the node's `-datadir=` value.
    pub fn new(bitcoin_dir_path: &'a str) -> Self {
        Self {
            path: bitcoin_dir_path,
        }
    }
    /// Launch `bitcoind` detached (`-daemon`), blocks-only, with the
    /// transaction index enabled (needed for txid lookups elsewhere).
    /// Panics if the process cannot be spawned.
    pub fn start(&self) {
        sleep(Duration::from_secs(1));
        let mut command = Command::new("bitcoind");
        command
            .arg(self.datadir_arg())
            .arg("-blocksonly")
            .arg("-txindex=1")
            .arg("-daemon");
        // bitcoind -datadir=/Users/k/Developer/bitcoin -blocksonly -txindex=1 -daemon
        let output = command
            .output()
            .expect("bitcoind to be able to properly start");
        log_output(&output);
    }
    /// Ask the node to shut down via `bitcoin-cli stop`, then sleep 15s to
    /// give it time to flush and exit. A failed stop is silently ignored.
    pub fn stop(&self) {
        // bitcoin-cli -datadir=/Users/k/Developer/bitcoin stop
        let output = Command::new("bitcoin-cli")
            .arg(self.datadir_arg())
            .arg("stop")
            .output()
            .unwrap();
        if output.status.success() {
            log_output(&output);
            sleep(Duration::from_secs(15));
        }
    }
    /// Block, polling every 5s, until the node reports blocks == headers.
    pub fn wait_sync(&self) {
        while !self.check_if_fully_synced() {
            sleep(Duration::from_secs(5))
        }
    }
    /// Block, polling every 5s, until the node's header count moves past
    /// `last_block_height`.
    // NOTE(review): this compares `headers` (not `blocks`); a header can
    // arrive before its block is validated — confirm this is intended.
    pub fn wait_for_new_block(&self, last_block_height: usize) {
        log("Waiting for new block...");
        while self.get_blockchain_info().headers as usize == last_block_height {
            sleep(Duration::from_secs(5))
        }
    }
    /// One-shot sync check; logs progress either way.
    pub fn check_if_fully_synced(&self) -> bool {
        let BlockchainInfo { blocks, headers } = self.get_blockchain_info();
        let synced = blocks == headers;
        if synced {
            log(&format!("Synced ! ({blocks} blocks)"));
        } else {
            log(&format!("Syncing... ({} remaining)", headers - blocks));
        }
        synced
    }
    /// Run `bitcoin-cli getblockchaininfo` and parse the `blocks` and
    /// `headers` fields out of its JSON output.
    // NOTE(review): the retry args (1, u64::MAX) presumably mean 1s delay
    // and unbounded attempts — verify against `utils::retry`'s signature.
    fn get_blockchain_info(&self) -> BlockchainInfo {
        retry(
            || {
                // bitcoin-cli -datadir=/Users/k/Developer/bitcoin getblockchaininfo
                let output = Command::new("bitcoin-cli")
                    .arg(self.datadir_arg())
                    .arg("getblockchaininfo")
                    .output()?;
                let output = String::from_utf8_lossy(&output.stdout);
                let json: Value = serde_json::from_str(&output)?;
                let json = json.as_object().ok_or(eyre!(""))?;
                let blocks = json
                    .get("blocks")
                    .ok_or(eyre!(""))?
                    .as_u64()
                    .ok_or(eyre!(""))?;
                let headers = json
                    .get("headers")
                    .ok_or(eyre!(""))?
                    .as_u64()
                    .ok_or(eyre!(""))?;
                Ok(BlockchainInfo { headers, blocks })
            },
            1,
            u64::MAX,
        )
        .unwrap()
    }
    /// `-datadir=<path>` argument shared by every invocation above.
    fn datadir_arg(&self) -> String {
        format!("-datadir={}", self.path)
    }
}

View File

@@ -0,0 +1,152 @@
use std::{
collections::HashMap,
convert::From,
fs::{self, DirEntry, File},
io::{self, BufReader, Seek, SeekFrom},
path::{Path, PathBuf},
};
use bitcoin::{io::Cursor, Block, Transaction};
use derive_deref::{Deref, DerefMut};
use super::{
errors::{OpError, OpErrorKind, OpResult},
reader::BlockchainRead,
};
///
/// An index of all blk files found.
///
#[derive(Debug, Clone, Deref, DerefMut)]
pub struct BlkFiles(HashMap<i32, PathBuf>); // blk index -> blkNNNNN.dat path
impl BlkFiles {
    ///
    /// Construct an index of all blk files found under `path`.
    ///
    pub fn new(path: &Path) -> OpResult<Self> {
        Ok(Self(Self::scan_path(path)?))
    }
    ///
    /// Read a serialized block from a blk file.
    ///
    /// `offset` points at the block payload; the 4-byte little-endian size
    /// field stored right before it is read first (hence the `offset - 4`
    /// seek).
    ///
    #[inline]
    pub fn read_raw_block(&self, n_file: i32, offset: u32) -> OpResult<Vec<u8>> {
        if let Some(blk_path) = self.get(&n_file) {
            let mut r = BufReader::new(File::open(blk_path)?);
            r.seek(SeekFrom::Start(offset as u64 - 4))?;
            let block_size = r.read_u32()?;
            let block = r.read_u8_vec(block_size)?;
            Ok(block)
        } else {
            Err(OpError::from("blk file not found, sync with bitcoin core"))
        }
    }
    ///
    /// Read and decode a Block from a blk file.
    ///
    pub fn read_block(&self, n_file: i32, offset: u32) -> OpResult<Block> {
        Cursor::new(self.read_raw_block(n_file, offset)?).read_block()
    }
    ///
    /// Read a transaction from a blk file.
    ///
    /// `n_pos` is the block's byte position and `n_tx_offset` the
    /// transaction's offset within the block body; the 80-byte block header
    /// is skipped.
    ///
    pub fn read_transaction(
        &self,
        n_file: i32,
        n_pos: u32,
        n_tx_offset: u32,
    ) -> OpResult<Transaction> {
        if let Some(blk_path) = self.get(&n_file) {
            let mut r = BufReader::new(File::open(blk_path)?);
            // the size of a header is 80.
            r.seek(SeekFrom::Start(n_pos as u64 + n_tx_offset as u64 + 80))?;
            r.read_transaction()
        } else {
            Err(OpError::from("blk file not found, sync with bitcoin core"))
        }
    }
    ///
    /// Scan blk folder to build an index of all blk files.
    ///
    /// Errors when the folder is unreadable or contains no blk file at all.
    ///
    fn scan_path(path: &Path) -> OpResult<HashMap<i32, PathBuf>> {
        let mut collected = HashMap::with_capacity(4000);
        for entry in fs::read_dir(path)? {
            match entry {
                Ok(de) => {
                    let path = Self::resolve_path(&de)?;
                    if !path.is_file() {
                        continue;
                    };
                    if let Some(file_name) = path.as_path().file_name() {
                        if let Some(file_name) = file_name.to_str() {
                            if let Some(index) = Self::parse_blk_index(file_name) {
                                collected.insert(index, path);
                            }
                        }
                    }
                }
                Err(msg) => {
                    return Err(OpError::from(msg));
                }
            }
        }
        collected.shrink_to_fit();
        if collected.is_empty() {
            Err(OpError::new(OpErrorKind::RuntimeError).join_msg("No blk files found!"))
        } else {
            Ok(collected)
        }
    }
    ///
    /// Resolve symlink.
    ///
    // NOTE(review): read_link may return a path relative to the link's own
    // directory; confirm links in the blocks folder are absolute.
    fn resolve_path(entry: &DirEntry) -> io::Result<PathBuf> {
        if entry.file_type()?.is_symlink() {
            fs::read_link(entry.path())
        } else {
            Ok(entry.path())
        }
    }
    ///
    /// Extract the numeric index from a "blkNNNNN.dat" file name.
    ///
    /// Non-numeric stems (e.g. "blkindex.dat") fail the parse and yield
    /// `None`, exactly like the previous slice-based implementation.
    ///
    fn parse_blk_index(file_name: &str) -> Option<i32> {
        file_name
            .strip_prefix("blk")?
            .strip_suffix(".dat")?
            .parse::<i32>()
            .ok()
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn test_parse_blk_index() {
// assert_eq!(0, BlkFiles::parse_blk_index("blk00000.dat").unwrap());
// assert_eq!(6, BlkFiles::parse_blk_index("blk6.dat").unwrap());
// assert_eq!(1202, BlkFiles::parse_blk_index("blk1202.dat").unwrap());
// assert_eq!(
// 13412451,
// BlkFiles::parse_blk_index("blk13412451.dat").unwrap()
// );
// assert!(BlkFiles::parse_blk_index("blkindex.dat").is_none());
// assert!(BlkFiles::parse_blk_index("invalid.dat").is_none());
// }
// }

View File

@@ -0,0 +1,45 @@
//!
//! View development note of iter_connected.rs for implementation
//! details of iter_block.rs, which follows similar principles.
//!
use bitcoin::Block;
use par_iter_sync::{IntoParallelIteratorSync, ParIterSync};
use super::BitcoinDB;
/// Ordered, parallel-fetching iterator over blocks.
pub struct BlockIter(ParIterSync<Block>);
impl BlockIter {
    /// The worker threads are dispatched in this `new` constructor!
    /// Blocks are fetched concurrently but yielded in `heights` order.
    pub fn new<T>(db: &BitcoinDB, heights: T) -> Self
    where
        T: IntoIterator<Item = usize> + Send + 'static,
        <T as IntoIterator>::IntoIter: Send + 'static,
    {
        let db_ref = db.clone();
        // A failed read maps to Err(()), which terminates the stream.
        BlockIter(heights.into_par_iter_sync(move |h| db_ref.get_block(h).map_err(|_| ())))
    }
    /// Iterate blocks with heights in `start..end` (end excluded).
    ///
    /// `start..end` is already an empty iterator when `end <= start`, so the
    /// previous special-case branch building an empty `Vec` was redundant.
    pub fn from_range(db: &BitcoinDB, start: usize, end: usize) -> Self {
        BlockIter::new(db, start..end)
    }
}
impl Iterator for BlockIter {
    type Item = Block;
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
}

View File

@@ -0,0 +1,211 @@
use std::{collections::BTreeMap, fmt, path::Path};
use bitcoin::{block::Header, io::Cursor, BlockHash};
use derive_deref::{Deref, DerefMut};
use leveldb::{
database::{iterator::LevelDBIterator, Database},
iterator::Iterable,
options::{Options, ReadOptions},
};
use crate::utils::log;
use super::{BlockchainRead, OpResult};
///
/// See Bitcoin Core repository for definition.
///
// Validity levels from Bitcoin Core's BlockStatus (chain.h), stored in the
// low bits of `n_status`.
const BLOCK_VALID_HEADER: u32 = 1;
const BLOCK_VALID_TREE: u32 = 2;
const BLOCK_VALID_TRANSACTIONS: u32 = 3;
const BLOCK_VALID_CHAIN: u32 = 4;
const BLOCK_VALID_SCRIPTS: u32 = 5;
// Union of the levels above (= 7): masks the validity part out of `n_status`
// so it can be compared against a level.
const BLOCK_VALID_MASK: u32 = BLOCK_VALID_HEADER
    | BLOCK_VALID_TREE
    | BLOCK_VALID_TRANSACTIONS
    | BLOCK_VALID_CHAIN
    | BLOCK_VALID_SCRIPTS;
// Set when the full block data is available in a blk*.dat file.
const BLOCK_HAVE_DATA: u32 = 8;
// Set when undo data is available in a rev*.dat file.
const BLOCK_HAVE_UNDO: u32 = 16;
///
/// Block index records of the main chain, ordered by height
/// (slice index == block height — see `load_block_index`).
///
#[derive(Clone, Deref, DerefMut)]
pub struct BlocksIndexes(Box<[BlockIndexRecord]>);
///
/// BLOCK_INDEX RECORD as defined in Bitcoin Core.
///
#[derive(Clone)]
pub struct BlockIndexRecord {
    pub n_version: i32,
    // Height of this block in the chain.
    pub n_height: i32,
    // Bit flags: validity level plus BLOCK_HAVE_DATA / BLOCK_HAVE_UNDO.
    pub n_status: u32,
    // Number of transactions; 0 when the block is not downloaded.
    pub n_tx: u32,
    // blk file number holding the block, or -1 when no data is stored.
    pub n_file: i32,
    // Byte offset of the block in the blk file; u32::MAX when absent.
    pub n_data_pos: u32,
    // Byte offset in the undo (rev) file; u32::MAX when absent.
    pub n_undo_pos: u32,
    pub header: Header,
}
impl BlocksIndexes {
    ///
    /// Build the height-ordered collection of block index records at `p`.
    ///
    pub(crate) fn new(p: &Path) -> OpResult<Self> {
        load_block_index(p).map(|records| Self(records.into_boxed_slice()))
    }
}
///
/// Load all block index in memory from leveldb (i.e. `blocks/index` path).
///
/// Map from block height to block index record.
///
pub fn load_block_index(path: &Path) -> OpResult<Vec<BlockIndexRecord>> {
    let mut block_index_by_block_hash = BTreeMap::new();
    log("Start loading block_index");
    let mut options = Options::new();
    options.create_if_missing = false;
    let db: Database<BlockKey> = Database::open(path, options)?;
    let options = ReadOptions::new();
    let mut iter = db.iter(options);
    // Track the highest block seen during the scan; the chain is rebuilt
    // backwards from it via `prev_blockhash` links afterwards.
    let mut max_height_block_hash = Option::<(BlockHash, i32)>::None;
    while iter.advance() {
        let k = iter.key();
        let v = iter.value();
        // Only leveldb entries whose key starts with b'b' are block records.
        if is_block_index_record(&k.key) {
            let record = BlockIndexRecord::from(&v)?;
            // only add valid block index record that has block data.
            // (genesis, height 0, is always kept)
            if record.n_height == 0
                || (record.n_status & BLOCK_VALID_MASK >= BLOCK_VALID_SCRIPTS
                    && record.n_status & BLOCK_HAVE_DATA > 0)
            {
                let block_hash = record.header.block_hash();
                // find the block with max height
                if let Some((hash, height)) = max_height_block_hash.as_mut() {
                    if record.n_height > *height {
                        *hash = block_hash;
                        *height = record.n_height;
                    }
                } else {
                    max_height_block_hash = Some((block_hash, record.n_height));
                }
                block_index_by_block_hash.insert(block_hash, record);
            }
        }
    }
    // build the longest chain
    if let Some((hash, height)) = max_height_block_hash {
        let mut block_index = Vec::with_capacity(height as usize + 1);
        let mut current_hash = hash;
        let mut current_height = height;
        // recursively build block index from max height block.
        // Walking parent links keeps only the main chain: stale forks
        // collected above are simply never visited.
        while current_height >= 0 {
            let blk = block_index_by_block_hash
                .remove(&current_hash)
                .expect("block hash not found in block index!");
            assert_eq!(
                current_height, blk.n_height,
                "some block info missing from block index levelDB,\
                delete Bitcoin folder and re-download!"
            );
            current_hash = blk.header.prev_blockhash;
            current_height -= 1;
            block_index.push(blk);
        }
        // Collected tip-first; reverse so that index == height.
        block_index.reverse();
        Ok(block_index)
    } else {
        Ok(Vec::with_capacity(0))
    }
}
/// levelDB key util: owns the raw bytes of a leveldb key.
struct BlockKey {
    key: Vec<u8>,
}
/// levelDB key util
impl db_key::Key for BlockKey {
    fn from_u8(key: &[u8]) -> Self {
        BlockKey {
            key: Vec::from(key),
        }
    }
    fn as_slice<T, F: Fn(&[u8]) -> T>(&self, f: F) -> T {
        f(&self.key)
    }
}
impl BlockIndexRecord {
    ///
    /// Decode levelDB value for Block Index Record.
    ///
    /// Field order matches Bitcoin Core's on-disk serialization, so the
    /// reads below must stay in this exact sequence.
    ///
    fn from(values: &[u8]) -> OpResult<Self> {
        let mut reader = Cursor::new(values);
        let n_version = reader.read_varint()? as i32;
        let n_height = reader.read_varint()? as i32;
        let n_status = reader.read_varint()? as u32;
        let n_tx = reader.read_varint()? as u32;
        // File/position fields are only present when the corresponding
        // status flag is set; sentinels (-1 / u32::MAX) mark absence.
        let n_file = if n_status & (BLOCK_HAVE_DATA | BLOCK_HAVE_UNDO) > 0 {
            reader.read_varint()? as i32
        } else {
            -1
        };
        let n_data_pos = if n_status & BLOCK_HAVE_DATA > 0 {
            reader.read_varint()? as u32
        } else {
            u32::MAX
        };
        let n_undo_pos = if n_status & BLOCK_HAVE_UNDO > 0 {
            reader.read_varint()? as u32
        } else {
            u32::MAX
        };
        let header = reader.read_block_header()?;
        Ok(BlockIndexRecord {
            n_version,
            n_height,
            n_status,
            n_tx,
            n_file,
            n_data_pos,
            n_undo_pos,
            header,
        })
    }
}
/// A leveldb entry is a block index record iff its key starts with b'b'.
#[inline]
fn is_block_index_record(data: &[u8]) -> bool {
    data.starts_with(b"b")
}
impl fmt::Debug for BlockIndexRecord {
    /// Debug output listing every field of the record.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("BlockIndexRecord")
            .field("version", &self.n_version)
            .field("height", &self.n_height)
            .field("status", &self.n_status)
            .field("n_tx", &self.n_tx)
            .field("n_file", &self.n_file)
            .field("n_data_pos", &self.n_data_pos)
            // Was missing: n_undo_pos is a struct field too and should show
            // up in diagnostics like every other position field.
            .field("n_undo_pos", &self.n_undo_pos)
            .field("header", &self.header)
            .finish()
    }
}

View File

@@ -0,0 +1,135 @@
use std::convert::{self, From};
use std::error;
use std::fmt;
use std::io;
use std::string;
use std::sync;
/// Result alias for operations that can fail with an [`OpError`].
pub type OpResult<T> = Result<T, OpError>;
#[derive(Debug)]
/// Custom error type: a failure kind plus an optional free-form message.
pub struct OpError {
    pub kind: OpErrorKind,
    pub message: String,
}
impl OpError {
    /// Build an error of the given kind with an empty message.
    pub fn new(kind: OpErrorKind) -> Self {
        OpError {
            kind,
            message: String::new(),
        }
    }
    /// Appends `msg` to the error's message and returns the error.
    pub fn join_msg(mut self, msg: &str) -> Self {
        self.message.push_str(msg);
        self
    }
}
impl fmt::Display for OpError {
    /// "<message> <kind>" when a message is set, otherwise just the kind.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.message.as_str() {
            "" => write!(f, "{}", &self.kind),
            msg => write!(f, "{} {}", msg, &self.kind),
        }
    }
}
impl error::Error for OpError {
    // NOTE(review): `description` and `cause` are deprecated in favor of
    // Display and `source` — consider overriding `source` instead.
    fn description(&self) -> &str {
        self.message.as_ref()
    }
    fn cause(&self) -> Option<&dyn error::Error> {
        self.kind.source()
    }
}
#[derive(Debug)]
/// The failure category carried by an `OpError`.
pub enum OpErrorKind {
    None,
    IoError(io::Error),
    Utf8Error(string::FromUtf8Error),
    RuntimeError,
    PoisonError,
    SendError,
}
impl fmt::Display for OpErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Fix: the previous arms `ref err @ OpErrorKind::X => write!(f,
        // "...: {}", err)` formatted the kind with itself, recursing
        // infinitely (stack overflow) whenever a unit variant was displayed.
        match *self {
            OpErrorKind::IoError(ref err) => write!(f, "I/O Error: {}", err),
            OpErrorKind::Utf8Error(ref err) => write!(f, "Utf8 Conversion: {}", err),
            OpErrorKind::PoisonError => write!(f, "Threading Error"),
            OpErrorKind::SendError => write!(f, "Sync Error"),
            OpErrorKind::RuntimeError => write!(f, "RuntimeError"),
            OpErrorKind::None => write!(f, ""),
        }
    }
}
impl error::Error for OpErrorKind {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        // Fix: the unit variants used to return `Some(self)`, making an
        // error its own source — error-chain walkers would loop forever.
        match *self {
            OpErrorKind::IoError(ref err) => Some(err),
            OpErrorKind::Utf8Error(ref err) => Some(err),
            OpErrorKind::None
            | OpErrorKind::RuntimeError
            | OpErrorKind::PoisonError
            | OpErrorKind::SendError => None,
        }
    }
}
// std I/O errors keep their payload as the error source.
impl From<io::Error> for OpError {
    fn from(err: io::Error) -> Self {
        Self::new(OpErrorKind::IoError(err))
    }
}
// Consensus decode failures are flattened to a plain message.
impl From<bitcoin::consensus::encode::Error> for OpError {
    fn from(_: bitcoin::consensus::encode::Error) -> Self {
        Self::from("block decode error")
    }
}
// Raw OS error codes go through std's io::Error.
impl convert::From<i32> for OpError {
    fn from(err_code: i32) -> Self {
        Self::from(io::Error::from_raw_os_error(err_code))
    }
}
// A bare &str becomes a kind-less error carrying only the message.
impl convert::From<&str> for OpError {
    fn from(err: &str) -> Self {
        Self::new(OpErrorKind::None).join_msg(err)
    }
}
// Lock poisoning (the payload type parameter is dropped).
impl<T> convert::From<sync::PoisonError<T>> for OpError {
    fn from(_: sync::PoisonError<T>) -> Self {
        Self::new(OpErrorKind::PoisonError)
    }
}
// Channel send failure (the unsent value is dropped).
impl<T> convert::From<sync::mpsc::SendError<T>> for OpError {
    fn from(_: sync::mpsc::SendError<T>) -> Self {
        Self::new(OpErrorKind::SendError)
    }
}
impl convert::From<string::FromUtf8Error> for OpError {
    fn from(err: string::FromUtf8Error) -> Self {
        Self::new(OpErrorKind::Utf8Error(err))
    }
}
// leveldb errors are flattened to their string representation.
impl convert::From<leveldb::error::Error> for OpError {
    fn from(err: leveldb::error::Error) -> Self {
        Self::from(err.to_string().as_ref())
    }
}

View File

@@ -0,0 +1,172 @@
//!
//! Mostly a stripped down copy pasta of bitcoin-explorer
//!
//! Huge props to https://github.com/Congyuwang
//!
//! Crates APIs, essential structs, functions, methods are all here!
//!
//! To quickly understand how to use this crate, have a look at the
//! documentation for `bitcoin_explorer::BitcoinDB`!!.
//!
mod blk_files;
mod block_iter;
mod blocks_indexes;
mod errors;
mod reader;
mod txdb;
use blk_files::*;
use blocks_indexes::*;
use errors::*;
use reader::*;
use txdb::*;
use std::ops::Deref;
use std::path::Path;
use std::sync::Arc;
use bitcoin::{Block, Transaction, Txid};
pub use block_iter::BlockIter;
/// The three read handles into a Bitcoin Core data dir: the block index
/// (leveldb), the raw blk files, and the optional tx index (leveldb).
pub struct InnerDB {
    pub blocks_indexes: BlocksIndexes,
    pub blk_files: BlkFiles,
    pub tx_db: TxDB,
}
///
/// This is the main struct of this crate!! Click and read the doc.
///
/// All queries start from initializing `BitcoinDB`.
///
/// Note: This is an Arc wrap around `InnerDB`.
///
#[derive(Clone)]
/// Cheaply clonable handle (an `Arc` around [`InnerDB`]) — the main entry
/// point for all blockchain queries.
pub struct BitcoinDB(Arc<InnerDB>);
impl Deref for BitcoinDB {
    type Target = InnerDB;
    fn deref(&self) -> &Self::Target {
        // &Arc<InnerDB> coerces to &InnerDB.
        &self.0
    }
}
impl BitcoinDB {
    ///
    /// This is the main structure for reading Bitcoin blockchain data.
    ///
    /// Instantiate it by passing the `-datadir` directory of Bitcoin Core
    /// to `new()`. `tx_index`: whether to try to open the tx_index levelDB.
    ///
    pub fn new(p: &Path, tx_index: bool) -> OpResult<BitcoinDB> {
        if !p.exists() {
            return Err(OpError::from("data_dir does not exist"));
        }
        let blk_path = p.join("blocks");
        let index_path = blk_path.join("index");
        let blocks_indexes = BlocksIndexes::new(index_path.as_path())?;
        let tx_db = if tx_index {
            let tx_index_path = p.join("indexes").join("txindex");
            TxDB::new(&tx_index_path)
        } else {
            TxDB::null()
        };
        let inner = InnerDB {
            blocks_indexes,
            blk_files: BlkFiles::new(blk_path.as_path())?,
            tx_db,
        };
        Ok(BitcoinDB(Arc::new(inner)))
    }
    ///
    /// Get the maximum number of blocks downloaded.
    ///
    /// This API guarantees that blocks 0 to `get_block_count() - 1`
    /// have been downloaded and are available for query.
    ///
    pub fn get_block_count(&self) -> usize {
        // n_tx == 0 indicates that the block is not downloaded; the first
        // such record bounds the contiguous downloaded prefix.
        self.blocks_indexes
            .iter()
            .position(|record| record.n_tx == 0)
            .unwrap_or(self.blocks_indexes.len())
    }
    ///
    /// Get the block at the given height.
    ///
    pub fn get_block(&self, height: usize) -> OpResult<Block> {
        if let Some(index) = self.blocks_indexes.get(height) {
            // Propagate the read result directly (no redundant Ok(..?)).
            self.blk_files.read_block(index.n_file, index.n_data_pos)
        } else {
            Err(OpError::from("height not found"))
        }
    }
    ///
    /// Get a transaction by providing txid.
    ///
    /// Requires `tx_index` to be `true` for `BitcoinDB` and flag
    /// `txindex=1` when running Bitcoin Core; a transaction not yet
    /// indexed cannot be found this way.
    ///
    pub fn get_transaction(&self, txid: &Txid) -> OpResult<Transaction> {
        if !self.tx_db.is_open() {
            return Err(OpError::from("TxDB not open"));
        }
        // give special treatment for genesis transaction
        if self.tx_db.is_genesis_tx(txid) {
            return Ok(self.get_block(0)?.txdata.swap_remove(0));
        }
        let record = self.tx_db.get_tx_record(txid)?;
        self.blk_files
            .read_transaction(record.n_file, record.n_pos, record.n_tx_offset)
    }
    ///
    /// Iterate through all blocks from `start` to `end` (excluded).
    ///
    /// Blocks are read concurrently but produced in sequential order; the
    /// iterator stops automatically when a block cannot be read (i.e. at
    /// the max height available in the database).
    ///
    pub fn iter_block(&self, start: usize, end: usize) -> BlockIter {
        BlockIter::from_range(self, start, end)
    }
    /// True when output `vout` of `txid` carries 0 sats.
    /// Panics if the transaction cannot be fetched or `vout` is out of range.
    pub fn check_if_txout_value_is_zero(&self, txid: &Txid, vout: usize) -> bool {
        // No `.to_owned()`: reading `value` through the reference avoids
        // cloning the whole TxOut (script included) just to read a number.
        self.get_transaction(txid)
            .unwrap()
            .output
            .get(vout)
            .unwrap()
            .value
            .to_sat()
            == 0
    }
}

View File

@@ -0,0 +1,90 @@
use std::{fs::File, io::BufReader};
use bitcoin::{block::Header, consensus::Decodable, io::Cursor, Block, Transaction};
use byteorder::{LittleEndian, ReadBytesExt};
use super::OpResult;
///
/// binary file read utilities.
///
pub trait BlockchainRead {
    /// Decode Bitcoin Core's VARINT format: MSB-base-128, the high bit
    /// marks continuation, and each continuation adds a +1 bias.
    #[inline]
    fn read_varint(&mut self) -> OpResult<usize>
    where
        Self: bitcoin::io::Read,
    {
        let mut n = 0;
        loop {
            let ch_data = self.read_u8()?;
            n = (n << 7) | (ch_data & 0x7F) as usize;
            if ch_data & 0x80 > 0 {
                n += 1;
            } else {
                break;
            }
        }
        Ok(n)
    }
    /// Read a single byte.
    // NOTE(review): a failed read_exact panics via unwrap instead of
    // returning Err — confirm truncated input is impossible for callers.
    #[inline]
    fn read_u8(&mut self) -> OpResult<u8>
    where
        Self: bitcoin::io::Read,
    {
        let mut slice = [0u8; 1];
        self.read_exact(&mut slice).unwrap();
        Ok(slice[0])
    }
    /// Read a little-endian u32 (used for blk-file block size prefixes).
    #[inline]
    fn read_u32(&mut self) -> OpResult<u32>
    where
        Self: std::io::Read,
    {
        let u = ReadBytesExt::read_u32::<LittleEndian>(self)?;
        Ok(u)
    }
    /// Read exactly `count` bytes into a fresh Vec.
    // NOTE(review): same unwrap-on-truncation concern as read_u8.
    #[inline]
    fn read_u8_vec(&mut self, count: u32) -> OpResult<Vec<u8>>
    where
        Self: bitcoin::io::Read,
    {
        let mut arr = vec![0u8; count as usize];
        self.read_exact(&mut arr).unwrap();
        Ok(arr)
    }
    /// Consensus-decode a full Block.
    #[inline]
    fn read_block(&mut self) -> OpResult<Block>
    where
        Self: bitcoin::io::BufRead,
    {
        Ok(Block::consensus_decode(self)?)
    }
    /// Consensus-decode a Transaction.
    #[inline]
    fn read_transaction(&mut self) -> OpResult<Transaction>
    where
        Self: bitcoin::io::BufRead,
    {
        Ok(Transaction::consensus_decode(self)?)
    }
    /// Consensus-decode an 80-byte block header.
    #[inline]
    fn read_block_header(&mut self) -> OpResult<Header>
    where
        Self: bitcoin::io::BufRead,
    {
        Ok(Header::consensus_decode(self)?)
    }
}
// The two reader types actually used by this crate.
impl<T> BlockchainRead for Cursor<T> {}
impl BlockchainRead for BufReader<File> {}

View File

@@ -0,0 +1,147 @@
use std::{path::Path, str::FromStr};
use bitcoin::{hashes::Hash, io::Cursor, Txid};
use leveldb::{
database::Database,
kv::KV,
options::{Options, ReadOptions},
};
use crate::utils::log;
use super::{BlockchainRead, OpError, OpResult};
// Txid of the genesis block's coinbase transaction (special-cased because
// it is absent from the tx index — see `is_genesis_tx`).
const GENESIS_TXID: &str = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b";
///
/// tx-index: looking up transaction position using txid.
///
/// This is possible if Bitcoin Core has `txindex=1`.
///
pub struct TxDB {
    // None when the tx index levelDB could not be opened.
    db: Option<Database<TxKey>>,
    genesis_txid: Txid,
}
/// Records transaction storage on disk
pub struct TransactionRecord {
    pub txid: Txid,
    // blk file number containing the transaction's block.
    pub n_file: i32,
    // Byte position of the block within that blk file.
    pub n_pos: u32,
    // Byte offset of the transaction inside the block body.
    pub n_tx_offset: u32,
}
impl TransactionRecord {
    /// Decode a tx-index entry: `key` holds the raw txid bytes, `values`
    /// three Bitcoin Core VARINTs (file number, block pos, tx offset).
    // NOTE(review): Txid::from_slice unwrap panics on a wrong key length;
    // keys come from our own 't'-prefixed lookups, so assumed well-formed.
    fn from(key: &[u8], values: &[u8]) -> OpResult<Self> {
        let mut reader = Cursor::new(values);
        Ok(TransactionRecord {
            txid: Txid::from_slice(key).unwrap(),
            n_file: reader.read_varint()? as i32,
            n_pos: reader.read_varint()? as u32,
            n_tx_offset: reader.read_varint()? as u32,
        })
    }
}
impl TxDB {
    /// initialize TxDB for transaction queries; falls back to a disabled
    /// (`null`) instance when the levelDB cannot be opened.
    pub fn new(path: &Path) -> TxDB {
        let option_db = TxDB::try_open_db(path);
        if let Some(db) = option_db {
            TxDB {
                db: Some(db),
                genesis_txid: Txid::from_str(GENESIS_TXID).unwrap(),
            }
        } else {
            TxDB::null()
        }
    }
    /// Whether the tx index levelDB was successfully opened.
    #[inline]
    pub fn is_open(&self) -> bool {
        self.db.is_some()
    }
    /// Disabled instance: every record lookup will fail.
    #[inline]
    pub fn null() -> TxDB {
        TxDB {
            db: None,
            genesis_txid: Txid::from_str(GENESIS_TXID).unwrap(),
        }
    }
    #[inline]
    ///
    /// genesis tx is not included in UTXO because of Bitcoin Core Bug
    ///
    pub fn is_genesis_tx(&self, txid: &Txid) -> bool {
        txid == &self.genesis_txid
    }
    /// Open the levelDB at `path`, logging success or the failure reason.
    fn try_open_db(path: &Path) -> Option<Database<TxKey>> {
        if !path.exists() {
            log("Failed to open tx_index DB: tx_index not built");
            return None;
        }
        let options = Options::new();
        match Database::open(path, options) {
            Ok(db) => {
                log("Successfully opened tx_index DB!");
                Some(db)
            }
            Err(e) => {
                log(&format!("Failed to open tx_index DB: {:?}", e));
                None
            }
        }
    }
    /// note that this function cannot find genesis block, which needs special treatment
    // Keys are the txid bytes prefixed with b't' (Bitcoin Core's tx index
    // key scheme); the prefix is stripped again when decoding the record.
    pub fn get_tx_record(&self, txid: &Txid) -> OpResult<TransactionRecord> {
        if let Some(db) = &self.db {
            let inner = txid.as_byte_array();
            let mut key = Vec::with_capacity(inner.len() + 1);
            key.push(b't');
            key.extend(inner);
            let key = TxKey { key };
            let read_options = ReadOptions::new();
            match db.get(read_options, &key) {
                Ok(value) => {
                    if let Some(value) = value {
                        Ok(TransactionRecord::from(&key.key[1..], value.as_slice())?)
                    } else {
                        Err(OpError::from(
                            format!("value not found for txid: {}", txid).as_str(),
                        ))
                    }
                }
                Err(e) => Err(OpError::from(
                    format!("value not found for txid: {}", e).as_str(),
                )),
            }
        } else {
            Err(OpError::from("TxDB not open"))
        }
    }
}
/// levelDB key utility: owns the raw key bytes.
struct TxKey {
    key: Vec<u8>,
}
/// levelDB key utility
impl db_key::Key for TxKey {
    fn from_u8(key: &[u8]) -> Self {
        TxKey { key: key.to_vec() }
    }
    fn as_slice<T, F: Fn(&[u8]) -> T>(&self, f: F) -> T {
        f(self.key.as_slice())
    }
}

View File

@@ -0,0 +1,5 @@
use super::NUMBER_OF_UNSAFE_BLOCKS;
/// True when `height` is deep enough below the tip to be outside the
/// re-org window of `NUMBER_OF_UNSAFE_BLOCKS` most-recent blocks.
pub fn check_if_height_safe(height: usize, block_count: usize) -> bool {
    // saturating_sub avoids the usize-underflow panic the plain subtraction
    // had when block_count < NUMBER_OF_UNSAFE_BLOCKS (e.g. a fresh chain):
    // it yields 0, so no height is considered safe yet.
    height < block_count.saturating_sub(NUMBER_OF_UNSAFE_BLOCKS)
}

11
parser/src/bitcoin/mod.rs Normal file
View File

@@ -0,0 +1,11 @@
mod addresses;
mod consts;
mod daemon;
mod db;
mod height;
pub use addresses::*;
pub use consts::*;
pub use daemon::*;
pub use db::*;
pub use height::*;

View File

@@ -0,0 +1,235 @@
use std::{
collections::{BTreeMap, BTreeSet},
fmt::Debug,
fs,
};
use allocative::Allocative;
use derive_deref::{Deref, DerefMut};
// https://docs.rs/sanakirja/latest/sanakirja/index.html
// https://pijul.org/posts/2021-02-06-rethinking-sanakirja/
//
// Seems indeed much faster than ReDB and LMDB (heed)
// But a lot has changed code wise between them so a retest wouldn't hurt
//
// Possible compression: https://pijul.org/posts/sanakirja-zstd/
use sanakirja::{
btree::{self, page, page_unsized, BTreeMutPage, Db_},
direct_repr, Commit, Env, Error, MutTxn, RootDb, Storable, UnsizedStorable,
};
use crate::io::OUTPUTS_FOLDER_PATH;
pub type SizedDatabase<Key, Value> = Database<Key, Key, Value, page::Page<Key, Value>>;
pub type UnsizedDatabase<KeyTree, KeyDB, Value> =
Database<KeyTree, KeyDB, Value, page_unsized::Page<KeyDB, Value>>;
#[derive(Allocative)]
#[allocative(bound = "KeyTree: Allocative, KeyDB, Value: Allocative, Page")]
/// There is no `cached_gets` since it's much cheaper and faster to do a parallel search first using `unsafe_get` than caching gets along the way.
// Write-back cache over a sanakirja b-tree: puts/deletes are staged in
// memory and only applied to disk by `export()`.
pub struct Database<KeyTree, KeyDB, Value, Page>
where
    KeyTree: Ord + Clone + Debug,
    KeyDB: Ord + ?Sized + Storable,
    Value: Storable + PartialEq,
    Page: BTreeMutPage<KeyDB, Value>,
{
    // Staged inserts, flushed to the b-tree on `export()`.
    pub cached_puts: BTreeMap<KeyTree, Value>,
    // Staged deletes, applied before the puts on `export()`.
    pub cached_dels: BTreeSet<KeyTree>,
    #[allocative(skip)]
    db: Db_<KeyDB, Value, Page>,
    #[allocative(skip)]
    txn: MutTxn<Env, ()>,
    // Projects the in-memory key type onto the on-disk key type.
    #[allocative(skip)]
    key_tree_to_key_db: fn(&KeyTree) -> &KeyDB,
}
// Maximum key size (bytes) supported by sanakirja b-trees.
pub const SANAKIRJA_MAX_KEY_SIZE: usize = 510;
// Slot of the root b-tree inside the sanakirja environment.
const ROOT_DB: usize = 0;
const PAGE_SIZE: u64 = 4096 * 256; // 1 MiB - Must be a multiple of 4096
impl<KeyDB, KeyTree, Value, Page> Database<KeyTree, KeyDB, Value, Page>
where
    KeyTree: Ord + Clone + Debug,
    KeyDB: Ord + ?Sized + Storable,
    Value: Storable + PartialEq,
    Page: BTreeMutPage<KeyDB, Value>,
{
    /// Open (or create) the sanakirja file `folder/file` and load its
    /// root b-tree, creating one when the file is fresh.
    pub fn open(
        folder: &str,
        file: &str,
        key_tree_to_key_db: fn(&KeyTree) -> &KeyDB,
    ) -> color_eyre::Result<Self> {
        let mut txn = Self::init_txn(folder, file)?;
        let db = txn
            .root_db(ROOT_DB)
            .unwrap_or_else(|| unsafe { btree::create_db_(&mut txn).unwrap() });
        Ok(Self {
            cached_puts: BTreeMap::default(),
            cached_dels: BTreeSet::default(),
            db,
            txn,
            key_tree_to_key_db,
        })
    }
    /// Iterate the ON-DISK entries only (pending puts/dels are ignored).
    pub fn iter<F>(&self, callback: &mut F)
    where
        F: FnMut((&KeyDB, &Value)),
    {
        btree::iter(&self.txn, &self.db, None)
            .unwrap()
            .for_each(|entry| callback(entry.unwrap()));
    }
    /// Pending puts first, then the on-disk b-tree.
    // NOTE(review): pending deletes are NOT consulted here, so a key that
    // was `db_remove`d but still exists on disk is still returned —
    // confirm callers rely on that (or never get after remove).
    pub fn get(&self, key: &KeyTree) -> Option<&Value> {
        if let Some(cached_put) = self.get_from_puts(key) {
            return Some(cached_put);
        }
        self.db_get(key)
    }
    /// Look up the on-disk b-tree only (exact-key match).
    pub fn db_get(&self, key: &KeyTree) -> Option<&Value> {
        let k = (self.key_tree_to_key_db)(key);
        let option = btree::get(&self.txn, &self.db, k, None).unwrap();
        if let Some((k_found, v)) = option {
            // btree::get returns the first entry >= k; keep it only on an
            // exact match.
            if k == k_found {
                return Some(v);
            }
        }
        None
    }
    #[inline(always)]
    pub fn get_from_puts(&self, key: &KeyTree) -> Option<&Value> {
        self.cached_puts.get(key)
    }
    #[inline(always)]
    pub fn get_mut_from_puts(&mut self, key: &KeyTree) -> Option<&mut Value> {
        self.cached_puts.get_mut(key)
    }
    /// Remove a pending put if present (returning it), otherwise stage an
    /// on-disk delete and return None.
    // NOTE(review): when the key was in cached_puts AND also exists on
    // disk, the on-disk copy is not staged for deletion here — confirm the
    // callers' insert/remove protocol makes that case impossible.
    #[inline(always)]
    pub fn remove(&mut self, key: &KeyTree) -> Option<Value> {
        self.remove_from_puts(key).or_else(|| {
            self.db_remove(key);
            None
        })
    }
    /// Stage a delete of the on-disk entry (applied on `export()`).
    #[inline(always)]
    pub fn db_remove(&mut self, key: &KeyTree) {
        self.cached_dels.insert(key.clone());
    }
    /// Overwrite: stage a delete of any on-disk value AND a fresh put.
    pub fn update(&mut self, key: KeyTree, value: Value) -> Option<Value> {
        self.cached_dels.insert(key.clone());
        self.cached_puts.insert(key, value)
    }
    #[inline(always)]
    pub fn remove_from_puts(&mut self, key: &KeyTree) -> Option<Value> {
        self.cached_puts.remove(key)
    }
    /// Stage a put, cancelling any pending delete of the same key.
    #[inline(always)]
    pub fn insert(&mut self, key: KeyTree, value: Value) -> Option<Value> {
        self.cached_dels.remove(&key);
        self.unsafe_insert(key, value)
    }
    /// Stage a put WITHOUT clearing a pending delete ("unsafe" part).
    #[inline(always)]
    pub fn unsafe_insert(&mut self, key: KeyTree, value: Value) -> Option<Value> {
        self.cached_puts.insert(key, value)
    }
    /// Create the folder if needed and begin a mutable sanakirja txn on it.
    fn init_txn(folder: &str, file: &str) -> color_eyre::Result<MutTxn<Env, ()>> {
        let path = databases_folder_path(folder);
        fs::create_dir_all(&path)?;
        let env = unsafe { Env::new_nolock(format!("{path}/{file}"), PAGE_SIZE, 1).unwrap() };
        let txn = Env::mut_txn_begin(env)?;
        Ok(txn)
    }
    /// Apply staged deletes, then staged puts, then commit the txn.
    /// Consumes self; a no-op (no commit) when nothing is staged.
    pub fn export(mut self) -> color_eyre::Result<(), Error> {
        if self.cached_dels.is_empty() && self.cached_puts.is_empty() {
            return Ok(());
        }
        self.cached_dels
            .into_iter()
            .try_for_each(|key| -> Result<(), Error> {
                btree::del(
                    &mut self.txn,
                    &mut self.db,
                    (self.key_tree_to_key_db)(&key),
                    None,
                )?;
                Ok(())
            })?;
        self.cached_puts
            .into_iter()
            .try_for_each(|(key, value)| -> Result<(), Error> {
                btree::put(
                    &mut self.txn,
                    &mut self.db,
                    (self.key_tree_to_key_db)(&key),
                    &value,
                )?;
                Ok(())
            })?;
        self.txn.set_root(ROOT_DB, self.db.db.into());
        self.txn.commit()
    }
}
#[derive(
    Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Default, Copy, Allocative,
)]
/// Fixed 19-byte array with a direct sanakirja storage representation.
pub struct U8x19([u8; 19]);
direct_repr!(U8x19);
impl From<&[u8]> for U8x19 {
    // Panics if `slice.len() != 19` (copy_from_slice contract).
    fn from(slice: &[u8]) -> Self {
        let mut arr = Self::default();
        arr.copy_from_slice(slice);
        arr
    }
}
#[derive(
    Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deref, DerefMut, Default, Copy, Allocative,
)]
/// Fixed 31-byte array with a direct sanakirja storage representation.
pub struct U8x31([u8; 31]);
direct_repr!(U8x31);
impl From<&[u8]> for U8x31 {
    // Panics if `slice.len() != 31` (copy_from_slice contract).
    fn from(slice: &[u8]) -> Self {
        let mut arr = Self::default();
        arr.copy_from_slice(slice);
        arr
    }
}
/// Path of the databases sub-folder named `folder` under the outputs dir.
pub fn databases_folder_path(folder: &str) -> String {
    format!("{}/databases/{}", OUTPUTS_FOLDER_PATH, folder)
}

View File

@@ -0,0 +1,32 @@
use std::{fs, io};
use crate::{structs::WNaiveDate, utils::log};
use super::databases_folder_path;
/// Common interface over a folder-backed group of databases.
pub trait AnyDatabaseGroup
where
    Self: Sized,
{
    /// Build the group, loading any persisted metadata.
    fn import() -> Self;
    /// Flush pending writes, stamping them with the given height/date.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()>;
    /// Name of this group's folder under the databases directory.
    fn folder<'a>() -> &'a str;
    /// Wipe the group: clear metadata first, then delete the whole folder.
    // NOTE(review): remove_dir_all errors if the folder doesn't exist —
    // confirm callers only reset groups that were previously created.
    fn reset(&mut self) -> color_eyre::Result<(), io::Error> {
        log(&format!("Reset {}", Self::folder()));
        self.reset_metadata();
        fs::remove_dir_all(Self::full_path())?;
        Ok(())
    }
    fn full_path() -> String {
        databases_folder_path(Self::folder())
    }
    fn reset_metadata(&mut self);
}

View File

@@ -0,0 +1,148 @@
use std::{
collections::BTreeMap,
fs, mem,
ops::{Deref, DerefMut},
};
use allocative::Allocative;
use rayon::prelude::*;
use crate::{
structs::{AddressData, WNaiveDate},
utils::time,
};
use super::{databases_folder_path, AnyDatabaseGroup, Metadata, SizedDatabase};
type Key = u32;
type Value = AddressData;
type Database = SizedDatabase<Key, Value>;
#[derive(Allocative)]
/// address_index -> AddressData, sharded over many databases of
/// `DB_MAX_SIZE` keys each (see `db_index`).
pub struct AddressIndexToAddressData {
    pub metadata: Metadata,
    // Shard index (key / DB_MAX_SIZE) -> opened database.
    map: BTreeMap<usize, Database>,
}
impl Deref for AddressIndexToAddressData {
    type Target = BTreeMap<usize, Database>;
    fn deref(&self) -> &Self::Target {
        &self.map
    }
}
impl DerefMut for AddressIndexToAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
// Number of keys covered by one shard database.
const DB_MAX_SIZE: usize = 500_000;
impl AddressIndexToAddressData {
    /// Stage a put in the right shard (opens it if needed); "unsafe"
    /// because it does not cancel a pending delete (see Database).
    pub fn unsafe_insert(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).unsafe_insert(key, value)
    }
    /// Remove from the right shard (opens it if needed).
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }
    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    // Panics if the shard was never opened (hence "unsafe").
    pub fn unsafe_get_from_cache(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().get_from_puts(key)
    }
    /// On-disk lookup in an already-open shard; panics if it isn't open.
    pub fn unsafe_get_from_db(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);
        self.get(&db_index).unwrap().db_get(key)
    }
    /// Return the shard for `key`, opening it on first use. Shard files
    /// are named by their covered key range, e.g. "500000..1000000".
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);
        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );
            SizedDatabase::open(Self::folder(), &db_name, |key| key).unwrap()
        })
    }
    /// Visit every on-disk entry across all shards.
    pub fn iter<F>(&mut self, callback: &mut F)
    where
        F: FnMut((&Key, &Value)),
    {
        time("Iter through address_index_to_address_data", || {
            self.open_all();
            // MUST CLEAR MAP, otherwise some weird shit in happening later in the export I think
            mem::take(&mut self.map)
                .values()
                .for_each(|database| database.iter(callback));
        });
    }
    /// Open every shard found on disk (files named "<start>..<end>").
    fn open_all(&mut self) {
        fs::read_dir(databases_folder_path(Self::folder()))
            .unwrap()
            .map(|entry| {
                entry
                    .unwrap()
                    .path()
                    .file_name()
                    .unwrap()
                    .to_str()
                    .unwrap()
                    .to_owned()
            })
            .filter(|file_name| file_name.contains(".."))
            .for_each(|path| {
                // The shard's first key is enough to route open_db to it.
                self.open_db(&path.split("..").next().unwrap().parse::<u32>().unwrap());
            });
    }
    /// Which shard a key belongs to.
    fn db_index(key: &Key) -> usize {
        *key as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToAddressData {
    /// Starts with no shard open; metadata is read from disk (or defaulted).
    fn import() -> Self {
        Self {
            map: BTreeMap::default(),
            metadata: Metadata::import(&Self::full_path()),
        }
    }

    /// Exports every open shard in parallel, then the metadata.
    ///
    /// Fix: the metadata export previously used `.unwrap()`, panicking on
    /// failure even though this function returns a `Result`; the error is now
    /// propagated with `?`, consistent with the other database groups.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        // Take the map so exported handles are dropped afterwards.
        mem::take(&mut self.map)
            .into_par_iter()
            .try_for_each(|(_, db)| db.export())?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    fn folder<'a>() -> &'a str {
        "address_index_to_address_data"
    }
}

View File

@@ -0,0 +1,123 @@
use std::{
collections::BTreeMap,
mem,
ops::{Deref, DerefMut},
};
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{EmptyAddressData, WNaiveDate};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
type Key = u32;
type Value = EmptyAddressData;
type Database = SizedDatabase<Key, Value>;

/// Sharded on-disk map from an address index to the slimmed-down data kept
/// for addresses whose balance has gone back to zero.
#[derive(Allocative)]
pub struct AddressIndexToEmptyAddressData {
    pub metadata: Metadata,
    // Shard index (key / DB_MAX_SIZE) -> open database handle.
    map: BTreeMap<usize, Database>,
}

impl Deref for AddressIndexToEmptyAddressData {
    type Target = BTreeMap<usize, Database>;

    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for AddressIndexToEmptyAddressData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}

/// Number of keys covered by one shard; also fixes the `start..end` file names.
const DB_MAX_SIZE: usize = 500_000;
impl AddressIndexToEmptyAddressData {
    /// Inserts into the shard owning `key`, opening it if needed.
    /// "unsafe" refers to skipping duplicate checks, not memory safety.
    pub fn unsafe_insert(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).unsafe_insert(key, value)
    }

    /// Removes `key` from its shard, opening it if needed.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    pub fn unsafe_get_from_cache(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);

        self.get(&db_index).and_then(|db| db.get_from_puts(key))
    }

    /// Reads `key` directly from the shard's on-disk store (not its pending puts).
    ///
    /// # Panics
    ///
    /// Panics with a descriptive message if the shard owning `key` was not
    /// opened beforehand (replaces a leftover `dbg!` + bare `panic!`).
    pub fn unsafe_get_from_db(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);

        self.get(&db_index)
            .unwrap_or_else(|| {
                panic!(
                    "shard {db_index} for key {key} is not open (open shards: {:?})",
                    self.map.keys()
                )
            })
            .db_get(key)
    }

    /// Returns the shard owning `key`, opening (or creating) it if necessary.
    /// Shard files are named `start..end` after the key range they cover.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);

        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );

            SizedDatabase::open(Self::folder(), &db_name, |key| key).unwrap()
        })
    }

    /// Maps a key to the index of the shard that owns it.
    fn db_index(key: &Key) -> usize {
        *key as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for AddressIndexToEmptyAddressData {
    /// Starts with no shard open; metadata is read from disk (or defaulted).
    fn import() -> Self {
        let metadata = Metadata::import(&Self::full_path());

        Self {
            metadata,
            map: BTreeMap::default(),
        }
    }

    /// Exports every open shard in parallel, then the metadata.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        // Take the map so the handles are dropped once exported.
        let databases = mem::take(&mut self.map);

        databases
            .into_par_iter()
            .try_for_each(|(_, database)| database.export())?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    fn folder<'a>() -> &'a str {
        "address_index_to_empty_address_data"
    }
}

View File

@@ -0,0 +1,309 @@
use std::{collections::BTreeMap, mem, thread};
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{Address, WNaiveDate};
use super::{
AnyDatabaseGroup, Database, Metadata, SizedDatabase, U8x19, U8x31,
UnsizedDatabase as _UnsizedDatabase,
};
type Value = u32;

type U8x19Database = SizedDatabase<U8x19, Value>;
type U8x31Database = SizedDatabase<U8x31, Value>;
type U32Database = SizedDatabase<u32, Value>;
type UnsizedDatabase = _UnsizedDatabase<Box<[u8]>, [u8], Value>;

// One alias per script kind; several share the same underlying database type
// but keep distinct names for readability.
type P2PKDatabase = U8x19Database;
type P2PKHDatabase = U8x19Database;
type P2SHDatabase = U8x19Database;
type P2WPKHDatabase = U8x19Database;
type P2WSHDatabase = U8x31Database;
type P2TRDatabase = U8x31Database;
type UnknownDatabase = U32Database;
type OpReturnDatabase = U32Database;
type PushOnlyDatabase = U32Database;
type EmptyDatabase = U32Database;
type MultisigDatabase = UnsizedDatabase;

/// Maps an `Address` to its address index, partitioned by script kind and —
/// for the fixed-size kinds — further sharded by a `u16` prefix of the key.
#[derive(Allocative)]
pub struct AddressToAddressIndex {
    pub metadata: Metadata,
    p2pk: BTreeMap<u16, P2PKDatabase>,
    p2pkh: BTreeMap<u16, P2PKHDatabase>,
    p2sh: BTreeMap<u16, P2SHDatabase>,
    p2wpkh: BTreeMap<u16, P2WPKHDatabase>,
    p2wsh: BTreeMap<u16, P2WSHDatabase>,
    p2tr: BTreeMap<u16, P2TRDatabase>,
    // The remaining kinds each live in a single, lazily-opened database.
    op_return: Option<OpReturnDatabase>,
    push_only: Option<PushOnlyDatabase>,
    unknown: Option<UnknownDatabase>,
    empty: Option<EmptyDatabase>,
    multisig: Option<MultisigDatabase>,
}
impl AddressToAddressIndex {
// pub fn safe_get(&mut self, address: &Address) -> Option<&Value> {
// match address {
// Address::Empty(key) => self.open_empty().get(key),
// Address::Unknown(key) => self.open_unknown().get(key),
// Address::MultiSig(key) => self.open_multisig().get(key),
// Address::P2PK((prefix, rest)) => self.open_p2pk(*prefix).get(rest),
// Address::P2PKH((prefix, rest)) => self.open_p2pkh(*prefix).get(rest),
// Address::P2SH((prefix, rest)) => self.open_p2sh(*prefix).get(rest),
// Address::P2WPKH((prefix, rest)) => self.open_p2wpkh(*prefix).get(rest),
// Address::P2WSH((prefix, rest)) => self.open_p2wsh(*prefix).get(rest),
// Address::P2TR((prefix, rest)) => self.open_p2tr(*prefix).get(rest),
// }
// }
pub fn open_db(&mut self, address: &Address) {
match address {
Address::Empty(_) => {
self.open_empty();
}
Address::Unknown(_) => {
self.open_unknown();
}
Address::OpReturn(_) => {
self.open_op_return();
}
Address::PushOnly(_) => {
self.open_push_only();
}
Address::MultiSig(_) => {
self.open_multisig();
}
Address::P2PK((prefix, _)) => {
self.open_p2pk(*prefix);
}
Address::P2PKH((prefix, _)) => {
self.open_p2pkh(*prefix);
}
Address::P2SH((prefix, _)) => {
self.open_p2sh(*prefix);
}
Address::P2WPKH((prefix, _)) => {
self.open_p2wpkh(*prefix);
}
Address::P2WSH((prefix, _)) => {
self.open_p2wsh(*prefix);
}
Address::P2TR((prefix, _)) => {
self.open_p2tr(*prefix);
}
}
}
/// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
/// Though it makes it easy to use with rayon
pub fn unsafe_get(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get(key),
}
}
pub fn unsafe_get_from_puts(&self, address: &Address) -> Option<&Value> {
match address {
Address::Empty(key) => self.empty.as_ref().unwrap().get_from_puts(key),
Address::Unknown(key) => self.unknown.as_ref().unwrap().get_from_puts(key),
Address::OpReturn(key) => self.op_return.as_ref().unwrap().get_from_puts(key),
Address::PushOnly(key) => self.push_only.as_ref().unwrap().get_from_puts(key),
Address::MultiSig(key) => self.multisig.as_ref().unwrap().get_from_puts(key),
Address::P2PK((prefix, key)) => self.p2pk.get(prefix).unwrap().get_from_puts(key),
Address::P2PKH((prefix, key)) => self.p2pkh.get(prefix).unwrap().get_from_puts(key),
Address::P2SH((prefix, key)) => self.p2sh.get(prefix).unwrap().get_from_puts(key),
Address::P2WPKH((prefix, key)) => self.p2wpkh.get(prefix).unwrap().get_from_puts(key),
Address::P2WSH((prefix, key)) => self.p2wsh.get(prefix).unwrap().get_from_puts(key),
Address::P2TR((prefix, key)) => self.p2tr.get(prefix).unwrap().get_from_puts(key),
}
}
pub fn insert(&mut self, address: Address, value: Value) -> Option<Value> {
self.metadata.called_insert();
match address {
Address::Empty(key) => self.open_empty().insert(key, value),
Address::Unknown(key) => self.open_unknown().insert(key, value),
Address::OpReturn(key) => self.open_op_return().insert(key, value),
Address::PushOnly(key) => self.open_push_only().insert(key, value),
Address::MultiSig(key) => self.open_multisig().insert(key, value),
Address::P2PK((prefix, rest)) => self.open_p2pk(prefix).insert(rest, value),
Address::P2PKH((prefix, rest)) => self.open_p2pkh(prefix).insert(rest, value),
Address::P2SH((prefix, rest)) => self.open_p2sh(prefix).insert(rest, value),
Address::P2WPKH((prefix, rest)) => self.open_p2wpkh(prefix).insert(rest, value),
Address::P2WSH((prefix, rest)) => self.open_p2wsh(prefix).insert(rest, value),
Address::P2TR((prefix, rest)) => self.open_p2tr(prefix).insert(rest, value),
}
}
pub fn open_p2pk(&mut self, prefix: u16) -> &mut P2PKDatabase {
self.p2pk.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2pk"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_p2pkh(&mut self, prefix: u16) -> &mut P2PKHDatabase {
self.p2pkh.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2pkh"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_p2sh(&mut self, prefix: u16) -> &mut P2SHDatabase {
self.p2sh.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2sh"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_p2wpkh(&mut self, prefix: u16) -> &mut P2WPKHDatabase {
self.p2wpkh.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2wpkh"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_p2wsh(&mut self, prefix: u16) -> &mut P2WSHDatabase {
self.p2wsh.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2wsh"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_p2tr(&mut self, prefix: u16) -> &mut P2TRDatabase {
self.p2tr.entry(prefix).or_insert_with(|| {
Database::open(
&format!("{}/{}", Self::folder(), "p2tr"),
&prefix.to_string(),
|key| key,
)
.unwrap()
})
}
pub fn open_unknown(&mut self) -> &mut UnknownDatabase {
self.unknown
.get_or_insert_with(|| Database::open(Self::folder(), "unknown", |key| key).unwrap())
}
pub fn open_op_return(&mut self) -> &mut UnknownDatabase {
self.op_return
.get_or_insert_with(|| Database::open(Self::folder(), "op_return", |key| key).unwrap())
}
pub fn open_push_only(&mut self) -> &mut UnknownDatabase {
self.push_only
.get_or_insert_with(|| Database::open(Self::folder(), "push_only", |key| key).unwrap())
}
pub fn open_empty(&mut self) -> &mut UnknownDatabase {
self.empty
.get_or_insert_with(|| Database::open(Self::folder(), "empty", |key| key).unwrap())
}
pub fn open_multisig(&mut self) -> &mut MultisigDatabase {
self.multisig.get_or_insert_with(|| {
Database::open(Self::folder(), "multisig", |key| key as &[u8]).unwrap()
})
}
}
impl AnyDatabaseGroup for AddressToAddressIndex {
    /// Starts with every database closed; metadata is read from disk.
    fn import() -> Self {
        Self {
            p2pk: BTreeMap::default(),
            p2pkh: BTreeMap::default(),
            p2sh: BTreeMap::default(),
            p2wpkh: BTreeMap::default(),
            p2wsh: BTreeMap::default(),
            p2tr: BTreeMap::default(),
            op_return: None,
            push_only: None,
            unknown: None,
            empty: None,
            multisig: None,
            metadata: Metadata::import(&Self::full_path()),
        }
    }

    /// Exports every open database (spread over three threads plus rayon),
    /// then the metadata.
    ///
    /// Fix: export errors from the spawned threads and from the multisig
    /// database were previously discarded (`s.spawn(..)` results and
    /// `self.multisig.take().map(|db| db.export())`); they are now joined
    /// and propagated with `?`.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        thread::scope(|s| -> color_eyre::Result<()> {
            // Each thread takes ownership of a disjoint set of fields.
            let u8x19_handle = s.spawn(|| {
                mem::take(&mut self.p2pk)
                    .into_par_iter()
                    .chain(mem::take(&mut self.p2pkh).into_par_iter())
                    .chain(mem::take(&mut self.p2sh).into_par_iter())
                    .chain(mem::take(&mut self.p2wpkh).into_par_iter())
                    .try_for_each(|(_, db)| db.export())
            });

            let u8x31_handle = s.spawn(|| {
                mem::take(&mut self.p2wsh)
                    .into_par_iter()
                    .chain(mem::take(&mut self.p2tr).into_par_iter())
                    .try_for_each(|(_, db)| db.export())
            });

            let u32_handle = s.spawn(|| {
                [
                    self.unknown.take(),
                    self.op_return.take(),
                    self.push_only.take(),
                    self.empty.take(),
                ]
                .into_par_iter()
                .flatten()
                .try_for_each(|db| db.export())
            });

            // Export the unsized multisig database on the current thread.
            if let Some(db) = self.multisig.take() {
                db.export()?;
            }

            // `join` only errors if the thread panicked, hence the `unwrap`s.
            u8x19_handle.join().unwrap()?;
            u8x31_handle.join().unwrap()?;
            u32_handle.join().unwrap()?;

            Ok(())
        })?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset()
    }

    fn folder<'a>() -> &'a str {
        "address_to_address_index"
    }
}

View File

@@ -0,0 +1,116 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use std::{
fmt::Debug,
fs, io,
ops::{Deref, DerefMut},
};
use crate::{
io::Binary,
structs::{Counter, WNaiveDate},
};
/// Per-database-group metadata (insert/remove counters, last exported
/// height/date) bound to the folder path it is persisted in.
#[derive(Default, Debug, Encode, Decode, Allocative)]
pub struct Metadata {
    // Folder the metadata file lives in.
    path: String,
    data: MetadataData,
}

impl Deref for Metadata {
    type Target = MetadataData;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

impl DerefMut for Metadata {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.data
    }
}
impl Metadata {
    /// Loads metadata from `path`, falling back to defaults when the file is
    /// missing or unreadable.
    pub fn import(path: &str) -> Self {
        Self {
            path: path.to_owned(),
            data: MetadataData::import(path).unwrap_or_default(),
        }
    }

    /// Advances `last_height`/`last_date` monotonically (never backwards)
    /// and writes the metadata file.
    pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        if self.last_height.unwrap_or_default() < height {
            self.last_height.replace(height);
        }

        if self.last_date.unwrap_or_default() < date {
            self.last_date.replace(date);
        }

        self.data.export(&self.path)
    }

    /// Clears counters and removes the on-disk file (failure to remove is ignored).
    pub fn reset(&mut self) {
        let _ = self.data.reset(&self.path);
    }

    /// Bumps the monotonic serial and the live-entry counter.
    pub fn called_insert(&mut self) {
        self.serial += 1;
        self.len.increment();
    }

    /// Decrements the live-entry counter.
    pub fn called_remove(&mut self) {
        self.len.decrement();
    }

    /// True when both groups were last exported at the same height and date.
    pub fn check_if_in_sync(&self, other: &Self) -> bool {
        self.last_date == other.last_date && self.last_height == other.last_height
    }

    /// True when `self` is at least as far along as `other`.
    /// NOTE(review): `Option` ordering makes `None` compare less than any
    /// `Some` — confirm that is the intended meaning for fresh databases.
    pub fn check_farer_or_in_sync(&self, other: &Self) -> bool {
        self.last_date >= other.last_date && self.last_height >= other.last_height
    }
}
/// The serialized payload of `Metadata` (what actually lands in `metadata.bin`).
#[derive(Default, Debug, Encode, Decode, Allocative)]
pub struct MetadataData {
    // Monotonic insert counter (never decremented).
    pub serial: usize,
    // Live entry count (insert - remove).
    pub len: Counter,
    pub last_height: Option<usize>,
    pub last_date: Option<WNaiveDate>,
}

impl MetadataData {
    fn name<'a>() -> &'a str {
        "metadata"
    }

    /// `{folder_path}/metadata.bin`
    fn full_path(folder_path: &str) -> String {
        let name = Self::name();
        format!("{folder_path}/{name}.bin")
    }

    /// Reads the binary metadata file, creating the folder first so a later
    /// export cannot fail on a missing directory.
    pub fn import(path: &str) -> color_eyre::Result<Self> {
        fs::create_dir_all(path)?;

        Binary::import(&Self::full_path(path))
    }

    /// Writes the binary metadata file.
    pub fn export(&self, path: &str) -> color_eyre::Result<()> {
        Binary::export(&Self::full_path(path), self)
    }

    /// Clears all fields and deletes the on-disk file.
    pub fn reset(&mut self, path: &str) -> color_eyre::Result<(), io::Error> {
        self.clear();

        fs::remove_file(Self::full_path(path))
    }

    fn clear(&mut self) {
        self.serial = 0;
        self.len.reset();
        self.last_height = None;
        self.last_date = None;
    }
}

178
parser/src/databases/mod.rs Normal file
View File

@@ -0,0 +1,178 @@
use std::thread::{self};
use allocative::Allocative;
mod _database;
mod _trait;
mod address_index_to_address_data;
mod address_index_to_empty_address_data;
mod address_to_address_index;
mod metadata;
mod txid_to_tx_data;
mod txout_index_to_address_index;
mod txout_index_to_amount;
pub use _database::*;
use _trait::*;
pub use address_index_to_address_data::*;
pub use address_index_to_empty_address_data::*;
pub use address_to_address_index::*;
use metadata::*;
pub use txid_to_tx_data::*;
pub use txout_index_to_address_index::*;
pub use txout_index_to_amount::*;
use crate::{structs::WNaiveDate, utils::time};
/// All database groups used while parsing the chain, bundled together so they
/// can be imported, exported and reset as one unit.
#[derive(Allocative)]
pub struct Databases {
    pub address_index_to_address_data: AddressIndexToAddressData,
    pub address_index_to_empty_address_data: AddressIndexToEmptyAddressData,
    pub address_to_address_index: AddressToAddressIndex,
    pub txid_to_tx_data: TxidToTxData,
    pub txout_index_to_address_index: TxoutIndexToAddressIndex,
    pub txout_index_to_amount: TxoutIndexToAmount,
}
impl Databases {
    /// Imports every database group.
    pub fn import() -> Self {
        let address_index_to_address_data = AddressIndexToAddressData::import();

        let address_index_to_empty_address_data = AddressIndexToEmptyAddressData::import();

        let address_to_address_index = AddressToAddressIndex::import();

        let txid_to_tx_data = TxidToTxData::import();

        let txout_index_to_address_index = TxoutIndexToAddressIndex::import();

        let txout_index_to_amount = TxoutIndexToAmount::import();

        Self {
            address_index_to_address_data,
            address_index_to_empty_address_data,
            address_to_address_index,
            txid_to_tx_data,
            txout_index_to_address_index,
            txout_index_to_amount,
        }
    }

    /// Exports all groups: first the two tx-related groups in parallel, then
    /// the four address-related groups in parallel.
    /// NOTE(review): the spawned threads' `Result`s are discarded, so per-group
    /// export errors are not propagated here — only panics would surface.
    pub fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        thread::scope(|s| {
            s.spawn(|| {
                time("> Database txid_to_tx_data", || {
                    self.txid_to_tx_data.export(height, date)
                })
            });
            s.spawn(|| {
                time("> Database txout_index_to_amount", || {
                    self.txout_index_to_amount.export(height, date)
                })
            });
        });

        thread::scope(|s| {
            s.spawn(|| {
                time("> Database address_index_to_address_data", || {
                    self.address_index_to_address_data.export(height, date)
                })
            });
            s.spawn(|| {
                time("> Database address_index_to_empty_address_data", || {
                    self.address_index_to_empty_address_data
                        .export(height, date)
                })
            });
            s.spawn(|| {
                time("> Database address_to_address_index", || {
                    self.address_to_address_index.export(height, date)
                })
            });
            s.spawn(|| {
                time("> Database txout_index_to_address_index", || {
                    self.txout_index_to_address_index.export(height, date)
                })
            });
        });

        Ok(())
    }

    /// Resets the tx-related groups, and optionally the address-related ones.
    pub fn reset(&mut self, include_addresses: bool) {
        if include_addresses {
            let _ = self.address_index_to_address_data.reset();
            let _ = self.address_index_to_empty_address_data.reset();
            let _ = self.address_to_address_index.reset();
            let _ = self.txout_index_to_address_index.reset();
        }

        let _ = self.txid_to_tx_data.reset();
        let _ = self.txout_index_to_amount.reset();
    }

    /// True when the address databases lag behind `height`/`date` and thus
    /// still need address computation.
    pub fn check_if_needs_to_compute_addresses(&self, height: usize, date: WNaiveDate) -> bool {
        let check_height = |last_height: Option<usize>| {
            last_height.map_or(true, |last_height| last_height < height)
        };

        let check_date =
            |last_date: Option<WNaiveDate>| last_date.map_or(true, |last_date| last_date < date);

        let check_metadata = |metadata: &Metadata| {
            check_height(metadata.last_height) || check_date(metadata.last_date)
        };

        // We only need to check one as we previously checked that they're all in sync
        check_metadata(&self.address_to_address_index.metadata)
    }

    /// Checks internal sync between groups, then that the address datasets
    /// (via the given minimums) are at least as far along as the address
    /// databases. The trailing expression is the final check's value.
    pub fn check_if_usable(
        &self,
        min_initial_last_address_height: Option<usize>,
        min_initial_last_address_date: Option<WNaiveDate>,
    ) -> bool {
        let are_tx_databases_in_sync = self
            .txout_index_to_amount
            .metadata
            .check_if_in_sync(&self.txid_to_tx_data.metadata);

        if !are_tx_databases_in_sync {
            return false;
        }

        let are_address_databases_in_sync = self
            .address_to_address_index
            .metadata
            .check_if_in_sync(&self.address_index_to_empty_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.address_index_to_address_data.metadata)
            && self
                .address_to_address_index
                .metadata
                .check_if_in_sync(&self.txout_index_to_address_index.metadata);

        if !are_address_databases_in_sync {
            return false;
        }

        let are_address_databases_farer_or_in_sync_with_tx_database = self
            .address_to_address_index
            .metadata
            .check_farer_or_in_sync(&self.txid_to_tx_data.metadata);

        if !are_address_databases_farer_or_in_sync_with_tx_database {
            return false;
        }

        // are_address_datasets_farer_or_in_sync_with_address_databases
        // NOTE(review): `Option` comparison ranks `None` below any `Some` —
        // confirm that is intended when a side has never been exported.
        min_initial_last_address_height >= self.address_to_address_index.metadata.last_height
            && min_initial_last_address_date >= self.address_to_address_index.metadata.last_date
    }
}

View File

@@ -0,0 +1,147 @@
use std::{
collections::BTreeMap,
mem,
ops::{Deref, DerefMut},
};
use allocative::Allocative;
use bitcoin::Txid;
use rayon::prelude::*;
use crate::structs::{TxData, WNaiveDate};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase, U8x31};
// Keys are the last 31 bytes of a txid; the first byte selects the shard.
type Key = U8x31;
type Value = TxData;
type Database = SizedDatabase<Key, Value>;

/// Sharded on-disk map from a transaction id to its `TxData`,
/// partitioned into 256 shards by the txid's first byte.
#[derive(Allocative)]
pub struct TxidToTxData {
    pub metadata: Metadata,
    // Shard id (txid[0]) -> open database handle.
    map: BTreeMap<u8, Database>,
}

impl Deref for TxidToTxData {
    type Target = BTreeMap<u8, Database>;

    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for TxidToTxData {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}
impl TxidToTxData {
    /// Inserts into the shard selected by the txid's first byte.
    pub fn insert(&mut self, txid: &Txid, tx_index: Value) -> Option<Value> {
        self.metadata.called_insert();
        let txid_key = Self::txid_to_key(txid);
        self.open_db(txid).insert(txid_key, tx_index)
    }

    // pub fn safe_get(&mut self, txid: &Txid) -> Option<&Value> {
    //     let txid_key = Self::txid_to_key(txid);
    //     self.open_db(txid).get(&txid_key)
    // }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed.
    /// Though it makes it easy to use with rayon
    /// NOTE(review): panics if the shard was not opened beforehand (`unwrap`).
    pub fn unsafe_get(&self, txid: &Txid) -> Option<&Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);

        self.get(&db_index).unwrap().get(&txid_key)
    }

    // pub fn unsafe_get_from_puts(&self, txid: &Txid) -> Option<&Value> {
    //     let txid_key = Self::txid_to_key(txid);
    //     let db_index = Self::db_index(txid);
    //     self.get(&db_index).unwrap().get_from_puts(&txid_key)
    // }

    /// Mutable lookup limited to the shard's pending (not yet exported) puts.
    /// Panics if the shard was not opened beforehand.
    pub fn unsafe_get_mut_from_puts(&mut self, txid: &Txid) -> Option<&mut Value> {
        let txid_key = Self::txid_to_key(txid);
        let db_index = Self::db_index(txid);

        self.get_mut(&db_index)
            .unwrap()
            .get_mut_from_puts(&txid_key)
    }

    /// Schedules removal of `txid` from the on-disk store.
    pub fn remove_from_db(&mut self, txid: &Txid) {
        self.metadata.called_remove();

        let txid_key = Self::txid_to_key(txid);

        self.open_db(txid).db_remove(&txid_key);
    }

    /// Removes `txid` from the shard's pending puts only.
    pub fn remove_from_puts(&mut self, txid: &Txid) {
        self.metadata.called_remove();

        let txid_key = Self::txid_to_key(txid);

        self.open_db(txid).remove_from_puts(&txid_key);
    }

    /// Overwrites the stored `TxData` for `txid`.
    pub fn update(&mut self, txid: &Txid, tx_data: TxData) {
        let txid_key = Self::txid_to_key(txid);

        self.open_db(txid).update(txid_key, tx_data);
    }

    /// Returns the shard for `txid`, opening (or creating) it if necessary.
    #[inline(always)]
    pub fn open_db(&mut self, txid: &Txid) -> &mut Database {
        let db_index = Self::db_index(txid);

        self.entry(db_index).or_insert_with(|| {
            SizedDatabase::open(Self::folder(), &db_index.to_string(), |key| key).unwrap()
        })
    }

    /// Key = txid bytes 1..32 (byte 0 is consumed by the shard id).
    fn txid_to_key(txid: &Txid) -> U8x31 {
        U8x31::from(&txid[1..])
    }

    /// Shard id = first byte of the txid.
    fn db_index(txid: &Txid) -> u8 {
        txid[0]
    }
}
impl AnyDatabaseGroup for TxidToTxData {
    /// Starts with no shard open; metadata is read from disk (or defaulted).
    fn import() -> Self {
        let metadata = Metadata::import(&Self::full_path());

        Self {
            metadata,
            map: BTreeMap::default(),
        }
    }

    /// Exports every open shard in parallel, then the metadata.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        // Take the map so the handles are dropped once exported.
        let databases = mem::take(&mut self.map);

        databases
            .into_par_iter()
            .try_for_each(|(_, database)| database.export())?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    fn folder<'a>() -> &'a str {
        "txid_to_tx_data"
    }
}

View File

@@ -0,0 +1,114 @@
use std::{
collections::BTreeMap,
mem,
ops::{Deref, DerefMut},
};
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{TxoutIndex, WNaiveDate};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
type Key = TxoutIndex;
type Value = u32;
type Database = SizedDatabase<Key, Value>;

/// Sharded on-disk map from a txout index to the owning address index.
#[derive(Allocative)]
pub struct TxoutIndexToAddressIndex {
    pub metadata: Metadata,
    // Shard index (key.as_u64() / DB_MAX_SIZE) -> open database handle.
    map: BTreeMap<usize, Database>,
}

impl Deref for TxoutIndexToAddressIndex {
    type Target = BTreeMap<usize, Database>;

    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for TxoutIndexToAddressIndex {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}

/// Number of keys covered by one shard; also fixes the `start..end` file names.
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAddressIndex {
    /// Inserts into the shard owning `key`, opening it if needed.
    /// "unsafe" refers to skipping duplicate checks, not memory safety.
    pub fn unsafe_insert(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).unsafe_insert(key, value)
    }

    // pub fn undo_insert(&mut self, key: &Key) -> Option<Value> {
    //     self.open_db(key).remove_from_puts(key).map(|v| {
    //         self.metadata.called_remove();
    //         v
    //     })
    // }

    /// Removes `key` from its shard, opening it if needed.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    /// NOTE(review): panics if the shard was not opened beforehand (`unwrap`).
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);

        self.get(&db_index).unwrap().get(key)
    }

    /// Returns the shard owning `key`, opening (or creating) it if necessary.
    /// Shard files are named `start..end` after the key range they cover.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);

        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );

            SizedDatabase::open(Self::folder(), &db_name, |key| key).unwrap()
        })
    }

    /// Maps a key to the index of the shard that owns it.
    fn db_index(key: &Key) -> usize {
        key.as_u64() as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAddressIndex {
    /// Starts with no shard open; metadata is read from disk (or defaulted).
    fn import() -> Self {
        let metadata = Metadata::import(&Self::full_path());

        Self {
            metadata,
            map: BTreeMap::default(),
        }
    }

    /// Exports every open shard in parallel, then the metadata.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        // Take the map so the handles are dropped once exported.
        let databases = mem::take(&mut self.map);

        databases
            .into_par_iter()
            .try_for_each(|(_, database)| database.export())?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    fn folder<'a>() -> &'a str {
        "txout_index_to_address_index"
    }
}

View File

@@ -0,0 +1,114 @@
use std::{
collections::BTreeMap,
mem,
ops::{Deref, DerefMut},
};
use allocative::Allocative;
use rayon::prelude::*;
use crate::structs::{TxoutIndex, WAmount, WNaiveDate};
use super::{AnyDatabaseGroup, Metadata, SizedDatabase};
type Key = TxoutIndex;
type Value = WAmount;
type Database = SizedDatabase<Key, Value>;

/// Sharded on-disk map from a txout index to its amount.
#[derive(Allocative)]
pub struct TxoutIndexToAmount {
    pub metadata: Metadata,
    // NOTE(review): `map` is `pub` here unlike the sibling groups — confirm
    // whether external code relies on direct access.
    pub map: BTreeMap<usize, Database>,
}

impl Deref for TxoutIndexToAmount {
    type Target = BTreeMap<usize, Database>;

    fn deref(&self) -> &Self::Target {
        &self.map
    }
}

impl DerefMut for TxoutIndexToAmount {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.map
    }
}

/// Number of keys covered by one shard; also fixes the `start..end` file names.
const DB_MAX_SIZE: usize = 10_000_000_000;
impl TxoutIndexToAmount {
    /// Inserts into the shard owning `key`, opening it if needed.
    /// "unsafe" refers to skipping duplicate checks, not memory safety.
    pub fn unsafe_insert(&mut self, key: Key, value: Value) -> Option<Value> {
        self.metadata.called_insert();
        self.open_db(&key).unsafe_insert(key, value)
    }

    // pub fn undo_insert(&mut self, key: &Key) -> Option<Value> {
    //     self.open_db(key).remove_from_puts(key).map(|v| {
    //         self.metadata.called_remove();
    //         v
    //     })
    // }

    /// Removes `key` from its shard, opening it if needed.
    pub fn remove(&mut self, key: &Key) -> Option<Value> {
        self.metadata.called_remove();
        self.open_db(key).remove(key)
    }

    /// Doesn't check if the database is open contrary to `safe_get` which does and opens if needed
    /// Though it makes it easy to use with rayon.
    /// NOTE(review): panics if the shard was not opened beforehand (`unwrap`).
    pub fn unsafe_get(&self, key: &Key) -> Option<&Value> {
        let db_index = Self::db_index(key);

        self.get(&db_index).unwrap().get(key)
    }

    /// Returns the shard owning `key`, opening (or creating) it if necessary.
    /// Shard files are named `start..end` after the key range they cover.
    pub fn open_db(&mut self, key: &Key) -> &mut Database {
        let db_index = Self::db_index(key);

        self.entry(db_index).or_insert_with(|| {
            let db_name = format!(
                "{}..{}",
                db_index * DB_MAX_SIZE,
                (db_index + 1) * DB_MAX_SIZE
            );

            SizedDatabase::open(Self::folder(), &db_name, |key| key).unwrap()
        })
    }

    /// Maps a key to the index of the shard that owns it.
    fn db_index(key: &Key) -> usize {
        key.as_u64() as usize / DB_MAX_SIZE
    }
}
impl AnyDatabaseGroup for TxoutIndexToAmount {
    /// Starts with no shard open; metadata is read from disk (or defaulted).
    fn import() -> Self {
        let metadata = Metadata::import(&Self::full_path());

        Self {
            metadata,
            map: BTreeMap::default(),
        }
    }

    /// Exports every open shard in parallel, then the metadata.
    fn export(&mut self, height: usize, date: WNaiveDate) -> color_eyre::Result<()> {
        // Take the map so the handles are dropped once exported.
        let databases = mem::take(&mut self.map);

        databases
            .into_par_iter()
            .try_for_each(|(_, database)| database.export())?;

        self.metadata.export(height, date)?;

        Ok(())
    }

    fn reset_metadata(&mut self) {
        self.metadata.reset();
    }

    fn folder<'a>() -> &'a str {
        "txout_index_to_amount"
    }
}

View File

@@ -0,0 +1,286 @@
use itertools::Itertools;
use rayon::prelude::*;
use crate::{
datasets::ComputeData,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, AnyMap, WNaiveDate},
};
use super::MinInitialStates;
/// Common interface over a dataset's "inserted" maps (written while parsing
/// blocks) and "computed" maps (derived afterwards), keyed by height and/or
/// date, plus the export plumbing shared by all datasets.
pub trait AnyDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates;

    /// True when either the height or the date still needs inserting.
    fn needs_insert(&self, height: usize, date: WNaiveDate) -> bool {
        self.needs_insert_height(height) || self.needs_insert_date(date)
    }

    /// True when the dataset has height maps and `height` is at or past the
    /// first height considered unsafe (i.e. not yet durably inserted).
    #[inline(always)]
    fn needs_insert_height(&self, height: usize) -> bool {
        !self.to_all_inserted_height_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .inserted
                .first_unsafe_height
                .unwrap_or(0)
                <= height
    }

    /// Same as `needs_insert_height` but for date maps; a missing
    /// `first_unsafe_date` means everything still needs inserting.
    #[inline(always)]
    fn needs_insert_date(&self, date: WNaiveDate) -> bool {
        !self.to_all_inserted_date_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .inserted
                .first_unsafe_date
                .map_or(true, |min_initial_first_unsafe_date| {
                    min_initial_first_unsafe_date <= date
                })
    }

    // --- Inserted map accessors (datasets override the ones they own) ---

    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![]
    }

    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        vec![]
    }

    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![]
    }

    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        vec![]
    }

    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![]
    }

    /// All inserted height maps, including the height half of each bi map.
    fn to_all_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut vec = self.to_inserted_height_map_vec();

        vec.append(
            &mut self
                .to_inserted_bi_map_vec()
                .iter()
                .map(|bi| bi.get_height())
                .collect_vec(),
        );

        vec
    }

    /// All inserted date maps, including the date half of each bi map.
    fn to_all_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut vec = self.to_inserted_date_map_vec();

        vec.append(
            &mut self
                .to_inserted_bi_map_vec()
                .iter()
                .map(|bi| bi.get_date())
                .collect_vec(),
        );

        vec
    }

    /// Every inserted map (height then date) as type-erased `AnyMap`s.
    fn to_all_inserted_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let heights = self
            .to_all_inserted_height_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());

        let dates = self
            .to_all_inserted_date_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());

        heights.chain(dates).collect_vec()
    }

    /// True when the latest height or date of this compute pass still needs
    /// computing for this dataset.
    #[inline(always)]
    fn should_compute(&self, compute_data: &ComputeData) -> bool {
        compute_data
            .heights
            .last()
            .map_or(false, |height| self.should_compute_height(*height))
            || compute_data
                .dates
                .last()
                .map_or(false, |date| self.should_compute_date(*date))
    }

    /// Mirror of `needs_insert_height` for the computed maps.
    #[inline(always)]
    fn should_compute_height(&self, height: usize) -> bool {
        !self.to_all_computed_height_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .computed
                .first_unsafe_height
                .unwrap_or(0)
                <= height
    }

    /// Mirror of `needs_insert_date` for the computed maps.
    #[inline(always)]
    fn should_compute_date(&self, date: WNaiveDate) -> bool {
        !self.to_all_computed_date_map_vec().is_empty()
            && self
                .get_min_initial_states()
                .computed
                .first_unsafe_date
                .map_or(true, |min_initial_first_unsafe_date| {
                    min_initial_first_unsafe_date <= date
                })
    }

    // --- Computed map accessors (datasets override the ones they own) ---

    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![]
    }

    fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        vec![]
    }

    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![]
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![]
    }

    fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        vec![]
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![]
    }

    /// All computed height maps, including the height half of each bi map.
    fn to_all_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut vec = self.to_computed_height_map_vec();

        vec.append(
            &mut self
                .to_computed_bi_map_vec()
                .iter()
                .map(|bi| bi.get_height())
                .collect_vec(),
        );

        vec
    }

    /// All computed date maps, including the date half of each bi map.
    fn to_all_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut vec = self.to_computed_date_map_vec();

        vec.append(
            &mut self
                .to_computed_bi_map_vec()
                .iter()
                .map(|bi| bi.get_date())
                .collect_vec(),
        );

        vec
    }

    /// Every computed map (height then date) as type-erased `AnyMap`s.
    fn to_all_computed_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let heights = self
            .to_all_computed_height_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());

        let dates = self
            .to_all_computed_date_map_vec()
            .into_iter()
            .map(|d| d.as_any_map());

        heights.chain(dates).collect_vec()
    }

    /// Every map of the dataset, inserted first, then computed.
    fn to_all_map_vec(&self) -> Vec<&(dyn AnyMap + Send + Sync)> {
        let mut inserted = self.to_all_inserted_map_vec();

        inserted.append(&mut self.to_all_computed_map_vec());

        inserted
    }

    // #[inline(always)]
    // fn is_empty(&self) -> bool {
    //     self.to_any_map_vec().is_empty()
    // }

    /// Runs each map's `pre_export` hook on every inserted and computed map.
    fn pre_export(&mut self) {
        self.to_inserted_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());

        self.to_inserted_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());

        self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.pre_export())
        });

        self.to_computed_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());

        self.to_computed_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.pre_export());

        self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.pre_export())
        });
    }

    /// Exports every map in parallel, stopping at the first failure.
    fn export(&self) -> color_eyre::Result<()> {
        self.to_all_map_vec()
            .into_par_iter()
            .try_for_each(|map| -> color_eyre::Result<()> { map.export() })
    }

    /// Runs each map's `post_export` hook on every inserted and computed map.
    fn post_export(&mut self) {
        self.to_inserted_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());

        self.to_inserted_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());

        self.to_inserted_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.post_export())
        });

        self.to_computed_mut_height_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());

        self.to_computed_mut_date_map_vec()
            .into_iter()
            .for_each(|map| map.post_export());

        self.to_computed_mut_bi_map_vec().into_iter().for_each(|d| {
            d.as_any_mut_map()
                .into_iter()
                .for_each(|map| map.post_export())
        });
    }
}

View File

@@ -0,0 +1,7 @@
use super::AnyDataset;
/// A group of datasets that can be traversed uniformly as trait objects.
pub trait AnyDatasetGroup {
    /// Shared references to every dataset in the group.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    /// Mutable references to every dataset in the group.
    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,9 @@
use super::{AnyDataset, MinInitialStates};
/// A collection of datasets with an aggregated minimal initial state.
pub trait AnyDatasets {
    /// The minimum initial state aggregated across the whole collection.
    fn get_min_initial_states(&self) -> &MinInitialStates;
    /// Shared references to every dataset in the collection.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)>;
    /// Mutable references to every dataset in the collection.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset>;
}

View File

@@ -0,0 +1,272 @@
use allocative::Allocative;
use crate::structs::{AnyDateMap, AnyHeightMap, WNaiveDate};
use super::{AnyDataset, AnyDatasets};
/// Pair of minimum initial states: one for raw inserted maps, one for
/// derived (computed) maps.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialStates {
    // State of the maps filled directly during parsing.
    pub inserted: MinInitialState,
    // State of the maps computed from the inserted ones.
    pub computed: MinInitialState,
}
impl MinInitialStates {
    /// Replaces this value with `other`.
    pub fn consume(&mut self, other: Self) {
        // Both fields are overwritten, so whole-struct assignment is
        // equivalent to the field-by-field copy (the type is `Copy`).
        *self = other;
    }

    /// Computes both halves (inserted and computed) for a single dataset.
    pub fn compute_from_dataset(dataset: &dyn AnyDataset) -> Self {
        let inserted = MinInitialState::compute_from_dataset(dataset, Mode::Inserted);
        let computed = MinInitialState::compute_from_dataset(dataset, Mode::Computed);
        Self { inserted, computed }
    }

    /// Computes both halves (inserted and computed) across a dataset
    /// collection.
    pub fn compute_from_datasets(datasets: &dyn AnyDatasets) -> Self {
        let inserted = MinInitialState::compute_from_datasets(datasets, Mode::Inserted);
        let computed = MinInitialState::compute_from_datasets(datasets, Mode::Computed);
        Self { inserted, computed }
    }
}
/// Minimum progress markers of a set of maps: the earliest unsafe date and
/// height (re-computation must start there) and the latest stored date and
/// height. `None` means no usable data.
#[derive(Default, Debug, Clone, Copy, Allocative)]
pub struct MinInitialState {
    pub first_unsafe_date: Option<WNaiveDate>,
    pub first_unsafe_height: Option<usize>,
    pub last_date: Option<WNaiveDate>,
    pub last_height: Option<usize>,
}
/// Selects which family of maps a state computation looks at.
enum Mode {
    // Maps filled directly while parsing.
    Inserted,
    // Maps derived from the inserted ones.
    Computed,
}
impl MinInitialState {
// pub fn consume(&mut self, other: Self) {
// self.first_unsafe_date = other.first_unsafe_date;
// self.first_unsafe_height = other.first_unsafe_height;
// self.last_date = other.last_date;
// self.last_height = other.last_height;
// }
fn compute_from_datasets(datasets: &dyn AnyDatasets, mode: Mode) -> Self {
match mode {
Mode::Inserted => {
let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_inserted_date_map_vec().is_empty()
};
let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_inserted_height_map_vec().is_empty()
};
Self {
first_unsafe_date: Self::min_datasets_date(
datasets,
contains_date_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.first_unsafe_date
.as_ref()
.cloned()
},
),
first_unsafe_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.first_unsafe_height
.as_ref()
.cloned()
},
),
last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
dataset
.get_min_initial_states()
.inserted
.last_date
.as_ref()
.cloned()
}),
last_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.inserted
.last_height
.as_ref()
.cloned()
},
),
}
}
Mode::Computed => {
let contains_date_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_computed_date_map_vec().is_empty()
};
let contains_height_maps = |dataset: &&(dyn AnyDataset + Sync + Send)| {
!dataset.to_all_computed_height_map_vec().is_empty()
};
Self {
first_unsafe_date: Self::min_datasets_date(
datasets,
contains_date_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.first_unsafe_date
.as_ref()
.cloned()
},
),
first_unsafe_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.first_unsafe_height
.as_ref()
.cloned()
},
),
last_date: Self::min_datasets_date(datasets, contains_date_maps, |dataset| {
dataset
.get_min_initial_states()
.computed
.last_date
.as_ref()
.cloned()
}),
last_height: Self::min_datasets_height(
datasets,
contains_height_maps,
|dataset| {
dataset
.get_min_initial_states()
.computed
.last_height
.as_ref()
.cloned()
},
),
}
}
}
}
fn min_datasets_date(
datasets: &dyn AnyDatasets,
is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<WNaiveDate>,
) -> Option<WNaiveDate> {
Self::min_date(
datasets
.to_any_dataset_vec()
.into_iter()
.filter(is_not_empty)
.map(map),
)
}
fn min_datasets_height(
datasets: &dyn AnyDatasets,
is_not_empty: impl Fn(&&(dyn AnyDataset + Sync + Send)) -> bool,
map: impl Fn(&(dyn AnyDataset + Sync + Send)) -> Option<usize>,
) -> Option<usize> {
Self::min_height(
datasets
.to_any_dataset_vec()
.into_iter()
.filter(is_not_empty)
.map(map),
)
}
fn compute_from_dataset(dataset: &dyn AnyDataset, mode: Mode) -> Self {
match mode {
Mode::Inserted => {
let date_vec = dataset.to_all_inserted_date_map_vec();
let height_vec = dataset.to_all_inserted_height_map_vec();
Self {
first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(
&date_vec,
),
first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
&height_vec,
),
last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
}
}
Mode::Computed => {
let date_vec = dataset.to_all_computed_date_map_vec();
let height_vec = dataset.to_all_computed_height_map_vec();
Self {
first_unsafe_date: Self::compute_min_initial_first_unsafe_date_from_dataset(
&date_vec,
),
first_unsafe_height: Self::compute_min_initial_first_unsafe_height_from_dataset(
&height_vec,
),
last_date: Self::compute_min_initial_last_date_from_dataset(&date_vec),
last_height: Self::compute_min_initial_last_height_from_dataset(&height_vec),
}
}
}
}
#[inline(always)]
fn compute_min_initial_last_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<WNaiveDate> {
Self::min_date(arr.iter().map(|map| map.get_initial_last_date()))
}
#[inline(always)]
fn compute_min_initial_last_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<usize> {
Self::min_height(arr.iter().map(|map| map.get_initial_last_height()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_date_from_dataset(
arr: &[&(dyn AnyDateMap + Sync + Send)],
) -> Option<WNaiveDate> {
Self::min_date(arr.iter().map(|map| map.get_initial_first_unsafe_date()))
}
#[inline(always)]
fn compute_min_initial_first_unsafe_height_from_dataset(
arr: &[&(dyn AnyHeightMap + Sync + Send)],
) -> Option<usize> {
Self::min_height(arr.iter().map(|map| map.get_initial_first_unsafe_height()))
}
#[inline(always)]
fn min_date(iter: impl Iterator<Item = Option<WNaiveDate>>) -> Option<WNaiveDate> {
iter.min().and_then(|opt| opt)
}
#[inline(always)]
fn min_height(iter: impl Iterator<Item = Option<usize>>) -> Option<usize> {
iter.min().and_then(|opt| opt)
}
}

View File

@@ -0,0 +1,9 @@
mod any_dataset;
mod any_dataset_group;
mod any_datasets;
mod min_initial_state;
pub use any_dataset::*;
pub use any_dataset_group::*;
pub use any_datasets::*;
pub use min_initial_state::*;

View File

@@ -0,0 +1,91 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
structs::{AnyBiMap, BiMap},
};
/// Global (non-cohort) address counters: totals of created, currently-empty,
/// and newly created addresses, indexed by both height and date.
#[derive(Allocative)]
pub struct AllAddressesMetadataDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    // NOTE(review): field name has a typo ("addreses"); the on-disk path is
    // spelled correctly ("created_addresses"). Renaming touches every use.
    created_addreses: BiMap<u32>,
    empty_addresses: BiMap<u32>,

    // Computed
    new_addresses: BiMap<u32>,
}
impl AllAddressesMetadataDataset {
    /// Imports the address counter maps from `parent_path`, then records the
    /// minimal state needed to resume processing.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |file: &str| format!("{parent_path}/{file}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),

            // TODO: Shouldn't be (like many others)
            created_addreses: BiMap::new_bin(1, &path("created_addresses")),
            empty_addresses: BiMap::new_bin(1, &path("empty_addresses")),

            new_addresses: BiMap::new_bin(1, &path("new_addresses")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset));

        Ok(dataset)
    }

    /// Records the address counters at `height`, mirroring them onto `date`
    /// when this block is the date's last one.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            databases,
            height,
            date,
            is_date_last_block,
            ..
        } = insert_data;

        let created_count = self
            .created_addreses
            .height
            .insert(height, *databases.address_to_address_index.metadata.len);

        let empty_count = self.empty_addresses.height.insert(
            height,
            *databases.address_index_to_empty_address_data.metadata.len,
        );

        if is_date_last_block {
            self.created_addreses.date.insert(date, created_count);
            self.empty_addresses.date.insert(date, empty_count);
        }
    }

    /// Derives `new_addresses` as the net change of the created counter.
    pub fn compute(&mut self, &ComputeData { heights, dates }: &ComputeData) {
        self.new_addresses
            .multi_insert_net_change(heights, dates, &mut self.created_addreses, 1)
    }
}
impl AnyDataset for AllAddressesMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Counters filled during parsing.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.created_addreses, &self.empty_addresses]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.created_addreses, &mut self.empty_addresses]
    }

    // Counter derived in `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.new_addresses]
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.new_addresses]
    }
}

View File

@@ -0,0 +1,703 @@
use allocative::Allocative;
use itertools::Itertools;
use crate::{
datasets::{
AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset,
},
states::{AddressCohortDurableStates, AddressCohortId},
structs::{AddressSplit, AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate},
};
use super::cohort_metadata::MetadataDataset;
/// All datasets of one address cohort, split four ways by liquidity
/// (`all` plus the three liquidity classes), with per-cohort metadata.
#[derive(Default, Allocative)]
pub struct CohortDataset {
    min_initial_states: MinInitialStates,

    // Which addresses belong to this cohort.
    split: AddressSplit,

    // Cohort-level counters (e.g. address count).
    metadata: MetadataDataset,

    pub all: SubDataset,
    illiquid: SubDataset,
    liquid: SubDataset,
    highly_liquid: SubDataset,
}
impl CohortDataset {
    /// Imports every sub-dataset of the cohort identified by `id`.
    ///
    /// A named cohort stores metadata under `{parent_path}/{name}` and each
    /// liquidity split under `{parent_path}/{split}/{name}`; the unnamed
    /// (whole-set) cohort stores directly under `parent_path`.
    pub fn import(parent_path: &str, id: AddressCohortId) -> color_eyre::Result<Self> {
        let name = id.as_name();
        let split = id.as_split();

        let folder_path = {
            if let Some(name) = name {
                format!("{parent_path}/{name}")
            } else {
                parent_path.to_owned()
            }
        };

        let f = |s: &str| {
            if let Some(name) = name {
                format!("{parent_path}/{s}/{name}")
            } else {
                format!("{parent_path}/{s}")
            }
        };

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            split,
            metadata: MetadataDataset::import(&folder_path)?,
            all: SubDataset::import(&folder_path)?,
            illiquid: SubDataset::import(&f("illiquid"))?,
            liquid: SubDataset::import(&f("liquid"))?,
            highly_liquid: SubDataset::import(&f("highly_liquid"))?,
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// The four liquidity splits, in a fixed order (all, illiquid, liquid,
    /// highly liquid).
    pub fn sub_datasets_vec(&self) -> Vec<&SubDataset> {
        vec![&self.all, &self.illiquid, &self.liquid, &self.highly_liquid]
    }

    // --- `needs_insert_*`: whether any split still lacks data for
    // --- (`height`, `date`) in the given sub-dataset family.

    pub fn needs_insert_metadata(&self, height: usize, date: WNaiveDate) -> bool {
        self.metadata.needs_insert(height, date)
    }

    pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.utxo.needs_insert(height, date))
    }

    pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.needs_insert(height, date))
    }

    pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.needs_insert(height, date))
    }

    pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.price_paid.needs_insert(height, date))
    }

    fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.needs_insert(height, date))
    }

    fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.needs_insert(height, date))
    }

    fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.input.needs_insert(height, date))
    }

    // fn needs_insert_output(&self, insert_data: &InsertData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.needs_insert(height, date))
    // }

    // --- `insert_*_data`: push the per-split states of this cohort into the
    // --- matching sub-datasets. The realized/unrealized/price-paid/input
    // --- states are `Option`s and are expected to be present when called.

    fn insert_realized_data(&mut self, insert_data: &InsertData) {
        let split_realized_state = insert_data
            .address_cohorts_realized_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.all
            .realized
            .insert(insert_data, &split_realized_state.all);

        self.illiquid
            .realized
            .insert(insert_data, &split_realized_state.illiquid);

        self.liquid
            .realized
            .insert(insert_data, &split_realized_state.liquid);

        self.highly_liquid
            .realized
            .insert(insert_data, &split_realized_state.highly_liquid);
    }

    fn insert_metadata(&mut self, insert_data: &InsertData) {
        let address_count = insert_data
            .states
            .address_cohorts_durable_states
            .get(&self.split)
            .unwrap()
            .address_count;

        self.metadata.insert(insert_data, address_count);
    }

    fn insert_supply_data(
        &mut self,
        insert_data: &InsertData,
        liquidity_split_state: &AddressCohortDurableStates,
    ) {
        self.all.supply.insert(
            insert_data,
            &liquidity_split_state.split_durable_states.all.supply_state,
        );

        self.illiquid.supply.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .illiquid
                .supply_state,
        );

        self.liquid.supply.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .liquid
                .supply_state,
        );

        self.highly_liquid.supply.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .highly_liquid
                .supply_state,
        );
    }

    fn insert_utxo_data(
        &mut self,
        insert_data: &InsertData,
        liquidity_split_state: &AddressCohortDurableStates,
    ) {
        self.all.utxo.insert(
            insert_data,
            &liquidity_split_state.split_durable_states.all.utxo_state,
        );

        self.illiquid.utxo.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .illiquid
                .utxo_state,
        );

        self.liquid.utxo.insert(
            insert_data,
            &liquidity_split_state.split_durable_states.liquid.utxo_state,
        );

        self.highly_liquid.utxo.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .highly_liquid
                .utxo_state,
        );
    }

    fn insert_capitalization_data(
        &mut self,
        insert_data: &InsertData,
        liquidity_split_state: &AddressCohortDurableStates,
    ) {
        self.all.capitalization.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .all
                .capitalization_state,
        );

        self.illiquid.capitalization.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .illiquid
                .capitalization_state,
        );

        self.liquid.capitalization.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .liquid
                .capitalization_state,
        );

        self.highly_liquid.capitalization.insert(
            insert_data,
            &liquidity_split_state
                .split_durable_states
                .highly_liquid
                .capitalization_state,
        );
    }

    fn insert_unrealized_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.all.unrealized.insert(
            insert_data,
            &states.all.unrealized_block_state,
            &states.all.unrealized_date_state,
        );

        self.illiquid.unrealized.insert(
            insert_data,
            &states.illiquid.unrealized_block_state,
            &states.illiquid.unrealized_date_state,
        );

        self.liquid.unrealized.insert(
            insert_data,
            &states.liquid.unrealized_block_state,
            &states.liquid.unrealized_date_state,
        );

        self.highly_liquid.unrealized.insert(
            insert_data,
            &states.highly_liquid.unrealized_block_state,
            &states.highly_liquid.unrealized_date_state,
        );
    }

    fn insert_price_paid_data(&mut self, insert_data: &InsertData) {
        let states = insert_data
            .address_cohorts_one_shot_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.all
            .price_paid
            .insert(insert_data, &states.all.price_paid_state);

        self.illiquid
            .price_paid
            .insert(insert_data, &states.illiquid.price_paid_state);

        self.liquid
            .price_paid
            .insert(insert_data, &states.liquid.price_paid_state);

        self.highly_liquid
            .price_paid
            .insert(insert_data, &states.highly_liquid.price_paid_state);
    }

    fn insert_input_data(&mut self, insert_data: &InsertData) {
        let state = insert_data
            .address_cohorts_input_states
            .as_ref()
            .unwrap()
            .get(&self.split)
            .unwrap();

        self.all.input.insert(insert_data, &state.all);
        self.illiquid.input.insert(insert_data, &state.illiquid);
        self.liquid.input.insert(insert_data, &state.liquid);
        self.highly_liquid
            .input
            .insert(insert_data, &state.highly_liquid);
    }

    // fn insert_output_data(&mut self, insert_data: &InsertData) {
    //     let state = insert_data
    //         .address_cohorts_output_states
    //         .as_ref()
    //         .unwrap()
    //         .get(&self.split)
    //         .unwrap();

    //     self.all.output.insert(insert_data, &state.all);
    //     self.illiquid.output.insert(insert_data, &state.illiquid);
    //     self.liquid.output.insert(insert_data, &state.liquid);
    //     self.highly_liquid
    //         .output
    //         .insert(insert_data, &state.highly_liquid);
    // }

    /// Every dataset in this cohort (four splits plus metadata), flattened.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        vec![
            self.all.as_vec(),
            self.illiquid.as_vec(),
            self.liquid.as_vec(),
            self.highly_liquid.as_vec(),
            vec![&self.metadata],
        ]
        .into_iter()
        .flatten()
        .collect_vec()
    }

    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        vec![
            self.all.as_mut_vec(),
            self.illiquid.as_mut_vec(),
            self.liquid.as_mut_vec(),
            self.highly_liquid.as_mut_vec(),
            vec![&mut self.metadata],
        ]
        .into_iter()
        .flatten()
        .collect_vec()
    }

    /// Inserts the current block's per-cohort states into every sub-dataset
    /// that still needs data at this (height, date).
    ///
    /// No-op unless address computation is enabled for this run.
    pub fn insert(&mut self, insert_data: &InsertData) {
        if !insert_data.compute_addresses {
            return;
        }

        let liquidity_split_processed_address_state = insert_data
            .states
            .address_cohorts_durable_states
            .get(&self.split);

        if liquidity_split_processed_address_state.is_none() {
            return; // TODO: Check if should panic instead
        }

        let liquidity_split_processed_address_state =
            liquidity_split_processed_address_state.unwrap();

        if self.needs_insert_metadata(insert_data.height, insert_data.date) {
            self.insert_metadata(insert_data);
        }

        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.insert_utxo_data(insert_data, liquidity_split_processed_address_state);
        }

        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.insert_capitalization_data(insert_data, liquidity_split_processed_address_state);
        }

        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.insert_supply_data(insert_data, liquidity_split_processed_address_state);
        }

        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.insert_realized_data(insert_data);
        }

        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.insert_unrealized_data(insert_data);
        }

        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.insert_price_paid_data(insert_data);
        }

        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.insert_input_data(insert_data);
        }

        // if self.needs_insert_output(insert_data) {
        //     self.insert_output_data(insert_data);
        // }
    }

    // pub fn should_compute_metadata(&self, compute_data: &ComputeData) -> bool {
    //     self.metadata.should_compute(compute_data)
    // }

    // pub fn should_compute_utxo(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.utxo.should_compute(compute_data))
    // }

    // --- `should_compute_*`: whether any split's sub-dataset still needs
    // --- derived values for `compute_data`.

    pub fn should_compute_supply(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.supply.should_compute(compute_data))
    }

    pub fn should_compute_capitalization(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.capitalization.should_compute(compute_data))
    }

    fn should_compute_realized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.realized.should_compute(compute_data))
    }

    fn should_compute_unrealized(&self, compute_data: &ComputeData) -> bool {
        self.sub_datasets_vec()
            .iter()
            .any(|sub| sub.unrealized.should_compute(compute_data))
    }

    // fn should_compute_input(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.input.should_compute(compute_data))
    // }

    // fn should_compute_output(&self, compute_data: &ComputeData) -> bool {
    //     self.sub_datasets_vec()
    //         .iter()
    //         .any(|sub| sub.output.should_compute(compute_data))
    // }

    fn compute_supply_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        self.all.supply.compute(compute_data, circulating_supply);
        self.illiquid
            .supply
            .compute(compute_data, circulating_supply);
        self.liquid.supply.compute(compute_data, circulating_supply);
        self.highly_liquid
            .supply
            .compute(compute_data, circulating_supply);
    }

    fn compute_unrealized_data(
        &mut self,
        compute_data: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.all.unrealized.compute(
            compute_data,
            &mut self.all.supply.supply,
            circulating_supply,
            market_cap,
        );

        self.illiquid.unrealized.compute(
            compute_data,
            &mut self.illiquid.supply.supply,
            circulating_supply,
            market_cap,
        );

        self.liquid.unrealized.compute(
            compute_data,
            &mut self.liquid.supply.supply,
            circulating_supply,
            market_cap,
        );

        self.highly_liquid.unrealized.compute(
            compute_data,
            &mut self.highly_liquid.supply.supply,
            circulating_supply,
            market_cap,
        );
    }

    fn compute_realized_data(&mut self, compute_data: &ComputeData, market_cap: &mut BiMap<f32>) {
        self.all.realized.compute(compute_data, market_cap);
        self.illiquid.realized.compute(compute_data, market_cap);
        self.liquid.realized.compute(compute_data, market_cap);
        self.highly_liquid
            .realized
            .compute(compute_data, market_cap);
    }

    fn compute_capitalization_data(&mut self, compute_data: &ComputeData, closes: &mut BiMap<f32>) {
        self.all
            .capitalization
            .compute(compute_data, closes, &mut self.all.supply.supply);

        self.illiquid.capitalization.compute(
            compute_data,
            closes,
            &mut self.illiquid.supply.supply,
        );

        self.liquid
            .capitalization
            .compute(compute_data, closes, &mut self.liquid.supply.supply);

        self.highly_liquid.capitalization.compute(
            compute_data,
            closes,
            &mut self.highly_liquid.supply.supply,
        );
    }

    // fn compute_output_data(&mut self, compute_data: &ComputeData) {
    //     self.all
    //         .output
    //         .compute(compute_data, &mut self.all.supply.total);
    //     self.illiquid
    //         .output
    //         .compute(compute_data, &mut self.illiquid.supply.total);
    //     self.liquid
    //         .output
    //         .compute(compute_data, &mut self.liquid.supply.total);
    //     self.highly_liquid
    //         .output
    //         .compute(compute_data, &mut self.highly_liquid.supply.total);
    // }

    /// Computes the cohort's derived datasets; capitalization reads the
    /// supply maps, so it must run after `compute_supply_data`.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.should_compute_supply(compute_data) {
            self.compute_supply_data(compute_data, circulating_supply);
        }

        if self.should_compute_unrealized(compute_data) {
            self.compute_unrealized_data(compute_data, circulating_supply, market_cap);
        }

        if self.should_compute_realized(compute_data) {
            self.compute_realized_data(compute_data, market_cap);
        }

        // MUST BE after compute_supply
        if self.should_compute_capitalization(compute_data) {
            self.compute_capitalization_data(compute_data, closes);
        }

        // if self.should_compute_output(compute_data) {
        //     self.compute_output_data(compute_data);
        // }
    }
}
// Every accessor simply flattens the corresponding accessor of the four
// liquidity splits plus the metadata dataset (see `as_vec`/`as_mut_vec`).
impl AnyDataset for CohortDataset {
    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_height_map_vec())
            .collect_vec()
    }

    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_date_map_vec())
            .collect_vec()
    }

    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_bi_map_vec())
            .collect_vec()
    }

    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_height_map_vec())
            .collect_vec()
    }

    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_date_map_vec())
            .collect_vec()
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_bi_map_vec())
            .collect_vec()
    }

    fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_height_map_vec())
            .collect_vec()
    }

    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_date_map_vec())
            .collect_vec()
    }

    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_bi_map_vec())
            .collect_vec()
    }

    fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_height_map_vec())
            .collect_vec()
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_date_map_vec())
            .collect_vec()
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_bi_map_vec())
            .collect_vec()
    }

    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,67 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, InsertData, MinInitialStates},
structs::{AnyBiMap, BiMap},
};
/// Per-cohort metadata; currently just the number of addresses in the cohort.
#[derive(Default, Allocative)]
pub struct MetadataDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    address_count: BiMap<usize>,
    // pub output: OutputSubDataset,

    // Sending addresses
    // Receiving addresses
    // Active addresses (Unique(Sending + Receiving))
}
impl MetadataDataset {
    /// Imports the address-count map from `parent_path` and records the
    /// dataset's minimal initial state.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |file: &str| format!("{parent_path}/{file}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            address_count: BiMap::new_bin(1, &path("address_count")),
        };

        dataset
            .min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&dataset));

        Ok(dataset)
    }

    /// Records the cohort's `address_count` at `height`, mirroring it onto
    /// `date` when this block closes the date.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        address_count: usize,
    ) {
        self.address_count.height.insert(height, address_count);

        if is_date_last_block {
            self.address_count.date.insert(date, address_count);
        }
    }
}
impl AnyDataset for MetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Only one inserted map; nothing is computed.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.address_count]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.address_count]
    }
}

View File

@@ -0,0 +1,151 @@
mod all_metadata;
mod cohort;
mod cohort_metadata;
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
use crate::{states::SplitByAddressCohort, structs::BiMap};
use self::{all_metadata::AllAddressesMetadataDataset, cohort::CohortDataset};
use super::{AnyDataset, AnyDatasets, ComputeData, InsertData, MinInitialStates};
/// All address-related datasets: global address metadata plus one
/// `CohortDataset` per address cohort.
#[derive(Allocative)]
pub struct AddressDatasets {
    min_initial_states: MinInitialStates,

    metadata: AllAddressesMetadataDataset,

    pub cohorts: SplitByAddressCohort<CohortDataset>,
}
impl AddressDatasets {
    /// Imports every cohort dataset in parallel, then the global metadata
    /// dataset, and finally aggregates their minimal initial states.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByAddressCohort::<CohortDataset>::default();

        // Import each cohort on the rayon pool, then move the results back in
        // sequentially (writing needs exclusive access to `cohorts`).
        let imported = cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, CohortDataset::import(parent_path, id)))
            .collect::<Vec<_>>();

        for (id, dataset) in imported {
            *cohorts.get_mut_from_id(&id) = dataset?;
        }

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            metadata: AllAddressesMetadataDataset::import(parent_path)?,
            cohorts,
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s));

        Ok(s)
    }

    /// Inserts the current block's data into the global metadata and into
    /// every cohort.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.metadata.insert(insert_data);

        for (cohort, _) in self.cohorts.as_mut_vec() {
            cohort.insert(insert_data);
        }
    }

    /// Computes derived values for the global metadata and every cohort.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.metadata.compute(compute_data);

        for (cohort, _) in self.cohorts.as_mut_vec() {
            cohort.compute(compute_data, closes, circulating_supply, market_cap);
        }
    }
}
impl AnyDatasets for AddressDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Every cohort dataset, followed by the global metadata dataset.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> = self
            .cohorts
            .as_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &(dyn AnyDataset + Send + Sync))
            .collect();
        datasets.push(&self.metadata);
        datasets
    }

    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> = self
            .cohorts
            .as_mut_vec()
            .into_iter()
            .map(|(dataset, _)| dataset as &mut dyn AnyDataset)
            .collect();
        datasets.push(&mut self.metadata);
        datasets
    }
}

View File

@@ -0,0 +1,61 @@
use allocative::Allocative;
use crate::{
datasets::AnyDataset,
structs::{AnyHeightMap, HeightMap, WNaiveDate},
};
use super::{InsertData, MinInitialStates};
/// Per-block metadata series, keyed by block height.
#[derive(Allocative)]
pub struct BlockMetadataDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    // Calendar date of each block, derived from its timestamp (see `insert`).
    pub date: HeightMap<WNaiveDate>,
    // Raw block timestamp — presumably Unix seconds, TODO confirm against
    // `WNaiveDate::from_timestamp`.
    pub timestamp: HeightMap<u32>,
}
impl BlockMetadataDataset {
    /// Opens (or creates) the dataset's maps under `parent_path`, then derives
    /// the minimal initial states required before inserting new data.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |name: &str| format!("{parent_path}/{name}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            date: HeightMap::new_bin(1, &path("date")),
            timestamp: HeightMap::new_bin(1, &path("timestamp")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Records the block's timestamp and the calendar date derived from it.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            height, timestamp, ..
        } = insert_data;

        self.timestamp.insert(height, timestamp);
        self.date
            .insert(height, WNaiveDate::from_timestamp(timestamp));
    }
}
impl AnyDataset for BlockMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Registers the height maps that `insert` writes to.
    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyHeightMap + Send + Sync)> = Vec::with_capacity(2);
        maps.push(&self.date);
        maps.push(&self.timestamp);
        maps
    }

    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        let mut maps: Vec<&mut dyn AnyHeightMap> = Vec::with_capacity(2);
        maps.push(&mut self.date);
        maps.push(&mut self.timestamp);
        maps
    }
}

View File

@@ -0,0 +1,68 @@
use allocative::Allocative;
use crate::{
datasets::AnyDataset,
structs::{AnyBiMap, BiMap},
};
use super::{InsertData, MinInitialStates};
/// Tracks coindays destroyed per block and per day.
#[derive(Allocative)]
pub struct CoindaysDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    // Coindays destroyed, converted from satdays via `to_btc()` in `insert`.
    pub coindays_destroyed: BiMap<f32>,
}
impl CoindaysDataset {
    /// Opens (or creates) the dataset's map under `parent_path`, then derives
    /// the minimal initial states needed before inserting.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |name: &str| format!("{parent_path}/{name}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            coindays_destroyed: BiMap::new_bin(1, &path("coindays_destroyed")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Inserts the block's destroyed coindays; on the last block of a date,
    /// also sums the date's blocks into the date-level map.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            height,
            satdays_destroyed,
            date_blocks_range,
            is_date_last_block,
            date,
            ..
        } = insert_data;

        self.coindays_destroyed
            .height
            .insert(height, satdays_destroyed.to_btc() as f32);

        if is_date_last_block {
            self.coindays_destroyed
                .date_insert_sum_range(date, date_blocks_range);
        }
    }
}
impl AnyDataset for CoindaysDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Registers the single bi-map written by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = Vec::with_capacity(1);
        maps.push(&self.coindays_destroyed);
        maps
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = Vec::with_capacity(1);
        maps.push(&mut self.coindays_destroyed);
        maps
    }
}

View File

@@ -0,0 +1,594 @@
use allocative::Allocative;
use crate::{
structs::{AnyBiMap, BiMap, DateMap},
utils::{ONE_DAY_IN_DAYS, ONE_YEAR_IN_DAYS, THREE_MONTHS_IN_DAYS, TWO_WEEK_IN_DAYS},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
/// Cointime-economics series (liveliness, vaultedness, active/vaulted supply
/// and the caps/prices derived from them). Only `coinblocks_destroyed` is
/// inserted per block; everything else is derived in `compute`.
#[derive(Allocative)]
pub struct CointimeDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    pub coinblocks_destroyed: BiMap<f32>,

    // Computed
    pub active_cap: BiMap<f32>,
    pub active_price: BiMap<f32>,
    pub active_supply: BiMap<f32>,
    pub active_supply_3m_net_change: BiMap<f32>,
    pub active_supply_net_change: BiMap<f32>,
    pub activity_to_vaultedness_ratio: BiMap<f32>,
    pub coinblocks_created: BiMap<f32>,
    pub coinblocks_stored: BiMap<f32>,
    pub cointime_adjusted_velocity: BiMap<f32>,
    pub cointime_adjusted_yearly_inflation_rate: BiMap<f32>,
    pub cointime_cap: BiMap<f32>,
    pub cointime_price: BiMap<f32>,
    pub cointime_value_created: BiMap<f32>,
    pub cointime_value_destroyed: BiMap<f32>,
    pub cointime_value_stored: BiMap<f32>,
    pub concurrent_liveliness: BiMap<f32>,
    pub concurrent_liveliness_2w_median: BiMap<f32>,
    pub cumulative_coinblocks_created: BiMap<f32>,
    pub cumulative_coinblocks_destroyed: BiMap<f32>,
    pub cumulative_coinblocks_stored: BiMap<f32>,
    pub investor_cap: BiMap<f32>,
    pub investorness: BiMap<f32>,
    pub liveliness: BiMap<f32>,
    pub liveliness_net_change: BiMap<f32>,
    // NOTE(review): despite the name, `compute` fills this with a 2-week
    // net change (`multi_insert_net_change`), not a median — TODO confirm
    // intent and rename or switch to `multi_insert_median`.
    pub liveliness_net_change_2w_median: BiMap<f32>,
    pub producerness: BiMap<f32>,
    pub thermo_cap: BiMap<f32>,
    pub thermo_cap_to_investor_cap_ratio: BiMap<f32>,
    pub total_cointime_value_created: BiMap<f32>,
    pub total_cointime_value_destroyed: BiMap<f32>,
    pub total_cointime_value_stored: BiMap<f32>,
    pub true_market_deviation: BiMap<f32>,
    pub true_market_mean: BiMap<f32>,
    pub true_market_net_unrealized_profit_and_loss: BiMap<f32>,
    pub vaulted_cap: BiMap<f32>,
    pub vaulted_price: BiMap<f32>,
    pub vaulted_supply: BiMap<f32>,
    pub vaulted_supply_net_change: BiMap<f32>,
    pub vaulted_supply_3m_net_change: BiMap<f32>,
    pub vaultedness: BiMap<f32>,
    pub vaulting_rate: BiMap<f32>,
}
impl CointimeDataset {
    /// Opens (or creates) every map under `parent_path`, then derives the
    /// minimal initial states required before inserting new data.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            active_cap: BiMap::new_bin(1, &f("active_cap")),
            active_price: BiMap::new_bin(1, &f("active_price")),
            active_supply: BiMap::new_bin(1, &f("active_supply")),
            active_supply_3m_net_change: BiMap::new_bin(1, &f("active_supply_3m_net_change")),
            active_supply_net_change: BiMap::new_bin(1, &f("active_supply_net_change")),
            activity_to_vaultedness_ratio: BiMap::new_bin(1, &f("activity_to_vaultedness_ratio")),
            coinblocks_created: BiMap::new_bin(1, &f("coinblocks_created")),
            coinblocks_destroyed: BiMap::new_bin(1, &f("coinblocks_destroyed")),
            coinblocks_stored: BiMap::new_bin(1, &f("coinblocks_stored")),
            cointime_adjusted_velocity: BiMap::new_bin(1, &f("cointime_adjusted_velocity")),
            cointime_adjusted_yearly_inflation_rate: BiMap::new_bin(
                1,
                &f("cointime_adjusted_yearly_inflation_rate"),
            ),
            cointime_cap: BiMap::new_bin(1, &f("cointime_cap")),
            cointime_price: BiMap::new_bin(1, &f("cointime_price")),
            cointime_value_created: BiMap::new_bin(1, &f("cointime_value_created")),
            cointime_value_destroyed: BiMap::new_bin(1, &f("cointime_value_destroyed")),
            cointime_value_stored: BiMap::new_bin(1, &f("cointime_value_stored")),
            concurrent_liveliness: BiMap::new_bin(1, &f("concurrent_liveliness")),
            concurrent_liveliness_2w_median: BiMap::new_bin(
                1,
                &f("concurrent_liveliness_2w_median"),
            ),
            cumulative_coinblocks_created: BiMap::new_bin(1, &f("cumulative_coinblocks_created")),
            cumulative_coinblocks_destroyed: BiMap::new_bin(
                1,
                &f("cumulative_coinblocks_destroyed"),
            ),
            cumulative_coinblocks_stored: BiMap::new_bin(1, &f("cumulative_coinblocks_stored")),
            investor_cap: BiMap::new_bin(1, &f("investor_cap")),
            investorness: BiMap::new_bin(1, &f("investorness")),
            liveliness: BiMap::new_bin(1, &f("liveliness")),
            liveliness_net_change: BiMap::new_bin(1, &f("liveliness_net_change")),
            liveliness_net_change_2w_median: BiMap::new_bin(
                1,
                &f("liveliness_net_change_2w_median"),
            ),
            producerness: BiMap::new_bin(1, &f("producerness")),
            thermo_cap: BiMap::new_bin(1, &f("thermo_cap")),
            thermo_cap_to_investor_cap_ratio: BiMap::new_bin(
                1,
                &f("thermo_cap_to_investor_cap_ratio"),
            ),
            total_cointime_value_created: BiMap::new_bin(1, &f("total_cointime_value_created")),
            total_cointime_value_destroyed: BiMap::new_bin(1, &f("total_cointime_value_destroyed")),
            total_cointime_value_stored: BiMap::new_bin(1, &f("total_cointime_value_stored")),
            true_market_deviation: BiMap::new_bin(1, &f("true_market_deviation")),
            true_market_mean: BiMap::new_bin(1, &f("true_market_mean")),
            true_market_net_unrealized_profit_and_loss: BiMap::new_bin(
                1,
                &f("true_market_net_unrealized_profit_and_loss"),
            ),
            vaulted_cap: BiMap::new_bin(1, &f("vaulted_cap")),
            vaulted_price: BiMap::new_bin(1, &f("vaulted_price")),
            vaulted_supply: BiMap::new_bin(1, &f("vaulted_supply")),
            vaulted_supply_3m_net_change: BiMap::new_bin(1, &f("vaulted_supply_3m_net_change")),
            vaulted_supply_net_change: BiMap::new_bin(1, &f("vaulted_supply_net_change")),
            vaultedness: BiMap::new_bin(1, &f("vaultedness")),
            vaulting_rate: BiMap::new_bin(1, &f("vaulting_rate")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Inserts the block's destroyed coinblocks (converted sat → coin units);
    /// on the last block of a date, sums the date's blocks into the date map.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            satblocks_destroyed,
            date_blocks_range,
            is_date_last_block,
            ..
        }: &InsertData,
    ) {
        self.coinblocks_destroyed
            .height
            .insert(height, satblocks_destroyed.to_btc() as f32);

        if is_date_last_block {
            self.coinblocks_destroyed
                .date_insert_sum_range(date, date_blocks_range);
        }
    }

    /// Derives every computed series. Statement order matters: each step reads
    /// maps filled by earlier steps (e.g. liveliness needs both cumulative
    /// coinblock series; the caps need the supplies), so do not reorder.
    #[allow(clippy::too_many_arguments)]
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        first_height: &mut DateMap<usize>,
        last_height: &mut DateMap<usize>,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        realized_cap: &mut BiMap<f32>,
        realized_price: &mut BiMap<f32>,
        yearly_inflation_rate: &mut BiMap<f64>,
        annualized_transaction_volume: &mut BiMap<f32>,
        cumulative_subsidy_in_dollars: &mut BiMap<f32>,
    ) {
        // --- Coinblock flows ---
        self.cumulative_coinblocks_destroyed
            .multi_insert_cumulative(heights, dates, &mut self.coinblocks_destroyed);

        // Per block, each circulating coin accrues one coinblock, so
        // coinblocks created at a height == circulating supply there.
        self.coinblocks_created
            .height
            .multi_insert_simple_transform(
                heights,
                &mut circulating_supply.height,
                |circulating_supply| circulating_supply as f32,
            );

        self.coinblocks_created
            .multi_date_insert_sum_range(dates, first_height, last_height);

        self.cumulative_coinblocks_created.multi_insert_cumulative(
            heights,
            dates,
            &mut self.coinblocks_created,
        );

        // Stored = created − destroyed.
        self.coinblocks_stored.height.multi_insert_subtract(
            heights,
            &mut self.coinblocks_created.height,
            &mut self.coinblocks_destroyed.height,
        );

        self.coinblocks_stored
            .multi_date_insert_sum_range(dates, first_height, last_height);

        self.cumulative_coinblocks_stored.multi_insert_cumulative(
            heights,
            dates,
            &mut self.coinblocks_stored,
        );

        // --- Liveliness / vaultedness ---
        // Liveliness = cumulative destroyed / cumulative created.
        self.liveliness.multi_insert_divide(
            heights,
            dates,
            &mut self.cumulative_coinblocks_destroyed,
            &mut self.cumulative_coinblocks_created,
        );

        self.vaultedness.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.liveliness,
            &|liveliness| 1.0 - liveliness,
        );

        self.activity_to_vaultedness_ratio.multi_insert_divide(
            heights,
            dates,
            &mut self.liveliness,
            &mut self.vaultedness,
        );

        // "Concurrent" liveliness uses the per-period flows instead of the
        // cumulative series.
        self.concurrent_liveliness.multi_insert_divide(
            heights,
            dates,
            &mut self.coinblocks_destroyed,
            &mut self.coinblocks_created,
        );

        self.concurrent_liveliness_2w_median.multi_insert_median(
            heights,
            dates,
            &mut self.concurrent_liveliness,
            Some(TWO_WEEK_IN_DAYS),
        );

        self.liveliness_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.liveliness,
            ONE_DAY_IN_DAYS,
        );

        // NOTE(review): field is named `_2w_median` but this computes a 2-week
        // net change (compare `concurrent_liveliness_2w_median` above, which
        // uses `multi_insert_median`) — TODO confirm which was intended.
        self.liveliness_net_change_2w_median
            .multi_insert_net_change(heights, dates, &mut self.liveliness, TWO_WEEK_IN_DAYS);

        // --- Supplies ---
        self.vaulted_supply.multi_insert_multiply(
            heights,
            dates,
            &mut self.vaultedness,
            circulating_supply,
        );

        self.vaulted_supply_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.vaulted_supply,
            ONE_DAY_IN_DAYS,
        );

        self.vaulted_supply_3m_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.vaulted_supply,
            THREE_MONTHS_IN_DAYS,
        );

        // Annualizes the vaulted supply (× days per year).
        self.vaulting_rate.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.vaulted_supply,
            &|vaulted_supply| vaulted_supply * ONE_YEAR_IN_DAYS as f32,
        );

        self.active_supply.multi_insert_multiply(
            heights,
            dates,
            &mut self.liveliness,
            circulating_supply,
        );

        self.active_supply_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.active_supply,
            ONE_DAY_IN_DAYS,
        );

        self.active_supply_3m_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.active_supply,
            THREE_MONTHS_IN_DAYS,
        );

        // TODO: Do these
        // let min_vaulted_supply = ;
        // let max_active_supply = ;

        self.cointime_adjusted_yearly_inflation_rate
            .multi_insert_multiply(
                heights,
                dates,
                &mut self.activity_to_vaultedness_ratio,
                yearly_inflation_rate,
            );

        self.cointime_adjusted_velocity.multi_insert_divide(
            heights,
            dates,
            annualized_transaction_volume,
            &mut self.active_supply,
        );

        // TODO:
        // const activeSupplyChangeFromTransactions90dChange =
        // createNetChangeLazyDataset(activeSupplyChangeFromTransactions, 90);
        // const activeSupplyChangeFromIssuance = createMultipliedLazyDataset(
        // lastSubsidy,
        // liveliness,
        // );

        // --- Caps and prices ---
        // Thermo cap is a straight copy of cumulative subsidy in dollars
        // (identity transform bridges the f32 BiMap types).
        self.thermo_cap.multi_insert_simple_transform(
            heights,
            dates,
            cumulative_subsidy_in_dollars,
            &|cumulative_subsidy_in_dollars| cumulative_subsidy_in_dollars,
        );

        // Investor cap = realized cap − thermo cap.
        self.investor_cap
            .multi_insert_subtract(heights, dates, realized_cap, &mut self.thermo_cap);

        self.thermo_cap_to_investor_cap_ratio.multi_insert_divide(
            heights,
            dates,
            &mut self.thermo_cap,
            &mut self.investor_cap,
        );

        // TODO:
        // const activeSupplyChangeFromIssuance90dChange = createNetChangeLazyDataset(
        // activeSupplyChangeFromIssuance,
        // 90,
        // );

        self.active_price
            .multi_insert_divide(heights, dates, realized_price, &mut self.liveliness);

        self.active_cap.height.multi_insert_multiply(
            heights,
            &mut self.active_supply.height,
            &mut closes.height,
        );

        self.active_cap.date.multi_insert_multiply(
            dates,
            &mut self.active_supply.date,
            &mut closes.date,
        );

        self.vaulted_price.multi_insert_divide(
            heights,
            dates,
            realized_price,
            &mut self.vaultedness,
        );

        self.vaulted_cap.height.multi_insert_multiply(
            heights,
            &mut self.vaulted_supply.height,
            &mut closes.height,
        );

        self.vaulted_cap.date.multi_insert_multiply(
            dates,
            &mut self.vaulted_supply.date,
            &mut closes.date,
        );

        self.true_market_mean.multi_insert_divide(
            heights,
            dates,
            &mut self.investor_cap,
            &mut self.active_supply,
        );

        self.true_market_deviation.multi_insert_divide(
            heights,
            dates,
            &mut self.active_cap,
            &mut self.investor_cap,
        );

        // NUPL = (active cap − investor cap) / active cap, computed per height
        // and per date separately (the two transform closures take different
        // tuple shapes).
        self.true_market_net_unrealized_profit_and_loss
            .height
            .multi_insert_complex_transform(
                heights,
                &mut self.active_cap.height,
                |(active_cap, height)| {
                    let investor_cap = self.investor_cap.height.get(height).unwrap();
                    (active_cap - investor_cap) / active_cap
                },
            );

        self.true_market_net_unrealized_profit_and_loss
            .date
            .multi_insert_complex_transform(
                dates,
                &mut self.active_cap.date,
                |(active_cap, date, _)| {
                    let investor_cap = self.investor_cap.date.get(date).unwrap();
                    (active_cap - investor_cap) / active_cap
                },
            );

        self.investorness
            .multi_insert_divide(heights, dates, &mut self.investor_cap, realized_cap);

        self.producerness
            .multi_insert_divide(heights, dates, &mut self.thermo_cap, realized_cap);

        // --- Cointime value (flows × close price) ---
        self.cointime_value_destroyed.height.multi_insert_multiply(
            heights,
            &mut self.coinblocks_destroyed.height,
            &mut closes.height,
        );

        self.cointime_value_destroyed.date.multi_insert_multiply(
            dates,
            &mut self.coinblocks_destroyed.date,
            &mut closes.date,
        );

        self.cointime_value_created.height.multi_insert_multiply(
            heights,
            &mut self.coinblocks_created.height,
            &mut closes.height,
        );

        self.cointime_value_created.date.multi_insert_multiply(
            dates,
            &mut self.coinblocks_created.date,
            &mut closes.date,
        );

        self.cointime_value_stored.height.multi_insert_multiply(
            heights,
            &mut self.coinblocks_stored.height,
            &mut closes.height,
        );

        self.cointime_value_stored.date.multi_insert_multiply(
            dates,
            &mut self.coinblocks_stored.date,
            &mut closes.date,
        );

        self.total_cointime_value_created.multi_insert_cumulative(
            heights,
            dates,
            &mut self.cointime_value_created,
        );

        self.total_cointime_value_destroyed.multi_insert_cumulative(
            heights,
            dates,
            &mut self.cointime_value_destroyed,
        );

        self.total_cointime_value_stored.multi_insert_cumulative(
            heights,
            dates,
            &mut self.cointime_value_stored,
        );

        self.cointime_price.multi_insert_divide(
            heights,
            dates,
            &mut self.total_cointime_value_destroyed,
            &mut self.cumulative_coinblocks_stored,
        );

        self.cointime_cap.multi_insert_multiply(
            heights,
            dates,
            &mut self.cointime_price,
            circulating_supply,
        );
    }
}
impl AnyDataset for CointimeDataset {
    // Only `coinblocks_destroyed` is written by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.coinblocks_destroyed]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.coinblocks_destroyed]
    }

    // Every other map is derived in `compute` and registered here so the
    // framework can persist/validate it. Keep this list in sync with the
    // struct's "Computed" fields and with the mutable list below.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.active_cap,
            &self.active_price,
            &self.active_supply,
            &self.active_supply_3m_net_change,
            &self.active_supply_net_change,
            &self.activity_to_vaultedness_ratio,
            &self.coinblocks_created,
            &self.coinblocks_stored,
            &self.cointime_adjusted_velocity,
            &self.cointime_adjusted_yearly_inflation_rate,
            &self.cointime_cap,
            &self.cointime_price,
            &self.cointime_value_created,
            &self.cointime_value_destroyed,
            &self.cointime_value_stored,
            &self.concurrent_liveliness,
            &self.concurrent_liveliness_2w_median,
            &self.cumulative_coinblocks_created,
            &self.cumulative_coinblocks_destroyed,
            &self.cumulative_coinblocks_stored,
            &self.investor_cap,
            &self.investorness,
            &self.liveliness,
            &self.liveliness_net_change,
            &self.liveliness_net_change_2w_median,
            &self.producerness,
            &self.thermo_cap,
            &self.thermo_cap_to_investor_cap_ratio,
            &self.total_cointime_value_created,
            &self.total_cointime_value_destroyed,
            &self.total_cointime_value_stored,
            &self.true_market_deviation,
            &self.true_market_mean,
            &self.true_market_net_unrealized_profit_and_loss,
            &self.vaulted_cap,
            &self.vaulted_price,
            &self.vaulted_supply,
            &self.vaulted_supply_net_change,
            &self.vaulted_supply_3m_net_change,
            &self.vaultedness,
            &self.vaulting_rate,
        ]
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.active_cap,
            &mut self.active_price,
            &mut self.active_supply,
            &mut self.active_supply_3m_net_change,
            &mut self.active_supply_net_change,
            &mut self.activity_to_vaultedness_ratio,
            &mut self.coinblocks_created,
            &mut self.coinblocks_stored,
            &mut self.cointime_adjusted_velocity,
            &mut self.cointime_adjusted_yearly_inflation_rate,
            &mut self.cointime_cap,
            &mut self.cointime_price,
            &mut self.cointime_value_created,
            &mut self.cointime_value_destroyed,
            &mut self.cointime_value_stored,
            &mut self.concurrent_liveliness,
            &mut self.concurrent_liveliness_2w_median,
            &mut self.cumulative_coinblocks_created,
            &mut self.cumulative_coinblocks_destroyed,
            &mut self.cumulative_coinblocks_stored,
            &mut self.investor_cap,
            &mut self.investorness,
            &mut self.liveliness,
            &mut self.liveliness_net_change,
            &mut self.liveliness_net_change_2w_median,
            &mut self.producerness,
            &mut self.thermo_cap,
            &mut self.thermo_cap_to_investor_cap_ratio,
            &mut self.total_cointime_value_created,
            &mut self.total_cointime_value_destroyed,
            &mut self.total_cointime_value_stored,
            &mut self.true_market_deviation,
            &mut self.true_market_mean,
            &mut self.true_market_net_unrealized_profit_and_loss,
            &mut self.vaulted_cap,
            &mut self.vaulted_price,
            &mut self.vaulted_supply,
            &mut self.vaulted_supply_net_change,
            &mut self.vaulted_supply_3m_net_change,
            &mut self.vaultedness,
            &mut self.vaulting_rate,
        ]
    }

    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
}

View File

@@ -0,0 +1,52 @@
use allocative::Allocative;
use crate::structs::{AnyBiMap, BiMap};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Constant reference lines (e.g. for ratio charts): always 50 and 100.
#[derive(Allocative)]
pub struct ConstantDataset {
    min_initial_states: MinInitialStates,

    // Computed
    // Constant 50 for every height and date.
    pub _50: BiMap<u16>,
    // Constant 100 for every height and date.
    pub _100: BiMap<u16>,
}
impl ConstantDataset {
    /// Opens (or creates) the constant maps under `parent_path`.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |name: &str| format!("{parent_path}/{name}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            _50: BiMap::new_bin(1, &path("50")),
            _100: BiMap::new_bin(1, &path("100")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Fills both constant series over the requested heights and dates.
    pub fn compute(&mut self, compute_data: &ComputeData) {
        let &ComputeData { heights, dates } = compute_data;

        self._50.multi_insert_const(heights, dates, 50);
        self._100.multi_insert_const(heights, dates, 100);
    }
}
impl AnyDataset for ConstantDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Both constant maps are computed (nothing is inserted per block).
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = Vec::with_capacity(2);
        maps.push(&self._50);
        maps.push(&self._100);
        maps
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = Vec::with_capacity(2);
        maps.push(&mut self._50);
        maps.push(&mut self._100);
        maps
    }
}

View File

@@ -0,0 +1,63 @@
use allocative::Allocative;
use crate::{
datasets::AnyDataset,
structs::{AnyDateMap, DateMap},
};
use super::{InsertData, MinInitialStates};
/// Per-date block-range metadata: the first and last block height of each day.
#[derive(Allocative)]
pub struct DateMetadataDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    // First block height of each date.
    pub first_height: DateMap<usize>,
    // Last block height seen so far for each date (overwritten per block).
    pub last_height: DateMap<usize>,
}
impl DateMetadataDataset {
    /// Opens (or creates) the dataset's maps under `parent_path`.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |name: &str| format!("{parent_path}/{name}");

        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            first_height: DateMap::new_bin(1, &path("first_height")),
            last_height: DateMap::new_bin(1, &path("last_height")),
        };

        let computed = MinInitialStates::compute_from_dataset(&dataset);
        dataset.min_initial_states.consume(computed);

        Ok(dataset)
    }

    /// Records the date's first height and updates its last height to the
    /// current block.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            date,
            date_first_height,
            height,
            ..
        } = insert_data;

        self.first_height.insert(date, date_first_height);
        self.last_height.insert(date, height);
    }
}
impl AnyDataset for DateMetadataDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Registers the date maps that `insert` writes to.
    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyDateMap + Send + Sync)> = Vec::with_capacity(2);
        maps.push(&self.first_height);
        maps.push(&self.last_height);
        maps
    }

    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        let mut maps: Vec<&mut dyn AnyDateMap> = Vec::with_capacity(2);
        maps.push(&mut self.first_height);
        maps.push(&mut self.last_height);
        maps
    }
}

View File

@@ -0,0 +1,643 @@
use allocative::Allocative;
use crate::{
bitcoin::TARGET_BLOCKS_PER_DAY,
datasets::AnyDataset,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, DateMap, HeightMap, WAmount},
utils::{BYTES_IN_MB, ONE_DAY_IN_DAYS, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{ComputeData, InsertData, MinInitialStates};
/// Mining-related series: issuance (coinbase/subsidy/fees), difficulty,
/// block sizes/intervals, hash rate and derived indicators.
#[derive(Allocative)]
pub struct MiningDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    pub blocks_mined: DateMap<usize>,
    // Running total of blocks (height + 1 at the date's last block).
    pub total_blocks_mined: DateMap<usize>,
    pub coinbase: BiMap<f64>,
    pub coinbase_in_dollars: BiMap<f32>,
    // Fees are the per-block sum of all transaction fees (see `insert`).
    pub fees: BiMap<f64>,
    pub fees_in_dollars: BiMap<f32>,

    // Raw
    // pub average_fee_paid: BiMap<f32>,
    // pub max_fee_paid: BiMap<f32>,
    // pub _90th_percentile_fee_paid: BiMap<f32>,
    // pub _75th_percentile_fee_paid: BiMap<f32>,
    // pub median_fee_paid: BiMap<f32>,
    // pub _25th_percentile_fee_paid: BiMap<f32>,
    // pub _10th_percentile_fee_paid: BiMap<f32>,
    // pub min_fee_paid: BiMap<f32>,
    // sat/vB
    // pub average_fee_price: BiMap<f32>,
    // pub max_fee_price: BiMap<f32>,
    // pub _90th_percentile_fee_price: BiMap<f32>,
    // pub _75th_percentile_fee_price: BiMap<f32>,
    // pub median_fee_price: BiMap<f32>,
    // pub _25th_percentile_fee_price: BiMap<f32>,
    // pub _10th_percentile_fee_price: BiMap<f32>,
    // pub min_fee_price: BiMap<f32>,
    // -

    // Subsidy = coinbase − fees (computed in `insert`).
    pub subsidy: BiMap<f64>,
    pub subsidy_in_dollars: BiMap<f32>,
    // `last_*` hold the value of the date's final block, not a daily sum.
    pub last_coinbase: DateMap<f64>,
    pub last_coinbase_in_dollars: DateMap<f32>,
    pub last_fees: DateMap<f64>,
    pub last_fees_in_dollars: DateMap<f32>,
    pub last_subsidy: DateMap<f64>,
    pub last_subsidy_in_dollars: DateMap<f32>,
    pub difficulty: BiMap<f64>,
    pub block_size: HeightMap<f32>,   // in MB
    pub block_weight: HeightMap<f32>, // in MB
    pub block_vbytes: HeightMap<u64>,
    pub block_interval: HeightMap<u32>, // in ms

    // Computed
    pub annualized_issuance: BiMap<f64>, // Same as subsidy_1y_sum
    pub blocks_mined_1d_target: DateMap<usize>,
    pub blocks_mined_1m_sma: DateMap<f32>,
    pub blocks_mined_1m_sum: DateMap<usize>,
    pub blocks_mined_1m_target: DateMap<usize>,
    pub blocks_mined_1w_sma: DateMap<f32>,
    pub blocks_mined_1w_sum: DateMap<usize>,
    pub blocks_mined_1w_target: DateMap<usize>,
    pub blocks_mined_1y_sum: DateMap<usize>,
    pub blocks_mined_1y_target: DateMap<usize>,
    pub cumulative_block_size: BiMap<f32>,
    pub subsidy_1y_sum: DateMap<f64>,
    pub subsidy_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_subsidy: BiMap<f64>,
    pub cumulative_subsidy_in_dollars: BiMap<f32>,
    pub coinbase_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1y_sum: DateMap<f64>,
    pub coinbase_in_dollars_1y_sma: DateMap<f32>,
    pub cumulative_coinbase: BiMap<f64>,
    pub cumulative_coinbase_in_dollars: BiMap<f32>,
    pub fees_1y_sum: DateMap<f64>,
    pub fees_in_dollars_1y_sum: DateMap<f64>,
    pub cumulative_fees: BiMap<f64>,
    pub cumulative_fees_in_dollars: BiMap<f32>,
    pub yearly_inflation_rate: BiMap<f64>,
    pub subsidy_to_coinbase_ratio: BiMap<f64>,
    pub fees_to_coinbase_ratio: BiMap<f64>,
    pub hash_rate: DateMap<f64>,
    pub hash_rate_1w_sma: DateMap<f32>,
    pub hash_rate_1m_sma: DateMap<f32>,
    pub hash_rate_2m_sma: DateMap<f32>,
    pub hash_price: DateMap<f64>,
    pub difficulty_adjustment: DateMap<f64>,
    pub puell_multiple: DateMap<f32>,
    // pub average_block_size: DateMap<f32>, // in MB
    // pub average_block_weight: DateMap<f32>, // in MB
    // pub average_block_vbytes: DateMap<u64>,
    // pub average_block_interval: DateMap<u32>, // in ms

    // pub blocks_size: DateMap<f32>,
    // pub average_block_size: DateMap<f32>,
    // pub median_block_size: DateMap<f32>,
    // pub average_block_weight: DateMap<f32>,
    // pub median_block_weight: DateMap<f32>,
    // pub average_block_interval: DateMap<u32>,
    // pub median_block_interval: DateMap<u32>,

    // pub hash_price_in_dollars: DateMap<f64>,
    // pub hash_price_30d_volatility: BiMap<f32>,

    // difficulty_adjustment
    // next_difficulty_adjustment
    // op return fees
    // inscriptions fees
    // until adjustement
    // until halving in days
    // until halving in blocks
}
impl MiningDataset {
    /// Opens (or creates) every map under `parent_path`, then derives the
    /// minimal initial states required before inserting new data.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            total_blocks_mined: DateMap::new_bin(1, &f("total_blocks_mined")),
            blocks_mined: DateMap::new_bin(1, &f("blocks_mined")),

            coinbase: BiMap::new_bin(1, &f("coinbase")),
            coinbase_in_dollars: BiMap::new_bin(1, &f("coinbase_in_dollars")),
            coinbase_1y_sum: DateMap::new_bin(1, &f("coinbase_1y_sum")),
            coinbase_in_dollars_1y_sum: DateMap::new_bin(1, &f("coinbase_in_dollars_1y_sum")),
            coinbase_in_dollars_1y_sma: DateMap::new_bin(1, &f("coinbase_in_dollars_1y_sma")),
            cumulative_coinbase: BiMap::new_bin(1, &f("cumulative_coinbase")),
            cumulative_coinbase_in_dollars: BiMap::new_bin(1, &f("cumulative_coinbase_in_dollars")),

            fees: BiMap::new_bin(1, &f("fees")),
            fees_in_dollars: BiMap::new_bin(1, &f("fees_in_dollars")),
            fees_1y_sum: DateMap::new_bin(1, &f("fees_1y_sum")),
            fees_in_dollars_1y_sum: DateMap::new_bin(1, &f("fees_in_dollars_1y_sum")),
            cumulative_fees: BiMap::new_bin(1, &f("cumulative_fees")),
            cumulative_fees_in_dollars: BiMap::new_bin(1, &f("cumulative_fees_in_dollars")),

            subsidy: BiMap::new_bin(1, &f("subsidy")),
            subsidy_in_dollars: BiMap::new_bin(1, &f("subsidy_in_dollars")),
            subsidy_1y_sum: DateMap::new_bin(1, &f("subsidy_1y_sum")),
            subsidy_in_dollars_1y_sum: DateMap::new_bin(1, &f("subsidy_in_dollars_1y_sum")),
            cumulative_subsidy: BiMap::new_bin(1, &f("cumulative_subsidy")),
            cumulative_subsidy_in_dollars: BiMap::new_bin(1, &f("cumulative_subsidy_in_dollars")),

            subsidy_to_coinbase_ratio: BiMap::new_bin(1, &f("subsidy_to_coinbase_ratio")),
            fees_to_coinbase_ratio: BiMap::new_bin(1, &f("fees_to_coinbase_ratio")),

            annualized_issuance: BiMap::new_bin(1, &f("annualized_issuance")),
            yearly_inflation_rate: BiMap::new_bin(1, &f("yearly_inflation_rate")),

            last_subsidy: DateMap::new_bin(1, &f("last_subsidy")),
            last_subsidy_in_dollars: DateMap::new_bin(1, &f("last_subsidy_in_dollars")),
            last_coinbase: DateMap::new_bin(1, &f("last_coinbase")),
            last_coinbase_in_dollars: DateMap::new_bin(1, &f("last_coinbase_in_dollars")),
            last_fees: DateMap::new_bin(1, &f("last_fees")),
            last_fees_in_dollars: DateMap::new_bin(1, &f("last_fees_in_dollars")),

            blocks_mined_1d_target: DateMap::new_bin(1, &f("blocks_mined_1d_target")),
            blocks_mined_1w_sma: DateMap::new_bin(1, &f("blocks_mined_1w_sma")),
            blocks_mined_1m_sma: DateMap::new_bin(1, &f("blocks_mined_1m_sma")),
            blocks_mined_1w_sum: DateMap::new_bin(1, &f("blocks_mined_1w_sum")),
            blocks_mined_1m_sum: DateMap::new_bin(1, &f("blocks_mined_1m_sum")),
            blocks_mined_1y_sum: DateMap::new_bin(1, &f("blocks_mined_1y_sum")),
            blocks_mined_1w_target: DateMap::new_bin(1, &f("blocks_mined_1w_target")),
            blocks_mined_1m_target: DateMap::new_bin(1, &f("blocks_mined_1m_target")),
            blocks_mined_1y_target: DateMap::new_bin(1, &f("blocks_mined_1y_target")),

            difficulty: BiMap::new_bin(1, &f("difficulty")),
            difficulty_adjustment: DateMap::new_bin(1, &f("difficulty_adjustment")),

            block_size: HeightMap::new_bin(1, &f("block_size")),
            cumulative_block_size: BiMap::new_bin(1, &f("cumulative_block_size")),
            block_weight: HeightMap::new_bin(1, &f("block_weight")),
            block_vbytes: HeightMap::new_bin(1, &f("block_vbytes")),
            block_interval: HeightMap::new_bin(1, &f("block_interval")),

            hash_rate: DateMap::new_bin(1, &f("hash_rate")),
            hash_rate_1w_sma: DateMap::new_bin(1, &f("hash_rate_1w_sma")),
            hash_rate_1m_sma: DateMap::new_bin(1, &f("hash_rate_1m_sma")),
            hash_rate_2m_sma: DateMap::new_bin(1, &f("hash_rate_2m_sma")),
            hash_price: DateMap::new_bin(1, &f("hash_price")),

            puell_multiple: DateMap::new_bin(1, &f("puell_multiple")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Inserts the block's issuance figures (coinbase, summed fees, subsidy,
    /// their dollar values) plus size/weight/interval/difficulty. On the
    /// date's last block, also writes the daily sums, `last_*` snapshots and
    /// blocks-mined counters.
    pub fn insert(
        &mut self,
        &InsertData {
            date_first_height,
            height,
            coinbase,
            fees,
            date_blocks_range,
            is_date_last_block,
            block_price,
            date,
            difficulty,
            block_size,
            block_vbytes,
            block_weight,
            block_interval,
            ..
        }: &InsertData,
    ) {
        self.coinbase.height.insert(height, coinbase.to_btc());
        // NOTE: these `insert` calls return the inserted value, reused below
        // for the `last_*` date snapshots.
        let coinbase_in_dollars = self
            .coinbase_in_dollars
            .height
            .insert(height, (block_price * coinbase).to_dollar() as f32);

        // Sum of all transaction fees in the block.
        let sumed_fees = WAmount::from_sat(fees.iter().map(|amount| amount.to_sat()).sum());

        self.fees.height.insert(height, sumed_fees.to_btc());
        let sumed_fees_in_dollars = self
            .fees_in_dollars
            .height
            .insert(height, (block_price * sumed_fees).to_dollar() as f32);

        // Subsidy = coinbase minus the fees collected.
        let subsidy = coinbase - sumed_fees;
        self.subsidy.height.insert(height, subsidy.to_btc());
        let subsidy_in_dollars = self
            .subsidy_in_dollars
            .height
            .insert(height, (block_price * subsidy).to_dollar() as f32);

        self.difficulty.height.insert(height, difficulty);

        // Sizes are stored in MB.
        self.block_size
            .insert(height, block_size as f32 / BYTES_IN_MB as f32);

        self.block_weight
            .insert(height, block_weight as f32 / BYTES_IN_MB as f32);

        self.block_vbytes.insert(height, block_vbytes);

        self.block_interval.insert(height, block_interval);

        if is_date_last_block {
            self.coinbase.date_insert_sum_range(date, date_blocks_range);

            self.coinbase_in_dollars
                .date_insert_sum_range(date, date_blocks_range);

            self.fees.date_insert_sum_range(date, date_blocks_range);

            self.fees_in_dollars
                .date_insert_sum_range(date, date_blocks_range);

            self.subsidy.date_insert_sum_range(date, date_blocks_range);

            self.subsidy_in_dollars
                .date_insert_sum_range(date, date_blocks_range);

            self.last_coinbase.insert(date, coinbase.to_btc());

            self.last_coinbase_in_dollars
                .insert(date, coinbase_in_dollars);

            self.last_subsidy.insert(date, subsidy.to_btc());

            self.last_subsidy_in_dollars
                .insert(date, subsidy_in_dollars);

            self.last_fees.insert(date, sumed_fees.to_btc());

            self.last_fees_in_dollars
                .insert(date, sumed_fees_in_dollars);

            // height + 1 == number of blocks mined up to and including this one.
            let total_blocks_mined = self.total_blocks_mined.insert(date, height + 1);

            self.blocks_mined
                .insert(date, total_blocks_mined - date_first_height);

            self.difficulty.date.insert(date, difficulty);
        }
    }

    /// Derives the computed series. Order matters for dependent steps (e.g.
    /// `coinbase_in_dollars_1y_sma` must exist before `puell_multiple`,
    /// `annualized_issuance` before `yearly_inflation_rate`).
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        last_height: &mut DateMap<usize>,
    ) {
        self.blocks_mined_1w_sum.multi_insert_last_x_sum(
            dates,
            &mut self.blocks_mined,
            ONE_WEEK_IN_DAYS,
        );

        self.blocks_mined_1m_sum.multi_insert_last_x_sum(
            dates,
            &mut self.blocks_mined,
            ONE_MONTH_IN_DAYS,
        );

        self.blocks_mined_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.blocks_mined,
            ONE_YEAR_IN_DAYS,
        );

        self.subsidy_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.subsidy.date,
            ONE_YEAR_IN_DAYS,
        );

        self.subsidy_in_dollars_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.subsidy_in_dollars.date,
            ONE_YEAR_IN_DAYS,
        );

        self.cumulative_subsidy
            .multi_insert_cumulative(heights, dates, &mut self.subsidy);

        self.cumulative_subsidy_in_dollars.multi_insert_cumulative(
            heights,
            dates,
            &mut self.subsidy_in_dollars,
        );

        self.fees_1y_sum
            .multi_insert_last_x_sum(dates, &mut self.fees.date, ONE_YEAR_IN_DAYS);

        self.fees_in_dollars_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.fees_in_dollars.date,
            ONE_YEAR_IN_DAYS,
        );

        self.cumulative_fees
            .multi_insert_cumulative(heights, dates, &mut self.fees);

        self.cumulative_fees_in_dollars.multi_insert_cumulative(
            heights,
            dates,
            &mut self.fees_in_dollars,
        );

        self.coinbase_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.coinbase.date,
            ONE_YEAR_IN_DAYS,
        );

        self.coinbase_in_dollars_1y_sum.multi_insert_last_x_sum(
            dates,
            &mut self.coinbase_in_dollars.date,
            ONE_YEAR_IN_DAYS,
        );

        self.coinbase_in_dollars_1y_sma.multi_insert_simple_average(
            dates,
            &mut self.coinbase_in_dollars.date,
            ONE_YEAR_IN_DAYS,
        );

        self.cumulative_coinbase
            .multi_insert_cumulative(heights, dates, &mut self.coinbase);

        self.cumulative_coinbase_in_dollars.multi_insert_cumulative(
            heights,
            dates,
            &mut self.coinbase_in_dollars,
        );

        self.subsidy_to_coinbase_ratio.multi_insert_percentage(
            heights,
            dates,
            &mut self.subsidy,
            &mut self.coinbase,
        );

        self.fees_to_coinbase_ratio.multi_insert_percentage(
            heights,
            dates,
            &mut self.fees,
            &mut self.coinbase,
        );

        self.annualized_issuance.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.subsidy,
            ONE_YEAR_IN_DAYS,
        );

        // Inflation rate = annualized issuance as a percentage of the
        // cumulative subsidy (i.e. total coins issued so far).
        self.yearly_inflation_rate.multi_insert_percentage(
            heights,
            dates,
            &mut self.annualized_issuance,
            &mut self.cumulative_subsidy,
        );

        self.blocks_mined_1d_target
            .multi_insert_const(dates, TARGET_BLOCKS_PER_DAY);

        self.blocks_mined_1w_target
            .multi_insert_const(dates, ONE_WEEK_IN_DAYS * TARGET_BLOCKS_PER_DAY);

        self.blocks_mined_1m_target
            .multi_insert_const(dates, ONE_MONTH_IN_DAYS * TARGET_BLOCKS_PER_DAY);

        self.blocks_mined_1y_target
            .multi_insert_const(dates, ONE_YEAR_IN_DAYS * TARGET_BLOCKS_PER_DAY);

        self.blocks_mined_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.blocks_mined,
            ONE_WEEK_IN_DAYS,
        );

        self.blocks_mined_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.blocks_mined,
            ONE_MONTH_IN_DAYS,
        );

        self.cumulative_block_size
            .height
            .multi_insert_cumulative(heights, &mut self.block_size);

        self.cumulative_block_size.date.multi_insert_last(
            dates,
            &mut self.cumulative_block_size.height,
            last_height,
        );

        // https://hashrateindex.com/blog/what-is-bitcoins-hashrate/
        // Daily hash rate in EH/s estimated from blocks mined vs target and
        // difficulty (2^32 expected hashes per difficulty unit, 600 s target).
        self.hash_rate.multi_insert(dates, |date| {
            let blocks_mined = self.blocks_mined.get_or_import(date).unwrap();

            let difficulty = self.difficulty.date.get_or_import(date).unwrap();

            ((blocks_mined as f64 / TARGET_BLOCKS_PER_DAY as f64) * difficulty * 2.0_f64.powi(32))
                / 600.0
                / 1_000_000_000_000_000_000.0
        });

        self.hash_rate_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.hash_rate,
            ONE_WEEK_IN_DAYS,
        );

        self.hash_rate_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.hash_rate,
            ONE_MONTH_IN_DAYS,
        );

        self.hash_rate_2m_sma.multi_insert_simple_average(
            dates,
            &mut self.hash_rate,
            2 * ONE_MONTH_IN_DAYS,
        );

        // Daily miner revenue per unit of hash rate — units depend on the
        // hash-rate scale above; the /1_000 factor's basis is not visible
        // here, TODO confirm intended denomination.
        self.hash_price.multi_insert(dates, |date| {
            let coinbase_in_dollars = self.coinbase_in_dollars.date.get_or_import(date).unwrap();

            let hashrate = self.hash_rate.get_or_import(date).unwrap();

            coinbase_in_dollars as f64 / hashrate / 1_000.0
        });

        // Puell multiple = daily issuance value / its 1y SMA.
        self.puell_multiple.multi_insert_divide(
            dates,
            &mut self.coinbase_in_dollars.date,
            &mut self.coinbase_in_dollars_1y_sma,
        );

        // NOTE(review): named "difficulty adjustment" but computed as the
        // 1-day percentage change of difficulty — TODO confirm intent.
        self.difficulty_adjustment.multi_insert_percentage_change(
            dates,
            &mut self.difficulty.date,
            ONE_DAY_IN_DAYS,
        );
    }
}
impl AnyDataset for MiningDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // The registry methods below declare which maps this dataset owns so that
    // generic import/export machinery can iterate over them. "Inserted" maps
    // are filled while parsing blocks; "computed" maps are derived afterwards
    // in `compute`. Each shared (&) accessor must list exactly the same fields
    // as its mutable (&mut) counterpart, in the same order.

    // Height+date keyed maps written directly during block parsing.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.coinbase,
            &self.coinbase_in_dollars,
            &self.fees,
            &self.fees_in_dollars,
            &self.subsidy,
            &self.subsidy_in_dollars,
            &self.difficulty,
        ]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.coinbase,
            &mut self.coinbase_in_dollars,
            &mut self.fees,
            &mut self.fees_in_dollars,
            &mut self.subsidy,
            &mut self.subsidy_in_dollars,
            &mut self.difficulty,
        ]
    }

    // Date-keyed maps written directly during block parsing.
    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![
            &self.total_blocks_mined,
            &self.blocks_mined,
            &self.last_subsidy,
            &self.last_subsidy_in_dollars,
            &self.last_coinbase,
            &self.last_coinbase_in_dollars,
            &self.last_fees,
            &self.last_fees_in_dollars,
        ]
    }

    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![
            &mut self.total_blocks_mined,
            &mut self.blocks_mined,
            &mut self.last_subsidy,
            &mut self.last_subsidy_in_dollars,
            &mut self.last_coinbase,
            &mut self.last_coinbase_in_dollars,
            &mut self.last_fees,
            &mut self.last_fees_in_dollars,
        ]
    }

    // Height-keyed maps written directly during block parsing.
    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        vec![
            &self.block_size,
            &self.block_weight,
            &self.block_vbytes,
            &self.block_interval,
        ]
    }

    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        vec![
            &mut self.block_size,
            &mut self.block_weight,
            &mut self.block_vbytes,
            &mut self.block_interval,
        ]
    }

    // Height+date keyed maps derived in `compute` from the inserted ones.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.cumulative_coinbase,
            &self.cumulative_coinbase_in_dollars,
            &self.cumulative_fees,
            &self.cumulative_fees_in_dollars,
            &self.cumulative_subsidy,
            &self.cumulative_subsidy_in_dollars,
            &self.annualized_issuance,
            &self.yearly_inflation_rate,
            &self.cumulative_block_size,
            &self.subsidy_to_coinbase_ratio,
            &self.fees_to_coinbase_ratio,
        ]
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.cumulative_coinbase,
            &mut self.cumulative_coinbase_in_dollars,
            &mut self.cumulative_fees,
            &mut self.cumulative_fees_in_dollars,
            &mut self.cumulative_subsidy,
            &mut self.cumulative_subsidy_in_dollars,
            &mut self.annualized_issuance,
            &mut self.yearly_inflation_rate,
            &mut self.cumulative_block_size,
            &mut self.subsidy_to_coinbase_ratio,
            &mut self.fees_to_coinbase_ratio,
        ]
    }

    // Date-keyed maps derived in `compute` (targets, sums, SMAs, hash metrics).
    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![
            &self.blocks_mined_1d_target,
            &self.blocks_mined_1w_sma,
            &self.blocks_mined_1m_sma,
            &self.blocks_mined_1w_sum,
            &self.blocks_mined_1m_sum,
            &self.blocks_mined_1y_sum,
            &self.blocks_mined_1w_target,
            &self.blocks_mined_1m_target,
            &self.blocks_mined_1y_target,
            &self.subsidy_1y_sum,
            &self.subsidy_in_dollars_1y_sum,
            &self.coinbase_1y_sum,
            &self.coinbase_in_dollars_1y_sum,
            &self.coinbase_in_dollars_1y_sma,
            &self.fees_1y_sum,
            &self.fees_in_dollars_1y_sum,
            &self.hash_rate,
            &self.hash_rate_1w_sma,
            &self.hash_rate_1m_sma,
            &self.hash_rate_2m_sma,
            &self.hash_price,
            &self.puell_multiple,
            &self.difficulty_adjustment,
        ]
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![
            &mut self.blocks_mined_1d_target,
            &mut self.blocks_mined_1w_sma,
            &mut self.blocks_mined_1m_sma,
            &mut self.blocks_mined_1w_sum,
            &mut self.blocks_mined_1m_sum,
            &mut self.blocks_mined_1y_sum,
            &mut self.blocks_mined_1w_target,
            &mut self.blocks_mined_1m_target,
            &mut self.blocks_mined_1y_target,
            &mut self.subsidy_1y_sum,
            &mut self.subsidy_in_dollars_1y_sum,
            &mut self.coinbase_1y_sum,
            &mut self.coinbase_in_dollars_1y_sum,
            &mut self.coinbase_in_dollars_1y_sma,
            &mut self.fees_1y_sum,
            &mut self.fees_in_dollars_1y_sum,
            &mut self.hash_rate,
            &mut self.hash_rate_1w_sma,
            &mut self.hash_rate_1m_sma,
            &mut self.hash_rate_2m_sma,
            &mut self.hash_price,
            &mut self.puell_multiple,
            &mut self.difficulty_adjustment,
        ]
    }
}

340
parser/src/datasets/mod.rs Normal file
View File

@@ -0,0 +1,340 @@
use std::{collections::BTreeMap, ops::RangeInclusive};
use allocative::Allocative;
use itertools::Itertools;
use rayon::prelude::*;
mod _traits;
mod address;
mod block_metadata;
mod coindays;
mod cointime;
mod constant;
mod date_metadata;
mod mining;
mod price;
mod subs;
mod transaction;
mod utxo;
pub use _traits::*;
pub use address::*;
pub use block_metadata::*;
pub use coindays::*;
pub use cointime::*;
pub use constant::*;
pub use date_metadata::*;
pub use mining::*;
pub use price::*;
pub use subs::*;
pub use transaction::*;
pub use utxo::*;
use crate::{
databases::Databases,
io::Json,
states::{
AddressCohortsInputStates,
AddressCohortsOneShotStates,
AddressCohortsRealizedStates,
States,
UTXOCohortsOneShotStates,
// UTXOCohortsReceivedStates,
UTXOCohortsSentStates,
},
structs::{Price, WAmount, WNaiveDate},
};
/// Everything a dataset may need to record one parsed block.
///
/// Built once per block by the parsing loop and passed by reference to every
/// dataset's `insert`; fields are ordered alphabetically (mostly) rather than
/// by meaning.
pub struct InsertData<'a> {
    // Per-address-cohort states; `None` when address computation is disabled
    // for this block (see `compute_addresses`).
    pub address_cohorts_input_states: &'a Option<AddressCohortsInputStates>,
    pub address_cohorts_one_shot_states: &'a Option<AddressCohortsOneShotStates>,
    pub address_cohorts_realized_states: &'a Option<AddressCohortsRealizedStates>,
    // Total amount moved by this block's transactions.
    pub amount_sent: WAmount,
    // Seconds since the previous block — presumably derived from timestamps; confirm at call site.
    pub block_interval: u32,
    pub block_price: Price,
    pub block_size: usize,
    pub block_vbytes: u64,
    pub block_weight: u64,
    // Full coinbase output (subsidy + fees).
    pub coinbase: WAmount,
    // Whether address-level datasets should process this block.
    pub compute_addresses: bool,
    pub databases: &'a Databases,
    pub date: WNaiveDate,
    // Height range of all blocks belonging to `date`.
    pub date_blocks_range: &'a RangeInclusive<usize>,
    pub date_first_height: usize,
    pub difficulty: f64,
    // One fee entry per transaction in the block.
    pub fees: &'a Vec<WAmount>,
    pub height: usize,
    // True only for the final block of `date` — datasets use this to flush
    // per-date values.
    pub is_date_last_block: bool,
    // "satblocks"/"satdays": coin amount weighted by blocks/days held before
    // being spent — TODO confirm exact unit against producer.
    pub satblocks_destroyed: WAmount,
    pub satdays_destroyed: WAmount,
    pub states: &'a States,
    pub timestamp: u32,
    pub transaction_count: usize,
    pub utxo_cohorts_one_shot_states: &'a UTXOCohortsOneShotStates,
    // pub utxo_cohorts_received_states: &'a UTXOCohortsReceivedStates,
    pub utxo_cohorts_sent_states: &'a UTXOCohortsSentStates,
}
/// The height and date ranges over which `compute` passes should derive
/// values; both slices are expected to cover the same freshly-inserted span.
pub struct ComputeData<'a> {
    pub heights: &'a [usize],
    pub dates: &'a [WNaiveDate],
}
/// Root container holding every dataset group of the parser; created via
/// `import`, fed block-by-block via `insert`, post-processed via `compute`,
/// and persisted via `export`.
#[derive(Allocative)]
pub struct AllDatasets {
    // Aggregated minimum starting heights/dates across all child datasets.
    min_initial_states: MinInitialStates,

    pub constant: ConstantDataset,
    pub address: AddressDatasets,
    pub block_metadata: BlockMetadataDataset,
    pub coindays: CoindaysDataset,
    pub cointime: CointimeDataset,
    pub date_metadata: DateMetadataDataset,
    pub mining: MiningDataset,
    pub price: PriceDatasets,
    pub transaction: TransactionDataset,
    pub utxo: UTXODatasets,
}
impl AllDatasets {
    /// Loads every dataset group from disk (under `../datasets`), computes the
    /// minimal initial states needed to resume processing, and writes the
    /// path-to-type manifest consumed by downstream tooling.
    pub fn import() -> color_eyre::Result<Self> {
        let path = "../datasets";

        let price = PriceDatasets::import(path)?;

        let constant = ConstantDataset::import(path)?;

        let date_metadata = DateMetadataDataset::import(path)?;

        let cointime = CointimeDataset::import(path)?;

        let coindays = CoindaysDataset::import(path)?;

        let mining = MiningDataset::import(path)?;

        let block_metadata = BlockMetadataDataset::import(path)?;

        let transaction = TransactionDataset::import(path)?;

        let address = AddressDatasets::import(path)?;

        let utxo = UTXODatasets::import(path)?;

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            address,
            block_metadata,
            cointime,
            coindays,
            constant,
            date_metadata,
            price,
            mining,
            transaction,
            utxo,
        };

        // The aggregate min-initial-state can only be computed once all
        // children exist, hence the two-phase construction.
        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s));

        s.export_path_to_type()?;

        Ok(s)
    }

    /// Routes one parsed block to every dataset that still needs it.
    /// `address` and `utxo` gate themselves internally; the others are guarded
    /// by `needs_insert` so already-persisted height/date ranges are skipped.
    pub fn insert(&mut self, insert_data: InsertData) {
        self.address.insert(&insert_data);

        self.utxo.insert(&insert_data);

        if self
            .block_metadata
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.block_metadata.insert(&insert_data);
        }

        if self
            .date_metadata
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.date_metadata.insert(&insert_data);
        }

        if self
            .coindays
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.coindays.insert(&insert_data);
        }

        if self
            .mining
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.mining.insert(&insert_data);
        }

        if self
            .transaction
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.transaction.insert(&insert_data);
        }

        if self
            .cointime
            .needs_insert(insert_data.height, insert_data.date)
        {
            self.cointime.insert(&insert_data);
        }
    }

    /// Derives all computed maps for the given ranges.
    ///
    /// Ordering matters: `mining` runs first because `price`, `address`,
    /// `utxo`, `transaction` and `cointime` all borrow maps it produces
    /// (notably `cumulative_subsidy`), and `cointime` runs last since it reads
    /// outputs from nearly every other dataset.
    pub fn compute(&mut self, compute_data: ComputeData) {
        if self.constant.should_compute(&compute_data) {
            self.constant.compute(&compute_data);
        }

        if self.mining.should_compute(&compute_data) {
            self.mining
                .compute(&compute_data, &mut self.date_metadata.last_height);
        }

        // No compute needed for now
        self.price
            .compute(&compute_data, &mut self.mining.cumulative_subsidy);

        self.address.compute(
            &compute_data,
            &mut self.price.closes,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );

        self.utxo.compute(
            &compute_data,
            &mut self.price.closes,
            &mut self.mining.cumulative_subsidy,
            &mut self.price.market_cap,
        );

        // No compute needed for now
        // if self.block_metadata.should_compute(height, date) {
        //     self.block_metadata.compute(&compute_data);
        // }

        // No compute needed for now
        // if self.date_metadata.should_compute(height, date) {
        //     self.date_metadata.compute(&compute_data);
        // }

        // No compute needed for now
        // if self.coindays.should_compute(height, date) {
        //     self.coindays.compute(&compute_data);
        // }

        if self.transaction.should_compute(&compute_data) {
            self.transaction.compute(
                &compute_data,
                &mut self.mining.cumulative_subsidy,
                &mut self.mining.block_interval,
            );
        }

        if self.cointime.should_compute(&compute_data) {
            self.cointime.compute(
                &compute_data,
                &mut self.date_metadata.first_height,
                &mut self.date_metadata.last_height,
                &mut self.price.closes,
                &mut self.mining.cumulative_subsidy,
                &mut self.address.cohorts.all.all.capitalization.realized_cap,
                &mut self.address.cohorts.all.all.capitalization.realized_price,
                &mut self.mining.yearly_inflation_rate,
                &mut self.transaction.annualized_volume,
                &mut self.mining.cumulative_subsidy_in_dollars,
            );
        }
    }

    /// Writes a JSON manifest mapping each exported file path to the name of
    /// the value type stored in it.
    pub fn export_path_to_type(&self) -> color_eyre::Result<()> {
        let path_to_type: BTreeMap<&str, &str> = self
            .to_any_dataset_vec()
            .into_iter()
            .flat_map(|dataset| {
                dataset
                    .to_all_map_vec()
                    .into_iter()
                    .flat_map(|map| map.exported_path_with_t_name())
            })
            .collect();

        Json::export("../datasets/disk_path_to_type.json", &path_to_type)
    }

    /// Persists every dataset: serial `pre_export`, parallel (rayon) `export`,
    /// then serial `post_export`. Aborts on the first export error.
    pub fn export(&mut self) -> color_eyre::Result<()> {
        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| dataset.pre_export());

        self.to_any_dataset_vec()
            .into_par_iter()
            .try_for_each(|dataset| -> color_eyre::Result<()> { dataset.export() })?;

        self.to_mut_any_dataset_vec()
            .into_iter()
            .for_each(|dataset| dataset.post_export());

        Ok(())
    }
}
impl AnyDatasets for AllDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Flattens every child dataset into a single list, preserving order:
    /// price, constant, address group, utxo group, then the remaining
    /// standalone datasets.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> = Vec::new();

        datasets.push(&self.price);
        datasets.push(&self.constant);

        datasets.extend(self.address.to_any_dataset_vec());
        datasets.extend(self.utxo.to_any_dataset_vec());

        datasets.push(&self.mining);
        datasets.push(&self.transaction);
        datasets.push(&self.block_metadata);
        datasets.push(&self.date_metadata);
        datasets.push(&self.cointime);
        datasets.push(&self.coindays);

        datasets
    }

    /// Mutable counterpart of `to_any_dataset_vec`, same contents and order.
    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> = Vec::new();

        datasets.push(&mut self.price);
        datasets.push(&mut self.constant);

        datasets.extend(self.address.to_mut_any_dataset_vec());
        datasets.extend(self.utxo.to_mut_any_dataset_vec());

        datasets.push(&mut self.mining);
        datasets.push(&mut self.transaction);
        datasets.push(&mut self.block_metadata);
        datasets.push(&mut self.date_metadata);
        datasets.push(&mut self.cointime);
        datasets.push(&mut self.coindays);

        datasets
    }
}

View File

@@ -0,0 +1,493 @@
mod ohlc;
use std::collections::BTreeMap;
use allocative::Allocative;
use chrono::{Days, NaiveDateTime, NaiveTime, TimeZone, Timelike, Utc};
use color_eyre::eyre::Error;
pub use ohlc::*;
use crate::{
price::{Binance, Kraken},
structs::{AnyBiMap, AnyDateMap, BiMap, DateMap, WNaiveDate},
utils::{ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// All price-derived datasets: raw OHLC candles plus closes, market cap,
/// moving averages, and total/compound return series.
#[derive(Allocative)]
pub struct PriceDatasets {
    min_initial_states: MinInitialStates,

    // Lazily-fetched raw price caches — `None` until the first lookup that
    // needs them (see `get_from_*` helpers).
    kraken_daily: Option<BTreeMap<WNaiveDate, OHLC>>,
    kraken_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_1mn: Option<BTreeMap<u32, OHLC>>,
    binance_har: Option<BTreeMap<u32, OHLC>>,

    // Inserted
    pub ohlcs: BiMap<OHLC>,

    // Computed
    pub closes: BiMap<f32>,
    pub market_cap: BiMap<f32>,
    // Calendar-window simple moving averages of the daily close.
    pub price_1w_sma: DateMap<f32>,
    pub price_1m_sma: DateMap<f32>,
    pub price_1y_sma: DateMap<f32>,
    pub price_2y_sma: DateMap<f32>,
    pub price_4y_sma: DateMap<f32>,
    // Fibonacci-length SMAs (8, 13, 21, 34, 55, 89, 144 days).
    pub price_8d_sma: DateMap<f32>,
    pub price_13d_sma: DateMap<f32>,
    pub price_21d_sma: DateMap<f32>,
    pub price_34d_sma: DateMap<f32>,
    pub price_55d_sma: DateMap<f32>,
    pub price_89d_sma: DateMap<f32>,
    pub price_144d_sma: DateMap<f32>,
    pub price_200w_sma: DateMap<f32>,
    // Percentage change of the close over each lookback window.
    pub price_1d_total_return: DateMap<f32>,
    pub price_1m_total_return: DateMap<f32>,
    pub price_6m_total_return: DateMap<f32>,
    pub price_1y_total_return: DateMap<f32>,
    pub price_2y_total_return: DateMap<f32>,
    pub price_3y_total_return: DateMap<f32>,
    pub price_4y_total_return: DateMap<f32>,
    pub price_6y_total_return: DateMap<f32>,
    pub price_8y_total_return: DateMap<f32>,
    pub price_10y_total_return: DateMap<f32>,
    // Annualized 4-year compound growth rate of the close, in percent.
    pub price_4y_compound_return: DateMap<f32>,
    // projection via lowest 4y compound value
    // volatility
    // drawdown
    // sats per dollar
}
impl PriceDatasets {
    /// Opens (or creates) every price map on disk. OHLC candles live under a
    /// separate `../price` directory; everything derived goes under
    /// `datasets_path`.
    pub fn import(datasets_path: &str) -> color_eyre::Result<Self> {
        let price_path = "../price";

        let f = |s: &str| format!("{datasets_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            binance_1mn: None,
            binance_har: None,
            kraken_1mn: None,
            kraken_daily: None,

            ohlcs: BiMap::new_json(1, &format!("{price_path}/ohlc")),
            closes: BiMap::new_json(1, &f("close")),
            market_cap: BiMap::new_bin(1, &f("market_cap")),
            price_1w_sma: DateMap::new_bin(1, &f("price_1w_sma")),
            price_1m_sma: DateMap::new_bin(1, &f("price_1m_sma")),
            price_1y_sma: DateMap::new_bin(1, &f("price_1y_sma")),
            price_2y_sma: DateMap::new_bin(1, &f("price_2y_sma")),
            price_4y_sma: DateMap::new_bin(1, &f("price_4y_sma")),
            price_8d_sma: DateMap::new_bin(1, &f("price_8d_sma")),
            price_13d_sma: DateMap::new_bin(1, &f("price_13d_sma")),
            price_21d_sma: DateMap::new_bin(1, &f("price_21d_sma")),
            price_34d_sma: DateMap::new_bin(1, &f("price_34d_sma")),
            price_55d_sma: DateMap::new_bin(1, &f("price_55d_sma")),
            price_89d_sma: DateMap::new_bin(1, &f("price_89d_sma")),
            price_144d_sma: DateMap::new_bin(1, &f("price_144d_sma")),
            price_200w_sma: DateMap::new_bin(1, &f("price_200w_sma")),
            price_1d_total_return: DateMap::new_bin(1, &f("price_1d_total_return")),
            price_1m_total_return: DateMap::new_bin(1, &f("price_1m_total_return")),
            price_6m_total_return: DateMap::new_bin(1, &f("price_6m_total_return")),
            price_1y_total_return: DateMap::new_bin(1, &f("price_1y_total_return")),
            price_2y_total_return: DateMap::new_bin(1, &f("price_2y_total_return")),
            price_3y_total_return: DateMap::new_bin(1, &f("price_3y_total_return")),
            price_4y_total_return: DateMap::new_bin(1, &f("price_4y_total_return")),
            price_6y_total_return: DateMap::new_bin(1, &f("price_6y_total_return")),
            price_8y_total_return: DateMap::new_bin(1, &f("price_8y_total_return")),
            price_10y_total_return: DateMap::new_bin(1, &f("price_10y_total_return")),
            price_4y_compound_return: DateMap::new_bin(1, &f("price_4y_compound_return")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Derives every computed price series for the given ranges. `closes`
    /// must be filled first since all other series read from it.
    pub fn compute(
        &mut self,
        &ComputeData { dates, heights }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        // Close price per height/date, extracted from the OHLC candles.
        self.closes
            .multi_insert_simple_transform(heights, dates, &mut self.ohlcs, &|ohlc| ohlc.close);

        // market cap = close * circulating supply
        self.market_cap
            .multi_insert_multiply(heights, dates, &mut self.closes, circulating_supply);

        self.price_1w_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            ONE_WEEK_IN_DAYS,
        );

        self.price_1m_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            ONE_MONTH_IN_DAYS,
        );

        self.price_1y_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            ONE_YEAR_IN_DAYS,
        );

        self.price_2y_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            2 * ONE_YEAR_IN_DAYS,
        );

        self.price_4y_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            4 * ONE_YEAR_IN_DAYS,
        );

        // Fibonacci-window SMAs.
        self.price_8d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 8);

        self.price_13d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 13);

        self.price_21d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 21);

        self.price_34d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 34);

        self.price_55d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 55);

        self.price_89d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 89);

        self.price_144d_sma
            .multi_insert_simple_average(dates, &mut self.closes.date, 144);

        self.price_200w_sma.multi_insert_simple_average(
            dates,
            &mut self.closes.date,
            200 * ONE_WEEK_IN_DAYS,
        );

        // Percentage change of the close over each lookback window.
        self.price_1d_total_return
            .multi_insert_percentage_change(dates, &mut self.closes.date, 1);

        self.price_1m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            ONE_MONTH_IN_DAYS,
        );

        self.price_6m_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            6 * ONE_MONTH_IN_DAYS,
        );

        self.price_1y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            ONE_YEAR_IN_DAYS,
        );

        self.price_2y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            2 * ONE_YEAR_IN_DAYS,
        );

        self.price_3y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            3 * ONE_YEAR_IN_DAYS,
        );

        self.price_4y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            4 * ONE_YEAR_IN_DAYS,
        );

        self.price_6y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            6 * ONE_YEAR_IN_DAYS,
        );

        self.price_8y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            8 * ONE_YEAR_IN_DAYS,
        );

        self.price_10y_total_return.multi_insert_percentage_change(
            dates,
            &mut self.closes.date,
            10 * ONE_YEAR_IN_DAYS,
        );

        // CAGR over 4 years: ((now / then)^(1/4) - 1) * 100.
        // A missing 4y-ago close defaults to 0.0, which yields inf/NaN here —
        // NOTE(review): presumably acceptable for the first 4 years of data; confirm.
        self.price_4y_compound_return
            .multi_insert_complex_transform(
                dates,
                &mut self.closes.date,
                |(last_value, date, closes)| {
                    let previous_value = date
                        .checked_sub_days(Days::new(4 * ONE_YEAR_IN_DAYS as u64))
                        .and_then(|date| closes.get_or_import(&WNaiveDate::wrap(date)))
                        .unwrap_or_default();

                    (((last_value / previous_value).powf(1.0 / 4.0)) - 1.0) * 100.0
                },
            );
    }

    /// Returns the cached candle for `date`, or fetches it from the daily
    /// Kraken feed (Binance fallback) and caches it.
    pub fn get_date_ohlc(&mut self, date: WNaiveDate) -> color_eyre::Result<OHLC> {
        // `is_date_safe` — presumably: date is old enough that the stored
        // candle is final and re-fetching is unnecessary; confirm in BiMap.
        if self.ohlcs.date.is_date_safe(date) {
            Ok(self.ohlcs.date.get(&date).unwrap().to_owned())
        } else {
            let ohlc = self.get_from_daily_kraken(&date)?;

            self.ohlcs.date.insert(date, ohlc);

            Ok(ohlc)
        }
    }

    /// Lazily downloads the daily Kraken series (falling back to Binance) and
    /// looks `date` up in it.
    fn get_from_daily_kraken(&mut self, date: &WNaiveDate) -> color_eyre::Result<OHLC> {
        if self.kraken_daily.is_none() {
            self.kraken_daily.replace(
                Kraken::fetch_daily_prices()
                    .unwrap_or_else(|_| Binance::fetch_daily_prices().unwrap()),
            );
        }

        self.kraken_daily
            .as_ref()
            .unwrap()
            .get(date)
            .cloned()
            .ok_or(Error::msg("Couldn't find date in daily kraken"))
    }

    /// Returns the candle covering the span between the previous block's
    /// timestamp and this block's, trying Kraken 1mn, then Binance 1mn, then a
    /// local binance.har dump. Panics if no source has the price.
    pub fn get_height_ohlc(
        &mut self,
        height: usize,
        timestamp: u32,
        previous_timestamp: Option<u32>,
    ) -> color_eyre::Result<OHLC> {
        if let Some(ohlc) = self.ohlcs.height.get(&height) {
            return Ok(ohlc);
        }

        // Truncate a unix timestamp to the start of its minute, matching the
        // granularity of the 1mn feeds.
        let clean_timestamp = |timestamp| {
            let date_time = Utc.timestamp_opt(i64::from(timestamp), 0).unwrap();

            NaiveDateTime::new(
                date_time.date_naive(),
                NaiveTime::from_hms_opt(date_time.hour(), date_time.minute(), 0).unwrap(),
            )
            .and_utc()
            .timestamp() as u32
        };

        let timestamp = clean_timestamp(timestamp);

        // Only the genesis block may lack a predecessor.
        if previous_timestamp.is_none() && height > 0 {
            panic!("Shouldn't be possible");
        }

        let previous_timestamp = previous_timestamp.map(clean_timestamp);

        let ohlc = self.get_from_1mn_kraken(timestamp, previous_timestamp).unwrap_or_else(|_| {
            self.get_from_1mn_binance(timestamp, previous_timestamp)
                .unwrap_or_else(|_| self.get_from_har_binance(timestamp, previous_timestamp).unwrap_or_else(|_| {
                    let date = WNaiveDate::from_timestamp(timestamp);

                    panic!(
                        "Can't find price for {height} - {timestamp} - {date}, please update binance.har file"
                    )
                }))
        });

        self.ohlcs.height.insert(height, ohlc);

        Ok(ohlc)
    }

    /// Lazily fetches Kraken's 1-minute candles, then resolves the block span.
    fn get_from_1mn_kraken(
        &mut self,
        timestamp: u32,
        previous_timestamp: Option<u32>,
    ) -> color_eyre::Result<OHLC> {
        if self.kraken_1mn.is_none() {
            self.kraken_1mn.replace(Kraken::fetch_1mn_prices()?);
        }

        Self::find_height_ohlc(&self.kraken_1mn, timestamp, previous_timestamp, "kraken 1m")
    }

    /// Lazily fetches Binance's 1-minute candles, then resolves the block span.
    fn get_from_1mn_binance(
        &mut self,
        timestamp: u32,
        previous_timestamp: Option<u32>,
    ) -> color_eyre::Result<OHLC> {
        if self.binance_1mn.is_none() {
            self.binance_1mn.replace(Binance::fetch_1mn_prices()?);
        }

        Self::find_height_ohlc(
            &self.binance_1mn,
            timestamp,
            previous_timestamp,
            "binance 1m",
        )
    }

    /// Lazily reads the local binance.har dump, then resolves the block span.
    fn get_from_har_binance(
        &mut self,
        timestamp: u32,
        previous_timestamp: Option<u32>,
    ) -> color_eyre::Result<OHLC> {
        if self.binance_har.is_none() {
            self.binance_har.replace(Binance::read_har_file()?);
        }

        Self::find_height_ohlc(
            &self.binance_har,
            timestamp,
            previous_timestamp,
            "binance har",
        )
    }

    /// Merges all 1-minute candles between `previous_timestamp` (exclusive)
    /// and `timestamp` (inclusive) into one block-level OHLC: the open is the
    /// previous candle's close, high/low/close are folded over the range.
    /// Errors if either boundary candle is missing from `tree`.
    fn find_height_ohlc(
        tree: &Option<BTreeMap<u32, OHLC>>,
        timestamp: u32,
        previous_timestamp: Option<u32>,
        name: &str,
    ) -> color_eyre::Result<OHLC> {
        let tree = tree.as_ref().unwrap();

        let err = Error::msg(format!("Couldn't find timestamp in {name}"));

        // Genesis (no previous timestamp) seeds with a default (zeroed) OHLC.
        let previous_ohlc = previous_timestamp
            .map_or(Some(OHLC::default()), |previous_timestamp| {
                tree.get(&previous_timestamp).cloned()
            });

        let last_ohlc = tree.get(&timestamp);

        if previous_ohlc.is_none() || last_ohlc.is_none() {
            return Err(err);
        }

        let previous_ohlc = previous_ohlc.unwrap();

        // Start flat at the previous close; the fold below widens high/low and
        // advances close through the range.
        let mut final_ohlc = OHLC {
            open: previous_ohlc.close,
            high: previous_ohlc.close,
            low: previous_ohlc.close,
            close: previous_ohlc.close,
        };

        let start = previous_timestamp.unwrap_or(0);
        let end = timestamp;

        // Otherwise it's a re-org
        if start < end {
            // skip(1): the candle at `start` already contributed via its close.
            tree.range(&start..=&end).skip(1).for_each(|(_, ohlc)| {
                if ohlc.high > final_ohlc.high {
                    final_ohlc.high = ohlc.high
                }

                if ohlc.low < final_ohlc.low {
                    final_ohlc.low = ohlc.low
                }

                final_ohlc.close = ohlc.close;
            });
        }

        Ok(final_ohlc)
    }
}
impl AnyDataset for PriceDatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    // Registry methods for the generic import/export machinery; the shared
    // and mutable variants must list the same fields in the same order.

    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.ohlcs]
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.ohlcs]
    }

    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.closes, &self.market_cap]
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.closes, &mut self.market_cap]
    }

    // All derived date-keyed series: SMAs then total/compound returns.
    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        vec![
            &self.price_1w_sma,
            &self.price_1m_sma,
            &self.price_1y_sma,
            &self.price_2y_sma,
            &self.price_4y_sma,
            &self.price_8d_sma,
            &self.price_13d_sma,
            &self.price_21d_sma,
            &self.price_34d_sma,
            &self.price_55d_sma,
            &self.price_89d_sma,
            &self.price_144d_sma,
            &self.price_200w_sma,
            &self.price_1d_total_return,
            &self.price_1m_total_return,
            &self.price_6m_total_return,
            &self.price_1y_total_return,
            &self.price_2y_total_return,
            &self.price_3y_total_return,
            &self.price_4y_total_return,
            &self.price_6y_total_return,
            &self.price_8y_total_return,
            &self.price_10y_total_return,
            &self.price_4y_compound_return,
        ]
    }

    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        vec![
            &mut self.price_1w_sma,
            &mut self.price_1m_sma,
            &mut self.price_1y_sma,
            &mut self.price_2y_sma,
            &mut self.price_4y_sma,
            &mut self.price_8d_sma,
            &mut self.price_13d_sma,
            &mut self.price_21d_sma,
            &mut self.price_34d_sma,
            &mut self.price_55d_sma,
            &mut self.price_89d_sma,
            &mut self.price_144d_sma,
            &mut self.price_200w_sma,
            &mut self.price_1d_total_return,
            &mut self.price_1m_total_return,
            &mut self.price_6m_total_return,
            &mut self.price_1y_total_return,
            &mut self.price_2y_total_return,
            &mut self.price_3y_total_return,
            &mut self.price_4y_total_return,
            &mut self.price_6y_total_return,
            &mut self.price_8y_total_return,
            &mut self.price_10y_total_return,
            &mut self.price_4y_compound_return,
        ]
    }
}

View File

@@ -0,0 +1,12 @@
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
/// One Open-High-Low-Close price candle; the time span it covers is defined
/// by the map it is stored in (per-minute, per-day, or per-block).
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Default, Deserialize, Serialize, Encode, Decode, Clone, Copy, Allocative)]
pub struct OHLC {
    pub open: f32,
    pub high: f32,
    pub low: f32,
    pub close: f32,
}

View File

@@ -0,0 +1,121 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::CapitalizationState,
structs::{AnyBiMap, BiMap},
utils::ONE_MONTH_IN_DAYS,
};
/// Realized-capitalization metrics for one cohort: realized cap (inserted)
/// plus realized price, MVRV and 1-month net change (computed).
#[derive(Default, Allocative)]
pub struct CapitalizationDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    pub realized_cap: BiMap<f32>,

    // Computed
    pub realized_price: BiMap<f32>,
    mvrv: BiMap<f32>,
    realized_cap_1m_net_change: BiMap<f32>,
}
impl CapitalizationDataset {
    /// Opens (or creates) the four capitalization maps under `parent_path`
    /// and records the minimal initial state needed to resume.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            realized_cap: BiMap::new_bin(1, &f("realized_cap")),
            realized_cap_1m_net_change: BiMap::new_bin(1, &f("realized_cap_1m_net_change")),
            realized_price: BiMap::new_bin(1, &f("realized_price")),
            mvrv: BiMap::new_bin(1, &f("mvrv")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Records the cohort's realized cap (in dollars) at `height`, mirroring
    /// it to the date map on the date's final block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
        state: &CapitalizationState,
    ) {
        let realized_cap = self
            .realized_cap
            .height
            .insert(height, state.realized_cap.to_dollar() as f32);

        if is_date_last_block {
            self.realized_cap.date.insert(date, realized_cap);
        }
    }

    /// Derives realized price (realized cap / supply), MVRV
    /// (close / realized price), and the 1-month net change of realized cap.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        closes: &mut BiMap<f32>,
        cohort_supply: &mut BiMap<f64>,
    ) {
        self.realized_price.multi_insert_divide(
            heights,
            dates,
            &mut self.realized_cap,
            cohort_supply,
        );

        self.mvrv.height.multi_insert_divide(
            heights,
            &mut closes.height,
            &mut self.realized_price.height,
        );

        self.mvrv
            .date
            .multi_insert_divide(dates, &mut closes.date, &mut self.realized_price.date);

        self.realized_cap_1m_net_change.multi_insert_net_change(
            heights,
            dates,
            &mut self.realized_cap,
            ONE_MONTH_IN_DAYS,
        )
    }
}
impl AnyDataset for CapitalizationDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Maps written directly during parsing: only the realized cap.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = Vec::with_capacity(1);
        maps.push(&self.realized_cap);
        maps
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = Vec::with_capacity(1);
        maps.push(&mut self.realized_cap);
        maps
    }

    /// Maps derived in `compute`: realized price, MVRV, 1-month net change.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = Vec::with_capacity(3);
        maps.push(&self.realized_price);
        maps.push(&self.mvrv);
        maps.push(&self.realized_cap_1m_net_change);
        maps
    }

    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = Vec::with_capacity(3);
        maps.push(&mut self.realized_price);
        maps.push(&mut self.mvrv);
        maps.push(&mut self.realized_cap_1m_net_change);
        maps
    }
}

View File

@@ -0,0 +1,70 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::InputState,
structs::{AnyBiMap, BiMap},
};
/// Transaction-input metrics for one cohort: input count and moved volume.
#[derive(Default, Allocative)]
pub struct InputSubDataset {
    min_initial_states: MinInitialStates,

    // Number of inputs per height/date.
    pub count: BiMap<u64>,
    // Total BTC moved through inputs per height/date.
    pub volume: BiMap<f64>,

    // add inputs_per_second
}
impl InputSubDataset {
    /// Opens (or creates) the input count/volume maps under `parent_path`.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            count: BiMap::new_bin(1, &f("input_count")),
            volume: BiMap::new_bin(1, &f("input_volume")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Records this block's input count and volume at `height`; on the date's
    /// final block, flushes the date-level values.
    ///
    /// NOTE(review): the date-level `count` stores the LAST block's count
    /// while `volume` stores the SUM over the date's blocks — presumably the
    /// height `count` is already cumulative for the date; confirm against the
    /// `InputState` producer.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        state: &InputState,
    ) {
        // `state.count` is fractional here — rounded to the nearest whole input.
        let count = self.count.height.insert(height, state.count.round() as u64);
        self.volume.height.insert(height, state.volume.to_btc());

        if is_date_last_block {
            self.count.date.insert(date, count);
            self.volume.date_insert_sum_range(date, date_blocks_range);
        }
    }
}
impl AnyDataset for InputSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Both maps (count then volume) are filled directly during parsing.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = Vec::with_capacity(2);
        maps.push(&self.count);
        maps.push(&self.volume);
        maps
    }

    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = Vec::with_capacity(2);
        maps.push(&mut self.count);
        maps.push(&mut self.volume);
        maps
    }
}

View File

@@ -0,0 +1,80 @@
use allocative::Allocative;
mod capitalization;
mod input;
// mod output;
mod price_paid;
mod realized;
mod supply;
mod unrealized;
mod utxo;
pub use capitalization::*;
pub use input::*;
// pub use output::*;
pub use price_paid::*;
pub use realized::*;
pub use supply::*;
pub use unrealized::*;
pub use utxo::*;
use crate::datasets::AnyDataset;
use super::AnyDatasetGroup;
/// The bundle of per-cohort sub-datasets shared by address and UTXO cohorts.
#[derive(Default, Allocative)]
pub struct SubDataset {
    pub capitalization: CapitalizationDataset,
    pub input: InputSubDataset,
    // pub output: OutputSubDataset,
    pub price_paid: PricePaidSubDataset,
    pub realized: RealizedSubDataset,
    pub supply: SupplySubDataset,
    pub unrealized: UnrealizedSubDataset,
    pub utxo: UTXOSubDataset,
}
impl SubDataset {
    /// Imports every sub-dataset stored under `parent_path`, bailing out on
    /// the first one that fails to load.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let capitalization = CapitalizationDataset::import(parent_path)?;
        let input = InputSubDataset::import(parent_path)?;
        // let output = OutputSubDataset::import(parent_path)?;
        let price_paid = PricePaidSubDataset::import(parent_path)?;
        let realized = RealizedSubDataset::import(parent_path)?;
        let supply = SupplySubDataset::import(parent_path)?;
        let unrealized = UnrealizedSubDataset::import(parent_path)?;
        let utxo = UTXOSubDataset::import(parent_path)?;

        Ok(Self {
            capitalization,
            input,
            // output,
            price_paid,
            realized,
            supply,
            unrealized,
            utxo,
        })
    }
}
impl AnyDatasetGroup for SubDataset {
    /// Every active sub-dataset (output is currently disabled), as shared
    /// trait objects; `as_mut_vec` must mirror this list exactly.
    fn as_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        let mut datasets: Vec<&(dyn AnyDataset + Send + Sync)> = Vec::with_capacity(7);
        datasets.push(&self.capitalization);
        datasets.push(&self.price_paid);
        datasets.push(&self.realized);
        datasets.push(&self.supply);
        datasets.push(&self.unrealized);
        datasets.push(&self.utxo);
        datasets.push(&self.input);
        // datasets.push(&self.output);
        datasets
    }

    fn as_mut_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        let mut datasets: Vec<&mut dyn AnyDataset> = Vec::with_capacity(7);
        datasets.push(&mut self.capitalization);
        datasets.push(&mut self.price_paid);
        datasets.push(&mut self.realized);
        datasets.push(&mut self.supply);
        datasets.push(&mut self.unrealized);
        datasets.push(&mut self.utxo);
        datasets.push(&mut self.input);
        // datasets.push(&mut self.output);
        datasets
    }
}

View File

@@ -0,0 +1,103 @@
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::OutputState,
structs::{AnyBiMap, BiMap},
utils::ONE_YEAR_IN_DAYS,
};
/// Transaction-output metrics for one cohort: count/volume (inserted) plus
/// annualized volume and velocity (computed).
///
/// NOTE(review): unlike sibling sub-datasets this struct derives neither
/// `Default` nor `Allocative`; the module is currently commented out of
/// `subs`, so the divergence is presumably deliberate — confirm before
/// re-enabling.
pub struct OutputSubDataset {
    min_initial_states: MinInitialStates,

    // Inserted
    pub count: BiMap<f32>,
    pub volume: BiMap<f32>,

    // Computed
    pub annualized_volume: BiMap<f32>,
    pub velocity: BiMap<f32>,

    // add outputs_per_second
}
impl OutputSubDataset {
    /// Opens (or creates) the four output maps under `parent_path`.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let f = |s: &str| format!("{parent_path}/{s}");

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),

            count: BiMap::new_bin(1, &f("output_count")),
            volume: BiMap::new_bin(1, &f("output_volume")),
            annualized_volume: BiMap::new_bin(1, &f("annualized_output_volume")),
            velocity: BiMap::new_bin(1, &f("output_velocity")),
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));

        Ok(s)
    }

    /// Records this block's output count and volume at `height`; on the
    /// date's final block, flushes the date-level values (count as last
    /// block's value, volume as the sum over the date's blocks — same
    /// asymmetry as `InputSubDataset::insert`).
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        state: &OutputState,
    ) {
        let count = self.count.height.insert(height, state.count);
        self.volume.height.insert(height, state.volume);

        if is_date_last_block {
            self.count.date.insert(date, count);
            self.volume.date_insert_sum_range(date, date_blocks_range);
        }
    }

    /// Derives the trailing 1-year output volume and velocity
    /// (annualized volume / cohort supply).
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        cohort_supply: &mut BiMap<f32>,
    ) {
        self.annualized_volume.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.volume,
            ONE_YEAR_IN_DAYS,
        );

        self.velocity.multi_insert_divide(
            heights,
            dates,
            &mut self.annualized_volume,
            cohort_supply,
        );
    }
}
impl AnyDataset for OutputSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // Maps filled by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.count, &self.volume]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.count, &mut self.volume]
    }
    // Maps filled by `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.annualized_volume, &self.velocity]
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.annualized_volume, &mut self.velocity]
    }
}

View File

@@ -0,0 +1,293 @@
use allocative::Allocative;
use itertools::Itertools;
use crate::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::PricePaidState,
structs::{AnyBiMap, BiMap, WNaiveDate},
};
/// Price-paid percentile maps in 5% steps (the 50th percentile is stored as
/// `pp_median`).
///
/// NOTE(review): presumably supply-weighted percentiles of the cohort's cost
/// basis — confirm against `PricePaidState`.
#[derive(Default, Allocative)]
pub struct PricePaidSubDataset {
    // Lowest heights/dates from which inserts must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    pp_median: BiMap<f32>,
    pp_95p: BiMap<f32>,
    pp_90p: BiMap<f32>,
    pp_85p: BiMap<f32>,
    pp_80p: BiMap<f32>,
    pp_75p: BiMap<f32>,
    pp_70p: BiMap<f32>,
    pp_65p: BiMap<f32>,
    pp_60p: BiMap<f32>,
    pp_55p: BiMap<f32>,
    pp_45p: BiMap<f32>,
    pp_40p: BiMap<f32>,
    pp_35p: BiMap<f32>,
    pp_30p: BiMap<f32>,
    pp_25p: BiMap<f32>,
    pp_20p: BiMap<f32>,
    pp_15p: BiMap<f32>,
    pp_10p: BiMap<f32>,
    pp_05p: BiMap<f32>,
}
impl PricePaidSubDataset {
    /// Imports all percentile maps stored under `parent_path`, then records
    /// the minimum initial states needed to resume inserts.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        // Prefix every map file with the dataset's folder.
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            pp_median: BiMap::new_bin(1, &f("median_price_paid")),
            pp_95p: BiMap::new_bin(1, &f("95p_price_paid")),
            pp_90p: BiMap::new_bin(1, &f("90p_price_paid")),
            pp_85p: BiMap::new_bin(1, &f("85p_price_paid")),
            pp_80p: BiMap::new_bin(1, &f("80p_price_paid")),
            pp_75p: BiMap::new_bin(1, &f("75p_price_paid")),
            pp_70p: BiMap::new_bin(1, &f("70p_price_paid")),
            pp_65p: BiMap::new_bin(1, &f("65p_price_paid")),
            pp_60p: BiMap::new_bin(1, &f("60p_price_paid")),
            pp_55p: BiMap::new_bin(1, &f("55p_price_paid")),
            pp_45p: BiMap::new_bin(1, &f("45p_price_paid")),
            pp_40p: BiMap::new_bin(1, &f("40p_price_paid")),
            pp_35p: BiMap::new_bin(1, &f("35p_price_paid")),
            pp_30p: BiMap::new_bin(1, &f("30p_price_paid")),
            pp_25p: BiMap::new_bin(1, &f("25p_price_paid")),
            pp_20p: BiMap::new_bin(1, &f("20p_price_paid")),
            pp_15p: BiMap::new_bin(1, &f("15p_price_paid")),
            pp_10p: BiMap::new_bin(1, &f("10p_price_paid")),
            pp_05p: BiMap::new_bin(1, &f("05p_price_paid")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts every percentile for `height` (and mirrors them to the date
    /// maps on the date's last block), converting prices to dollars as `f32`.
    ///
    /// If the state holds no percentiles at all, default values are inserted
    /// instead.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
        state: &PricePaidState,
    ) {
        let PricePaidState {
            pp_05p,
            pp_10p,
            pp_15p,
            pp_20p,
            pp_25p,
            pp_30p,
            pp_35p,
            pp_40p,
            pp_45p,
            pp_median,
            pp_55p,
            pp_60p,
            pp_65p,
            pp_70p,
            pp_75p,
            pp_80p,
            pp_85p,
            pp_90p,
            pp_95p,
            ..
        } = state;
        // Check if iter was empty
        // Only `pp_05p` is probed: the state is assumed to set either all
        // percentiles or none of them, which is what makes the `unwrap()`s
        // below safe. TODO(review): confirm that invariant in PricePaidState.
        if pp_05p.is_none() {
            self.insert_height_default(height);
            if is_date_last_block {
                self.insert_date_default(date);
            }
            return;
        }
        let pp_05p = self
            .pp_05p
            .height
            .insert(height, pp_05p.unwrap().to_dollar() as f32);
        let pp_10p = self
            .pp_10p
            .height
            .insert(height, pp_10p.unwrap().to_dollar() as f32);
        let pp_15p = self
            .pp_15p
            .height
            .insert(height, pp_15p.unwrap().to_dollar() as f32);
        let pp_20p = self
            .pp_20p
            .height
            .insert(height, pp_20p.unwrap().to_dollar() as f32);
        let pp_25p = self
            .pp_25p
            .height
            .insert(height, pp_25p.unwrap().to_dollar() as f32);
        let pp_30p = self
            .pp_30p
            .height
            .insert(height, pp_30p.unwrap().to_dollar() as f32);
        let pp_35p = self
            .pp_35p
            .height
            .insert(height, pp_35p.unwrap().to_dollar() as f32);
        let pp_40p = self
            .pp_40p
            .height
            .insert(height, pp_40p.unwrap().to_dollar() as f32);
        let pp_45p = self
            .pp_45p
            .height
            .insert(height, pp_45p.unwrap().to_dollar() as f32);
        let pp_median = self
            .pp_median
            .height
            .insert(height, pp_median.unwrap().to_dollar() as f32);
        let pp_55p = self
            .pp_55p
            .height
            .insert(height, pp_55p.unwrap().to_dollar() as f32);
        let pp_60p = self
            .pp_60p
            .height
            .insert(height, pp_60p.unwrap().to_dollar() as f32);
        let pp_65p = self
            .pp_65p
            .height
            .insert(height, pp_65p.unwrap().to_dollar() as f32);
        let pp_70p = self
            .pp_70p
            .height
            .insert(height, pp_70p.unwrap().to_dollar() as f32);
        let pp_75p = self
            .pp_75p
            .height
            .insert(height, pp_75p.unwrap().to_dollar() as f32);
        let pp_80p = self
            .pp_80p
            .height
            .insert(height, pp_80p.unwrap().to_dollar() as f32);
        let pp_85p = self
            .pp_85p
            .height
            .insert(height, pp_85p.unwrap().to_dollar() as f32);
        let pp_90p = self
            .pp_90p
            .height
            .insert(height, pp_90p.unwrap().to_dollar() as f32);
        let pp_95p = self
            .pp_95p
            .height
            .insert(height, pp_95p.unwrap().to_dollar() as f32);
        if is_date_last_block {
            // Mirror the values just inserted at `height` into the date maps.
            self.pp_05p.date.insert(date, pp_05p);
            self.pp_10p.date.insert(date, pp_10p);
            self.pp_15p.date.insert(date, pp_15p);
            self.pp_20p.date.insert(date, pp_20p);
            self.pp_25p.date.insert(date, pp_25p);
            self.pp_30p.date.insert(date, pp_30p);
            self.pp_35p.date.insert(date, pp_35p);
            self.pp_40p.date.insert(date, pp_40p);
            self.pp_45p.date.insert(date, pp_45p);
            self.pp_median.date.insert(date, pp_median);
            self.pp_55p.date.insert(date, pp_55p);
            self.pp_60p.date.insert(date, pp_60p);
            self.pp_65p.date.insert(date, pp_65p);
            self.pp_70p.date.insert(date, pp_70p);
            self.pp_75p.date.insert(date, pp_75p);
            self.pp_80p.date.insert(date, pp_80p);
            self.pp_85p.date.insert(date, pp_85p);
            self.pp_90p.date.insert(date, pp_90p);
            self.pp_95p.date.insert(date, pp_95p);
        }
    }
    /// Inserts a default value at `height` in every percentile map.
    fn insert_height_default(&mut self, height: usize) {
        self.inserted_as_mut_vec().into_iter().for_each(|bi| {
            bi.height.insert_default(height);
        })
    }
    /// Inserts a default value at `date` in every percentile map.
    fn insert_date_default(&mut self, date: WNaiveDate) {
        self.inserted_as_mut_vec().into_iter().for_each(|bi| {
            bi.date.insert_default(date);
        })
    }
    /// All percentile maps, ordered from the 95th down to the 5th percentile
    /// (the median fills the 50th-percentile slot).
    pub fn inserted_as_vec(&self) -> Vec<&BiMap<f32>> {
        vec![
            &self.pp_95p,
            &self.pp_90p,
            &self.pp_85p,
            &self.pp_80p,
            &self.pp_75p,
            &self.pp_70p,
            &self.pp_65p,
            &self.pp_60p,
            &self.pp_55p,
            &self.pp_median,
            &self.pp_45p,
            &self.pp_40p,
            &self.pp_35p,
            &self.pp_30p,
            &self.pp_25p,
            &self.pp_20p,
            &self.pp_15p,
            &self.pp_10p,
            &self.pp_05p,
        ]
    }
    /// Same order as `inserted_as_vec`, but with exclusive borrows.
    pub fn inserted_as_mut_vec(&mut self) -> Vec<&mut BiMap<f32>> {
        vec![
            &mut self.pp_95p,
            &mut self.pp_90p,
            &mut self.pp_85p,
            &mut self.pp_80p,
            &mut self.pp_75p,
            &mut self.pp_70p,
            &mut self.pp_65p,
            &mut self.pp_60p,
            &mut self.pp_55p,
            &mut self.pp_median,
            &mut self.pp_45p,
            &mut self.pp_40p,
            &mut self.pp_35p,
            &mut self.pp_30p,
            &mut self.pp_25p,
            &mut self.pp_20p,
            &mut self.pp_15p,
            &mut self.pp_10p,
            &mut self.pp_05p,
        ]
    }
}
impl AnyDataset for PricePaidSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    /// Every percentile map is inserted; this dataset computes nothing.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let mut maps: Vec<&(dyn AnyBiMap + Send + Sync)> = vec![];
        for map in self.inserted_as_vec() {
            maps.push(map);
        }
        maps
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let mut maps: Vec<&mut dyn AnyBiMap> = vec![];
        for map in self.inserted_as_mut_vec() {
            maps.push(map);
        }
        maps
    }
}

View File

@@ -0,0 +1,178 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::RealizedState,
structs::{AnyBiMap, BiMap},
utils::ONE_MONTH_IN_DAYS,
};
/// Realized profit & loss maps for a cohort, plus derived cumulative series.
///
/// TODO: Fix fees not taken into account ?
#[derive(Default, Allocative)]
pub struct RealizedSubDataset {
    // Lowest heights/dates from which inserts/computes must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    realized_profit: BiMap<f32>,
    realized_loss: BiMap<f32>,
    // Computed
    negative_realized_loss: BiMap<f32>,
    net_realized_profit_and_loss: BiMap<f32>,
    net_realized_profit_and_loss_to_market_cap_ratio: BiMap<f32>,
    cumulative_realized_profit: BiMap<f32>,
    cumulative_realized_loss: BiMap<f32>,
    cumulative_net_realized_profit_and_loss: BiMap<f32>,
    // 1-month net change of the cumulative net realized P&L.
    cumulative_net_realized_profit_and_loss_1m_net_change: BiMap<f32>,
}
impl RealizedSubDataset {
    /// Imports all realized P&L maps stored under `parent_path`, then records
    /// the minimum initial states needed to resume inserts/computes.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        // Prefix every map file with the dataset's folder.
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            realized_profit: BiMap::new_bin(1, &f("realized_profit")),
            realized_loss: BiMap::new_bin(1, &f("realized_loss")),
            // NOTE(review): version 2 here (others are 1) — presumably bumped
            // to force a recompute of this map; confirm.
            negative_realized_loss: BiMap::new_bin(2, &f("negative_realized_loss")),
            net_realized_profit_and_loss: BiMap::new_bin(1, &f("net_realized_profit_and_loss")),
            net_realized_profit_and_loss_to_market_cap_ratio: BiMap::new_bin(
                1,
                &f("net_realized_profit_and_loss_to_market_cap_ratio"),
            ),
            cumulative_realized_profit: BiMap::new_bin(1, &f("cumulative_realized_profit")),
            cumulative_realized_loss: BiMap::new_bin(1, &f("cumulative_realized_loss")),
            cumulative_net_realized_profit_and_loss: BiMap::new_bin(
                1,
                &f("cumulative_net_realized_profit_and_loss"),
            ),
            cumulative_net_realized_profit_and_loss_1m_net_change: BiMap::new_bin(
                1,
                &f("cumulative_net_realized_profit_and_loss_1m_net_change"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts the block's realized profit and loss (in dollars) at `height`;
    /// on the date's last block the date values are the sums over the date's
    /// block range.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            date_blocks_range,
            ..
        }: &InsertData,
        height_state: &RealizedState,
    ) {
        self.realized_profit
            .height
            .insert(height, height_state.realized_profit.to_dollar() as f32);
        self.realized_loss
            .height
            .insert(height, height_state.realized_loss.to_dollar() as f32);
        if is_date_last_block {
            self.realized_profit
                .date_insert_sum_range(date, date_blocks_range);
            self.realized_loss
                .date_insert_sum_range(date, date_blocks_range);
        }
    }
    /// Derives every computed series from the inserted profit/loss maps and
    /// the chain-wide `market_cap`.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        market_cap: &mut BiMap<f32>,
    ) {
        // Loss with its sign flipped, for charting below zero.
        self.negative_realized_loss.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.realized_loss,
            &|v| v * -1.0,
        );
        self.net_realized_profit_and_loss.multi_insert_subtract(
            heights,
            dates,
            &mut self.realized_profit,
            &mut self.realized_loss,
        );
        self.net_realized_profit_and_loss_to_market_cap_ratio
            .multi_insert_divide(
                heights,
                dates,
                &mut self.net_realized_profit_and_loss,
                market_cap,
            );
        self.cumulative_realized_profit.multi_insert_cumulative(
            heights,
            dates,
            &mut self.realized_profit,
        );
        self.cumulative_realized_loss.multi_insert_cumulative(
            heights,
            dates,
            &mut self.realized_loss,
        );
        self.cumulative_net_realized_profit_and_loss
            .multi_insert_cumulative(heights, dates, &mut self.net_realized_profit_and_loss);
        // 1-month net change of the cumulative net P&L.
        self.cumulative_net_realized_profit_and_loss_1m_net_change
            .multi_insert_net_change(
                heights,
                dates,
                &mut self.cumulative_net_realized_profit_and_loss,
                ONE_MONTH_IN_DAYS,
            );
    }
}
impl AnyDataset for RealizedSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // Maps filled by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.realized_loss, &self.realized_profit]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.realized_loss, &mut self.realized_profit]
    }
    // Maps filled by `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.negative_realized_loss,
            &self.net_realized_profit_and_loss,
            &self.net_realized_profit_and_loss_to_market_cap_ratio,
            &self.cumulative_realized_profit,
            &self.cumulative_realized_loss,
            &self.cumulative_net_realized_profit_and_loss,
            &self.cumulative_net_realized_profit_and_loss_1m_net_change,
        ]
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.negative_realized_loss,
            &mut self.net_realized_profit_and_loss,
            &mut self.net_realized_profit_and_loss_to_market_cap_ratio,
            &mut self.cumulative_realized_profit,
            &mut self.cumulative_realized_loss,
            &mut self.cumulative_net_realized_profit_and_loss,
            &mut self.cumulative_net_realized_profit_and_loss_1m_net_change,
        ]
    }
}

View File

@@ -0,0 +1,114 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::SupplyState,
structs::{AnyBiMap, BiMap},
};
/// A cohort's total supply plus ratios against the circulating supply.
#[derive(Default, Allocative)]
pub struct SupplySubDataset {
    // Lowest heights/dates from which inserts/computes must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    pub supply: BiMap<f64>,
    // Computed
    pub supply_to_circulating_supply_ratio: BiMap<f64>,
    pub halved_supply: BiMap<f64>,
    pub halved_supply_to_circulating_supply_ratio: BiMap<f64>,
}
impl SupplySubDataset {
    /// Imports the supply maps stored under `parent_path`, then records the
    /// minimum initial states needed to resume inserts/computes.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        // Prefix every map file with the dataset's folder.
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            supply: BiMap::new_bin(1, &f("supply")),
            supply_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                &f("supply_to_circulating_supply_ratio"),
            ),
            halved_supply: BiMap::new_bin(1, &f("halved_supply")),
            halved_supply_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                &f("halved_supply_to_circulating_supply_ratio"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts the cohort's total supply (in BTC) at `height`, mirroring the
    /// value into the date map on the date's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        state: &SupplyState,
    ) {
        let total_supply = self.supply.height.insert(height, state.supply.to_btc());
        if is_date_last_block {
            self.supply.date.insert(date, total_supply);
        }
    }
    /// Derives the ratio maps from `supply` and the chain-wide
    /// `circulating_supply`.
    //
    // The previous `#[allow(unused_variables)]` was stale — every parameter is
    // used below — so it was removed.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
    ) {
        self.supply_to_circulating_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply, circulating_supply);
        self.halved_supply
            .multi_insert_simple_transform(heights, dates, &mut self.supply, &|v| v / 2.0);
        // (supply / 2) / circulating == (supply / circulating) / 2, so the
        // halved ratio is derived from the full ratio directly.
        self.halved_supply_to_circulating_supply_ratio
            .multi_insert_simple_transform(
                heights,
                dates,
                &mut self.supply_to_circulating_supply_ratio,
                &|v| v / 2.0,
            );
    }
}
impl AnyDataset for SupplySubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // The raw supply is the only inserted map.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.supply]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![&mut self.supply]
    }
    // Maps filled by `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.supply_to_circulating_supply_ratio,
            &self.halved_supply,
            &self.halved_supply_to_circulating_supply_ratio,
        ]
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.supply_to_circulating_supply_ratio,
            &mut self.halved_supply,
            &mut self.halved_supply_to_circulating_supply_ratio,
        ]
    }
}

View File

@@ -0,0 +1,211 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, ComputeData, InsertData, MinInitialStates},
states::UnrealizedState,
structs::{AnyBiMap, BiMap},
};
/// Unrealized profit & loss maps for a cohort, plus derived ratios against its
/// own supply, the circulating supply, and the market cap.
#[derive(Default, Allocative)]
pub struct UnrealizedSubDataset {
    // Lowest heights/dates from which inserts/computes must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    supply_in_profit: BiMap<f64>,
    unrealized_profit: BiMap<f32>,
    unrealized_loss: BiMap<f32>,
    // Computed
    // own supply minus supply_in_profit
    supply_in_loss: BiMap<f64>,
    negative_unrealized_loss: BiMap<f32>,
    net_unrealized_profit_and_loss: BiMap<f32>,
    net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap<f32>,
    supply_in_profit_to_own_supply_ratio: BiMap<f64>,
    supply_in_profit_to_circulating_supply_ratio: BiMap<f64>,
    supply_in_loss_to_own_supply_ratio: BiMap<f64>,
    supply_in_loss_to_circulating_supply_ratio: BiMap<f64>,
}
impl UnrealizedSubDataset {
    /// Imports all unrealized P&L maps stored under `parent_path`, then
    /// records the minimum initial states needed to resume inserts/computes.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        // Prefix every map file with the dataset's folder.
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            supply_in_profit: BiMap::new_bin(1, &f("supply_in_profit")),
            supply_in_loss: BiMap::new_bin(1, &f("supply_in_loss")),
            unrealized_profit: BiMap::new_bin(1, &f("unrealized_profit")),
            unrealized_loss: BiMap::new_bin(1, &f("unrealized_loss")),
            negative_unrealized_loss: BiMap::new_bin(1, &f("negative_unrealized_loss")),
            net_unrealized_profit_and_loss: BiMap::new_bin(1, &f("net_unrealized_profit_and_loss")),
            net_unrealized_profit_and_loss_to_market_cap_ratio: BiMap::new_bin(
                1,
                &f("net_unrealized_profit_and_loss_to_market_cap_ratio"),
            ),
            supply_in_profit_to_own_supply_ratio: BiMap::new_bin(
                1,
                &f("supply_in_profit_to_own_supply_ratio"),
            ),
            supply_in_profit_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                &f("supply_in_profit_to_circulating_supply_ratio"),
            ),
            supply_in_loss_to_own_supply_ratio: BiMap::new_bin(
                1,
                &f("supply_in_loss_to_own_supply_ratio"),
            ),
            supply_in_loss_to_circulating_supply_ratio: BiMap::new_bin(
                1,
                &f("supply_in_loss_to_circulating_supply_ratio"),
            ),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts the block's unrealized values at `height`, and the separate
    /// date-level state on the date's last block.
    ///
    /// Panics if `date_state` is `None` while `is_date_last_block` is true —
    /// the caller must supply it for a date's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            is_date_last_block,
            ..
        }: &InsertData,
        block_state: &UnrealizedState,
        date_state: &Option<UnrealizedState>,
    ) {
        self.supply_in_profit
            .height
            .insert(height, block_state.supply_in_profit.to_btc());
        self.unrealized_profit
            .height
            .insert(height, block_state.unrealized_profit.to_dollar() as f32);
        self.unrealized_loss
            .height
            .insert(height, block_state.unrealized_loss.to_dollar() as f32);
        if is_date_last_block {
            let date_state = date_state.as_ref().unwrap();
            self.supply_in_profit
                .date
                .insert(date, date_state.supply_in_profit.to_btc());
            self.unrealized_profit
                .date
                .insert(date, date_state.unrealized_profit.to_dollar() as f32);
            self.unrealized_loss
                .date
                .insert(date, date_state.unrealized_loss.to_dollar() as f32);
        }
    }
    /// Derives every computed series from the inserted maps, the cohort's
    /// `own_supply`, the chain-wide `circulating_supply` and `market_cap`.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        own_supply: &mut BiMap<f64>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        // supply_in_loss = own supply - supply_in_profit
        self.supply_in_loss.multi_insert_subtract(
            heights,
            dates,
            own_supply,
            &mut self.supply_in_profit,
        );
        // Loss with its sign flipped, for charting below zero.
        self.negative_unrealized_loss.multi_insert_simple_transform(
            heights,
            dates,
            &mut self.unrealized_loss,
            &|v| v * -1.0,
        );
        self.net_unrealized_profit_and_loss.multi_insert_subtract(
            heights,
            dates,
            &mut self.unrealized_profit,
            &mut self.unrealized_loss,
        );
        self.net_unrealized_profit_and_loss_to_market_cap_ratio
            .multi_insert_divide(
                heights,
                dates,
                &mut self.net_unrealized_profit_and_loss,
                market_cap,
            );
        self.supply_in_profit_to_own_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_profit, own_supply);
        self.supply_in_profit_to_circulating_supply_ratio
            .multi_insert_percentage(
                heights,
                dates,
                &mut self.supply_in_profit,
                circulating_supply,
            );
        self.supply_in_loss_to_own_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_loss, own_supply);
        self.supply_in_loss_to_circulating_supply_ratio
            .multi_insert_percentage(heights, dates, &mut self.supply_in_loss, circulating_supply);
    }
}
impl AnyDataset for UnrealizedSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // Maps filled by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.supply_in_profit,
            &self.unrealized_profit,
            &self.unrealized_loss,
        ]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.supply_in_profit,
            &mut self.unrealized_profit,
            &mut self.unrealized_loss,
        ]
    }
    // Maps filled by `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.supply_in_loss,
            &self.negative_unrealized_loss,
            &self.net_unrealized_profit_and_loss,
            &self.net_unrealized_profit_and_loss_to_market_cap_ratio,
            &self.supply_in_profit_to_own_supply_ratio,
            &self.supply_in_profit_to_circulating_supply_ratio,
            &self.supply_in_loss_to_own_supply_ratio,
            &self.supply_in_loss_to_circulating_supply_ratio,
        ]
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.supply_in_loss,
            &mut self.negative_unrealized_loss,
            &mut self.net_unrealized_profit_and_loss,
            &mut self.net_unrealized_profit_and_loss_to_market_cap_ratio,
            &mut self.supply_in_profit_to_own_supply_ratio,
            &mut self.supply_in_profit_to_circulating_supply_ratio,
            &mut self.supply_in_loss_to_own_supply_ratio,
            &mut self.supply_in_loss_to_circulating_supply_ratio,
        ]
    }
}

View File

@@ -0,0 +1,63 @@
use allocative::Allocative;
use crate::{
datasets::{AnyDataset, InsertData, MinInitialStates},
states::UTXOState,
structs::{AnyBiMap, BiMap},
};
/// A cohort's UTXO count over time.
#[derive(Default, Allocative)]
pub struct UTXOSubDataset {
    // Lowest heights/dates from which inserts must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    count: BiMap<usize>,
}
impl UTXOSubDataset {
    /// Imports the UTXO-count map stored under `parent_path`, then records
    /// the minimum initial states needed to resume inserts.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let path = |name: &str| format!("{parent_path}/{name}");
        let mut dataset = Self {
            min_initial_states: MinInitialStates::default(),
            count: BiMap::new_bin(1, &path("utxo_count")),
        };
        let computed = MinInitialStates::compute_from_dataset(&dataset);
        dataset.min_initial_states.consume(computed);
        Ok(dataset)
    }
    /// Records the cohort's UTXO count at `height`; the same value is
    /// mirrored into the date map when this is the date's last block.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            is_date_last_block,
            date,
            ..
        }: &InsertData,
        state: &UTXOState,
    ) {
        let inserted_count = self.count.height.insert(height, state.count);
        if !is_date_last_block {
            return;
        }
        self.count.date.insert(date, inserted_count);
    }
}
impl AnyDataset for UTXOSubDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    /// The UTXO count is the only inserted map; nothing is computed.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        let map: &(dyn AnyBiMap + Send + Sync) = &self.count;
        vec![map]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        let map: &mut dyn AnyBiMap = &mut self.count;
        vec![map]
    }
}

View File

@@ -0,0 +1,257 @@
use allocative::Allocative;
use crate::{
datasets::InsertData,
structs::{AnyBiMap, BiMap, HeightMap},
utils::{ONE_DAY_IN_S, ONE_MONTH_IN_DAYS, ONE_WEEK_IN_DAYS, ONE_YEAR_IN_DAYS},
};
use super::{AnyDataset, ComputeData, MinInitialStates};
/// Chain-wide transaction counts and volumes, with moving averages and
/// velocity-style derived series.
#[derive(Allocative)]
pub struct TransactionDataset {
    // Lowest heights/dates from which inserts/computes must (re)start.
    min_initial_states: MinInitialStates,
    // Inserted
    pub count: BiMap<usize>,
    pub volume: BiMap<f64>,
    pub volume_in_dollars: BiMap<f32>,
    // Average sent
    // Average sent in dollars
    // Median sent
    // Median sent in dollars
    // Min
    // Max
    // 10th 25th 75th 90th percentiles
    // type
    // version
    // Computed
    pub count_1w_sma: BiMap<f32>,
    pub count_1m_sma: BiMap<f32>,
    pub volume_1w_sma: BiMap<f32>,
    pub volume_1m_sma: BiMap<f32>,
    pub volume_in_dollars_1w_sma: BiMap<f32>,
    pub volume_in_dollars_1m_sma: BiMap<f32>,
    // Rolling one-year sums of the volume maps.
    pub annualized_volume: BiMap<f32>,
    pub annualized_volume_in_dollars: BiMap<f32>,
    // annualized volume / circulating supply
    pub velocity: BiMap<f32>,
    pub transactions_per_second: BiMap<f32>,
    pub transactions_per_second_1w_sma: BiMap<f32>,
    pub transactions_per_second_1m_sma: BiMap<f32>,
}
impl TransactionDataset {
    /// Imports all transaction maps stored under `parent_path`, then records
    /// the minimum initial states needed to resume inserts/computes.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        // Prefix every map file with the dataset's folder.
        let f = |s: &str| format!("{parent_path}/{s}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            count: BiMap::new_bin(1, &f("transaction_count")),
            count_1w_sma: BiMap::new_bin(1, &f("transaction_count_1w_sma")),
            count_1m_sma: BiMap::new_bin(1, &f("transaction_count_1m_sma")),
            volume: BiMap::new_bin(1, &f("transaction_volume")),
            volume_1w_sma: BiMap::new_bin(1, &f("transaction_volume_1w_sma")),
            volume_1m_sma: BiMap::new_bin(1, &f("transaction_volume_1m_sma")),
            volume_in_dollars: BiMap::new_bin(1, &f("transaction_volume_in_dollars")),
            volume_in_dollars_1w_sma: BiMap::new_bin(1, &f("transaction_volume_in_dollars_1w_sma")),
            volume_in_dollars_1m_sma: BiMap::new_bin(1, &f("transaction_volume_in_dollars_1m_sma")),
            annualized_volume: BiMap::new_bin(1, &f("annualized_transaction_volume")),
            // NOTE(review): version 2 here (others are 1) — presumably bumped
            // to force a recompute of this map; confirm.
            annualized_volume_in_dollars: BiMap::new_bin(
                2,
                &f("annualized_transaction_volume_in_dollars"),
            ),
            velocity: BiMap::new_bin(1, &f("transaction_velocity")),
            transactions_per_second: BiMap::new_bin(1, &f("transactions_per_second")),
            transactions_per_second_1w_sma: BiMap::new_bin(1, &f("transactions_per_second_1w_sma")),
            transactions_per_second_1m_sma: BiMap::new_bin(1, &f("transactions_per_second_1m_sma")),
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Inserts the block's transaction count and volume (BTC and dollars) at
    /// `height`; on the date's last block the date values are the sums over
    /// the date's block range.
    pub fn insert(
        &mut self,
        &InsertData {
            height,
            date,
            amount_sent,
            transaction_count,
            is_date_last_block,
            date_blocks_range,
            block_price,
            ..
        }: &InsertData,
    ) {
        self.count.height.insert(height, transaction_count);
        self.volume.height.insert(height, amount_sent.to_btc());
        self.volume_in_dollars
            .height
            .insert(height, (block_price * amount_sent).to_dollar() as f32);
        if is_date_last_block {
            self.count.date_insert_sum_range(date, date_blocks_range);
            self.volume.date_insert_sum_range(date, date_blocks_range);
            self.volume_in_dollars
                .date_insert_sum_range(date, date_blocks_range);
        }
    }
    /// Derives all moving averages, annualized sums, velocity and TPS series.
    pub fn compute(
        &mut self,
        &ComputeData { heights, dates }: &ComputeData,
        circulating_supply: &mut BiMap<f64>,
        block_interval: &mut HeightMap<u32>,
    ) {
        self.count_1w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.count,
            ONE_WEEK_IN_DAYS,
        );
        self.count_1m_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.count,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_1w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.volume,
            ONE_WEEK_IN_DAYS,
        );
        self.volume_1m_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.volume,
            ONE_MONTH_IN_DAYS,
        );
        self.volume_in_dollars_1w_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.volume_in_dollars,
            ONE_WEEK_IN_DAYS,
        );
        self.volume_in_dollars_1m_sma.multi_insert_simple_average(
            heights,
            dates,
            &mut self.volume_in_dollars,
            ONE_MONTH_IN_DAYS,
        );
        self.annualized_volume.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.volume,
            ONE_YEAR_IN_DAYS,
        );
        self.annualized_volume_in_dollars.multi_insert_last_x_sum(
            heights,
            dates,
            &mut self.volume_in_dollars,
            ONE_YEAR_IN_DAYS,
        );
        // velocity = annualized volume / circulating supply
        self.velocity.multi_insert_divide(
            heights,
            dates,
            &mut self.annualized_volume,
            circulating_supply,
        );
        // Per-height TPS uses the actual block interval as denominator…
        self.transactions_per_second.height.multi_insert_divide(
            heights,
            &mut self.count.height,
            block_interval,
        );
        // …while per-date TPS divides the daily count by a fixed day length.
        self.transactions_per_second
            .date
            .multi_insert_simple_transform(dates, &mut self.count.date, |count| {
                count as f32 / ONE_DAY_IN_S as f32
            });
        self.transactions_per_second_1w_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_WEEK_IN_DAYS,
            );
        self.transactions_per_second_1m_sma
            .multi_insert_simple_average(
                heights,
                dates,
                &mut self.transactions_per_second,
                ONE_MONTH_IN_DAYS,
            );
    }
}
impl AnyDataset for TransactionDataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    // Maps filled by `insert`.
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![&self.count, &self.volume, &self.volume_in_dollars]
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.count,
            &mut self.volume,
            &mut self.volume_in_dollars,
        ]
    }
    // Maps filled by `compute`.
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        vec![
            &self.count_1w_sma,
            &self.count_1m_sma,
            &self.volume_1w_sma,
            &self.volume_1m_sma,
            &self.volume_in_dollars_1w_sma,
            &self.volume_in_dollars_1m_sma,
            &self.annualized_volume,
            &self.annualized_volume_in_dollars,
            &self.velocity,
            &self.transactions_per_second,
            &self.transactions_per_second_1w_sma,
            &self.transactions_per_second_1m_sma,
        ]
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        vec![
            &mut self.count_1w_sma,
            &mut self.count_1m_sma,
            &mut self.volume_1w_sma,
            &mut self.volume_1m_sma,
            &mut self.volume_in_dollars_1w_sma,
            &mut self.volume_in_dollars_1m_sma,
            &mut self.annualized_volume,
            &mut self.annualized_volume_in_dollars,
            &mut self.velocity,
            &mut self.transactions_per_second,
            &mut self.transactions_per_second_1w_sma,
            &mut self.transactions_per_second_1m_sma,
        ]
    }
}

View File

@@ -0,0 +1,287 @@
use allocative::Allocative;
use itertools::Itertools;
use crate::{
datasets::{
AnyDataset, AnyDatasetGroup, ComputeData, InsertData, MinInitialStates, SubDataset,
},
states::UTXOCohortId,
structs::{AnyBiMap, AnyDateMap, AnyHeightMap, BiMap, WNaiveDate},
};
/// All sub-datasets for a single UTXO age cohort, identified by `id`.
#[derive(Default, Allocative)]
pub struct UTXODataset {
    // Which UTXO cohort this dataset tracks.
    id: UTXOCohortId,
    // Lowest heights/dates from which inserts/computes must (re)start.
    min_initial_states: MinInitialStates,
    pub subs: SubDataset,
}
impl UTXODataset {
    /// Imports the cohort's sub-datasets from `{parent_path}/{cohort name}`,
    /// then records the minimum initial states needed to resume work.
    pub fn import(parent_path: &str, id: UTXOCohortId) -> color_eyre::Result<Self> {
        let name = id.name();
        let folder_path = format!("{parent_path}/{name}");
        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            id,
            subs: SubDataset::import(&folder_path)?,
        };
        s.min_initial_states
            .consume(MinInitialStates::compute_from_dataset(&s));
        Ok(s)
    }
    /// Routes `insert_data` to each sub-dataset that still needs values for
    /// this height/date, pulling the cohort's slice out of the shared states.
    ///
    /// NOTE(review): `states.utxo_cohorts_durable_states.get(&self.id)` (and
    /// the one-shot/sent lookups) is repeated per sub-insert; hoisting it once
    /// would avoid the duplicate lookups.
    pub fn insert(&mut self, insert_data: &InsertData) {
        let &InsertData {
            states,
            utxo_cohorts_one_shot_states,
            // utxo_cohorts_received_states,
            utxo_cohorts_sent_states,
            ..
        } = insert_data;
        if self.needs_insert_supply(insert_data.height, insert_data.date) {
            self.subs.supply.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .get(&self.id)
                    .durable_states
                    .supply_state,
            );
        }
        if self.needs_insert_utxo(insert_data.height, insert_data.date) {
            self.subs.utxo.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .get(&self.id)
                    .durable_states
                    .utxo_state,
            );
        }
        if self.needs_insert_capitalization(insert_data.height, insert_data.date) {
            self.subs.capitalization.insert(
                insert_data,
                &states
                    .utxo_cohorts_durable_states
                    .get(&self.id)
                    .durable_states
                    .capitalization_state,
            );
        }
        if self.needs_insert_unrealized(insert_data.height, insert_data.date) {
            self.subs.unrealized.insert(
                insert_data,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_block_state,
                &utxo_cohorts_one_shot_states
                    .get(&self.id)
                    .unrealized_date_state,
            );
        }
        if self.needs_insert_price_paid(insert_data.height, insert_data.date) {
            self.subs.price_paid.insert(
                insert_data,
                &utxo_cohorts_one_shot_states.get(&self.id).price_paid_state,
            );
        }
        if self.needs_insert_realized(insert_data.height, insert_data.date) {
            self.subs.realized.insert(
                insert_data,
                &utxo_cohorts_sent_states.get(&self.id).realized,
            );
        }
        if self.needs_insert_input(insert_data.height, insert_data.date) {
            self.subs
                .input
                .insert(insert_data, &utxo_cohorts_sent_states.get(&self.id).input);
        }
        // TODO: move output from common to address
        // if self.subs.output.needs_insert(insert_data) {
        //     self.subs
        //         .output
        //         .insert(insert_data, utxo_cohorts_received_states.get(&self.id));
        // }
    }
    // Per-sub-dataset "still missing data for this height/date?" checks.
    pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.utxo.needs_insert(height, date)
    }
    pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.capitalization.needs_insert(height, date)
    }
    pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.supply.needs_insert(height, date)
    }
    pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.price_paid.needs_insert(height, date)
    }
    pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.realized.needs_insert(height, date)
    }
    pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.unrealized.needs_insert(height, date)
    }
    pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
        self.subs.input.needs_insert(height, date)
    }
    /// Runs each sub-dataset's `compute` pass when it still has work to do.
    ///
    /// `unrealized` must run against this cohort's own supply, so it borrows
    /// `self.subs.supply.supply`; so does `capitalization`.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        if self.subs.supply.should_compute(compute_data) {
            self.subs.supply.compute(compute_data, circulating_supply);
        }
        if self.subs.unrealized.should_compute(compute_data) {
            self.subs.unrealized.compute(
                compute_data,
                &mut self.subs.supply.supply,
                circulating_supply,
                market_cap,
            );
        }
        if self.subs.realized.should_compute(compute_data) {
            self.subs.realized.compute(compute_data, market_cap);
        }
        if self.subs.capitalization.should_compute(compute_data) {
            self.subs
                .capitalization
                .compute(compute_data, closes, &mut self.subs.supply.supply);
        }
        // if self.subs.output.should_compute(compute_data) {
        //     self.subs
        //         .output
        //         .compute(compute_data, &mut self.subs.supply.total);
        // }
    }
}
// Every map accessor delegates to the cohort's sub-datasets, flattening their
// per-sub-dataset vectors into one.
impl AnyDataset for UTXODataset {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }
    fn to_inserted_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_height_map_vec())
            .collect_vec()
    }
    fn to_inserted_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_date_map_vec())
            .collect_vec()
    }
    fn to_inserted_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_bi_map_vec())
            .collect_vec()
    }
    fn to_inserted_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_height_map_vec())
            .collect_vec()
    }
    fn to_inserted_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_date_map_vec())
            .collect_vec()
    }
    fn to_inserted_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_inserted_mut_bi_map_vec())
            .collect_vec()
    }
    fn to_computed_height_map_vec(&self) -> Vec<&(dyn AnyHeightMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_height_map_vec())
            .collect_vec()
    }
    fn to_computed_date_map_vec(&self) -> Vec<&(dyn AnyDateMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_date_map_vec())
            .collect_vec()
    }
    fn to_computed_bi_map_vec(&self) -> Vec<&(dyn AnyBiMap + Send + Sync)> {
        self.subs
            .as_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_bi_map_vec())
            .collect_vec()
    }
    fn to_computed_mut_height_map_vec(&mut self) -> Vec<&mut dyn AnyHeightMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_height_map_vec())
            .collect_vec()
    }
    fn to_computed_mut_date_map_vec(&mut self) -> Vec<&mut dyn AnyDateMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_date_map_vec())
            .collect_vec()
    }
    fn to_computed_mut_bi_map_vec(&mut self) -> Vec<&mut dyn AnyBiMap> {
        self.subs
            .as_mut_vec()
            .into_iter()
            .flat_map(|d| d.to_computed_mut_bi_map_vec())
            .collect_vec()
    }
}

View File

@@ -0,0 +1,162 @@
mod dataset;
use allocative::Allocative;
use dataset::*;
use rayon::prelude::*;
use itertools::Itertools;
use crate::{
datasets::AnyDatasets,
states::{SplitByUTXOCohort, UTXOCohortId},
structs::{BiMap, WNaiveDate},
};
use super::{AnyDataset, ComputeData, InsertData, MinInitialStates};
/// Collection of per-cohort UTXO datasets, split by `SplitByUTXOCohort`.
#[derive(Allocative)]
pub struct UTXODatasets {
    // Minimum initial state aggregated across all cohorts (see `import`).
    min_initial_states: MinInitialStates,
    // One full `UTXODataset` per UTXO cohort.
    cohorts: SplitByUTXOCohort<UTXODataset>,
}
impl UTXODatasets {
    /// Imports every cohort dataset from `parent_path` (in parallel via
    /// rayon), then derives the aggregated minimum initial states.
    pub fn import(parent_path: &str) -> color_eyre::Result<Self> {
        let mut cohorts = SplitByUTXOCohort::<UTXODataset>::default();

        // Imports run in parallel; the results are then written back
        // sequentially so `cohorts` is only mutated after the parallel phase.
        cohorts
            .as_vec()
            .into_par_iter()
            .map(|(_, id)| (id, UTXODataset::import(parent_path, id)))
            .collect::<Vec<_>>()
            .into_iter()
            .try_for_each(|(id, dataset)| -> color_eyre::Result<()> {
                *cohorts.get_mut(&id) = dataset?;
                Ok(())
            })?;

        let mut s = Self {
            min_initial_states: MinInitialStates::default(),
            cohorts,
        };

        s.min_initial_states
            .consume(MinInitialStates::compute_from_datasets(&s));

        Ok(s)
    }

    /// Forwards the parsed block data to every cohort.
    pub fn insert(&mut self, insert_data: &InsertData) {
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| cohort.insert(insert_data))
    }

    /// True when any cohort still needs data whose computation requires the
    /// durable (persistent) states at this height/date.
    pub fn needs_durable_states(&self, height: usize, date: WNaiveDate) -> bool {
        let needs_insert_utxo = self.needs_insert_utxo(height, date);
        let needs_insert_capitalization = self.needs_insert_capitalization(height, date);
        let needs_insert_supply = self.needs_insert_supply(height, date);
        let needs_one_shot_states = self.needs_one_shot_states(height, date);

        needs_insert_utxo
            || needs_insert_capitalization
            || needs_insert_supply
            || needs_one_shot_states
    }

    /// True when any cohort still needs the recomputable one-shot states.
    pub fn needs_one_shot_states(&self, height: usize, date: WNaiveDate) -> bool {
        self.needs_insert_price_paid(height, date) || self.needs_insert_unrealized(height, date)
    }

    /// True when any cohort still needs states derived from spent outputs.
    pub fn needs_sent_states(&self, height: usize, date: WNaiveDate) -> bool {
        self.needs_insert_input(height, date) || self.needs_insert_realized(height, date)
    }

    // The following `needs_insert_*` helpers simply ask whether ANY cohort
    // still lacks the corresponding data at this height/date.

    pub fn needs_insert_utxo(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_utxo(height, date))
    }

    pub fn needs_insert_capitalization(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_capitalization(height, date))
    }

    pub fn needs_insert_supply(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_supply(height, date))
    }

    pub fn needs_insert_price_paid(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_price_paid(height, date))
    }

    pub fn needs_insert_realized(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_realized(height, date))
    }

    pub fn needs_insert_unrealized(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_unrealized(height, date))
    }

    pub fn needs_insert_input(&self, height: usize, date: WNaiveDate) -> bool {
        self.as_vec()
            .iter()
            .any(|(dataset, _)| dataset.needs_insert_input(height, date))
    }

    /// Runs each cohort's `compute` pass against the shared maps.
    pub fn compute(
        &mut self,
        compute_data: &ComputeData,
        closes: &mut BiMap<f32>,
        circulating_supply: &mut BiMap<f64>,
        market_cap: &mut BiMap<f32>,
    ) {
        self.cohorts
            .as_mut_vec()
            .into_iter()
            .for_each(|(cohort, _)| {
                cohort.compute(compute_data, closes, circulating_supply, market_cap)
            })
    }

    fn as_vec(&self) -> Vec<(&UTXODataset, UTXOCohortId)> {
        self.cohorts.as_vec()
    }

    fn as_mut_vec(&mut self) -> Vec<(&mut UTXODataset, UTXOCohortId)> {
        self.cohorts.as_mut_vec()
    }
}
impl AnyDatasets for UTXODatasets {
    fn get_min_initial_states(&self) -> &MinInitialStates {
        &self.min_initial_states
    }

    /// Upcasts each cohort dataset to the shared `AnyDataset` interface.
    fn to_any_dataset_vec(&self) -> Vec<&(dyn AnyDataset + Send + Sync)> {
        self.as_vec()
            .into_iter()
            .map(|(dataset, _id)| -> &(dyn AnyDataset + Send + Sync) { dataset })
            .collect()
    }

    fn to_mut_any_dataset_vec(&mut self) -> Vec<&mut dyn AnyDataset> {
        self.as_mut_vec()
            .into_iter()
            .map(|(dataset, _id)| -> &mut dyn AnyDataset { dataset })
            .collect()
    }
}

43
parser/src/io/binary.rs Normal file
View File

@@ -0,0 +1,43 @@
use std::{
fmt::Debug,
fs::File,
io::{BufReader, BufWriter},
};
use bincode::{config, decode_from_std_read, encode_into_std_write, Decode, Encode};
/// Stateless namespace for bincode-backed binary (de)serialization helpers.
pub struct Binary;
impl Binary {
    /// Reads and bincode-decodes a value of type `T` from the file at `path`.
    ///
    /// # Errors
    /// Fails if the file cannot be opened or the contents do not decode as `T`.
    pub fn import<T>(path: &str) -> color_eyre::Result<T>
    where
        T: Decode,
    {
        let config = config::standard();
        let file = File::open(path)?;
        let mut reader = BufReader::new(file);
        let decoded = decode_from_std_read(&mut reader, config)?;
        Ok(decoded)
    }

    /// Bincode-encodes `value` and writes it to `path` (truncating any
    /// existing file).
    ///
    /// # Errors
    /// Fails if the file cannot be created or encoding/writing fails.
    pub fn export<T>(path: &str, value: &T) -> color_eyre::Result<()>
    where
        T: Debug + Encode,
    {
        let config = config::standard();
        // Fix: the original `dbg!(path, value)` was a leftover debug macro
        // that Debug-printed the ENTIRE value being exported (potentially
        // huge) on failure. Log only the path and the error instead.
        let file = File::create(path)
            .inspect_err(|error| eprintln!("Binary::export: cannot create {path}: {error}"))?;
        let mut writer = BufWriter::new(file);
        encode_into_std_write(value, &mut writer, config)?;
        // NOTE(review): `BufWriter` flushes on drop but swallows flush
        // errors; consider importing `std::io::Write` and flushing explicitly.
        Ok(())
    }
}

2
parser/src/io/consts.rs Normal file
View File

@@ -0,0 +1,2 @@
pub const IMPORTS_FOLDER_PATH: &str = "./imports";
pub const OUTPUTS_FOLDER_PATH: &str = "./target/outputs";

37
parser/src/io/json.rs Normal file
View File

@@ -0,0 +1,37 @@
use std::{
fs::File,
io::{BufReader, BufWriter},
};
use serde::{de::DeserializeOwned, Serialize};
/// Stateless namespace for serde_json-backed (de)serialization helpers.
pub struct Json;
impl Json {
    /// Reads and deserializes a JSON value of type `T` from the file at `path`.
    ///
    /// # Errors
    /// Fails if the file cannot be opened or its contents are not valid `T`.
    pub fn import<T>(path: &str) -> color_eyre::Result<T>
    where
        T: DeserializeOwned,
    {
        let file = File::open(path)?;
        let reader = BufReader::new(file);
        Ok(serde_json::from_reader(reader)?)
    }

    /// Serializes `value` as pretty-printed JSON into the file at `path`.
    ///
    /// # Errors
    /// Fails if the file cannot be created or serialization fails.
    pub fn export<T>(path: &str, value: &T) -> color_eyre::Result<()>
    where
        T: Serialize,
    {
        // Fix: the original panicked with a generic "No such file or
        // directory" message on ANY create failure, discarding the actual
        // io::Error (permissions, missing parent dir, ...). The function
        // already returns a Result, so propagate instead.
        let file = File::create(path)?;
        let mut writer = BufWriter::new(file);
        serde_json::to_writer_pretty(&mut writer, value)?;
        Ok(())
    }
}

11
parser/src/io/mod.rs Normal file
View File

@@ -0,0 +1,11 @@
mod binary;
mod consts;
mod json;
mod path;
mod serialization;
pub use binary::*;
pub use consts::*;
pub use json::*;
pub use path::*;
pub use serialization::*;

3
parser/src/io/path.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Converts every dash, underscore, and space in `path` into a `/` path
/// separator, leaving all other characters untouched.
pub fn format_path(path: &str) -> String {
    path.chars()
        .map(|c| if matches!(c, '-' | '_' | ' ') { '/' } else { c })
        .collect()
}

View File

@@ -0,0 +1,55 @@
use std::fmt::Debug;
use allocative::Allocative;
use bincode::{Decode, Encode};
use serde::{de::DeserializeOwned, Serialize};
use crate::io::{Binary, Json};
/// On-disk encoding used when importing/exporting a value.
#[derive(PartialEq, PartialOrd, Ord, Eq, Debug, Clone, Copy, Default, Allocative)]
pub enum Serialization {
    /// bincode-encoded `.bin` file (the default).
    #[default]
    Binary,
    /// Pretty-printed `.json` file.
    Json,
}
impl Serialization {
    /// File extension (without the dot) associated with this format.
    pub fn to_extension(&self) -> &str {
        match *self {
            Self::Binary => "bin",
            Self::Json => "json",
        }
    }

    /// Inverse of [`Self::to_extension`].
    ///
    /// # Panics
    /// Panics on any extension other than `bin` or `json`.
    pub fn from_extension(extension: &str) -> Self {
        match extension {
            "bin" => Self::Binary,
            "json" => Self::Json,
            _ => panic!("Extension \"{extension}\" isn't supported"),
        }
    }

    /// Returns `path` with this format's extension appended.
    pub fn append_extension(&self, path: &str) -> String {
        let extension = self.to_extension();
        format!("{path}.{extension}")
    }

    /// Deserializes a `T` from `path` using this format's backend.
    pub fn import<T>(&self, path: &str) -> color_eyre::Result<T>
    where
        T: Debug + DeserializeOwned + Decode,
    {
        match *self {
            Self::Binary => Binary::import(path),
            Self::Json => Json::import(path),
        }
    }

    /// Serializes `value` into `path` using this format's backend.
    pub fn export<T>(&self, path: &str, value: &T) -> color_eyre::Result<()>
    where
        T: Debug + Serialize + Encode,
    {
        match *self {
            Self::Binary => Binary::export(path, value),
            Self::Json => Json::export(path, value),
        }
    }
}

21
parser/src/lib.rs Normal file
View File

@@ -0,0 +1,21 @@
mod actions;
mod bitcoin;
mod databases;
mod datasets;
mod io;
mod price;
mod states;
mod structs;
mod utils;
pub use crate::{
actions::iter_blocks,
bitcoin::{BitcoinDB, BitcoinDaemon},
datasets::OHLC,
io::{Binary, Json, Serialization},
structs::{
DateMap, HeightMap, SerializedDateMap, SerializedHeightMap, WNaiveDate,
HEIGHT_MAP_CHUNK_SIZE,
},
utils::log,
};

41
parser/src/main.rs Normal file
View File

@@ -0,0 +1,41 @@
use std::{env::args, path::Path};
use itertools::Itertools;
use parser::{iter_blocks, log, BitcoinDB, BitcoinDaemon};
/// Entry point: alternates between parsing the local Bitcoin database
/// (with bitcoind stopped) and letting the daemon sync new blocks.
///
/// Expects one CLI argument: the path to the bitcoin data directory.
fn main() -> color_eyre::Result<()> {
    let args = args().collect_vec();

    // Fix: a clear message instead of a bare `unwrap()` on user input.
    let bitcoin_dir_path = args
        .get(1)
        .expect("missing argument: path to the bitcoin data directory");

    color_eyre::install()?;

    let daemon = BitcoinDaemon::new(bitcoin_dir_path);

    loop {
        daemon.stop();

        // Scoped to free bitcoin's lock before restarting the daemon below.
        let block_count = {
            let bitcoin_db = BitcoinDB::new(Path::new(bitcoin_dir_path), true)?;

            let block_count = bitcoin_db.get_block_count();
            log(&format!("{block_count} blocks found."));

            iter_blocks(&bitcoin_db, block_count)?;

            block_count
        };

        daemon.start();

        if daemon.check_if_fully_synced() {
            daemon.wait_for_new_block(block_count - 1);
        } else {
            daemon.wait_sync();
        }
    }
}

201
parser/src/price/binance.rs Normal file
View File

@@ -0,0 +1,201 @@
#![allow(dead_code)]
use std::{collections::BTreeMap, path::Path};
use color_eyre::eyre::ContextCompat;
use itertools::Itertools;
use serde_json::Value;
use crate::{
datasets::OHLC,
io::{Json, IMPORTS_FOLDER_PATH},
structs::WNaiveDate,
utils::{log, retry},
};
/// Fetches/parses BTC-USDT OHLC price data from the Binance API.
pub struct Binance;
impl Binance {
    /// Parses a previously captured `imports/binance.har` file and extracts
    /// every kline returned by `/uiKlines` requests, keyed by Unix seconds.
    ///
    /// Missing/invalid file yields an empty map (best-effort import).
    pub fn read_har_file() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        log("binance: read har file");

        let path_binance_har = Path::new(IMPORTS_FOLDER_PATH).join("binance.har");

        let json: BTreeMap<String, Value> =
            Json::import(path_binance_har.to_str().unwrap()).unwrap_or_default();

        Ok(json
            .get("log")
            .context("Expect object to have log attribute")?
            .as_object()
            .context("Expect to be an object")?
            .get("entries")
            .context("Expect object to have entries")?
            .as_array()
            .context("Expect to be an array")?
            .iter()
            .filter(|entry| {
                entry
                    .as_object()
                    .unwrap()
                    .get("request")
                    .unwrap()
                    .as_object()
                    .unwrap()
                    .get("url")
                    .unwrap()
                    .as_str()
                    .unwrap()
                    .contains("/uiKlines")
            })
            .flat_map(|entry| {
                let response = entry
                    .as_object()
                    .unwrap()
                    .get("response")
                    .unwrap()
                    .as_object()
                    .unwrap();

                let content = response.get("content").unwrap().as_object().unwrap();

                let text = content.get("text");

                if text.is_none() {
                    return vec![];
                }

                let text = text.unwrap().as_str().unwrap();

                let arrays: Value = serde_json::from_str(text).unwrap();

                arrays
                    .as_array()
                    .unwrap()
                    .iter()
                    .map(|array| {
                        let array = array.as_array().unwrap();

                        // Kline open time is in milliseconds; store seconds.
                        let timestamp = (array.first().unwrap().as_u64().unwrap() / 1000) as u32;

                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };

                        (
                            timestamp,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect_vec()
            })
            .collect::<BTreeMap<_, _>>())
    }

    /// Fetches the latest 1000 one-minute BTCUSDT klines, keyed by Unix seconds.
    pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        log("binance: fetch 1mn");

        retry(
            || {
                let body: Value = reqwest::blocking::get(
                    "https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1m&limit=1000",
                )?
                .json()?;

                Ok(body
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| {
                        // [timestamp, open, high, low, close, volume, ...]
                        let array = value.as_array().unwrap();

                        // BUG FIX: Binance kline open times are milliseconds.
                        // The previous `as u64().unwrap() as u32` cast raw
                        // milliseconds (~1.7e12) to u32, which overflows and
                        // truncates. Divide by 1000 first (as `read_har_file`
                        // already does), then narrow.
                        let timestamp = (array.first().unwrap().as_u64().unwrap() / 1000) as u32;

                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };

                        (
                            timestamp,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect::<BTreeMap<_, _>>())
            },
            10,
            5,
        )
    }

    /// Fetches daily BTCUSDT klines, keyed by date.
    pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<WNaiveDate, OHLC>> {
        log("binance: fetch 1d");

        retry(
            || {
                let body: Value = reqwest::blocking::get(
                    "https://api.binance.com/api/v3/uiKlines?symbol=BTCUSDT&interval=1d",
                )?
                .json()?;

                Ok(body
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| {
                        // [timestamp, open, high, low, close, volume, ...]
                        let array = value.as_array().unwrap();

                        // BUG FIX: the previous `as u32 / 1000` narrowed the
                        // millisecond value to u32 BEFORE dividing, truncating
                        // it and producing garbage dates. Divide in u64 first.
                        let date = WNaiveDate::from_timestamp(
                            (array.first().unwrap().as_u64().unwrap() / 1000) as u32,
                        );

                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };

                        (
                            date,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect::<BTreeMap<_, _>>())
            },
            10,
            5,
        )
    }
}

124
parser/src/price/kraken.rs Normal file
View File

@@ -0,0 +1,124 @@
use std::collections::BTreeMap;
use color_eyre::eyre::ContextCompat;
use serde_json::Value;
use crate::{
datasets::OHLC,
structs::WNaiveDate,
utils::{log, retry},
};
/// Fetches BTC-USD OHLC price data from the Kraken public API.
pub struct Kraken;
impl Kraken {
    /// Fetches recent one-minute XBT/USD candles, keyed by timestamp.
    ///
    /// NOTE(review): timestamps are used as-is (no ms→s division, unlike the
    /// Binance path), so Kraken presumably returns Unix seconds — confirm
    /// against the API docs.
    pub fn fetch_1mn_prices() -> color_eyre::Result<BTreeMap<u32, OHLC>> {
        log("kraken: fetch 1mn");

        // `retry` is called with (10, 5); see its definition for the meaning
        // of the two arguments (attempt count / delay).
        retry(
            || {
                let body: Value = reqwest::blocking::get(
                    "https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1",
                )?
                .json()?;

                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("result")
                    .context("Expect object to have result")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("XXBTZUSD")
                    .context("Expect to have XXBTZUSD")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| {
                        // Each entry: [time, open, high, low, close, ...]
                        // (prices are strings, hence the parse below).
                        let array = value.as_array().unwrap();

                        let timestamp = array.first().unwrap().as_u64().unwrap() as u32;

                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };

                        (
                            timestamp,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect::<BTreeMap<_, _>>())
            },
            10,
            5,
        )
    }

    /// Fetches daily XBT/USD candles, keyed by date.
    pub fn fetch_daily_prices() -> color_eyre::Result<BTreeMap<WNaiveDate, OHLC>> {
        // NOTE(review): message style differs from the sibling functions
        // ("kraken: fetch 1mn") — consider harmonizing.
        log("fetch kraken daily");

        retry(
            || {
                let body: Value = reqwest::blocking::get(
                    "https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=1440",
                )?
                .json()?;

                Ok(body
                    .as_object()
                    .context("Expect to be an object")?
                    .get("result")
                    .context("Expect object to have result")?
                    .as_object()
                    .context("Expect to be an object")?
                    .get("XXBTZUSD")
                    .context("Expect to have XXBTZUSD")?
                    .as_array()
                    .context("Expect to be an array")?
                    .iter()
                    .map(|value| {
                        let array = value.as_array().unwrap();

                        let date = WNaiveDate::from_timestamp(
                            array.first().unwrap().as_u64().unwrap() as u32,
                        );

                        let get_f32 = |index: usize| {
                            array
                                .get(index)
                                .unwrap()
                                .as_str()
                                .unwrap()
                                .parse::<f32>()
                                .unwrap()
                        };

                        (
                            date,
                            OHLC {
                                open: get_f32(1),
                                high: get_f32(2),
                                low: get_f32(3),
                                close: get_f32(4),
                            },
                        )
                    })
                    .collect::<BTreeMap<_, _>>())
            },
            10,
            5,
        )
    }
}

5
parser/src/price/mod.rs Normal file
View File

@@ -0,0 +1,5 @@
mod binance;
mod kraken;
pub use binance::*;
pub use kraken::*;

View File

@@ -0,0 +1,47 @@
use std::{fmt::Debug, fs, io};
use bincode::{Decode, Encode};
use crate::io::{Binary, OUTPUTS_FOLDER_PATH};
// https://github.com/djkoloski/rust_serialization_benchmark
/// Persistence contract for bincode-serialized state snapshots stored under
/// `OUTPUTS_FOLDER_PATH/states/<name>.bin`.
pub trait AnyState
where
    Self: Debug + Encode + Decode,
{
    /// File stem used for this state's on-disk snapshot.
    fn name<'a>() -> &'a str;

    /// Empties the in-memory state.
    fn clear(&mut self);

    fn create_dir_all() -> color_eyre::Result<(), io::Error> {
        fs::create_dir_all(Self::folder_path())
    }

    fn folder_path() -> String {
        format!("{OUTPUTS_FOLDER_PATH}/states")
    }

    fn full_path() -> String {
        format!("{}/{}.bin", Self::folder_path(), Self::name())
    }

    /// Clears the in-memory state and deletes the snapshot file.
    fn reset(&mut self) -> color_eyre::Result<(), io::Error> {
        self.clear();

        fs::remove_file(Self::full_path())
    }

    /// Loads the snapshot from disk, creating the folder first if needed.
    fn import() -> color_eyre::Result<Self> {
        Self::create_dir_all()?;

        Binary::import(&Self::full_path())
    }

    /// Writes the current state to its snapshot file.
    fn export(&self) -> color_eyre::Result<()> {
        Binary::export(&Self::full_path(), self)
    }
}

View File

@@ -0,0 +1,411 @@
use allocative::Allocative;
use crate::{
states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{LiquiditySplitResult, Price, SplitByLiquidity, WAmount},
};
/// Durable (persisted across runs) per-cohort aggregate state.
#[derive(Default, Debug, Allocative)]
pub struct AddressCohortDurableStates {
    // Number of non-empty addresses currently in the cohort.
    pub address_count: usize,
    // Aggregate supply/UTXO/realized-cap state, per liquidity bucket.
    pub split_durable_states: SplitByLiquidity<DurableStates>,
    // Amount held at each price paid, per liquidity bucket.
    pub price_to_split_amount: PriceToValue<SplitByLiquidity<WAmount>>,
}
// Threshold used when attributing integer rounding leftovers between the
// three liquidity buckets (illiquid / liquid / highly liquid) in `_crement`.
const ONE_THIRD: f64 = 0.33333333333;

// TODO: Clean that mess, move to a generic liquidity split and somehow support rest for non floats
impl AddressCohortDurableStates {
    /// Adds one address's aggregates (amount, UTXO count, realized cap) into
    /// the cohort, distributing them across liquidity buckets via the
    /// pre-computed `*_result` splits.
    #[allow(clippy::too_many_arguments)]
    pub fn increment(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        realized_cap: Price,
        mean_price_paid: Price,
        split_sat_amount_result: &LiquiditySplitResult,
        split_utxo_count_result: &LiquiditySplitResult,
        split_realized_cap_result: &LiquiditySplitResult,
    ) -> color_eyre::Result<()> {
        self.address_count += 1;

        self._crement(
            amount,
            utxo_count,
            realized_cap,
            mean_price_paid,
            split_sat_amount_result,
            split_utxo_count_result,
            split_realized_cap_result,
            true,
        )
    }

    /// Inverse of [`Self::increment`]: removes one address's aggregates.
    #[allow(clippy::too_many_arguments)]
    pub fn decrement(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        realized_cap: Price,
        mean_price_paid: Price,
        split_sat_amount_result: &LiquiditySplitResult,
        split_utxo_count_result: &LiquiditySplitResult,
        split_realized_cap_result: &LiquiditySplitResult,
    ) -> color_eyre::Result<()> {
        self.address_count -= 1;

        self._crement(
            amount,
            utxo_count,
            realized_cap,
            mean_price_paid,
            split_sat_amount_result,
            split_utxo_count_result,
            split_realized_cap_result,
            false,
        )
    }

    /// Shared body of increment/decrement: applies the change to the `all`
    /// bucket, then converts the fractional liquidity splits into integer
    /// bucket values (illiquid/liquid derived by truncation, highly-liquid
    /// as the remainder) and applies them per bucket.
    #[allow(clippy::too_many_arguments)]
    pub fn _crement(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        realized_cap: Price,
        mean_price_paid: Price,
        split_sat_amount_result: &LiquiditySplitResult,
        split_utxo_count_result: &LiquiditySplitResult,
        split_realized_cap_result: &LiquiditySplitResult,
        increment: bool,
    ) -> color_eyre::Result<()> {
        if increment {
            self.split_durable_states
                .all
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.split_durable_states
                .all
                .decrement(amount, utxo_count, realized_cap)
        }
        .inspect_err(|report| {
            dbg!(
                report,
                "split all failed",
                split_sat_amount_result,
                split_utxo_count_result
            );
        })?;

        // Truncate the fractional illiquid split into integers, keeping the
        // fractional remainders for the rounding attribution below.
        let illiquid_amount = split_sat_amount_result.illiquid.trunc();
        let illiquid_amount_rest = split_sat_amount_result.illiquid - illiquid_amount;
        let mut illiquid_amount = WAmount::from_sat(illiquid_amount as u64);
        let mut illiquid_utxo_count = split_utxo_count_result.illiquid.trunc() as usize;
        let illiquid_utxo_count_rest = split_utxo_count_result.illiquid.fract();
        let mut illiquid_realized_cap =
            Price::from_cent(split_realized_cap_result.illiquid.trunc() as u64);
        let illiquid_realized_cap_rest = split_realized_cap_result.illiquid.fract();

        // Same for the liquid bucket.
        let liquid_amount = split_sat_amount_result.liquid.trunc();
        let liquid_amount_rest = split_sat_amount_result.liquid - liquid_amount;
        let mut liquid_amount = WAmount::from_sat(liquid_amount as u64);
        let mut liquid_utxo_count = split_utxo_count_result.liquid.trunc() as usize;
        let liquid_utxo_count_rest = split_utxo_count_result.liquid.fract();
        let mut liquid_realized_cap =
            Price::from_cent(split_realized_cap_result.liquid.trunc() as u64);
        let liquid_realized_cap_rest = split_realized_cap_result.liquid.fract();

        // Highly-liquid takes the exact remainder so the three buckets always
        // sum to the totals.
        let mut highly_liquid_amount = amount - illiquid_amount - liquid_amount;
        let mut highly_liquid_utxo_count = utxo_count - illiquid_utxo_count - liquid_utxo_count;
        let mut highly_liquid_realized_cap =
            realized_cap - illiquid_realized_cap - liquid_realized_cap;

        // NOTE(review): each `*_diff` below recomputes the exact expression
        // used to derive the corresponding `highly_liquid_*` value just
        // above, so it is always zero and these attribution branches look
        // unreachable — confirm intent before relying on them.
        let amount_diff = amount - illiquid_amount - liquid_amount - highly_liquid_amount;
        if amount_diff > WAmount::ZERO {
            if illiquid_amount_rest >= ONE_THIRD && illiquid_amount_rest > liquid_amount_rest {
                illiquid_amount += amount_diff;
            } else if illiquid_amount_rest >= ONE_THIRD {
                liquid_amount += amount_diff;
            } else {
                highly_liquid_amount += amount_diff;
            }
        }

        let utxo_count_diff =
            utxo_count - illiquid_utxo_count - liquid_utxo_count - highly_liquid_utxo_count;
        if utxo_count_diff > 0 {
            if illiquid_utxo_count_rest >= ONE_THIRD
                && illiquid_utxo_count_rest > liquid_utxo_count_rest
            {
                illiquid_utxo_count += utxo_count_diff;
            } else if illiquid_utxo_count_rest >= ONE_THIRD {
                liquid_utxo_count += utxo_count_diff;
            } else {
                highly_liquid_utxo_count += utxo_count_diff;
            }
        }

        let realized_cap_diff =
            realized_cap - illiquid_realized_cap - liquid_realized_cap - highly_liquid_realized_cap;
        if realized_cap_diff > Price::ZERO {
            if illiquid_realized_cap_rest >= ONE_THIRD
                && illiquid_realized_cap_rest > liquid_realized_cap_rest
            {
                illiquid_realized_cap += realized_cap_diff;
            } else if illiquid_realized_cap_rest >= ONE_THIRD {
                liquid_realized_cap += realized_cap_diff;
            } else {
                highly_liquid_realized_cap += realized_cap_diff;
            }
        }

        let split_amount = SplitByLiquidity {
            all: amount,
            illiquid: illiquid_amount,
            liquid: liquid_amount,
            highly_liquid: highly_liquid_amount,
        };

        let split_utxo_count = SplitByLiquidity {
            all: utxo_count,
            illiquid: illiquid_utxo_count,
            liquid: liquid_utxo_count,
            highly_liquid: highly_liquid_utxo_count,
        };

        let split_realized_cap = SplitByLiquidity {
            all: realized_cap,
            illiquid: illiquid_realized_cap,
            liquid: liquid_realized_cap,
            highly_liquid: highly_liquid_realized_cap,
        };

        // Update the price-paid histogram with the per-bucket amounts.
        if increment {
            self.price_to_split_amount
                .increment(mean_price_paid, split_amount);
        } else {
            self.price_to_split_amount
                .decrement(mean_price_paid, split_amount)
                .inspect_err(|report| {
                    dbg!(
                        report,
                        "cents_to_split_amount decrement",
                        split_sat_amount_result,
                        split_utxo_count_result,
                        split_amount,
                        split_utxo_count,
                        split_realized_cap,
                    );
                })?;
        }

        // Finally apply each bucket's integer values to its durable state.
        if increment {
            self.split_durable_states.illiquid.increment(
                illiquid_amount,
                illiquid_utxo_count,
                illiquid_realized_cap,
            )
        } else {
            self.split_durable_states.illiquid.decrement(
                illiquid_amount,
                illiquid_utxo_count,
                illiquid_realized_cap,
            )
        }
        .inspect_err(|report| {
            dbg!(
                report,
                "split illiquid failed",
                split_sat_amount_result,
                split_utxo_count_result,
                split_amount,
                split_utxo_count,
                split_realized_cap,
            );
        })?;

        if increment {
            self.split_durable_states.liquid.increment(
                liquid_amount,
                liquid_utxo_count,
                liquid_realized_cap,
            )
        } else {
            self.split_durable_states.liquid.decrement(
                liquid_amount,
                liquid_utxo_count,
                liquid_realized_cap,
            )
        }
        .inspect_err(|report| {
            dbg!(
                report,
                "split liquid failed",
                split_sat_amount_result,
                split_utxo_count_result,
                split_amount,
                split_utxo_count,
                split_realized_cap,
            );
        })?;

        if increment {
            self.split_durable_states.highly_liquid.increment(
                highly_liquid_amount,
                highly_liquid_utxo_count,
                highly_liquid_realized_cap,
            )
        } else {
            self.split_durable_states.highly_liquid.decrement(
                highly_liquid_amount,
                highly_liquid_utxo_count,
                highly_liquid_realized_cap,
            )
        }
        .inspect_err(|report| {
            dbg!(
                report,
                "split highly liquid failed",
                split_sat_amount_result,
                split_utxo_count_result,
                split_amount,
                split_utxo_count,
                split_realized_cap,
            );
        })?;

        Ok(())
    }

    /// Walks the price-paid histogram once and derives the recomputable
    /// (one-shot) price-paid/unrealized states per liquidity bucket.
    /// Date-level unrealized states are only produced when `date_price` is set.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> SplitByLiquidity<OneShotStates> {
        let mut one_shot_states: SplitByLiquidity<OneShotStates> = SplitByLiquidity::default();

        if date_price.is_some() {
            one_shot_states
                .all
                .unrealized_date_state
                .replace(UnrealizedState::default());
            one_shot_states
                .illiquid
                .unrealized_date_state
                .replace(UnrealizedState::default());
            one_shot_states
                .liquid
                .unrealized_date_state
                .replace(UnrealizedState::default());
            one_shot_states
                .highly_liquid
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }

        let all_supply = self.split_durable_states.all.supply_state.supply;
        let illiquid_supply = self.split_durable_states.illiquid.supply_state.supply;
        let liquid_supply = self.split_durable_states.liquid.supply_state.supply;
        let highly_liquid_supply = self.split_durable_states.highly_liquid.supply_state.supply;

        let one_shot_states_ref = &mut one_shot_states;

        self.price_to_split_amount.iterate(
            SplitByLiquidity {
                all: all_supply,
                illiquid: illiquid_supply,
                liquid: liquid_supply,
                highly_liquid: highly_liquid_supply,
            },
            |price_paid, split_amount| {
                one_shot_states_ref.all.price_paid_state.iterate(
                    price_paid,
                    split_amount.all,
                    all_supply,
                );
                one_shot_states_ref.all.unrealized_block_state.iterate(
                    price_paid,
                    block_price,
                    split_amount.all,
                );
                if let Some(unrealized_date_state) =
                    one_shot_states_ref.all.unrealized_date_state.as_mut()
                {
                    unrealized_date_state.iterate(
                        price_paid,
                        date_price.unwrap(),
                        split_amount.all,
                    );
                }

                // Skip empty buckets to avoid useless iteration work.
                if split_amount.illiquid > WAmount::ZERO {
                    one_shot_states_ref.illiquid.price_paid_state.iterate(
                        price_paid,
                        split_amount.illiquid,
                        illiquid_supply,
                    );
                    one_shot_states_ref.illiquid.unrealized_block_state.iterate(
                        price_paid,
                        block_price,
                        split_amount.illiquid,
                    );
                    if let Some(unrealized_date_state) =
                        one_shot_states_ref.illiquid.unrealized_date_state.as_mut()
                    {
                        unrealized_date_state.iterate(
                            price_paid,
                            date_price.unwrap(),
                            split_amount.illiquid,
                        );
                    }
                }

                if split_amount.liquid > WAmount::ZERO {
                    one_shot_states_ref.liquid.price_paid_state.iterate(
                        price_paid,
                        split_amount.liquid,
                        liquid_supply,
                    );
                    one_shot_states_ref.liquid.unrealized_block_state.iterate(
                        price_paid,
                        block_price,
                        split_amount.liquid,
                    );
                    if let Some(unrealized_date_state) =
                        one_shot_states_ref.liquid.unrealized_date_state.as_mut()
                    {
                        unrealized_date_state.iterate(
                            price_paid,
                            date_price.unwrap(),
                            split_amount.liquid,
                        );
                    }
                }

                if split_amount.highly_liquid > WAmount::ZERO {
                    one_shot_states_ref.highly_liquid.price_paid_state.iterate(
                        price_paid,
                        split_amount.highly_liquid,
                        highly_liquid_supply,
                    );
                    one_shot_states_ref
                        .highly_liquid
                        .unrealized_block_state
                        .iterate(price_paid, block_price, split_amount.highly_liquid);
                    if let Some(unrealized_date_state) = one_shot_states_ref
                        .highly_liquid
                        .unrealized_date_state
                        .as_mut()
                    {
                        unrealized_date_state.iterate(
                            price_paid,
                            date_price.unwrap(),
                            split_amount.highly_liquid,
                        );
                    }
                }
            },
        );

        one_shot_states
    }
}

View File

@@ -0,0 +1,68 @@
use crate::structs::{AddressSize, AddressSplit, AddressType};
/// Identifies an address cohort: the full set (`All`), a size bucket,
/// or a script/address type.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub enum AddressCohortId {
    All,
    // Size cohorts, ordered from smallest to largest holders.
    Plankton,
    Shrimp,
    Crab,
    Fish,
    Shark,
    Whale,
    Humpback,
    Megalodon,
    // Address/script-type cohorts.
    P2PK,
    P2PKH,
    P2SH,
    P2WPKH,
    P2WSH,
    P2TR,
}
impl AddressCohortId {
    /// Lowercase name of the cohort; `None` for `All` (the unfiltered
    /// cohort carries no name suffix).
    pub fn as_name(&self) -> Option<&str> {
        match self {
            Self::All => None,
            Self::Plankton => Some("plankton"),
            Self::Shrimp => Some("shrimp"),
            Self::Crab => Some("crab"),
            Self::Fish => Some("fish"),
            Self::Shark => Some("shark"),
            Self::Whale => Some("whale"),
            Self::Humpback => Some("humpback"),
            Self::Megalodon => Some("megalodon"),
            Self::P2PK => Some("p2pk"),
            Self::P2PKH => Some("p2pkh"),
            Self::P2SH => Some("p2sh"),
            Self::P2WPKH => Some("p2wpkh"),
            Self::P2WSH => Some("p2wsh"),
            Self::P2TR => Some("p2tr"),
        }
    }

    /// Maps this cohort id to the corresponding address filter predicate.
    pub fn as_split(&self) -> AddressSplit {
        match self {
            Self::All => AddressSplit::All,
            Self::Plankton => AddressSplit::Size(AddressSize::Plankton),
            Self::Shrimp => AddressSplit::Size(AddressSize::Shrimp),
            Self::Crab => AddressSplit::Size(AddressSize::Crab),
            Self::Fish => AddressSplit::Size(AddressSize::Fish),
            Self::Shark => AddressSplit::Size(AddressSize::Shark),
            Self::Whale => AddressSplit::Size(AddressSize::Whale),
            Self::Humpback => AddressSplit::Size(AddressSize::Humpback),
            Self::Megalodon => AddressSplit::Size(AddressSize::Megalodon),
            Self::P2PK => AddressSplit::Type(AddressType::P2PK),
            Self::P2PKH => AddressSplit::Type(AddressType::P2PKH),
            Self::P2SH => AddressSplit::Type(AddressType::P2SH),
            Self::P2WPKH => AddressSplit::Type(AddressType::P2WPKH),
            Self::P2WSH => AddressSplit::Type(AddressType::P2WSH),
            Self::P2TR => AddressSplit::Type(AddressType::P2TR),
        }
    }
}

View File

@@ -0,0 +1,143 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use crate::{
databases::AddressIndexToAddressData,
structs::{AddressData, AddressRealizedData, Price},
};
use super::{AddressCohortDurableStates, AddressCohortsOneShotStates, SplitByAddressCohort};
/// Durable per-address-cohort states, one `AddressCohortDurableStates` per cohort.
#[derive(Default, Deref, DerefMut, Allocative)]
pub struct AddressCohortsDurableStates(SplitByAddressCohort<AddressCohortDurableStates>);
impl AddressCohortsDurableStates {
    /// Builds per-cohort states by incrementing every known address's
    /// current data into its matching cohorts.
    pub fn init(address_index_to_address_data: &mut AddressIndexToAddressData) -> Self {
        let mut s = Self::default();

        // TODO: parallelize — independently built partial states could be merged.
        address_index_to_address_data
            .iter(&mut |(_, address_data)| s.increment(address_data).unwrap());

        s
    }

    /// Applies one address's block transition: removes its pre-block
    /// snapshot, then adds back its current (post-block) state.
    pub fn iterate(
        &mut self,
        address_realized_data: &AddressRealizedData,
        current_address_data: &AddressData,
    ) -> color_eyre::Result<()> {
        self.decrement(&address_realized_data.initial_address_data)
            .inspect_err(|report| {
                dbg!(report);
                dbg!(address_realized_data, current_address_data);
                dbg!("decrement initial address_data");
            })?;

        self.increment(current_address_data).inspect_err(|report| {
            dbg!(report);
            dbg!(address_realized_data, current_address_data);
            dbg!("increment address_data");
        })?;

        Ok(())
    }

    /// Should always increment using current address data state
    fn increment(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, true)
    }

    /// Should always decrement using initial address data state
    fn decrement(&mut self, address_data: &AddressData) -> color_eyre::Result<()> {
        self._crement(address_data, false)
    }

    /// Shared body of increment/decrement: computes the liquidity splits for
    /// the address's aggregates and applies them to every matching cohort.
    fn _crement(&mut self, address_data: &AddressData, increment: bool) -> color_eyre::Result<()> {
        // No need to either insert or remove if empty
        if address_data.is_empty() {
            return Ok(());
        }

        let amount = address_data.amount;
        let utxo_count = address_data.outputs_len as usize;
        let realized_cap = address_data.realized_cap;
        let mean_price_paid = address_data.realized_cap / amount;

        let liquidity_classification = address_data.compute_liquidity_classification();

        let split_sat_amount = liquidity_classification.split(amount.to_sat() as f64);
        let split_utxo_count = liquidity_classification.split(utxo_count as f64);
        // BUG FIX: this previously split `utxo_count as f64` again (copy-paste
        // of the line above), so the realized-cap liquidity buckets were
        // computed from UTXO counts instead of the realized cap itself.
        // NOTE(review): verify `to_cent` is `Price`'s cent accessor (the
        // counterpart of `Price::from_cent` used by the cohort state code).
        let split_realized_cap =
            liquidity_classification.split(realized_cap.to_cent() as f64);

        self.0
            .iterate(address_data, |state: &mut AddressCohortDurableStates| {
                if increment {
                    if let Err(report) = state.increment(
                        amount,
                        utxo_count,
                        realized_cap,
                        mean_price_paid,
                        &split_sat_amount,
                        &split_utxo_count,
                        &split_realized_cap,
                    ) {
                        dbg!(
                            report.to_string(),
                            &state,
                            &address_data,
                            &liquidity_classification
                        );
                        return Err(eyre!("increment error"));
                    }
                } else if let Err(report) = state.decrement(
                    amount,
                    utxo_count,
                    realized_cap,
                    mean_price_paid,
                    &split_sat_amount,
                    &split_utxo_count,
                    &split_realized_cap,
                ) {
                    dbg!(
                        report.to_string(),
                        &state,
                        &address_data,
                        &liquidity_classification
                    );
                    return Err(eyre!("decrement error"));
                }

                Ok(())
            })?;

        Ok(())
    }

    /// Computes each cohort's one-shot states in parallel, then gathers the
    /// results into a single `AddressCohortsOneShotStates`.
    pub fn compute_one_shot_states(
        &mut self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> AddressCohortsOneShotStates {
        let mut one_shot_states = AddressCohortsOneShotStates::default();

        self.as_vec()
            .into_par_iter()
            .map(|(states, address_cohort_id)| {
                (
                    address_cohort_id,
                    states.compute_one_shot_states(block_price, date_price),
                )
            })
            .collect::<Vec<_>>()
            .into_iter()
            .for_each(|(address_cohort_id, states)| {
                *one_shot_states.get_mut_from_id(&address_cohort_id) = states;
            });

        one_shot_states
    }
}

View File

@@ -0,0 +1,48 @@
use derive_deref::{Deref, DerefMut};
use crate::{
states::InputState,
structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount},
};
use super::SplitByAddressCohort;
/// Per-cohort, per-liquidity-bucket accumulators for spent-output (input) flows.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsInputStates(SplitByAddressCohort<SplitByLiquidity<InputState>>);
impl AddressCohortsInputStates {
    /// Fold one address's spend activity (destroyed UTXO count and sent
    /// volume) into every cohort the address belongs to, splitting both
    /// figures across liquidity tiers.
    pub fn iterate_input(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let destroyed_count = realized_data.utxos_destroyed as f64;
        let sent_volume = realized_data.sent;

        let count_split = liquidity_classification.split(destroyed_count);
        let volume_split = liquidity_classification.split(sent_volume.to_sat() as f64);

        // Convert a liquidity tier's fractional sat share back to an amount.
        let to_amount = |sats: f64| WAmount::from_sat(sats.round() as u64);

        self.iterate(&realized_data.initial_address_data, move |state| {
            state.all.iterate(destroyed_count, sent_volume);
            state
                .illiquid
                .iterate(count_split.illiquid, to_amount(volume_split.illiquid));
            state
                .liquid
                .iterate(count_split.liquid, to_amount(volume_split.liquid));
            state
                .highly_liquid
                .iterate(count_split.highly_liquid, to_amount(volume_split.highly_liquid));
            Ok(())
        })
    }
}

View File

@@ -0,0 +1,8 @@
use derive_deref::{Deref, DerefMut};
use crate::{states::OneShotStates, structs::SplitByLiquidity};
use super::SplitByAddressCohort;
/// Per-address-cohort one-shot (recomputed each block) states, split by
/// liquidity tier.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOneShotStates(pub SplitByAddressCohort<SplitByLiquidity<OneShotStates>>);

View File

@@ -0,0 +1,48 @@
use derive_deref::{Deref, DerefMut};
use crate::{
states::OutputState,
structs::{AddressRealizedData, LiquidityClassification, SplitByLiquidity, WAmount},
};
use super::SplitByAddressCohort;
/// Per-address-cohort output (receive) activity, with each cohort's totals
/// further split by liquidity tier.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsOutputStates(SplitByAddressCohort<SplitByLiquidity<OutputState>>);
impl AddressCohortsOutputStates {
    /// Fold one address's receive activity (created UTXO count and received
    /// volume) into every cohort the address belongs to, splitting both
    /// figures across liquidity tiers.
    pub fn iterate_output(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let created_count = realized_data.utxos_created as f64;
        let received_volume = realized_data.received;

        let count_split = liquidity_classification.split(created_count);
        let volume_split = liquidity_classification.split(received_volume.to_sat() as f64);

        // Convert a liquidity tier's fractional sat share back to an amount.
        let to_amount = |sats: f64| WAmount::from_sat(sats.round() as u64);

        self.iterate(&realized_data.initial_address_data, move |state| {
            state.all.iterate(created_count, received_volume);
            state
                .illiquid
                .iterate(count_split.illiquid, to_amount(volume_split.illiquid));
            state
                .liquid
                .iterate(count_split.liquid, to_amount(volume_split.liquid));
            state
                .highly_liquid
                .iterate(count_split.highly_liquid, to_amount(volume_split.highly_liquid));
            Ok(())
        })
    }
}

View File

@@ -0,0 +1,48 @@
use derive_deref::{Deref, DerefMut};
use crate::{
states::RealizedState,
structs::{AddressRealizedData, LiquidityClassification, Price, SplitByLiquidity},
};
use super::SplitByAddressCohort;
/// Per-address-cohort realized profit/loss, with each cohort's totals further
/// split by liquidity tier.
#[derive(Deref, DerefMut, Default)]
pub struct AddressCohortsRealizedStates(SplitByAddressCohort<SplitByLiquidity<RealizedState>>);
impl AddressCohortsRealizedStates {
    /// Fold one address's realized profit and loss into every cohort it
    /// belongs to, splitting both across liquidity tiers.
    pub fn iterate_realized(
        &mut self,
        realized_data: &AddressRealizedData,
        liquidity_classification: &LiquidityClassification,
    ) -> color_eyre::Result<()> {
        let profit = realized_data.profit;
        let loss = realized_data.loss;

        let split_profit = liquidity_classification.split(profit.to_cent() as f64);
        let split_loss = liquidity_classification.split(loss.to_cent() as f64);

        let iterate = move |state: &mut SplitByLiquidity<RealizedState>| -> color_eyre::Result<()> {
            state.all.iterate(profit, loss);

            // Round the fractional cent shares before converting: a bare
            // `as u64` cast truncates, which would systematically undercount
            // each tier. Rounding keeps this consistent with the input/output
            // cohort states, which round their sat splits.
            state.illiquid.iterate(
                Price::from_cent(split_profit.illiquid.round() as u64),
                Price::from_cent(split_loss.illiquid.round() as u64),
            );

            state.liquid.iterate(
                Price::from_cent(split_profit.liquid.round() as u64),
                Price::from_cent(split_loss.liquid.round() as u64),
            );

            state.highly_liquid.iterate(
                Price::from_cent(split_profit.highly_liquid.round() as u64),
                Price::from_cent(split_loss.highly_liquid.round() as u64),
            );

            Ok(())
        };

        self.iterate(&realized_data.initial_address_data, iterate)
    }
}

View File

@@ -0,0 +1,17 @@
mod cohort_durable_states;
mod cohort_id;
mod cohorts_durable_states;
mod cohorts_input_states;
mod cohorts_one_shot_states;
mod cohorts_output_states;
mod cohorts_realized_states;
mod split_by_address_cohort;
pub use cohort_durable_states::*;
pub use cohort_id::*;
pub use cohorts_durable_states::*;
pub use cohorts_input_states::*;
pub use cohorts_one_shot_states::*;
pub use cohorts_output_states::*;
pub use cohorts_realized_states::*;
pub use split_by_address_cohort::*;

View File

@@ -0,0 +1,177 @@
use allocative::Allocative;
use crate::structs::{AddressData, AddressSize, AddressSplit, AddressType};
use super::AddressCohortId;
/// One `T` per address cohort: the `all` aggregate, the size buckets
/// (`plankton` … `megalodon`), and the script-type buckets (`p2pk` … `p2tr`).
#[derive(Default, Allocative)]
pub struct SplitByAddressCohort<T> {
    pub all: T,

    // Size-based cohorts, smallest to largest balance.
    pub plankton: T,
    pub shrimp: T,
    pub crab: T,
    pub fish: T,
    pub shark: T,
    pub whale: T,
    pub humpback: T,
    pub megalodon: T,

    // Script-type cohorts.
    pub p2pk: T,
    pub p2pkh: T,
    pub p2sh: T,
    pub p2wpkh: T,
    pub p2wsh: T,
    pub p2tr: T,
}
impl<T> SplitByAddressCohort<T> {
    /// Borrow the state matching `split`, or `None` when the split targets an
    /// address type/size that has no dedicated cohort field.
    pub fn get(&self, split: &AddressSplit) -> Option<&T> {
        match &split {
            AddressSplit::All => Some(&self.all),
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&self.p2pk),
                AddressType::P2PKH => Some(&self.p2pkh),
                AddressType::P2SH => Some(&self.p2sh),
                AddressType::P2WPKH => Some(&self.p2wpkh),
                AddressType::P2WSH => Some(&self.p2wsh),
                AddressType::P2TR => Some(&self.p2tr),
                // These script types are not tracked as cohorts.
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&self.plankton),
                AddressSize::Shrimp => Some(&self.shrimp),
                AddressSize::Crab => Some(&self.crab),
                AddressSize::Fish => Some(&self.fish),
                AddressSize::Shark => Some(&self.shark),
                AddressSize::Whale => Some(&self.whale),
                AddressSize::Humpback => Some(&self.humpback),
                AddressSize::Megalodon => Some(&self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }

    /// Apply `iterate` to every cohort this address belongs to: always `all`,
    /// plus its type cohort and its size cohort when those exist.
    pub fn iterate(
        &mut self,
        address_data: &AddressData,
        iterate: impl Fn(&mut T) -> color_eyre::Result<()>,
    ) -> color_eyre::Result<()> {
        if let Some(state) = self.get_mut_from_split(&AddressSplit::All) {
            iterate(state)?;
        }

        if let Some(state) = self.get_mut_from_split(&AddressSplit::Type(address_data.address_type))
        {
            iterate(state)?;
        }

        if let Some(state) = self.get_mut_from_split(&AddressSplit::Size(AddressSize::from_amount(
            address_data.amount,
        ))) {
            iterate(state)?;
        }

        Ok(())
    }

    /// Mutable counterpart of [`Self::get`]; same `None` cases.
    fn get_mut_from_split(&mut self, split: &AddressSplit) -> Option<&mut T> {
        match &split {
            AddressSplit::All => Some(&mut self.all),
            AddressSplit::Type(address_type) => match address_type {
                AddressType::P2PK => Some(&mut self.p2pk),
                AddressType::P2PKH => Some(&mut self.p2pkh),
                AddressType::P2SH => Some(&mut self.p2sh),
                AddressType::P2WPKH => Some(&mut self.p2wpkh),
                AddressType::P2WSH => Some(&mut self.p2wsh),
                AddressType::P2TR => Some(&mut self.p2tr),
                AddressType::MultiSig => None,
                AddressType::Unknown => None,
                AddressType::OpReturn => None,
                AddressType::PushOnly => None,
                AddressType::Empty => None,
            },
            AddressSplit::Size(address_size) => match address_size {
                AddressSize::Plankton => Some(&mut self.plankton),
                AddressSize::Shrimp => Some(&mut self.shrimp),
                AddressSize::Crab => Some(&mut self.crab),
                AddressSize::Fish => Some(&mut self.fish),
                AddressSize::Shark => Some(&mut self.shark),
                AddressSize::Whale => Some(&mut self.whale),
                AddressSize::Humpback => Some(&mut self.humpback),
                AddressSize::Megalodon => Some(&mut self.megalodon),
                AddressSize::Empty => None,
            },
        }
    }

    /// Borrow the state for a cohort id (every id maps to exactly one field).
    pub fn get_mut_from_id(&mut self, id: &AddressCohortId) -> &mut T {
        match id {
            AddressCohortId::All => &mut self.all,
            AddressCohortId::Plankton => &mut self.plankton,
            AddressCohortId::Shrimp => &mut self.shrimp,
            AddressCohortId::Crab => &mut self.crab,
            AddressCohortId::Fish => &mut self.fish,
            AddressCohortId::Shark => &mut self.shark,
            AddressCohortId::Whale => &mut self.whale,
            AddressCohortId::Humpback => &mut self.humpback,
            AddressCohortId::Megalodon => &mut self.megalodon,
            AddressCohortId::P2PK => &mut self.p2pk,
            AddressCohortId::P2PKH => &mut self.p2pkh,
            AddressCohortId::P2SH => &mut self.p2sh,
            AddressCohortId::P2WPKH => &mut self.p2wpkh,
            AddressCohortId::P2WSH => &mut self.p2wsh,
            AddressCohortId::P2TR => &mut self.p2tr,
        }
    }

    /// All cohort states paired with their ids, in a fixed order.
    pub fn as_vec(&self) -> Vec<(&T, AddressCohortId)> {
        vec![
            (&self.all, AddressCohortId::All),
            (&self.plankton, AddressCohortId::Plankton),
            (&self.shrimp, AddressCohortId::Shrimp),
            (&self.crab, AddressCohortId::Crab),
            (&self.fish, AddressCohortId::Fish),
            (&self.shark, AddressCohortId::Shark),
            (&self.whale, AddressCohortId::Whale),
            (&self.humpback, AddressCohortId::Humpback),
            (&self.megalodon, AddressCohortId::Megalodon),
            (&self.p2pk, AddressCohortId::P2PK),
            (&self.p2pkh, AddressCohortId::P2PKH),
            (&self.p2sh, AddressCohortId::P2SH),
            (&self.p2wpkh, AddressCohortId::P2WPKH),
            (&self.p2wsh, AddressCohortId::P2WSH),
            (&self.p2tr, AddressCohortId::P2TR),
        ]
    }

    /// Mutable counterpart of [`Self::as_vec`], same order.
    pub fn as_mut_vec(&mut self) -> Vec<(&mut T, AddressCohortId)> {
        vec![
            (&mut self.all, AddressCohortId::All),
            (&mut self.plankton, AddressCohortId::Plankton),
            (&mut self.shrimp, AddressCohortId::Shrimp),
            (&mut self.crab, AddressCohortId::Crab),
            (&mut self.fish, AddressCohortId::Fish),
            (&mut self.shark, AddressCohortId::Shark),
            (&mut self.whale, AddressCohortId::Whale),
            (&mut self.humpback, AddressCohortId::Humpback),
            (&mut self.megalodon, AddressCohortId::Megalodon),
            (&mut self.p2pk, AddressCohortId::P2PK),
            (&mut self.p2pkh, AddressCohortId::P2PKH),
            (&mut self.p2sh, AddressCohortId::P2SH),
            (&mut self.p2wpkh, AddressCohortId::P2WPKH),
            (&mut self.p2wsh, AddressCohortId::P2WSH),
            (&mut self.p2tr, AddressCohortId::P2TR),
        ]
    }
}

View File

@@ -0,0 +1,18 @@
use allocative::Allocative;
use crate::structs::Price;
/// Running realized capitalization (sum of value at each coin's last move).
#[derive(Debug, Default, Allocative)]
pub struct CapitalizationState {
    pub realized_cap: Price,
}
impl CapitalizationState {
    /// Add `realized_cap` to the running total.
    pub fn increment(&mut self, realized_cap: Price) {
        self.realized_cap += realized_cap;
    }

    /// Subtract `realized_cap` from the running total.
    /// NOTE(review): no underflow guard here — `Price`'s `SubAssign`
    /// semantics govern what happens if more is removed than was added.
    pub fn decrement(&mut self, realized_cap: Price) {
        self.realized_cap -= realized_cap;
    }
}

View File

@@ -0,0 +1,55 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::{Price, WAmount};
use super::{CapitalizationState, SupplyState, UTXOState};
/// Cohort state carried forward across blocks: realized cap, supply, and
/// UTXO count, always updated together.
#[derive(Default, Debug, Allocative)]
pub struct DurableStates {
    pub capitalization_state: CapitalizationState,
    pub supply_state: SupplyState,
    pub utxo_state: UTXOState,
}
impl DurableStates {
    /// Add `amount`/`utxo_count`/`realized_cap` to all three sub-states.
    ///
    /// A zero amount with a non-zero UTXO count violates the invariant that
    /// supply and UTXO count move together, and is reported as an error.
    pub fn increment(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        if amount == WAmount::ZERO {
            if utxo_count != 0 {
                dbg!(amount, utxo_count);
                return Err(eyre!("Shouldn't be possible"));
            }
        } else {
            self.capitalization_state.increment(realized_cap);
            self.supply_state.increment(amount);
            self.utxo_state.increment(utxo_count);
        }

        Ok(())
    }

    /// Remove `amount`/`utxo_count`/`realized_cap` from all three sub-states.
    pub fn decrement(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        realized_cap: Price,
    ) -> color_eyre::Result<()> {
        if amount == WAmount::ZERO {
            if utxo_count != 0 {
                dbg!(amount, utxo_count);
                // Report the broken invariant as an error instead of panicking
                // (was `unreachable!`), matching how `increment` handles the
                // exact same condition.
                return Err(eyre!("Shouldn't be possible"));
            }
        } else {
            self.capitalization_state.decrement(realized_cap);
            self.supply_state.decrement(amount)?;
            self.utxo_state.decrement(utxo_count)?;
        }

        Ok(())
    }
}

View File

@@ -0,0 +1,14 @@
use crate::structs::WAmount;
/// Accumulated spent-input totals: number of inputs and volume moved.
#[derive(Debug, Default)]
pub struct InputState {
    // f64 because cohort splits distribute fractional shares of the count.
    pub count: f64,
    pub volume: WAmount,
}
impl InputState {
    /// Accumulate one batch of inputs into the running totals.
    pub fn iterate(&mut self, count: f64, volume: WAmount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -0,0 +1,23 @@
mod capitalization_state;
mod durable_states;
mod input_state;
mod one_shot_states;
mod output_state;
mod price_paid_state;
mod price_to_value;
mod realized_state;
mod supply_state;
mod unrealized_state;
mod utxo_state;
pub use capitalization_state::*;
pub use durable_states::*;
pub use input_state::*;
pub use one_shot_states::*;
pub use output_state::*;
pub use price_paid_state::*;
pub use price_to_value::*;
pub use realized_state::*;
pub use supply_state::*;
pub use unrealized_state::*;
pub use utxo_state::*;

View File

@@ -0,0 +1,9 @@
use super::{PricePaidState, UnrealizedState};
/// States recomputed from scratch each pass (not carried forward):
/// price-paid percentiles plus unrealized P&L at the block price, and
/// optionally at the date's close price.
#[derive(Default)]
pub struct OneShotStates {
    pub price_paid_state: PricePaidState,
    pub unrealized_block_state: UnrealizedState,
    // Only present when a date price was supplied to the computation.
    pub unrealized_date_state: Option<UnrealizedState>,
}

View File

@@ -0,0 +1,14 @@
use crate::structs::WAmount;
/// Accumulated created-output totals: number of outputs and volume received.
#[derive(Debug, Default)]
pub struct OutputState {
    // f64 because cohort splits distribute fractional shares of the count.
    pub count: f64,
    pub volume: WAmount,
}
impl OutputState {
    /// Accumulate one batch of outputs into the running totals.
    pub fn iterate(&mut self, count: f64, volume: WAmount) {
        self.count += count;
        self.volume += volume;
    }
}

View File

@@ -0,0 +1,210 @@
use crate::structs::{Price, WAmount};
/// Price-paid percentile accumulator: each `pp_*` slot is filled once, with
/// the price at which cumulative processed supply first crossed that
/// percentile of total supply.
#[derive(Default, Debug)]
pub struct PricePaidState {
    pub pp_05p: Option<Price>,
    pub pp_10p: Option<Price>,
    pub pp_15p: Option<Price>,
    pub pp_20p: Option<Price>,
    pub pp_25p: Option<Price>,
    pub pp_30p: Option<Price>,
    pub pp_35p: Option<Price>,
    pub pp_40p: Option<Price>,
    pub pp_45p: Option<Price>,
    pub pp_median: Option<Price>,
    pub pp_55p: Option<Price>,
    pub pp_60p: Option<Price>,
    pub pp_65p: Option<Price>,
    pub pp_70p: Option<Price>,
    pub pp_75p: Option<Price>,
    pub pp_80p: Option<Price>,
    pub pp_85p: Option<Price>,
    pub pp_90p: Option<Price>,
    pub pp_95p: Option<Price>,
    // Supply fed in so far via `iterate`.
    pub processed_amount: WAmount,
}
impl PricePaidState {
pub fn iterate(&mut self, price: Price, amount: WAmount, total_supply: WAmount) {
let PricePaidState {
processed_amount,
pp_05p,
pp_10p,
pp_15p,
pp_20p,
pp_25p,
pp_30p,
pp_35p,
pp_40p,
pp_45p,
pp_median,
pp_55p,
pp_60p,
pp_65p,
pp_70p,
pp_75p,
pp_80p,
pp_85p,
pp_90p,
pp_95p,
} = self;
*processed_amount += amount;
if pp_95p.is_some() {
return;
}
let processed_sat_amount = processed_amount.to_sat();
let total_sat_supply = total_supply.to_sat();
if processed_sat_amount >= total_sat_supply * 95 / 100 {
pp_95p.replace(price);
}
if pp_90p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 90 / 100 {
pp_90p.replace(price);
}
if pp_85p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 85 / 100 {
pp_85p.replace(price);
}
if pp_80p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 80 / 100 {
pp_80p.replace(price);
}
if pp_75p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 75 / 100 {
pp_75p.replace(price);
}
if pp_70p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 70 / 100 {
pp_70p.replace(price);
}
if pp_65p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 65 / 100 {
pp_65p.replace(price);
}
if pp_60p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 60 / 100 {
pp_60p.replace(price);
}
if pp_55p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 55 / 100 {
pp_55p.replace(price);
}
if pp_median.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 2 {
pp_median.replace(price);
}
if pp_45p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 45 / 100 {
pp_45p.replace(price);
}
if pp_40p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 40 / 100 {
pp_40p.replace(price);
}
if pp_35p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 35 / 100 {
pp_35p.replace(price);
}
if pp_30p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 30 / 100 {
pp_30p.replace(price);
}
if pp_25p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 4 {
pp_25p.replace(price);
}
if pp_20p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 5 {
pp_20p.replace(price);
}
if pp_15p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply * 15 / 100 {
pp_15p.replace(price);
}
if pp_10p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 10 {
pp_10p.replace(price);
}
if pp_05p.is_some() {
return;
}
if processed_sat_amount >= total_sat_supply / 20 {
pp_05p.replace(price);
}
}
}

View File

@@ -0,0 +1,123 @@
use std::{
collections::BTreeMap,
fmt::Debug,
ops::{AddAssign, SubAssign},
};
use allocative::Allocative;
use color_eyre::eyre::eyre;
use derive_deref::{Deref, DerefMut};
use crate::structs::{Price, SplitByLiquidity, WAmount};
/// Map from price (in cents, truncated to `u32`) to an accumulated value,
/// kept sorted so iteration walks prices in ascending order.
#[derive(Deref, DerefMut, Default, Debug, Allocative)]
pub struct PriceToValue<T>(BTreeMap<u32, T>);
impl<T> PriceToValue<T>
where
    // `Default` was previously listed twice in this bound list; once is enough.
    T: Default + Debug + AddAssign + SubAssign + CanSubtract + Copy + Clone + PartialEq + IsZero,
{
    /// Add `value` to the bucket for `price`, creating the bucket on demand.
    pub fn increment(&mut self, price: Price, value: T) {
        *self.entry(price.to_cent() as u32).or_default() += value;
    }

    /// Subtract `value` from the bucket for `price`, removing the bucket once
    /// it reaches zero.
    ///
    /// # Errors
    /// When the bucket is missing, or holds less than `value`.
    pub fn decrement(&mut self, price: Price, value: T) -> color_eyre::Result<()> {
        let cent = price.to_cent() as u32;

        let delete = {
            let self_value = self.get_mut(&cent);

            if self_value.is_none() {
                dbg!(&self.0, price, value);
                return Err(eyre!("self_value is none"));
            }

            let self_value = self_value.unwrap();

            if !self_value.can_subtract(&value) {
                dbg!(*self_value, &self.0, price, value);
                return Err(eyre!("self value < value"));
            }

            *self_value -= value;

            self_value.is_zero()?
        };

        if delete {
            self.remove(&cent).unwrap();
        }

        Ok(())
    }

    /// Walk every bucket in ascending price order, calling `iterate` with the
    /// bucket's price and value.
    ///
    /// # Panics
    /// When the sum of all bucket values does not equal `supply`.
    pub fn iterate(&self, supply: T, mut iterate: impl FnMut(Price, T)) {
        let mut processed = T::default();

        self.iter().for_each(|(cent, value)| {
            let value = *value;

            processed += value;

            iterate(Price::from_cent(*cent as u64), value)
        });

        if processed != supply {
            dbg!(processed, supply);
            panic!("processed_amount isn't equal to supply")
        }
    }
}
/// Whether subtracting `other` from `self` would stay non-negative.
pub trait CanSubtract {
    fn can_subtract(&self, other: &Self) -> bool;
}
impl CanSubtract for WAmount {
    // Plain ordering check: subtraction is safe iff self >= other.
    fn can_subtract(&self, other: &Self) -> bool {
        self >= other
    }
}
impl CanSubtract for SplitByLiquidity<WAmount> {
    // Every tier (and the aggregate) must be subtractable independently.
    fn can_subtract(&self, other: &Self) -> bool {
        self.all >= other.all
            && self.illiquid >= other.illiquid
            && self.liquid >= other.liquid
            && self.highly_liquid >= other.highly_liquid
    }
}
/// Zero check that may fail when the value is internally inconsistent.
pub trait IsZero {
    fn is_zero(&self) -> color_eyre::Result<bool>;
}
impl IsZero for WAmount {
    // A plain amount can always be checked; never errors.
    fn is_zero(&self) -> color_eyre::Result<bool> {
        Ok(*self == WAmount::ZERO)
    }
}
impl IsZero for SplitByLiquidity<WAmount> {
    /// Zero iff the `all` aggregate is zero; errors when the aggregate is
    /// zero but some liquidity tier still holds a non-zero amount (the split
    /// no longer sums up).
    fn is_zero(&self) -> color_eyre::Result<bool> {
        let aggregate_is_zero = self.all == WAmount::ZERO;

        let any_tier_nonzero = self.illiquid != WAmount::ZERO
            || self.liquid != WAmount::ZERO
            || self.highly_liquid != WAmount::ZERO;

        if aggregate_is_zero && any_tier_nonzero {
            dbg!(&self);
            return Err(eyre!("Bad split"));
        }

        Ok(aggregate_is_zero)
    }
}

View File

@@ -0,0 +1,14 @@
use crate::structs::Price;
/// Accumulated realized profit and loss (kept separately, both positive).
#[derive(Debug, Default)]
pub struct RealizedState {
    pub realized_profit: Price,
    pub realized_loss: Price,
}
impl RealizedState {
    /// Accumulate one batch of realized profit and loss.
    pub fn iterate(&mut self, realized_profit: Price, realized_loss: Price) {
        self.realized_profit += realized_profit;
        self.realized_loss += realized_loss;
    }
}

View File

@@ -0,0 +1,27 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
use crate::structs::WAmount;
/// Running total of supply (in sats) held by a cohort.
#[derive(Debug, Default, Allocative)]
pub struct SupplyState {
    pub supply: WAmount,
}
impl SupplyState {
    /// Add `amount` to the tracked supply.
    pub fn increment(&mut self, amount: WAmount) {
        self.supply += amount;
    }

    /// Remove `amount` from the tracked supply.
    ///
    /// # Errors
    /// When `amount` exceeds the tracked supply (instead of underflowing).
    pub fn decrement(&mut self, amount: WAmount) -> color_eyre::Result<()> {
        if self.supply < amount {
            dbg!(self.supply, amount);
            // Fixed message: it used to read "supply smaller than supply".
            return Err(eyre!("supply smaller than amount"));
        }

        self.supply -= amount;

        Ok(())
    }
}

View File

@@ -0,0 +1,38 @@
use std::{cmp::Ordering, ops::Add};
use crate::structs::{Price, WAmount};
/// Unrealized P&L snapshot: supply currently in profit, plus the total
/// unrealized profit and loss versus each tranche's price paid.
#[derive(Debug, Default)]
pub struct UnrealizedState {
    pub supply_in_profit: WAmount,
    pub unrealized_profit: Price,
    pub unrealized_loss: Price,
}
impl UnrealizedState {
    /// Fold one supply tranche into the snapshot: `amount` bought at
    /// `price_then`, valued at `price_now`.
    ///
    /// Only strictly profitable supply counts toward `supply_in_profit`;
    /// tranches at exactly break-even contribute neither profit nor loss.
    #[inline]
    pub fn iterate(&mut self, price_then: Price, price_now: Price, amount: WAmount) {
        match price_then.cmp(&price_now) {
            Ordering::Less => {
                self.unrealized_profit += (price_now - price_then) * amount;
                self.supply_in_profit += amount;
            }
            Ordering::Greater => {
                self.unrealized_loss += (price_then - price_now) * amount;
            }
            Ordering::Equal => {}
        }
    }
}
impl Add<UnrealizedState> for UnrealizedState {
    type Output = UnrealizedState;

    /// Field-wise sum of two unrealized P&L snapshots.
    fn add(self, rhs: UnrealizedState) -> UnrealizedState {
        let UnrealizedState {
            supply_in_profit,
            unrealized_profit,
            unrealized_loss,
        } = rhs;

        UnrealizedState {
            supply_in_profit: self.supply_in_profit + supply_in_profit,
            unrealized_profit: self.unrealized_profit + unrealized_profit,
            unrealized_loss: self.unrealized_loss + unrealized_loss,
        }
    }
}

View File

@@ -0,0 +1,25 @@
use allocative::Allocative;
use color_eyre::eyre::eyre;
/// Running count of unspent outputs held by a cohort.
#[derive(Debug, Default, Allocative)]
pub struct UTXOState {
    pub count: usize,
}
impl UTXOState {
    /// Add `utxo_count` to the running count.
    pub fn increment(&mut self, utxo_count: usize) {
        self.count += utxo_count;
    }

    /// Remove `utxo_count` from the running count, erroring instead of
    /// underflowing when more is removed than is tracked.
    pub fn decrement(&mut self, utxo_count: usize) -> color_eyre::Result<()> {
        match self.count.checked_sub(utxo_count) {
            Some(remaining) => {
                self.count = remaining;
                Ok(())
            }
            None => {
                dbg!(self.count, utxo_count);
                Err(eyre!("self.count smaller than utxo_count"))
            }
        }
    }
}

View File

@@ -0,0 +1,7 @@
mod address;
mod any;
mod utxo;
pub use address::*;
pub use any::*;
pub use utxo::*;

View File

@@ -0,0 +1,107 @@
use allocative::Allocative;
use crate::{
states::{DurableStates, OneShotStates, PriceToValue, UnrealizedState},
structs::{Price, WAmount},
};
/// Durable state of one UTXO cohort: aggregate totals plus the cohort's
/// supply bucketed by price paid.
#[derive(Default, Debug, Allocative)]
pub struct UTXOCohortDurableStates {
    pub durable_states: DurableStates,
    pub price_to_amount: PriceToValue<WAmount>,
}
impl UTXOCohortDurableStates {
    /// Add a tranche (`amount` across `utxo_count` outputs, created at
    /// `price`) to this cohort.
    pub fn increment(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, true)
    }

    /// Remove a tranche (`amount` across `utxo_count` outputs, created at
    /// `price`) from this cohort.
    pub fn decrement(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        price: Price,
    ) -> color_eyre::Result<()> {
        self._crement(amount, utxo_count, price, false)
    }

    /// Shared increment/decrement body: updates the aggregate durable totals
    /// then the price-bucketed supply map, propagating any consistency error.
    pub fn _crement(
        &mut self,
        amount: WAmount,
        utxo_count: usize,
        price: Price,
        increment: bool,
    ) -> color_eyre::Result<()> {
        let realized_cap = price * amount;

        if increment {
            self.durable_states
                .increment(amount, utxo_count, realized_cap)
        } else {
            self.durable_states
                .decrement(amount, utxo_count, realized_cap)
        }
        .inspect_err(|report| {
            dbg!(report, "split all failed", amount, utxo_count);
        })?;

        // Supply is bucketed under the price returned by `to_significant()`,
        // so increment and decrement land in the same bucket for a given price.
        let rounded_price = price.to_significant();

        if increment {
            self.price_to_amount.increment(rounded_price, amount);
        } else {
            self.price_to_amount
                .decrement(rounded_price, amount)
                .inspect_err(|report| {
                    dbg!(
                        report,
                        "cents_to_amount decrement failed",
                        amount,
                        utxo_count
                    );
                })?;
        }

        Ok(())
    }

    /// Walk the price-bucketed supply (ascending price) and fold it into
    /// percentile and unrealized P&L states at `block_price`, plus date-close
    /// states when `date_price` is given.
    pub fn compute_one_shot_states(
        &self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> OneShotStates {
        let mut one_shot_states = OneShotStates::default();

        // Only allocate the date state when a date price was supplied.
        if date_price.is_some() {
            one_shot_states
                .unrealized_date_state
                .replace(UnrealizedState::default());
        }

        let supply = self.durable_states.supply_state.supply;

        let one_shot_states_ref = &mut one_shot_states;

        self.price_to_amount.iterate(supply, |price_paid, amount| {
            one_shot_states_ref
                .price_paid_state
                .iterate(price_paid, amount, supply);

            one_shot_states_ref
                .unrealized_block_state
                .iterate(price_paid, block_price, amount);

            if let Some(unrealized_date_state) = one_shot_states_ref.unrealized_date_state.as_mut()
            {
                // `unwrap` is safe: the state only exists when date_price is Some.
                unrealized_date_state.iterate(price_paid, date_price.unwrap(), amount);
            }
        });

        one_shot_states
    }
}

View File

@@ -0,0 +1,32 @@
/// Cohort membership filter: an age window in days, or a creation year.
pub enum UTXOFilter {
    /// Strictly younger than `to` days.
    To(u32),
    /// At least `from` days old and strictly younger than `to` days.
    FromTo { from: u32, to: u32 },
    /// At least `from` days old.
    From(u32),
    /// Created during the given calendar year.
    Year(u32),
}

impl UTXOCheck for UTXOFilter {
    /// Check a UTXO against this filter given its age in days and its
    /// creation year.
    fn check(&self, days_old: &u32, year: &u32) -> bool {
        match self {
            // Age-based filters delegate to the shared age check so the
            // boundary comparisons live in a single place.
            UTXOFilter::From(_) | UTXOFilter::To(_) | UTXOFilter::FromTo { .. } => {
                self.check_days_old(days_old)
            }
            UTXOFilter::Year(filter_year) => filter_year == year,
        }
    }

    /// Age-only check; must never be called on a `Year` filter.
    fn check_days_old(&self, days_old: &u32) -> bool {
        match self {
            UTXOFilter::From(from) => from <= days_old,
            UTXOFilter::To(to) => to > days_old,
            UTXOFilter::FromTo { from, to } => from <= days_old && to > days_old,
            UTXOFilter::Year(_) => unreachable!(),
        }
    }
}

/// Filter predicate over a UTXO's age (in days) and creation year.
pub trait UTXOCheck {
    fn check(&self, days_old: &u32, year: &u32) -> bool;
    fn check_days_old(&self, days_old: &u32) -> bool;
}

View File

@@ -0,0 +1,84 @@
use super::{SplitByUTXOCohort, UTXOFilter};
/// Filter definition for every UTXO cohort. Age buckets use day thresholds
/// (a month counts as 30 days, a year as 365); `sth`/`lth` (short/long-term
/// holders) split at 155 days.
pub const UTXO_FILTERS: SplitByUTXOCohort<UTXOFilter> = SplitByUTXOCohort {
    up_to_1d: UTXOFilter::To(1),
    up_to_1w: UTXOFilter::To(7),
    up_to_1m: UTXOFilter::To(30),
    up_to_2m: UTXOFilter::To(2 * 30),
    up_to_3m: UTXOFilter::To(3 * 30),
    up_to_4m: UTXOFilter::To(4 * 30),
    up_to_5m: UTXOFilter::To(5 * 30),
    up_to_6m: UTXOFilter::To(6 * 30),
    up_to_1y: UTXOFilter::To(365),
    up_to_2y: UTXOFilter::To(2 * 365),
    up_to_3y: UTXOFilter::To(3 * 365),
    up_to_5y: UTXOFilter::To(5 * 365),
    up_to_7y: UTXOFilter::To(7 * 365),
    up_to_10y: UTXOFilter::To(10 * 365),
    up_to_15y: UTXOFilter::To(15 * 365),
    from_1d_to_1w: UTXOFilter::FromTo { from: 1, to: 7 },
    from_1w_to_1m: UTXOFilter::FromTo { from: 7, to: 30 },
    from_1m_to_3m: UTXOFilter::FromTo {
        from: 30,
        to: 3 * 30,
    },
    from_3m_to_6m: UTXOFilter::FromTo {
        from: 3 * 30,
        to: 6 * 30,
    },
    from_6m_to_1y: UTXOFilter::FromTo {
        from: 6 * 30,
        to: 365,
    },
    from_1y_to_2y: UTXOFilter::FromTo {
        from: 365,
        to: 2 * 365,
    },
    from_2y_to_3y: UTXOFilter::FromTo {
        from: 2 * 365,
        to: 3 * 365,
    },
    from_3y_to_5y: UTXOFilter::FromTo {
        from: 3 * 365,
        to: 5 * 365,
    },
    from_5y_to_7y: UTXOFilter::FromTo {
        from: 5 * 365,
        to: 7 * 365,
    },
    from_7y_to_10y: UTXOFilter::FromTo {
        from: 7 * 365,
        to: 10 * 365,
    },
    from_10y_to_15y: UTXOFilter::FromTo {
        from: 10 * 365,
        to: 15 * 365,
    },
    from_1y: UTXOFilter::From(365),
    from_2y: UTXOFilter::From(2 * 365),
    from_4y: UTXOFilter::From(4 * 365),
    from_10y: UTXOFilter::From(10 * 365),
    from_15y: UTXOFilter::From(15 * 365),
    year_2009: UTXOFilter::Year(2009),
    year_2010: UTXOFilter::Year(2010),
    year_2011: UTXOFilter::Year(2011),
    year_2012: UTXOFilter::Year(2012),
    year_2013: UTXOFilter::Year(2013),
    year_2014: UTXOFilter::Year(2014),
    year_2015: UTXOFilter::Year(2015),
    year_2016: UTXOFilter::Year(2016),
    year_2017: UTXOFilter::Year(2017),
    year_2018: UTXOFilter::Year(2018),
    year_2019: UTXOFilter::Year(2019),
    year_2020: UTXOFilter::Year(2020),
    year_2021: UTXOFilter::Year(2021),
    year_2022: UTXOFilter::Year(2022),
    year_2023: UTXOFilter::Year(2023),
    year_2024: UTXOFilter::Year(2024),
    sth: UTXOFilter::To(155),
    lth: UTXOFilter::From(155),
};

View File

@@ -0,0 +1,119 @@
use allocative::Allocative;
/// Identifier for every UTXO cohort: age buckets (`UpTo*`, `From*To*`,
/// `From*`), creation-year buckets, and the short/long-term holder split.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Allocative)]
pub enum UTXOCohortId {
    #[default]
    UpTo1d,
    UpTo1w,
    UpTo1m,
    UpTo2m,
    UpTo3m,
    UpTo4m,
    UpTo5m,
    UpTo6m,
    UpTo1y,
    UpTo2y,
    UpTo3y,
    UpTo5y,
    UpTo7y,
    UpTo10y,
    UpTo15y,
    From1dTo1w,
    From1wTo1m,
    From1mTo3m,
    From3mTo6m,
    From6mTo1y,
    From1yTo2y,
    From2yTo3y,
    From3yTo5y,
    From5yTo7y,
    From7yTo10y,
    From10yTo15y,
    From1y,
    From2y,
    From4y,
    From10y,
    From15y,
    Year2009,
    Year2010,
    Year2011,
    Year2012,
    Year2013,
    Year2014,
    Year2015,
    Year2016,
    Year2017,
    Year2018,
    Year2019,
    Year2020,
    Year2021,
    Year2022,
    Year2023,
    Year2024,
    ShortTermHolders,
    LongTermHolders,
}
impl UTXOCohortId {
    /// Stable snake_case string identifier for this cohort.
    pub fn name(&self) -> &str {
        match self {
            Self::UpTo1d => "up_to_1d",
            Self::UpTo1w => "up_to_1w",
            Self::UpTo1m => "up_to_1m",
            Self::UpTo2m => "up_to_2m",
            Self::UpTo3m => "up_to_3m",
            Self::UpTo4m => "up_to_4m",
            Self::UpTo5m => "up_to_5m",
            Self::UpTo6m => "up_to_6m",
            Self::UpTo1y => "up_to_1y",
            Self::UpTo2y => "up_to_2y",
            Self::UpTo3y => "up_to_3y",
            Self::UpTo5y => "up_to_5y",
            Self::UpTo7y => "up_to_7y",
            Self::UpTo10y => "up_to_10y",
            Self::UpTo15y => "up_to_15y",
            Self::From1dTo1w => "from_1d_to_1w",
            Self::From1wTo1m => "from_1w_to_1m",
            Self::From1mTo3m => "from_1m_to_3m",
            Self::From3mTo6m => "from_3m_to_6m",
            Self::From6mTo1y => "from_6m_to_1y",
            Self::From1yTo2y => "from_1y_to_2y",
            Self::From2yTo3y => "from_2y_to_3y",
            Self::From3yTo5y => "from_3y_to_5y",
            Self::From5yTo7y => "from_5y_to_7y",
            Self::From7yTo10y => "from_7y_to_10y",
            Self::From10yTo15y => "from_10y_to_15y",
            Self::From1y => "from_1y",
            Self::From2y => "from_2y",
            Self::From4y => "from_4y",
            Self::From10y => "from_10y",
            Self::From15y => "from_15y",
            Self::Year2009 => "year_2009",
            Self::Year2010 => "year_2010",
            Self::Year2011 => "year_2011",
            Self::Year2012 => "year_2012",
            Self::Year2013 => "year_2013",
            Self::Year2014 => "year_2014",
            Self::Year2015 => "year_2015",
            Self::Year2016 => "year_2016",
            Self::Year2017 => "year_2017",
            Self::Year2018 => "year_2018",
            Self::Year2019 => "year_2019",
            Self::Year2020 => "year_2020",
            Self::Year2021 => "year_2021",
            Self::Year2022 => "year_2022",
            Self::Year2023 => "year_2023",
            Self::Year2024 => "year_2024",
            Self::ShortTermHolders => "sth",
            Self::LongTermHolders => "lth",
        }
    }
}

View File

@@ -0,0 +1,154 @@
use allocative::Allocative;
use chrono::Datelike;
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use crate::{
states::DateDataVec,
structs::{BlockData, Price, SentData, WAmount},
utils::difference_in_days_between_timestamps,
WNaiveDate,
};
use super::{SplitByUTXOCohort, UTXOCohortDurableStates, UTXOCohortsOneShotStates};
/// Durable states for every UTXO cohort, carried forward across blocks.
#[derive(Default, Deref, DerefMut, Allocative)]
pub struct UTXOCohortsDurableStates(SplitByUTXOCohort<UTXOCohortDurableStates>);
impl UTXOCohortsDurableStates {
    /// Rebuild all cohort states by replaying every block's remaining supply,
    /// with ages computed relative to the last known block.
    pub fn init(date_data_vec: &DateDataVec) -> Self {
        let mut s = Self::default();

        if let Some(last_date_data) = date_data_vec.last() {
            let last_block_data = last_date_data.blocks.last().unwrap();

            date_data_vec.iter().for_each(|date_data| {
                let year = date_data.date.year() as u32;

                date_data.blocks.iter().for_each(|block_data| {
                    let amount = block_data.amount;
                    let utxo_count = block_data.utxos as usize;

                    // No need to either insert or remove if 0
                    if amount == WAmount::ZERO {
                        return;
                    }

                    let increment_days_old = difference_in_days_between_timestamps(
                        block_data.timestamp,
                        last_block_data.timestamp,
                    );

                    s.initial_filtered_apply(&increment_days_old, &year, |state| {
                        state
                            .increment(amount, utxo_count, block_data.price)
                            .unwrap();
                    });
                })
            });
        }

        s
    }

    /// Migrate a block's remaining supply between age cohorts when its age in
    /// days (relative to the chain tip) has changed since the previous tip.
    /// NOTE(review): name keeps the original "udpate" spelling — renaming
    /// would break existing callers.
    pub fn udpate_age_if_needed(
        &mut self,
        block_data: &BlockData,
        last_block_data: &BlockData,
        previous_last_block_data: Option<&BlockData>,
    ) {
        let amount = block_data.amount;
        let utxo_count = block_data.utxos as usize;
        let price = block_data.price;

        // No need to either insert or remove if 0
        if amount == WAmount::ZERO {
            return;
        }

        if block_data.height == last_block_data.height {
            // Brand-new tip block: insert at age 0, no removal needed.
            let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32;

            self.initial_filtered_apply(&0, &year, |state| {
                state.increment(amount, utxo_count, price).unwrap();
            })
        } else {
            let increment_days_old = difference_in_days_between_timestamps(
                block_data.timestamp,
                last_block_data.timestamp,
            );

            // A non-tip block must have had a previous tip to age against.
            let decrement_days_old = difference_in_days_between_timestamps(
                block_data.timestamp,
                previous_last_block_data
                    .unwrap_or_else(|| {
                        dbg!(block_data, last_block_data, previous_last_block_data);
                        panic!()
                    })
                    .timestamp,
            );

            // Same age in days means same cohorts: nothing to move.
            if increment_days_old == decrement_days_old {
                return;
            }

            self.duo_filtered_apply(
                &increment_days_old,
                &decrement_days_old,
                |state| {
                    state.increment(amount, utxo_count, price).unwrap();
                },
                |state| {
                    state.decrement(amount, utxo_count, price).unwrap();
                },
            );
        }
    }

    /// Remove supply spent this block (`sent_data`) from the cohorts it
    /// belonged to, aged against the previous tip.
    pub fn subtract_moved(
        &mut self,
        block_data: &BlockData,
        sent_data: &SentData,
        previous_last_block_data: &BlockData,
    ) {
        let amount = sent_data.volume;
        let utxo_count = sent_data.count as usize;

        // No need to either insert or remove if 0
        if amount == WAmount::ZERO {
            return;
        }

        let days_old = difference_in_days_between_timestamps(
            block_data.timestamp,
            previous_last_block_data.timestamp,
        );

        let year = WNaiveDate::from_timestamp(block_data.timestamp).year() as u32;

        self.initial_filtered_apply(&days_old, &year, |state| {
            state
                .decrement(amount, utxo_count, block_data.price)
                .unwrap();
        })
    }

    /// Compute every cohort's one-shot states in parallel and gather them
    /// into a fresh result struct.
    pub fn compute_one_shot_states(
        &mut self,
        block_price: Price,
        date_price: Option<Price>,
    ) -> UTXOCohortsOneShotStates {
        let mut one_shot_states = UTXOCohortsOneShotStates::default();

        self.as_vec()
            .into_par_iter()
            .map(|(states, id)| (states.compute_one_shot_states(block_price, date_price), id))
            .collect::<Vec<_>>()
            .into_iter()
            .for_each(|(states, id)| {
                *one_shot_states.get_mut(&id) = states;
            });

        one_shot_states
    }
}

View File

@@ -0,0 +1,8 @@
use derive_deref::{Deref, DerefMut};
use crate::states::OneShotStates;
use super::SplitByUTXOCohort;
/// One-shot (recomputed each block) states for every UTXO cohort.
#[derive(Deref, DerefMut, Default)]
pub struct UTXOCohortsOneShotStates(pub SplitByUTXOCohort<OneShotStates>);

View File

@@ -0,0 +1,68 @@
use std::{cmp::Ordering, collections::BTreeMap};
use chrono::Datelike;
use derive_deref::{Deref, DerefMut};
use crate::{
states::{DateDataVec, InputState, RealizedState},
structs::{BlockPath, Price, SentData},
utils::difference_in_days_between_timestamps,
};
use super::SplitByUTXOCohort;
/// Per-cohort spend summary for a block: input totals plus realized P&L.
#[derive(Default, Debug)]
pub struct SentState {
    pub input: InputState,
    pub realized: RealizedState,
}
/// Spend summaries for every UTXO cohort.
#[derive(Deref, DerefMut, Default)]
pub struct UTXOCohortsSentStates(SplitByUTXOCohort<SentState>);
impl UTXOCohortsSentStates {
    /// Accumulate, per cohort, the inputs spent this block
    /// (`block_path_to_sent_data`) and the profit or loss realized against
    /// `current_price`, using each source block's price as cost basis.
    pub fn compute(
        &mut self,
        date_data_vec: &DateDataVec,
        block_path_to_sent_data: &BTreeMap<BlockPath, SentData>,
        current_price: Price,
    ) {
        if let Some(last_block_data) = date_data_vec.last_block() {
            block_path_to_sent_data
                .iter()
                .for_each(|(block_path, sent_data)| {
                    let date_data = date_data_vec.get_date_data(block_path).unwrap();
                    let year = date_data.date.year() as u32;
                    let block_data = date_data.get_block_data(block_path).unwrap();

                    // Age of the spent coins relative to the chain tip.
                    let days_old = difference_in_days_between_timestamps(
                        block_data.timestamp,
                        last_block_data.timestamp,
                    );

                    let previous_price = block_data.price;
                    let amount_sent = sent_data.volume;

                    self.initial_filtered_apply(&days_old, &year, |state| {
                        state.input.iterate(sent_data.count as f64, amount_sent);

                        // Realized P&L: value at cost basis vs value now.
                        let previous_value = previous_price * amount_sent;
                        let current_value = current_price * amount_sent;

                        match previous_value.cmp(&current_value) {
                            Ordering::Less => {
                                state.realized.realized_profit += current_value - previous_value;
                            }
                            Ordering::Greater => {
                                state.realized.realized_loss += previous_value - current_value;
                            }
                            Ordering::Equal => {}
                        }
                    })
                })
        }
    }
}

View File

@@ -0,0 +1,17 @@
mod cohort_durable_states;
mod cohort_filter;
mod cohort_filters;
mod cohort_id;
mod cohorts_durable_states;
mod cohorts_one_shot_states;
mod cohorts_sent_states;
mod split_by_utxo_cohort;
pub use cohort_durable_states::*;
pub use cohort_filter::*;
pub use cohort_filters::*;
pub use cohort_id::*;
pub use cohorts_durable_states::*;
pub use cohorts_one_shot_states::*;
pub use cohorts_sent_states::*;
pub use split_by_utxo_cohort::*;

Some files were not shown because too many files have changed in this diff Show More