global: snapshot

This commit is contained in:
nym21
2025-06-26 16:40:29 +02:00
parent 903e69ff77
commit b7f51b03bc
45 changed files with 3237 additions and 1982 deletions

2
Cargo.lock generated
View File

@@ -509,8 +509,10 @@ dependencies = [
"brk_logger",
"brk_parser",
"brk_state",
"brk_store",
"brk_vec",
"color-eyre",
"derive_deref",
"fjall",
"jiff",
"log",

Binary file not shown.

Before

Width:  |  Height:  |  Size: 136 KiB

After

Width:  |  Height:  |  Size: 133 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 253 KiB

After

Width:  |  Height:  |  Size: 263 KiB

View File

@@ -17,8 +17,10 @@ brk_indexer = { workspace = true }
brk_logger = { workspace = true }
brk_parser = { workspace = true }
brk_state = { workspace = true }
brk_store = { workspace = true }
brk_vec = { workspace = true }
color-eyre = { workspace = true }
derive_deref = { workspace = true }
fjall = { workspace = true }
jiff = { workspace = true }
log = { workspace = true }

View File

@@ -1,14 +1,32 @@
use std::path::Path;
use brk_core::Version;
use brk_core::{P2AAddressIndex, P2MSOutputIndex, Version};
use brk_store::Store;
use fjall::TransactionalKeyspace;
use jiff::Unit;
const _VERSION: Version = Version::ZERO;
/// Key/value stores backing address- and output-level UTXO bookkeeping.
///
/// NOTE(review): every field is currently commented out, so this is an empty
/// placeholder — presumably these stores are being re-introduced one at a
/// time; confirm before relying on any of them existing.
#[derive(Clone)]
pub struct Stores {
    // pub address_to_utxos_received: Store<AddressIndexOutputIndex, Unit>,
    // pub address_to_utxos_spent: Store<AddressIndexOutputIndex, Unit>,
    // pub p2aaddressindex_to_utxos_received: Store<P2AAddressIndex, Unit>,
    // pub p2aaddressindex_to_utxos_sent: Store<P2AAddressIndex, Unit>,
    // pub p2msoutputindex_to_utxos_received: Store<P2MSOutputIndex, Unit>,
    // pub p2msoutputindex_to_utxos_sent: Store<P2MSOutputIndex, Unit>,
    // pub p2pk33addressindex_to_utxos_received: Store<P2PK33AddressIndex, Unit>,
    // pub p2pk33addressindex_to_utxos_sent: Store<P2PK33AddressIndex, Unit>,
    // pub p2pk65addressindex_to_utxos_received: Store<P2PK65AddressIndex, Unit>,
    // pub p2pk65addressindex_to_utxos_sent: Store<P2PK65AddressIndex, Unit>,
    // pub p2pkhaddressindex_to_utxos_received: Store<P2PKHAddressIndex, Unit>,
    // pub p2pkhaddressindex_to_utxos_sent: Store<P2PKHAddressIndex, Unit>,
    // pub p2shaddressindex_to_utxos_received: Store<P2SHAddressIndex, Unit>,
    // pub p2shaddressindex_to_utxos_sent: Store<P2SHAddressIndex, Unit>,
    // pub p2traddressindex_to_utxos_received: Store<P2TRAddressIndex, Unit>,
    // pub p2traddressindex_to_utxos_sent: Store<P2TRAddressIndex, Unit>,
    // pub p2wpkhaddressindex_to_utxos_received: Store<P2WPKHAddressIndex, Unit>,
    // pub p2wpkhaddressindex_to_utxos_sent: Store<P2WPKHAddressIndex, Unit>,
    // pub p2wshaddressindex_to_utxos_received: Store<P2WSHAddressIndex, Unit>,
    // pub p2wshaddressindex_to_utxos_sent: Store<P2WSHAddressIndex, Unit>,
}
impl Stores {

View File

@@ -0,0 +1,213 @@
use std::{ops::Deref, path::Path};
use brk_core::{Bitcoin, DateIndex, Dollars, Height, Result, StoredUsize, Version};
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_state::{AddressCohortState, CohortStateTrait};
use brk_vec::{AnyCollectableVec, AnyIterableVec, AnyVec, Computation, EagerVec, Format};
use crate::vecs::{
Indexes, fetched, indexes, market,
stateful::{common, r#trait::CohortVecs},
};
const VERSION: Version = Version::ZERO;
/// Vector bundle for one *address* cohort: the cohort's live state plus an
/// address-count series layered on top of the shared `common::Vecs`.
#[derive(Clone)]
pub struct Vecs {
    // First height still to be processed for this cohort; set by `init`.
    starting_height: Height,
    // Live cohort state (supply, price_to_amount, address_count, ...).
    pub state: AddressCohortState,
    // Number of addresses in the cohort, indexed by block height.
    pub height_to_address_count: EagerVec<Height, StoredUsize>,
    // Series common to all cohort kinds; also reachable through `Deref`.
    pub inner: common::Vecs,
}
impl CohortVecs for Vecs {
    /// Opens (or creates) every on-disk vec for this cohort.
    ///
    /// `cohort_name` prefixes each vec's file name; `None` means the
    /// all-addresses cohort. Dollar-denominated series are only created when
    /// `fetched` price data is available.
    #[allow(clippy::too_many_arguments)]
    fn forced_import(
        path: &Path,
        cohort_name: Option<&str>,
        computation: Computation,
        format: Format,
        version: Version,
        fetched: Option<&fetched::Vecs>,
        states_path: &Path,
        compute_relative_to_all: bool,
    ) -> color_eyre::Result<Self> {
        let compute_dollars = fetched.is_some();
        // e.g. suffix("address_count") -> "0sats_address_count" for cohort "0sats".
        let suffix = |s: &str| cohort_name.map_or(s.to_string(), |name| format!("{name}_{s}"));
        Ok(Self {
            starting_height: Height::ZERO,
            state: AddressCohortState::default_and_import(
                states_path,
                cohort_name.unwrap_or_default(),
                compute_dollars,
            )?,
            height_to_address_count: EagerVec::forced_import(
                path,
                &suffix("address_count"),
                version + VERSION + Version::ZERO,
                format,
            )?,
            inner: common::Vecs::forced_import(
                path,
                cohort_name,
                computation,
                format,
                version,
                fetched,
                compute_relative_to_all,
            )?,
        })
    }

    /// First height that still needs processing: the minimum of what the
    /// persisted state, the address-count vec, and the inner vecs have.
    fn starting_height(&self) -> Height {
        [
            self.state
                .price_to_amount
                .height()
                // No committed state yet -> MAX so the other sources decide.
                .map_or(Height::MAX, |h| h.incremented()),
            self.height_to_address_count.len().into(),
            self.inner.starting_height(),
        ]
        .into_iter()
        .min()
        .unwrap()
    }

    /// Records the resume height and seeds `state` from the inner vecs.
    fn init(&mut self, starting_height: Height) {
        // Starting past what is already on disk would mean skipped blocks.
        if starting_height > self.starting_height() {
            unreachable!()
        }
        self.starting_height = starting_height;
        self.inner.init(&mut self.starting_height, &mut self.state);
    }

    fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
        self.height_to_address_count
            .validate_computed_version_or_reset_file(
                base_version + self.height_to_address_count.inner_version(),
            )?;
        self.inner.validate_computed_versions(base_version)
    }

    /// Pushes the current per-block values; no-op below the resume height.
    fn forced_pushed_at(&mut self, height: Height, exit: &Exit) -> Result<()> {
        if self.starting_height > height {
            return Ok(());
        }
        self.height_to_address_count.forced_push_at(
            height,
            self.state.address_count.into(),
            exit,
        )?;
        self.inner.forced_pushed_at(height, exit, &self.state)
    }

    /// Delegates unrealized P&L computation to the shared vecs, passing our
    /// cohort state read-only.
    fn compute_then_force_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
        exit: &Exit,
    ) -> Result<()> {
        self.inner.compute_then_force_push_unrealized_states(
            height,
            height_price,
            dateindex,
            date_price,
            exit,
            &self.state,
        )
    }

    /// Flushes our extra vec, then lets the inner vecs flush and commit state.
    fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
        self.height_to_address_count.safe_flush(exit)?;
        self.inner
            .safe_flush_stateful_vecs(height, exit, &mut self.state)
    }

    /// Derives this cohort's series as the sum over `others` (used for
    /// aggregate cohorts built from disjoint sub-cohorts).
    fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()> {
        self.height_to_address_count.compute_sum_of_others(
            starting_indexes.height,
            others
                .iter()
                .map(|v| &v.height_to_address_count)
                .collect::<Vec<_>>()
                .as_slice(),
            exit,
        )?;
        self.inner.compute_from_stateful(
            starting_indexes,
            &others.iter().map(|v| &v.inner).collect::<Vec<_>>(),
            exit,
        )
    }

    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part1(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> color_eyre::Result<()> {
        self.inner
            .compute_rest_part1(indexer, indexes, fetched, starting_indexes, exit)
    }

    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part2(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        market: &market::Vecs,
        height_to_supply: &impl AnyIterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl AnyIterableVec<DateIndex, Bitcoin>,
        height_to_realized_cap: Option<&impl AnyIterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl AnyIterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> color_eyre::Result<()> {
        self.inner.compute_rest_part2(
            indexer,
            indexes,
            fetched,
            starting_indexes,
            market,
            height_to_supply,
            dateindex_to_supply,
            height_to_realized_cap,
            dateindex_to_realized_cap,
            exit,
        )
    }

    /// All collectable vecs: the shared ones plus our address-count series.
    fn vecs(&self) -> Vec<&dyn AnyCollectableVec> {
        [self.inner.vecs(), vec![&self.height_to_address_count]].concat()
    }
}
/// NOTE(review): `Deref` to `common::Vecs` is used here for call-through
/// convenience (inheritance-like access to the shared vecs). `inner` is also
/// `pub`, so both access paths exist — a known Deref-polymorphism trade-off.
impl Deref for Vecs {
    type Target = common::Vecs;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

View File

@@ -0,0 +1,279 @@
use std::path::Path;
use brk_core::{
AddressGroups, GroupFilter, GroupedByFromSize, GroupedBySizeRange, GroupedByUpToSize, Version,
};
use brk_vec::{Computation, Format};
use crate::vecs::{
fetched,
stateful::{address_cohort, r#trait::CohortVecs},
};
const VERSION: Version = Version::new(0);
/// Every address cohort, grouped three ways (exclusive size ranges plus the
/// overlapping up-to / from groupings), each paired with its `GroupFilter`.
#[derive(Clone)]
pub struct Vecs(AddressGroups<(GroupFilter, address_cohort::Vecs)>);
impl Vecs {
pub fn forced_import(
path: &Path,
version: Version,
_computation: Computation,
format: Format,
fetched: Option<&fetched::Vecs>,
states_path: &Path,
) -> color_eyre::Result<Self> {
Ok(Self(
AddressGroups {
by_size_range: GroupedBySizeRange {
_0sats: address_cohort::Vecs::forced_import(
path,
Some("0sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_1sat_to_10sats: address_cohort::Vecs::forced_import(
path,
Some("from_1sat_to_10sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_10sats_to_100sats: address_cohort::Vecs::forced_import(
path,
Some("from_10sats_to_100sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_100sats_to_1_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_100sats_to_1_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_1_000sats_to_10_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_1_000sats_to_10_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_10_000sats_to_100_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_10_000sats_to_100_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_100_000sats_to_1_000_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_100_000sats_to_1_000_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_1_000_000sats_to_10_000_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_1_000_000sats_to_10_000_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_10_000_000sats_to_1btc: address_cohort::Vecs::forced_import(
path,
Some("from_10_000_000sats_to_1btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_1btc_to_10btc: address_cohort::Vecs::forced_import(
path,
Some("from_1btc_to_10btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_10btc_to_100btc: address_cohort::Vecs::forced_import(
path,
Some("from_10btc_to_100btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_100btc_to_1_000btc: address_cohort::Vecs::forced_import(
path,
Some("from_100btc_to_1_000btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_1_000btc_to_10_000btc: address_cohort::Vecs::forced_import(
path,
Some("from_1_000btc_to_10_000btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_10_000btc_to_100_000btc: address_cohort::Vecs::forced_import(
path,
Some("from_10_000btc_to_100_000btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
from_100_000btc: address_cohort::Vecs::forced_import(
path,
Some("from_100_000btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
},
by_up_to_size: GroupedByUpToSize {
_1_000sats: address_cohort::Vecs::forced_import(
path,
Some("up_to_1_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_10_000sats: address_cohort::Vecs::forced_import(
path,
Some("up_to_10_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_1btc: address_cohort::Vecs::forced_import(
path,
Some("up_to_1btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_10btc: address_cohort::Vecs::forced_import(
path,
Some("up_to_10btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_100btc: address_cohort::Vecs::forced_import(
path,
Some("up_to_100btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
},
by_from_size: GroupedByFromSize {
_1_000sats: address_cohort::Vecs::forced_import(
path,
Some("from_1_000sats"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_1btc: address_cohort::Vecs::forced_import(
path,
Some("from_1btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_10btc: address_cohort::Vecs::forced_import(
path,
Some("from_10btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
_100btc: address_cohort::Vecs::forced_import(
path,
Some("from_100btc"),
_computation,
format,
version + VERSION + Version::ZERO,
fetched,
states_path,
true,
)?,
},
}
.into(),
))
}
}

View File

@@ -1,4 +1,3 @@
use core::panic;
use std::path::Path;
use brk_core::{
@@ -6,10 +5,9 @@ use brk_core::{
};
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_state::CohortState;
use brk_state::{CohortState, CohortStateTrait};
use brk_vec::{
AnyCollectableVec, AnyIterableVec, AnyVec, Computation, EagerVec, Format, StoredIndex,
VecIterator,
AnyCollectableVec, AnyIterableVec, AnyVec, Computation, EagerVec, Format, VecIterator,
};
use crate::vecs::{
@@ -25,9 +23,6 @@ const VERSION: Version = Version::ZERO;
#[derive(Clone)]
pub struct Vecs {
starting_height: Height,
pub state: CohortState,
// Cumulative
pub height_to_realized_cap: Option<EagerVec<Height, Dollars>>,
pub height_to_supply: EagerVec<Height, Sats>,
@@ -138,7 +133,6 @@ impl Vecs {
format: Format,
version: Version,
fetched: Option<&fetched::Vecs>,
states_path: &Path,
compute_relative_to_all: bool,
) -> color_eyre::Result<Self> {
let compute_dollars = fetched.is_some();
@@ -147,15 +141,7 @@ impl Vecs {
let suffix = |s: &str| cohort_name.map_or(s.to_string(), |name| format!("{name}_{s}"));
let state = CohortState::default_and_import(
states_path,
cohort_name.unwrap_or_default(),
compute_dollars,
)?;
Ok(Self {
starting_height: Height::ZERO,
state,
height_to_supply_in_profit: compute_dollars.then(|| {
EagerVec::forced_import(
@@ -978,10 +964,6 @@ impl Vecs {
pub fn starting_height(&self) -> Height {
[
self.state
.price_to_amount
.height()
.map_or(usize::MAX, |h| h.incremented().unwrap_to_usize()),
self.height_to_supply.len(),
self.height_to_utxo_count.len(),
self.height_to_realized_cap
@@ -1035,25 +1017,19 @@ impl Vecs {
.unwrap()
}
pub fn init(&mut self, starting_height: Height) {
if starting_height > self.starting_height() {
unreachable!()
}
self.starting_height = starting_height;
pub fn init(&mut self, starting_height: &mut Height, state: &mut CohortState) {
if let Some(prev_height) = starting_height.decremented() {
self.state.supply.value = self
state.supply.value = self
.height_to_supply
.into_iter()
.unwrap_get_inner(prev_height);
self.state.supply.utxos = *self
state.supply.utxos = *self
.height_to_utxo_count
.into_iter()
.unwrap_get_inner(prev_height);
if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
self.state.realized.as_mut().unwrap().cap = height_to_realized_cap
state.realized.as_mut().unwrap().cap = height_to_realized_cap
.into_iter()
.unwrap_get_inner(prev_height);
}
@@ -1286,35 +1262,33 @@ impl Vecs {
Ok(())
}
pub fn forced_pushed_at(&mut self, height: Height, exit: &Exit) -> Result<()> {
if self.starting_height > height {
return Ok(());
}
pub fn forced_pushed_at(
&mut self,
height: Height,
exit: &Exit,
state: &CohortState,
) -> Result<()> {
self.height_to_supply
.forced_push_at(height, self.state.supply.value, exit)?;
.forced_push_at(height, state.supply.value, exit)?;
self.height_to_utxo_count.forced_push_at(
height,
StoredUsize::from(self.state.supply.utxos),
StoredUsize::from(state.supply.utxos),
exit,
)?;
self.height_to_satblocks_destroyed.forced_push_at(
height,
self.state.satblocks_destroyed,
state.satblocks_destroyed,
exit,
)?;
self.height_to_satdays_destroyed.forced_push_at(
height,
self.state.satdays_destroyed,
exit,
)?;
self.height_to_satdays_destroyed
.forced_push_at(height, state.satdays_destroyed, exit)?;
if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
let realized = self.state.realized.as_ref().unwrap_or_else(|| {
dbg!((&self.state.realized, &self.state.supply));
let realized = state.realized.as_ref().unwrap_or_else(|| {
dbg!((&state.realized, &state.supply));
panic!();
});
@@ -1355,6 +1329,7 @@ impl Vecs {
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
exit: &Exit,
state: &CohortState,
) -> Result<()> {
if let Some(height_price) = height_price {
self.height_to_min_price_paid
@@ -1362,7 +1337,7 @@ impl Vecs {
.unwrap()
.forced_push_at(
height,
self.state
state
.price_to_amount
.first_key_value()
.map(|(&dollars, _)| dollars)
@@ -1374,7 +1349,7 @@ impl Vecs {
.unwrap()
.forced_push_at(
height,
self.state
state
.price_to_amount
.last_key_value()
.map(|(&dollars, _)| dollars)
@@ -1382,9 +1357,8 @@ impl Vecs {
exit,
)?;
let (height_unrealized_state, date_unrealized_state) = self
.state
.compute_unrealized_states(height_price, date_price.unwrap());
let (height_unrealized_state, date_unrealized_state) =
state.compute_unrealized_states(height_price, date_price.unwrap());
self.height_to_supply_even
.as_mut()
@@ -1436,7 +1410,12 @@ impl Vecs {
Ok(())
}
pub fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
pub fn safe_flush_stateful_vecs(
&mut self,
height: Height,
exit: &Exit,
state: &mut CohortState,
) -> Result<()> {
self.height_to_supply.safe_flush(exit)?;
self.height_to_utxo_count.safe_flush(exit)?;
self.height_to_satdays_destroyed.safe_flush(exit)?;
@@ -1518,7 +1497,7 @@ impl Vecs {
.safe_flush(exit)?;
}
self.state.commit(height)?;
state.commit(height)?;
Ok(())
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,14 +1,14 @@
use std::{collections::BTreeMap, ops::ControlFlow};
use brk_core::{CheckedSub, Dollars, HalvingEpoch, Height, Result, Timestamp};
use brk_core::{
CheckedSub, Dollars, GroupFilter, HalvingEpoch, Height, Result, Timestamp, UTXOGroups,
};
use brk_exit::Exit;
use brk_state::{BlockState, OutputFilter, Outputs, Transacted};
use brk_state::{BlockState, CohortStateTrait, Transacted};
use brk_vec::StoredIndex;
use rayon::prelude::*;
use crate::vecs::Indexes;
use super::cohort;
use crate::vecs::{Indexes, stateful::utxo_cohort};
pub trait OutputCohorts {
fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp);
@@ -17,7 +17,7 @@ pub trait OutputCohorts {
fn compute_overlapping_vecs(&mut self, starting_indexes: &Indexes, exit: &Exit) -> Result<()>;
}
impl OutputCohorts for Outputs<(OutputFilter, cohort::Vecs)> {
impl OutputCohorts for UTXOGroups<(GroupFilter, utxo_cohort::Vecs)> {
fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) {
if chain_state.is_empty() {
return;
@@ -92,10 +92,10 @@ impl OutputCohorts for Outputs<(OutputFilter, cohort::Vecs)> {
time_based_vecs
.iter_mut()
.filter(|(filter, _)| match filter {
OutputFilter::From(from) => *from <= days_old,
OutputFilter::To(to) => *to > days_old,
OutputFilter::Range(range) => range.contains(&days_old),
OutputFilter::Epoch(epoch) => *epoch == HalvingEpoch::from(height),
GroupFilter::From(from) => *from <= days_old,
GroupFilter::To(to) => *to > days_old,
GroupFilter::Range(range) => range.contains(&days_old),
GroupFilter::Epoch(epoch) => *epoch == HalvingEpoch::from(height),
_ => unreachable!(),
})
.for_each(|(_, vecs)| {
@@ -123,6 +123,7 @@ impl OutputCohorts for Outputs<(OutputFilter, cohort::Vecs)> {
);
sent.by_size_group
.as_typed_vec()
.into_iter()
.for_each(|(group, supply_state)| {
self.by_size_range.get_mut(group).1.state.send(
@@ -154,7 +155,7 @@ impl OutputCohorts for Outputs<(OutputFilter, cohort::Vecs)> {
.into_iter()
.for_each(|(filter, vecs)| {
let output_type = match filter {
OutputFilter::Type(output_type) => *output_type,
GroupFilter::Type(output_type) => *output_type,
_ => unreachable!(),
};
vecs.state.receive(received.by_type.get(output_type), price)
@@ -162,6 +163,7 @@ impl OutputCohorts for Outputs<(OutputFilter, cohort::Vecs)> {
received
.by_size_group
.as_typed_vec()
.into_iter()
.for_each(|(group, supply_state)| {
self.by_size_range

View File

@@ -0,0 +1,75 @@
use std::path::Path;
use brk_core::{Bitcoin, DateIndex, Dollars, Height, Result, Version};
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_vec::{AnyCollectableVec, AnyIterableVec, Computation, Format};
use crate::vecs::{Indexes, fetched, indexes, market};
/// Common lifecycle shared by address- and UTXO-cohort vec bundles:
/// import -> init -> per-block pushes -> flush -> derived computations.
pub trait CohortVecs: Sized {
    /// Opens (or creates) every on-disk vec for one cohort.
    #[allow(clippy::too_many_arguments)]
    fn forced_import(
        path: &Path,
        cohort_name: Option<&str>,
        computation: Computation,
        format: Format,
        version: Version,
        fetched: Option<&fetched::Vecs>,
        states_path: &Path,
        compute_relative_to_all: bool,
    ) -> color_eyre::Result<Self>;

    /// First height that still needs processing for this cohort.
    fn starting_height(&self) -> Height;

    /// Prepares in-memory state to resume from `starting_height`.
    fn init(&mut self, starting_height: Height);

    /// Checks persisted vec versions against `base_version`, resetting
    /// stale files.
    fn validate_computed_versions(&mut self, base_version: Version) -> Result<()>;

    /// Appends the cohort's current per-block values at `height`.
    fn forced_pushed_at(&mut self, height: Height, exit: &Exit) -> Result<()>;

    /// Computes unrealized P&L at `height` (and optionally per date) and
    /// pushes the results.
    fn compute_then_force_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
        exit: &Exit,
    ) -> Result<()>;

    /// Flushes stateful vecs and commits cohort state at `height`.
    fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()>;

    /// Derives this cohort's series as an aggregate over `others`.
    fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()>;

    /// First batch of derived (non-stateful) computations.
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part1(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> color_eyre::Result<()>;

    /// Second batch of derived computations that additionally needs market
    /// data, total supply, and (optionally) realized-cap series.
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part2(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        market: &market::Vecs,
        height_to_supply: &impl AnyIterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl AnyIterableVec<DateIndex, Bitcoin>,
        height_to_realized_cap: Option<&impl AnyIterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl AnyIterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> color_eyre::Result<()>;

    /// Every collectable vec owned by this cohort.
    fn vecs(&self) -> Vec<&dyn AnyCollectableVec>;
}

View File

@@ -0,0 +1,180 @@
use std::{ops::Deref, path::Path};
use brk_core::{Bitcoin, DateIndex, Dollars, Height, Result, Version};
use brk_exit::Exit;
use brk_indexer::Indexer;
use brk_state::{CohortStateTrait, UTXOCohortState};
use brk_vec::{AnyCollectableVec, AnyIterableVec, Computation, Format};
use crate::vecs::{
Indexes, fetched, indexes, market,
stateful::{common, r#trait::CohortVecs},
};
/// Vector bundle for one *UTXO* cohort: cohort state plus the shared
/// `common::Vecs`. Unlike the address cohort, there is no extra per-cohort
/// series, so everything beyond state tracking delegates to `inner`.
#[derive(Clone)]
pub struct Vecs {
    // First height still to be processed for this cohort; set by `init`.
    starting_height: Height,
    // Live cohort state (supply, price_to_amount, ...).
    pub state: UTXOCohortState,
    // Series common to all cohort kinds; also reachable through `Deref`.
    inner: common::Vecs,
}
impl CohortVecs for Vecs {
    /// Opens (or creates) every on-disk vec for this cohort. Dollar series
    /// are only created when `fetched` price data is available.
    #[allow(clippy::too_many_arguments)]
    fn forced_import(
        path: &Path,
        cohort_name: Option<&str>,
        computation: Computation,
        format: Format,
        version: Version,
        fetched: Option<&fetched::Vecs>,
        states_path: &Path,
        compute_relative_to_all: bool,
    ) -> color_eyre::Result<Self> {
        let compute_dollars = fetched.is_some();
        Ok(Self {
            starting_height: Height::ZERO,
            state: UTXOCohortState::default_and_import(
                states_path,
                cohort_name.unwrap_or_default(),
                compute_dollars,
            )?,
            inner: common::Vecs::forced_import(
                path,
                cohort_name,
                computation,
                format,
                version,
                fetched,
                compute_relative_to_all,
            )?,
        })
    }

    /// First height that still needs processing: the minimum of what the
    /// persisted state and the inner vecs have.
    fn starting_height(&self) -> Height {
        [
            self.state
                .price_to_amount
                .height()
                // No committed state yet -> MAX so the inner vecs decide.
                .map_or(Height::MAX, |h| h.incremented()),
            self.inner.starting_height(),
        ]
        .into_iter()
        .min()
        .unwrap()
    }

    /// Records the resume height and seeds `state` from the inner vecs.
    fn init(&mut self, starting_height: Height) {
        // Starting past what is already on disk would mean skipped blocks.
        if starting_height > self.starting_height() {
            unreachable!()
        }
        self.starting_height = starting_height;
        self.inner.init(&mut self.starting_height, &mut self.state);
    }

    fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
        self.inner.validate_computed_versions(base_version)
    }

    /// Pushes the current per-block values; no-op below the resume height.
    fn forced_pushed_at(&mut self, height: Height, exit: &Exit) -> Result<()> {
        if self.starting_height > height {
            return Ok(());
        }
        self.inner.forced_pushed_at(height, exit, &self.state)
    }

    /// Delegates unrealized P&L computation to the shared vecs, passing our
    /// cohort state read-only.
    fn compute_then_force_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
        exit: &Exit,
    ) -> Result<()> {
        self.inner.compute_then_force_push_unrealized_states(
            height,
            height_price,
            dateindex,
            date_price,
            exit,
            &self.state,
        )
    }

    fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
        self.inner
            .safe_flush_stateful_vecs(height, exit, &mut self.state)
    }

    /// Derives this cohort's series as an aggregate over `others`.
    fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()> {
        self.inner.compute_from_stateful(
            starting_indexes,
            &others.iter().map(|v| &v.inner).collect::<Vec<_>>(),
            exit,
        )
    }

    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part1(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> color_eyre::Result<()> {
        self.inner
            .compute_rest_part1(indexer, indexes, fetched, starting_indexes, exit)
    }

    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part2(
        &mut self,
        indexer: &Indexer,
        indexes: &indexes::Vecs,
        fetched: Option<&fetched::Vecs>,
        starting_indexes: &Indexes,
        market: &market::Vecs,
        height_to_supply: &impl AnyIterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl AnyIterableVec<DateIndex, Bitcoin>,
        height_to_realized_cap: Option<&impl AnyIterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl AnyIterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> color_eyre::Result<()> {
        self.inner.compute_rest_part2(
            indexer,
            indexes,
            fetched,
            starting_indexes,
            market,
            height_to_supply,
            dateindex_to_supply,
            height_to_realized_cap,
            dateindex_to_realized_cap,
            exit,
        )
    }

    /// All collectable vecs (this cohort adds none of its own).
    fn vecs(&self) -> Vec<&dyn AnyCollectableVec> {
        self.inner.vecs()
    }
}
/// NOTE(review): `Deref` to `common::Vecs` gives call-through access to the
/// shared vecs (inheritance-like) — a known Deref-polymorphism trade-off.
impl Deref for Vecs {
    type Target = common::Vecs;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,55 @@
use crate::{GroupFilter, GroupedByFromSize, GroupedBySizeRange, GroupedByUpToSize};
/// One value of `T` per address cohort, organized along three groupings:
/// `by_size_range` partitions addresses into disjoint balance buckets, while
/// `by_up_to_size` / `by_from_size` are overlapping cumulative buckets.
#[derive(Default, Clone)]
pub struct AddressGroups<T> {
    pub by_from_size: GroupedByFromSize<T>,
    pub by_size_range: GroupedBySizeRange<T>,
    pub by_up_to_size: GroupedByUpToSize<T>,
}
impl<T> AddressGroups<T> {
    /// Every grouped value as mutable refs, flattened in a fixed order:
    /// from-size, then size-range, then up-to-size.
    pub fn as_mut_vecs(&mut self) -> Vec<&mut T> {
        let mut all: Vec<&mut T> = Vec::new();
        all.extend(self.by_from_size.as_mut_vec());
        all.extend(self.by_size_range.as_mut_vec());
        all.extend(self.by_up_to_size.as_mut_vec());
        all
    }

    /// Only the mutually exclusive size-range buckets.
    pub fn as_mut_separate_vecs(&mut self) -> Vec<&mut T> {
        Vec::from_iter(self.by_size_range.as_mut_vec())
    }

    /// Only the overlapping buckets: up-to-size first, then from-size.
    pub fn as_mut_overlapping_vecs(&mut self) -> Vec<&mut T> {
        let mut overlapping: Vec<&mut T> = Vec::from_iter(self.by_up_to_size.as_mut_vec());
        overlapping.extend(self.by_from_size.as_mut_vec());
        overlapping
    }
}
impl<T> AddressGroups<(GroupFilter, T)> {
    /// Read-only view over every grouped value (filters stripped), in a
    /// fixed order: size-range, then up-to-size, then from-size.
    pub fn vecs(&self) -> Vec<&T> {
        let mut refs: Vec<&T> = Vec::new();
        refs.extend(self.by_size_range.vecs());
        refs.extend(self.by_up_to_size.vecs());
        refs.extend(self.by_from_size.vecs());
        refs
    }
}
impl<T> From<AddressGroups<T>> for AddressGroups<(GroupFilter, T)> {
fn from(value: AddressGroups<T>) -> Self {
Self {
by_size_range: GroupedBySizeRange::from(value.by_size_range),
by_up_to_size: GroupedByUpToSize::from(value.by_up_to_size),
by_from_size: GroupedByFromSize::from(value.by_from_size),
}
}
}

View File

@@ -0,0 +1,124 @@
use std::ops::{Add, AddAssign};
use crate::OutputType;
use super::GroupFilter;
/// One value of `T` per address-bearing output type (P2PK .. P2A).
/// Field order is load-bearing: `as_mut_vec` / `as_typed_vec` / `vecs`
/// all expose the same declaration order.
#[derive(Default, Clone, Debug)]
pub struct GroupedByAddressType<T> {
    pub p2pk65: T,
    pub p2pk33: T,
    pub p2pkh: T,
    pub p2sh: T,
    pub p2wpkh: T,
    pub p2wsh: T,
    pub p2tr: T,
    pub p2a: T,
}
impl<T> GroupedByAddressType<T> {
    /// Mutable access to the value for a given output type.
    ///
    /// # Panics
    /// `unreachable!` for output types that carry no address group
    /// (anything other than the eight fields of this struct).
    pub fn get_mut(&mut self, output_type: OutputType) -> &mut T {
        match output_type {
            OutputType::P2PK65 => &mut self.p2pk65,
            OutputType::P2PK33 => &mut self.p2pk33,
            OutputType::P2PKH => &mut self.p2pkh,
            OutputType::P2SH => &mut self.p2sh,
            OutputType::P2WPKH => &mut self.p2wpkh,
            OutputType::P2WSH => &mut self.p2wsh,
            OutputType::P2TR => &mut self.p2tr,
            OutputType::P2A => &mut self.p2a,
            _ => unreachable!(),
        }
    }

    /// All eight values as mutable refs, in declaration order.
    pub fn as_mut_vec(&mut self) -> [&mut T; 8] {
        [
            &mut self.p2pk65,
            &mut self.p2pk33,
            &mut self.p2pkh,
            &mut self.p2sh,
            &mut self.p2wpkh,
            &mut self.p2wsh,
            &mut self.p2tr,
            &mut self.p2a,
        ]
    }

    /// All eight values paired with their `OutputType`, same order as
    /// `as_mut_vec`.
    pub fn as_typed_vec(&self) -> [(OutputType, &T); 8] {
        [
            (OutputType::P2PK65, &self.p2pk65),
            (OutputType::P2PK33, &self.p2pk33),
            (OutputType::P2PKH, &self.p2pkh),
            (OutputType::P2SH, &self.p2sh),
            (OutputType::P2WPKH, &self.p2wpkh),
            (OutputType::P2WSH, &self.p2wsh),
            (OutputType::P2TR, &self.p2tr),
            (OutputType::P2A, &self.p2a),
        ]
    }
}
impl<T> GroupedByAddressType<(GroupFilter, T)> {
    /// The grouped values with their filters stripped, in declaration order
    /// (matches `as_mut_vec` / `as_typed_vec`).
    pub fn vecs(&self) -> [&T; 8] {
        [
            &self.p2pk65.1,
            &self.p2pk33.1,
            &self.p2pkh.1,
            &self.p2sh.1,
            &self.p2wpkh.1,
            &self.p2wsh.1,
            &self.p2tr.1,
            &self.p2a.1,
        ]
    }
}
/// Tags every value with the `GroupFilter::Type` of its output type.
impl<T> From<GroupedByAddressType<T>> for GroupedByAddressType<(GroupFilter, T)> {
    fn from(value: GroupedByAddressType<T>) -> Self {
        Self {
            p2pk65: (GroupFilter::Type(OutputType::P2PK65), value.p2pk65),
            p2pk33: (GroupFilter::Type(OutputType::P2PK33), value.p2pk33),
            p2pkh: (GroupFilter::Type(OutputType::P2PKH), value.p2pkh),
            p2sh: (GroupFilter::Type(OutputType::P2SH), value.p2sh),
            p2wpkh: (GroupFilter::Type(OutputType::P2WPKH), value.p2wpkh),
            p2wsh: (GroupFilter::Type(OutputType::P2WSH), value.p2wsh),
            p2tr: (GroupFilter::Type(OutputType::P2TR), value.p2tr),
            p2a: (GroupFilter::Type(OutputType::P2A), value.p2a),
        }
    }
}
/// Field-wise (per output type) sum of two grouped values.
impl<T> Add for GroupedByAddressType<T>
where
    T: Add<Output = T>,
{
    type Output = Self;

    fn add(self, rhs: Self) -> Self::Output {
        Self {
            p2pk65: self.p2pk65 + rhs.p2pk65,
            p2pk33: self.p2pk33 + rhs.p2pk33,
            p2pkh: self.p2pkh + rhs.p2pkh,
            p2sh: self.p2sh + rhs.p2sh,
            p2wpkh: self.p2wpkh + rhs.p2wpkh,
            p2wsh: self.p2wsh + rhs.p2wsh,
            p2tr: self.p2tr + rhs.p2tr,
            p2a: self.p2a + rhs.p2a,
        }
    }
}
/// In-place field-wise (per output type) accumulation.
impl<T> AddAssign for GroupedByAddressType<T>
where
    T: AddAssign,
{
    fn add_assign(&mut self, rhs: Self) {
        self.p2pk65 += rhs.p2pk65;
        self.p2pk33 += rhs.p2pk33;
        self.p2pkh += rhs.p2pkh;
        self.p2sh += rhs.p2sh;
        self.p2wpkh += rhs.p2wpkh;
        self.p2wsh += rhs.p2wsh;
        self.p2tr += rhs.p2tr;
        self.p2a += rhs.p2a;
    }
}

View File

@@ -1,7 +1,7 @@
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByDateRange<T> {
pub struct GroupedByDateRange<T> {
pub start_to_1d: T,
pub _1d_to_1w: T,
pub _1w_to_1m: T,
@@ -23,33 +23,33 @@ pub struct OutputsByDateRange<T> {
pub _15y_to_end: T,
}
impl<T> From<OutputsByDateRange<T>> for OutputsByDateRange<(OutputFilter, T)> {
fn from(value: OutputsByDateRange<T>) -> Self {
impl<T> From<GroupedByDateRange<T>> for GroupedByDateRange<(GroupFilter, T)> {
fn from(value: GroupedByDateRange<T>) -> Self {
Self {
start_to_1d: (OutputFilter::To(1), value.start_to_1d),
_1d_to_1w: (OutputFilter::Range(1..7), value._1d_to_1w),
_1w_to_1m: (OutputFilter::Range(7..30), value._1w_to_1m),
_1m_to_2m: (OutputFilter::Range(30..2 * 30), value._1m_to_2m),
_2m_to_3m: (OutputFilter::Range(2 * 30..3 * 30), value._2m_to_3m),
_3m_to_4m: (OutputFilter::Range(3 * 30..4 * 30), value._3m_to_4m),
_4m_to_5m: (OutputFilter::Range(4 * 30..5 * 30), value._4m_to_5m),
_5m_to_6m: (OutputFilter::Range(5 * 30..6 * 30), value._5m_to_6m),
_6m_to_1y: (OutputFilter::Range(6 * 30..365), value._6m_to_1y),
_1y_to_2y: (OutputFilter::Range(365..2 * 365), value._1y_to_2y),
_2y_to_3y: (OutputFilter::Range(2 * 365..3 * 365), value._2y_to_3y),
_3y_to_4y: (OutputFilter::Range(3 * 365..4 * 365), value._3y_to_4y),
_4y_to_5y: (OutputFilter::Range(4 * 365..5 * 365), value._4y_to_5y),
_5y_to_6y: (OutputFilter::Range(5 * 365..6 * 365), value._5y_to_6y),
_6y_to_7y: (OutputFilter::Range(6 * 365..7 * 365), value._6y_to_7y),
_7y_to_8y: (OutputFilter::Range(7 * 365..8 * 365), value._7y_to_8y),
_8y_to_10y: (OutputFilter::Range(8 * 365..10 * 365), value._8y_to_10y),
_10y_to_15y: (OutputFilter::Range(10 * 365..15 * 365), value._10y_to_15y),
_15y_to_end: (OutputFilter::From(15 * 365), value._15y_to_end),
start_to_1d: (GroupFilter::To(1), value.start_to_1d),
_1d_to_1w: (GroupFilter::Range(1..7), value._1d_to_1w),
_1w_to_1m: (GroupFilter::Range(7..30), value._1w_to_1m),
_1m_to_2m: (GroupFilter::Range(30..2 * 30), value._1m_to_2m),
_2m_to_3m: (GroupFilter::Range(2 * 30..3 * 30), value._2m_to_3m),
_3m_to_4m: (GroupFilter::Range(3 * 30..4 * 30), value._3m_to_4m),
_4m_to_5m: (GroupFilter::Range(4 * 30..5 * 30), value._4m_to_5m),
_5m_to_6m: (GroupFilter::Range(5 * 30..6 * 30), value._5m_to_6m),
_6m_to_1y: (GroupFilter::Range(6 * 30..365), value._6m_to_1y),
_1y_to_2y: (GroupFilter::Range(365..2 * 365), value._1y_to_2y),
_2y_to_3y: (GroupFilter::Range(2 * 365..3 * 365), value._2y_to_3y),
_3y_to_4y: (GroupFilter::Range(3 * 365..4 * 365), value._3y_to_4y),
_4y_to_5y: (GroupFilter::Range(4 * 365..5 * 365), value._4y_to_5y),
_5y_to_6y: (GroupFilter::Range(5 * 365..6 * 365), value._5y_to_6y),
_6y_to_7y: (GroupFilter::Range(6 * 365..7 * 365), value._6y_to_7y),
_7y_to_8y: (GroupFilter::Range(7 * 365..8 * 365), value._7y_to_8y),
_8y_to_10y: (GroupFilter::Range(8 * 365..10 * 365), value._8y_to_10y),
_10y_to_15y: (GroupFilter::Range(10 * 365..15 * 365), value._10y_to_15y),
_15y_to_end: (GroupFilter::From(15 * 365), value._15y_to_end),
}
}
}
impl<T> OutputsByDateRange<T> {
impl<T> GroupedByDateRange<T> {
pub fn as_vec(&mut self) -> [&T; 19] {
[
&self.start_to_1d,
@@ -99,7 +99,7 @@ impl<T> OutputsByDateRange<T> {
}
}
impl<T> OutputsByDateRange<(OutputFilter, T)> {
impl<T> GroupedByDateRange<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 19] {
[
&self.start_to_1d.1,

View File

@@ -1,9 +1,9 @@
use brk_core::{HalvingEpoch, Height};
use crate::{HalvingEpoch, Height};
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByEpoch<T> {
pub struct GroupedByEpoch<T> {
pub _0: T,
pub _1: T,
pub _2: T,
@@ -11,19 +11,19 @@ pub struct OutputsByEpoch<T> {
pub _4: T,
}
impl<T> From<OutputsByEpoch<T>> for OutputsByEpoch<(OutputFilter, T)> {
fn from(value: OutputsByEpoch<T>) -> Self {
impl<T> From<GroupedByEpoch<T>> for GroupedByEpoch<(GroupFilter, T)> {
fn from(value: GroupedByEpoch<T>) -> Self {
Self {
_0: (OutputFilter::Epoch(HalvingEpoch::new(0)), value._0),
_1: (OutputFilter::Epoch(HalvingEpoch::new(1)), value._1),
_2: (OutputFilter::Epoch(HalvingEpoch::new(2)), value._2),
_3: (OutputFilter::Epoch(HalvingEpoch::new(3)), value._3),
_4: (OutputFilter::Epoch(HalvingEpoch::new(4)), value._4),
_0: (GroupFilter::Epoch(HalvingEpoch::new(0)), value._0),
_1: (GroupFilter::Epoch(HalvingEpoch::new(1)), value._1),
_2: (GroupFilter::Epoch(HalvingEpoch::new(2)), value._2),
_3: (GroupFilter::Epoch(HalvingEpoch::new(3)), value._3),
_4: (GroupFilter::Epoch(HalvingEpoch::new(4)), value._4),
}
}
}
impl<T> OutputsByEpoch<T> {
impl<T> GroupedByEpoch<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 5] {
[
&mut self._0,
@@ -52,7 +52,7 @@ impl<T> OutputsByEpoch<T> {
}
}
impl<T> OutputsByEpoch<(OutputFilter, T)> {
impl<T> GroupedByEpoch<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 5] {
[&self._0.1, &self._1.1, &self._2.1, &self._3.1, &self._4.1]
}

View File

@@ -1,7 +1,7 @@
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByFromDate<T> {
pub struct GroupedByFromDate<T> {
pub _1d: T,
pub _1w: T,
pub _1m: T,
@@ -22,7 +22,7 @@ pub struct OutputsByFromDate<T> {
pub _15y: T,
}
impl<T> OutputsByFromDate<T> {
impl<T> GroupedByFromDate<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 18] {
[
&mut self._1d,
@@ -47,7 +47,7 @@ impl<T> OutputsByFromDate<T> {
}
}
impl<T> OutputsByFromDate<(OutputFilter, T)> {
impl<T> GroupedByFromDate<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 18] {
[
&self._1d.1,
@@ -72,27 +72,27 @@ impl<T> OutputsByFromDate<(OutputFilter, T)> {
}
}
impl<T> From<OutputsByFromDate<T>> for OutputsByFromDate<(OutputFilter, T)> {
fn from(value: OutputsByFromDate<T>) -> Self {
impl<T> From<GroupedByFromDate<T>> for GroupedByFromDate<(GroupFilter, T)> {
fn from(value: GroupedByFromDate<T>) -> Self {
Self {
_1d: (OutputFilter::From(1), value._1d),
_1w: (OutputFilter::From(7), value._1w),
_1m: (OutputFilter::From(30), value._1m),
_2m: (OutputFilter::From(2 * 30), value._2m),
_3m: (OutputFilter::From(3 * 30), value._3m),
_4m: (OutputFilter::From(4 * 30), value._4m),
_5m: (OutputFilter::From(5 * 30), value._5m),
_6m: (OutputFilter::From(6 * 30), value._6m),
_1y: (OutputFilter::From(365), value._1y),
_2y: (OutputFilter::From(2 * 365), value._2y),
_3y: (OutputFilter::From(3 * 365), value._3y),
_4y: (OutputFilter::From(4 * 365), value._4y),
_5y: (OutputFilter::From(5 * 365), value._5y),
_6y: (OutputFilter::From(6 * 365), value._6y),
_7y: (OutputFilter::From(7 * 365), value._7y),
_8y: (OutputFilter::From(8 * 365), value._8y),
_10y: (OutputFilter::From(10 * 365), value._10y),
_15y: (OutputFilter::From(15 * 365), value._15y),
_1d: (GroupFilter::From(1), value._1d),
_1w: (GroupFilter::From(7), value._1w),
_1m: (GroupFilter::From(30), value._1m),
_2m: (GroupFilter::From(2 * 30), value._2m),
_3m: (GroupFilter::From(3 * 30), value._3m),
_4m: (GroupFilter::From(4 * 30), value._4m),
_5m: (GroupFilter::From(5 * 30), value._5m),
_6m: (GroupFilter::From(6 * 30), value._6m),
_1y: (GroupFilter::From(365), value._1y),
_2y: (GroupFilter::From(2 * 365), value._2y),
_3y: (GroupFilter::From(3 * 365), value._3y),
_4y: (GroupFilter::From(4 * 365), value._4y),
_5y: (GroupFilter::From(5 * 365), value._5y),
_6y: (GroupFilter::From(6 * 365), value._6y),
_7y: (GroupFilter::From(7 * 365), value._7y),
_8y: (GroupFilter::From(8 * 365), value._8y),
_10y: (GroupFilter::From(10 * 365), value._10y),
_15y: (GroupFilter::From(15 * 365), value._15y),
}
}
}

View File

@@ -1,16 +1,16 @@
use brk_core::Sats;
use crate::Sats;
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByFromSize<T> {
pub struct GroupedByFromSize<T> {
pub _1_000sats: T,
pub _1btc: T,
pub _10btc: T,
pub _100btc: T,
}
impl<T> OutputsByFromSize<T> {
impl<T> GroupedByFromSize<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 4] {
[
&mut self._1_000sats,
@@ -21,7 +21,7 @@ impl<T> OutputsByFromSize<T> {
}
}
impl<T> OutputsByFromSize<(OutputFilter, T)> {
impl<T> GroupedByFromSize<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 4] {
[
&self._1_000sats.1,
@@ -32,17 +32,17 @@ impl<T> OutputsByFromSize<(OutputFilter, T)> {
}
}
impl<T> From<OutputsByFromSize<T>> for OutputsByFromSize<(OutputFilter, T)> {
fn from(value: OutputsByFromSize<T>) -> Self {
impl<T> From<GroupedByFromSize<T>> for GroupedByFromSize<(GroupFilter, T)> {
fn from(value: GroupedByFromSize<T>) -> Self {
Self {
_1_000sats: (OutputFilter::From(1_000), value._1_000sats),
_1btc: (OutputFilter::From(usize::from(Sats::ONE_BTC)), value._1btc),
_1_000sats: (GroupFilter::From(1_000), value._1_000sats),
_1btc: (GroupFilter::From(usize::from(Sats::ONE_BTC)), value._1btc),
_10btc: (
OutputFilter::From(usize::from(10 * Sats::ONE_BTC)),
GroupFilter::From(usize::from(10 * Sats::ONE_BTC)),
value._10btc,
),
_100btc: (
OutputFilter::From(usize::from(100 * Sats::ONE_BTC)),
GroupFilter::From(usize::from(100 * Sats::ONE_BTC)),
value._100btc,
),
}

View File

@@ -0,0 +1,256 @@
use std::ops::{Add, AddAssign};
use crate::Sats;
use super::GroupFilter;
#[derive(Debug, Default, Clone)]
pub struct GroupedBySizeRange<T> {
pub _0sats: T,
pub from_1sat_to_10sats: T,
pub from_10sats_to_100sats: T,
pub from_100sats_to_1_000sats: T,
pub from_1_000sats_to_10_000sats: T,
pub from_10_000sats_to_100_000sats: T,
pub from_100_000sats_to_1_000_000sats: T,
pub from_1_000_000sats_to_10_000_000sats: T,
pub from_10_000_000sats_to_1btc: T,
pub from_1btc_to_10btc: T,
pub from_10btc_to_100btc: T,
pub from_100btc_to_1_000btc: T,
pub from_1_000btc_to_10_000btc: T,
pub from_10_000btc_to_100_000btc: T,
pub from_100_000btc: T,
}
impl<T> From<GroupedBySizeRange<T>> for GroupedBySizeRange<(GroupFilter, T)> {
fn from(value: GroupedBySizeRange<T>) -> Self {
#[allow(clippy::inconsistent_digit_grouping)]
Self {
_0sats: (GroupFilter::To(1), value._0sats),
from_1sat_to_10sats: (GroupFilter::Range(1..10), value.from_1sat_to_10sats),
from_10sats_to_100sats: (GroupFilter::Range(10..100), value.from_10sats_to_100sats),
from_100sats_to_1_000sats: (
GroupFilter::Range(100..1_000),
value.from_100sats_to_1_000sats,
),
from_1_000sats_to_10_000sats: (
GroupFilter::Range(1_000..10_000),
value.from_1_000sats_to_10_000sats,
),
from_10_000sats_to_100_000sats: (
GroupFilter::Range(10_000..100_000),
value.from_10_000sats_to_100_000sats,
),
from_100_000sats_to_1_000_000sats: (
GroupFilter::Range(100_000..1_000_000),
value.from_100_000sats_to_1_000_000sats,
),
from_1_000_000sats_to_10_000_000sats: (
GroupFilter::Range(1_000_000..10_000_000),
value.from_1_000_000sats_to_10_000_000sats,
),
from_10_000_000sats_to_1btc: (
GroupFilter::Range(10_000_000..1_00_000_000),
value.from_10_000_000sats_to_1btc,
),
from_1btc_to_10btc: (
GroupFilter::Range(1_00_000_000..10_00_000_000),
value.from_1btc_to_10btc,
),
from_10btc_to_100btc: (
GroupFilter::Range(10_00_000_000..100_00_000_000),
value.from_10btc_to_100btc,
),
from_100btc_to_1_000btc: (
GroupFilter::Range(100_00_000_000..1_000_00_000_000),
value.from_100btc_to_1_000btc,
),
from_1_000btc_to_10_000btc: (
GroupFilter::Range(1_000_00_000_000..10_000_00_000_000),
value.from_1_000btc_to_10_000btc,
),
from_10_000btc_to_100_000btc: (
GroupFilter::Range(10_000_00_000_000..100_000_00_000_000),
value.from_10_000btc_to_100_000btc,
),
from_100_000btc: (GroupFilter::From(100_000_00_000_000), value.from_100_000btc),
}
}
}
impl<T> GroupedBySizeRange<T> {
#[allow(clippy::inconsistent_digit_grouping)]
pub fn get_mut(&mut self, value: Sats) -> &mut T {
if value == Sats::ZERO {
&mut self._0sats
} else if value < Sats::_10 {
&mut self.from_1sat_to_10sats
} else if value < Sats::_100 {
&mut self.from_10sats_to_100sats
} else if value < Sats::_1K {
&mut self.from_100sats_to_1_000sats
} else if value < Sats::_10K {
&mut self.from_1_000sats_to_10_000sats
} else if value < Sats::_100K {
&mut self.from_10_000sats_to_100_000sats
} else if value < Sats::_1M {
&mut self.from_100_000sats_to_1_000_000sats
} else if value < Sats::_10M {
&mut self.from_1_000_000sats_to_10_000_000sats
} else if value < Sats::_1_BTC {
&mut self.from_10_000_000sats_to_1btc
} else if value < Sats::_10_BTC {
&mut self.from_1btc_to_10btc
} else if value < Sats::_100_BTC {
&mut self.from_10btc_to_100btc
} else if value < Sats::_1K_BTC {
&mut self.from_100btc_to_1_000btc
} else if value < Sats::_10K_BTC {
&mut self.from_1_000btc_to_10_000btc
} else if value < Sats::_100K_BTC {
&mut self.from_10_000btc_to_100_000btc
} else {
&mut self.from_100_000btc
}
}
pub fn as_vec(&self) -> [&T; 15] {
[
&self._0sats,
&self.from_1sat_to_10sats,
&self.from_10sats_to_100sats,
&self.from_100sats_to_1_000sats,
&self.from_1_000sats_to_10_000sats,
&self.from_10_000sats_to_100_000sats,
&self.from_100_000sats_to_1_000_000sats,
&self.from_1_000_000sats_to_10_000_000sats,
&self.from_10_000_000sats_to_1btc,
&self.from_1btc_to_10btc,
&self.from_10btc_to_100btc,
&self.from_100btc_to_1_000btc,
&self.from_1_000btc_to_10_000btc,
&self.from_10_000btc_to_100_000btc,
&self.from_100_000btc,
]
}
pub fn as_typed_vec(&self) -> [(Sats, &T); 15] {
[
(Sats::ZERO, &self._0sats),
(Sats::_1, &self.from_1sat_to_10sats),
(Sats::_10, &self.from_10sats_to_100sats),
(Sats::_100, &self.from_100sats_to_1_000sats),
(Sats::_1K, &self.from_1_000sats_to_10_000sats),
(Sats::_10K, &self.from_10_000sats_to_100_000sats),
(Sats::_100K, &self.from_100_000sats_to_1_000_000sats),
(Sats::_1M, &self.from_1_000_000sats_to_10_000_000sats),
(Sats::_10M, &self.from_10_000_000sats_to_1btc),
(Sats::_1_BTC, &self.from_1btc_to_10btc),
(Sats::_10_BTC, &self.from_10btc_to_100btc),
(Sats::_100_BTC, &self.from_100btc_to_1_000btc),
(Sats::_1K_BTC, &self.from_1_000btc_to_10_000btc),
(Sats::_10K_BTC, &self.from_10_000btc_to_100_000btc),
(Sats::_100K_BTC, &self.from_100_000btc),
]
}
pub fn as_mut_vec(&mut self) -> [&mut T; 15] {
[
&mut self._0sats,
&mut self.from_1sat_to_10sats,
&mut self.from_10sats_to_100sats,
&mut self.from_100sats_to_1_000sats,
&mut self.from_1_000sats_to_10_000sats,
&mut self.from_10_000sats_to_100_000sats,
&mut self.from_100_000sats_to_1_000_000sats,
&mut self.from_1_000_000sats_to_10_000_000sats,
&mut self.from_10_000_000sats_to_1btc,
&mut self.from_1btc_to_10btc,
&mut self.from_10btc_to_100btc,
&mut self.from_100btc_to_1_000btc,
&mut self.from_1_000btc_to_10_000btc,
&mut self.from_10_000btc_to_100_000btc,
&mut self.from_100_000btc,
]
}
}
impl<T> GroupedBySizeRange<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 15] {
[
&self._0sats.1,
&self.from_1sat_to_10sats.1,
&self.from_10sats_to_100sats.1,
&self.from_100sats_to_1_000sats.1,
&self.from_1_000sats_to_10_000sats.1,
&self.from_10_000sats_to_100_000sats.1,
&self.from_100_000sats_to_1_000_000sats.1,
&self.from_1_000_000sats_to_10_000_000sats.1,
&self.from_10_000_000sats_to_1btc.1,
&self.from_1btc_to_10btc.1,
&self.from_10btc_to_100btc.1,
&self.from_100btc_to_1_000btc.1,
&self.from_1_000btc_to_10_000btc.1,
&self.from_10_000btc_to_100_000btc.1,
&self.from_100_000btc.1,
]
}
}
impl<T> Add for GroupedBySizeRange<T>
where
T: Add<Output = T>,
{
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self {
_0sats: self._0sats + rhs._0sats,
from_1sat_to_10sats: self.from_1sat_to_10sats + rhs.from_1sat_to_10sats,
from_10sats_to_100sats: self.from_10sats_to_100sats + rhs.from_10sats_to_100sats,
from_100sats_to_1_000sats: self.from_100sats_to_1_000sats
+ rhs.from_100sats_to_1_000sats,
from_1_000sats_to_10_000sats: self.from_1_000sats_to_10_000sats
+ rhs.from_1_000sats_to_10_000sats,
from_10_000sats_to_100_000sats: self.from_10_000sats_to_100_000sats
+ rhs.from_10_000sats_to_100_000sats,
from_100_000sats_to_1_000_000sats: self.from_100_000sats_to_1_000_000sats
+ rhs.from_100_000sats_to_1_000_000sats,
from_1_000_000sats_to_10_000_000sats: self.from_1_000_000sats_to_10_000_000sats
+ rhs.from_1_000_000sats_to_10_000_000sats,
from_10_000_000sats_to_1btc: self.from_10_000_000sats_to_1btc
+ rhs.from_10_000_000sats_to_1btc,
from_1btc_to_10btc: self.from_1btc_to_10btc + rhs.from_1btc_to_10btc,
from_10btc_to_100btc: self.from_10btc_to_100btc + rhs.from_10btc_to_100btc,
from_100btc_to_1_000btc: self.from_100btc_to_1_000btc + rhs.from_100btc_to_1_000btc,
from_1_000btc_to_10_000btc: self.from_1_000btc_to_10_000btc
+ rhs.from_1_000btc_to_10_000btc,
from_10_000btc_to_100_000btc: self.from_10_000btc_to_100_000btc
+ rhs.from_10_000btc_to_100_000btc,
from_100_000btc: self.from_100_000btc + rhs.from_100_000btc,
}
}
}
impl<T> AddAssign for GroupedBySizeRange<T>
where
T: AddAssign,
{
fn add_assign(&mut self, rhs: Self) {
self._0sats += rhs._0sats;
self.from_1sat_to_10sats += rhs.from_1sat_to_10sats;
self.from_10sats_to_100sats += rhs.from_10sats_to_100sats;
self.from_100sats_to_1_000sats += rhs.from_100sats_to_1_000sats;
self.from_1_000sats_to_10_000sats += rhs.from_1_000sats_to_10_000sats;
self.from_10_000sats_to_100_000sats += rhs.from_10_000sats_to_100_000sats;
self.from_100_000sats_to_1_000_000sats += rhs.from_100_000sats_to_1_000_000sats;
self.from_1_000_000sats_to_10_000_000sats += rhs.from_1_000_000sats_to_10_000_000sats;
self.from_10_000_000sats_to_1btc += rhs.from_10_000_000sats_to_1btc;
self.from_1btc_to_10btc += rhs.from_1btc_to_10btc;
self.from_10btc_to_100btc += rhs.from_10btc_to_100btc;
self.from_100btc_to_1_000btc += rhs.from_100btc_to_1_000btc;
self.from_1_000btc_to_10_000btc += rhs.from_1_000btc_to_10_000btc;
self.from_10_000btc_to_100_000btc += rhs.from_10_000btc_to_100_000btc;
self.from_100_000btc += rhs.from_100_000btc;
}
}

View File

@@ -1,11 +1,11 @@
use std::ops::{Add, AddAssign};
use brk_core::OutputType;
use crate::OutputType;
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone, Debug)]
pub struct OutputsBySpendableType<T> {
pub struct GroupedBySpendableType<T> {
pub p2pk65: T,
pub p2pk33: T,
pub p2pkh: T,
@@ -19,24 +19,7 @@ pub struct OutputsBySpendableType<T> {
pub empty: T,
}
impl<T> OutputsBySpendableType<T> {
// pub fn get(&self, output_type: OutputType) -> &T {
// match output_type {
// OutputType::P2PK65 => &self.p2pk65,
// OutputType::P2PK33 => &self.p2pk33,
// OutputType::P2PKH => &self.p2pkh,
// OutputType::P2MS => &self.p2ms,
// OutputType::P2SH => &self.p2sh,
// OutputType::P2WPKH => &self.p2wpkh,
// OutputType::P2WSH => &self.p2wsh,
// OutputType::P2TR => &self.p2tr,
// OutputType::P2A => &self.p2a,
// OutputType::Unknown => &self.unknown,
// OutputType::Empty => &self.empty,
// _ => unreachable!(),
// }
// }
impl<T> GroupedBySpendableType<T> {
pub fn get_mut(&mut self, output_type: OutputType) -> &mut T {
match output_type {
OutputType::P2PK65 => &mut self.p2pk65,
@@ -54,22 +37,6 @@ impl<T> OutputsBySpendableType<T> {
}
}
// pub fn as_vec(&self) -> [&T; 11] {
// [
// &self.p2pk65,
// &self.p2pk33,
// &self.p2pkh,
// &self.p2ms,
// &self.p2sh,
// &self.p2wpkh,
// &self.p2wsh,
// &self.p2tr,
// &self.p2a,
// &self.unknown,
// &self.empty,
// ]
// }
pub fn as_mut_vec(&mut self) -> [&mut T; 11] {
[
&mut self.p2pk65,
@@ -103,7 +70,7 @@ impl<T> OutputsBySpendableType<T> {
}
}
impl<T> OutputsBySpendableType<(OutputFilter, T)> {
impl<T> GroupedBySpendableType<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 11] {
[
&self.p2pk65.1,
@@ -121,25 +88,25 @@ impl<T> OutputsBySpendableType<(OutputFilter, T)> {
}
}
impl<T> From<OutputsBySpendableType<T>> for OutputsBySpendableType<(OutputFilter, T)> {
fn from(value: OutputsBySpendableType<T>) -> Self {
impl<T> From<GroupedBySpendableType<T>> for GroupedBySpendableType<(GroupFilter, T)> {
fn from(value: GroupedBySpendableType<T>) -> Self {
Self {
p2pk65: (OutputFilter::Type(OutputType::P2PK65), value.p2pk65),
p2pk33: (OutputFilter::Type(OutputType::P2PK33), value.p2pk33),
p2pkh: (OutputFilter::Type(OutputType::P2PKH), value.p2pkh),
p2ms: (OutputFilter::Type(OutputType::P2MS), value.p2ms),
p2sh: (OutputFilter::Type(OutputType::P2SH), value.p2sh),
p2wpkh: (OutputFilter::Type(OutputType::P2WPKH), value.p2wpkh),
p2wsh: (OutputFilter::Type(OutputType::P2WSH), value.p2wsh),
p2tr: (OutputFilter::Type(OutputType::P2TR), value.p2tr),
p2a: (OutputFilter::Type(OutputType::P2A), value.p2a),
unknown: (OutputFilter::Type(OutputType::Unknown), value.unknown),
empty: (OutputFilter::Type(OutputType::Empty), value.empty),
p2pk65: (GroupFilter::Type(OutputType::P2PK65), value.p2pk65),
p2pk33: (GroupFilter::Type(OutputType::P2PK33), value.p2pk33),
p2pkh: (GroupFilter::Type(OutputType::P2PKH), value.p2pkh),
p2ms: (GroupFilter::Type(OutputType::P2MS), value.p2ms),
p2sh: (GroupFilter::Type(OutputType::P2SH), value.p2sh),
p2wpkh: (GroupFilter::Type(OutputType::P2WPKH), value.p2wpkh),
p2wsh: (GroupFilter::Type(OutputType::P2WSH), value.p2wsh),
p2tr: (GroupFilter::Type(OutputType::P2TR), value.p2tr),
p2a: (GroupFilter::Type(OutputType::P2A), value.p2a),
unknown: (GroupFilter::Type(OutputType::Unknown), value.unknown),
empty: (GroupFilter::Type(OutputType::Empty), value.empty),
}
}
}
impl<T> Add for OutputsBySpendableType<T>
impl<T> Add for GroupedBySpendableType<T>
where
T: Add<Output = T>,
{
@@ -161,7 +128,7 @@ where
}
}
impl<T> AddAssign for OutputsBySpendableType<T>
impl<T> AddAssign for GroupedBySpendableType<T>
where
T: AddAssign,
{

View File

@@ -0,0 +1,28 @@
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct GroupedByTerm<T> {
pub short: T,
pub long: T,
}
impl<T> GroupedByTerm<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 2] {
[&mut self.short, &mut self.long]
}
}
impl<T> GroupedByTerm<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 2] {
[&self.short.1, &self.long.1]
}
}
impl<T> From<GroupedByTerm<T>> for GroupedByTerm<(GroupFilter, T)> {
fn from(value: GroupedByTerm<T>) -> Self {
Self {
short: (GroupFilter::To(5 * 30), value.short),
long: (GroupFilter::From(5 * 30), value.long),
}
}
}

View File

@@ -1,16 +1,16 @@
use std::ops::{Add, AddAssign};
use brk_core::OutputType;
use crate::OutputType;
use super::{OutputsBySpendableType, OutputsByUnspendableType};
use super::{GroupedBySpendableType, GroupedByUnspendableType};
#[derive(Default, Clone, Debug)]
pub struct OutputsByType<T> {
pub spendable: OutputsBySpendableType<T>,
pub unspendable: OutputsByUnspendableType<T>,
pub struct GroupedByType<T> {
pub spendable: GroupedBySpendableType<T>,
pub unspendable: GroupedByUnspendableType<T>,
}
impl<T> OutputsByType<T> {
impl<T> GroupedByType<T> {
pub fn get(&self, output_type: OutputType) -> &T {
match output_type {
OutputType::P2PK65 => &self.spendable.p2pk65,
@@ -44,25 +44,9 @@ impl<T> OutputsByType<T> {
OutputType::OpReturn => &mut self.unspendable.opreturn,
}
}
// pub fn as_vec(&self) -> Vec<&T> {
// self.spendable
// .as_vec()
// .into_iter()
// .chain(self.unspendable.as_vec())
// .collect::<Vec<_>>()
// }
// pub fn as_mut_vec(&mut self) -> Vec<&mut T> {
// self.spendable
// .as_mut_vec()
// .into_iter()
// .chain(self.unspendable.as_mut_vec())
// .collect::<Vec<_>>()
// }
}
impl<T> Add for OutputsByType<T>
impl<T> Add for GroupedByType<T>
where
T: Add<Output = T>,
{
@@ -75,7 +59,7 @@ where
}
}
impl<T> AddAssign for OutputsByType<T>
impl<T> AddAssign for GroupedByType<T>
where
T: AddAssign,
{

View File

@@ -1,17 +1,17 @@
use std::ops::{Add, AddAssign};
#[derive(Default, Clone, Debug)]
pub struct OutputsByUnspendableType<T> {
pub struct GroupedByUnspendableType<T> {
pub opreturn: T,
}
impl<T> OutputsByUnspendableType<T> {
impl<T> GroupedByUnspendableType<T> {
pub fn as_vec(&self) -> [&T; 1] {
[&self.opreturn]
}
}
impl<T> Add for OutputsByUnspendableType<T>
impl<T> Add for GroupedByUnspendableType<T>
where
T: Add<Output = T>,
{
@@ -23,7 +23,7 @@ where
}
}
impl<T> AddAssign for OutputsByUnspendableType<T>
impl<T> AddAssign for GroupedByUnspendableType<T>
where
T: AddAssign,
{

View File

@@ -1,7 +1,7 @@
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByUpToDate<T> {
pub struct GroupedByUpToDate<T> {
pub _1d: T,
pub _1w: T,
pub _1m: T,
@@ -22,7 +22,7 @@ pub struct OutputsByUpToDate<T> {
pub _15y: T,
}
impl<T> OutputsByUpToDate<T> {
impl<T> GroupedByUpToDate<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 18] {
[
&mut self._1d,
@@ -47,7 +47,7 @@ impl<T> OutputsByUpToDate<T> {
}
}
impl<T> OutputsByUpToDate<(OutputFilter, T)> {
impl<T> GroupedByUpToDate<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 18] {
[
&self._1d.1,
@@ -72,27 +72,27 @@ impl<T> OutputsByUpToDate<(OutputFilter, T)> {
}
}
impl<T> From<OutputsByUpToDate<T>> for OutputsByUpToDate<(OutputFilter, T)> {
fn from(value: OutputsByUpToDate<T>) -> Self {
impl<T> From<GroupedByUpToDate<T>> for GroupedByUpToDate<(GroupFilter, T)> {
fn from(value: GroupedByUpToDate<T>) -> Self {
Self {
_1d: (OutputFilter::To(1), value._1d),
_1w: (OutputFilter::To(7), value._1w),
_1m: (OutputFilter::To(30), value._1m),
_2m: (OutputFilter::To(2 * 30), value._2m),
_3m: (OutputFilter::To(3 * 30), value._3m),
_4m: (OutputFilter::To(4 * 30), value._4m),
_5m: (OutputFilter::To(5 * 30), value._5m),
_6m: (OutputFilter::To(6 * 30), value._6m),
_1y: (OutputFilter::To(365), value._1y),
_2y: (OutputFilter::To(2 * 365), value._2y),
_3y: (OutputFilter::To(3 * 365), value._3y),
_4y: (OutputFilter::To(4 * 365), value._4y),
_5y: (OutputFilter::To(5 * 365), value._5y),
_6y: (OutputFilter::To(6 * 365), value._6y),
_7y: (OutputFilter::To(7 * 365), value._7y),
_8y: (OutputFilter::To(8 * 365), value._8y),
_10y: (OutputFilter::To(10 * 365), value._10y),
_15y: (OutputFilter::To(15 * 365), value._15y),
_1d: (GroupFilter::To(1), value._1d),
_1w: (GroupFilter::To(7), value._1w),
_1m: (GroupFilter::To(30), value._1m),
_2m: (GroupFilter::To(2 * 30), value._2m),
_3m: (GroupFilter::To(3 * 30), value._3m),
_4m: (GroupFilter::To(4 * 30), value._4m),
_5m: (GroupFilter::To(5 * 30), value._5m),
_6m: (GroupFilter::To(6 * 30), value._6m),
_1y: (GroupFilter::To(365), value._1y),
_2y: (GroupFilter::To(2 * 365), value._2y),
_3y: (GroupFilter::To(3 * 365), value._3y),
_4y: (GroupFilter::To(4 * 365), value._4y),
_5y: (GroupFilter::To(5 * 365), value._5y),
_6y: (GroupFilter::To(6 * 365), value._6y),
_7y: (GroupFilter::To(7 * 365), value._7y),
_8y: (GroupFilter::To(8 * 365), value._8y),
_10y: (GroupFilter::To(10 * 365), value._10y),
_15y: (GroupFilter::To(15 * 365), value._15y),
}
}
}

View File

@@ -1,9 +1,9 @@
use brk_core::Sats;
use crate::Sats;
use super::OutputFilter;
use super::GroupFilter;
#[derive(Default, Clone)]
pub struct OutputsByUpToSize<T> {
pub struct GroupedByUpToSize<T> {
pub _1_000sats: T,
pub _10_000sats: T,
pub _1btc: T,
@@ -11,7 +11,7 @@ pub struct OutputsByUpToSize<T> {
pub _100btc: T,
}
impl<T> OutputsByUpToSize<T> {
impl<T> GroupedByUpToSize<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 5] {
[
&mut self._1_000sats,
@@ -23,7 +23,7 @@ impl<T> OutputsByUpToSize<T> {
}
}
impl<T> OutputsByUpToSize<(OutputFilter, T)> {
impl<T> GroupedByUpToSize<(GroupFilter, T)> {
pub fn vecs(&self) -> [&T; 5] {
[
&self._1_000sats.1,
@@ -35,18 +35,18 @@ impl<T> OutputsByUpToSize<(OutputFilter, T)> {
}
}
impl<T> From<OutputsByUpToSize<T>> for OutputsByUpToSize<(OutputFilter, T)> {
fn from(value: OutputsByUpToSize<T>) -> Self {
impl<T> From<GroupedByUpToSize<T>> for GroupedByUpToSize<(GroupFilter, T)> {
fn from(value: GroupedByUpToSize<T>) -> Self {
Self {
_1_000sats: (OutputFilter::To(1_000), value._1_000sats),
_10_000sats: (OutputFilter::To(10_000), value._10_000sats),
_1btc: (OutputFilter::To(usize::from(Sats::ONE_BTC)), value._1btc),
_1_000sats: (GroupFilter::To(1_000), value._1_000sats),
_10_000sats: (GroupFilter::To(10_000), value._10_000sats),
_1btc: (GroupFilter::To(usize::from(Sats::ONE_BTC)), value._1btc),
_10btc: (
OutputFilter::To(usize::from(10 * Sats::ONE_BTC)),
GroupFilter::To(usize::from(10 * Sats::ONE_BTC)),
value._10btc,
),
_100btc: (
OutputFilter::To(usize::from(100 * Sats::ONE_BTC)),
GroupFilter::To(usize::from(100 * Sats::ONE_BTC)),
value._100btc,
),
}

View File

@@ -1,5 +1,5 @@
#[derive(Default, Clone)]
pub struct OutputsByValue<T> {
pub struct GroupedByValue<T> {
pub up_to_1cent: T,
pub from_1c_to_10c: T,
pub from_10c_to_1d: T,
@@ -16,7 +16,7 @@ pub struct OutputsByValue<T> {
// ...
}
impl<T> OutputsByValue<T> {
impl<T> GroupedByValue<T> {
pub fn as_mut_vec(&mut self) -> Vec<&mut T> {
vec![
&mut self.up_to_1cent,

View File

@@ -0,0 +1,51 @@
use std::ops::Range;
use crate::{HalvingEpoch, OutputType};
#[derive(Debug, Clone)]
pub enum GroupFilter {
All,
To(usize),
Range(Range<usize>),
From(usize),
Epoch(HalvingEpoch),
Type(OutputType),
}
impl GroupFilter {
pub fn contains(&self, value: usize) -> bool {
match self {
GroupFilter::All => true,
GroupFilter::To(to) => *to > value,
GroupFilter::From(from) => *from <= value,
GroupFilter::Range(r) => r.contains(&value),
GroupFilter::Epoch(_) => false,
GroupFilter::Type(_) => false,
}
}
pub fn includes(&self, other: &GroupFilter) -> bool {
match self {
GroupFilter::All => true,
GroupFilter::To(to) => match other {
GroupFilter::All => false,
GroupFilter::To(to2) => to >= to2,
GroupFilter::Range(range) => range.end <= *to,
GroupFilter::From(_) => false,
GroupFilter::Epoch(_) => false,
GroupFilter::Type(_) => false,
},
GroupFilter::From(from) => match other {
GroupFilter::All => false,
GroupFilter::To(_) => false,
GroupFilter::Range(range) => range.start >= *from,
GroupFilter::From(from2) => from <= from2,
GroupFilter::Epoch(_) => false,
GroupFilter::Type(_) => false,
},
GroupFilter::Range(_) => false,
GroupFilter::Epoch(_) => false,
GroupFilter::Type(_) => false,
}
}
}

View File

@@ -0,0 +1,31 @@
mod address;
mod by_address_type;
mod by_date_range;
mod by_epoch;
mod by_from_date;
mod by_from_size;
mod by_size_range;
mod by_spendable_type;
mod by_term;
mod by_type;
mod by_unspendable_type;
mod by_up_to_date;
mod by_up_to_size;
mod filter;
mod utxo;
pub use address::*;
pub use by_address_type::*;
pub use by_date_range::*;
pub use by_epoch::*;
pub use by_from_date::*;
pub use by_from_size::*;
pub use by_size_range::*;
pub use by_spendable_type::*;
pub use by_term::*;
pub use by_type::*;
pub use by_unspendable_type::*;
pub use by_up_to_date::*;
pub use by_up_to_size::*;
pub use filter::*;
pub use utxo::*;

View File

@@ -0,0 +1,91 @@
use crate::{
GroupFilter, GroupedByDateRange, GroupedByEpoch, GroupedByFromDate, GroupedByFromSize,
GroupedBySizeRange, GroupedBySpendableType, GroupedByTerm, GroupedByUpToDate,
GroupedByUpToSize,
};
#[derive(Default, Clone)]
pub struct UTXOGroups<T> {
pub all: T,
pub by_date_range: GroupedByDateRange<T>,
pub by_epoch: GroupedByEpoch<T>,
pub by_from_date: GroupedByFromDate<T>,
pub by_from_size: GroupedByFromSize<T>,
pub by_size_range: GroupedBySizeRange<T>,
pub by_term: GroupedByTerm<T>,
pub by_type: GroupedBySpendableType<T>,
pub by_up_to_date: GroupedByUpToDate<T>,
pub by_up_to_size: GroupedByUpToSize<T>,
}
impl<T> UTXOGroups<T> {
pub fn as_mut_vecs(&mut self) -> Vec<&mut T> {
[&mut self.all]
.into_iter()
.chain(self.by_term.as_mut_vec())
.chain(self.by_up_to_date.as_mut_vec())
.chain(self.by_from_date.as_mut_vec())
.chain(self.by_from_size.as_mut_vec())
.chain(self.by_date_range.as_mut_vec())
.chain(self.by_epoch.as_mut_vec())
.chain(self.by_size_range.as_mut_vec())
.chain(self.by_up_to_size.as_mut_vec())
.chain(self.by_type.as_mut_vec())
.collect::<Vec<_>>()
}
pub fn as_mut_separate_vecs(&mut self) -> Vec<&mut T> {
self.by_date_range
.as_mut_vec()
.into_iter()
.chain(self.by_epoch.as_mut_vec())
.chain(self.by_size_range.as_mut_vec())
.chain(self.by_type.as_mut_vec())
.collect::<Vec<_>>()
}
pub fn as_mut_overlapping_vecs(&mut self) -> Vec<&mut T> {
[&mut self.all]
.into_iter()
.chain(self.by_term.as_mut_vec())
.chain(self.by_up_to_date.as_mut_vec())
.chain(self.by_from_date.as_mut_vec())
.chain(self.by_up_to_size.as_mut_vec())
.chain(self.by_from_size.as_mut_vec())
.collect::<Vec<_>>()
}
}
impl<T> UTXOGroups<(GroupFilter, T)> {
pub fn vecs(&self) -> Vec<&T> {
[&self.all.1]
.into_iter()
.chain(self.by_term.vecs())
.chain(self.by_up_to_date.vecs())
.chain(self.by_from_date.vecs())
.chain(self.by_date_range.vecs())
.chain(self.by_epoch.vecs())
.chain(self.by_size_range.vecs())
.chain(self.by_type.vecs())
.chain(self.by_up_to_size.vecs())
.chain(self.by_from_size.vecs())
.collect::<Vec<_>>()
}
}
impl<T> From<UTXOGroups<T>> for UTXOGroups<(GroupFilter, T)> {
fn from(value: UTXOGroups<T>) -> Self {
Self {
all: (GroupFilter::All, value.all),
by_term: GroupedByTerm::from(value.by_term),
by_up_to_date: GroupedByUpToDate::from(value.by_up_to_date),
by_from_date: GroupedByFromDate::from(value.by_from_date),
by_date_range: GroupedByDateRange::from(value.by_date_range),
by_epoch: GroupedByEpoch::from(value.by_epoch),
by_size_range: GroupedBySizeRange::from(value.by_size_range),
by_up_to_size: GroupedByUpToSize::from(value.by_up_to_size),
by_from_size: GroupedByFromSize::from(value.by_from_size),
by_type: GroupedBySpendableType::from(value.by_type),
}
}
}

View File

@@ -1,11 +1,13 @@
#![doc = include_str!("../README.md")]
mod enums;
mod groups;
mod structs;
mod traits;
mod utils;
pub use enums::*;
pub use groups::*;
pub use structs::*;
pub use traits::*;
pub use utils::*;

View File

@@ -36,8 +36,22 @@ pub struct Sats(u64);
#[allow(clippy::inconsistent_digit_grouping)]
impl Sats {
pub const ZERO: Self = Self(0);
pub const MAX: Self = Self(u64::MAX);
pub const _1: Self = Self(1);
pub const _10: Self = Self(10);
pub const _100: Self = Self(100);
pub const _1K: Self = Self(1_000);
pub const _10K: Self = Self(10_000);
pub const _100K: Self = Self(100_000);
pub const _1M: Self = Self(1_000_000);
pub const _10M: Self = Self(10_000_000);
pub const _1_BTC: Self = Self::ONE_BTC;
pub const _10_BTC: Self = Self(10_00_000_000);
pub const _100_BTC: Self = Self(100_00_000_000);
pub const _1K_BTC: Self = Self(1_000_00_000_000);
pub const _10K_BTC: Self = Self(10_000_00_000_000);
pub const _100K_BTC: Self = Self(100_000_00_000_000);
pub const ONE_BTC: Self = Self(1_00_000_000);
pub const MAX: Self = Self(u64::MAX);
pub const FIFTY_BTC: Self = Self(50_00_000_000);
pub fn new(sats: u64) -> Self {

View File

@@ -14,21 +14,26 @@ Please be aware thtat the technology is very rapidly evolving, thus having issue
#### Step 1
First we need to connect BRK to Claude.
First we need to connect BRK to Claude. To do that we need to go to the "Connect apps" menu from the home screen of Claude desktop.
![Caude MCP setup step 1](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step1.png)
![Image of Claude Desktop home screen](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step1.png)
#### Step 2
Which is done by adding an integration.
Then simply go to "Add integration".
![Caude MCP setup step 2](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step2.png)
![Image of the Connect app" menu of Claude Desktop](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step2.png)
#### Step 3
Since BRK's open and free, only a URL is needed.
Claude's MCP client is (for now?) session based thus using a URL pointing to a load balancer will not work.
![Caude MCP setup step 3](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step3.png)
Use one of the following URL instead:
- https://eu1.bitcoinresearchkit.org/mcp
- https://eu2.bitcoinresearchkit.org/mcp
![Image of Add Integration menu of Claude Desktop](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step3.png)
#### Step 4
@@ -36,7 +41,7 @@ Verify that it has access to BRK's tools.
Optionally and highly recommended, giving it unsupervised access gives a more fluid experience and prevents possible issues and errors.
![Caude MCP setup step 4](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step4.png)
![Image of edit integration meny on Claude Desktop](https://github.com/bitcoinresearchkit/brk/blob/main/assets/claude-step4.png)
#### Results

View File

@@ -0,0 +1,90 @@
use std::{
ops::{Deref, DerefMut},
path::Path,
};
use brk_core::{Dollars, Height, Result};
use crate::{CohortStateTrait, SupplyState, UnrealizedState};
use super::CohortState;
#[derive(Clone)]
pub struct AddressCohortState {
pub address_count: usize,
pub inner: CohortState,
}
impl CohortStateTrait for AddressCohortState {
fn default_and_import(path: &Path, name: &str, compute_dollars: bool) -> Result<Self> {
Ok(Self {
address_count: 0,
inner: CohortState::default_and_import(path, name, compute_dollars)?,
})
}
fn reset_single_iteration_values(&mut self) {
self.inner.reset_single_iteration_values();
}
fn increment(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.inner.increment(supply_state, price);
}
fn decrement(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.inner.decrement(supply_state, price);
}
fn decrement_price_to_amount(&mut self, supply_state: &SupplyState, price: Dollars) {
self.inner.decrement_price_to_amount(supply_state, price);
}
fn receive(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.inner.receive(supply_state, price);
}
fn send(
&mut self,
supply_state: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
) {
self.inner.send(
supply_state,
current_price,
prev_price,
blocks_old,
days_old,
older_than_hour,
);
}
fn compute_unrealized_states(
&self,
height_price: Dollars,
date_price: Option<Dollars>,
) -> (UnrealizedState, Option<UnrealizedState>) {
self.inner
.compute_unrealized_states(height_price, date_price)
}
fn commit(&mut self, height: Height) -> Result<()> {
self.inner.commit(height)
}
}
impl Deref for AddressCohortState {
type Target = CohortState;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl DerefMut for AddressCohortState {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}

View File

@@ -2,9 +2,7 @@ use std::{cmp::Ordering, path::Path};
use brk_core::{CheckedSub, Dollars, Height, Result, Sats};
use crate::{PriceToAmount, UnrealizedState};
use super::{RealizedState, SupplyState};
use crate::{CohortStateTrait, PriceToAmount, RealizedState, SupplyState, UnrealizedState};
#[derive(Clone)]
pub struct CohortState {
@@ -15,8 +13,8 @@ pub struct CohortState {
pub price_to_amount: PriceToAmount,
}
impl CohortState {
pub fn default_and_import(path: &Path, name: &str, compute_dollars: bool) -> Result<Self> {
impl CohortStateTrait for CohortState {
fn default_and_import(path: &Path, name: &str, compute_dollars: bool) -> Result<Self> {
Ok(Self {
supply: SupplyState::default(),
realized: compute_dollars.then_some(RealizedState::NAN),
@@ -26,7 +24,7 @@ impl CohortState {
})
}
pub fn reset_single_iteration_values(&mut self) {
fn reset_single_iteration_values(&mut self) {
self.satdays_destroyed = Sats::ZERO;
self.satblocks_destroyed = Sats::ZERO;
if let Some(realized) = self.realized.as_mut() {
@@ -34,7 +32,7 @@ impl CohortState {
}
}
pub fn increment(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
fn increment(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.supply += supply_state;
if supply_state.value > Sats::ZERO {
@@ -46,7 +44,7 @@ impl CohortState {
}
}
pub fn decrement(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
fn decrement(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.supply -= supply_state;
if supply_state.value > Sats::ZERO {
@@ -66,7 +64,7 @@ impl CohortState {
}
}
pub fn receive(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
fn receive(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.supply += supply_state;
if supply_state.value > Sats::ZERO {
@@ -78,7 +76,7 @@ impl CohortState {
}
}
pub fn send(
fn send(
&mut self,
supply_state: &SupplyState,
current_price: Option<Dollars>,
@@ -87,24 +85,26 @@ impl CohortState {
days_old: f64,
older_than_hour: bool,
) {
self.supply -= supply_state;
if supply_state.utxos > 0 {
self.supply -= supply_state;
if supply_state.value > Sats::ZERO {
self.satblocks_destroyed += supply_state.value * blocks_old;
if supply_state.value > Sats::ZERO {
self.satblocks_destroyed += supply_state.value * blocks_old;
self.satdays_destroyed +=
Sats::from((u64::from(supply_state.value) as f64 * days_old).floor() as u64);
self.satdays_destroyed +=
Sats::from((u64::from(supply_state.value) as f64 * days_old).floor() as u64);
if let Some(realized) = self.realized.as_mut() {
let current_price = current_price.unwrap();
let prev_price = prev_price.unwrap();
realized.send(supply_state, current_price, prev_price, older_than_hour);
self.decrement_price_to_amount(supply_state, prev_price);
if let Some(realized) = self.realized.as_mut() {
let current_price = current_price.unwrap();
let prev_price = prev_price.unwrap();
realized.send(supply_state, current_price, prev_price, older_than_hour);
self.decrement_price_to_amount(supply_state, prev_price);
}
}
}
}
pub fn compute_unrealized_states(
fn compute_unrealized_states(
&self,
height_price: Dollars,
date_price: Option<Dollars>,
@@ -166,7 +166,7 @@ impl CohortState {
(height_unrealized_state, date_unrealized_state)
}
pub fn commit(&mut self, height: Height) -> Result<()> {
fn commit(&mut self, height: Height) -> Result<()> {
self.price_to_amount.flush(height)
}
}

View File

@@ -0,0 +1,9 @@
mod address;
mod common;
mod r#trait;
mod utxo;
pub use address::*;
pub use common::*;
pub use r#trait::*;
pub use utxo::*;

View File

@@ -0,0 +1,29 @@
use std::path::Path;
use brk_core::{Dollars, Height, Result};
use crate::{SupplyState, UnrealizedState};
pub trait CohortStateTrait: Sized {
fn default_and_import(path: &Path, name: &str, compute_dollars: bool) -> Result<Self>;
fn reset_single_iteration_values(&mut self);
fn increment(&mut self, supply_state: &SupplyState, price: Option<Dollars>);
fn decrement(&mut self, supply_state: &SupplyState, price: Option<Dollars>);
fn decrement_price_to_amount(&mut self, supply_state: &SupplyState, price: Dollars);
fn receive(&mut self, supply_state: &SupplyState, price: Option<Dollars>);
fn send(
&mut self,
supply_state: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
);
fn compute_unrealized_states(
&self,
height_price: Dollars,
date_price: Option<Dollars>,
) -> (UnrealizedState, Option<UnrealizedState>);
fn commit(&mut self, height: Height) -> Result<()>;
}

View File

@@ -0,0 +1,72 @@
use std::path::Path;
use brk_core::{Dollars, Height, Result};
use derive_deref::{Deref, DerefMut};
use crate::{CohortStateTrait, SupplyState, UnrealizedState};
use super::CohortState;
#[derive(Clone, Deref, DerefMut)]
pub struct UTXOCohortState(CohortState);
impl CohortStateTrait for UTXOCohortState {
fn default_and_import(path: &Path, name: &str, compute_dollars: bool) -> Result<Self> {
Ok(Self(CohortState::default_and_import(
path,
name,
compute_dollars,
)?))
}
fn reset_single_iteration_values(&mut self) {
self.0.reset_single_iteration_values();
}
fn increment(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.0.increment(supply_state, price);
}
fn decrement(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.0.decrement(supply_state, price);
}
fn decrement_price_to_amount(&mut self, supply_state: &SupplyState, price: Dollars) {
self.0.decrement_price_to_amount(supply_state, price);
}
fn receive(&mut self, supply_state: &SupplyState, price: Option<Dollars>) {
self.0.receive(supply_state, price);
}
fn send(
&mut self,
supply_state: &SupplyState,
current_price: Option<Dollars>,
prev_price: Option<Dollars>,
blocks_old: usize,
days_old: f64,
older_than_hour: bool,
) {
self.0.send(
supply_state,
current_price,
prev_price,
blocks_old,
days_old,
older_than_hour,
);
}
fn compute_unrealized_states(
&self,
height_price: Dollars,
date_price: Option<Dollars>,
) -> (UnrealizedState, Option<UnrealizedState>) {
self.0.compute_unrealized_states(height_price, date_price)
}
fn commit(&mut self, height: Height) -> Result<()> {
self.0.commit(height)
}
}

View File

@@ -1,142 +0,0 @@
pub struct OneShotSats {
pub price_paid_state: PricePaidState,
pub unrealized_block_state: UnrealizedState,
pub unrealized_date_state: Option<UnrealizedState>,
}
pub struct UnrealizedState {
supply_in_profit: Sats,
// supply_in_loss: Sats,
unrealized_profit: Dollars,
unrealized_loss: Dollars,
}
// Why option ?
#[derive(Default, Debug)]
pub struct PricePaidState {
pp_p5: Option<Dollars>,
pp_p10: Option<Dollars>,
pp_p15: Option<Dollars>,
pp_p20: Option<Dollars>,
pp_p25: Option<Dollars>,
pp_p30: Option<Dollars>,
pp_p35: Option<Dollars>,
pp_p40: Option<Dollars>,
pp_p45: Option<Dollars>,
pp_median: Option<Dollars>,
pp_p55: Option<Dollars>,
pp_p60: Option<Dollars>,
pp_p65: Option<Dollars>,
pp_p70: Option<Dollars>,
pp_p75: Option<Dollars>,
pp_p80: Option<Dollars>,
pp_p85: Option<Dollars>,
pp_p90: Option<Dollars>,
pp_p95: Option<Dollars>,
processed_amount: Sats,
}
pub struct PricePaidStateFull {
pp_p1: Option<Dollars>,
pp_p2: Option<Dollars>,
pp_p3: Option<Dollars>,
pp_p4: Option<Dollars>,
pp_p5: Option<Dollars>,
pp_p6: Option<Dollars>,
pp_p7: Option<Dollars>,
pp_p8: Option<Dollars>,
pp_p9: Option<Dollars>,
pp_p10: Option<Dollars>,
pp_p11: Option<Dollars>,
pp_p12: Option<Dollars>,
pp_p13: Option<Dollars>,
pp_p14: Option<Dollars>,
pp_p15: Option<Dollars>,
pp_p16: Option<Dollars>,
pp_p17: Option<Dollars>,
pp_p18: Option<Dollars>,
pp_p19: Option<Dollars>,
pp_p20: Option<Dollars>,
pp_p21: Option<Dollars>,
pp_p22: Option<Dollars>,
pp_p23: Option<Dollars>,
pp_p24: Option<Dollars>,
pp_p25: Option<Dollars>,
pp_p26: Option<Dollars>,
pp_p27: Option<Dollars>,
pp_p28: Option<Dollars>,
pp_p29: Option<Dollars>,
pp_p30: Option<Dollars>,
pp_p31: Option<Dollars>,
pp_p32: Option<Dollars>,
pp_p33: Option<Dollars>,
pp_p34: Option<Dollars>,
pp_p35: Option<Dollars>,
pp_p36: Option<Dollars>,
pp_p37: Option<Dollars>,
pp_p38: Option<Dollars>,
pp_p39: Option<Dollars>,
pp_p40: Option<Dollars>,
pp_p41: Option<Dollars>,
pp_p42: Option<Dollars>,
pp_p43: Option<Dollars>,
pp_p44: Option<Dollars>,
pp_p45: Option<Dollars>,
pp_p46: Option<Dollars>,
pp_p47: Option<Dollars>,
pp_p48: Option<Dollars>,
pp_p49: Option<Dollars>,
pp_p50: Option<Dollars>,
pp_p51: Option<Dollars>,
pp_p52: Option<Dollars>,
pp_p53: Option<Dollars>,
pp_p54: Option<Dollars>,
pp_p55: Option<Dollars>,
pp_p56: Option<Dollars>,
pp_p57: Option<Dollars>,
pp_p58: Option<Dollars>,
pp_p59: Option<Dollars>,
pp_p60: Option<Dollars>,
pp_p61: Option<Dollars>,
pp_p62: Option<Dollars>,
pp_p63: Option<Dollars>,
pp_p64: Option<Dollars>,
pp_p65: Option<Dollars>,
pp_p66: Option<Dollars>,
pp_p67: Option<Dollars>,
pp_p68: Option<Dollars>,
pp_p69: Option<Dollars>,
pp_p70: Option<Dollars>,
pp_p71: Option<Dollars>,
pp_p72: Option<Dollars>,
pp_p73: Option<Dollars>,
pp_p74: Option<Dollars>,
pp_p75: Option<Dollars>,
pp_p76: Option<Dollars>,
pp_p77: Option<Dollars>,
pp_p78: Option<Dollars>,
pp_p79: Option<Dollars>,
pp_p80: Option<Dollars>,
pp_p81: Option<Dollars>,
pp_p82: Option<Dollars>,
pp_p83: Option<Dollars>,
pp_p84: Option<Dollars>,
pp_p85: Option<Dollars>,
pp_p86: Option<Dollars>,
pp_p87: Option<Dollars>,
pp_p88: Option<Dollars>,
pp_p89: Option<Dollars>,
pp_p90: Option<Dollars>,
pp_p91: Option<Dollars>,
pp_p92: Option<Dollars>,
pp_p93: Option<Dollars>,
pp_p94: Option<Dollars>,
pp_p95: Option<Dollars>,
pp_p96: Option<Dollars>,
pp_p97: Option<Dollars>,
pp_p98: Option<Dollars>,
pp_p99: Option<Dollars>,
processed_amount: Sats,
}

View File

@@ -4,21 +4,17 @@
// #![doc = "```"]
mod block;
mod cohort;
mod outputs;
mod realized;
// mod hot;
mod cohorts;
mod price_to_amount;
mod realized;
mod supply;
mod transacted;
mod unrealized;
pub use block::*;
pub use cohort::*;
pub use outputs::*;
pub use realized::*;
pub use unrealized::*;
// pub use hot::*;
pub use cohorts::*;
pub use price_to_amount::*;
pub use realized::*;
pub use supply::*;
pub use transacted::*;
pub use unrealized::*;

View File

@@ -1,178 +0,0 @@
use super::OutputFilter;
#[derive(Default, Clone)]
pub struct OutputsBySizeRange<T> {
pub _0sats: T,
pub from_1sat_to_10sats: T,
pub from_10sats_to_100sats: T,
pub from_100sats_to_1_000sats: T,
pub from_1_000sats_to_10_000sats: T,
pub from_10_000sats_to_100_000sats: T,
pub from_100_000sats_to_1_000_000sats: T,
pub from_1_000_000sats_to_10_000_000sats: T,
pub from_10_000_000sats_to_1btc: T,
pub from_1btc_to_10btc: T,
pub from_10btc_to_100btc: T,
pub from_100btc_to_1_000btc: T,
pub from_1_000btc_to_10_000btc: T,
pub from_10_000btc_to_100_000btc: T,
pub from_100_000btc: T,
}
impl<T> From<OutputsBySizeRange<T>> for OutputsBySizeRange<(OutputFilter, T)> {
fn from(value: OutputsBySizeRange<T>) -> Self {
#[allow(clippy::inconsistent_digit_grouping)]
Self {
_0sats: (OutputFilter::To(1), value._0sats),
from_1sat_to_10sats: (OutputFilter::Range(1..10), value.from_1sat_to_10sats),
from_10sats_to_100sats: (OutputFilter::Range(10..100), value.from_10sats_to_100sats),
from_100sats_to_1_000sats: (
OutputFilter::Range(100..1_000),
value.from_100sats_to_1_000sats,
),
from_1_000sats_to_10_000sats: (
OutputFilter::Range(1_000..10_000),
value.from_1_000sats_to_10_000sats,
),
from_10_000sats_to_100_000sats: (
OutputFilter::Range(10_000..100_000),
value.from_10_000sats_to_100_000sats,
),
from_100_000sats_to_1_000_000sats: (
OutputFilter::Range(100_000..1_000_000),
value.from_100_000sats_to_1_000_000sats,
),
from_1_000_000sats_to_10_000_000sats: (
OutputFilter::Range(1_000_000..10_000_000),
value.from_1_000_000sats_to_10_000_000sats,
),
from_10_000_000sats_to_1btc: (
OutputFilter::Range(10_000_000..1_00_000_000),
value.from_10_000_000sats_to_1btc,
),
from_1btc_to_10btc: (
OutputFilter::Range(1_00_000_000..10_00_000_000),
value.from_1btc_to_10btc,
),
from_10btc_to_100btc: (
OutputFilter::Range(10_00_000_000..100_00_000_000),
value.from_10btc_to_100btc,
),
from_100btc_to_1_000btc: (
OutputFilter::Range(100_00_000_000..1_000_00_000_000),
value.from_100btc_to_1_000btc,
),
from_1_000btc_to_10_000btc: (
OutputFilter::Range(1_000_00_000_000..10_000_00_000_000),
value.from_1_000btc_to_10_000btc,
),
from_10_000btc_to_100_000btc: (
OutputFilter::Range(10_000_00_000_000..100_000_00_000_000),
value.from_10_000btc_to_100_000btc,
),
from_100_000btc: (
OutputFilter::From(100_000_00_000_000),
value.from_100_000btc,
),
}
}
}
impl<T> OutputsBySizeRange<T> {
#[allow(clippy::inconsistent_digit_grouping)]
pub fn get_mut(&mut self, group: usize) -> &mut T {
if group == 0 {
&mut self._0sats
} else if group == 1 {
&mut self.from_1sat_to_10sats
} else if group == 10 {
&mut self.from_10sats_to_100sats
} else if group == 100 {
&mut self.from_100sats_to_1_000sats
} else if group == 1_000 {
&mut self.from_1_000sats_to_10_000sats
} else if group == 10_000 {
&mut self.from_10_000sats_to_100_000sats
} else if group == 100_000 {
&mut self.from_100_000sats_to_1_000_000sats
} else if group == 1_000_000 {
&mut self.from_1_000_000sats_to_10_000_000sats
} else if group == 10_000_000 {
&mut self.from_10_000_000sats_to_1btc
} else if group == 1_00_000_000 {
&mut self.from_1btc_to_10btc
} else if group == 10_00_000_000 {
&mut self.from_10btc_to_100btc
} else if group == 100_00_000_000 {
&mut self.from_100btc_to_1_000btc
} else if group == 1_000_00_000_000 {
&mut self.from_1_000btc_to_10_000btc
} else if group == 10_000_00_000_000 {
&mut self.from_10_000btc_to_100_000btc
} else {
&mut self.from_100_000btc
}
}
pub fn as_vec(&self) -> [&T; 15] {
[
&self._0sats,
&self.from_1sat_to_10sats,
&self.from_10sats_to_100sats,
&self.from_100sats_to_1_000sats,
&self.from_1_000sats_to_10_000sats,
&self.from_10_000sats_to_100_000sats,
&self.from_100_000sats_to_1_000_000sats,
&self.from_1_000_000sats_to_10_000_000sats,
&self.from_10_000_000sats_to_1btc,
&self.from_1btc_to_10btc,
&self.from_10btc_to_100btc,
&self.from_100btc_to_1_000btc,
&self.from_1_000btc_to_10_000btc,
&self.from_10_000btc_to_100_000btc,
&self.from_100_000btc,
]
}
pub fn as_mut_vec(&mut self) -> [&mut T; 15] {
[
&mut self._0sats,
&mut self.from_1sat_to_10sats,
&mut self.from_10sats_to_100sats,
&mut self.from_100sats_to_1_000sats,
&mut self.from_1_000sats_to_10_000sats,
&mut self.from_10_000sats_to_100_000sats,
&mut self.from_100_000sats_to_1_000_000sats,
&mut self.from_1_000_000sats_to_10_000_000sats,
&mut self.from_10_000_000sats_to_1btc,
&mut self.from_1btc_to_10btc,
&mut self.from_10btc_to_100btc,
&mut self.from_100btc_to_1_000btc,
&mut self.from_1_000btc_to_10_000btc,
&mut self.from_10_000btc_to_100_000btc,
&mut self.from_100_000btc,
]
}
}
impl<T> OutputsBySizeRange<(OutputFilter, T)> {
pub fn vecs(&self) -> [&T; 15] {
[
&self._0sats.1,
&self.from_1sat_to_10sats.1,
&self.from_10sats_to_100sats.1,
&self.from_100sats_to_1_000sats.1,
&self.from_1_000sats_to_10_000sats.1,
&self.from_10_000sats_to_100_000sats.1,
&self.from_100_000sats_to_1_000_000sats.1,
&self.from_1_000_000sats_to_10_000_000sats.1,
&self.from_10_000_000sats_to_1btc.1,
&self.from_1btc_to_10btc.1,
&self.from_10btc_to_100btc.1,
&self.from_100btc_to_1_000btc.1,
&self.from_1_000btc_to_10_000btc.1,
&self.from_10_000btc_to_100_000btc.1,
&self.from_100_000btc.1,
]
}
}

View File

@@ -1,28 +0,0 @@
use super::OutputFilter;
#[derive(Default, Clone)]
pub struct OutputsByTerm<T> {
pub short: T,
pub long: T,
}
impl<T> OutputsByTerm<T> {
pub fn as_mut_vec(&mut self) -> [&mut T; 2] {
[&mut self.short, &mut self.long]
}
}
impl<T> OutputsByTerm<(OutputFilter, T)> {
pub fn vecs(&self) -> [&T; 2] {
[&self.short.1, &self.long.1]
}
}
impl<T> From<OutputsByTerm<T>> for OutputsByTerm<(OutputFilter, T)> {
fn from(value: OutputsByTerm<T>) -> Self {
Self {
short: (OutputFilter::To(5 * 30), value.short),
long: (OutputFilter::From(5 * 30), value.long),
}
}
}

View File

@@ -1,51 +0,0 @@
use std::ops::Range;
use brk_core::{HalvingEpoch, OutputType};
#[derive(Debug, Clone)]
pub enum OutputFilter {
All,
To(usize),
Range(Range<usize>),
From(usize),
Epoch(HalvingEpoch),
Type(OutputType),
}
impl OutputFilter {
pub fn contains(&self, value: usize) -> bool {
match self {
OutputFilter::All => true,
OutputFilter::To(to) => *to > value,
OutputFilter::From(from) => *from <= value,
OutputFilter::Range(r) => r.contains(&value),
OutputFilter::Epoch(_) => false,
OutputFilter::Type(_) => false,
}
}
pub fn includes(&self, other: &OutputFilter) -> bool {
match self {
OutputFilter::All => true,
OutputFilter::To(to) => match other {
OutputFilter::All => false,
OutputFilter::To(to2) => to >= to2,
OutputFilter::Range(range) => range.end <= *to,
OutputFilter::From(_) => false,
OutputFilter::Epoch(_) => false,
OutputFilter::Type(_) => false,
},
OutputFilter::From(from) => match other {
OutputFilter::All => false,
OutputFilter::To(_) => false,
OutputFilter::Range(range) => range.start >= *from,
OutputFilter::From(from2) => from <= from2,
OutputFilter::Epoch(_) => false,
OutputFilter::Type(_) => false,
},
OutputFilter::Range(_) => false,
OutputFilter::Epoch(_) => false,
OutputFilter::Type(_) => false,
}
}
}

View File

@@ -1,120 +0,0 @@
mod by_date_range;
mod by_epoch;
mod by_from_date;
mod by_from_size;
mod by_size_range;
mod by_spendable_type;
mod by_term;
mod by_type;
mod by_unspendable_type;
mod by_up_to_date;
mod by_up_to_size;
// mod by_value;
mod filter;
pub use by_date_range::*;
pub use by_epoch::*;
pub use by_from_date::*;
pub use by_from_size::*;
pub use by_size_range::*;
pub use by_spendable_type::*;
pub use by_term::*;
pub use by_type::*;
pub use by_unspendable_type::*;
pub use by_up_to_date::*;
pub use by_up_to_size::*;
// pub use by_value::*;
pub use filter::*;
#[derive(Default, Clone)]
pub struct Outputs<T> {
pub all: T,
pub by_date_range: OutputsByDateRange<T>,
pub by_epoch: OutputsByEpoch<T>,
pub by_from_date: OutputsByFromDate<T>,
pub by_from_size: OutputsByFromSize<T>,
pub by_size_range: OutputsBySizeRange<T>,
pub by_term: OutputsByTerm<T>,
pub by_type: OutputsBySpendableType<T>,
pub by_up_to_date: OutputsByUpToDate<T>,
pub by_up_to_size: OutputsByUpToSize<T>,
// Needs whole UTXO set, TODO later
// pub by_value: OutputsByValue<T>,
}
impl<T> Outputs<T> {
pub fn as_mut_vecs(&mut self) -> Vec<&mut T> {
[&mut self.all]
.into_iter()
.chain(self.by_term.as_mut_vec())
.chain(self.by_up_to_date.as_mut_vec())
.chain(self.by_from_date.as_mut_vec())
.chain(self.by_from_size.as_mut_vec())
.chain(self.by_date_range.as_mut_vec())
.chain(self.by_epoch.as_mut_vec())
.chain(self.by_size_range.as_mut_vec())
.chain(self.by_up_to_size.as_mut_vec())
.chain(self.by_type.as_mut_vec())
// .chain(self.by_value.as_mut_vec())
.collect::<Vec<_>>()
}
pub fn as_mut_separate_vecs(&mut self) -> Vec<&mut T> {
self.by_date_range
.as_mut_vec()
.into_iter()
.chain(self.by_epoch.as_mut_vec())
.chain(self.by_size_range.as_mut_vec())
.chain(self.by_type.as_mut_vec())
// .chain(self.by_value.as_mut_vec())
.collect::<Vec<_>>()
}
pub fn as_mut_overlapping_vecs(&mut self) -> Vec<&mut T> {
[&mut self.all]
.into_iter()
.chain(self.by_term.as_mut_vec())
.chain(self.by_up_to_date.as_mut_vec())
.chain(self.by_from_date.as_mut_vec())
.chain(self.by_up_to_size.as_mut_vec())
.chain(self.by_from_size.as_mut_vec())
.collect::<Vec<_>>()
}
}
impl<T> Outputs<(OutputFilter, T)> {
pub fn vecs(&self) -> Vec<&T> {
[&self.all.1]
.into_iter()
.chain(self.by_term.vecs())
.chain(self.by_up_to_date.vecs())
.chain(self.by_from_date.vecs())
.chain(self.by_date_range.vecs())
.chain(self.by_epoch.vecs())
.chain(self.by_size_range.vecs())
.chain(self.by_type.vecs())
.chain(self.by_up_to_size.vecs())
.chain(self.by_from_size.vecs())
// .chain(self.by_value.vecs())
.collect::<Vec<_>>()
}
}
impl<T> From<Outputs<T>> for Outputs<(OutputFilter, T)> {
fn from(value: Outputs<T>) -> Self {
Self {
all: (OutputFilter::All, value.all),
by_term: OutputsByTerm::from(value.by_term),
by_up_to_date: OutputsByUpToDate::from(value.by_up_to_date),
by_from_date: OutputsByFromDate::from(value.by_from_date),
by_date_range: OutputsByDateRange::from(value.by_date_range),
by_epoch: OutputsByEpoch::from(value.by_epoch),
by_size_range: OutputsBySizeRange::from(value.by_size_range),
by_up_to_size: OutputsByUpToSize::from(value.by_up_to_size),
by_from_size: OutputsByFromSize::from(value.by_from_size),
// Needs whole UTXO set, TODO later
// by_value: OutputsByValue<T>,
by_type: OutputsBySpendableType::from(value.by_type),
}
}
}

View File

@@ -1,18 +1,14 @@
use std::{
collections::BTreeMap,
mem,
ops::{Add, AddAssign},
};
use std::ops::{Add, AddAssign};
use brk_core::{OutputType, Sats};
use brk_core::{GroupedBySizeRange, GroupedByType, OutputType, Sats};
use super::{OutputsByType, SupplyState};
use super::SupplyState;
#[derive(Default, Debug)]
pub struct Transacted {
pub spendable_supply: SupplyState,
pub by_type: OutputsByType<SupplyState>,
pub by_size_group: BTreeMap<usize, SupplyState>,
pub by_type: GroupedByType<SupplyState>,
pub by_size_group: GroupedBySizeRange<SupplyState>,
}
impl Transacted {
@@ -28,55 +24,7 @@ impl Transacted {
self.spendable_supply += &supply;
let _value = usize::from(value);
// Need to be in sync with by_size !! but plenty fast (I think)
if _value == 0 {
*self.by_size_group.entry(0).or_default() += &supply;
} else if _value < 10 {
*self.by_size_group.entry(1).or_default() += &supply;
} else if _value < 100 {
*self.by_size_group.entry(10).or_default() += &supply;
} else if _value < 1_000 {
*self.by_size_group.entry(100).or_default() += &supply;
} else if _value < 10_000 {
*self.by_size_group.entry(1_000).or_default() += &supply;
} else if _value < 100_000 {
*self.by_size_group.entry(10_000).or_default() += &supply;
} else if _value < 1_000_000 {
*self.by_size_group.entry(100_000).or_default() += &supply;
} else if _value < 10_000_000 {
*self.by_size_group.entry(1_000_000).or_default() += &supply;
} else if _value < 1_00_000_000 {
*self.by_size_group.entry(10_000_000).or_default() += &supply;
} else if _value < 10_00_000_000 {
*self.by_size_group.entry(1_00_000_000).or_default() += &supply;
} else if _value < 100_00_000_000 {
*self.by_size_group.entry(10_00_000_000).or_default() += &supply;
} else if _value < 1_000_00_000_000 {
*self.by_size_group.entry(100_00_000_000).or_default() += &supply;
} else if _value < 10_000_00_000_000 {
*self.by_size_group.entry(1_000_00_000_000).or_default() += &supply;
} else if _value < 100_000_00_000_000 {
*self.by_size_group.entry(10_000_00_000_000).or_default() += &supply;
} else {
*self.by_size_group.entry(100_000_00_000_000).or_default() += &supply;
}
}
fn merge_by_size(
first: BTreeMap<usize, SupplyState>,
second: BTreeMap<usize, SupplyState>,
) -> BTreeMap<usize, SupplyState> {
let (mut source, to_consume) = if first.len() > second.len() {
(first, second)
} else {
(second, first)
};
to_consume.into_iter().for_each(|(k, v)| {
*source.entry(k).or_default() += &v;
});
source
*self.by_size_group.get_mut(value) += &supply;
}
}
@@ -86,15 +34,14 @@ impl Add for Transacted {
Self {
spendable_supply: self.spendable_supply + rhs.spendable_supply,
by_type: self.by_type + rhs.by_type,
by_size_group: Self::merge_by_size(self.by_size_group, rhs.by_size_group),
by_size_group: self.by_size_group + rhs.by_size_group,
}
}
}
impl AddAssign for Transacted {
fn add_assign(&mut self, rhs: Self) {
self.by_size_group =
Self::merge_by_size(mem::take(&mut self.by_size_group), rhs.by_size_group);
self.by_size_group += rhs.by_size_group;
self.spendable_supply += &rhs.spendable_supply;
self.by_type += rhs.by_type;
}