global: snapshot

This commit is contained in:
nym21
2025-12-27 20:34:13 +01:00
parent f9856cf0aa
commit 9ba77dac0f
28 changed files with 316 additions and 614 deletions

View File

@@ -12,6 +12,8 @@ use vecdb::{
use super::Indexes;
pub const DB_NAME: &str = "blks";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -22,7 +24,7 @@ pub struct Vecs {
impl Vecs {
pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result<Self> {
let db = Database::open(&parent_path.join("blks"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let version = parent_version + Version::ZERO;
@@ -39,7 +41,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)
@@ -70,7 +71,8 @@ impl Vecs {
let Some(min_height) = indexer
.vecs
.tx.txindex_to_height
.tx
.txindex_to_height
.iter()?
.get(min_txindex)
.map(|h| h.min(starting_indexes.height))

View File

@@ -31,7 +31,7 @@ impl Vecs {
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let db = Database::open(&parent_path.join("chain"))?;
let db = Database::open(&parent_path.join(super::DB_NAME))?;
db.set_min_len(PAGE_SIZE * 50_000_000)?;
let version = parent_version + Version::ZERO;
@@ -463,7 +463,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -14,6 +14,8 @@ use crate::grouped::{
ComputedVecsFromHeight, ComputedVecsFromTxindex,
};
pub const DB_NAME: &str = "chain";
pub(crate) const TARGET_BLOCKS_PER_DAY_F64: f64 = 144.0;
pub(crate) const TARGET_BLOCKS_PER_DAY_F32: f32 = 144.0;
pub(crate) const TARGET_BLOCKS_PER_DAY: u64 = 144;

View File

@@ -16,6 +16,8 @@ use super::{
indexes, price, stateful,
};
pub const DB_NAME: &str = "cointime";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -56,7 +58,7 @@ impl Vecs {
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let db = Database::open(&parent_path.join("cointime"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let compute_dollars = price.is_some();
@@ -174,7 +176,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -13,6 +13,8 @@ use super::{
indexes,
};
pub const DB_NAME: &str = "constants";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -39,7 +41,7 @@ impl Vecs {
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join("constants"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let version = parent_version + Version::ZERO;
@@ -166,7 +168,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -48,7 +48,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -18,6 +18,7 @@ use vecdb::{
};
const VERSION: Version = Version::ZERO;
pub const DB_NAME: &str = "indexes";
#[derive(Clone, Traversable)]
pub struct Vecs {
@@ -101,7 +102,7 @@ impl Vecs {
parent_version: Version,
indexer: &Indexer,
) -> Result<Self> {
let db = Database::open(&parent.join("indexes"))?;
let db = Database::open(&parent.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
let version = parent_version + VERSION;
@@ -222,7 +223,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -1,6 +1,6 @@
#![doc = include_str!("../README.md")]
use std::{path::Path, thread, time::Instant};
use std::{fs, path::Path, thread, time::Instant};
use brk_error::Result;
use brk_fetcher::Fetcher;
@@ -147,7 +147,7 @@ impl Computer {
info!("Total import time: {:?}", import_start.elapsed());
Ok(Self {
let this = Self {
constants,
market,
stateful,
@@ -160,7 +160,50 @@ impl Computer {
fetched,
price,
txouts,
})
};
Self::retain_databases(&computed_path)?;
Ok(this)
}
/// Deletes every directory under `computed_path` whose name is not one of the
/// currently-used database folders, so databases dropped from the codebase do
/// not linger on disk. A missing `computed_path` is treated as "nothing to do".
fn retain_databases(computed_path: &Path) -> Result<()> {
    // Folder names that belong to a database still in use.
    const EXPECTED_DBS: &[&str] = &[
        blks::DB_NAME,
        chain::DB_NAME,
        cointime::DB_NAME,
        constants::DB_NAME,
        indexes::DB_NAME,
        market::DB_NAME,
        pools::DB_NAME,
        price::DB_NAME,
        stateful::DB_NAME,
        txins::DB_NAME,
        txouts::DB_NAME,
    ];
    if !computed_path.exists() {
        return Ok(());
    }
    for entry in fs::read_dir(computed_path)? {
        let entry = entry?;
        if !entry.file_type()?.is_dir() {
            continue;
        }
        let file_name = entry.file_name();
        let Some(name) = file_name.to_str() else {
            // Non-UTF-8 folder names are left untouched, as before.
            continue;
        };
        if EXPECTED_DBS.contains(&name) {
            continue;
        }
        info!("Removing obsolete database folder: {}", name);
        fs::remove_dir_all(entry.path())?;
    }
    Ok(())
}
pub fn compute(

View File

@@ -21,7 +21,7 @@ impl Vecs {
parent_version: Version,
indexes: &indexes::Vecs,
) -> Result<Self> {
let db = Database::open(&parent_path.join("market"))?;
let db = Database::open(&parent_path.join(super::DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let version = parent_version + Version::ZERO;
@@ -32,20 +32,49 @@ impl Vecs {
macro_rules! computed_di {
($name:expr) => {
ComputedVecsFromDateIndex::forced_import(&db, $name, Source::Compute, version + v0, indexes, last.clone())?
ComputedVecsFromDateIndex::forced_import(
&db,
$name,
Source::Compute,
version + v0,
indexes,
last.clone(),
)?
};
($name:expr, $v:expr) => {
ComputedVecsFromDateIndex::forced_import(&db, $name, Source::Compute, version + $v, indexes, last.clone())?
ComputedVecsFromDateIndex::forced_import(
&db,
$name,
Source::Compute,
version + $v,
indexes,
last.clone(),
)?
};
}
macro_rules! ratio_di {
($name:expr) => {
ComputedRatioVecsFromDateIndex::forced_import(&db, $name, Source::Compute, version + v0, indexes, true)?
ComputedRatioVecsFromDateIndex::forced_import(
&db,
$name,
Source::Compute,
version + v0,
indexes,
true,
)?
};
}
macro_rules! sd_di {
($name:expr, $window:expr, $v:expr) => {
ComputedStandardDeviationVecsFromDateIndex::forced_import(&db, $name, $window, Source::Compute, version + $v, indexes, StandardDeviationVecsOptions::default())?
ComputedStandardDeviationVecsFromDateIndex::forced_import(
&db,
$name,
$window,
Source::Compute,
version + $v,
indexes,
StandardDeviationVecsOptions::default(),
)?
};
}
macro_rules! eager_h {
@@ -239,7 +268,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -10,6 +10,8 @@ use crate::grouped::{
ComputedVecsFromDateIndex,
};
pub const DB_NAME: &str = "market";
#[derive(Clone, Traversable)]
pub struct Vecs {
pub(crate) db: Database,

View File

@@ -19,6 +19,8 @@ use crate::{
price,
};
pub const DB_NAME: &str = "pools";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -35,7 +37,7 @@ impl Vecs {
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let db = Database::open(&parent_path.join("pools"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let pools = pools();
@@ -66,7 +68,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)
@@ -129,8 +130,7 @@ impl Vecs {
let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter();
let mut txoutindex_to_outputtype_iter =
indexer.vecs.txout.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter =
indexer.vecs.txout.txoutindex_to_typeindex.iter()?;
let mut txoutindex_to_typeindex_iter = indexer.vecs.txout.txoutindex_to_typeindex.iter()?;
let mut p2pk65addressindex_to_p2pk65bytes_iter = indexer
.vecs
.address

View File

@@ -16,6 +16,8 @@ use super::{
indexes,
};
pub const DB_NAME: &str = "price";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -71,26 +73,44 @@ const VERSION_IN_SATS: Version = Version::ZERO;
impl Vecs {
pub fn forced_import(parent: &Path, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
let db = Database::open(&parent.join("price"))?;
let db = Database::open(&parent.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 1_000_000)?;
let v = version + VERSION;
let v_sats = version + VERSION + VERSION_IN_SATS;
macro_rules! eager {
($name:expr) => { EagerVec::forced_import(&db, $name, v)? };
($name:expr) => {
EagerVec::forced_import(&db, $name, v)?
};
}
macro_rules! eager_sats {
($name:expr) => { EagerVec::forced_import(&db, $name, v_sats)? };
($name:expr) => {
EagerVec::forced_import(&db, $name, v_sats)?
};
}
macro_rules! computed_di {
($name:expr, $opts:expr) => {
ComputedVecsFromDateIndex::forced_import(&db, $name, Source::Compute, v, indexes, $opts)?
ComputedVecsFromDateIndex::forced_import(
&db,
$name,
Source::Compute,
v,
indexes,
$opts,
)?
};
}
macro_rules! computed_di_sats {
($name:expr, $opts:expr) => {
ComputedVecsFromDateIndex::forced_import(&db, $name, Source::Compute, v_sats, indexes, $opts)?
ComputedVecsFromDateIndex::forced_import(
&db,
$name,
Source::Compute,
v_sats,
indexes,
$opts,
)?
};
}
macro_rules! computed_h {
@@ -162,7 +182,6 @@ impl Vecs {
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)

View File

@@ -1,427 +0,0 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec, PcoVec,
};
use crate::{
Indexes,
grouped::{
ComputedHeightValueVecs, ComputedValueVecsFromDateIndex, ComputedVecsFromDateIndex, Source,
VecBuilderOptions,
},
stateful::states::UnrealizedState,
};
use super::ImportConfig;
/// Unrealized profit/loss metrics.
///
/// Tracks, per block height and per date, the supply (in sats) currently held
/// in profit or in loss, the corresponding unrealized profit/loss in dollars,
/// and derived negated / net / total variants.
#[derive(Clone, Traversable)]
pub struct UnrealizedMetrics {
    // === Supply in Profit/Loss ===
    // Sats in profit/loss, indexed by block height.
    pub height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>>,
    // Date-indexed aggregates sourced from `dateindex_to_supply_in_profit`.
    pub indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex,
    pub height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>>,
    pub indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex,
    // Per-date sats in profit/loss; pushed directly in `truncate_push`.
    pub dateindex_to_supply_in_profit: EagerVec<PcoVec<DateIndex, Sats>>,
    pub dateindex_to_supply_in_loss: EagerVec<PcoVec<DateIndex, Sats>>,
    // Value counterparts of the height-indexed sats supply; filled in
    // `compute_rest_part1` from the sats vecs (and price when available).
    pub height_to_supply_in_profit_value: ComputedHeightValueVecs,
    pub height_to_supply_in_loss_value: ComputedHeightValueVecs,
    // === Unrealized Profit/Loss ===
    // Dollar-denominated unrealized profit/loss, per height and per date.
    pub height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_unrealized_profit: ComputedVecsFromDateIndex<Dollars>,
    pub height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_unrealized_loss: ComputedVecsFromDateIndex<Dollars>,
    pub dateindex_to_unrealized_profit: EagerVec<PcoVec<DateIndex, Dollars>>,
    pub dateindex_to_unrealized_loss: EagerVec<PcoVec<DateIndex, Dollars>>,
    // === Negated and Net ===
    // neg = loss * -1; net = profit - loss; total = profit + loss
    // (see `compute_rest_part1`).
    pub height_to_neg_unrealized_loss: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_neg_unrealized_loss: ComputedVecsFromDateIndex<Dollars>,
    pub height_to_net_unrealized_pnl: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_net_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
    pub height_to_total_unrealized_pnl: EagerVec<PcoVec<Height, Dollars>>,
    pub indexes_to_total_unrealized_pnl: ComputedVecsFromDateIndex<Dollars>,
}
impl UnrealizedMetrics {
    /// Import unrealized metrics from database.
    ///
    /// The four `dateindex_to_*` vecs are imported first because the
    /// corresponding `indexes_to_*` vecs take boxed clones of them as their
    /// `Source::Vec`; the originals are then moved into the struct literal.
    ///
    /// # Errors
    /// Propagates any error from the underlying `forced_import` calls.
    pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
        let v0 = Version::ZERO;
        let compute_dollars = cfg.compute_dollars();
        let last = VecBuilderOptions::default().add_last();
        // Pre-import the dateindex vecs that are used as sources
        let dateindex_to_supply_in_profit =
            EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version + v0)?;
        let dateindex_to_supply_in_loss =
            EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version + v0)?;
        let dateindex_to_unrealized_profit =
            EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version + v0)?;
        let dateindex_to_unrealized_loss =
            EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version + v0)?;
        Ok(Self {
            // === Supply in Profit/Loss ===
            height_to_supply_in_profit: EagerVec::forced_import(
                cfg.db,
                &cfg.name("supply_in_profit"),
                cfg.version + v0,
            )?,
            indexes_to_supply_in_profit: ComputedValueVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("supply_in_profit"),
                Source::Vec(dateindex_to_supply_in_profit.boxed_clone()),
                cfg.version + v0,
                last,
                compute_dollars,
                cfg.indexes,
            )?,
            height_to_supply_in_loss: EagerVec::forced_import(
                cfg.db,
                &cfg.name("supply_in_loss"),
                cfg.version + v0,
            )?,
            indexes_to_supply_in_loss: ComputedValueVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("supply_in_loss"),
                Source::Vec(dateindex_to_supply_in_loss.boxed_clone()),
                cfg.version + v0,
                last,
                compute_dollars,
                cfg.indexes,
            )?,
            dateindex_to_supply_in_profit,
            dateindex_to_supply_in_loss,
            // Value vecs have no vec source here; they are computed later in
            // `compute_rest_part1` from the height-indexed sats vecs.
            height_to_supply_in_profit_value: ComputedHeightValueVecs::forced_import(
                cfg.db,
                &cfg.name("supply_in_profit"),
                Source::None,
                cfg.version + v0,
                compute_dollars,
            )?,
            height_to_supply_in_loss_value: ComputedHeightValueVecs::forced_import(
                cfg.db,
                &cfg.name("supply_in_loss"),
                Source::None,
                cfg.version + v0,
                compute_dollars,
            )?,
            // === Unrealized Profit/Loss ===
            height_to_unrealized_profit: EagerVec::forced_import(
                cfg.db,
                &cfg.name("unrealized_profit"),
                cfg.version + v0,
            )?,
            indexes_to_unrealized_profit: ComputedVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("unrealized_profit"),
                Source::Vec(dateindex_to_unrealized_profit.boxed_clone()),
                cfg.version + v0,
                cfg.indexes,
                last,
            )?,
            height_to_unrealized_loss: EagerVec::forced_import(
                cfg.db,
                &cfg.name("unrealized_loss"),
                cfg.version + v0,
            )?,
            indexes_to_unrealized_loss: ComputedVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("unrealized_loss"),
                Source::Vec(dateindex_to_unrealized_loss.boxed_clone()),
                cfg.version + v0,
                cfg.indexes,
                last,
            )?,
            dateindex_to_unrealized_profit,
            dateindex_to_unrealized_loss,
            // === Negated and Net ===
            // All derived (Source::Compute) — filled in `compute_rest_part1`.
            height_to_neg_unrealized_loss: EagerVec::forced_import(
                cfg.db,
                &cfg.name("neg_unrealized_loss"),
                cfg.version + v0,
            )?,
            indexes_to_neg_unrealized_loss: ComputedVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("neg_unrealized_loss"),
                Source::Compute,
                cfg.version + v0,
                cfg.indexes,
                last,
            )?,
            height_to_net_unrealized_pnl: EagerVec::forced_import(
                cfg.db,
                &cfg.name("net_unrealized_pnl"),
                cfg.version + v0,
            )?,
            indexes_to_net_unrealized_pnl: ComputedVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("net_unrealized_pnl"),
                Source::Compute,
                cfg.version + v0,
                cfg.indexes,
                last,
            )?,
            height_to_total_unrealized_pnl: EagerVec::forced_import(
                cfg.db,
                &cfg.name("total_unrealized_pnl"),
                cfg.version + v0,
            )?,
            indexes_to_total_unrealized_pnl: ComputedVecsFromDateIndex::forced_import(
                cfg.db,
                &cfg.name("total_unrealized_pnl"),
                Source::Compute,
                cfg.version + v0,
                cfg.indexes,
                last,
            )?,
        })
    }
    /// Push unrealized state values to height-indexed vectors.
    ///
    /// The date-indexed vecs are only pushed when BOTH `dateindex` and
    /// `date_state` are `Some`; otherwise only the height-indexed vecs change.
    pub fn truncate_push(
        &mut self,
        height: Height,
        dateindex: Option<DateIndex>,
        height_state: &UnrealizedState,
        date_state: Option<&UnrealizedState>,
    ) -> Result<()> {
        self.height_to_supply_in_profit
            .truncate_push(height, height_state.supply_in_profit)?;
        self.height_to_supply_in_loss
            .truncate_push(height, height_state.supply_in_loss)?;
        self.height_to_unrealized_profit
            .truncate_push(height, height_state.unrealized_profit)?;
        self.height_to_unrealized_loss
            .truncate_push(height, height_state.unrealized_loss)?;
        if let (Some(dateindex), Some(date_state)) = (dateindex, date_state) {
            self.dateindex_to_supply_in_profit
                .truncate_push(dateindex, date_state.supply_in_profit)?;
            self.dateindex_to_supply_in_loss
                .truncate_push(dateindex, date_state.supply_in_loss)?;
            self.dateindex_to_unrealized_profit
                .truncate_push(dateindex, date_state.unrealized_profit)?;
            self.dateindex_to_unrealized_loss
                .truncate_push(dateindex, date_state.unrealized_loss)?;
        }
        Ok(())
    }
    /// Write height- and date-indexed vectors to disk (same set of vecs as
    /// `par_iter_mut`, written sequentially).
    pub fn write(&mut self) -> Result<()> {
        self.height_to_supply_in_profit.write()?;
        self.height_to_supply_in_loss.write()?;
        self.height_to_unrealized_profit.write()?;
        self.height_to_unrealized_loss.write()?;
        self.dateindex_to_supply_in_profit.write()?;
        self.dateindex_to_supply_in_loss.write()?;
        self.dateindex_to_unrealized_profit.write()?;
        self.dateindex_to_unrealized_loss.write()?;
        Ok(())
    }
    /// Returns a parallel iterator over all vecs for parallel writing.
    pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
        vec![
            &mut self.height_to_supply_in_profit as &mut dyn AnyStoredVec,
            &mut self.height_to_supply_in_loss as &mut dyn AnyStoredVec,
            &mut self.height_to_unrealized_profit as &mut dyn AnyStoredVec,
            &mut self.height_to_unrealized_loss as &mut dyn AnyStoredVec,
            &mut self.dateindex_to_supply_in_profit as &mut dyn AnyStoredVec,
            &mut self.dateindex_to_supply_in_loss as &mut dyn AnyStoredVec,
            &mut self.dateindex_to_unrealized_profit as &mut dyn AnyStoredVec,
            &mut self.dateindex_to_unrealized_loss as &mut dyn AnyStoredVec,
        ]
        .into_par_iter()
    }
    /// Compute aggregate values from separate cohorts.
    ///
    /// Each height- and date-indexed vec of `self` becomes the element-wise
    /// sum of the same vec across `others`, starting from `starting_indexes`.
    pub fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()> {
        self.height_to_supply_in_profit.compute_sum_of_others(
            starting_indexes.height,
            &others
                .iter()
                .map(|v| &v.height_to_supply_in_profit)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.height_to_supply_in_loss.compute_sum_of_others(
            starting_indexes.height,
            &others
                .iter()
                .map(|v| &v.height_to_supply_in_loss)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.height_to_unrealized_profit.compute_sum_of_others(
            starting_indexes.height,
            &others
                .iter()
                .map(|v| &v.height_to_unrealized_profit)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.height_to_unrealized_loss.compute_sum_of_others(
            starting_indexes.height,
            &others
                .iter()
                .map(|v| &v.height_to_unrealized_loss)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.dateindex_to_supply_in_profit.compute_sum_of_others(
            starting_indexes.dateindex,
            &others
                .iter()
                .map(|v| &v.dateindex_to_supply_in_profit)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.dateindex_to_supply_in_loss.compute_sum_of_others(
            starting_indexes.dateindex,
            &others
                .iter()
                .map(|v| &v.dateindex_to_supply_in_loss)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.dateindex_to_unrealized_profit.compute_sum_of_others(
            starting_indexes.dateindex,
            &others
                .iter()
                .map(|v| &v.dateindex_to_unrealized_profit)
                .collect::<Vec<_>>(),
            exit,
        )?;
        self.dateindex_to_unrealized_loss.compute_sum_of_others(
            starting_indexes.dateindex,
            &others
                .iter()
                .map(|v| &v.dateindex_to_unrealized_loss)
                .collect::<Vec<_>>(),
            exit,
        )?;
        Ok(())
    }
    /// First phase of computed metrics.
    ///
    /// Derives value vecs and date-indexed aggregates from the raw
    /// height/date vecs, then the total / negated / net P/L combinations.
    pub fn compute_rest_part1(
        &mut self,
        price: Option<&crate::price::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        // Compute supply value (bitcoin + dollars) from sats
        self.height_to_supply_in_profit_value.compute_rest(
            price,
            starting_indexes,
            exit,
            Some(&self.height_to_supply_in_profit),
        )?;
        self.height_to_supply_in_loss_value.compute_rest(
            price,
            starting_indexes,
            exit,
            Some(&self.height_to_supply_in_loss),
        )?;
        // Compute indexes from dateindex sources
        self.indexes_to_supply_in_profit.compute_rest(
            price,
            starting_indexes,
            exit,
            Some(&self.dateindex_to_supply_in_profit),
        )?;
        self.indexes_to_supply_in_loss.compute_rest(
            price,
            starting_indexes,
            exit,
            Some(&self.dateindex_to_supply_in_loss),
        )?;
        self.indexes_to_unrealized_profit.compute_rest(
            starting_indexes,
            exit,
            Some(&self.dateindex_to_unrealized_profit),
        )?;
        self.indexes_to_unrealized_loss.compute_rest(
            starting_indexes,
            exit,
            Some(&self.dateindex_to_unrealized_loss),
        )?;
        // total_unrealized_pnl = profit + loss
        self.height_to_total_unrealized_pnl.compute_add(
            starting_indexes.height,
            &self.height_to_unrealized_profit,
            &self.height_to_unrealized_loss,
            exit,
        )?;
        self.indexes_to_total_unrealized_pnl
            .compute_all(starting_indexes, exit, |vec| {
                vec.compute_add(
                    starting_indexes.dateindex,
                    &self.dateindex_to_unrealized_profit,
                    &self.dateindex_to_unrealized_loss,
                    exit,
                )?;
                Ok(())
            })?;
        // neg_unrealized_loss = loss * -1
        self.height_to_neg_unrealized_loss.compute_transform(
            starting_indexes.height,
            &self.height_to_unrealized_loss,
            |(h, v, ..)| (h, v * -1_i64),
            exit,
        )?;
        self.indexes_to_neg_unrealized_loss
            .compute_all(starting_indexes, exit, |vec| {
                vec.compute_transform(
                    starting_indexes.dateindex,
                    &self.dateindex_to_unrealized_loss,
                    |(h, v, ..)| (h, v * -1_i64),
                    exit,
                )?;
                Ok(())
            })?;
        // net_unrealized_pnl = profit - loss
        self.height_to_net_unrealized_pnl.compute_subtract(
            starting_indexes.height,
            &self.height_to_unrealized_profit,
            &self.height_to_unrealized_loss,
            exit,
        )?;
        self.indexes_to_net_unrealized_pnl
            .compute_all(starting_indexes, exit, |vec| {
                vec.compute_subtract(
                    starting_indexes.dateindex,
                    &self.dateindex_to_unrealized_profit,
                    &self.dateindex_to_unrealized_loss,
                    exit,
                )?;
                Ok(())
            })?;
        Ok(())
    }
}

View File

@@ -11,5 +11,7 @@ use states::*;
pub use range_map::RangeMap;
pub use vecs::Vecs;
pub const DB_NAME: &str = "stateful";
pub use address::{AddressTypeToTypeIndexMap, AddressesDataVecs, AnyAddressIndexesVecs};
pub use cohorts::{AddressCohorts, CohortVecs, DynCohortVecs, UTXOCohorts};

View File

@@ -19,11 +19,12 @@ use crate::{
ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight, Source,
VecBuilderOptions,
},
indexes, price, txins,
indexes, price,
stateful::{
compute::{StartMode, determine_start_mode, process_blocks, recover_state, reset_state},
states::BlockState,
},
txins,
utils::OptionExt,
};
@@ -75,7 +76,7 @@ impl Vecs {
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let db_path = parent.join("stateful");
let db_path = parent.join(super::DB_NAME);
let states_path = db_path.join("states");
let db = Database::open(&db_path)?;
@@ -112,7 +113,7 @@ impl Vecs {
|index, _| Some(index),
);
Ok(Self {
let this = Self {
chain_state: BytesVec::forced_import_with(
vecdb::ImportOptions::new(&db, "chain", v0)
.with_saved_stamped_changes(SAVED_STAMPED_CHANGES),
@@ -221,7 +222,16 @@ impl Vecs {
emptyaddressindex_to_emptyaddressindex,
db,
})
};
this.db.retain_regions(
this.iter_any_exportable()
.flat_map(|v| v.region_names())
.collect(),
)?;
this.db.compact()?;
Ok(this)
}
/// Main computation loop.

View File

@@ -13,6 +13,7 @@ use vecdb::{
use super::Indexes;
const BATCH_SIZE: usize = 2 * 1024 * 1024 * 1024 / size_of::<Entry>();
pub const DB_NAME: &str = "txins";
#[derive(Clone, Traversable)]
pub struct Vecs {
@@ -23,7 +24,7 @@ pub struct Vecs {
impl Vecs {
pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result<Self> {
let db = Database::open(&parent_path.join("txins"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
let version = parent_version + Version::ZERO;

View File

@@ -12,6 +12,8 @@ use vecdb::{
use super::{Indexes, txins};
pub const DB_NAME: &str = "txouts";
#[derive(Clone, Traversable)]
pub struct Vecs {
db: Database,
@@ -20,7 +22,7 @@ pub struct Vecs {
impl Vecs {
pub fn forced_import(parent_path: &Path, parent_version: Version) -> Result<Self> {
let db = Database::open(&parent_path.join("txouts"))?;
let db = Database::open(&parent_path.join(DB_NAME))?;
db.set_min_len(PAGE_SIZE * 10_000_000)?;
let version = parent_version + Version::ZERO;