global: snapshot

This commit is contained in:
nym21
2025-12-12 16:55:55 +01:00
parent e755f2856a
commit 3526a177fc
66 changed files with 1964 additions and 9175 deletions

View File

@@ -1,7 +1,6 @@
[workspace]
resolver = "3"
members = ["crates/*"]
default-members = ["crates/brk_cli"]
package.description = "The Bitcoin Research Kit is a suite of tools designed to extract, compute and display data stored on a Bitcoin Core node"
package.license = "MIT"
package.edition = "2024"

View File

@@ -6,6 +6,12 @@ use std::{
time::Instant,
};
/// Patterns to match for progress tracking.
const PROGRESS_PATTERNS: &[&str] = &[
"block ", // "Indexing block 123..."
"chain at ", // "Processing chain at 456..."
];
pub struct ProgressionMonitor {
csv_file: Mutex<BufWriter<fs::File>>,
start_time: Instant,
@@ -14,7 +20,7 @@ pub struct ProgressionMonitor {
impl ProgressionMonitor {
pub fn new(csv_path: &Path) -> io::Result<Self> {
let mut csv_file = BufWriter::new(fs::File::create(csv_path)?);
writeln!(csv_file, "timestamp_ms,block_number")?;
writeln!(csv_file, "timestamp_ms,value")?;
Ok(Self {
csv_file: Mutex::new(csv_file),
@@ -22,20 +28,19 @@ impl ProgressionMonitor {
})
}
/// Fast inline check and record
/// Check message for progress patterns and record if found
#[inline]
pub fn check_and_record(&self, message: &str) {
if !message.contains("block ") {
let Some(value) = parse_progress(message) else {
return;
};
if value % 10 != 0 {
return;
}
if let Some(block_num) = parse_block_number(message)
&& block_num % 10 == 0
{
let elapsed_ms = self.start_time.elapsed().as_millis();
let mut writer = self.csv_file.lock();
let _ = writeln!(writer, "{},{}", elapsed_ms, block_num);
}
let elapsed_ms = self.start_time.elapsed().as_millis();
let _ = writeln!(self.csv_file.lock(), "{},{}", elapsed_ms, value);
}
pub fn flush(&self) -> io::Result<()> {
@@ -43,14 +48,27 @@ impl ProgressionMonitor {
}
}
/// Parse progress value from message
#[inline]
fn parse_block_number(message: &str) -> Option<u64> {
let start = message.find("block ")?;
let after_block = &message[start + 6..];
let end = after_block
.find(|c: char| !c.is_ascii_digit())
.unwrap_or(after_block.len());
after_block[..end].parse::<u64>().ok()
fn parse_progress(message: &str) -> Option<u64> {
PROGRESS_PATTERNS
.iter()
.find_map(|pattern| parse_number_after(message, pattern))
}
/// Extract number immediately following the pattern
#[inline]
fn parse_number_after(message: &str, pattern: &str) -> Option<u64> {
let start = message.find(pattern)?;
let after = &message[start + pattern.len()..];
let end = after
.find(|c: char| !c.is_ascii_digit())
.unwrap_or(after.len());
if end == 0 {
return None;
}
after[..end].parse().ok()
}

View File

@@ -0,0 +1,251 @@
use crate::data::{DataPoint, DualRun, Result, Run};
use crate::format;
use plotters::prelude::*;
use std::path::Path;
/// Font family used for all chart text.
const FONT: &str = "monospace";
const FONT_SIZE: i32 = 20;
const FONT_SIZE_BIG: i32 = 30;
/// Output SVG dimensions in pixels (width, height).
const SIZE: (u32, u32) = (2000, 1000);
/// Extra time appended past the last data point so lines don't end at the frame edge.
const TIME_BUFFER_MS: u64 = 10_000;
/// Dark-theme background and text colors.
const BG_COLOR: RGBColor = RGBColor(18, 18, 24);
const TEXT_COLOR: RGBColor = RGBColor(230, 230, 240);
/// Per-run line colors; cycled via modulo when there are more runs than colors.
const COLORS: [RGBColor; 6] = [
    RGBColor(255, 99, 132),  // Pink/Red
    RGBColor(54, 162, 235),  // Blue
    RGBColor(75, 192, 192),  // Teal
    RGBColor(255, 206, 86),  // Yellow
    RGBColor(153, 102, 255), // Purple
    RGBColor(255, 159, 64),  // Orange
];
/// How Y-axis values are scaled and labelled.
pub enum YAxisFormat {
    /// Scale into binary byte units (KiB/MiB/GiB) via `format::bytes`.
    Bytes,
    /// Plain numbers, abbreviated via `format::axis_number` (e.g. "1.5k").
    Number,
}
/// Parameters for rendering one chart.
pub struct ChartConfig<'a> {
    /// Destination path of the generated SVG.
    pub output_path: &'a Path,
    pub title: String,
    /// Base Y-axis label; a unit suffix may be appended depending on `y_format`.
    pub y_label: String,
    pub y_format: YAxisFormat,
}
/// Generate a simple line chart from runs
///
/// Renders one line per run into an SVG at `config.output_path`.
/// Returns `Ok(())` without writing anything when `runs` is empty.
pub fn generate(config: ChartConfig, runs: &[Run]) -> Result<()> {
    if runs.is_empty() {
        return Ok(());
    }
    // Axis extents: latest timestamp across all runs plus a fixed buffer,
    // and the largest value seen in any run.
    let max_time_ms = runs.iter().map(|r| r.max_timestamp()).max().unwrap_or(1000) + TIME_BUFFER_MS;
    let max_time_s = max_time_ms as f64 / 1000.0;
    let max_value = runs.iter().map(|r| r.max_value()).fold(0.0, f64::max);
    // Pick display units: s/min/h on X, bytes or abbreviated numbers on Y.
    let (time_scaled, time_divisor, time_label) = format::time(max_time_s);
    let (value_scaled, scale_factor, y_label) = scale_y_axis(max_value, &config.y_label, &config.y_format);
    let x_labels = label_count(time_scaled);
    let root = SVGBackend::new(config.output_path, SIZE).into_drawing_area();
    root.fill(&BG_COLOR)?;
    // 2.5% X / 10% Y headroom so the lines don't touch the chart frame.
    let mut chart = ChartBuilder::on(&root)
        .caption(&config.title, (FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR))
        .margin(20)
        .margin_right(40)
        .x_label_area_size(50)
        .margin_left(50)
        .right_y_label_area_size(75)
        .build_cartesian_2d(0.0..time_scaled * 1.025, 0.0..value_scaled * 1.1)?;
    configure_mesh(&mut chart, time_label, &y_label, &config.y_format, x_labels)?;
    // One colored line per run, cycling through the palette.
    for (idx, run) in runs.iter().enumerate() {
        let color = COLORS[idx % COLORS.len()];
        draw_series(&mut chart, &run.data, &run.id, color, time_divisor, scale_factor)?;
    }
    configure_legend(&mut chart)?;
    root.present()?;
    println!("Generated: {}", config.output_path.display());
    Ok(())
}
/// Generate a chart with dual series per run (e.g., current + peak memory)
///
/// Each run contributes a solid line (`primary`, labelled with
/// `primary_suffix`) and a half-transparent dashed line (`secondary`,
/// labelled with `secondary_suffix`) in the same palette color.
/// Returns `Ok(())` without writing anything when `runs` is empty.
pub fn generate_dual(
    config: ChartConfig,
    runs: &[DualRun],
    primary_suffix: &str,
    secondary_suffix: &str,
) -> Result<()> {
    if runs.is_empty() {
        return Ok(());
    }
    // Latest timestamp across BOTH series of every run, plus a fixed buffer.
    let max_time_ms = runs
        .iter()
        .flat_map(|r| r.primary.iter().chain(r.secondary.iter()))
        .map(|d| d.timestamp_ms)
        .max()
        .unwrap_or(1000)
        + TIME_BUFFER_MS;
    let max_time_s = max_time_ms as f64 / 1000.0;
    let max_value = runs.iter().map(|r| r.max_value()).fold(0.0, f64::max);
    let (time_scaled, time_divisor, time_label) = format::time(max_time_s);
    let (value_scaled, scale_factor, y_label) = scale_y_axis(max_value, &config.y_label, &config.y_format);
    let x_labels = label_count(time_scaled);
    let root = SVGBackend::new(config.output_path, SIZE).into_drawing_area();
    root.fill(&BG_COLOR)?;
    // 2.5% X / 10% Y headroom so the lines don't touch the chart frame.
    let mut chart = ChartBuilder::on(&root)
        .caption(&config.title, (FONT, FONT_SIZE_BIG).into_font().color(&TEXT_COLOR))
        .margin(20)
        .margin_right(40)
        .x_label_area_size(50)
        .margin_left(50)
        .right_y_label_area_size(75)
        .build_cartesian_2d(0.0..time_scaled * 1.025, 0.0..value_scaled * 1.1)?;
    configure_mesh(&mut chart, time_label, &y_label, &config.y_format, x_labels)?;
    for (idx, run) in runs.iter().enumerate() {
        let color = COLORS[idx % COLORS.len()];
        // Primary series (solid)
        draw_series(
            &mut chart,
            &run.primary,
            &format!("{} {}", run.id, primary_suffix),
            color,
            time_divisor,
            scale_factor,
        )?;
        // Secondary series (dashed)
        draw_dashed_series(
            &mut chart,
            &run.secondary,
            &format!("{} {}", run.id, secondary_suffix),
            color.mix(0.5),
            time_divisor,
            scale_factor,
        )?;
    }
    configure_legend(&mut chart)?;
    root.present()?;
    println!("Generated: {}", config.output_path.display());
    Ok(())
}
/// Scale the Y-axis maximum for display.
/// Returns `(scaled_max, scale_factor, axis_label)` where every plotted value
/// should be divided by `scale_factor` to land in the scaled range.
fn scale_y_axis(max_value: f64, base_label: &str, y_format: &YAxisFormat) -> (f64, f64, String) {
    if let YAxisFormat::Number = y_format {
        // Plain numbers are drawn as-is.
        return (max_value, 1.0, base_label.to_string());
    }
    // Bytes: convert to the largest fitting unit and remember the divisor.
    let (scaled_max, unit) = format::bytes(max_value);
    let divisor = max_value / scaled_max;
    let label = format!("{} ({})", base_label, unit);
    (scaled_max, divisor, label)
}
/// Choose how many X-axis labels to draw: roughly one per integer unit of the
/// scaled range, but at least 2 and at most 12 so integer-rounded labels stay
/// unique and readable.
fn label_count(max_value: f64) -> usize {
    (max_value.ceil() as usize).clamp(2, 12)
}
type Chart<'a, 'b> = ChartContext<
'a,
SVGBackend<'b>,
Cartesian2d<plotters::coord::types::RangedCoordf64, plotters::coord::types::RangedCoordf64>,
>;
/// Apply shared axis styling: grid mesh disabled, muted label colors, and a
/// Y-label formatter matching the chosen axis format.
fn configure_mesh(chart: &mut Chart, x_label: &str, y_label: &str, y_format: &YAxisFormat, x_labels: usize) -> Result<()> {
    // Bytes: show one decimal only when the value isn't whole.
    // Number: abbreviated thousands (e.g. "1.5k").
    let y_formatter: Box<dyn Fn(&f64) -> String> = match y_format {
        YAxisFormat::Bytes => Box::new(|y: &f64| {
            if y.fract() == 0.0 {
                format!("{:.0}", y)
            } else {
                format!("{:.1}", y)
            }
        }),
        YAxisFormat::Number => Box::new(|y: &f64| format::axis_number(*y)),
    };
    chart
        .configure_mesh()
        .disable_mesh()
        .x_desc(x_label)
        .y_desc(y_label)
        .x_label_formatter(&|x| format!("{:.0}", x))
        .y_label_formatter(&y_formatter)
        .x_labels(x_labels)
        .y_labels(10)
        .x_label_style((FONT, FONT_SIZE).into_font().color(&TEXT_COLOR.mix(0.7)))
        .y_label_style((FONT, FONT_SIZE).into_font().color(&TEXT_COLOR.mix(0.7)))
        .axis_style(TEXT_COLOR.mix(0.3))
        .draw()?;
    Ok(())
}
/// Draw one solid line series, scaling timestamps to the chart's time unit
/// and values by `scale_factor`, and register its legend entry.
fn draw_series(
    chart: &mut Chart,
    data: &[DataPoint],
    label: &str,
    color: RGBColor,
    time_divisor: f64,
    scale_factor: f64,
) -> Result<()> {
    // ms -> s -> chart time unit on X; scaled value on Y.
    let to_coord = move |d: &DataPoint| {
        let x = d.timestamp_ms as f64 / 1000.0 / time_divisor;
        let y = d.value / scale_factor;
        (x, y)
    };
    let series = LineSeries::new(data.iter().map(to_coord), color.stroke_width(1));
    chart
        .draw_series(series)?
        .label(label)
        .legend(move |(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], color.stroke_width(1)));
    Ok(())
}
/// Draw one dashed line series and register its legend entry.
///
/// The dash effect is produced manually: consecutive points are paired into
/// two-point segments and only every other segment is drawn.
fn draw_dashed_series(
    chart: &mut Chart,
    data: &[DataPoint],
    label: &str,
    color: RGBAColor,
    time_divisor: f64,
    scale_factor: f64,
) -> Result<()> {
    let coords: Vec<_> = data
        .iter()
        .map(|d| (d.timestamp_ms as f64 / 1000.0 / time_divisor, d.value / scale_factor))
        .collect();
    // Every other adjacent pair becomes a drawn segment; the gaps form the dashes.
    let segments = coords
        .windows(2)
        .step_by(2)
        .map(|pair| PathElement::new(vec![pair[0], pair[1]], color.stroke_width(2)));
    chart
        .draw_series(segments)?
        .label(label)
        .legend(move |(x, y)| PathElement::new(vec![(x, y), (x + 10, y), (x + 20, y)], color.stroke_width(2)));
    Ok(())
}
/// Draw the series legend in the upper-left corner on a near-opaque
/// background so it stays readable over the plotted lines.
fn configure_legend<'a>(chart: &mut Chart<'a, 'a>) -> Result<()> {
    chart
        .configure_series_labels()
        .position(SeriesLabelPosition::UpperLeft)
        .label_font((FONT, FONT_SIZE).into_font().color(&TEXT_COLOR.mix(0.9)))
        .background_style(BG_COLOR.mix(0.98))
        .border_style(BG_COLOR)
        .margin(10)
        .draw()?;
    Ok(())
}

View File

@@ -0,0 +1,239 @@
use std::{collections::HashMap, fs, path::Path};
pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// One sample from a run's CSV: when it was recorded and what was measured.
#[derive(Debug, Clone)]
pub struct DataPoint {
    // Presumably milliseconds since the run started (the CSVs are written
    // with an elapsed-ms first column) — confirm against the writers.
    pub timestamp_ms: u64,
    pub value: f64,
}
/// Per-run cutoff timestamps for fair comparison
pub struct Cutoffs {
by_id: HashMap<String, u64>,
default: u64,
}
impl Cutoffs {
/// Calculate cutoffs from progress runs.
/// Finds the common max progress, then returns when each run reached it.
pub fn from_progress(progress_runs: &[Run]) -> Self {
const TIME_BUFFER_MS: u64 = 10_000;
if progress_runs.is_empty() {
return Self {
by_id: HashMap::new(),
default: u64::MAX,
};
}
// Find the minimum of max progress values (the common point all runs reached)
let common_progress = progress_runs
.iter()
.map(|r| r.max_value())
.fold(f64::MAX, f64::min);
let by_id: HashMap<_, _> = progress_runs
.iter()
.map(|run| {
let cutoff = run
.data
.iter()
.find(|d| d.value >= common_progress)
.map(|d| d.timestamp_ms)
.unwrap_or_else(|| run.max_timestamp())
.saturating_add(TIME_BUFFER_MS);
(run.id.clone(), cutoff)
})
.collect();
let default = by_id.values().copied().max().unwrap_or(u64::MAX);
Self { by_id, default }
}
pub fn get(&self, id: &str) -> u64 {
self.by_id.get(id).copied().unwrap_or(self.default)
}
pub fn trim_runs(&self, runs: &[Run]) -> Vec<Run> {
runs.iter().map(|r| r.trimmed(self.get(&r.id))).collect()
}
pub fn trim_dual_runs(&self, runs: &[DualRun]) -> Vec<DualRun> {
runs.iter().map(|r| r.trimmed(self.get(&r.id))).collect()
}
}
/// A named series of samples read from one run directory.
#[derive(Debug, Clone)]
pub struct Run {
    pub id: String,
    pub data: Vec<DataPoint>,
}
impl Run {
    /// Latest timestamp in the series, or 0 when empty.
    pub fn max_timestamp(&self) -> u64 {
        self.data.iter().map(|point| point.timestamp_ms).max().unwrap_or(0)
    }
    /// Largest value in the series, or 0.0 when empty.
    pub fn max_value(&self) -> f64 {
        let mut best = 0.0_f64;
        for point in &self.data {
            best = best.max(point.value);
        }
        best
    }
    /// Copy of this run keeping only points at or before `max_timestamp_ms`.
    pub fn trimmed(&self, max_timestamp_ms: u64) -> Self {
        let kept = self
            .data
            .iter()
            .filter(|point| point.timestamp_ms <= max_timestamp_ms)
            .cloned()
            .collect();
        Self {
            id: self.id.clone(),
            data: kept,
        }
    }
}
/// Two data series from a single run (e.g., memory footprint + peak, or io read + write)
#[derive(Debug, Clone)]
pub struct DualRun {
    pub id: String,
    pub primary: Vec<DataPoint>,
    pub secondary: Vec<DataPoint>,
}
impl DualRun {
    /// Copy of this run keeping, in both series, only points at or before
    /// `max_timestamp_ms`.
    pub fn trimmed(&self, max_timestamp_ms: u64) -> Self {
        // Same filter applied to each series.
        let keep = |series: &[DataPoint]| -> Vec<DataPoint> {
            series
                .iter()
                .filter(|point| point.timestamp_ms <= max_timestamp_ms)
                .cloned()
                .collect()
        };
        Self {
            id: self.id.clone(),
            primary: keep(&self.primary),
            secondary: keep(&self.secondary),
        }
    }
    /// Largest value across both series, or 0.0 when both are empty.
    pub fn max_value(&self) -> f64 {
        let mut best = 0.0_f64;
        for point in self.primary.iter().chain(self.secondary.iter()) {
            best = best.max(point.value);
        }
        best
    }
}
/// Read `filename` from every run subdirectory of `crate_path`.
///
/// Subdirectories whose name starts with `_` or is all digits are skipped,
/// as are files that fail to parse. Errors only on directory-listing failure
/// or a non-UTF-8 directory name.
pub fn read_runs(crate_path: &Path, filename: &str) -> Result<Vec<Run>> {
    let mut runs = Vec::new();
    for entry in fs::read_dir(crate_path)? {
        let run_path = entry?.path();
        if !run_path.is_dir() {
            continue;
        }
        let run_id = run_path
            .file_name()
            .and_then(|name| name.to_str())
            .ok_or("Invalid run ID")?
            .to_string();
        // Underscore-prefixed or numeric-only directories are not runs.
        let is_hidden = run_id.starts_with('_');
        let is_numeric = run_id.chars().all(|c| c.is_ascii_digit());
        if is_hidden || is_numeric {
            continue;
        }
        let csv_path = run_path.join(filename);
        if !csv_path.exists() {
            continue;
        }
        // Best-effort: a malformed/unreadable CSV just drops this run.
        if let Ok(data) = read_csv(&csv_path) {
            runs.push(Run { id: run_id, data });
        }
    }
    Ok(runs)
}
/// Read a two-value-per-row `filename` from every run subdirectory of
/// `crate_path`, producing a `DualRun` per directory.
///
/// Same skipping rules as `read_runs`: `_`-prefixed or numeric-only
/// directories and unreadable CSVs are ignored.
pub fn read_dual_runs(crate_path: &Path, filename: &str) -> Result<Vec<DualRun>> {
    let mut runs = Vec::new();
    for entry in fs::read_dir(crate_path)? {
        let run_path = entry?.path();
        if !run_path.is_dir() {
            continue;
        }
        let run_id = run_path
            .file_name()
            .and_then(|name| name.to_str())
            .ok_or("Invalid run ID")?
            .to_string();
        // Underscore-prefixed or numeric-only directories are not runs.
        let is_hidden = run_id.starts_with('_');
        let is_numeric = run_id.chars().all(|c| c.is_ascii_digit());
        if is_hidden || is_numeric {
            continue;
        }
        let csv_path = run_path.join(filename);
        if !csv_path.exists() {
            continue;
        }
        // Best-effort: a malformed/unreadable CSV just drops this run.
        if let Ok((primary, secondary)) = read_dual_csv(&csv_path) {
            runs.push(DualRun {
                id: run_id,
                primary,
                secondary,
            });
        }
    }
    Ok(runs)
}
/// Parse a two-column CSV (`timestamp_ms,value`) into data points.
/// The header row is skipped; rows that fail to parse are silently dropped.
fn read_csv(path: &Path) -> Result<Vec<DataPoint>> {
    let content = fs::read_to_string(path)?;
    let mut points = Vec::new();
    for line in content.lines().skip(1) {
        let mut fields = line.split(',');
        let ts: Option<u64> = fields.next().and_then(|field| field.parse().ok());
        let val: Option<f64> = fields.next().and_then(|field| field.parse().ok());
        if let (Some(timestamp_ms), Some(value)) = (ts, val) {
            points.push(DataPoint { timestamp_ms, value });
        }
    }
    Ok(points)
}
/// Parse a three-column CSV (`timestamp_ms,value1,value2`) into two parallel
/// series sharing timestamps. The header row is skipped; rows missing a field
/// or failing to parse are silently dropped (keeping the series aligned).
fn read_dual_csv(path: &Path) -> Result<(Vec<DataPoint>, Vec<DataPoint>)> {
    let content = fs::read_to_string(path)?;
    let mut primary = Vec::new();
    let mut secondary = Vec::new();
    for line in content.lines().skip(1) {
        let mut fields = line.split(',');
        // A row contributes only if all three fields are present and parse.
        let parsed = (|| {
            let timestamp_ms: u64 = fields.next()?.parse().ok()?;
            let first: f64 = fields.next()?.parse().ok()?;
            let second: f64 = fields.next()?.parse().ok()?;
            Some((timestamp_ms, first, second))
        })();
        if let Some((timestamp_ms, first, second)) = parsed {
            primary.push(DataPoint {
                timestamp_ms,
                value: first,
            });
            secondary.push(DataPoint {
                timestamp_ms,
                value: second,
            });
        }
    }
    Ok((primary, secondary))
}

View File

@@ -0,0 +1,45 @@
/// Bytes per binary unit.
const KIB: f64 = 1024.0;
const MIB: f64 = KIB * 1024.0;
const GIB: f64 = MIB * 1024.0;
/// Seconds per time unit.
const MINUTE: f64 = 60.0;
const HOUR: f64 = 3600.0;

/// Scale a raw byte count into the largest binary unit that fits.
/// Returns (scaled_value, unit_suffix); values below 1 KiB pass through
/// unscaled with the "bytes" suffix.
pub fn bytes(bytes: f64) -> (f64, &'static str) {
    const UNITS: [(f64, &'static str); 3] = [(GIB, "GiB"), (MIB, "MiB"), (KIB, "KiB")];
    for (threshold, suffix) in UNITS {
        if bytes >= threshold {
            return (bytes / threshold, suffix);
        }
    }
    (bytes, "bytes")
}

/// Pick a time unit for the axis; hours/minutes kick in once the span covers
/// at least two of that unit. Returns (scaled_value, divisor, axis_label).
pub fn time(seconds: f64) -> (f64, f64, &'static str) {
    const UNITS: [(f64, &'static str); 2] = [(HOUR, "Time (h)"), (MINUTE, "Time (min)")];
    for (unit, label) in UNITS {
        if seconds >= unit * 2.0 {
            return (seconds / unit, unit, label);
        }
    }
    (seconds, 1.0, "Time (s)")
}

/// Abbreviate an axis value: values >= 1000 become "k" with precision chosen
/// to keep labels short ("1.50k", "12.5k", "250k"); smaller values render as
/// plain integers.
pub fn axis_number(value: f64) -> String {
    if value < 1000.0 {
        return format!("{:.0}", value);
    }
    let thousands = value / 1000.0;
    // Whole or large thousands need no decimals; otherwise keep 3 significant-ish digits.
    let precision = if thousands.fract() == 0.0 || thousands >= 100.0 {
        0
    } else if thousands >= 10.0 {
        1
    } else {
        2
    };
    format!("{:.*}k", precision, thousands)
}

File diff suppressed because it is too large Load Diff

View File

@@ -36,16 +36,16 @@ fn run() -> Result<()> {
dbg!(
indexer
.vecs
.txindex_to_txid
.tx.txindex_to_txid
.read_once(txindex)
.unwrap()
.to_string()
);
let first_txinindex = indexer.vecs.txindex_to_first_txinindex.read_once(txindex)?;
let first_txinindex = indexer.vecs.tx.txindex_to_first_txinindex.read_once(txindex)?;
dbg!(first_txinindex);
let first_txoutindex = indexer
.vecs
.txindex_to_first_txoutindex
.tx.txindex_to_first_txoutindex
.read_once(txindex)?;
dbg!(first_txoutindex);
let input_count = *computer.indexes.txindex_to_input_count.read_once(txindex)?;
@@ -74,11 +74,11 @@ fn run() -> Result<()> {
.txinindex_to_value
.read_once(first_txinindex + 1)
);
dbg!(indexer.vecs.txoutindex_to_value.read_once(first_txoutindex));
dbg!(indexer.vecs.txout.txoutindex_to_value.read_once(first_txoutindex));
dbg!(
indexer
.vecs
.txoutindex_to_value
.txout.txoutindex_to_value
.read_once(first_txoutindex + 1)
);
dbg!(computer.chain.txindex_to_input_value.read_once(txindex));

View File

@@ -11,34 +11,34 @@ fn main() -> color_eyre::Result<()> {
let indexer = Indexer::forced_import(&outputs_dir)?;
let reader_outputtype = indexer.vecs.txoutindex_to_outputtype.create_reader();
let reader_typeindex = indexer.vecs.txoutindex_to_typeindex.create_reader();
let reader_txindex = indexer.vecs.txoutindex_to_txindex.create_reader();
let reader_txid = indexer.vecs.txindex_to_txid.create_reader();
let reader_height_to_first_txoutindex = indexer.vecs.height_to_first_txoutindex.create_reader();
let reader_p2pkh = indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.create_reader();
let reader_p2sh = indexer.vecs.p2shaddressindex_to_p2shbytes.create_reader();
let reader_outputtype = indexer.vecs.txout.txoutindex_to_outputtype.create_reader();
let reader_typeindex = indexer.vecs.txout.txoutindex_to_typeindex.create_reader();
let reader_txindex = indexer.vecs.txout.txoutindex_to_txindex.create_reader();
let reader_txid = indexer.vecs.tx.txindex_to_txid.create_reader();
let reader_height_to_first_txoutindex = indexer.vecs.txout.height_to_first_txoutindex.create_reader();
let reader_p2pkh = indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes.create_reader();
let reader_p2sh = indexer.vecs.address.p2shaddressindex_to_p2shbytes.create_reader();
// Check what's stored at typeindex 254909199 in both P2PKH and P2SH vecs
let typeindex = TypeIndex::from(254909199_usize);
let p2pkh_bytes = indexer
.vecs
.p2pkhaddressindex_to_p2pkhbytes
.address.p2pkhaddressindex_to_p2pkhbytes
.read(P2PKHAddressIndex::from(typeindex), &reader_p2pkh);
println!("P2PKH at typeindex 254909199: {:?}", p2pkh_bytes);
let p2sh_bytes = indexer
.vecs
.p2shaddressindex_to_p2shbytes
.address.p2shaddressindex_to_p2shbytes
.read(P2SHAddressIndex::from(typeindex), &reader_p2sh);
println!("P2SH at typeindex 254909199: {:?}", p2sh_bytes);
// Check first P2SH index at height 476152
let reader_first_p2sh = indexer.vecs.height_to_first_p2shaddressindex.create_reader();
let reader_first_p2pkh = indexer.vecs.height_to_first_p2pkhaddressindex.create_reader();
let first_p2sh_at_476152 = indexer.vecs.height_to_first_p2shaddressindex.read(Height::from(476152_usize), &reader_first_p2sh);
let first_p2pkh_at_476152 = indexer.vecs.height_to_first_p2pkhaddressindex.read(Height::from(476152_usize), &reader_first_p2pkh);
let reader_first_p2sh = indexer.vecs.address.height_to_first_p2shaddressindex.create_reader();
let reader_first_p2pkh = indexer.vecs.address.height_to_first_p2pkhaddressindex.create_reader();
let first_p2sh_at_476152 = indexer.vecs.address.height_to_first_p2shaddressindex.read(Height::from(476152_usize), &reader_first_p2sh);
let first_p2pkh_at_476152 = indexer.vecs.address.height_to_first_p2pkhaddressindex.read(Height::from(476152_usize), &reader_first_p2pkh);
println!("First P2SH index at height 476152: {:?}", first_p2sh_at_476152);
println!("First P2PKH index at height 476152: {:?}", first_p2pkh_at_476152);
@@ -47,22 +47,22 @@ fn main() -> color_eyre::Result<()> {
let txoutindex = TxOutIndex::from(txoutindex_usize);
let outputtype = indexer
.vecs
.txoutindex_to_outputtype
.txout.txoutindex_to_outputtype
.read(txoutindex, &reader_outputtype)
.unwrap();
let typeindex = indexer
.vecs
.txoutindex_to_typeindex
.txout.txoutindex_to_typeindex
.read(txoutindex, &reader_typeindex)
.unwrap();
let txindex = indexer
.vecs
.txoutindex_to_txindex
.txout.txoutindex_to_txindex
.read(txoutindex, &reader_txindex)
.unwrap();
let txid = indexer
.vecs
.txindex_to_txid
.tx.txindex_to_txid
.read(txindex, &reader_txid)
.unwrap();
@@ -71,7 +71,7 @@ fn main() -> color_eyre::Result<()> {
for h in 0..900_000_usize {
let first_txoutindex = indexer
.vecs
.height_to_first_txoutindex
.txout.height_to_first_txoutindex
.read(Height::from(h), &reader_height_to_first_txoutindex);
if let Ok(first) = first_txoutindex {
if usize::from(first) > txoutindex_usize {

View File

@@ -31,26 +31,26 @@ fn main() -> Result<()> {
let vecs = indexer.vecs;
let stores = indexer.stores;
let mut height_to_first_txindex_iter = vecs.height_to_first_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter = vecs.txindex_to_first_txoutindex.iter()?;
let mut height_to_first_txindex_iter = vecs.tx.height_to_first_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?;
let mut txindex_to_output_count_iter = computer.indexes.txindex_to_output_count.iter();
let mut txoutindex_to_outputtype_iter = vecs.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = vecs.txoutindex_to_typeindex.iter()?;
let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?;
let mut p2pk65addressindex_to_p2pk65bytes_iter =
vecs.p2pk65addressindex_to_p2pk65bytes.iter()?;
vecs.address.p2pk65addressindex_to_p2pk65bytes.iter()?;
let mut p2pk33addressindex_to_p2pk33bytes_iter =
vecs.p2pk33addressindex_to_p2pk33bytes.iter()?;
vecs.address.p2pk33addressindex_to_p2pk33bytes.iter()?;
let mut p2pkhaddressindex_to_p2pkhbytes_iter =
vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?;
vecs.address.p2pkhaddressindex_to_p2pkhbytes.iter()?;
let mut p2shaddressindex_to_p2shbytes_iter =
vecs.p2shaddressindex_to_p2shbytes.iter()?;
vecs.address.p2shaddressindex_to_p2shbytes.iter()?;
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
let mut p2wshaddressindex_to_p2wshbytes_iter =
vecs.p2wshaddressindex_to_p2wshbytes.iter()?;
vecs.address.p2wshaddressindex_to_p2wshbytes.iter()?;
let mut p2traddressindex_to_p2trbytes_iter =
vecs.p2traddressindex_to_p2trbytes.iter()?;
let mut p2aaddressindex_to_p2abytes_iter = vecs.p2aaddressindex_to_p2abytes.iter()?;
vecs.address.p2traddressindex_to_p2trbytes.iter()?;
let mut p2aaddressindex_to_p2abytes_iter = vecs.address.p2aaddressindex_to_p2abytes.iter()?;
let unknown = pools.get_unknown();

View File

@@ -69,7 +69,7 @@ impl Vecs {
let Some(min_height) = indexer
.vecs
.txindex_to_height
.tx.txindex_to_height
.iter()?
.get(min_txindex)
.map(|h| h.min(starting_indexes.height))
@@ -77,12 +77,12 @@ impl Vecs {
return Ok(());
};
let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter()?;
let mut height_to_first_txindex_iter = indexer.vecs.tx.height_to_first_txindex.iter()?;
parser
.read(
Some(min_height),
Some((indexer.vecs.height_to_first_txindex.len() - 1).into()),
Some((indexer.vecs.tx.height_to_first_txindex.len() - 1).into()),
)
.iter()
.try_for_each(|block| -> Result<()> {

View File

@@ -250,8 +250,8 @@ impl Vecs {
let txindex_to_weight = LazyVecFrom2::init(
"weight",
version + Version::ZERO,
indexer.vecs.txindex_to_base_size.boxed_clone(),
indexer.vecs.txindex_to_total_size.boxed_clone(),
indexer.vecs.tx.txindex_to_base_size.boxed_clone(),
indexer.vecs.tx.txindex_to_total_size.boxed_clone(),
|index: TxIndex, txindex_to_base_size_iter, txindex_to_total_size_iter| {
let index = index.to_usize();
txindex_to_base_size_iter.get_at(index).map(|base_size| {
@@ -279,8 +279,8 @@ impl Vecs {
let txindex_to_is_coinbase = LazyVecFrom2::init(
"is_coinbase",
version + Version::ZERO,
indexer.vecs.txindex_to_height.boxed_clone(),
indexer.vecs.height_to_first_txindex.boxed_clone(),
indexer.vecs.tx.txindex_to_height.boxed_clone(),
indexer.vecs.tx.height_to_first_txindex.boxed_clone(),
|index: TxIndex, txindex_to_height_iter, height_to_first_txindex_iter| {
txindex_to_height_iter.get(index).map(|height| {
let txindex = height_to_first_txindex_iter.get_unwrap(height);
@@ -652,7 +652,7 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_range(
starting_indexes.height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
|h| (h, StoredU32::from(1_u32)),
exit,
)?;
@@ -692,10 +692,10 @@ impl Vecs {
Ok(())
})?;
let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter()?;
let mut height_to_timestamp_iter = indexer.vecs.block.height_to_timestamp.iter()?;
self.height_to_interval.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_timestamp,
&indexer.vecs.block.height_to_timestamp,
|(height, timestamp, ..)| {
let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| {
let prev_timestamp = height_to_timestamp_iter.get_unwrap(prev_h);
@@ -719,19 +719,19 @@ impl Vecs {
indexes,
starting_indexes,
exit,
Some(&indexer.vecs.height_to_weight),
Some(&indexer.vecs.block.height_to_weight),
)?;
self.indexes_to_block_size.compute_rest(
indexes,
starting_indexes,
exit,
Some(&indexer.vecs.height_to_total_size),
Some(&indexer.vecs.block.height_to_total_size),
)?;
self.height_to_vbytes.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
|(h, w, ..)| {
(
h,
@@ -748,7 +748,7 @@ impl Vecs {
Some(&self.height_to_vbytes),
)?;
let mut height_to_timestamp_iter = indexer.vecs.height_to_timestamp.iter()?;
let mut height_to_timestamp_iter = indexer.vecs.block.height_to_timestamp.iter()?;
self.difficultyepoch_to_timestamp.compute_transform(
starting_indexes.difficultyepoch,
@@ -806,15 +806,15 @@ impl Vecs {
indexes,
starting_indexes,
exit,
Some(&indexer.vecs.height_to_difficulty),
Some(&indexer.vecs.block.height_to_difficulty),
)?;
self.indexes_to_tx_count
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.txindex_to_txid,
&indexer.vecs.tx.height_to_first_txindex,
&indexer.vecs.tx.txindex_to_txid,
exit,
)?;
Ok(())
@@ -838,12 +838,12 @@ impl Vecs {
let compute_indexes_to_tx_vany =
|indexes_to_tx_vany: &mut ComputedVecsFromHeight<StoredU64>, txversion| {
let mut txindex_to_txversion_iter = indexer.vecs.txindex_to_txversion.iter()?;
let mut txindex_to_txversion_iter = indexer.vecs.tx.txindex_to_txversion.iter()?;
indexes_to_tx_vany.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_filtered_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.txindex_to_txid,
&indexer.vecs.tx.height_to_first_txindex,
&indexer.vecs.tx.txindex_to_txid,
|txindex| {
let v = txindex_to_txversion_iter.get_unwrap(txindex);
v == txversion
@@ -857,8 +857,8 @@ impl Vecs {
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v2, TxVersion::TWO)?;
compute_indexes_to_tx_vany(&mut self.indexes_to_tx_v3, TxVersion::THREE)?;
let txoutindex_to_value = &indexer.vecs.txoutindex_to_value;
let txoutindex_to_value_reader = indexer.vecs.txoutindex_to_value.create_reader();
let txoutindex_to_value = &indexer.vecs.txout.txoutindex_to_value;
let txoutindex_to_value_reader = indexer.vecs.txout.txoutindex_to_value.create_reader();
self.txinindex_to_value.compute_transform(
starting_indexes.txinindex,
&indexes.txinindex_to_txoutindex,
@@ -877,7 +877,7 @@ impl Vecs {
self.txindex_to_input_value.compute_sum_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txinindex,
&indexer.vecs.tx.txindex_to_first_txinindex,
&indexes.txindex_to_input_count,
&self.txinindex_to_value,
exit,
@@ -885,9 +885,9 @@ impl Vecs {
self.txindex_to_output_value.compute_sum_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexer.vecs.tx.txindex_to_first_txoutindex,
&indexes.txindex_to_output_count,
&indexer.vecs.txoutindex_to_value,
&indexer.vecs.txout.txoutindex_to_value,
exit,
)?;
@@ -918,7 +918,7 @@ impl Vecs {
.compute_all(indexes, price, starting_indexes, exit, |v| {
v.compute_filtered_sum_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.tx.height_to_first_txindex,
&indexes.height_to_txindex_count,
&self.txindex_to_input_value,
|sats| !sats.is_max(),
@@ -963,12 +963,12 @@ impl Vecs {
self.indexes_to_coinbase
.compute_all(indexes, price, starting_indexes, exit, |vec| {
let mut txindex_to_first_txoutindex_iter =
indexer.vecs.txindex_to_first_txoutindex.iter()?;
indexer.vecs.tx.txindex_to_first_txoutindex.iter()?;
let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter();
let mut txoutindex_to_value_iter = indexer.vecs.txoutindex_to_value.iter()?;
let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?;
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.tx.height_to_first_txindex,
|(height, txindex, ..)| {
let first_txoutindex = txindex_to_first_txoutindex_iter
.get_unwrap(txindex)
@@ -1093,8 +1093,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2aaddressindex,
&indexer.vecs.p2aaddressindex_to_p2abytes,
&indexer.vecs.address.height_to_first_p2aaddressindex,
&indexer.vecs.address.p2aaddressindex_to_p2abytes,
exit,
)?;
Ok(())
@@ -1104,8 +1104,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2msoutputindex,
&indexer.vecs.p2msoutputindex_to_txindex,
&indexer.vecs.output.height_to_first_p2msoutputindex,
&indexer.vecs.output.p2msoutputindex_to_txindex,
exit,
)?;
Ok(())
@@ -1115,8 +1115,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2pk33addressindex,
&indexer.vecs.p2pk33addressindex_to_p2pk33bytes,
&indexer.vecs.address.height_to_first_p2pk33addressindex,
&indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes,
exit,
)?;
Ok(())
@@ -1126,8 +1126,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2pk65addressindex,
&indexer.vecs.p2pk65addressindex_to_p2pk65bytes,
&indexer.vecs.address.height_to_first_p2pk65addressindex,
&indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes,
exit,
)?;
Ok(())
@@ -1137,8 +1137,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2pkhaddressindex,
&indexer.vecs.p2pkhaddressindex_to_p2pkhbytes,
&indexer.vecs.address.height_to_first_p2pkhaddressindex,
&indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes,
exit,
)?;
Ok(())
@@ -1148,8 +1148,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2shaddressindex,
&indexer.vecs.p2shaddressindex_to_p2shbytes,
&indexer.vecs.address.height_to_first_p2shaddressindex,
&indexer.vecs.address.p2shaddressindex_to_p2shbytes,
exit,
)?;
Ok(())
@@ -1159,8 +1159,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2traddressindex,
&indexer.vecs.p2traddressindex_to_p2trbytes,
&indexer.vecs.address.height_to_first_p2traddressindex,
&indexer.vecs.address.p2traddressindex_to_p2trbytes,
exit,
)?;
Ok(())
@@ -1170,8 +1170,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2wpkhaddressindex,
&indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes,
&indexer.vecs.address.height_to_first_p2wpkhaddressindex,
&indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes,
exit,
)?;
Ok(())
@@ -1181,8 +1181,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_p2wshaddressindex,
&indexer.vecs.p2wshaddressindex_to_p2wshbytes,
&indexer.vecs.address.height_to_first_p2wshaddressindex,
&indexer.vecs.address.p2wshaddressindex_to_p2wshbytes,
exit,
)?;
Ok(())
@@ -1192,8 +1192,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_opreturnindex,
&indexer.vecs.opreturnindex_to_txindex,
&indexer.vecs.output.height_to_first_opreturnindex,
&indexer.vecs.output.opreturnindex_to_txindex,
exit,
)?;
Ok(())
@@ -1203,8 +1203,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_unknownoutputindex,
&indexer.vecs.unknownoutputindex_to_txindex,
&indexer.vecs.output.height_to_first_unknownoutputindex,
&indexer.vecs.output.unknownoutputindex_to_txindex,
exit,
)?;
Ok(())
@@ -1214,8 +1214,8 @@ impl Vecs {
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_emptyoutputindex,
&indexer.vecs.emptyoutputindex_to_txindex,
&indexer.vecs.output.height_to_first_emptyoutputindex,
&indexer.vecs.output.emptyoutputindex_to_txindex,
exit,
)?;
Ok(())
@@ -1299,7 +1299,7 @@ impl Vecs {
let multiplier = 2.0_f64.powi(32) / 600.0;
v.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_difficulty,
&indexer.vecs.block.height_to_difficulty,
|(i, v, ..)| (i, StoredF32::from(*v * multiplier)),
exit,
)?;
@@ -1418,7 +1418,7 @@ impl Vecs {
|v| {
v.compute_percentage_change(
starting_indexes.height,
&indexer.vecs.height_to_difficulty,
&indexer.vecs.block.height_to_difficulty,
1,
exit,
)?;

View File

@@ -73,7 +73,7 @@ impl Vecs {
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let height_to_timestamp = &indexer.vecs.height_to_timestamp;
let height_to_timestamp = &indexer.vecs.block.height_to_timestamp;
let index = starting_indexes
.height
.min(Height::from(self.height_to_price_ohlc_in_cents.len()));

View File

@@ -172,7 +172,7 @@ where
self.height.compute(
starting_indexes.height,
txindex,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.tx.height_to_first_txindex,
&indexes.height_to_txindex_count,
exit,
)?;
@@ -182,7 +182,7 @@ where
self.height.compute(
starting_indexes.height,
txindex,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.tx.height_to_first_txindex,
&indexes.height_to_txindex_count,
exit,
)?;
@@ -252,7 +252,7 @@ impl ComputedVecsFromTxindex<Bitcoin> {
let mut last_iter = sats.height.last.as_ref().map(|v| v.into_iter());
let mut cumulative_iter = sats.height.cumulative.as_ref().map(|v| v.into_iter());
(starting_index.to_usize()..indexer.vecs.height_to_weight.len())
(starting_index.to_usize()..indexer.vecs.block.height_to_weight.len())
.map(Height::from)
.try_for_each(|height| -> Result<()> {
if let Some(first) = self.height.first.as_mut() {
@@ -375,7 +375,7 @@ impl ComputedVecsFromTxindex<Dollars> {
let mut last_iter = bitcoin.height.last.as_ref().map(|v| v.into_iter());
let mut cumulative_iter = bitcoin.height.cumulative.as_ref().map(|v| v.into_iter());
(starting_index.to_usize()..indexer.vecs.height_to_weight.len())
(starting_index.to_usize()..indexer.vecs.block.height_to_weight.len())
.map(Height::from)
.try_for_each(|height| -> Result<()> {
let price = *close_iter.get_unwrap(height);

View File

@@ -74,7 +74,7 @@ impl ComputedValueVecsFromTxindex {
&name_usd,
version + VERSION,
bitcoin_txindex.boxed_clone(),
indexer.vecs.txindex_to_height.boxed_clone(),
indexer.vecs.tx.txindex_to_height.boxed_clone(),
price.chainindexes_to_price_close.height.boxed_clone(),
|txindex: TxIndex,
txindex_to_btc_iter,

View File

@@ -122,57 +122,57 @@ impl Vecs {
let this = Self {
txinindex_to_txoutindex: eager!("txoutindex"),
txoutindex_to_txoutindex: lazy!("txoutindex", indexer.vecs.txoutindex_to_value),
txinindex_to_txinindex: lazy!("txinindex", indexer.vecs.txinindex_to_outpoint),
txoutindex_to_txoutindex: lazy!("txoutindex", indexer.vecs.txout.txoutindex_to_value),
txinindex_to_txinindex: lazy!("txinindex", indexer.vecs.txin.txinindex_to_outpoint),
p2pk33addressindex_to_p2pk33addressindex: lazy!(
"p2pk33addressindex",
indexer.vecs.p2pk33addressindex_to_p2pk33bytes
indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes
),
p2pk65addressindex_to_p2pk65addressindex: lazy!(
"p2pk65addressindex",
indexer.vecs.p2pk65addressindex_to_p2pk65bytes
indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes
),
p2pkhaddressindex_to_p2pkhaddressindex: lazy!(
"p2pkhaddressindex",
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes
indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes
),
p2shaddressindex_to_p2shaddressindex: lazy!(
"p2shaddressindex",
indexer.vecs.p2shaddressindex_to_p2shbytes
indexer.vecs.address.p2shaddressindex_to_p2shbytes
),
p2traddressindex_to_p2traddressindex: lazy!(
"p2traddressindex",
indexer.vecs.p2traddressindex_to_p2trbytes
indexer.vecs.address.p2traddressindex_to_p2trbytes
),
p2wpkhaddressindex_to_p2wpkhaddressindex: lazy!(
"p2wpkhaddressindex",
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes
indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes
),
p2wshaddressindex_to_p2wshaddressindex: lazy!(
"p2wshaddressindex",
indexer.vecs.p2wshaddressindex_to_p2wshbytes
indexer.vecs.address.p2wshaddressindex_to_p2wshbytes
),
p2aaddressindex_to_p2aaddressindex: lazy!(
"p2aaddressindex",
indexer.vecs.p2aaddressindex_to_p2abytes
indexer.vecs.address.p2aaddressindex_to_p2abytes
),
p2msoutputindex_to_p2msoutputindex: lazy!(
"p2msoutputindex",
indexer.vecs.p2msoutputindex_to_txindex
indexer.vecs.output.p2msoutputindex_to_txindex
),
emptyoutputindex_to_emptyoutputindex: lazy!(
"emptyoutputindex",
indexer.vecs.emptyoutputindex_to_txindex
indexer.vecs.output.emptyoutputindex_to_txindex
),
unknownoutputindex_to_unknownoutputindex: lazy!(
"unknownoutputindex",
indexer.vecs.unknownoutputindex_to_txindex
indexer.vecs.output.unknownoutputindex_to_txindex
),
opreturnindex_to_opreturnindex: lazy!(
"opreturnindex",
indexer.vecs.opreturnindex_to_txindex
indexer.vecs.output.opreturnindex_to_txindex
),
txindex_to_txindex: lazy!("txindex", indexer.vecs.txindex_to_txid),
txindex_to_txindex: lazy!("txindex", indexer.vecs.tx.txindex_to_txid),
txindex_to_input_count: eager!("input_count"),
txindex_to_output_count: eager!("output_count"),
dateindex_to_date: eager!("date"),
@@ -251,11 +251,11 @@ impl Vecs {
// TxInIndex
// ---
let txindex_to_first_txoutindex = &indexer.vecs.txindex_to_first_txoutindex;
let txindex_to_first_txoutindex = &indexer.vecs.tx.txindex_to_first_txoutindex;
let txindex_to_first_txoutindex_reader = txindex_to_first_txoutindex.create_reader();
self.txinindex_to_txoutindex.compute_transform(
starting_indexes.txinindex,
&indexer.vecs.txinindex_to_outpoint,
&indexer.vecs.txin.txinindex_to_outpoint,
|(txinindex, outpoint, ..)| {
if unlikely(outpoint.is_coinbase()) {
return (txinindex, TxOutIndex::COINBASE);
@@ -274,22 +274,22 @@ impl Vecs {
self.txindex_to_input_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txinindex,
&indexer.vecs.txinindex_to_outpoint,
&indexer.vecs.tx.txindex_to_first_txinindex,
&indexer.vecs.txin.txinindex_to_outpoint,
exit,
)?;
self.txindex_to_output_count.compute_count_from_indexes(
starting_indexes.txindex,
&indexer.vecs.txindex_to_first_txoutindex,
&indexer.vecs.txoutindex_to_value,
&indexer.vecs.tx.txindex_to_first_txoutindex,
&indexer.vecs.txout.txoutindex_to_value,
exit,
)?;
self.height_to_txindex_count.compute_count_from_indexes(
starting_indexes.height,
&indexer.vecs.height_to_first_txindex,
&indexer.vecs.txindex_to_txid,
&indexer.vecs.tx.height_to_first_txindex,
&indexer.vecs.tx.txindex_to_txid,
exit,
)?;
@@ -299,13 +299,13 @@ impl Vecs {
self.height_to_height.compute_from_index(
starting_indexes.height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
exit,
)?;
self.height_to_date.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_timestamp,
&indexer.vecs.block.height_to_timestamp,
|(h, t, ..)| (h, Date::from(t)),
exit,
)?;
@@ -313,7 +313,7 @@ impl Vecs {
let mut prev_timestamp_fixed = None;
self.height_to_timestamp_fixed.compute_transform(
starting_indexes.height,
&indexer.vecs.height_to_timestamp,
&indexer.vecs.block.height_to_timestamp,
|(h, timestamp, height_to_timestamp_fixed_iter)| {
if prev_timestamp_fixed.is_none()
&& let Some(prev_h) = h.decremented()
@@ -389,7 +389,7 @@ impl Vecs {
self.dateindex_to_height_count.compute_count_from_indexes(
starting_dateindex,
&self.dateindex_to_first_height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
exit,
)?;
@@ -442,7 +442,7 @@ impl Vecs {
self.height_to_difficultyepoch.compute_from_index(
starting_indexes.height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
exit,
)?;
@@ -626,7 +626,7 @@ impl Vecs {
self.height_to_halvingepoch.compute_from_index(
starting_indexes.height,
&indexer.vecs.height_to_weight,
&indexer.vecs.block.height_to_weight,
exit,
)?;

View File

@@ -122,28 +122,28 @@ impl Vecs {
self.height_to_pool.version() + indexer.stores.height_to_coinbase_tag.version(),
)?;
let mut height_to_first_txindex_iter = indexer.vecs.height_to_first_txindex.iter()?;
let mut height_to_first_txindex_iter = indexer.vecs.tx.height_to_first_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter =
indexer.vecs.txindex_to_first_txoutindex.iter()?;
indexer.vecs.tx.txindex_to_first_txoutindex.iter()?;
let mut txindex_to_output_count_iter = indexes.txindex_to_output_count.iter();
let mut txoutindex_to_outputtype_iter = indexer.vecs.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = indexer.vecs.txoutindex_to_typeindex.iter()?;
let mut txoutindex_to_outputtype_iter = indexer.vecs.txout.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = indexer.vecs.txout.txoutindex_to_typeindex.iter()?;
let mut p2pk65addressindex_to_p2pk65bytes_iter =
indexer.vecs.p2pk65addressindex_to_p2pk65bytes.iter()?;
indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes.iter()?;
let mut p2pk33addressindex_to_p2pk33bytes_iter =
indexer.vecs.p2pk33addressindex_to_p2pk33bytes.iter()?;
indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes.iter()?;
let mut p2pkhaddressindex_to_p2pkhbytes_iter =
indexer.vecs.p2pkhaddressindex_to_p2pkhbytes.iter()?;
indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes.iter()?;
let mut p2shaddressindex_to_p2shbytes_iter =
indexer.vecs.p2shaddressindex_to_p2shbytes.iter()?;
indexer.vecs.address.p2shaddressindex_to_p2shbytes.iter()?;
let mut p2wpkhaddressindex_to_p2wpkhbytes_iter =
indexer.vecs.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.iter()?;
let mut p2wshaddressindex_to_p2wshbytes_iter =
indexer.vecs.p2wshaddressindex_to_p2wshbytes.iter()?;
indexer.vecs.address.p2wshaddressindex_to_p2wshbytes.iter()?;
let mut p2traddressindex_to_p2trbytes_iter =
indexer.vecs.p2traddressindex_to_p2trbytes.iter()?;
indexer.vecs.address.p2traddressindex_to_p2trbytes.iter()?;
let mut p2aaddressindex_to_p2abytes_iter =
indexer.vecs.p2aaddressindex_to_p2abytes.iter()?;
indexer.vecs.address.p2aaddressindex_to_p2abytes.iter()?;
let unknown = self.pools.get_unknown();

View File

@@ -381,18 +381,18 @@ impl Vecs {
.as_ref()
.map(|price| price.timeindexes_to_price_close.dateindex.u());
let height_to_date_fixed = &indexes.height_to_date_fixed;
let height_to_first_p2aaddressindex = &indexer.vecs.height_to_first_p2aaddressindex;
let height_to_first_p2pk33addressindex = &indexer.vecs.height_to_first_p2pk33addressindex;
let height_to_first_p2pk65addressindex = &indexer.vecs.height_to_first_p2pk65addressindex;
let height_to_first_p2pkhaddressindex = &indexer.vecs.height_to_first_p2pkhaddressindex;
let height_to_first_p2shaddressindex = &indexer.vecs.height_to_first_p2shaddressindex;
let height_to_first_p2traddressindex = &indexer.vecs.height_to_first_p2traddressindex;
let height_to_first_p2wpkhaddressindex = &indexer.vecs.height_to_first_p2wpkhaddressindex;
let height_to_first_p2wshaddressindex = &indexer.vecs.height_to_first_p2wshaddressindex;
let height_to_first_txindex = &indexer.vecs.height_to_first_txindex;
let height_to_first_p2aaddressindex = &indexer.vecs.address.height_to_first_p2aaddressindex;
let height_to_first_p2pk33addressindex = &indexer.vecs.address.height_to_first_p2pk33addressindex;
let height_to_first_p2pk65addressindex = &indexer.vecs.address.height_to_first_p2pk65addressindex;
let height_to_first_p2pkhaddressindex = &indexer.vecs.address.height_to_first_p2pkhaddressindex;
let height_to_first_p2shaddressindex = &indexer.vecs.address.height_to_first_p2shaddressindex;
let height_to_first_p2traddressindex = &indexer.vecs.address.height_to_first_p2traddressindex;
let height_to_first_p2wpkhaddressindex = &indexer.vecs.address.height_to_first_p2wpkhaddressindex;
let height_to_first_p2wshaddressindex = &indexer.vecs.address.height_to_first_p2wshaddressindex;
let height_to_first_txindex = &indexer.vecs.tx.height_to_first_txindex;
let height_to_txindex_count = chain.indexes_to_tx_count.height.u();
let height_to_first_txinindex = &indexer.vecs.height_to_first_txinindex;
let height_to_first_txoutindex = &indexer.vecs.height_to_first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex;
let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex;
let height_to_input_count = chain.indexes_to_input_count.height.unwrap_sum();
let height_to_output_count = chain.indexes_to_output_count.height.unwrap_sum();
let height_to_price_close = price
@@ -406,15 +406,15 @@ impl Vecs {
.height
.as_ref()
.unwrap();
let txindex_to_first_txoutindex = &indexer.vecs.txindex_to_first_txoutindex;
let txindex_to_height = &indexer.vecs.txindex_to_height;
let txindex_to_first_txoutindex = &indexer.vecs.tx.txindex_to_first_txoutindex;
let txindex_to_height = &indexer.vecs.tx.txindex_to_height;
let txindex_to_input_count = &indexes.txindex_to_input_count;
let txindex_to_output_count = &indexes.txindex_to_output_count;
let txinindex_to_outpoint = &indexer.vecs.txinindex_to_outpoint;
let txoutindex_to_outputtype = &indexer.vecs.txoutindex_to_outputtype;
let txoutindex_to_txindex = &indexer.vecs.txoutindex_to_txindex;
let txoutindex_to_typeindex = &indexer.vecs.txoutindex_to_typeindex;
let txoutindex_to_value = &indexer.vecs.txoutindex_to_value;
let txinindex_to_outpoint = &indexer.vecs.txin.txinindex_to_outpoint;
let txoutindex_to_outputtype = &indexer.vecs.txout.txoutindex_to_outputtype;
let txoutindex_to_txindex = &indexer.vecs.txout.txoutindex_to_txindex;
let txoutindex_to_typeindex = &indexer.vecs.txout.txoutindex_to_typeindex;
let txoutindex_to_value = &indexer.vecs.txout.txoutindex_to_value;
let mut height_to_price_close_iter = height_to_price_close.as_ref().map(|v| v.into_iter());
let mut height_to_timestamp_fixed_iter = height_to_timestamp_fixed.into_iter();
@@ -501,7 +501,7 @@ impl Vecs {
};
let starting_height = starting_indexes.height.min(stateful_starting_height);
let last_height = Height::from(indexer.vecs.height_to_blockhash.stamp());
let last_height = Height::from(indexer.vecs.block.height_to_blockhash.stamp());
if starting_height <= last_height {
let stamp = starting_height.into();
let starting_height = if starting_height.is_not_zero() {

View File

@@ -16,11 +16,11 @@ pub struct IndexerReaders {
impl IndexerReaders {
pub fn new(indexer: &Indexer) -> Self {
Self {
txinindex_to_outpoint: indexer.vecs.txinindex_to_outpoint.create_reader(),
txindex_to_first_txoutindex: indexer.vecs.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_value: indexer.vecs.txoutindex_to_value.create_reader(),
txoutindex_to_outputtype: indexer.vecs.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: indexer.vecs.txoutindex_to_typeindex.create_reader(),
txinindex_to_outpoint: indexer.vecs.txin.txinindex_to_outpoint.create_reader(),
txindex_to_first_txoutindex: indexer.vecs.tx.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_value: indexer.vecs.txout.txoutindex_to_value.create_reader(),
txoutindex_to_outputtype: indexer.vecs.txout.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: indexer.vecs.txout.txoutindex_to_typeindex.create_reader(),
}
}
}

View File

@@ -61,9 +61,9 @@ pub fn process_blocks(
// References to vectors using correct field paths
// From indexer.vecs:
let height_to_first_txindex = &indexer.vecs.height_to_first_txindex;
let height_to_first_txoutindex = &indexer.vecs.height_to_first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.height_to_first_txinindex;
let height_to_first_txindex = &indexer.vecs.tx.height_to_first_txindex;
let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex;
// From chain (via .height.u() or .height.unwrap_sum() patterns):
let height_to_tx_count = chain.indexes_to_tx_count.height.u();
@@ -114,14 +114,14 @@ pub fn process_blocks(
let mut vr = VecsReaders::new(&vecs.any_address_indexes, &vecs.addresses_data);
// Create iterators for first address indexes per type
let mut first_p2a_iter = indexer.vecs.height_to_first_p2aaddressindex.into_iter();
let mut first_p2pk33_iter = indexer.vecs.height_to_first_p2pk33addressindex.into_iter();
let mut first_p2pk65_iter = indexer.vecs.height_to_first_p2pk65addressindex.into_iter();
let mut first_p2pkh_iter = indexer.vecs.height_to_first_p2pkhaddressindex.into_iter();
let mut first_p2sh_iter = indexer.vecs.height_to_first_p2shaddressindex.into_iter();
let mut first_p2tr_iter = indexer.vecs.height_to_first_p2traddressindex.into_iter();
let mut first_p2wpkh_iter = indexer.vecs.height_to_first_p2wpkhaddressindex.into_iter();
let mut first_p2wsh_iter = indexer.vecs.height_to_first_p2wshaddressindex.into_iter();
let mut first_p2a_iter = indexer.vecs.address.height_to_first_p2aaddressindex.into_iter();
let mut first_p2pk33_iter = indexer.vecs.address.height_to_first_p2pk33addressindex.into_iter();
let mut first_p2pk65_iter = indexer.vecs.address.height_to_first_p2pk65addressindex.into_iter();
let mut first_p2pkh_iter = indexer.vecs.address.height_to_first_p2pkhaddressindex.into_iter();
let mut first_p2sh_iter = indexer.vecs.address.height_to_first_p2shaddressindex.into_iter();
let mut first_p2tr_iter = indexer.vecs.address.height_to_first_p2traddressindex.into_iter();
let mut first_p2wpkh_iter = indexer.vecs.address.height_to_first_p2wpkhaddressindex.into_iter();
let mut first_p2wsh_iter = indexer.vecs.address.height_to_first_p2wshaddressindex.into_iter();
// Track running totals - recover from previous height if resuming
let (mut unspendable_supply, mut opreturn_supply, mut addresstype_to_addr_count, mut addresstype_to_empty_addr_count) =
@@ -210,9 +210,9 @@ pub fn process_blocks(
first_txoutindex,
output_count,
&txoutindex_to_txindex,
&indexer.vecs.txoutindex_to_value,
&indexer.vecs.txoutindex_to_outputtype,
&indexer.vecs.txoutindex_to_typeindex,
&indexer.vecs.txout.txoutindex_to_value,
&indexer.vecs.txout.txoutindex_to_outputtype,
&indexer.vecs.txout.txoutindex_to_typeindex,
&ir,
&first_addressindexes,
&loaded_cache,
@@ -228,11 +228,11 @@ pub fn process_blocks(
first_txinindex + 1, // Skip coinbase
input_count - 1,
&txinindex_to_txindex[1..], // Skip coinbase
&indexer.vecs.txinindex_to_outpoint,
&indexer.vecs.txindex_to_first_txoutindex,
&indexer.vecs.txoutindex_to_value,
&indexer.vecs.txoutindex_to_outputtype,
&indexer.vecs.txoutindex_to_typeindex,
&indexer.vecs.txin.txinindex_to_outpoint,
&indexer.vecs.tx.txindex_to_first_txoutindex,
&indexer.vecs.txout.txoutindex_to_value,
&indexer.vecs.txout.txoutindex_to_outputtype,
&indexer.vecs.txout.txoutindex_to_typeindex,
&txoutindex_to_height,
&ir,
&first_addressindexes,

View File

@@ -21,11 +21,11 @@ pub struct IndexerReaders {
impl IndexerReaders {
pub fn new(indexer: &Indexer) -> Self {
Self {
txinindex_to_outpoint: indexer.vecs.txinindex_to_outpoint.create_reader(),
txindex_to_first_txoutindex: indexer.vecs.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_value: indexer.vecs.txoutindex_to_value.create_reader(),
txoutindex_to_outputtype: indexer.vecs.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: indexer.vecs.txoutindex_to_typeindex.create_reader(),
txinindex_to_outpoint: indexer.vecs.txin.txinindex_to_outpoint.create_reader(),
txindex_to_first_txoutindex: indexer.vecs.tx.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_value: indexer.vecs.txout.txoutindex_to_value.create_reader(),
txoutindex_to_outputtype: indexer.vecs.txout.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: indexer.vecs.txout.txoutindex_to_typeindex.create_reader(),
}
}
}

View File

@@ -271,7 +271,7 @@ impl Vecs {
};
// 3. Get last height from indexer
let last_height = Height::from(indexer.vecs.height_to_blockhash.len().saturating_sub(1));
let last_height = Height::from(indexer.vecs.block.height_to_blockhash.len().saturating_sub(1));
// 4. Process blocks
if starting_height <= last_height {

View File

@@ -1,239 +0,0 @@
use std::path::Path;
use brk_error::Result;
use brk_grouper::{CohortContext, Filter, Filtered};
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, StoredU64, Version};
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableVec,
PcoVec, TypedVecIterator,
};
use crate::{
Indexes,
grouped::{ComputedVecsFromHeight, Source, VecBuilderOptions},
indexes, price,
stateful::{
common,
r#trait::{CohortVecs, DynCohortVecs},
},
states::AddressCohortState,
utils::OptionExt,
};
const VERSION: Version = Version::ZERO;
#[derive(Clone, Traversable)]
pub struct Vecs {
starting_height: Option<Height>,
#[traversable(skip)]
pub state: Option<AddressCohortState>,
#[traversable(flatten)]
pub inner: common::Vecs,
pub height_to_addr_count: EagerVec<PcoVec<Height, StoredU64>>,
pub indexes_to_addr_count: ComputedVecsFromHeight<StoredU64>,
}
impl Vecs {
pub fn forced_import(
db: &Database,
filter: Filter,
version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
states_path: Option<&Path>,
) -> Result<Self> {
let compute_dollars = price.is_some();
let full_name = filter.to_full_name(CohortContext::Address);
let suffix = |s: &str| {
if full_name.is_empty() {
s.to_string()
} else {
format!("{full_name}_{s}")
}
};
Ok(Self {
starting_height: None,
state: states_path.map(|states_path| {
AddressCohortState::new(states_path, &full_name, compute_dollars)
}),
height_to_addr_count: EagerVec::forced_import(
db,
&suffix("addr_count"),
version + VERSION + Version::ZERO,
)?,
indexes_to_addr_count: ComputedVecsFromHeight::forced_import(
db,
&suffix("addr_count"),
Source::None,
version + VERSION + Version::ZERO,
indexes,
VecBuilderOptions::default().add_last(),
)?,
inner: common::Vecs::forced_import(
db,
filter,
CohortContext::Address,
version,
indexes,
price,
)?,
})
}
}
impl DynCohortVecs for Vecs {
fn min_height_vecs_len(&self) -> usize {
std::cmp::min(
self.height_to_addr_count.len(),
self.inner.min_height_vecs_len(),
)
}
fn reset_state_starting_height(&mut self) {
self.starting_height = Some(Height::ZERO);
}
fn import_state(&mut self, starting_height: Height) -> Result<Height> {
let starting_height = self
.inner
.import_state(starting_height, &mut self.state.um().inner)?;
self.starting_height = Some(starting_height);
if let Some(prev_height) = starting_height.decremented() {
self.state.um().addr_count = *self
.height_to_addr_count
.into_iter()
.get_unwrap(prev_height);
}
Ok(starting_height)
}
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
self.height_to_addr_count
.validate_computed_version_or_reset(
base_version + self.height_to_addr_count.inner_version(),
)?;
self.inner.validate_computed_versions(base_version)
}
fn truncate_push(&mut self, height: Height) -> Result<()> {
if self.starting_height.unwrap() > height {
return Ok(());
}
self.height_to_addr_count
.truncate_push(height, self.state.u().addr_count.into())?;
self.inner
.truncate_push(height, &self.state.u().inner)
}
fn compute_then_truncate_push_unrealized_states(
&mut self,
height: Height,
height_price: Option<Dollars>,
dateindex: Option<DateIndex>,
date_price: Option<Option<Dollars>>,
) -> Result<()> {
self.inner.compute_then_truncate_push_unrealized_states(
height,
height_price,
dateindex,
date_price,
&self.state.u().inner,
)
}
fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
self.height_to_addr_count.safe_write(exit)?;
self.inner
.safe_flush_stateful_vecs(height, exit, &mut self.state.um().inner)
}
#[allow(clippy::too_many_arguments)]
fn compute_rest_part1(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_addr_count.compute_rest(
indexes,
starting_indexes,
exit,
Some(&self.height_to_addr_count),
)?;
self.inner
.compute_rest_part1(indexes, price, starting_indexes, exit)
}
}
impl CohortVecs for Vecs {
fn compute_from_stateful(
&mut self,
starting_indexes: &Indexes,
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_addr_count.compute_sum_of_others(
starting_indexes.height,
others
.iter()
.map(|v| &v.height_to_addr_count)
.collect::<Vec<_>>()
.as_slice(),
exit,
)?;
self.inner.compute_from_stateful(
starting_indexes,
&others.iter().map(|v| &v.inner).collect::<Vec<_>>(),
exit,
)
}
#[allow(clippy::too_many_arguments)]
fn compute_rest_part2(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
height_to_supply: &impl IterableVec<Height, Bitcoin>,
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
self.inner.compute_rest_part2(
indexes,
price,
starting_indexes,
height_to_supply,
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)
}
}
impl Filtered for Vecs {
fn filter(&self) -> &Filter {
&self.inner.filter
}
}

View File

@@ -1,139 +0,0 @@
use std::path::Path;
use brk_error::Result;
use brk_grouper::{AddressGroups, AmountFilter, Filter, Filtered};
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, Version};
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{Database, Exit, IterableVec};
use crate::{
Indexes, indexes, price,
stateful::{
address_cohort,
r#trait::{CohortVecs, DynCohortVecs},
},
};
const VERSION: Version = Version::new(0);
#[derive(Clone, Deref, DerefMut, Traversable)]
pub struct Vecs(AddressGroups<address_cohort::Vecs>);
impl Vecs {
pub fn forced_import(
db: &Database,
version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
states_path: &Path,
) -> Result<Self> {
Ok(Self(AddressGroups::new(|filter| {
let states_path = match &filter {
Filter::Amount(AmountFilter::Range(_)) => Some(states_path),
_ => None,
};
address_cohort::Vecs::forced_import(
db,
filter,
version + VERSION + Version::ZERO,
indexes,
price,
states_path,
)
.unwrap()
})))
}
pub fn compute_overlapping_vecs(
&mut self,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
let by_size_range = &self.0.amount_range;
[
self.0
.ge_amount
.par_iter_mut()
.map(|vecs| {
let filter = vecs.filter().clone();
(
vecs,
by_size_range
.iter()
.filter(|other| filter.includes(other.filter()))
.collect::<Vec<_>>(),
)
})
.collect::<Vec<_>>(),
self.0
.lt_amount
.par_iter_mut()
.map(|vecs| {
let filter = vecs.filter().clone();
(
vecs,
by_size_range
.iter()
.filter(|other| filter.includes(other.filter()))
.collect::<Vec<_>>(),
)
})
.collect::<Vec<_>>(),
]
.into_iter()
.flatten()
.try_for_each(|(vecs, stateful)| {
vecs.compute_from_stateful(starting_indexes, &stateful, exit)
})
}
pub fn compute_rest_part1(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
exit: &Exit,
) -> Result<()> {
self.par_iter_mut()
.try_for_each(|v| v.compute_rest_part1(indexes, price, starting_indexes, exit))
}
#[allow(clippy::too_many_arguments)]
pub fn compute_rest_part2(
&mut self,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
starting_indexes: &Indexes,
height_to_supply: &impl IterableVec<Height, Bitcoin>,
dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
exit: &Exit,
) -> Result<()> {
self.0.par_iter_mut().try_for_each(|v| {
v.compute_rest_part2(
indexes,
price,
starting_indexes,
height_to_supply,
dateindex_to_supply,
height_to_market_cap,
dateindex_to_market_cap,
height_to_realized_cap,
dateindex_to_realized_cap,
exit,
)
})
}
pub fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
self.par_iter_separate_mut()
.try_for_each(|v| v.safe_flush_stateful_vecs(height, exit))
}
}

View File

@@ -1,226 +0,0 @@
use brk_error::{Error, Result};
use brk_traversable::Traversable;
use brk_types::{
AnyAddressIndex, EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData,
LoadedAddressIndex, OutputType, P2AAddressIndex, P2PK33AddressIndex, P2PK65AddressIndex,
P2PKHAddressIndex, P2SHAddressIndex, P2TRAddressIndex, P2WPKHAddressIndex, P2WSHAddressIndex,
TypeIndex,
};
use vecdb::{AnyStoredVec, BytesVec, GenericStoredVec, Reader, Stamp};
#[derive(Clone, Traversable)]
pub struct AnyAddressIndexesVecs {
pub p2pk33: BytesVec<P2PK33AddressIndex, AnyAddressIndex>,
pub p2pk65: BytesVec<P2PK65AddressIndex, AnyAddressIndex>,
pub p2pkh: BytesVec<P2PKHAddressIndex, AnyAddressIndex>,
pub p2sh: BytesVec<P2SHAddressIndex, AnyAddressIndex>,
pub p2tr: BytesVec<P2TRAddressIndex, AnyAddressIndex>,
pub p2wpkh: BytesVec<P2WPKHAddressIndex, AnyAddressIndex>,
pub p2wsh: BytesVec<P2WSHAddressIndex, AnyAddressIndex>,
pub p2a: BytesVec<P2AAddressIndex, AnyAddressIndex>,
}
impl AnyAddressIndexesVecs {
pub fn min_stamped_height(&self) -> Height {
Height::from(self.p2pk33.stamp())
.incremented()
.min(Height::from(self.p2pk65.stamp()).incremented())
.min(Height::from(self.p2pkh.stamp()).incremented())
.min(Height::from(self.p2sh.stamp()).incremented())
.min(Height::from(self.p2tr.stamp()).incremented())
.min(Height::from(self.p2wpkh.stamp()).incremented())
.min(Height::from(self.p2wsh.stamp()).incremented())
.min(Height::from(self.p2a.stamp()).incremented())
}
pub fn rollback_before(&mut self, stamp: Stamp) -> Result<[Stamp; 8]> {
Ok([
self.p2pk33.rollback_before(stamp)?,
self.p2pk65.rollback_before(stamp)?,
self.p2pkh.rollback_before(stamp)?,
self.p2sh.rollback_before(stamp)?,
self.p2tr.rollback_before(stamp)?,
self.p2wpkh.rollback_before(stamp)?,
self.p2wsh.rollback_before(stamp)?,
self.p2a.rollback_before(stamp)?,
])
}
pub fn reset(&mut self) -> Result<()> {
self.p2pk33.reset()?;
self.p2pk65.reset()?;
self.p2pkh.reset()?;
self.p2sh.reset()?;
self.p2tr.reset()?;
self.p2wpkh.reset()?;
self.p2wsh.reset()?;
self.p2a.reset()?;
Ok(())
}
pub fn get_anyaddressindex(
&self,
address_type: OutputType,
typeindex: TypeIndex,
reader: &Reader,
) -> AnyAddressIndex {
match address_type {
OutputType::P2PK33 => self
.p2pk33
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2PK65 => self
.p2pk65
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2PKH => self
.p2pkh
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2SH => self
.p2sh
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2TR => self
.p2tr
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2WPKH => self
.p2wpkh
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2WSH => self
.p2wsh
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
OutputType::P2A => self
.p2a
.get_pushed_or_read_at_unwrap(typeindex.into(), reader),
_ => unreachable!(),
}
}
pub fn get_anyaddressindex_once(
&self,
address_type: OutputType,
typeindex: TypeIndex,
) -> Result<AnyAddressIndex> {
match address_type {
OutputType::P2PK33 => self
.p2pk33
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2PK65 => self
.p2pk65
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2PKH => self
.p2pkh
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2SH => self
.p2sh
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2TR => self
.p2tr
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2WPKH => self
.p2wpkh
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2WSH => self
.p2wsh
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
OutputType::P2A => self
.p2a
.read_at_once(typeindex.into())
.map_err(|e| e.into()),
_ => Err(Error::UnsupportedType(address_type.to_string())),
}
}
pub fn update_or_push(
&mut self,
address_type: OutputType,
typeindex: TypeIndex,
anyaddressindex: AnyAddressIndex,
) -> Result<()> {
(match address_type {
OutputType::P2PK33 => self
.p2pk33
.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2PK65 => self
.p2pk65
.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2PKH => self.p2pkh.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2SH => self.p2sh.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2TR => self.p2tr.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2WPKH => self
.p2wpkh
.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2WSH => self.p2wsh.update_or_push(typeindex.into(), anyaddressindex),
OutputType::P2A => self.p2a.update_or_push(typeindex.into(), anyaddressindex),
_ => unreachable!(),
})?;
Ok(())
}
pub fn stamped_flush_maybe_with_changes(
&mut self,
stamp: Stamp,
with_changes: bool,
) -> Result<()> {
self.p2pk33
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2pk65
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2pkh
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2sh
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2tr
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2wpkh
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2wsh
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
self.p2a
.stamped_flush_maybe_with_changes(stamp, with_changes)?;
Ok(())
}
}
/// On-disk data for all addresses, split by state: `loaded` holds addresses
/// with data (`LoadedAddressData`), `empty` holds emptied ones
/// (`EmptyAddressData`), each keyed by its own index type.
#[derive(Clone, Traversable)]
pub struct AddressesDataVecs {
    pub loaded: BytesVec<LoadedAddressIndex, LoadedAddressData>,
    pub empty: BytesVec<EmptyAddressIndex, EmptyAddressData>,
}
impl AddressesDataVecs {
    /// Smallest resume height across both vectors (each stamp interpreted as
    /// a height and incremented).
    pub fn min_stamped_height(&self) -> Height {
        let loaded = Height::from(self.loaded.stamp()).incremented();
        let empty = Height::from(self.empty.stamp()).incremented();
        loaded.min(empty)
    }

    /// Rolls both vectors back to just before `stamp`, returning the
    /// resulting `[loaded, empty]` stamps.
    pub fn rollback_before(&mut self, stamp: Stamp) -> Result<[Stamp; 2]> {
        let loaded = self.loaded.rollback_before(stamp)?;
        let empty = self.empty.rollback_before(stamp)?;
        Ok([loaded, empty])
    }

    /// Resets both vectors, stopping at the first failure.
    pub fn reset(&mut self) -> Result<()> {
        self.loaded.reset()?;
        self.empty.reset()?;
        Ok(())
    }

    /// Flushes both vectors stamped with `stamp`, forwarding `with_changes`
    /// to each underlying flush.
    pub fn stamped_flush_maybe_with_changes(
        &mut self,
        stamp: Stamp,
        with_changes: bool,
    ) -> Result<()> {
        self.loaded.stamped_flush_maybe_with_changes(stamp, with_changes)?;
        self.empty.stamped_flush_maybe_with_changes(stamp, with_changes)?;
        Ok(())
    }
}

View File

@@ -1,29 +0,0 @@
use brk_grouper::ByAddressType;
use brk_types::Height;
use derive_deref::{Deref, DerefMut};
use vecdb::TypedVecIterator;
use super::AddressTypeToHeightToAddressCount;
/// Per-address-type address counts, snapshotted at a single height.
#[derive(Debug, Default, Deref, DerefMut)]
pub struct AddressTypeToAddressCount(ByAddressType<u64>);
impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddressCount {
    /// Rebuilds the per-type counts as they stood just before
    /// `starting_height` by reading each height-indexed series at
    /// `starting_height - 1`. Height zero has no predecessor, so it yields
    /// the all-zero default.
    #[inline]
    fn from((groups, starting_height): (&AddressTypeToHeightToAddressCount, Height)) -> Self {
        match starting_height.decremented() {
            None => Self::default(),
            Some(prev) => Self(ByAddressType {
                p2pk65: groups.p2pk65.into_iter().get_unwrap(prev).into(),
                p2pk33: groups.p2pk33.into_iter().get_unwrap(prev).into(),
                p2pkh: groups.p2pkh.into_iter().get_unwrap(prev).into(),
                p2sh: groups.p2sh.into_iter().get_unwrap(prev).into(),
                p2wpkh: groups.p2wpkh.into_iter().get_unwrap(prev).into(),
                p2wsh: groups.p2wsh.into_iter().get_unwrap(prev).into(),
                p2tr: groups.p2tr.into_iter().get_unwrap(prev).into(),
                p2a: groups.p2a.into_iter().get_unwrap(prev).into(),
            }),
        }
    }
}

View File

@@ -1,45 +0,0 @@
use brk_error::Result;
use brk_grouper::ByAddressType;
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64};
use derive_deref::{Deref, DerefMut};
use vecdb::{PcoVec, EagerVec, GenericStoredVec};
use super::AddressTypeToAddressCount;
/// Per-address-type, height-indexed address-count series (one eager,
/// pco-compressed stored vector per output type).
#[derive(Debug, Clone, Deref, DerefMut, Traversable)]
pub struct AddressTypeToHeightToAddressCount(ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>);
impl From<ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>> for AddressTypeToHeightToAddressCount {
    /// Wraps a pre-built per-type group of height-indexed count vectors.
    #[inline]
    fn from(inner: ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>) -> Self {
        Self(inner)
    }
}
impl AddressTypeToHeightToAddressCount {
    /// Truncates each per-type series to `height` and pushes that type's
    /// current count, keeping all eight series aligned on the same height.
    /// Stops at the first failing push.
    pub fn truncate_push(
        &mut self,
        height: Height,
        addresstype_to_usize: &AddressTypeToAddressCount,
    ) -> Result<()> {
        // Local macro: same truncate-then-push for every address-type field.
        macro_rules! push {
            ($field:ident) => {
                self.$field
                    .truncate_push(height, addresstype_to_usize.$field.into())?;
            };
        }
        push!(p2pk65);
        push!(p2pk33);
        push!(p2pkh);
        push!(p2sh);
        push!(p2wpkh);
        push!(p2wsh);
        push!(p2tr);
        push!(p2a);
        Ok(())
    }
}

View File

@@ -1,9 +0,0 @@
use std::collections::BTreeMap;
use brk_types::Height;
use derive_deref::{Deref, DerefMut};
use crate::stateful::AddressTypeToVec;
/// Per-height buckets of per-address-type vectors; `BTreeMap` keeps the
/// heights sorted for in-order iteration.
#[derive(Debug, Default, Deref, DerefMut)]
pub struct HeightToAddressTypeToVec<T>(pub BTreeMap<Height, AddressTypeToVec<T>>);

View File

@@ -1,80 +0,0 @@
use brk_error::Result;
use brk_grouper::ByAddressType;
use brk_traversable::Traversable;
use brk_types::StoredU64;
use derive_deref::{Deref, DerefMut};
use vecdb::Exit;
use crate::{Indexes, grouped::ComputedVecsFromHeight, indexes};
use super::AddressTypeToHeightToAddressCount;
/// Per-address-type address-count series computed across the non-height
/// indexes, derived from the height-indexed source data.
#[derive(Clone, Deref, DerefMut, Traversable)]
pub struct AddressTypeToIndexesToAddressCount(ByAddressType<ComputedVecsFromHeight<StoredU64>>);
impl From<ByAddressType<ComputedVecsFromHeight<StoredU64>>> for AddressTypeToIndexesToAddressCount {
    /// Wraps a pre-built per-type group of computed count vectors.
    #[inline]
    fn from(inner: ByAddressType<ComputedVecsFromHeight<StoredU64>>) -> Self {
        Self(inner)
    }
}
impl AddressTypeToIndexesToAddressCount {
    /// Runs `compute_rest` for every address type's count series, sourcing
    /// each from its height-indexed vector in
    /// `addresstype_to_height_to_addresscount`. Stops at the first error.
    pub fn compute(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &Indexes,
        exit: &Exit,
        addresstype_to_height_to_addresscount: &AddressTypeToHeightToAddressCount,
    ) -> Result<()> {
        // Local macro: identical compute_rest call for every field.
        macro_rules! compute_one {
            ($field:ident) => {
                self.$field.compute_rest(
                    indexes,
                    starting_indexes,
                    exit,
                    Some(&addresstype_to_height_to_addresscount.$field),
                )?;
            };
        }
        compute_one!(p2pk65);
        compute_one!(p2pk33);
        compute_one!(p2pkh);
        compute_one!(p2sh);
        compute_one!(p2wpkh);
        compute_one!(p2wsh);
        compute_one!(p2tr);
        compute_one!(p2a);
        Ok(())
    }
}

View File

@@ -1,13 +0,0 @@
mod addresscount;
mod height_to_addresscount;
mod height_to_vec;
mod indexes_to_addresscount;
mod typeindex_map;
mod vec;
pub use addresscount::*;
pub use height_to_addresscount::*;
pub use height_to_vec::*;
pub use indexes_to_addresscount::*;
pub use typeindex_map::*;
pub use vec::*;

View File

@@ -1,100 +0,0 @@
use std::{collections::hash_map::Entry, mem};
use brk_grouper::ByAddressType;
use brk_types::{OutputType, TypeIndex};
use derive_deref::{Deref, DerefMut};
use rustc_hash::FxHashMap;
use smallvec::{Array, SmallVec};
/// Per-address-type hash maps keyed by type-local index. `FxHashMap` trades
/// DoS resistance for speed on small integer-like keys.
#[derive(Debug, Deref, DerefMut)]
pub struct AddressTypeToTypeIndexMap<T>(ByAddressType<FxHashMap<TypeIndex, T>>);
impl<T> AddressTypeToTypeIndexMap<T> {
    /// Consumes both maps and returns their per-type union. For each type the
    /// smaller map is drained into the larger one (swapping first if needed)
    /// to minimize rehashing; on key collisions the entry coming from the
    /// smaller side wins (on equal sizes, `other` wins).
    pub fn merge(mut self, mut other: Self) -> Self {
        macro_rules! merge_field {
            ($field:ident) => {
                Self::merge_(&mut self.$field, &mut other.$field)
            };
        }
        merge_field!(p2pk65);
        merge_field!(p2pk33);
        merge_field!(p2pkh);
        merge_field!(p2sh);
        merge_field!(p2wpkh);
        merge_field!(p2wsh);
        merge_field!(p2tr);
        merge_field!(p2a);
        self
    }

    /// Drains `src` into `dst`, swapping the two first when `dst` is smaller
    /// so the larger table is always the one extended.
    fn merge_(dst: &mut FxHashMap<TypeIndex, T>, src: &mut FxHashMap<TypeIndex, T>) {
        if dst.len() < src.len() {
            mem::swap(dst, src);
        }
        dst.extend(src.drain());
    }

    /// Inserts `value` under (`address_type`, `typeindex`).
    ///
    /// # Panics
    /// Panics when `address_type` has no per-type map.
    pub fn insert_for_type(&mut self, address_type: OutputType, typeindex: TypeIndex, value: T) {
        self.get_mut(address_type).unwrap().insert(typeindex, value);
    }

    /// Removes and returns the entry under (`address_type`, `typeindex`).
    ///
    /// # Panics
    /// Panics when `address_type` has no per-type map or the key is absent.
    pub fn remove_for_type(&mut self, address_type: OutputType, typeindex: &TypeIndex) -> T {
        self.get_mut(address_type)
            .unwrap()
            .remove(typeindex)
            .unwrap()
    }

    /// Consumes the map, yielding each address type alongside its entries
    /// sorted by ascending `TypeIndex`.
    pub fn into_sorted_iter(self) -> impl Iterator<Item = (OutputType, Vec<(TypeIndex, T)>)> {
        self.0.into_iter().map(|(output_type, map)| {
            let mut entries: Vec<_> = map.into_iter().collect();
            entries.sort_unstable_by_key(|&(typeindex, _)| typeindex);
            (output_type, entries)
        })
    }

    /// Consumes the map, yielding `(address_type, entries)` pairs in
    /// unspecified per-map order.
    #[allow(clippy::should_implement_trait)]
    pub fn into_iter(self) -> impl Iterator<Item = (OutputType, FxHashMap<TypeIndex, T>)> {
        self.0.into_iter()
    }
}
impl<T> Default for AddressTypeToTypeIndexMap<T> {
    /// An empty map for every address type (no `T: Default` bound required).
    fn default() -> Self {
        let empty = FxHashMap::default;
        Self(ByAddressType {
            p2pk65: empty(),
            p2pk33: empty(),
            p2pkh: empty(),
            p2sh: empty(),
            p2wpkh: empty(),
            p2wsh: empty(),
            p2tr: empty(),
            p2a: empty(),
        })
    }
}
impl<T> AddressTypeToTypeIndexMap<SmallVec<T>>
where
    T: Array,
{
    /// Concatenates the two maps of small-vectors per (type, index) key.
    /// Within a colliding key, the longer vector keeps its buffer and the
    /// shorter one's elements are appended after it (so relative order
    /// between the two sides depends on their lengths).
    pub fn merge_vec(mut self, other: Self) -> Self {
        for (address_type, incoming) in other.0.into_iter() {
            let own_map = self.0.get_mut_unwrap(address_type);
            for (typeindex, mut incoming_vec) in incoming {
                match own_map.entry(typeindex) {
                    Entry::Vacant(slot) => {
                        slot.insert(incoming_vec);
                    }
                    Entry::Occupied(mut slot) => {
                        let own_vec = slot.get_mut();
                        if incoming_vec.len() > own_vec.len() {
                            mem::swap(own_vec, &mut incoming_vec);
                        }
                        own_vec.extend(incoming_vec);
                    }
                }
            }
        }
        self
    }
}

View File

@@ -1,60 +0,0 @@
use std::mem;
use brk_grouper::ByAddressType;
use derive_deref::{Deref, DerefMut};
/// One plain `Vec<T>` per address (output) type.
#[derive(Debug, Deref, DerefMut)]
pub struct AddressTypeToVec<T>(ByAddressType<Vec<T>>);
impl<T> AddressTypeToVec<T> {
    /// By-value variant of [`Self::merge_mut`]: consumes both collections and
    /// returns their per-type concatenation.
    ///
    /// Previously this duplicated `merge_mut`'s body verbatim; it now
    /// delegates, so the two can never drift apart.
    pub fn merge(mut self, other: Self) -> Self {
        self.merge_mut(other);
        self
    }

    /// Concatenates `other`'s per-type vectors into `self`'s in place.
    /// For each type, the shorter vector's elements are appended to the
    /// longer one's (buffers are swapped when `other` is longer so the
    /// larger allocation is reused) — see [`Self::merge_`] for the ordering
    /// consequence.
    pub fn merge_mut(&mut self, mut other: Self) {
        Self::merge_(&mut self.p2pk65, &mut other.p2pk65);
        Self::merge_(&mut self.p2pk33, &mut other.p2pk33);
        Self::merge_(&mut self.p2pkh, &mut other.p2pkh);
        Self::merge_(&mut self.p2sh, &mut other.p2sh);
        Self::merge_(&mut self.p2wpkh, &mut other.p2wpkh);
        Self::merge_(&mut self.p2wsh, &mut other.p2wsh);
        Self::merge_(&mut self.p2tr, &mut other.p2tr);
        Self::merge_(&mut self.p2a, &mut other.p2a);
    }

    /// Appends the shorter of the two vectors to the longer one, leaving the
    /// result in `own`. Note the ordering: the longer vector's elements come
    /// first (ties keep `own` first).
    fn merge_(own: &mut Vec<T>, other: &mut Vec<T>) {
        if own.len() >= other.len() {
            own.append(other);
        } else {
            other.append(own);
            mem::swap(own, other);
        }
    }

    /// Unwraps into the underlying `ByAddressType` of vectors.
    pub fn unwrap(self) -> ByAddressType<Vec<T>> {
        self.0
    }
}
impl<T> Default for AddressTypeToVec<T> {
    /// An empty vector for every address type (no `T: Default` bound
    /// required; `Vec::new` does not allocate).
    fn default() -> Self {
        Self(ByAddressType {
            p2pk65: Vec::new(),
            p2pk33: Vec::new(),
            p2pkh: Vec::new(),
            p2sh: Vec::new(),
            p2wpkh: Vec::new(),
            p2wsh: Vec::new(),
            p2tr: Vec::new(),
            p2a: Vec::new(),
        })
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,914 +0,0 @@
//! Import and validation methods for Vecs.
//!
//! This module contains methods for:
//! - `forced_import`: Creating a new Vecs instance from database
//! - `import_state`: Importing state when resuming from checkpoint
//! - `validate_computed_versions`: Version validation
//! - `min_height_vecs_len`: Finding minimum vector length
use brk_error::{Error, Result};
use brk_grouper::{CohortContext, Filter};
use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32, StoredF64, Version};
use vecdb::{
AnyVec, Database, EagerVec, GenericStoredVec, ImportableVec, IterableCloneableVec, PcoVec,
StoredVec, TypedVecIterator,
};
use crate::{
grouped::{
ComputedHeightValueVecs, ComputedRatioVecsFromDateIndex, ComputedValueVecsFromDateIndex,
ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight,
PricePercentiles, Source, VecBuilderOptions,
},
indexes, price,
states::CohortState,
utils::OptionExt,
};
use super::Vecs;
impl Vecs {
#[allow(clippy::too_many_arguments)]
pub fn forced_import(
db: &Database,
filter: Filter,
context: CohortContext,
parent_version: Version,
indexes: &indexes::Vecs,
price: Option<&price::Vecs>,
) -> Result<Self> {
let compute_dollars = price.is_some();
let extended = filter.is_extended(context);
let compute_rel_to_all = filter.compute_rel_to_all();
let compute_adjusted = filter.compute_adjusted(context);
let version = parent_version + Version::ZERO;
let name_prefix = filter.to_full_name(context);
let suffix = |s: &str| {
if name_prefix.is_empty() {
s.to_string()
} else {
format!("{name_prefix}_{s}")
}
};
// Helper macros for imports
macro_rules! eager {
($idx:ty, $val:ty, $name:expr, $v:expr) => {
EagerVec::<PcoVec<$idx, $val>>::forced_import(db, &suffix($name), version + $v)
.unwrap()
};
}
macro_rules! computed_h {
($name:expr, $source:expr, $v:expr, $opts:expr $(,)?) => {
ComputedVecsFromHeight::forced_import(
db,
&suffix($name),
$source,
version + $v,
indexes,
$opts,
)
.unwrap()
};
}
macro_rules! computed_di {
($name:expr, $source:expr, $v:expr, $opts:expr $(,)?) => {
ComputedVecsFromDateIndex::forced_import(
db,
&suffix($name),
$source,
version + $v,
indexes,
$opts,
)
.unwrap()
};
}
// Common version patterns
let v0 = Version::ZERO;
let v1 = Version::ONE;
let v2 = Version::TWO;
let v3 = Version::new(3);
let last = || VecBuilderOptions::default().add_last();
let sum = || VecBuilderOptions::default().add_sum();
let sum_cum = || VecBuilderOptions::default().add_sum().add_cumulative();
// Pre-create dateindex vecs that are used in computed vecs
let dateindex_to_supply_in_profit =
compute_dollars.then(|| eager!(DateIndex, Sats, "supply_in_profit", v0));
let dateindex_to_supply_in_loss =
compute_dollars.then(|| eager!(DateIndex, Sats, "supply_in_loss", v0));
let dateindex_to_unrealized_profit =
compute_dollars.then(|| eager!(DateIndex, Dollars, "unrealized_profit", v0));
let dateindex_to_unrealized_loss =
compute_dollars.then(|| eager!(DateIndex, Dollars, "unrealized_loss", v0));
Ok(Self {
filter,
// ==================== SUPPLY & UTXO COUNT ====================
height_to_supply: EagerVec::forced_import(db, &suffix("supply"), version + v0)?,
height_to_supply_value: ComputedHeightValueVecs::forced_import(
db,
&suffix("supply"),
Source::None,
version + v0,
compute_dollars,
)?,
indexes_to_supply: ComputedValueVecsFromDateIndex::forced_import(
db,
&suffix("supply"),
Source::Compute,
version + v1,
last(),
compute_dollars,
indexes,
)?,
height_to_utxo_count: EagerVec::forced_import(db, &suffix("utxo_count"), version + v0)?,
indexes_to_utxo_count: computed_h!("utxo_count", Source::None, v0, last()),
height_to_supply_half_value: ComputedHeightValueVecs::forced_import(
db,
&suffix("supply_half"),
Source::Compute,
version + v0,
compute_dollars,
)?,
indexes_to_supply_half: ComputedValueVecsFromDateIndex::forced_import(
db,
&suffix("supply_half"),
Source::Compute,
version + v0,
last(),
compute_dollars,
indexes,
)?,
// ==================== ACTIVITY ====================
height_to_sent: EagerVec::forced_import(db, &suffix("sent"), version + v0)?,
indexes_to_sent: ComputedValueVecsFromHeight::forced_import(
db,
&suffix("sent"),
Source::None,
version + v0,
sum(),
compute_dollars,
indexes,
)?,
height_to_satblocks_destroyed: EagerVec::forced_import(
db,
&suffix("satblocks_destroyed"),
version + v0,
)?,
height_to_satdays_destroyed: EagerVec::forced_import(
db,
&suffix("satdays_destroyed"),
version + v0,
)?,
indexes_to_coinblocks_destroyed: computed_h!(
"coinblocks_destroyed",
Source::Compute,
v2,
sum_cum(),
),
indexes_to_coindays_destroyed: computed_h!(
"coindays_destroyed",
Source::Compute,
v2,
sum_cum(),
),
// ==================== REALIZED CAP & PRICE ====================
height_to_realized_cap: compute_dollars
.then(|| eager!(Height, Dollars, "realized_cap", v0)),
indexes_to_realized_cap: compute_dollars
.then(|| computed_h!("realized_cap", Source::None, v0, last())),
indexes_to_realized_price: compute_dollars
.then(|| computed_h!("realized_price", Source::Compute, v0, last())),
indexes_to_realized_price_extra: compute_dollars.then(|| {
ComputedRatioVecsFromDateIndex::forced_import(
db,
&suffix("realized_price"),
Source::None,
version + v0,
indexes,
extended,
)
.unwrap()
}),
indexes_to_realized_cap_rel_to_own_market_cap: (compute_dollars && extended).then(
|| {
computed_h!(
"realized_cap_rel_to_own_market_cap",
Source::Compute,
v0,
last()
)
},
),
indexes_to_realized_cap_30d_delta: compute_dollars
.then(|| computed_di!("realized_cap_30d_delta", Source::Compute, v0, last())),
// ==================== REALIZED PROFIT & LOSS ====================
height_to_realized_profit: compute_dollars
.then(|| eager!(Height, Dollars, "realized_profit", v0)),
indexes_to_realized_profit: compute_dollars
.then(|| computed_h!("realized_profit", Source::None, v0, sum_cum())),
height_to_realized_loss: compute_dollars
.then(|| eager!(Height, Dollars, "realized_loss", v0)),
indexes_to_realized_loss: compute_dollars
.then(|| computed_h!("realized_loss", Source::None, v0, sum_cum())),
indexes_to_neg_realized_loss: compute_dollars
.then(|| computed_h!("neg_realized_loss", Source::Compute, v1, sum_cum())),
indexes_to_net_realized_pnl: compute_dollars
.then(|| computed_h!("net_realized_pnl", Source::Compute, v0, sum_cum())),
indexes_to_realized_value: compute_dollars
.then(|| computed_h!("realized_value", Source::Compute, v0, sum())),
indexes_to_realized_profit_rel_to_realized_cap: compute_dollars.then(|| {
computed_h!(
"realized_profit_rel_to_realized_cap",
Source::Compute,
v0,
sum()
)
}),
indexes_to_realized_loss_rel_to_realized_cap: compute_dollars.then(|| {
computed_h!(
"realized_loss_rel_to_realized_cap",
Source::Compute,
v0,
sum()
)
}),
indexes_to_net_realized_pnl_rel_to_realized_cap: compute_dollars.then(|| {
computed_h!(
"net_realized_pnl_rel_to_realized_cap",
Source::Compute,
v1,
sum()
)
}),
height_to_total_realized_pnl: compute_dollars
.then(|| eager!(Height, Dollars, "total_realized_pnl", v0)),
indexes_to_total_realized_pnl: compute_dollars
.then(|| computed_di!("total_realized_pnl", Source::Compute, v1, sum())),
dateindex_to_realized_profit_to_loss_ratio: (compute_dollars && extended)
.then(|| eager!(DateIndex, StoredF64, "realized_profit_to_loss_ratio", v1)),
// ==================== VALUE CREATED & DESTROYED ====================
height_to_value_created: compute_dollars
.then(|| eager!(Height, Dollars, "value_created", v0)),
indexes_to_value_created: compute_dollars
.then(|| computed_h!("value_created", Source::None, v0, sum())),
height_to_value_destroyed: compute_dollars
.then(|| eager!(Height, Dollars, "value_destroyed", v0)),
indexes_to_value_destroyed: compute_dollars
.then(|| computed_h!("value_destroyed", Source::None, v0, sum())),
height_to_adjusted_value_created: (compute_dollars && compute_adjusted)
.then(|| eager!(Height, Dollars, "adjusted_value_created", v0)),
indexes_to_adjusted_value_created: (compute_dollars && compute_adjusted)
.then(|| computed_h!("adjusted_value_created", Source::None, v0, sum())),
height_to_adjusted_value_destroyed: (compute_dollars && compute_adjusted)
.then(|| eager!(Height, Dollars, "adjusted_value_destroyed", v0)),
indexes_to_adjusted_value_destroyed: (compute_dollars && compute_adjusted)
.then(|| computed_h!("adjusted_value_destroyed", Source::None, v0, sum())),
// ==================== SOPR ====================
dateindex_to_sopr: compute_dollars.then(|| eager!(DateIndex, StoredF64, "sopr", v1)),
dateindex_to_sopr_7d_ema: compute_dollars
.then(|| eager!(DateIndex, StoredF64, "sopr_7d_ema", v1)),
dateindex_to_sopr_30d_ema: compute_dollars
.then(|| eager!(DateIndex, StoredF64, "sopr_30d_ema", v1)),
dateindex_to_adjusted_sopr: (compute_dollars && compute_adjusted)
.then(|| eager!(DateIndex, StoredF64, "adjusted_sopr", v1)),
dateindex_to_adjusted_sopr_7d_ema: (compute_dollars && compute_adjusted)
.then(|| eager!(DateIndex, StoredF64, "adjusted_sopr_7d_ema", v1)),
dateindex_to_adjusted_sopr_30d_ema: (compute_dollars && compute_adjusted)
.then(|| eager!(DateIndex, StoredF64, "adjusted_sopr_30d_ema", v1)),
// ==================== SELL SIDE RISK ====================
dateindex_to_sell_side_risk_ratio: compute_dollars
.then(|| eager!(DateIndex, StoredF32, "sell_side_risk_ratio", v1)),
dateindex_to_sell_side_risk_ratio_7d_ema: compute_dollars
.then(|| eager!(DateIndex, StoredF32, "sell_side_risk_ratio_7d_ema", v1)),
dateindex_to_sell_side_risk_ratio_30d_ema: compute_dollars
.then(|| eager!(DateIndex, StoredF32, "sell_side_risk_ratio_30d_ema", v1)),
// ==================== SUPPLY IN PROFIT/LOSS ====================
height_to_supply_in_profit: compute_dollars
.then(|| eager!(Height, Sats, "supply_in_profit", v0)),
indexes_to_supply_in_profit: compute_dollars.then(|| {
ComputedValueVecsFromDateIndex::forced_import(
db,
&suffix("supply_in_profit"),
dateindex_to_supply_in_profit
.as_ref()
.map(|v| v.boxed_clone())
.into(),
version + v0,
last(),
compute_dollars,
indexes,
)
.unwrap()
}),
height_to_supply_in_loss: compute_dollars
.then(|| eager!(Height, Sats, "supply_in_loss", v0)),
indexes_to_supply_in_loss: compute_dollars.then(|| {
ComputedValueVecsFromDateIndex::forced_import(
db,
&suffix("supply_in_loss"),
dateindex_to_supply_in_loss
.as_ref()
.map(|v| v.boxed_clone())
.into(),
version + v0,
last(),
compute_dollars,
indexes,
)
.unwrap()
}),
dateindex_to_supply_in_profit,
dateindex_to_supply_in_loss,
height_to_supply_in_profit_value: compute_dollars.then(|| {
ComputedHeightValueVecs::forced_import(
db,
&suffix("supply_in_profit"),
Source::None,
version + v0,
compute_dollars,
)
.unwrap()
}),
height_to_supply_in_loss_value: compute_dollars.then(|| {
ComputedHeightValueVecs::forced_import(
db,
&suffix("supply_in_loss"),
Source::None,
version + v0,
compute_dollars,
)
.unwrap()
}),
// ==================== UNREALIZED PROFIT & LOSS ====================
height_to_unrealized_profit: compute_dollars
.then(|| eager!(Height, Dollars, "unrealized_profit", v0)),
indexes_to_unrealized_profit: compute_dollars.then(|| {
ComputedVecsFromDateIndex::forced_import(
db,
&suffix("unrealized_profit"),
dateindex_to_unrealized_profit
.as_ref()
.map(|v| v.boxed_clone())
.into(),
version + v0,
indexes,
last(),
)
.unwrap()
}),
height_to_unrealized_loss: compute_dollars
.then(|| eager!(Height, Dollars, "unrealized_loss", v0)),
indexes_to_unrealized_loss: compute_dollars.then(|| {
ComputedVecsFromDateIndex::forced_import(
db,
&suffix("unrealized_loss"),
dateindex_to_unrealized_loss
.as_ref()
.map(|v| v.boxed_clone())
.into(),
version + v0,
indexes,
last(),
)
.unwrap()
}),
dateindex_to_unrealized_profit,
dateindex_to_unrealized_loss,
height_to_neg_unrealized_loss: compute_dollars
.then(|| eager!(Height, Dollars, "neg_unrealized_loss", v0)),
indexes_to_neg_unrealized_loss: compute_dollars
.then(|| computed_di!("neg_unrealized_loss", Source::Compute, v0, last())),
height_to_net_unrealized_pnl: compute_dollars
.then(|| eager!(Height, Dollars, "net_unrealized_pnl", v0)),
indexes_to_net_unrealized_pnl: compute_dollars
.then(|| computed_di!("net_unrealized_pnl", Source::Compute, v0, last())),
height_to_total_unrealized_pnl: compute_dollars
.then(|| eager!(Height, Dollars, "total_unrealized_pnl", v0)),
indexes_to_total_unrealized_pnl: compute_dollars
.then(|| computed_di!("total_unrealized_pnl", Source::Compute, v0, last())),
// ==================== PRICE PAID ====================
height_to_min_price_paid: compute_dollars
.then(|| eager!(Height, Dollars, "min_price_paid", v0)),
indexes_to_min_price_paid: compute_dollars
.then(|| computed_h!("min_price_paid", Source::None, v0, last())),
height_to_max_price_paid: compute_dollars
.then(|| eager!(Height, Dollars, "max_price_paid", v0)),
indexes_to_max_price_paid: compute_dollars
.then(|| computed_h!("max_price_paid", Source::None, v0, last())),
price_percentiles: (compute_dollars && extended).then(|| {
PricePercentiles::forced_import(db, &suffix(""), version + v0, indexes, true)
.unwrap()
}),
// ==================== RELATIVE METRICS: UNREALIZED vs MARKET CAP ====================
height_to_unrealized_profit_rel_to_market_cap: compute_dollars
.then(|| eager!(Height, StoredF32, "unrealized_profit_rel_to_market_cap", v0)),
height_to_unrealized_loss_rel_to_market_cap: compute_dollars
.then(|| eager!(Height, StoredF32, "unrealized_loss_rel_to_market_cap", v0)),
height_to_neg_unrealized_loss_rel_to_market_cap: compute_dollars.then(|| {
eager!(
Height,
StoredF32,
"neg_unrealized_loss_rel_to_market_cap",
v0
)
}),
height_to_net_unrealized_pnl_rel_to_market_cap: compute_dollars.then(|| {
eager!(
Height,
StoredF32,
"net_unrealized_pnl_rel_to_market_cap",
v1
)
}),
indexes_to_unrealized_profit_rel_to_market_cap: compute_dollars.then(|| {
computed_di!(
"unrealized_profit_rel_to_market_cap",
Source::Compute,
v1,
last()
)
}),
indexes_to_unrealized_loss_rel_to_market_cap: compute_dollars.then(|| {
computed_di!(
"unrealized_loss_rel_to_market_cap",
Source::Compute,
v1,
last()
)
}),
indexes_to_neg_unrealized_loss_rel_to_market_cap: compute_dollars.then(|| {
computed_di!(
"neg_unrealized_loss_rel_to_market_cap",
Source::Compute,
v1,
last()
)
}),
indexes_to_net_unrealized_pnl_rel_to_market_cap: compute_dollars.then(|| {
computed_di!(
"net_unrealized_pnl_rel_to_market_cap",
Source::Compute,
v1,
last()
)
}),
// ==================== RELATIVE METRICS: UNREALIZED vs OWN MARKET CAP ====================
height_to_unrealized_profit_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
eager!(
Height,
StoredF32,
"unrealized_profit_rel_to_own_market_cap",
v1
)
}),
height_to_unrealized_loss_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
eager!(
Height,
StoredF32,
"unrealized_loss_rel_to_own_market_cap",
v1
)
}),
height_to_neg_unrealized_loss_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
eager!(
Height,
StoredF32,
"neg_unrealized_loss_rel_to_own_market_cap",
v1
)
}),
height_to_net_unrealized_pnl_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
eager!(
Height,
StoredF32,
"net_unrealized_pnl_rel_to_own_market_cap",
v2
)
}),
indexes_to_unrealized_profit_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
computed_di!(
"unrealized_profit_rel_to_own_market_cap",
Source::Compute,
v2,
last()
)
}),
indexes_to_unrealized_loss_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
computed_di!(
"unrealized_loss_rel_to_own_market_cap",
Source::Compute,
v2,
last()
)
}),
indexes_to_neg_unrealized_loss_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
computed_di!(
"neg_unrealized_loss_rel_to_own_market_cap",
Source::Compute,
v2,
last()
)
}),
indexes_to_net_unrealized_pnl_rel_to_own_market_cap: (compute_dollars
&& extended
&& compute_rel_to_all)
.then(|| {
computed_di!(
"net_unrealized_pnl_rel_to_own_market_cap",
Source::Compute,
v2,
last()
)
}),
// ==================== RELATIVE METRICS: UNREALIZED vs OWN TOTAL UNREALIZED ====================
height_to_unrealized_profit_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
eager!(
Height,
StoredF32,
"unrealized_profit_rel_to_own_total_unrealized_pnl",
v0
)
}),
height_to_unrealized_loss_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
eager!(
Height,
StoredF32,
"unrealized_loss_rel_to_own_total_unrealized_pnl",
v0
)
}),
height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
eager!(
Height,
StoredF32,
"neg_unrealized_loss_rel_to_own_total_unrealized_pnl",
v0
)
}),
height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
eager!(
Height,
StoredF32,
"net_unrealized_pnl_rel_to_own_total_unrealized_pnl",
v1
)
}),
indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
computed_di!(
"unrealized_profit_rel_to_own_total_unrealized_pnl",
Source::Compute,
v1,
last()
)
}),
indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
computed_di!(
"unrealized_loss_rel_to_own_total_unrealized_pnl",
Source::Compute,
v1,
last()
)
}),
indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
computed_di!(
"neg_unrealized_loss_rel_to_own_total_unrealized_pnl",
Source::Compute,
v1,
last()
)
}),
indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: (compute_dollars
&& extended)
.then(|| {
computed_di!(
"net_unrealized_pnl_rel_to_own_total_unrealized_pnl",
Source::Compute,
v1,
last()
)
}),
// ==================== RELATIVE METRICS: SUPPLY vs CIRCULATING/OWN ====================
indexes_to_supply_rel_to_circulating_supply: compute_rel_to_all.then(|| {
computed_h!(
"supply_rel_to_circulating_supply",
Source::Compute,
v1,
last()
)
}),
height_to_supply_in_profit_rel_to_own_supply: compute_dollars
.then(|| eager!(Height, StoredF64, "supply_in_profit_rel_to_own_supply", v1)),
height_to_supply_in_loss_rel_to_own_supply: compute_dollars
.then(|| eager!(Height, StoredF64, "supply_in_loss_rel_to_own_supply", v1)),
indexes_to_supply_in_profit_rel_to_own_supply: compute_dollars.then(|| {
computed_di!(
"supply_in_profit_rel_to_own_supply",
Source::Compute,
v1,
last()
)
}),
indexes_to_supply_in_loss_rel_to_own_supply: compute_dollars.then(|| {
computed_di!(
"supply_in_loss_rel_to_own_supply",
Source::Compute,
v1,
last()
)
}),
height_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& compute_dollars)
.then(|| {
eager!(
Height,
StoredF64,
"supply_in_profit_rel_to_circulating_supply",
v1
)
}),
height_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& compute_dollars)
.then(|| {
eager!(
Height,
StoredF64,
"supply_in_loss_rel_to_circulating_supply",
v1
)
}),
indexes_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& compute_dollars)
.then(|| {
computed_di!(
"supply_in_profit_rel_to_circulating_supply",
Source::Compute,
v1,
last()
)
}),
indexes_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& compute_dollars)
.then(|| {
computed_di!(
"supply_in_loss_rel_to_circulating_supply",
Source::Compute,
v1,
last()
)
}),
// ==================== NET REALIZED PNL DELTAS ====================
indexes_to_net_realized_pnl_cumulative_30d_delta: compute_dollars.then(|| {
computed_di!(
"net_realized_pnl_cumulative_30d_delta",
Source::Compute,
v3,
last()
)
}),
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: compute_dollars
.then(|| {
computed_di!(
"net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap",
Source::Compute,
v3,
last()
)
}),
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: compute_dollars
.then(|| {
computed_di!(
"net_realized_pnl_cumulative_30d_delta_rel_to_market_cap",
Source::Compute,
v3,
last()
)
}),
})
}
/// Returns the minimum length of all height-indexed vectors.
/// Used to determine the starting point for processing.
///
/// Optional vecs that are absent contribute `usize::MAX` so they never
/// constrain the minimum.
pub fn min_height_vecs_len(&self) -> usize {
    // Length of an optional vec, or MAX when the vec is not tracked.
    macro_rules! opt_len {
        ($vec:expr) => {
            $vec.as_ref().map_or(usize::MAX, |v| v.len())
        };
    }
    [
        self.height_to_supply.len(),
        self.height_to_utxo_count.len(),
        opt_len!(self.height_to_realized_cap),
        opt_len!(self.height_to_realized_profit),
        opt_len!(self.height_to_realized_loss),
        opt_len!(self.height_to_value_created),
        opt_len!(self.height_to_adjusted_value_created),
        opt_len!(self.height_to_value_destroyed),
        opt_len!(self.height_to_adjusted_value_destroyed),
        opt_len!(self.height_to_supply_in_profit),
        opt_len!(self.height_to_supply_in_loss),
        opt_len!(self.height_to_unrealized_profit),
        opt_len!(self.height_to_unrealized_loss),
        opt_len!(self.height_to_min_price_paid),
        opt_len!(self.height_to_max_price_paid),
        self.height_to_sent.len(),
        self.height_to_satdays_destroyed.len(),
        self.height_to_satblocks_destroyed.len(),
    ]
    .into_iter()
    .min()
    .expect("array is non-empty")
}
/// Import state from a checkpoint when resuming processing.
/// Returns the next height to process from.
pub fn import_state(
&mut self,
starting_height: Height,
state: &mut CohortState,
) -> Result<Height> {
if let Some(mut prev_height) = starting_height.decremented() {
if self.height_to_realized_cap.as_mut().is_some() {
prev_height = state.import_at_or_before(prev_height)?;
}
state.supply.value = self.height_to_supply.into_iter().get_unwrap(prev_height);
state.supply.utxo_count = *self
.height_to_utxo_count
.into_iter()
.get_unwrap(prev_height);
if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
state.realized.um().cap =
height_to_realized_cap.into_iter().get_unwrap(prev_height);
}
Ok(prev_height.incremented())
} else {
Err(Error::Str("Unset"))
}
}
/// Validate that all computed versions match expected values, resetting if needed.
///
/// Always-present vecs are validated unconditionally. Dollar-dependent vecs are
/// only validated when `height_to_realized_cap` is present (it acts as the
/// sentinel for the whole dollar group), and the adjusted-value pair only when
/// it is itself present.
pub fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
    // Always-present vecs
    self.height_to_supply.validate_computed_version_or_reset(
        base_version + self.height_to_supply.inner_version(),
    )?;
    self.height_to_utxo_count
        .validate_computed_version_or_reset(
            base_version + self.height_to_utxo_count.inner_version(),
        )?;
    self.height_to_sent.validate_computed_version_or_reset(
        base_version + self.height_to_sent.inner_version(),
    )?;
    self.height_to_satblocks_destroyed
        .validate_computed_version_or_reset(
            base_version + self.height_to_satblocks_destroyed.inner_version(),
        )?;
    self.height_to_satdays_destroyed
        .validate_computed_version_or_reset(
            base_version + self.height_to_satdays_destroyed.inner_version(),
        )?;
    // Dollar-dependent vecs.
    // (fix: the previous version borrowed via a redundant `.as_mut().as_mut()`
    // and validated realized_cap inline; the optional-vec helper covers it too)
    if self.height_to_realized_cap.is_some() {
        Self::validate_optional_vec_version(&mut self.height_to_realized_cap, base_version)?;
        Self::validate_optional_vec_version(&mut self.height_to_realized_profit, base_version)?;
        Self::validate_optional_vec_version(&mut self.height_to_realized_loss, base_version)?;
        Self::validate_optional_vec_version(&mut self.height_to_value_created, base_version)?;
        Self::validate_optional_vec_version(&mut self.height_to_value_destroyed, base_version)?;
        Self::validate_optional_vec_version(
            &mut self.height_to_supply_in_profit,
            base_version,
        )?;
        Self::validate_optional_vec_version(&mut self.height_to_supply_in_loss, base_version)?;
        Self::validate_optional_vec_version(
            &mut self.height_to_unrealized_profit,
            base_version,
        )?;
        Self::validate_optional_vec_version(&mut self.height_to_unrealized_loss, base_version)?;
        Self::validate_optional_vec_version(
            &mut self.dateindex_to_supply_in_profit,
            base_version,
        )?;
        Self::validate_optional_vec_version(
            &mut self.dateindex_to_supply_in_loss,
            base_version,
        )?;
        Self::validate_optional_vec_version(
            &mut self.dateindex_to_unrealized_profit,
            base_version,
        )?;
        Self::validate_optional_vec_version(
            &mut self.dateindex_to_unrealized_loss,
            base_version,
        )?;
        Self::validate_optional_vec_version(&mut self.height_to_min_price_paid, base_version)?;
        Self::validate_optional_vec_version(&mut self.height_to_max_price_paid, base_version)?;
        // Adjusted-value vecs are optional even within the dollar group.
        if self.height_to_adjusted_value_created.is_some() {
            Self::validate_optional_vec_version(
                &mut self.height_to_adjusted_value_created,
                base_version,
            )?;
            Self::validate_optional_vec_version(
                &mut self.height_to_adjusted_value_destroyed,
                base_version,
            )?;
        }
    }
    Ok(())
}
/// Helper to validate an optional vec's version.
///
/// A `None` vec is vacuously valid.
fn validate_optional_vec_version<V: StoredVec>(
    vec: &mut Option<EagerVec<V>>,
    base_version: Version,
) -> Result<()> {
    match vec.as_mut() {
        Some(v) => v.validate_computed_version_or_reset(base_version + v.inner_version()),
        None => Ok(()),
    }
}
}

View File

@@ -1,19 +0,0 @@
//! Common vector structs and logic shared between UTXO and Address cohorts.
//!
//! This module contains the `Vecs` struct which holds all the computed vectors
//! for a single cohort, along with methods for importing, flushing, and computing.
//!
//! ## Module Organization
//!
//! The implementation is split across multiple files for maintainability:
//! - `vecs.rs`: Struct definition with field documentation
//! - `import.rs`: Import, validation, and initialization methods
//! - `push.rs`: Per-block push and flush methods
//! - `compute.rs`: Post-processing computation methods
mod compute;
mod import;
mod push;
mod vecs;
pub use vecs::Vecs;

View File

@@ -1,178 +0,0 @@
//! Push and flush methods for Vecs.
//!
//! This module contains methods for:
//! - `truncate_push`: Push state values to height-indexed vectors
//! - `compute_then_truncate_push_unrealized_states`: Compute and push unrealized states
//! - `safe_flush_stateful_vecs`: Safely flush all stateful vectors
use brk_error::Result;
use brk_types::{DateIndex, Dollars, Height, StoredU64};
use vecdb::{AnyStoredVec, Exit, GenericStoredVec};
use crate::{stateful::Flushable, states::CohortState, utils::OptionExt};
use super::Vecs;
impl Vecs {
    /// Push the current per-block `state` snapshot onto every height-indexed
    /// stateful vec, truncating stale entries at/after `height` first.
    pub fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> {
        self.height_to_supply
            .truncate_push(height, state.supply.value)?;
        self.height_to_utxo_count
            .truncate_push(height, StoredU64::from(state.supply.utxo_count))?;
        self.height_to_sent.truncate_push(height, state.sent)?;
        self.height_to_satblocks_destroyed
            .truncate_push(height, state.satblocks_destroyed)?;
        self.height_to_satdays_destroyed
            .truncate_push(height, state.satdays_destroyed)?;
        // Realized vecs exist only for dollar-tracking cohorts; the
        // realized-cap vec is the sentinel for the whole group.
        if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
            // Invariant: a cohort with realized vecs must carry realized state.
            // The dbg! dumps the offending state before panicking.
            let realized = state.realized.as_ref().unwrap_or_else(|| {
                dbg!((&state.realized, &state.supply));
                panic!();
            });
            height_to_realized_cap.truncate_push(height, realized.cap)?;
            // NOTE(review): `um()` appears to be an unwrap-mut helper from
            // `OptionExt` (imported above) — confirm against `utils::OptionExt`.
            self.height_to_realized_profit
                .um()
                .truncate_push(height, realized.profit)?;
            self.height_to_realized_loss
                .um()
                .truncate_push(height, realized.loss)?;
            self.height_to_value_created
                .um()
                .truncate_push(height, realized.value_created)?;
            self.height_to_value_destroyed
                .um()
                .truncate_push(height, realized.value_destroyed)?;
            // Adjusted-value vecs are optional even within the dollar group.
            if self.height_to_adjusted_value_created.is_some() {
                self.height_to_adjusted_value_created
                    .um()
                    .truncate_push(height, realized.adj_value_created)?;
                self.height_to_adjusted_value_destroyed
                    .um()
                    .truncate_push(height, realized.adj_value_destroyed)?;
            }
        }
        Ok(())
    }
    /// Compute unrealized profit/loss states at `height_price` (and, when a
    /// date-level state is produced, at `date_price`) and push them onto the
    /// corresponding height- and date-indexed vecs.
    ///
    /// No-op when `height_price` is `None`.
    /// NOTE(review): `date_price` is unwrapped whenever `height_price` is Some,
    /// so callers are expected to supply both together — confirm at call sites.
    pub fn compute_then_truncate_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
        state: &CohortState,
    ) -> Result<()> {
        if let Some(height_price) = height_price {
            // Cheapest and most expensive cost basis currently held by the
            // cohort; NAN when the cohort holds nothing.
            self.height_to_min_price_paid.um().truncate_push(
                height,
                state
                    .price_to_amount_first_key_value()
                    .map(|(&dollars, _)| dollars)
                    .unwrap_or(Dollars::NAN),
            )?;
            self.height_to_max_price_paid.um().truncate_push(
                height,
                state
                    .price_to_amount_last_key_value()
                    .map(|(&dollars, _)| dollars)
                    .unwrap_or(Dollars::NAN),
            )?;
            let (height_unrealized_state, date_unrealized_state) =
                state.compute_unrealized_states(height_price, date_price.unwrap());
            self.height_to_supply_in_profit
                .um()
                .truncate_push(height, height_unrealized_state.supply_in_profit)?;
            self.height_to_supply_in_loss
                .um()
                .truncate_push(height, height_unrealized_state.supply_in_loss)?;
            self.height_to_unrealized_profit
                .um()
                .truncate_push(height, height_unrealized_state.unrealized_profit)?;
            self.height_to_unrealized_loss
                .um()
                .truncate_push(height, height_unrealized_state.unrealized_loss)?;
            // A date-level state is only returned for some blocks (presumably
            // at date boundaries); `dateindex` must be Some in that case.
            if let Some(date_unrealized_state) = date_unrealized_state {
                let dateindex = dateindex.unwrap();
                self.dateindex_to_supply_in_profit
                    .um()
                    .truncate_push(dateindex, date_unrealized_state.supply_in_profit)?;
                self.dateindex_to_supply_in_loss
                    .um()
                    .truncate_push(dateindex, date_unrealized_state.supply_in_loss)?;
                self.dateindex_to_unrealized_profit
                    .um()
                    .truncate_push(dateindex, date_unrealized_state.unrealized_profit)?;
                self.dateindex_to_unrealized_loss
                    .um()
                    .truncate_push(dateindex, date_unrealized_state.unrealized_loss)?;
            }
            // Compute and push price percentiles
            if let Some(price_percentiles) = self.price_percentiles.as_mut() {
                let percentile_prices = state.compute_percentile_prices();
                price_percentiles.truncate_push(height, &percentile_prices)?;
            }
        }
        Ok(())
    }
    /// Write all stateful vecs (mmap, no fsync — see `Flushable::safe_write`)
    /// and commit the cohort-state checkpoint at `height`.
    pub fn safe_flush_stateful_vecs(
        &mut self,
        height: Height,
        exit: &Exit,
        state: &mut CohortState,
    ) -> Result<()> {
        self.height_to_supply.safe_write(exit)?;
        self.height_to_utxo_count.safe_write(exit)?;
        self.height_to_sent.safe_write(exit)?;
        self.height_to_satdays_destroyed.safe_write(exit)?;
        self.height_to_satblocks_destroyed.safe_write(exit)?;
        // Dollar-dependent vecs are written only when the group exists.
        if let Some(height_to_realized_cap) = self.height_to_realized_cap.as_mut() {
            height_to_realized_cap.safe_write(exit)?;
            self.height_to_realized_profit.um().safe_write(exit)?;
            self.height_to_realized_loss.um().safe_write(exit)?;
            self.height_to_value_created.um().safe_write(exit)?;
            self.height_to_value_destroyed.um().safe_write(exit)?;
            self.height_to_supply_in_profit.um().safe_write(exit)?;
            self.height_to_supply_in_loss.um().safe_write(exit)?;
            self.height_to_unrealized_profit.um().safe_write(exit)?;
            self.height_to_unrealized_loss.um().safe_write(exit)?;
            self.dateindex_to_supply_in_profit.um().safe_write(exit)?;
            self.dateindex_to_supply_in_loss.um().safe_write(exit)?;
            self.dateindex_to_unrealized_profit.um().safe_write(exit)?;
            self.dateindex_to_unrealized_loss.um().safe_write(exit)?;
            self.height_to_min_price_paid.um().safe_write(exit)?;
            self.height_to_max_price_paid.um().safe_write(exit)?;
            if self.height_to_adjusted_value_created.is_some() {
                self.height_to_adjusted_value_created
                    .um()
                    .safe_write(exit)?;
                self.height_to_adjusted_value_destroyed
                    .um()
                    .safe_write(exit)?;
            }
            // Uses Flushable trait - Option<T> impl handles None case
            self.price_percentiles.safe_write(exit)?;
        }
        state.commit(height)?;
        Ok(())
    }
}

View File

@@ -1,210 +0,0 @@
use brk_grouper::Filter;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32, StoredF64, StoredU64};
use vecdb::{EagerVec, PcoVec};
use crate::grouped::{
ComputedHeightValueVecs, ComputedRatioVecsFromDateIndex, ComputedValueVecsFromDateIndex,
ComputedValueVecsFromHeight, ComputedVecsFromDateIndex, ComputedVecsFromHeight,
PricePercentiles,
};
/// Common vectors shared between UTXO and Address cohorts.
///
/// This struct contains all the computed vectors for a single cohort. The fields are
/// organized into logical groups matching the initialization order in `forced_import`.
///
/// ## Naming convention (as observed in this struct)
/// - `height_to_*`: stored per block height (`EagerVec<PcoVec<Height, _>>`).
/// - `dateindex_to_*`: stored per day (`…<DateIndex, _>`).
/// - `indexes_to_*`: derived `Computed*` wrappers — presumably aggregated over
///   multiple time indexes; confirm against the `grouped` module.
/// - `Option` fields are populated only when the feature flag named in the
///   group comment (e.g. `compute_dollars`) was enabled at import time.
///
/// ## Field Groups
/// - **Supply & UTXO count**: Basic supply metrics (always computed)
/// - **Activity**: Sent amounts, satblocks/satdays destroyed
/// - **Realized**: Realized cap, profit/loss, value created/destroyed, SOPR
/// - **Unrealized**: Unrealized profit/loss, supply in profit/loss
/// - **Price**: Min/max price paid, price percentiles
/// - **Relative metrics**: Ratios relative to market cap, realized cap, etc.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Cohort membership filter; excluded from traversal (not a data vec).
    #[traversable(skip)]
    pub filter: Filter,
    // ==================== SUPPLY & UTXO COUNT ====================
    // Always computed - core supply metrics
    pub height_to_supply: EagerVec<PcoVec<Height, Sats>>,
    pub height_to_supply_value: ComputedHeightValueVecs,
    pub indexes_to_supply: ComputedValueVecsFromDateIndex,
    pub height_to_utxo_count: EagerVec<PcoVec<Height, StoredU64>>,
    pub indexes_to_utxo_count: ComputedVecsFromHeight<StoredU64>,
    pub height_to_supply_half_value: ComputedHeightValueVecs,
    pub indexes_to_supply_half: ComputedValueVecsFromDateIndex,
    // ==================== ACTIVITY ====================
    // Always computed - transaction activity metrics
    pub height_to_sent: EagerVec<PcoVec<Height, Sats>>,
    pub indexes_to_sent: ComputedValueVecsFromHeight,
    pub height_to_satblocks_destroyed: EagerVec<PcoVec<Height, Sats>>,
    pub height_to_satdays_destroyed: EagerVec<PcoVec<Height, Sats>>,
    pub indexes_to_coinblocks_destroyed: ComputedVecsFromHeight<StoredF64>,
    pub indexes_to_coindays_destroyed: ComputedVecsFromHeight<StoredF64>,
    // ==================== REALIZED CAP & PRICE ====================
    // Conditional on compute_dollars
    pub height_to_realized_cap: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_realized_cap: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_realized_price: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_realized_price_extra: Option<ComputedRatioVecsFromDateIndex>,
    pub indexes_to_realized_cap_rel_to_own_market_cap: Option<ComputedVecsFromHeight<StoredF32>>,
    pub indexes_to_realized_cap_30d_delta: Option<ComputedVecsFromDateIndex<Dollars>>,
    // ==================== REALIZED PROFIT & LOSS ====================
    // Conditional on compute_dollars
    pub height_to_realized_profit: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_realized_profit: Option<ComputedVecsFromHeight<Dollars>>,
    pub height_to_realized_loss: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_realized_loss: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_neg_realized_loss: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_net_realized_pnl: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_realized_value: Option<ComputedVecsFromHeight<Dollars>>,
    pub indexes_to_realized_profit_rel_to_realized_cap: Option<ComputedVecsFromHeight<StoredF32>>,
    pub indexes_to_realized_loss_rel_to_realized_cap: Option<ComputedVecsFromHeight<StoredF32>>,
    pub indexes_to_net_realized_pnl_rel_to_realized_cap: Option<ComputedVecsFromHeight<StoredF32>>,
    pub height_to_total_realized_pnl: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_total_realized_pnl: Option<ComputedVecsFromDateIndex<Dollars>>,
    pub dateindex_to_realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    // ==================== VALUE CREATED & DESTROYED ====================
    // Conditional on compute_dollars
    pub height_to_value_created: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_value_created: Option<ComputedVecsFromHeight<Dollars>>,
    pub height_to_value_destroyed: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_value_destroyed: Option<ComputedVecsFromHeight<Dollars>>,
    // "Adjusted" variants are optional even within the dollar group (see the
    // `is_some()` gating in the push/flush/validate methods).
    pub height_to_adjusted_value_created: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_adjusted_value_created: Option<ComputedVecsFromHeight<Dollars>>,
    pub height_to_adjusted_value_destroyed: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_adjusted_value_destroyed: Option<ComputedVecsFromHeight<Dollars>>,
    // ==================== SOPR ====================
    // Spent Output Profit Ratio - conditional on compute_dollars
    pub dateindex_to_sopr: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    pub dateindex_to_sopr_7d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    pub dateindex_to_sopr_30d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    pub dateindex_to_adjusted_sopr: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    pub dateindex_to_adjusted_sopr_7d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    pub dateindex_to_adjusted_sopr_30d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
    // ==================== SELL SIDE RISK ====================
    // Conditional on compute_dollars
    pub dateindex_to_sell_side_risk_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF32>>>,
    pub dateindex_to_sell_side_risk_ratio_7d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF32>>>,
    pub dateindex_to_sell_side_risk_ratio_30d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF32>>>,
    // ==================== SUPPLY IN PROFIT/LOSS ====================
    // Conditional on compute_dollars
    pub height_to_supply_in_profit: Option<EagerVec<PcoVec<Height, Sats>>>,
    pub indexes_to_supply_in_profit: Option<ComputedValueVecsFromDateIndex>,
    pub height_to_supply_in_loss: Option<EagerVec<PcoVec<Height, Sats>>>,
    pub indexes_to_supply_in_loss: Option<ComputedValueVecsFromDateIndex>,
    pub dateindex_to_supply_in_profit: Option<EagerVec<PcoVec<DateIndex, Sats>>>,
    pub dateindex_to_supply_in_loss: Option<EagerVec<PcoVec<DateIndex, Sats>>>,
    pub height_to_supply_in_profit_value: Option<ComputedHeightValueVecs>,
    pub height_to_supply_in_loss_value: Option<ComputedHeightValueVecs>,
    // ==================== UNREALIZED PROFIT & LOSS ====================
    // Conditional on compute_dollars
    pub height_to_unrealized_profit: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_unrealized_profit: Option<ComputedVecsFromDateIndex<Dollars>>,
    pub height_to_unrealized_loss: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_unrealized_loss: Option<ComputedVecsFromDateIndex<Dollars>>,
    pub dateindex_to_unrealized_profit: Option<EagerVec<PcoVec<DateIndex, Dollars>>>,
    pub dateindex_to_unrealized_loss: Option<EagerVec<PcoVec<DateIndex, Dollars>>>,
    pub height_to_neg_unrealized_loss: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_neg_unrealized_loss: Option<ComputedVecsFromDateIndex<Dollars>>,
    pub height_to_net_unrealized_pnl: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_net_unrealized_pnl: Option<ComputedVecsFromDateIndex<Dollars>>,
    pub height_to_total_unrealized_pnl: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_total_unrealized_pnl: Option<ComputedVecsFromDateIndex<Dollars>>,
    // ==================== PRICE PAID ====================
    // Conditional on compute_dollars
    pub height_to_min_price_paid: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_min_price_paid: Option<ComputedVecsFromHeight<Dollars>>,
    pub height_to_max_price_paid: Option<EagerVec<PcoVec<Height, Dollars>>>,
    pub indexes_to_max_price_paid: Option<ComputedVecsFromHeight<Dollars>>,
    pub price_percentiles: Option<PricePercentiles>,
    // ==================== RELATIVE METRICS: UNREALIZED vs MARKET CAP ====================
    // Conditional on compute_dollars
    pub height_to_unrealized_profit_rel_to_market_cap: Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_unrealized_loss_rel_to_market_cap: Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_neg_unrealized_loss_rel_to_market_cap:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_net_unrealized_pnl_rel_to_market_cap: Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub indexes_to_unrealized_profit_rel_to_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_unrealized_loss_rel_to_market_cap: Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_neg_unrealized_loss_rel_to_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_net_unrealized_pnl_rel_to_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    // ==================== RELATIVE METRICS: UNREALIZED vs OWN MARKET CAP ====================
    // Conditional on compute_dollars && extended && compute_rel_to_all
    pub height_to_unrealized_profit_rel_to_own_market_cap:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_unrealized_loss_rel_to_own_market_cap:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_neg_unrealized_loss_rel_to_own_market_cap:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_net_unrealized_pnl_rel_to_own_market_cap:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub indexes_to_unrealized_profit_rel_to_own_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_unrealized_loss_rel_to_own_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_neg_unrealized_loss_rel_to_own_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_net_unrealized_pnl_rel_to_own_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    // ==================== RELATIVE METRICS: UNREALIZED vs OWN TOTAL UNREALIZED ====================
    // Conditional on compute_dollars && extended
    pub height_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
        Option<EagerVec<PcoVec<Height, StoredF32>>>,
    pub indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    // ==================== RELATIVE METRICS: SUPPLY vs CIRCULATING/OWN ====================
    // Conditional on compute_dollars
    pub indexes_to_supply_rel_to_circulating_supply: Option<ComputedVecsFromHeight<StoredF64>>,
    pub height_to_supply_in_profit_rel_to_own_supply: Option<EagerVec<PcoVec<Height, StoredF64>>>,
    pub height_to_supply_in_loss_rel_to_own_supply: Option<EagerVec<PcoVec<Height, StoredF64>>>,
    pub indexes_to_supply_in_profit_rel_to_own_supply: Option<ComputedVecsFromDateIndex<StoredF64>>,
    pub indexes_to_supply_in_loss_rel_to_own_supply: Option<ComputedVecsFromDateIndex<StoredF64>>,
    pub height_to_supply_in_profit_rel_to_circulating_supply:
        Option<EagerVec<PcoVec<Height, StoredF64>>>,
    pub height_to_supply_in_loss_rel_to_circulating_supply:
        Option<EagerVec<PcoVec<Height, StoredF64>>>,
    pub indexes_to_supply_in_profit_rel_to_circulating_supply:
        Option<ComputedVecsFromDateIndex<StoredF64>>,
    pub indexes_to_supply_in_loss_rel_to_circulating_supply:
        Option<ComputedVecsFromDateIndex<StoredF64>>,
    // ==================== NET REALIZED PNL DELTAS ====================
    // Conditional on compute_dollars
    pub indexes_to_net_realized_pnl_cumulative_30d_delta:
        Option<ComputedVecsFromDateIndex<Dollars>>,
    pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
    pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
        Option<ComputedVecsFromDateIndex<StoredF32>>,
}

View File

@@ -1,80 +0,0 @@
//! Traits for consistent state flushing and importing.
//!
//! These traits ensure all stateful components follow the same patterns
//! for checkpoint/resume operations, preventing bugs where new fields
//! are forgotten during flush operations.
use brk_error::Result;
use brk_types::Height;
use vecdb::Exit;
/// Trait for components that can be flushed to disk.
///
/// This is for simple flush operations that don't require height tracking.
///
/// NOTE(review): `exit` presumably coordinates writes with a pending shutdown
/// so they are not interrupted mid-write — confirm against `vecdb::Exit`.
pub trait Flushable {
    /// Safely flush data to disk.
    fn safe_flush(&mut self, exit: &Exit) -> Result<()>;
    /// Write to mmap without fsync. Data visible to readers immediately but not durable.
    fn safe_write(&mut self, exit: &Exit) -> Result<()>;
}
/// Trait for stateful components that track data indexed by height.
///
/// This ensures consistent patterns for:
/// - Flushing state at checkpoints
/// - Importing state when resuming from a checkpoint
/// - Resetting state when starting from scratch
pub trait HeightFlushable {
    /// Flush state to disk at the given height checkpoint.
    fn flush_at_height(&mut self, height: Height, exit: &Exit) -> Result<()>;
    /// Import state from the most recent checkpoint at or before the given height.
    /// Returns the actual height that was imported.
    ///
    /// NOTE(review): implementations may return a height below the requested
    /// one when the nearest checkpoint is older — callers must use the return
    /// value, not the argument.
    fn import_at_or_before(&mut self, height: Height) -> Result<Height>;
    /// Reset state for starting from scratch.
    fn reset(&mut self) -> Result<()>;
}
/// Blanket implementation for Option<T> where T: Flushable.
/// A `None` value flushes as a no-op.
impl<T: Flushable> Flushable for Option<T> {
    fn safe_flush(&mut self, exit: &Exit) -> Result<()> {
        match self.as_mut() {
            Some(inner) => inner.safe_flush(exit),
            None => Ok(()),
        }
    }
    fn safe_write(&mut self, exit: &Exit) -> Result<()> {
        match self.as_mut() {
            Some(inner) => inner.safe_write(exit),
            None => Ok(()),
        }
    }
}
/// Blanket implementation for Option<T> where T: HeightFlushable.
/// A `None` value has no stored state: flush/reset are no-ops and importing
/// reports the requested height unchanged.
impl<T: HeightFlushable> HeightFlushable for Option<T> {
    fn flush_at_height(&mut self, height: Height, exit: &Exit) -> Result<()> {
        match self.as_mut() {
            Some(inner) => inner.flush_at_height(height, exit),
            None => Ok(()),
        }
    }
    fn import_at_or_before(&mut self, height: Height) -> Result<Height> {
        match self.as_mut() {
            Some(inner) => inner.import_at_or_before(height),
            None => Ok(height),
        }
    }
    fn reset(&mut self) -> Result<()> {
        match self.as_mut() {
            Some(inner) => inner.reset(),
            None => Ok(()),
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,53 +0,0 @@
use std::collections::BTreeMap;
use vecdb::{BytesVec, BytesVecValue, PcoVec, PcoVecValue, VecIndex};
/// Floor-lookup map: `get(key)` returns the value of the greatest stored key
/// at or below `key`. Built by inverting a stored vec (value -> position).
#[derive(Debug)]
pub struct RangeMap<I, T>(BTreeMap<I, T>);
impl<I, T> RangeMap<I, T>
where
    I: VecIndex,
    T: VecIndex,
{
    /// Returns the value of the greatest entry whose key is at or below `key`
    /// (floor lookup), or `None` when every stored key is above `key`.
    pub fn get(&self, key: I) -> Option<&T> {
        // `range(..=key)` only yields keys <= key, so the defensive
        // `min > key` check the previous version carried was unreachable
        // dead code and has been removed.
        self.0.range(..=key).next_back().map(|(_, value)| value)
    }
}
impl<I, T> From<&BytesVec<I, T>> for RangeMap<T, I>
where
    I: VecIndex,
    T: VecIndex + BytesVecValue,
{
    /// Builds the inverted map: each stored value becomes a key mapping back
    /// to its position in the source vec. On duplicate values the later
    /// position wins (last insert).
    #[inline]
    fn from(vec: &BytesVec<I, T>) -> Self {
        let mut map = BTreeMap::new();
        for (position, value) in vec.into_iter().enumerate() {
            map.insert(value, I::from(position));
        }
        Self(map)
    }
}
impl<I, T> From<&PcoVec<I, T>> for RangeMap<T, I>
where
    I: VecIndex,
    T: VecIndex + PcoVecValue,
{
    /// Builds the inverted map: each stored value becomes a key mapping back
    /// to its position in the source vec. On duplicate values the later
    /// position wins (last insert).
    #[inline]
    fn from(vec: &PcoVec<I, T>) -> Self {
        let mut map = BTreeMap::new();
        for (position, value) in vec.into_iter().enumerate() {
            map.insert(value, I::from(position));
        }
        Self(map)
    }
}

View File

@@ -1,111 +0,0 @@
use brk_grouper::{ByAddressType, ByAnyAddress};
use brk_indexer::Indexer;
use brk_types::{OutputType, StoredU64, TxIndex};
use vecdb::{BoxedVecIterator, GenericStoredVec, Reader, VecIndex};
use super::Vecs;
/// Readers over the indexer's vecs, created once up front — presumably so
/// per-block processing does not re-create a reader per access (confirm with
/// callers).
pub struct IndexerReaders {
    pub txinindex_to_outpoint: Reader,
    pub txindex_to_first_txoutindex: Reader,
    pub txoutindex_to_value: Reader,
    pub txoutindex_to_outputtype: Reader,
    pub txoutindex_to_typeindex: Reader,
}
impl IndexerReaders {
    /// Creates one reader per indexer vec used during block processing.
    pub fn new(indexer: &Indexer) -> Self {
        let vecs = &indexer.vecs;
        Self {
            txinindex_to_outpoint: vecs.txinindex_to_outpoint.create_reader(),
            txindex_to_first_txoutindex: vecs.txindex_to_first_txoutindex.create_reader(),
            txoutindex_to_value: vecs.txoutindex_to_value.create_reader(),
            txoutindex_to_outputtype: vecs.txoutindex_to_outputtype.create_reader(),
            txoutindex_to_typeindex: vecs.txoutindex_to_typeindex.create_reader(),
        }
    }
}
/// Readers over the address-related vecs: one per address type for the
/// type-index -> any-address-index mapping, plus one per address-data store
/// (loaded / empty).
pub struct VecsReaders {
    pub addresstypeindex_to_anyaddressindex: ByAddressType<Reader>,
    pub anyaddressindex_to_anyaddressdata: ByAnyAddress<Reader>,
}
impl VecsReaders {
    /// Creates readers for every address-type index vec and both address-data vecs.
    pub fn new(vecs: &Vecs) -> Self {
        let indexes = &vecs.any_address_indexes;
        let data = &vecs.addresses_data;
        Self {
            addresstypeindex_to_anyaddressindex: ByAddressType {
                p2pk33: indexes.p2pk33.create_reader(),
                p2pk65: indexes.p2pk65.create_reader(),
                p2pkh: indexes.p2pkh.create_reader(),
                p2sh: indexes.p2sh.create_reader(),
                p2tr: indexes.p2tr.create_reader(),
                p2wpkh: indexes.p2wpkh.create_reader(),
                p2wsh: indexes.p2wsh.create_reader(),
                p2a: indexes.p2a.create_reader(),
            },
            anyaddressindex_to_anyaddressdata: ByAnyAddress {
                loaded: data.loaded.create_reader(),
                empty: data.empty.create_reader(),
            },
        }
    }
    /// Returns the any-address-index reader for the given output/address type.
    pub fn get_anyaddressindex_reader(&self, address_type: OutputType) -> &Reader {
        self.addresstypeindex_to_anyaddressindex
            .get_unwrap(address_type)
    }
}
/// Expands per-transaction output counts into a lookup table: one `TxIndex`
/// entry per output in the block, in block order.
///
/// Two passes: first collect the counts (advancing the iterator), then expand
/// into an exactly-preallocated vec.
pub fn build_txoutindex_to_txindex<'a>(
    block_first_txindex: TxIndex,
    block_tx_count: u64,
    txindex_to_output_count: &mut BoxedVecIterator<'a, TxIndex, StoredU64>,
) -> Vec<TxIndex> {
    let first_txindex = block_first_txindex.to_usize();
    // Pass 1: output count of every tx in the block.
    let counts: Vec<u64> = (0..block_tx_count as usize)
        .map(|tx_offset| {
            let txindex = TxIndex::from(first_txindex + tx_offset);
            u64::from(txindex_to_output_count.get_unwrap(txindex))
        })
        .collect();
    // Pass 2: repeat each txindex once per output, preallocating the total.
    let total = counts.iter().sum::<u64>() as usize;
    let mut txoutindex_to_txindex = Vec::with_capacity(total);
    for (tx_offset, &output_count) in counts.iter().enumerate() {
        let txindex = TxIndex::from(first_txindex + tx_offset);
        txoutindex_to_txindex.extend(std::iter::repeat(txindex).take(output_count as usize));
    }
    txoutindex_to_txindex
}
/// Expands per-transaction input counts into a lookup table: one `TxIndex`
/// entry per input in the block, in block order.
///
/// Mirrors `build_txoutindex_to_txindex`, driven by input counts instead.
pub fn build_txinindex_to_txindex<'a>(
    block_first_txindex: TxIndex,
    block_tx_count: u64,
    txindex_to_input_count: &mut BoxedVecIterator<'a, TxIndex, StoredU64>,
) -> Vec<TxIndex> {
    let first_txindex = block_first_txindex.to_usize();
    // Pass 1: input count of every tx in the block.
    let counts: Vec<u64> = (0..block_tx_count as usize)
        .map(|tx_offset| {
            let txindex = TxIndex::from(first_txindex + tx_offset);
            u64::from(txindex_to_input_count.get_unwrap(txindex))
        })
        .collect();
    // Pass 2: repeat each txindex once per input, preallocating the total.
    let total = counts.iter().sum::<u64>() as usize;
    let mut txinindex_to_txindex = Vec::with_capacity(total);
    for (tx_offset, &input_count) in counts.iter().enumerate() {
        let txindex = TxIndex::from(first_txindex + tx_offset);
        txinindex_to_txindex.extend(std::iter::repeat(txindex).take(input_count as usize));
    }
    txinindex_to_txindex
}

View File

@@ -1,59 +0,0 @@
use brk_error::Result;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, Version};
use vecdb::{Exit, IterableVec};
use crate::{Indexes, indexes, price};
/// Object-safe interface shared by all cohort vec sets: state lifecycle
/// (reset/import/flush), per-height pushes, and the first computation pass.
///
/// NOTE(review): method semantics below are inferred from names and
/// signatures; implementations live elsewhere in the crate.
pub trait DynCohortVecs: Send + Sync {
    /// Smallest length among this cohort's height-indexed vecs.
    fn min_height_vecs_len(&self) -> usize;
    /// Resets the in-memory state so processing restarts from height 0.
    fn reset_state_starting_height(&mut self);
    /// Imports persisted state at or before `starting_height`; returns the
    /// height processing should actually resume from.
    fn import_state(&mut self, starting_height: Height) -> Result<Height>;
    /// Checks stored vec versions against `base_version`.
    fn validate_computed_versions(&mut self, base_version: Version) -> Result<()>;
    /// Truncates vecs to `height` and pushes the current state's values.
    fn truncate_push(&mut self, height: Height) -> Result<()>;
    /// Computes unrealized P&L-style state for the block (and optionally the
    /// date) and pushes it, truncating first.
    fn compute_then_truncate_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
    ) -> Result<()>;
    /// Flushes stateful vecs to disk, guarded by `exit` for safe shutdown.
    fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()>;
    /// First pass of derived computations over the stateful vecs.
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part1(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()>;
}
/// Non-object-safe extension of [`DynCohortVecs`] for operations that need
/// generic parameters (iterable vec inputs, `&[&Self]` aggregation).
pub trait CohortVecs: DynCohortVecs {
    /// Derives this cohort's vecs by aggregating the already-computed vecs of
    /// `others` (used for overlapping/aggregate cohorts).
    fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()>;
    /// Second pass of derived computations, fed with chain-wide supply,
    /// market-cap and realized-cap series (price-dependent ones optional).
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part2(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        height_to_supply: &impl IterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
        height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> Result<()>;
}

View File

@@ -1,217 +0,0 @@
use brk_error::Result;
use brk_grouper::{ByAddressType, Filtered};
use brk_types::{
CheckedSub, Dollars, EmptyAddressData, Height, LoadedAddressData, Sats, Timestamp, TypeIndex,
};
use vecdb::VecIndex;
use crate::utils::OptionExt;
use super::{
address_cohorts,
addresstype::{AddressTypeToTypeIndexMap, AddressTypeToVec, HeightToAddressTypeToVec},
withaddressdatasource::WithAddressDataSource,
};
impl AddressTypeToVec<(TypeIndex, Sats)> {
    /// Applies received amounts to per-address data and the amount-range
    /// cohort states.
    ///
    /// For each `(type_index, value)` the address data is resolved in order:
    /// already-loaded map → empty-address map (promotion) → stored-or-new
    /// fallback. Address/empty counters are adjusted on creation/promotion,
    /// and the address is moved between amount-range cohorts when its new
    /// balance crosses a cohort boundary.
    #[allow(clippy::too_many_arguments)]
    pub fn process_received(
        self,
        vecs: &mut address_cohorts::Vecs,
        addresstype_to_typeindex_to_loadedaddressdata: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<LoadedAddressData>,
        >,
        addresstype_to_typeindex_to_emptyaddressdata: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<EmptyAddressData>,
        >,
        price: Option<Dollars>,
        addresstype_to_addr_count: &mut ByAddressType<u64>,
        addresstype_to_empty_addr_count: &mut ByAddressType<u64>,
        stored_or_new_addresstype_to_typeindex_to_addressdatawithsource: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<LoadedAddressData>,
        >,
    ) {
        self.unwrap().into_iter().for_each(|(_type, vec)| {
            vec.into_iter().for_each(|(type_index, value)| {
                // Flags set by the or_insert_with closure below; they tell us
                // whether counters need updating after resolution.
                let mut is_new = false;
                let mut from_any_empty = false;
                let addressdata_withsource = addresstype_to_typeindex_to_loadedaddressdata
                    .get_mut(_type)
                    .unwrap()
                    .entry(type_index)
                    .or_insert_with(|| {
                        // Not loaded yet: try promoting from the empty map,
                        // else pull from the stored-or-new fallback source.
                        addresstype_to_typeindex_to_emptyaddressdata
                            .get_mut(_type)
                            .unwrap()
                            .remove(&type_index)
                            .map(|ad| {
                                from_any_empty = true;
                                ad.into()
                            })
                            .unwrap_or_else(|| {
                                let addressdata =
                                    stored_or_new_addresstype_to_typeindex_to_addressdatawithsource
                                        .remove_for_type(_type, &type_index);
                                is_new = addressdata.is_new();
                                from_any_empty = addressdata.is_from_emptyaddressdata();
                                addressdata
                            })
                    });
                // A brand-new or revived address increases the live count; a
                // revived one also leaves the empty pool.
                if is_new || from_any_empty {
                    (*addresstype_to_addr_count.get_mut(_type).unwrap()) += 1;
                    if from_any_empty {
                        (*addresstype_to_empty_addr_count.get_mut(_type).unwrap()) -= 1;
                    }
                }
                let addressdata = addressdata_withsource.deref_mut();
                let prev_amount = addressdata.balance();
                let amount = prev_amount + value;
                // Does the new balance land in a different amount-range cohort?
                let filters_differ = vecs.amount_range.get(amount).filter()
                    != vecs.amount_range.get(prev_amount).filter();
                if is_new || from_any_empty || filters_differ {
                    // Cohort membership changes: subtract from the old cohort
                    // (unless the address wasn't counted anywhere yet), apply
                    // the receive, then add to the new cohort.
                    if !is_new && !from_any_empty {
                        vecs.amount_range
                            .get_mut(prev_amount)
                            .state
                            .um()
                            .subtract(addressdata);
                    }
                    addressdata.receive(value, price);
                    vecs.amount_range
                        .get_mut(amount)
                        .state
                        .um()
                        .add(addressdata);
                } else {
                    // Same cohort: let the cohort state apply the receive
                    // in place.
                    vecs.amount_range
                        .get_mut(amount)
                        .state
                        .um()
                        .receive(addressdata, value, price);
                }
            });
        });
    }
}
impl HeightToAddressTypeToVec<(TypeIndex, Sats)> {
    /// Applies sent amounts, grouped by the height the coins were received
    /// at, to per-address data and the amount-range cohort states.
    ///
    /// For each previous height it derives the age of the spent coins
    /// (blocks, fractional days, >1h flag) and the price at that height, then
    /// updates each spending address: addresses emptied by the spend are
    /// demoted to the empty-address map, addresses crossing an amount-range
    /// boundary are moved between cohorts, and all others are updated in
    /// place by the cohort state.
    #[allow(clippy::too_many_arguments)]
    pub fn process_sent(
        self,
        vecs: &mut address_cohorts::Vecs,
        addresstype_to_typeindex_to_loadedaddressdata: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<LoadedAddressData>,
        >,
        addresstype_to_typeindex_to_emptyaddressdata: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<EmptyAddressData>,
        >,
        price: Option<Dollars>,
        addresstype_to_addr_count: &mut ByAddressType<u64>,
        addresstype_to_empty_addr_count: &mut ByAddressType<u64>,
        height_to_price_close_vec: Option<&Vec<brk_types::Close<Dollars>>>,
        height_to_timestamp_fixed_vec: &[Timestamp],
        height: Height,
        timestamp: Timestamp,
        stored_or_new_addresstype_to_typeindex_to_addressdatawithsource: &mut AddressTypeToTypeIndexMap<
            WithAddressDataSource<LoadedAddressData>,
        >,
    ) -> Result<()> {
        self.0.into_iter().try_for_each(|(prev_height, v)| {
            // Price and timestamp at the height the coins were received.
            let prev_price = height_to_price_close_vec
                .as_ref()
                .map(|v| **v.get(prev_height.to_usize()).unwrap());
            let prev_timestamp = *height_to_timestamp_fixed_vec
                .get(prev_height.to_usize())
                .unwrap();
            let blocks_old = height.to_usize() - prev_height.to_usize();
            let days_old = timestamp.difference_in_days_between_float(prev_timestamp);
            let older_than_hour = timestamp
                .checked_sub(prev_timestamp)
                .unwrap()
                .is_more_than_hour();
            v.unwrap().into_iter().try_for_each(|(_type, vec)| {
                vec.into_iter().try_for_each(|(type_index, value)| {
                    let typeindex_to_loadedaddressdata =
                        addresstype_to_typeindex_to_loadedaddressdata.get_mut_unwrap(_type);
                    // A spending address must exist; load it on demand from
                    // the stored-or-new fallback source.
                    let addressdata_withsource = typeindex_to_loadedaddressdata
                        .entry(type_index)
                        .or_insert_with(|| {
                            stored_or_new_addresstype_to_typeindex_to_addressdatawithsource
                                .remove_for_type(_type, &type_index)
                        });
                    let addressdata = addressdata_withsource.deref_mut();
                    let prev_amount = addressdata.balance();
                    // Spending more than the balance is an invariant violation.
                    let amount = prev_amount.checked_sub(value).unwrap();
                    let will_be_empty = addressdata.has_1_utxos();
                    // Does the new balance land in a different amount-range cohort?
                    let filters_differ = vecs.amount_range.get(amount).filter()
                        != vecs.amount_range.get(prev_amount).filter();
                    if will_be_empty || filters_differ {
                        // Leaving the old cohort either way; apply the send on
                        // the address itself.
                        vecs.amount_range
                            .get_mut(prev_amount)
                            .state
                            .um()
                            .subtract(addressdata);
                        addressdata.send(value, prev_price)?;
                        if will_be_empty {
                            // Spending the last UTXO must zero the balance.
                            if amount.is_not_zero() {
                                unreachable!()
                            }
                            (*addresstype_to_addr_count.get_mut(_type).unwrap()) -= 1;
                            (*addresstype_to_empty_addr_count.get_mut(_type).unwrap()) += 1;
                            // Demote to the empty-address map.
                            let addressdata =
                                typeindex_to_loadedaddressdata.remove(&type_index).unwrap();
                            addresstype_to_typeindex_to_emptyaddressdata
                                .get_mut(_type)
                                .unwrap()
                                .insert(type_index, addressdata.into());
                        } else {
                            vecs.amount_range
                                .get_mut(amount)
                                .state
                                .um()
                                .add(addressdata);
                        }
                    } else {
                        // Same cohort: let the cohort state apply the send,
                        // including realized-age accounting.
                        vecs.amount_range.get_mut(amount).state.um().send(
                            addressdata,
                            value,
                            price,
                            prev_price,
                            blocks_old,
                            days_old,
                            older_than_hour,
                        )?;
                    }
                    Ok(())
                })
            })
        })
    }
}

View File

@@ -1,241 +0,0 @@
use std::{ops::Deref, path::Path};
use brk_error::Result;
use brk_grouper::{CohortContext, Filter, Filtered, StateLevel};
use brk_traversable::Traversable;
use brk_types::{Bitcoin, DateIndex, Dollars, Height, Sats, Version};
use vecdb::{Database, Exit, IterableVec};
use crate::{
Indexes, PriceToAmount, UTXOCohortState,
grouped::{PERCENTILES, PERCENTILES_LEN},
indexes, price,
stateful::{
common,
r#trait::{CohortVecs, DynCohortVecs},
},
utils::OptionExt,
};
/// Vectors plus optional in-memory state for a single UTXO cohort.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Height the in-memory state resumes from; None until imported/reset.
    state_starting_height: Option<Height>,
    /// Full cohort state; only populated when built with `StateLevel::Full`.
    #[traversable(skip)]
    pub state: Option<UTXOCohortState>,
    /// For aggregate cohorts (all, sth, lth) that only need price_to_amount for percentiles
    #[traversable(skip)]
    pub price_to_amount: Option<PriceToAmount>,
    /// Shared per-cohort vecs; flattened into this struct's traversal.
    #[traversable(flatten)]
    pub inner: common::Vecs,
}
impl Vecs {
    /// Builds a cohort's vecs, allocating in-memory state according to
    /// `state_level`: full state for `Full`, a standalone price→amount map
    /// for `PriceOnly` (when a price feed is available), neither for `None`.
    pub fn forced_import(
        db: &Database,
        filter: Filter,
        version: Version,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        states_path: &Path,
        state_level: StateLevel,
    ) -> Result<Self> {
        let compute_dollars = price.is_some();
        let full_name = filter.to_full_name(CohortContext::Utxo);

        // Full per-cohort state only for full-level cohorts.
        let state = state_level
            .is_full()
            .then(|| UTXOCohortState::new(states_path, &full_name, compute_dollars));

        // Standalone price→amount map for price-only cohorts with price data.
        let price_to_amount = (state_level.is_price_only() && compute_dollars)
            .then(|| PriceToAmount::create(states_path, &full_name));

        let inner = common::Vecs::forced_import(
            db,
            filter,
            CohortContext::Utxo,
            version,
            indexes,
            price,
        )?;

        Ok(Self {
            state_starting_height: None,
            state,
            price_to_amount,
            inner,
        })
    }
}
/// Mostly thin delegation to `inner`, threading the optional in-memory
/// `state` through where the common implementation needs it.
impl DynCohortVecs for Vecs {
    fn min_height_vecs_len(&self) -> usize {
        self.inner.min_height_vecs_len()
    }
    fn reset_state_starting_height(&mut self) {
        self.state_starting_height = Some(Height::ZERO);
    }
    /// Imports persisted state and records the height processing resumes from.
    fn import_state(&mut self, starting_height: Height) -> Result<Height> {
        let starting_height = self
            .inner
            .import_state(starting_height, self.state.um())?;
        self.state_starting_height = Some(starting_height);
        Ok(starting_height)
    }
    fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
        self.inner.validate_computed_versions(base_version)
    }
    /// No-op for heights below the imported state's starting height.
    /// Panics if called before `import_state`/`reset_state_starting_height`.
    fn truncate_push(&mut self, height: Height) -> Result<()> {
        if self.state_starting_height.unwrap() > height {
            return Ok(());
        }
        self.inner
            .truncate_push(height, self.state.u())
    }
    fn compute_then_truncate_push_unrealized_states(
        &mut self,
        height: Height,
        height_price: Option<Dollars>,
        dateindex: Option<DateIndex>,
        date_price: Option<Option<Dollars>>,
    ) -> Result<()> {
        self.inner.compute_then_truncate_push_unrealized_states(
            height,
            height_price,
            dateindex,
            date_price,
            self.state.um(),
        )
    }
    fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
        self.inner
            .safe_flush_stateful_vecs(height, exit, self.state.um())
    }
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part1(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        self.inner
            .compute_rest_part1(indexes, price, starting_indexes, exit)
    }
}
/// Pure delegation to `inner`; aggregation peers are unwrapped to their
/// `inner` vecs before forwarding.
impl CohortVecs for Vecs {
    fn compute_from_stateful(
        &mut self,
        starting_indexes: &Indexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()> {
        self.inner.compute_from_stateful(
            starting_indexes,
            &others.iter().map(|v| &v.inner).collect::<Vec<_>>(),
            exit,
        )
    }
    #[allow(clippy::too_many_arguments)]
    fn compute_rest_part2(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        height_to_supply: &impl IterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
        height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> Result<()> {
        self.inner.compute_rest_part2(
            indexes,
            price,
            starting_indexes,
            height_to_supply,
            dateindex_to_supply,
            height_to_market_cap,
            dateindex_to_market_cap,
            height_to_realized_cap,
            dateindex_to_realized_cap,
            exit,
        )
    }
}
impl Vecs {
    /// Compute percentile prices for aggregate cohorts that have standalone price_to_amount.
    /// Returns NaN array if price_to_amount is None or empty.
    pub fn compute_percentile_prices_from_standalone(
        &self,
        supply: Sats,
    ) -> [Dollars; PERCENTILES_LEN] {
        let mut prices = [Dollars::NAN; PERCENTILES_LEN];

        // Without a price→amount map or any supply there is nothing to rank.
        let Some(price_to_amount) = self.price_to_amount.as_ref() else {
            return prices;
        };
        if price_to_amount.is_empty() || supply == Sats::ZERO {
            return prices;
        }

        // Supply threshold each percentile must be reached at.
        let thresholds = PERCENTILES.map(|pct| supply * pct / 100);

        let mut running = Sats::ZERO;
        let mut next = 0;
        // Walk prices in ascending order, filling each percentile once the
        // accumulated amount crosses its threshold.
        'scan: for (&price, &sats) in price_to_amount.iter() {
            running += sats;
            while running >= thresholds[next] {
                prices[next] = price;
                next += 1;
                if next == PERCENTILES_LEN {
                    break 'scan;
                }
            }
        }
        prices
    }
}
/// Read-only passthrough to the shared per-cohort vecs.
impl Deref for Vecs {
    type Target = common::Vecs;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl Filtered for Vecs {
    /// The filter describing which UTXOs belong to this cohort.
    fn filter(&self) -> &Filter {
        &self.inner.filter
    }
}

View File

@@ -1,697 +0,0 @@
use std::path::Path;
use brk_error::Result;
use brk_grouper::{
AmountFilter, ByAgeRange, ByAmountRange, ByEpoch, ByGreatEqualAmount, ByLowerThanAmount,
ByMaxAge, ByMinAge, BySpendableType, ByTerm, Filter, Filtered, StateLevel, Term, TimeFilter,
UTXOGroups,
};
use brk_traversable::Traversable;
use brk_types::{
Bitcoin, CheckedSub, DateIndex, Dollars, HalvingEpoch, Height, ONE_DAY_IN_SEC, OutputType,
Sats, Timestamp, Version,
};
use derive_deref::{Deref, DerefMut};
use rayon::prelude::*;
use rustc_hash::FxHashMap;
use vecdb::{Database, Exit, IterableVec, VecIndex};
use crate::{
Indexes, indexes, price,
stateful::{Flushable, HeightFlushable, r#trait::DynCohortVecs},
states::{BlockState, Transacted},
utils::OptionExt,
};
use super::{r#trait::CohortVecs, utxo_cohort};
/// Module-local version tag, combined with the caller-supplied version in
/// `forced_import`.
const VERSION: Version = Version::new(0);
/// All UTXO cohort vecs, grouped by filter dimension (all/term/epoch/type/
/// age/amount ranges) via `UTXOGroups`.
#[derive(Clone, Deref, DerefMut, Traversable)]
pub struct Vecs(UTXOGroups<utxo_cohort::Vecs>);
impl Vecs {
    /// Builds every cohort group. Each cohort's `StateLevel` decides how much
    /// in-memory state it carries: `Full` for exact cohorts (epoch, type,
    /// age-range, amount-range), `PriceOnly` for aggregates (all, sth, lth)
    /// that only need percentile data, `None` for cohorts derived later by
    /// overlap (min/max age, lt/ge amount).
    pub fn forced_import(
        db: &Database,
        version: Version,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        states_path: &Path,
    ) -> Result<Self> {
        let v = version + VERSION + Version::ZERO;
        // Helper to create a cohort - booleans are now derived from filter
        let create = |filter: Filter, state_level: StateLevel| -> Result<utxo_cohort::Vecs> {
            utxo_cohort::Vecs::forced_import(
                db,
                filter,
                v,
                indexes,
                price,
                states_path,
                state_level,
            )
        };
        let full = |f: Filter| create(f, StateLevel::Full);
        let none = |f: Filter| create(f, StateLevel::None);
        Ok(Self(UTXOGroups {
            // Special case: all uses Version::ONE
            all: utxo_cohort::Vecs::forced_import(
                db,
                Filter::All,
                version + VERSION + Version::ONE,
                indexes,
                price,
                states_path,
                StateLevel::PriceOnly,
            )?,
            term: ByTerm {
                short: create(Filter::Term(Term::Sth), StateLevel::PriceOnly)?,
                long: create(Filter::Term(Term::Lth), StateLevel::PriceOnly)?,
            },
            epoch: ByEpoch {
                _0: full(Filter::Epoch(HalvingEpoch::new(0)))?,
                _1: full(Filter::Epoch(HalvingEpoch::new(1)))?,
                _2: full(Filter::Epoch(HalvingEpoch::new(2)))?,
                _3: full(Filter::Epoch(HalvingEpoch::new(3)))?,
                _4: full(Filter::Epoch(HalvingEpoch::new(4)))?,
            },
            type_: BySpendableType {
                p2pk65: full(Filter::Type(OutputType::P2PK65))?,
                p2pk33: full(Filter::Type(OutputType::P2PK33))?,
                p2pkh: full(Filter::Type(OutputType::P2PKH))?,
                p2sh: full(Filter::Type(OutputType::P2SH))?,
                p2wpkh: full(Filter::Type(OutputType::P2WPKH))?,
                p2wsh: full(Filter::Type(OutputType::P2WSH))?,
                p2tr: full(Filter::Type(OutputType::P2TR))?,
                p2a: full(Filter::Type(OutputType::P2A))?,
                p2ms: full(Filter::Type(OutputType::P2MS))?,
                empty: full(Filter::Type(OutputType::Empty))?,
                unknown: full(Filter::Type(OutputType::Unknown))?,
            },
            // Age bounds below are expressed in days.
            max_age: ByMaxAge {
                _1w: none(Filter::Time(TimeFilter::LowerThan(7)))?,
                _1m: none(Filter::Time(TimeFilter::LowerThan(30)))?,
                _2m: none(Filter::Time(TimeFilter::LowerThan(2 * 30)))?,
                _3m: none(Filter::Time(TimeFilter::LowerThan(3 * 30)))?,
                _4m: none(Filter::Time(TimeFilter::LowerThan(4 * 30)))?,
                _5m: none(Filter::Time(TimeFilter::LowerThan(5 * 30)))?,
                _6m: none(Filter::Time(TimeFilter::LowerThan(6 * 30)))?,
                _1y: none(Filter::Time(TimeFilter::LowerThan(365)))?,
                _2y: none(Filter::Time(TimeFilter::LowerThan(2 * 365)))?,
                _3y: none(Filter::Time(TimeFilter::LowerThan(3 * 365)))?,
                _4y: none(Filter::Time(TimeFilter::LowerThan(4 * 365)))?,
                _5y: none(Filter::Time(TimeFilter::LowerThan(5 * 365)))?,
                _6y: none(Filter::Time(TimeFilter::LowerThan(6 * 365)))?,
                _7y: none(Filter::Time(TimeFilter::LowerThan(7 * 365)))?,
                _8y: none(Filter::Time(TimeFilter::LowerThan(8 * 365)))?,
                _10y: none(Filter::Time(TimeFilter::LowerThan(10 * 365)))?,
                _12y: none(Filter::Time(TimeFilter::LowerThan(12 * 365)))?,
                _15y: none(Filter::Time(TimeFilter::LowerThan(15 * 365)))?,
            },
            min_age: ByMinAge {
                _1d: none(Filter::Time(TimeFilter::GreaterOrEqual(1)))?,
                _1w: none(Filter::Time(TimeFilter::GreaterOrEqual(7)))?,
                _1m: none(Filter::Time(TimeFilter::GreaterOrEqual(30)))?,
                _2m: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 30)))?,
                _3m: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 30)))?,
                _4m: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 30)))?,
                _5m: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 30)))?,
                _6m: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 30)))?,
                _1y: none(Filter::Time(TimeFilter::GreaterOrEqual(365)))?,
                _2y: none(Filter::Time(TimeFilter::GreaterOrEqual(2 * 365)))?,
                _3y: none(Filter::Time(TimeFilter::GreaterOrEqual(3 * 365)))?,
                _4y: none(Filter::Time(TimeFilter::GreaterOrEqual(4 * 365)))?,
                _5y: none(Filter::Time(TimeFilter::GreaterOrEqual(5 * 365)))?,
                _6y: none(Filter::Time(TimeFilter::GreaterOrEqual(6 * 365)))?,
                _7y: none(Filter::Time(TimeFilter::GreaterOrEqual(7 * 365)))?,
                _8y: none(Filter::Time(TimeFilter::GreaterOrEqual(8 * 365)))?,
                _10y: none(Filter::Time(TimeFilter::GreaterOrEqual(10 * 365)))?,
                _12y: none(Filter::Time(TimeFilter::GreaterOrEqual(12 * 365)))?,
            },
            // Disjoint day ranges covering [0, 15y) plus an open-ended tail.
            age_range: ByAgeRange {
                up_to_1d: full(Filter::Time(TimeFilter::Range(0..1)))?,
                _1d_to_1w: full(Filter::Time(TimeFilter::Range(1..7)))?,
                _1w_to_1m: full(Filter::Time(TimeFilter::Range(7..30)))?,
                _1m_to_2m: full(Filter::Time(TimeFilter::Range(30..60)))?,
                _2m_to_3m: full(Filter::Time(TimeFilter::Range(60..90)))?,
                _3m_to_4m: full(Filter::Time(TimeFilter::Range(90..120)))?,
                _4m_to_5m: full(Filter::Time(TimeFilter::Range(120..150)))?,
                _5m_to_6m: full(Filter::Time(TimeFilter::Range(150..180)))?,
                _6m_to_1y: full(Filter::Time(TimeFilter::Range(180..365)))?,
                _1y_to_2y: full(Filter::Time(TimeFilter::Range(365..730)))?,
                _2y_to_3y: full(Filter::Time(TimeFilter::Range(730..1095)))?,
                _3y_to_4y: full(Filter::Time(TimeFilter::Range(1095..1460)))?,
                _4y_to_5y: full(Filter::Time(TimeFilter::Range(1460..1825)))?,
                _5y_to_6y: full(Filter::Time(TimeFilter::Range(1825..2190)))?,
                _6y_to_7y: full(Filter::Time(TimeFilter::Range(2190..2555)))?,
                _7y_to_8y: full(Filter::Time(TimeFilter::Range(2555..2920)))?,
                _8y_to_10y: full(Filter::Time(TimeFilter::Range(2920..3650)))?,
                _10y_to_12y: full(Filter::Time(TimeFilter::Range(3650..4380)))?,
                _12y_to_15y: full(Filter::Time(TimeFilter::Range(4380..5475)))?,
                from_15y: full(Filter::Time(TimeFilter::GreaterOrEqual(15 * 365)))?,
            },
            // Disjoint balance buckets from 0 sats to 100k+ BTC.
            amount_range: ByAmountRange {
                _0sats: full(Filter::Amount(AmountFilter::LowerThan(Sats::_1)))?,
                _1sat_to_10sats: full(Filter::Amount(AmountFilter::Range(Sats::_1..Sats::_10)))?,
                _10sats_to_100sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_10..Sats::_100,
                )))?,
                _100sats_to_1k_sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_100..Sats::_1K,
                )))?,
                _1k_sats_to_10k_sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_1K..Sats::_10K,
                )))?,
                _10k_sats_to_100k_sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_10K..Sats::_100K,
                )))?,
                _100k_sats_to_1m_sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_100K..Sats::_1M,
                )))?,
                _1m_sats_to_10m_sats: full(Filter::Amount(AmountFilter::Range(
                    Sats::_1M..Sats::_10M,
                )))?,
                _10m_sats_to_1btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_10M..Sats::_1BTC,
                )))?,
                _1btc_to_10btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_1BTC..Sats::_10BTC,
                )))?,
                _10btc_to_100btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_10BTC..Sats::_100BTC,
                )))?,
                _100btc_to_1k_btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_100BTC..Sats::_1K_BTC,
                )))?,
                _1k_btc_to_10k_btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_1K_BTC..Sats::_10K_BTC,
                )))?,
                _10k_btc_to_100k_btc: full(Filter::Amount(AmountFilter::Range(
                    Sats::_10K_BTC..Sats::_100K_BTC,
                )))?,
                _100k_btc_or_more: full(Filter::Amount(AmountFilter::GreaterOrEqual(
                    Sats::_100K_BTC,
                )))?,
            },
            lt_amount: ByLowerThanAmount {
                _10sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_10)))?,
                _100sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_100)))?,
                _1k_sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_1K)))?,
                _10k_sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_10K)))?,
                _100k_sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_100K)))?,
                _1m_sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_1M)))?,
                _10m_sats: none(Filter::Amount(AmountFilter::LowerThan(Sats::_10M)))?,
                _1btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_1BTC)))?,
                _10btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_10BTC)))?,
                _100btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_100BTC)))?,
                _1k_btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_1K_BTC)))?,
                _10k_btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_10K_BTC)))?,
                _100k_btc: none(Filter::Amount(AmountFilter::LowerThan(Sats::_100K_BTC)))?,
            },
            ge_amount: ByGreatEqualAmount {
                _1sat: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_1)))?,
                _10sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_10)))?,
                _100sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_100)))?,
                _1k_sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_1K)))?,
                _10k_sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_10K)))?,
                _100k_sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_100K)))?,
                _1m_sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_1M)))?,
                _10m_sats: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_10M)))?,
                _1btc: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_1BTC)))?,
                _10btc: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_10BTC)))?,
                _100btc: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_100BTC)))?,
                _1k_btc: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_1K_BTC)))?,
                _10k_btc: none(Filter::Amount(AmountFilter::GreaterOrEqual(Sats::_10K_BTC)))?,
            },
        }))
    }
    /// Advances time-based cohort membership to the new block's timestamp:
    /// any historical block whose coins crossed a day boundary is moved
    /// between age-range cohorts, and the aggregate cohorts' price→amount
    /// maps are adjusted accordingly.
    pub fn tick_tock_next_block(&mut self, chain_state: &[BlockState], timestamp: Timestamp) {
        if chain_state.is_empty() {
            return;
        }
        let prev_timestamp = chain_state.last().unwrap().timestamp;
        // Only blocks whose age % ONE_DAY >= threshold can cross a day boundary.
        // Saves 1 subtraction + 2 divisions per block vs computing days_old directly.
        let elapsed = (*timestamp).saturating_sub(*prev_timestamp);
        let threshold = ONE_DAY_IN_SEC.saturating_sub(elapsed);
        // Extract all mutable references upfront to avoid borrow checker issues
        // Use a single destructuring to get non-overlapping mutable borrows
        let UTXOGroups {
            all,
            term,
            age_range,
            ..
        } = &mut self.0;
        let mut vecs = age_range
            .iter_mut()
            .map(|v| (v.filter().clone(), &mut v.state))
            .collect::<Vec<_>>();
        // Collect aggregate cohorts' filter and p2a for age transitions
        let mut aggregate_p2a: Vec<(Filter, Option<&mut crate::PriceToAmount>)> = vec![
            (all.filter().clone(), all.price_to_amount.as_mut()),
            (
                term.short.filter().clone(),
                term.short.price_to_amount.as_mut(),
            ),
            (
                term.long.filter().clone(),
                term.long.price_to_amount.as_mut(),
            ),
        ];
        chain_state
            .iter()
            .filter(|block_state| {
                // Cheap pre-filter: skip blocks that cannot have crossed a
                // day boundary given the elapsed time (see threshold above).
                let age = (*prev_timestamp).saturating_sub(*block_state.timestamp);
                age % ONE_DAY_IN_SEC >= threshold
            })
            .for_each(|block_state| {
                let prev_days_old =
                    prev_timestamp.difference_in_days_between(block_state.timestamp);
                let days_old = timestamp.difference_in_days_between(block_state.timestamp);
                if prev_days_old == days_old {
                    return;
                }
                // Move this block's remaining supply between any age-range
                // cohorts it entered or left.
                vecs.iter_mut().for_each(|(filter, state)| {
                    let is = filter.contains_time(days_old);
                    let was = filter.contains_time(prev_days_old);
                    if is && !was {
                        state
                            .as_mut()
                            .unwrap()
                            .increment(&block_state.supply, block_state.price);
                    } else if was && !is {
                        state
                            .as_mut()
                            .unwrap()
                            .decrement(&block_state.supply, block_state.price);
                    }
                });
                // Handle age transitions for aggregate cohorts' price_to_amount
                // Check which cohorts the UTXO was in vs is now in, and increment/decrement accordingly
                // Only process if there's remaining supply (like CohortState::increment/decrement do)
                if let Some(price) = block_state.price
                    && block_state.supply.value > Sats::ZERO
                {
                    aggregate_p2a.iter_mut().for_each(|(filter, p2a)| {
                        let is = filter.contains_time(days_old);
                        let was = filter.contains_time(prev_days_old);
                        if is && !was {
                            p2a.um().increment(price, &block_state.supply);
                        } else if was && !is {
                            p2a.um().decrement(price, &block_state.supply);
                        }
                    });
                }
            });
    }
    /// Applies spent outputs to all cohort states: for each source height,
    /// removes the spent supply from the chain state and routes a `send`
    /// update to the matching age-range/epoch, output-type and amount-range
    /// cohorts, plus the aggregate cohorts' price→amount maps.
    pub fn send(
        &mut self,
        height_to_sent: FxHashMap<Height, Transacted>,
        chain_state: &mut [BlockState],
    ) {
        // Extract all mutable references upfront to avoid borrow checker issues
        let UTXOGroups {
            all,
            term,
            age_range,
            epoch,
            type_,
            amount_range,
            ..
        } = &mut self.0;
        let mut time_based_vecs = age_range
            .iter_mut()
            .chain(epoch.iter_mut())
            .collect::<Vec<_>>();
        // Collect aggregate cohorts' filter and p2a for iteration
        let mut aggregate_p2a: Vec<(Filter, Option<&mut crate::PriceToAmount>)> = vec![
            (all.filter().clone(), all.price_to_amount.as_mut()),
            (
                term.short.filter().clone(),
                term.short.price_to_amount.as_mut(),
            ),
            (
                term.long.filter().clone(),
                term.long.price_to_amount.as_mut(),
            ),
        ];
        let last_block = chain_state.last().unwrap();
        let last_timestamp = last_block.timestamp;
        let current_price = last_block.price;
        let chain_state_len = chain_state.len();
        height_to_sent.into_iter().for_each(|(height, sent)| {
            // Deduct the spent supply from the block it was created in.
            chain_state[height.to_usize()].supply -= &sent.spendable_supply;
            let block_state = chain_state.get(height.to_usize()).unwrap();
            let prev_price = block_state.price;
            // Coin age relative to the chain tip, in blocks and days.
            let blocks_old = chain_state_len - 1 - height.to_usize();
            let days_old = last_timestamp.difference_in_days_between(block_state.timestamp);
            let days_old_float =
                last_timestamp.difference_in_days_between_float(block_state.timestamp);
            let older_than_hour = last_timestamp
                .checked_sub(block_state.timestamp)
                .unwrap()
                .is_more_than_hour();
            // Age-range and epoch cohorts matching the coin's age/epoch.
            time_based_vecs
                .iter_mut()
                .filter(|v| match v.filter() {
                    Filter::Time(TimeFilter::GreaterOrEqual(from)) => *from <= days_old,
                    Filter::Time(TimeFilter::LowerThan(to)) => *to > days_old,
                    Filter::Time(TimeFilter::Range(range)) => range.contains(&days_old),
                    Filter::Epoch(epoch) => *epoch == HalvingEpoch::from(height),
                    _ => unreachable!(),
                })
                .for_each(|vecs| {
                    vecs.state.um().send(
                        &sent.spendable_supply,
                        current_price,
                        prev_price,
                        blocks_old,
                        days_old_float,
                        older_than_hour,
                    );
                });
            // Per-output-type cohorts.
            sent.by_type
                .spendable
                .iter_typed()
                .for_each(|(output_type, supply_state)| {
                    type_.get_mut(output_type).state.um().send(
                        supply_state,
                        current_price,
                        prev_price,
                        blocks_old,
                        days_old_float,
                        older_than_hour,
                    )
                });
            // Per-amount-range cohorts.
            sent.by_size_group
                .iter_typed()
                .for_each(|(group, supply_state)| {
                    amount_range.get_mut(group).state.um().send(
                        supply_state,
                        current_price,
                        prev_price,
                        blocks_old,
                        days_old_float,
                        older_than_hour,
                    );
                });
            // Update aggregate cohorts' price_to_amount using filter.contains_time()
            if let Some(prev_price) = prev_price {
                let supply_state = &sent.spendable_supply;
                if supply_state.value.is_not_zero() {
                    aggregate_p2a
                        .iter_mut()
                        .filter(|(f, _)| f.contains_time(days_old))
                        .map(|(_, p2a)| p2a)
                        .for_each(|p2a| {
                            p2a.um().decrement(prev_price, supply_state);
                        });
                }
            }
        });
    }
    /// Applies newly created outputs at `height` to cohort states: the
    /// youngest age-range cohort, the current halving-epoch cohort, the
    /// per-type and per-amount cohorts, and (with a price) the aggregate
    /// cohorts' price→amount maps.
    pub fn receive(&mut self, received: Transacted, height: Height, price: Option<Dollars>) {
        let supply_state = received.spendable_supply;
        // New outputs are 0 days old and belong to the current epoch.
        [
            &mut self.0.age_range.up_to_1d,
            self.0.epoch.mut_vec_from_height(height),
        ]
        .into_iter()
        .for_each(|v| {
            v.state.um().receive(&supply_state, price);
        });
        // Update aggregate cohorts' price_to_amount
        // New UTXOs have days_old = 0, so use filter.contains_time(0) to check applicability
        if let Some(price) = price
            && supply_state.value.is_not_zero()
        {
            self.0
                .iter_aggregate_mut()
                .filter(|v| v.filter().contains_time(0))
                .for_each(|v| {
                    v.price_to_amount
                        .as_mut()
                        .unwrap()
                        .increment(price, &supply_state);
                });
        }
        // Per-output-type cohorts; each cohort's filter names its type.
        self.type_.iter_mut().for_each(|vecs| {
            let output_type = match vecs.filter() {
                Filter::Type(output_type) => *output_type,
                _ => unreachable!(),
            };
            vecs.state
                .as_mut()
                .unwrap()
                .receive(received.by_type.get(output_type), price)
        });
        // Per-amount-range cohorts.
        received
            .by_size_group
            .iter_typed()
            .for_each(|(group, supply_state)| {
                self.amount_range
                    .get_mut(group)
                    .state
                    .as_mut()
                    .unwrap()
                    .receive(supply_state, price);
            });
    }
    /// Derives vecs for overlapping cohorts (all, min/max age, term, ge/lt
    /// amount) by aggregating the disjoint age-range / amount-range cohorts
    /// each one includes. Pairing is done via `Filter::includes`; the actual
    /// aggregation runs in parallel via rayon.
    pub fn compute_overlapping_vecs(
        &mut self,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        let by_date_range = &self.0.age_range;
        let by_size_range = &self.0.amount_range;
        // `all` aggregates every age-range cohort.
        [(&mut self.0.all, by_date_range.iter().collect::<Vec<_>>())]
            .into_par_iter()
            .chain(self.0.min_age.par_iter_mut().map(|vecs| {
                let filter = vecs.filter().clone();
                (
                    vecs,
                    by_date_range
                        .iter()
                        .filter(|other| filter.includes(other.filter()))
                        .collect::<Vec<_>>(),
                )
            }))
            .chain(self.0.max_age.par_iter_mut().map(|vecs| {
                let filter = vecs.filter().clone();
                (
                    vecs,
                    by_date_range
                        .iter()
                        .filter(|other| filter.includes(other.filter()))
                        .collect::<Vec<_>>(),
                )
            }))
            .chain(self.0.term.par_iter_mut().map(|vecs| {
                let filter = vecs.filter().clone();
                (
                    vecs,
                    by_date_range
                        .iter()
                        .filter(|other| filter.includes(other.filter()))
                        .collect::<Vec<_>>(),
                )
            }))
            .chain(self.0.ge_amount.par_iter_mut().map(|vecs| {
                let filter = vecs.filter().clone();
                (
                    vecs,
                    by_size_range
                        .iter()
                        .filter(|other| filter.includes(other.filter()))
                        .collect::<Vec<_>>(),
                )
            }))
            .chain(self.0.lt_amount.par_iter_mut().map(|vecs| {
                let filter = vecs.filter().clone();
                (
                    vecs,
                    by_size_range
                        .iter()
                        .filter(|other| filter.includes(other.filter()))
                        .collect::<Vec<_>>(),
                )
            }))
            .try_for_each(|(vecs, stateful)| {
                vecs.compute_from_stateful(starting_indexes, &stateful, exit)
            })
    }
    /// Runs the first computation pass on every cohort, in parallel.
    pub fn compute_rest_part1(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        exit: &Exit,
    ) -> Result<()> {
        self.par_iter_mut()
            .try_for_each(|v| v.compute_rest_part1(indexes, price, starting_indexes, exit))
    }
    /// Runs the second computation pass on every cohort, in parallel,
    /// forwarding the chain-wide supply/market-cap/realized-cap series.
    #[allow(clippy::too_many_arguments)]
    pub fn compute_rest_part2(
        &mut self,
        indexes: &indexes::Vecs,
        price: Option<&price::Vecs>,
        starting_indexes: &Indexes,
        height_to_supply: &impl IterableVec<Height, Bitcoin>,
        dateindex_to_supply: &impl IterableVec<DateIndex, Bitcoin>,
        height_to_market_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_market_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        height_to_realized_cap: Option<&impl IterableVec<Height, Dollars>>,
        dateindex_to_realized_cap: Option<&impl IterableVec<DateIndex, Dollars>>,
        exit: &Exit,
    ) -> Result<()> {
        self.par_iter_mut().try_for_each(|v| {
            v.compute_rest_part2(
                indexes,
                price,
                starting_indexes,
                height_to_supply,
                dateindex_to_supply,
                height_to_market_cap,
                dateindex_to_market_cap,
                height_to_realized_cap,
                dateindex_to_realized_cap,
                exit,
            )
        })
    }
    /// Flushes all cohorts at `height`: stateful (separate) cohorts first,
    /// then the aggregate cohorts' price→amount maps and percentile vecs.
    pub fn safe_flush_stateful_vecs(&mut self, height: Height, exit: &Exit) -> Result<()> {
        // Flush stateful cohorts
        self.par_iter_separate_mut()
            .try_for_each(|v| v.safe_flush_stateful_vecs(height, exit))?;
        // Flush aggregate cohorts' price_to_amount and price_percentiles
        // Using traits ensures we can't forget to flush any field
        self.0.par_iter_aggregate_mut().try_for_each(|v| {
            v.price_to_amount.flush_at_height(height, exit)?;
            v.inner.price_percentiles.safe_write(exit)?;
            Ok(())
        })
    }
    /// Reset aggregate cohorts' price_to_amount when starting from scratch
    /// (errors propagate from the first cohort whose reset fails).
    pub fn reset_aggregate_price_to_amount(&mut self) -> Result<()> {
        self.0
            .iter_aggregate_mut()
            .try_for_each(|v| v.price_to_amount.reset())
    }
    /// Import aggregate cohorts' price_to_amount from disk when resuming from a checkpoint.
    /// Returns the height to start processing from (checkpoint_height + 1), matching the
    /// behavior of `common::import_state` for separate cohorts.
    ///
    /// Note: We don't check inner.min_height_vecs_len() for aggregate cohorts because their
    /// inner vecs (height_to_supply, etc.) are computed post-hoc by compute_overlapping_vecs,
    /// not maintained during the main processing loop.
    pub fn import_aggregate_price_to_amount(&mut self, height: Height) -> Result<Height> {
        // Match separate vecs behavior: decrement height to get prev_height
        let Some(mut prev_height) = height.decremented() else {
            // height is 0, return ZERO (caller will handle this)
            return Ok(Height::ZERO);
        };
        // Take the minimum over all aggregate cohorts so every map is
        // guaranteed to be at or before the returned checkpoint.
        for v in self.0.iter_aggregate_mut() {
            // Using HeightFlushable trait - if price_to_amount is None, returns height unchanged
            prev_height = prev_height.min(v.price_to_amount.import_at_or_before(prev_height)?);
        }
        // Return prev_height + 1, matching separate vecs behavior
        Ok(prev_height.incremented())
    }
    /// Compute and push percentiles for aggregate cohorts (all, sth, lth).
    /// Must be called after receive()/send() when price_to_amount is up to date.
    pub fn truncate_push_aggregate_percentiles(&mut self, height: Height) -> Result<()> {
        // Snapshot each age-range cohort's (filter, supply) so each aggregate
        // cohort can sum the supplies of the sub-cohorts it includes.
        let age_range_data: Vec<_> = self
            .0
            .age_range
            .iter()
            .map(|sub| (sub.filter().clone(), sub.state.u().supply.value))
            .collect();
        let results: Vec<_> = self
            .0
            .par_iter_aggregate()
            .map(|v| {
                // Aggregate cohorts are built with StateLevel::PriceOnly and
                // must always carry a price_to_amount map.
                if v.price_to_amount.is_none() {
                    panic!();
                }
                let filter = v.filter().clone();
                let supply = age_range_data
                    .iter()
                    .filter(|(sub_filter, _)| filter.includes(sub_filter))
                    .map(|(_, value)| *value)
                    .fold(Sats::ZERO, |acc, v| acc + v);
                let percentiles = v.compute_percentile_prices_from_standalone(supply);
                (filter, percentiles)
            })
            .collect();
        // Push results sequentially (requires &mut)
        for (filter, percentiles) in results {
            let v = self
                .0
                .iter_aggregate_mut()
                .find(|v| v.filter() == &filter)
                .unwrap();
            if let Some(pp) = v.inner.price_percentiles.as_mut() {
                pp.truncate_push(height, &percentiles)?;
            }
        }
        Ok(())
    }
}

View File

@@ -1,56 +0,0 @@
use brk_types::{EmptyAddressData, EmptyAddressIndex, LoadedAddressData, LoadedAddressIndex};
/// Wraps address data together with its provenance so it can later be written
/// back to the matching on-disk vec (loaded vs. empty) or inserted as new.
#[derive(Debug)]
pub enum WithAddressDataSource<T> {
    /// Data created fresh; no backing index exists yet.
    New(T),
    /// Data previously read from the loaded-address vec at this index.
    FromLoadedAddressDataVec((LoadedAddressIndex, T)),
    /// Data previously read from the empty-address vec at this index.
    FromEmptyAddressDataVec((EmptyAddressIndex, T)),
}
impl<T> WithAddressDataSource<T> {
    /// Whether the data was freshly created (no backing on-disk entry).
    pub fn is_new(&self) -> bool {
        matches!(self, Self::New(_))
    }
    /// Whether the data originated from the empty-address vec.
    pub fn is_from_emptyaddressdata(&self) -> bool {
        matches!(self, Self::FromEmptyAddressDataVec(_))
    }
    /// Mutable access to the wrapped data, regardless of its source.
    // NOTE(review): inherent method, not a `DerefMut` impl — callers invoke
    // `.deref_mut()` explicitly.
    pub fn deref_mut(&mut self) -> &mut T {
        match self {
            Self::New(v) => v,
            Self::FromLoadedAddressDataVec((_, v)) => v,
            Self::FromEmptyAddressDataVec((_, v)) => v,
        }
    }
}
impl From<WithAddressDataSource<EmptyAddressData>> for WithAddressDataSource<LoadedAddressData> {
#[inline]
fn from(value: WithAddressDataSource<EmptyAddressData>) -> Self {
match value {
WithAddressDataSource::New(v) => Self::New(v.into()),
WithAddressDataSource::FromLoadedAddressDataVec((i, v)) => {
Self::FromLoadedAddressDataVec((i, v.into()))
}
WithAddressDataSource::FromEmptyAddressDataVec((i, v)) => {
Self::FromEmptyAddressDataVec((i, v.into()))
}
}
}
}
impl From<WithAddressDataSource<LoadedAddressData>> for WithAddressDataSource<EmptyAddressData> {
#[inline]
fn from(value: WithAddressDataSource<LoadedAddressData>) -> Self {
match value {
WithAddressDataSource::New(v) => Self::New(v.into()),
WithAddressDataSource::FromLoadedAddressDataVec((i, v)) => {
Self::FromLoadedAddressDataVec((i, v.into()))
}
WithAddressDataSource::FromEmptyAddressDataVec((i, v)) => {
Self::FromEmptyAddressDataVec((i, v.into()))
}
}
}
}

View File

@@ -34,7 +34,7 @@ cargo add brk_indexer
## Quick Start
```rust
```rust,ignore
use brk_indexer::Indexer;
use brk_reader::Parser;
use bitcoincore_rpc::{Client, Auth};
@@ -112,7 +112,7 @@ Complete coverage of Bitcoin script types:
### Basic Indexing Operation
```rust
```rust,ignore
use brk_indexer::Indexer;
use brk_reader::Parser;
use std::path::Path;
@@ -135,7 +135,7 @@ println!("Total addresses: {}", final_indexes.total_address_count());
### Querying Indexed Data
```rust
```rust,ignore
use brk_indexer::Indexer;
use brk_types::{Height, TxidPrefix, AddressHash};
@@ -143,7 +143,7 @@ let indexer = Indexer::forced_import("./blockchain_index")?;
// Look up block hash by height
let height = Height::new(750000);
if let Some(block_hash) = indexer.vecs.height_to_blockhash.get(height)? {
if let Some(block_hash) = indexer.vecs.block.height_to_blockhash.get(height)? {
println!("Block 750000 hash: {}", block_hash);
}
@@ -162,7 +162,7 @@ if let Some(type_index) = indexer.stores.addresshash_to_typeindex.get(&address_h
### Incremental Processing
```rust
```rust,ignore
use brk_indexer::Indexer;
// Indexer automatically resumes from last processed height
@@ -181,7 +181,7 @@ println!("Processed {} new blocks",
### Address Type Analysis
```rust
```rust,ignore
use brk_indexer::Indexer;
use brk_types::OutputType;
@@ -189,7 +189,7 @@ let indexer = Indexer::forced_import("./blockchain_index")?;
// Analyze address distribution by type
for output_type in OutputType::as_vec() {
let count = indexer.vecs.txoutindex_to_outputtype
let count = indexer.vecs.txout.txoutindex_to_outputtype
.iter()
.filter(|&ot| ot == output_type)
.count();

View File

@@ -24,6 +24,7 @@ fn main() -> Result<()> {
dbg!(
indexer
.vecs
.txout
.txoutindex_to_value
.iter()?
.enumerate()

View File

@@ -8,7 +8,7 @@ fn run_benchmark(indexer: &Indexer) -> (Sats, std::time::Duration, usize) {
let mut sum = Sats::ZERO;
let mut count = 0;
for value in indexer.vecs.txoutindex_to_value.clean_iter().unwrap() {
for value in indexer.vecs.txout.txoutindex_to_value.clean_iter().unwrap() {
// for value in indexer.vecs.txoutindex_to_value.values() {
sum += value;
count += 1;

View File

@@ -1,785 +0,0 @@
use std::{
fs,
path::Path,
thread,
time::{Duration, Instant},
};
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::TxInIndex;
use rayon::prelude::*;
use vecdb::{AnyVec, GenericStoredVec, VecIndex};
/// Benchmark driver: times several strategies for reading outputs and inputs
/// from the indexer's on-disk vecs over multiple randomized runs, then prints
/// per-method statistics and the overall winner.
///
/// NOTE(review): reads an already-built index under `~/.brk` — this is an
/// I/O benchmark against real data, not a unit test.
fn main() -> Result<()> {
    brk_logger::init(Some(Path::new(".log")))?;
    let outputs_dir = Path::new(&std::env::var("HOME").unwrap()).join(".brk");
    fs::create_dir_all(&outputs_dir)?;
    let indexer = Indexer::forced_import(&outputs_dir)?;
    let vecs = indexer.vecs;
    let output_len = vecs.txoutindex_to_value.len();
    let input_len = vecs.txinindex_to_outpoint.len();
    dbg!(output_len, input_len);
    // Simulate processing blocks
    const NUM_BLOCKS: usize = 10_000;
    const OUTPUTS_PER_BLOCK: usize = 5_000;
    const INPUTS_PER_BLOCK: usize = 5_000;
    // Start deep into the vecs so reads are not served from early regions only.
    const OUTPUT_START_OFFSET: usize = 2_000_000_000;
    const INPUT_START_OFFSET: usize = 2_000_000_000;
    const NUM_RUNS: usize = 3;
    println!(
        "\n=== Running {} iterations of {} blocks ===",
        NUM_RUNS, NUM_BLOCKS
    );
    println!(" {} outputs per block", OUTPUTS_PER_BLOCK);
    println!(" {} inputs per block\n", INPUTS_PER_BLOCK);
    // Store all run times
    let mut method1_times = Vec::new();
    let mut method2_times = Vec::new();
    let mut method4_times = Vec::new();
    let mut method5_times = Vec::new();
    let mut method6_times = Vec::new();
    let mut method7_times = Vec::new();
    let mut method8_times = Vec::new();
    for run in 0..NUM_RUNS {
        println!("--- Run {}/{} ---", run + 1, NUM_RUNS);
        // Vary execution order per run to reduce cache/warm-up bias.
        let order = match run % 4 {
            0 => vec![1, 2, 4, 5, 6, 7, 8],
            1 => vec![8, 7, 6, 5, 4, 2, 1],
            2 => vec![2, 5, 8, 1, 7, 4, 6],
            _ => vec![6, 4, 7, 1, 8, 5, 2],
        };
        let mut run_times = [Duration::ZERO; 7];
        for &method in &order {
            match method {
                1 => {
                    let time = run_method1(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    );
                    run_times[0] = time;
                }
                2 => {
                    let time = run_method2(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    )?;
                    run_times[1] = time;
                }
                4 => {
                    let time = run_method4(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    )?;
                    run_times[2] = time;
                }
                5 => {
                    let time = run_method5(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    );
                    run_times[3] = time;
                }
                6 => {
                    let time = run_method6(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    )?;
                    run_times[4] = time;
                }
                7 => {
                    let time = run_method7(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    );
                    run_times[5] = time;
                }
                8 => {
                    let time = run_method8(
                        &vecs,
                        NUM_BLOCKS,
                        OUTPUTS_PER_BLOCK,
                        INPUTS_PER_BLOCK,
                        OUTPUT_START_OFFSET,
                        INPUT_START_OFFSET,
                    );
                    run_times[6] = time;
                }
                _ => unreachable!(),
            }
        }
        method1_times.push(run_times[0]);
        method2_times.push(run_times[1]);
        method4_times.push(run_times[2]);
        method5_times.push(run_times[3]);
        method6_times.push(run_times[4]);
        method7_times.push(run_times[5]);
        method8_times.push(run_times[6]);
        println!(" Method 1: {:?}", run_times[0]);
        println!(" Method 2: {:?}", run_times[1]);
        println!(" Method 4: {:?}", run_times[2]);
        println!(" Method 5: {:?}", run_times[3]);
        println!(" Method 6: {:?}", run_times[4]);
        println!(" Method 7: {:?}", run_times[5]);
        println!(" Method 8: {:?}", run_times[6]);
        println!();
    }
    // Calculate statistics
    println!("\n=== Statistics over {} runs ===\n", NUM_RUNS);
    let methods = vec![
        ("Method 1 (Parallel Interleaved)", &method1_times),
        (
            "Method 2 (Sequential Read + Parallel Process)",
            &method2_times,
        ),
        (
            "Method 4 (Parallel Sequential Reads + Parallel Process)",
            &method4_times,
        ),
        ("Method 5 (Chunked Parallel)", &method5_times),
        ("Method 6 (Prefetch)", &method6_times),
        ("Method 7 (Reuse Readers)", &method7_times),
        ("Method 8 (Bulk Processing)", &method8_times),
    ];
    for (name, times) in &methods {
        let avg = times.iter().sum::<Duration>() / times.len() as u32;
        let min = times.iter().min().unwrap();
        let max = times.iter().max().unwrap();
        println!("{}:", name);
        println!(" Average: {:?}", avg);
        println!(" Min: {:?}", min);
        println!(" Max: {:?}", max);
        println!(" Std dev: {:?}", calculate_stddev(times));
        println!();
    }
    // Find overall winner based on average
    let averages: Vec<_> = methods
        .iter()
        .map(|(name, times)| {
            let avg = times.iter().sum::<Duration>() / times.len() as u32;
            (*name, avg)
        })
        .collect();
    let fastest = averages.iter().min_by_key(|(_, t)| t).unwrap();
    println!(
        "=== Winner (by average): {} - {:?} ===\n",
        fastest.0, fastest.1
    );
    // Report every other method's slowdown relative to the winner.
    for (name, time) in &averages {
        if time != &fastest.1 {
            let diff = time.as_secs_f64() / fastest.1.as_secs_f64();
            println!("{} is {:.2}x slower", name, diff);
        }
    }
    Ok(())
}
/// Method 1: one reader per vec created up front; every element is fetched by
/// an individual `read_at_unwrap` point lookup inside a rayon parallel
/// iterator (outputs first, then inputs, per simulated block).
fn run_method1(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Duration {
    let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
    let txoutindex_to_outputtype_reader = vecs.txoutindex_to_outputtype.create_reader();
    let txoutindex_to_typeindex_reader = vecs.txoutindex_to_typeindex.create_reader();
    let txinindex_to_outpoint_reader = vecs.txinindex_to_outpoint.create_reader();
    let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        // Process outputs
        let block_start = output_start_offset + (block_idx * outputs_per_block);
        let _outputs: Vec<_> = (block_start..(block_start + outputs_per_block))
            .into_par_iter()
            .map(|i| {
                (
                    vecs.txoutindex_to_value
                        .read_at_unwrap(i, &txoutindex_to_value_reader),
                    vecs.txoutindex_to_outputtype
                        .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
                    vecs.txoutindex_to_typeindex
                        .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
                )
            })
            .collect();
        // Process inputs
        let input_block_start = input_start_offset + (block_idx * inputs_per_block);
        let input_sum: u64 = (input_block_start..(input_block_start + inputs_per_block))
            .into_par_iter()
            .filter_map(|i| {
                let outpoint = vecs
                    .txinindex_to_outpoint
                    .read_at_unwrap(i, &txinindex_to_outpoint_reader);
                // Coinbase inputs have no previous output to resolve.
                if outpoint.is_coinbase() {
                    return None;
                }
                let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                    outpoint.txindex().to_usize(),
                    &txindex_to_first_txoutindex_reader,
                );
                let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    start_time.elapsed()
}
/// Method 2: read each block's rows sequentially through vec iterators into
/// in-memory `Vec`s, then process those buffers with parallel iterators.
fn run_method2(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Result<Duration> {
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        // Process outputs
        let block_start = brk_types::TxOutIndex::new(
            (output_start_offset + (block_idx * outputs_per_block)) as u64,
        );
        // Three sequential scans, one per column.
        let values: Vec<_> = vecs
            .txoutindex_to_value
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        let output_types: Vec<_> = vecs
            .txoutindex_to_outputtype
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        let typeindexes: Vec<_> = vecs
            .txoutindex_to_typeindex
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        // Zip the three buffers back into tuples in parallel.
        let _outputs: Vec<_> = (0..outputs_per_block)
            .into_par_iter()
            .map(|i| (values[i], output_types[i], typeindexes[i]))
            .collect();
        // Process inputs
        let input_block_start =
            TxInIndex::new((input_start_offset + (block_idx * inputs_per_block)) as u64);
        let outpoints: Vec<_> = vecs
            .txinindex_to_outpoint
            .iter()?
            .skip(input_block_start.to_usize())
            .take(inputs_per_block)
            .collect();
        // NOTE(review): these readers are recreated every block iteration;
        // method 7 hoists reader creation out of the loop for comparison.
        let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
        let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
        let input_sum: u64 = (0..outpoints.len())
            .into_par_iter()
            .filter_map(|i| {
                let outpoint = outpoints[i];
                // Coinbase inputs have no previous output to resolve.
                if outpoint.is_coinbase() {
                    return None;
                }
                let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                    outpoint.txindex().to_usize(),
                    &txindex_to_first_txoutindex_reader,
                );
                let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    Ok(start_time.elapsed())
}
/// Method 4: like method 2, but the three sequential output-column scans run
/// concurrently on scoped threads before the parallel processing phase.
fn run_method4(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Result<Duration> {
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        // Process outputs with parallel reads
        let block_start = brk_types::TxOutIndex::new(
            (output_start_offset + (block_idx * outputs_per_block)) as u64,
        );
        // One scoped thread per column; scope lets the closures borrow `vecs`.
        let (values, output_types, typeindexes) = thread::scope(|s| -> Result<_> {
            let h1 = s.spawn(|| -> Result<_> {
                Ok(vecs
                    .txoutindex_to_value
                    .iter()?
                    .skip(block_start.to_usize())
                    .take(outputs_per_block)
                    .collect::<Vec<_>>())
            });
            let h2 = s.spawn(|| -> Result<_> {
                Ok(vecs
                    .txoutindex_to_outputtype
                    .iter()?
                    .skip(block_start.to_usize())
                    .take(outputs_per_block)
                    .collect::<Vec<_>>())
            });
            let h3 = s.spawn(|| -> Result<_> {
                Ok(vecs
                    .txoutindex_to_typeindex
                    .iter()?
                    .skip(block_start.to_usize())
                    .take(outputs_per_block)
                    .collect::<Vec<_>>())
            });
            Ok((
                h1.join().unwrap()?,
                h2.join().unwrap()?,
                h3.join().unwrap()?,
            ))
        })?;
        let _outputs: Vec<_> = (0..outputs_per_block)
            .into_par_iter()
            .map(|i| (values[i], output_types[i], typeindexes[i]))
            .collect();
        // Process inputs
        let input_block_start =
            TxInIndex::new((input_start_offset + (block_idx * inputs_per_block)) as u64);
        let outpoints: Vec<_> = vecs
            .txinindex_to_outpoint
            .iter()?
            .skip(input_block_start.to_usize())
            .take(inputs_per_block)
            .collect();
        let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
        let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
        let input_sum: u64 = (0..outpoints.len())
            .into_par_iter()
            .filter_map(|i| {
                let outpoint = outpoints[i];
                // Coinbase inputs have no previous output to resolve.
                if outpoint.is_coinbase() {
                    return None;
                }
                let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                    outpoint.txindex().to_usize(),
                    &txindex_to_first_txoutindex_reader,
                );
                let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    Ok(start_time.elapsed())
}
/// Method 5: same point-lookup scheme as method 1, but with
/// `with_min_len(500)` so rayon hands each worker larger chunks and spends
/// less time on task scheduling.
fn run_method5(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Duration {
    let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
    let txoutindex_to_outputtype_reader = vecs.txoutindex_to_outputtype.create_reader();
    let txoutindex_to_typeindex_reader = vecs.txoutindex_to_typeindex.create_reader();
    let txinindex_to_outpoint_reader = vecs.txinindex_to_outpoint.create_reader();
    let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        // Process outputs with larger chunks
        let block_start = output_start_offset + (block_idx * outputs_per_block);
        let _outputs: Vec<_> = (block_start..(block_start + outputs_per_block))
            .into_par_iter()
            .with_min_len(500) // Larger chunks
            .map(|i| {
                (
                    vecs.txoutindex_to_value
                        .read_at_unwrap(i, &txoutindex_to_value_reader),
                    vecs.txoutindex_to_outputtype
                        .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
                    vecs.txoutindex_to_typeindex
                        .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
                )
            })
            .collect();
        // Process inputs with larger chunks
        let input_block_start = input_start_offset + (block_idx * inputs_per_block);
        let input_sum: u64 = (input_block_start..(input_block_start + inputs_per_block))
            .into_par_iter()
            .with_min_len(500) // Larger chunks
            .filter_map(|i| {
                let outpoint = vecs
                    .txinindex_to_outpoint
                    .read_at_unwrap(i, &txinindex_to_outpoint_reader);
                // Coinbase inputs have no previous output to resolve.
                if outpoint.is_coinbase() {
                    return None;
                }
                let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                    outpoint.txindex().to_usize(),
                    &txindex_to_first_txoutindex_reader,
                );
                let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    start_time.elapsed()
}
/// Method 6: sequential column reads, then a dedicated parallel "prefetch"
/// pass that resolves each outpoint's first txoutindex before a second
/// parallel pass reads the values.
fn run_method6(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Result<Duration> {
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        // Read outputs sequentially
        let block_start = brk_types::TxOutIndex::new(
            (output_start_offset + (block_idx * outputs_per_block)) as u64,
        );
        let values: Vec<_> = vecs
            .txoutindex_to_value
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        let output_types: Vec<_> = vecs
            .txoutindex_to_outputtype
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        let typeindexes: Vec<_> = vecs
            .txoutindex_to_typeindex
            .iter()?
            .skip(block_start.to_usize())
            .take(outputs_per_block)
            .collect();
        // Read inputs sequentially
        let input_block_start =
            TxInIndex::new((input_start_offset + (block_idx * inputs_per_block)) as u64);
        let outpoints: Vec<_> = vecs
            .txinindex_to_outpoint
            .iter()?
            .skip(input_block_start.to_usize())
            .take(inputs_per_block)
            .collect();
        let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
        let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
        // Prefetch all first_txoutindexes in parallel (None marks coinbase).
        let first_txoutindexes: Vec<Option<_>> =
            outpoints
                .par_iter()
                .map(|op| {
                    if op.is_coinbase() {
                        return None;
                    }
                    Some(vecs.txindex_to_first_txoutindex.read_at_unwrap(
                        op.txindex().to_usize(),
                        &txindex_to_first_txoutindex_reader,
                    ))
                })
                .collect();
        // Then read values in parallel
        let input_sum: u64 = outpoints
            .par_iter()
            .zip(first_txoutindexes.par_iter())
            .filter_map(|(op, first_opt)| {
                let first_txoutindex = first_opt.as_ref()?;
                let txoutindex = first_txoutindex.to_usize() + usize::from(op.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Output tuple assembly happens last in this variant.
        let _outputs: Vec<_> = (0..outputs_per_block)
            .into_par_iter()
            .map(|i| (values[i], output_types[i], typeindexes[i]))
            .collect();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    Ok(start_time.elapsed())
}
/// Method 7: the same per-index parallel read pattern as method 1, with all
/// readers created once before the timed loop (the variable under test
/// relative to variants that recreate readers per block).
fn run_method7(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Duration {
    // Create readers ONCE outside loop
    let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
    let txoutindex_to_outputtype_reader = vecs.txoutindex_to_outputtype.create_reader();
    let txoutindex_to_typeindex_reader = vecs.txoutindex_to_typeindex.create_reader();
    let txinindex_to_outpoint_reader = vecs.txinindex_to_outpoint.create_reader();
    let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        let block_start = output_start_offset + (block_idx * outputs_per_block);
        let _outputs: Vec<_> = (block_start..(block_start + outputs_per_block))
            .into_par_iter()
            .map(|i| {
                (
                    vecs.txoutindex_to_value
                        .read_at_unwrap(i, &txoutindex_to_value_reader),
                    vecs.txoutindex_to_outputtype
                        .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
                    vecs.txoutindex_to_typeindex
                        .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
                )
            })
            .collect();
        let input_block_start = input_start_offset + (block_idx * inputs_per_block);
        let input_sum: u64 = (input_block_start..(input_block_start + inputs_per_block))
            .into_par_iter()
            .filter_map(|i| {
                let outpoint = vecs
                    .txinindex_to_outpoint
                    .read_at_unwrap(i, &txinindex_to_outpoint_reader);
                // Coinbase inputs have no previous output to resolve.
                if outpoint.is_coinbase() {
                    return None;
                }
                let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                    outpoint.txindex().to_usize(),
                    &txindex_to_first_txoutindex_reader,
                );
                let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                let value = vecs
                    .txoutindex_to_value
                    .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                Some(u64::from(value))
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    start_time.elapsed()
}
/// Method 8: splits each block's index range into fixed `BULK_SIZE` chunks
/// with `par_chunks`; each rayon task then reads its chunk sequentially.
fn run_method8(
    vecs: &brk_indexer::Vecs,
    num_blocks: usize,
    outputs_per_block: usize,
    inputs_per_block: usize,
    output_start_offset: usize,
    input_start_offset: usize,
) -> Duration {
    let txoutindex_to_value_reader = vecs.txoutindex_to_value.create_reader();
    let txoutindex_to_outputtype_reader = vecs.txoutindex_to_outputtype.create_reader();
    let txoutindex_to_typeindex_reader = vecs.txoutindex_to_typeindex.create_reader();
    let txinindex_to_outpoint_reader = vecs.txinindex_to_outpoint.create_reader();
    let txindex_to_first_txoutindex_reader = vecs.txindex_to_first_txoutindex.create_reader();
    // Number of consecutive indices each parallel task handles.
    const BULK_SIZE: usize = 64;
    let start_time = Instant::now();
    for block_idx in 0..num_blocks {
        let block_start = output_start_offset + (block_idx * outputs_per_block);
        // Process outputs in bulk chunks
        let _outputs: Vec<_> = (0..outputs_per_block)
            .collect::<Vec<_>>()
            .par_chunks(BULK_SIZE)
            .flat_map(|chunk| {
                chunk
                    .iter()
                    .map(|&offset| {
                        let i = block_start + offset;
                        (
                            vecs.txoutindex_to_value
                                .read_at_unwrap(i, &txoutindex_to_value_reader),
                            vecs.txoutindex_to_outputtype
                                .read_at_unwrap(i, &txoutindex_to_outputtype_reader),
                            vecs.txoutindex_to_typeindex
                                .read_at_unwrap(i, &txoutindex_to_typeindex_reader),
                        )
                    })
                    .collect::<Vec<_>>()
            })
            .collect();
        // Process inputs in bulk chunks
        let input_block_start = input_start_offset + (block_idx * inputs_per_block);
        let input_sum: u64 = (0..inputs_per_block)
            .collect::<Vec<_>>()
            .par_chunks(BULK_SIZE)
            .flat_map(|chunk| {
                chunk
                    .iter()
                    .filter_map(|&offset| {
                        let i = input_block_start + offset;
                        let outpoint = vecs
                            .txinindex_to_outpoint
                            .read_at_unwrap(i, &txinindex_to_outpoint_reader);
                        // Coinbase inputs have no previous output to resolve.
                        if outpoint.is_coinbase() {
                            return None;
                        }
                        let first_txoutindex = vecs.txindex_to_first_txoutindex.read_at_unwrap(
                            outpoint.txindex().to_usize(),
                            &txindex_to_first_txoutindex_reader,
                        );
                        let txoutindex = first_txoutindex.to_usize() + usize::from(outpoint.vout());
                        let value = vecs
                            .txoutindex_to_value
                            .read_at_unwrap(txoutindex, &txoutindex_to_value_reader);
                        Some(u64::from(value))
                    })
                    .collect::<Vec<_>>()
            })
            .sum();
        // Prevent the optimizer from discarding the work being timed.
        std::hint::black_box(input_sum);
    }
    start_time.elapsed()
}
/// Population standard deviation of a slice of durations.
///
/// Returns `Duration::ZERO` for an empty slice; previously an empty input
/// produced a NaN mean and `Duration::from_secs_f64` panics on NaN.
fn calculate_stddev(times: &[Duration]) -> Duration {
    if times.is_empty() {
        return Duration::ZERO;
    }
    let avg = times.iter().sum::<Duration>().as_secs_f64() / times.len() as f64;
    // Population variance: mean of squared deviations from the average.
    let variance = times
        .iter()
        .map(|t| {
            let diff = t.as_secs_f64() - avg;
            diff * diff
        })
        .sum::<f64>()
        / times.len() as f64;
    Duration::from_secs_f64(variance.sqrt())
}

View File

@@ -68,35 +68,50 @@ impl Indexes {
pub fn push_if_needed(&self, vecs: &mut Vecs) -> Result<()> {
let height = self.height;
vecs.height_to_first_txindex
vecs.tx
.height_to_first_txindex
.push_if_needed(height, self.txindex)?;
vecs.height_to_first_txinindex
vecs.txin
.height_to_first_txinindex
.push_if_needed(height, self.txinindex)?;
vecs.height_to_first_txoutindex
vecs.txout
.height_to_first_txoutindex
.push_if_needed(height, self.txoutindex)?;
vecs.height_to_first_emptyoutputindex
vecs.output
.height_to_first_emptyoutputindex
.push_if_needed(height, self.emptyoutputindex)?;
vecs.height_to_first_p2msoutputindex
vecs.output
.height_to_first_p2msoutputindex
.push_if_needed(height, self.p2msoutputindex)?;
vecs.height_to_first_opreturnindex
vecs.output
.height_to_first_opreturnindex
.push_if_needed(height, self.opreturnindex)?;
vecs.height_to_first_p2aaddressindex
vecs.address
.height_to_first_p2aaddressindex
.push_if_needed(height, self.p2aaddressindex)?;
vecs.height_to_first_unknownoutputindex
vecs.output
.height_to_first_unknownoutputindex
.push_if_needed(height, self.unknownoutputindex)?;
vecs.height_to_first_p2pk33addressindex
vecs.address
.height_to_first_p2pk33addressindex
.push_if_needed(height, self.p2pk33addressindex)?;
vecs.height_to_first_p2pk65addressindex
vecs.address
.height_to_first_p2pk65addressindex
.push_if_needed(height, self.p2pk65addressindex)?;
vecs.height_to_first_p2pkhaddressindex
vecs.address
.height_to_first_p2pkhaddressindex
.push_if_needed(height, self.p2pkhaddressindex)?;
vecs.height_to_first_p2shaddressindex
vecs.address
.height_to_first_p2shaddressindex
.push_if_needed(height, self.p2shaddressindex)?;
vecs.height_to_first_p2traddressindex
vecs.address
.height_to_first_p2traddressindex
.push_if_needed(height, self.p2traddressindex)?;
vecs.height_to_first_p2wpkhaddressindex
vecs.address
.height_to_first_p2wpkhaddressindex
.push_if_needed(height, self.p2wpkhaddressindex)?;
vecs.height_to_first_p2wshaddressindex
vecs.address
.height_to_first_p2wshaddressindex
.push_if_needed(height, self.p2wshaddressindex)?;
Ok(())
@@ -118,102 +133,106 @@ impl From<(Height, &mut Vecs, &Stores)> for Indexes {
}
let emptyoutputindex = starting_index(
&vecs.height_to_first_emptyoutputindex,
&vecs.emptyoutputindex_to_txindex,
&vecs.output.height_to_first_emptyoutputindex,
&vecs.output.emptyoutputindex_to_txindex,
height,
)
.unwrap();
let p2msoutputindex = starting_index(
&vecs.height_to_first_p2msoutputindex,
&vecs.p2msoutputindex_to_txindex,
&vecs.output.height_to_first_p2msoutputindex,
&vecs.output.p2msoutputindex_to_txindex,
height,
)
.unwrap();
let opreturnindex = starting_index(
&vecs.height_to_first_opreturnindex,
&vecs.opreturnindex_to_txindex,
&vecs.output.height_to_first_opreturnindex,
&vecs.output.opreturnindex_to_txindex,
height,
)
.unwrap();
let p2pk33addressindex = starting_index(
&vecs.height_to_first_p2pk33addressindex,
&vecs.p2pk33addressindex_to_p2pk33bytes,
&vecs.address.height_to_first_p2pk33addressindex,
&vecs.address.p2pk33addressindex_to_p2pk33bytes,
height,
)
.unwrap();
let p2pk65addressindex = starting_index(
&vecs.height_to_first_p2pk65addressindex,
&vecs.p2pk65addressindex_to_p2pk65bytes,
&vecs.address.height_to_first_p2pk65addressindex,
&vecs.address.p2pk65addressindex_to_p2pk65bytes,
height,
)
.unwrap();
let p2pkhaddressindex = starting_index(
&vecs.height_to_first_p2pkhaddressindex,
&vecs.p2pkhaddressindex_to_p2pkhbytes,
&vecs.address.height_to_first_p2pkhaddressindex,
&vecs.address.p2pkhaddressindex_to_p2pkhbytes,
height,
)
.unwrap();
let p2shaddressindex = starting_index(
&vecs.height_to_first_p2shaddressindex,
&vecs.p2shaddressindex_to_p2shbytes,
&vecs.address.height_to_first_p2shaddressindex,
&vecs.address.p2shaddressindex_to_p2shbytes,
height,
)
.unwrap();
let p2traddressindex = starting_index(
&vecs.height_to_first_p2traddressindex,
&vecs.p2traddressindex_to_p2trbytes,
&vecs.address.height_to_first_p2traddressindex,
&vecs.address.p2traddressindex_to_p2trbytes,
height,
)
.unwrap();
let p2wpkhaddressindex = starting_index(
&vecs.height_to_first_p2wpkhaddressindex,
&vecs.p2wpkhaddressindex_to_p2wpkhbytes,
&vecs.address.height_to_first_p2wpkhaddressindex,
&vecs.address.p2wpkhaddressindex_to_p2wpkhbytes,
height,
)
.unwrap();
let p2wshaddressindex = starting_index(
&vecs.height_to_first_p2wshaddressindex,
&vecs.p2wshaddressindex_to_p2wshbytes,
&vecs.address.height_to_first_p2wshaddressindex,
&vecs.address.p2wshaddressindex_to_p2wshbytes,
height,
)
.unwrap();
let p2aaddressindex = starting_index(
&vecs.height_to_first_p2aaddressindex,
&vecs.p2aaddressindex_to_p2abytes,
&vecs.address.height_to_first_p2aaddressindex,
&vecs.address.p2aaddressindex_to_p2abytes,
height,
)
.unwrap();
let txindex =
starting_index(&vecs.height_to_first_txindex, &vecs.txindex_to_txid, height).unwrap();
let txindex = starting_index(
&vecs.tx.height_to_first_txindex,
&vecs.tx.txindex_to_txid,
height,
)
.unwrap();
let txinindex = starting_index(
&vecs.height_to_first_txinindex,
&vecs.txinindex_to_outpoint,
&vecs.txin.height_to_first_txinindex,
&vecs.txin.txinindex_to_outpoint,
height,
)
.unwrap();
let txoutindex = starting_index(
&vecs.height_to_first_txoutindex,
&vecs.txoutindex_to_value,
&vecs.txout.height_to_first_txoutindex,
&vecs.txout.txoutindex_to_value,
height,
)
.unwrap();
let unknownoutputindex = starting_index(
&vecs.height_to_first_unknownoutputindex,
&vecs.unknownoutputindex_to_txindex,
&vecs.output.height_to_first_unknownoutputindex,
&vecs.output.unknownoutputindex_to_txindex,
height,
)
.unwrap();

View File

@@ -82,7 +82,7 @@ impl Indexer {
) -> Result<Indexes> {
debug!("Starting indexing...");
let last_blockhash = self.vecs.height_to_blockhash.iter()?.last();
let last_blockhash = self.vecs.block.height_to_blockhash.iter()?.last();
debug!("Last block hash found.");
let (starting_indexes, prev_hash) = if let Some(hash) = last_blockhash {

View File

@@ -92,18 +92,23 @@ impl<'a> BlockProcessor<'a> {
);
self.vecs
.block
.height_to_blockhash
.push_if_needed(height, blockhash.clone())?;
self.vecs
.block
.height_to_difficulty
.push_if_needed(height, self.block.header.difficulty_float().into())?;
self.vecs
.block
.height_to_timestamp
.push_if_needed(height, Timestamp::from(self.block.header.time))?;
self.vecs
.block
.height_to_total_size
.push_if_needed(height, self.block.total_size().into())?;
self.vecs
.block
.height_to_weight
.push_if_needed(height, self.block.weight().into())?;
@@ -226,6 +231,7 @@ impl<'a> BlockProcessor<'a> {
let txoutindex = self
.vecs
.tx
.txindex_to_first_txoutindex
.get_pushed_or_read(prev_txindex, &self.readers.txindex_to_first_txoutindex)?
.ok_or(Error::Str("Expect txoutindex to not be none"))?
@@ -234,6 +240,7 @@ impl<'a> BlockProcessor<'a> {
let outpoint = OutPoint::new(prev_txindex, vout);
let outputtype = self
.vecs
.txout
.txoutindex_to_outputtype
.get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_outputtype)?
.ok_or(Error::Str("Expect outputtype to not be none"))?;
@@ -241,6 +248,7 @@ impl<'a> BlockProcessor<'a> {
let address_info = if outputtype.is_address() {
let typeindex = self
.vecs
.txout
.txoutindex_to_typeindex
.get_pushed_or_read(txoutindex, &self.readers.txoutindex_to_typeindex)?
.ok_or(Error::Str("Expect typeindex to not be none"))?;
@@ -421,17 +429,21 @@ impl<'a> BlockProcessor<'a> {
if vout.is_zero() {
self.vecs
.tx
.txindex_to_first_txoutindex
.push_if_needed(txindex, txoutindex)?;
}
self.vecs
.txout
.txoutindex_to_value
.push_if_needed(txoutindex, sats)?;
self.vecs
.txout
.txoutindex_to_txindex
.push_if_needed(txoutindex, txindex)?;
self.vecs
.txout
.txoutindex_to_outputtype
.push_if_needed(txoutindex, outputtype)?;
@@ -462,24 +474,28 @@ impl<'a> BlockProcessor<'a> {
match outputtype {
OutputType::P2MS => {
self.vecs
.output
.p2msoutputindex_to_txindex
.push_if_needed(self.indexes.p2msoutputindex, txindex)?;
self.indexes.p2msoutputindex.copy_then_increment()
}
OutputType::OpReturn => {
self.vecs
.output
.opreturnindex_to_txindex
.push_if_needed(self.indexes.opreturnindex, txindex)?;
self.indexes.opreturnindex.copy_then_increment()
}
OutputType::Empty => {
self.vecs
.output
.emptyoutputindex_to_txindex
.push_if_needed(self.indexes.emptyoutputindex, txindex)?;
self.indexes.emptyoutputindex.copy_then_increment()
}
OutputType::Unknown => {
self.vecs
.output
.unknownoutputindex_to_txindex
.push_if_needed(self.indexes.unknownoutputindex, txindex)?;
self.indexes.unknownoutputindex.copy_then_increment()
@@ -489,6 +505,7 @@ impl<'a> BlockProcessor<'a> {
};
self.vecs
.txout
.txoutindex_to_typeindex
.push_if_needed(txoutindex, typeindex)?;
@@ -573,11 +590,13 @@ impl<'a> BlockProcessor<'a> {
if vin.is_zero() {
self.vecs
.tx
.txindex_to_first_txinindex
.push_if_needed(txindex, txinindex)?;
}
self.vecs
.txin
.txinindex_to_outpoint
.push_if_needed(txinindex, outpoint)?;
@@ -609,7 +628,7 @@ impl<'a> BlockProcessor<'a> {
return Ok(());
}
let mut txindex_to_txid_iter = self.vecs.txindex_to_txid.into_iter();
let mut txindex_to_txid_iter = self.vecs.tx.txindex_to_txid.into_iter();
for ct in txs.iter() {
let Some(prev_txindex) = ct.prev_txindex_opt else {
continue;
@@ -620,7 +639,7 @@ impl<'a> BlockProcessor<'a> {
continue;
}
let len = self.vecs.txindex_to_txid.len();
let len = self.vecs.tx.txindex_to_txid.len();
let prev_txid = txindex_to_txid_iter
.get(prev_txindex)
.ok_or(Error::Str("To have txid for txindex"))
@@ -653,24 +672,31 @@ impl<'a> BlockProcessor<'a> {
}
self.vecs
.tx
.txindex_to_height
.push_if_needed(ct.txindex, height)?;
self.vecs
.tx
.txindex_to_txversion
.push_if_needed(ct.txindex, ct.tx.version.into())?;
self.vecs
.tx
.txindex_to_txid
.push_if_needed(ct.txindex, ct.txid)?;
self.vecs
.tx
.txindex_to_rawlocktime
.push_if_needed(ct.txindex, ct.tx.lock_time.into())?;
self.vecs
.tx
.txindex_to_base_size
.push_if_needed(ct.txindex, ct.tx.base_size().into())?;
self.vecs
.tx
.txindex_to_total_size
.push_if_needed(ct.txindex, ct.tx.total_size().into())?;
self.vecs
.tx
.txindex_to_is_explicitly_rbf
.push_if_needed(ct.txindex, StoredBool::from(ct.tx.is_explicitly_rbf()))?;
}

View File

@@ -15,18 +15,18 @@ pub struct Readers {
impl Readers {
pub fn new(vecs: &Vecs) -> Self {
Self {
txindex_to_first_txoutindex: vecs.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_outputtype: vecs.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: vecs.txoutindex_to_typeindex.create_reader(),
txindex_to_first_txoutindex: vecs.tx.txindex_to_first_txoutindex.create_reader(),
txoutindex_to_outputtype: vecs.txout.txoutindex_to_outputtype.create_reader(),
txoutindex_to_typeindex: vecs.txout.txoutindex_to_typeindex.create_reader(),
addressbytes: ByAddressType {
p2pk65: vecs.p2pk65addressindex_to_p2pk65bytes.create_reader(),
p2pk33: vecs.p2pk33addressindex_to_p2pk33bytes.create_reader(),
p2pkh: vecs.p2pkhaddressindex_to_p2pkhbytes.create_reader(),
p2sh: vecs.p2shaddressindex_to_p2shbytes.create_reader(),
p2wpkh: vecs.p2wpkhaddressindex_to_p2wpkhbytes.create_reader(),
p2wsh: vecs.p2wshaddressindex_to_p2wshbytes.create_reader(),
p2tr: vecs.p2traddressindex_to_p2trbytes.create_reader(),
p2a: vecs.p2aaddressindex_to_p2abytes.create_reader(),
p2pk65: vecs.address.p2pk65addressindex_to_p2pk65bytes.create_reader(),
p2pk33: vecs.address.p2pk33addressindex_to_p2pk33bytes.create_reader(),
p2pkh: vecs.address.p2pkhaddressindex_to_p2pkhbytes.create_reader(),
p2sh: vecs.address.p2shaddressindex_to_p2shbytes.create_reader(),
p2wpkh: vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.create_reader(),
p2wsh: vecs.address.p2wshaddressindex_to_p2wshbytes.create_reader(),
p2tr: vecs.address.p2traddressindex_to_p2trbytes.create_reader(),
p2a: vecs.address.p2aaddressindex_to_p2abytes.create_reader(),
},
}
}

View File

@@ -204,7 +204,8 @@ impl Stores {
}
if starting_indexes.height != Height::ZERO {
vecs.height_to_blockhash
vecs.block
.height_to_blockhash
.iter()?
.skip(starting_indexes.height.to_usize())
.map(BlockHashPrefix::from)
@@ -212,7 +213,7 @@ impl Stores {
self.blockhashprefix_to_height.remove(prefix);
});
(starting_indexes.height.to_usize()..vecs.height_to_blockhash.len())
(starting_indexes.height.to_usize()..vecs.block.height_to_blockhash.len())
.map(Height::from)
.for_each(|h| {
self.height_to_coinbase_tag.remove(h);
@@ -240,7 +241,8 @@ impl Stores {
}
if starting_indexes.txindex != TxIndex::ZERO {
vecs.txindex_to_txid
vecs.tx
.txindex_to_txid
.iter()?
.enumerate()
.skip(starting_indexes.txindex.to_usize())
@@ -264,18 +266,22 @@ impl Stores {
}
if starting_indexes.txoutindex != TxOutIndex::ZERO {
let mut txoutindex_to_txindex_iter = vecs.txoutindex_to_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter = vecs.txindex_to_first_txoutindex.iter()?;
vecs.txoutindex_to_outputtype
let mut txoutindex_to_txindex_iter = vecs.txout.txoutindex_to_txindex.iter()?;
let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?;
vecs.txout
.txoutindex_to_outputtype
.iter()?
.enumerate()
.skip(starting_indexes.txoutindex.to_usize())
.zip(
vecs.txoutindex_to_typeindex
vecs.txout
.txoutindex_to_typeindex
.iter()?
.skip(starting_indexes.txoutindex.to_usize()),
)
.filter(|((_, outputtype), _)| outputtype.is_address())
.filter(|((_, outputtype), _): &((usize, OutputType), TypeIndex)| {
outputtype.is_address()
})
.for_each(|((txoutindex, addresstype), addressindex)| {
let txindex = txoutindex_to_txindex_iter.get_at_unwrap(txoutindex);
@@ -297,13 +303,14 @@ impl Stores {
});
// Add back outputs that were spent after the rollback point
let mut txindex_to_first_txoutindex_iter = vecs.txindex_to_first_txoutindex.iter()?;
let mut txoutindex_to_outputtype_iter = vecs.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = vecs.txoutindex_to_typeindex.iter()?;
vecs.txinindex_to_outpoint
let mut txindex_to_first_txoutindex_iter = vecs.tx.txindex_to_first_txoutindex.iter()?;
let mut txoutindex_to_outputtype_iter = vecs.txout.txoutindex_to_outputtype.iter()?;
let mut txoutindex_to_typeindex_iter = vecs.txout.txoutindex_to_typeindex.iter()?;
vecs.txin
.txinindex_to_outpoint
.iter()?
.skip(starting_indexes.txinindex.to_usize())
.for_each(|outpoint| {
.for_each(|outpoint: OutPoint| {
if outpoint.is_coinbase() {
return;
}

View File

@@ -1,538 +0,0 @@
use std::path::Path;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
AddressBytes, AddressHash, BlockHash, EmptyOutputIndex, Height, OpReturnIndex, OutPoint,
OutputType, P2AAddressIndex, P2ABytes, P2MSOutputIndex, P2PK33AddressIndex, P2PK33Bytes,
P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex, P2PKHBytes, P2SHAddressIndex, P2SHBytes,
P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes,
RawLockTime, Sats, StoredBool, StoredF64, StoredU32, StoredU64, Timestamp, TxInIndex, TxIndex,
TxOutIndex, TxVersion, Txid, TypeIndex, UnknownOutputIndex, Version, Weight,
};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PAGE_SIZE, PcoVec, Reader,
Stamp, TypedVecIterator,
};
use crate::Indexes;
/// Every stored vector owned by the indexer, in one flat struct.
///
/// Field names follow the `<key index>_to_<value>` convention: each field is
/// a persistent on-disk vector mapping that index to the named value.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Backing database shared by all vectors below.
    db: Database,
    pub emptyoutputindex_to_txindex: PcoVec<EmptyOutputIndex, TxIndex>,
    pub height_to_blockhash: BytesVec<Height, BlockHash>,
    pub height_to_difficulty: PcoVec<Height, StoredF64>,
    // First index of each entity kind created at a given block height.
    pub height_to_first_emptyoutputindex: PcoVec<Height, EmptyOutputIndex>,
    pub height_to_first_opreturnindex: PcoVec<Height, OpReturnIndex>,
    pub height_to_first_p2aaddressindex: PcoVec<Height, P2AAddressIndex>,
    pub height_to_first_p2msoutputindex: PcoVec<Height, P2MSOutputIndex>,
    pub height_to_first_p2pk33addressindex: PcoVec<Height, P2PK33AddressIndex>,
    pub height_to_first_p2pk65addressindex: PcoVec<Height, P2PK65AddressIndex>,
    pub height_to_first_p2pkhaddressindex: PcoVec<Height, P2PKHAddressIndex>,
    pub height_to_first_p2shaddressindex: PcoVec<Height, P2SHAddressIndex>,
    pub height_to_first_p2traddressindex: PcoVec<Height, P2TRAddressIndex>,
    pub height_to_first_p2wpkhaddressindex: PcoVec<Height, P2WPKHAddressIndex>,
    pub height_to_first_p2wshaddressindex: PcoVec<Height, P2WSHAddressIndex>,
    pub height_to_first_txindex: PcoVec<Height, TxIndex>,
    pub height_to_first_txinindex: PcoVec<Height, TxInIndex>,
    pub height_to_first_txoutindex: PcoVec<Height, TxOutIndex>,
    pub height_to_first_unknownoutputindex: PcoVec<Height, UnknownOutputIndex>,
    /// Doesn't guarantee continuity due to possible reorgs and more generally the nature of mining
    pub height_to_timestamp: PcoVec<Height, Timestamp>,
    pub height_to_total_size: PcoVec<Height, StoredU64>,
    pub height_to_weight: PcoVec<Height, Weight>,
    pub opreturnindex_to_txindex: PcoVec<OpReturnIndex, TxIndex>,
    // Per-address-type "address index -> raw script bytes" vectors.
    pub p2aaddressindex_to_p2abytes: BytesVec<P2AAddressIndex, P2ABytes>,
    pub p2msoutputindex_to_txindex: PcoVec<P2MSOutputIndex, TxIndex>,
    pub p2pk33addressindex_to_p2pk33bytes: BytesVec<P2PK33AddressIndex, P2PK33Bytes>,
    pub p2pk65addressindex_to_p2pk65bytes: BytesVec<P2PK65AddressIndex, P2PK65Bytes>,
    pub p2pkhaddressindex_to_p2pkhbytes: BytesVec<P2PKHAddressIndex, P2PKHBytes>,
    pub p2shaddressindex_to_p2shbytes: BytesVec<P2SHAddressIndex, P2SHBytes>,
    pub p2traddressindex_to_p2trbytes: BytesVec<P2TRAddressIndex, P2TRBytes>,
    pub p2wpkhaddressindex_to_p2wpkhbytes: BytesVec<P2WPKHAddressIndex, P2WPKHBytes>,
    pub p2wshaddressindex_to_p2wshbytes: BytesVec<P2WSHAddressIndex, P2WSHBytes>,
    // Per-transaction metadata.
    pub txindex_to_base_size: PcoVec<TxIndex, StoredU32>,
    pub txindex_to_first_txinindex: PcoVec<TxIndex, TxInIndex>,
    pub txindex_to_first_txoutindex: BytesVec<TxIndex, TxOutIndex>,
    pub txindex_to_height: PcoVec<TxIndex, Height>,
    pub txindex_to_is_explicitly_rbf: PcoVec<TxIndex, StoredBool>,
    pub txindex_to_rawlocktime: PcoVec<TxIndex, RawLockTime>,
    pub txindex_to_total_size: PcoVec<TxIndex, StoredU32>,
    pub txindex_to_txid: BytesVec<TxIndex, Txid>,
    pub txindex_to_txversion: PcoVec<TxIndex, TxVersion>,
    pub txinindex_to_outpoint: PcoVec<TxInIndex, OutPoint>,
    // Per-txout metadata.
    pub txoutindex_to_outputtype: BytesVec<TxOutIndex, OutputType>,
    pub txoutindex_to_txindex: PcoVec<TxOutIndex, TxIndex>,
    pub txoutindex_to_typeindex: BytesVec<TxOutIndex, TypeIndex>,
    pub txoutindex_to_value: BytesVec<TxOutIndex, Sats>,
    pub unknownoutputindex_to_txindex: PcoVec<UnknownOutputIndex, TxIndex>,
}
impl Vecs {
/// Open (or create) every stored vector under `parent/vecs`.
///
/// `forced_import` re-creates a vector when its on-disk layout does not
/// match `version`. After importing, regions no longer referenced by any
/// exported vector are dropped and the database is compacted.
///
/// NOTE(review): several vectors share a region name (every `*_to_txindex`
/// uses "txindex"; "first_txinindex", "first_txoutindex" and "total_size"
/// each appear twice) — this presumably relies on vecdb namespacing regions
/// by key/value type; confirm there is no collision.
pub fn forced_import(parent: &Path, version: Version) -> Result<Self> {
    let db = Database::open(&parent.join("vecs"))?;
    // Pre-size the database mapping up front to avoid repeated growth.
    db.set_min_len(PAGE_SIZE * 50_000_000)?;
    let this = Self {
        emptyoutputindex_to_txindex: PcoVec::forced_import(&db, "txindex", version)?,
        height_to_blockhash: BytesVec::forced_import(&db, "blockhash", version)?,
        height_to_difficulty: PcoVec::forced_import(&db, "difficulty", version)?,
        height_to_first_emptyoutputindex: PcoVec::forced_import(
            &db,
            "first_emptyoutputindex",
            version,
        )?,
        height_to_first_txinindex: PcoVec::forced_import(&db, "first_txinindex", version)?,
        height_to_first_opreturnindex: PcoVec::forced_import(
            &db,
            "first_opreturnindex",
            version,
        )?,
        height_to_first_txoutindex: PcoVec::forced_import(&db, "first_txoutindex", version)?,
        height_to_first_p2aaddressindex: PcoVec::forced_import(
            &db,
            "first_p2aaddressindex",
            version,
        )?,
        height_to_first_p2msoutputindex: PcoVec::forced_import(
            &db,
            "first_p2msoutputindex",
            version,
        )?,
        height_to_first_p2pk33addressindex: PcoVec::forced_import(
            &db,
            "first_p2pk33addressindex",
            version,
        )?,
        height_to_first_p2pk65addressindex: PcoVec::forced_import(
            &db,
            "first_p2pk65addressindex",
            version,
        )?,
        height_to_first_p2pkhaddressindex: PcoVec::forced_import(
            &db,
            "first_p2pkhaddressindex",
            version,
        )?,
        height_to_first_p2shaddressindex: PcoVec::forced_import(
            &db,
            "first_p2shaddressindex",
            version,
        )?,
        height_to_first_p2traddressindex: PcoVec::forced_import(
            &db,
            "first_p2traddressindex",
            version,
        )?,
        height_to_first_p2wpkhaddressindex: PcoVec::forced_import(
            &db,
            "first_p2wpkhaddressindex",
            version,
        )?,
        height_to_first_p2wshaddressindex: PcoVec::forced_import(
            &db,
            "first_p2wshaddressindex",
            version,
        )?,
        height_to_first_txindex: PcoVec::forced_import(&db, "first_txindex", version)?,
        height_to_first_unknownoutputindex: PcoVec::forced_import(
            &db,
            "first_unknownoutputindex",
            version,
        )?,
        height_to_timestamp: PcoVec::forced_import(&db, "timestamp", version)?,
        height_to_total_size: PcoVec::forced_import(&db, "total_size", version)?,
        height_to_weight: PcoVec::forced_import(&db, "weight", version)?,
        opreturnindex_to_txindex: PcoVec::forced_import(&db, "txindex", version)?,
        p2aaddressindex_to_p2abytes: BytesVec::forced_import(&db, "p2abytes", version)?,
        p2msoutputindex_to_txindex: PcoVec::forced_import(&db, "txindex", version)?,
        p2pk33addressindex_to_p2pk33bytes: BytesVec::forced_import(
            &db,
            "p2pk33bytes",
            version,
        )?,
        p2pk65addressindex_to_p2pk65bytes: BytesVec::forced_import(
            &db,
            "p2pk65bytes",
            version,
        )?,
        p2pkhaddressindex_to_p2pkhbytes: BytesVec::forced_import(&db, "p2pkhbytes", version)?,
        p2shaddressindex_to_p2shbytes: BytesVec::forced_import(&db, "p2shbytes", version)?,
        p2traddressindex_to_p2trbytes: BytesVec::forced_import(&db, "p2trbytes", version)?,
        p2wpkhaddressindex_to_p2wpkhbytes: BytesVec::forced_import(
            &db,
            "p2wpkhbytes",
            version,
        )?,
        p2wshaddressindex_to_p2wshbytes: BytesVec::forced_import(&db, "p2wshbytes", version)?,
        txindex_to_base_size: PcoVec::forced_import(&db, "base_size", version)?,
        txindex_to_height: PcoVec::forced_import(&db, "height", version)?,
        txindex_to_first_txinindex: PcoVec::forced_import(&db, "first_txinindex", version)?,
        txindex_to_first_txoutindex: BytesVec::forced_import(&db, "first_txoutindex", version)?,
        txindex_to_is_explicitly_rbf: PcoVec::forced_import(&db, "is_explicitly_rbf", version)?,
        txindex_to_rawlocktime: PcoVec::forced_import(&db, "rawlocktime", version)?,
        txindex_to_total_size: PcoVec::forced_import(&db, "total_size", version)?,
        txindex_to_txid: BytesVec::forced_import(&db, "txid", version)?,
        txindex_to_txversion: PcoVec::forced_import(&db, "txversion", version)?,
        txinindex_to_outpoint: PcoVec::forced_import(&db, "outpoint", version)?,
        txoutindex_to_outputtype: BytesVec::forced_import(&db, "outputtype", version)?,
        txoutindex_to_txindex: PcoVec::forced_import(&db, "txindex", version)?,
        txoutindex_to_typeindex: BytesVec::forced_import(&db, "typeindex", version)?,
        txoutindex_to_value: BytesVec::forced_import(&db, "value", version)?,
        unknownoutputindex_to_txindex: PcoVec::forced_import(&db, "txindex", version)?,
        db,
    };
    // Drop on-disk regions that no longer back any exported vector.
    this.db.retain_regions(
        this.iter_any_exportable()
            .flat_map(|v| v.region_names())
            .collect(),
    )?;
    this.db.compact()?;
    Ok(this)
}
/// Roll every vector back to `starting_indexes` (e.g. after a reorg or an
/// interrupted run) so indexing can resume from a consistent state.
///
/// Each vector is truncated to the length implied by its index and stamped
/// with the last height that is still fully persisted.
pub fn rollback_if_needed(&mut self, starting_indexes: &Indexes) -> Result<()> {
    // `starting_indexes.height` is the next height to index, so the last
    // fully saved height is the one before it (or zero at genesis).
    let saved_height = starting_indexes.height.decremented().unwrap_or_default();
    let &Indexes {
        emptyoutputindex,
        height,
        txinindex,
        opreturnindex,
        txoutindex,
        p2aaddressindex,
        p2msoutputindex,
        p2pk33addressindex,
        p2pk65addressindex,
        p2pkhaddressindex,
        p2shaddressindex,
        p2traddressindex,
        p2wpkhaddressindex,
        p2wshaddressindex,
        txindex,
        unknownoutputindex,
    } = starting_indexes;
    let stamp = u64::from(saved_height).into();
    self.emptyoutputindex_to_txindex
        .truncate_if_needed_with_stamp(emptyoutputindex, stamp)?;
    self.height_to_blockhash
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_difficulty
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_emptyoutputindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_txinindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_opreturnindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_txoutindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2aaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2msoutputindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2pk33addressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2pk65addressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2pkhaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2shaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2traddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2wpkhaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2wshaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_txindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_unknownoutputindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_timestamp
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_total_size
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_weight
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.txinindex_to_outpoint
        .truncate_if_needed_with_stamp(txinindex, stamp)?;
    self.opreturnindex_to_txindex
        .truncate_if_needed_with_stamp(opreturnindex, stamp)?;
    self.txoutindex_to_outputtype
        .truncate_if_needed_with_stamp(txoutindex, stamp)?;
    // FIX: `txoutindex_to_txindex` was never rolled back before, leaving
    // stale txout -> tx mappings behind after a reorg.
    self.txoutindex_to_txindex
        .truncate_if_needed_with_stamp(txoutindex, stamp)?;
    self.txoutindex_to_typeindex
        .truncate_if_needed_with_stamp(txoutindex, stamp)?;
    self.txoutindex_to_value
        .truncate_if_needed_with_stamp(txoutindex, stamp)?;
    self.p2aaddressindex_to_p2abytes
        .truncate_if_needed_with_stamp(p2aaddressindex, stamp)?;
    self.p2msoutputindex_to_txindex
        .truncate_if_needed_with_stamp(p2msoutputindex, stamp)?;
    self.p2pk33addressindex_to_p2pk33bytes
        .truncate_if_needed_with_stamp(p2pk33addressindex, stamp)?;
    self.p2pk65addressindex_to_p2pk65bytes
        .truncate_if_needed_with_stamp(p2pk65addressindex, stamp)?;
    self.p2pkhaddressindex_to_p2pkhbytes
        .truncate_if_needed_with_stamp(p2pkhaddressindex, stamp)?;
    self.p2shaddressindex_to_p2shbytes
        .truncate_if_needed_with_stamp(p2shaddressindex, stamp)?;
    self.p2traddressindex_to_p2trbytes
        .truncate_if_needed_with_stamp(p2traddressindex, stamp)?;
    self.p2wpkhaddressindex_to_p2wpkhbytes
        .truncate_if_needed_with_stamp(p2wpkhaddressindex, stamp)?;
    self.p2wshaddressindex_to_p2wshbytes
        .truncate_if_needed_with_stamp(p2wshaddressindex, stamp)?;
    self.txindex_to_base_size
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_first_txinindex
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_first_txoutindex
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    // FIX: `txindex_to_height` was also missing from the rollback even
    // though it is imported and flushed like every other tx vector.
    self.txindex_to_height
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_is_explicitly_rbf
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_rawlocktime
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_total_size
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_txid
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.txindex_to_txversion
        .truncate_if_needed_with_stamp(txindex, stamp)?;
    self.unknownoutputindex_to_txindex
        .truncate_if_needed_with_stamp(unknownoutputindex, stamp)?;
    Ok(())
}
/// Fetch the stored address bytes for `typeindex` of the given address type.
///
/// Reads go through `reader`; values pushed this session but not yet
/// flushed are also visible via `get_pushed_or_read`. Returns `Ok(None)`
/// when nothing has been stored at that index yet.
///
/// # Panics
/// Panics when `addresstype` is not an address output type.
pub fn get_addressbytes_by_type(
    &self,
    addresstype: OutputType,
    typeindex: TypeIndex,
    reader: &Reader,
) -> Result<Option<AddressBytes>> {
    let bytes_opt = match addresstype {
        OutputType::P2PK65 => self
            .p2pk65addressindex_to_p2pk65bytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2PK33 => self
            .p2pk33addressindex_to_p2pk33bytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2PKH => self
            .p2pkhaddressindex_to_p2pkhbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2SH => self
            .p2shaddressindex_to_p2shbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2WPKH => self
            .p2wpkhaddressindex_to_p2wpkhbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2WSH => self
            .p2wshaddressindex_to_p2wshbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2TR => self
            .p2traddressindex_to_p2trbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2A => self
            .p2aaddressindex_to_p2abytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        _ => unreachable!("get_addressbytes_by_type called with non-address type"),
    };
    Ok(bytes_opt)
}
/// Store the raw address bytes for `index`, dispatching on the byte variant.
///
/// Delegates to the per-address-type vector's `push_if_needed`, so pushing
/// an already-present index is a no-op.
pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> {
    match bytes {
        AddressBytes::P2PK65(b) => {
            self.p2pk65addressindex_to_p2pk65bytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2PK33(b) => {
            self.p2pk33addressindex_to_p2pk33bytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2PKH(b) => {
            self.p2pkhaddressindex_to_p2pkhbytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2SH(b) => {
            self.p2shaddressindex_to_p2shbytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2WPKH(b) => {
            self.p2wpkhaddressindex_to_p2wpkhbytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2WSH(b) => {
            self.p2wshaddressindex_to_p2wshbytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2TR(b) => {
            self.p2traddressindex_to_p2trbytes
                .push_if_needed(index.into(), *b)?;
        }
        AddressBytes::P2A(b) => {
            self.p2aaddressindex_to_p2abytes
                .push_if_needed(index.into(), *b)?;
        }
    }
    Ok(())
}
/// Flush every stored vector in parallel, stamping each with `height`,
/// then sync the backing database.
pub fn flush(&mut self, height: Height) -> Result<()> {
    let stamp = Stamp::from(height);
    self.iter_mut_any_stored_vec()
        .par_bridge()
        .try_for_each(|stored| stored.stamped_flush(stamp))?;
    self.db.flush()?;
    Ok(())
}
/// Lowest height from which every vector can safely resume.
///
/// A vector stamped at height `h` has block `h` fully persisted, so it can
/// resume at `h + 1`; an unstamped (zero) vector must start from genesis.
pub fn starting_height(&mut self) -> Height {
    self.iter_mut_any_stored_vec()
        .map(|vec| Height::from(vec.stamp()))
        .map(|h| if h == Height::ZERO { h } else { h.incremented() })
        .min()
        .unwrap()
}
/// Compact the backing database in place.
pub fn compact(&self) -> Result<()> {
    self.db.compact().map_err(Into::into)
}
/// Iterate address hashes starting from a given height (for rollback).
/// Returns an iterator of AddressHash values for all addresses of the given type
/// that were added at or after the given height.
pub fn iter_address_hashes_from(
    &self,
    address_type: OutputType,
    height: Height,
) -> Result<Box<dyn Iterator<Item = AddressHash> + '_>> {
    // Builds a lazy iterator: starting at the first address index recorded
    // for `height`, walk the bytes vector forward, hashing each entry,
    // until `iter.get(index)` returns None (end of vector).
    macro_rules! make_iter {
        ($height_vec:expr, $bytes_vec:expr) => {{
            match $height_vec.read_once(height) {
                Ok(mut index) => {
                    let mut iter = $bytes_vec.iter()?;
                    Ok(Box::new(std::iter::from_fn(move || {
                        iter.get(index).map(|typedbytes| {
                            let bytes = AddressBytes::from(typedbytes);
                            // Advance for the next call before yielding.
                            index.increment();
                            AddressHash::from(&bytes)
                        })
                    }))
                        as Box<dyn Iterator<Item = AddressHash> + '_>)
                }
                // No first-index entry at this height: nothing to yield.
                Err(_) => {
                    Ok(Box::new(std::iter::empty())
                        as Box<dyn Iterator<Item = AddressHash> + '_>)
                }
            }
        }};
    }
    match address_type {
        OutputType::P2PK65 => make_iter!(
            self.height_to_first_p2pk65addressindex,
            self.p2pk65addressindex_to_p2pk65bytes
        ),
        OutputType::P2PK33 => make_iter!(
            self.height_to_first_p2pk33addressindex,
            self.p2pk33addressindex_to_p2pk33bytes
        ),
        OutputType::P2PKH => make_iter!(
            self.height_to_first_p2pkhaddressindex,
            self.p2pkhaddressindex_to_p2pkhbytes
        ),
        OutputType::P2SH => make_iter!(
            self.height_to_first_p2shaddressindex,
            self.p2shaddressindex_to_p2shbytes
        ),
        OutputType::P2WPKH => make_iter!(
            self.height_to_first_p2wpkhaddressindex,
            self.p2wpkhaddressindex_to_p2wpkhbytes
        ),
        OutputType::P2WSH => make_iter!(
            self.height_to_first_p2wshaddressindex,
            self.p2wshaddressindex_to_p2wshbytes
        ),
        OutputType::P2TR => make_iter!(
            self.height_to_first_p2traddressindex,
            self.p2traddressindex_to_p2trbytes
        ),
        OutputType::P2A => make_iter!(
            self.height_to_first_p2aaddressindex,
            self.p2aaddressindex_to_p2abytes
        ),
        // Non-address types have no address hashes to iterate.
        _ => Ok(Box::new(std::iter::empty())),
    }
}
/// Iterate over every stored vector as `&mut dyn AnyStoredVec`
/// (used by `flush` and `starting_height`).
///
/// NOTE(review): this array must list every vector field of the struct —
/// keep it in sync when adding or removing vectors.
fn iter_mut_any_stored_vec(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
    [
        &mut self.emptyoutputindex_to_txindex as &mut dyn AnyStoredVec,
        &mut self.height_to_blockhash,
        &mut self.height_to_difficulty,
        &mut self.height_to_first_emptyoutputindex,
        &mut self.height_to_first_opreturnindex,
        &mut self.height_to_first_p2aaddressindex,
        &mut self.height_to_first_p2msoutputindex,
        &mut self.height_to_first_p2pk33addressindex,
        &mut self.height_to_first_p2pk65addressindex,
        &mut self.height_to_first_p2pkhaddressindex,
        &mut self.height_to_first_p2shaddressindex,
        &mut self.height_to_first_p2traddressindex,
        &mut self.height_to_first_p2wpkhaddressindex,
        &mut self.height_to_first_p2wshaddressindex,
        &mut self.height_to_first_txindex,
        &mut self.height_to_first_txinindex,
        &mut self.height_to_first_txoutindex,
        &mut self.height_to_first_unknownoutputindex,
        &mut self.height_to_timestamp,
        &mut self.height_to_total_size,
        &mut self.height_to_weight,
        &mut self.opreturnindex_to_txindex,
        &mut self.p2aaddressindex_to_p2abytes,
        &mut self.p2msoutputindex_to_txindex,
        &mut self.p2pk33addressindex_to_p2pk33bytes,
        &mut self.p2pk65addressindex_to_p2pk65bytes,
        &mut self.p2pkhaddressindex_to_p2pkhbytes,
        &mut self.p2shaddressindex_to_p2shbytes,
        &mut self.p2traddressindex_to_p2trbytes,
        &mut self.p2wpkhaddressindex_to_p2wpkhbytes,
        &mut self.p2wshaddressindex_to_p2wshbytes,
        &mut self.txindex_to_base_size,
        &mut self.txindex_to_first_txinindex,
        &mut self.txindex_to_first_txoutindex,
        &mut self.txindex_to_height,
        &mut self.txindex_to_is_explicitly_rbf,
        &mut self.txindex_to_rawlocktime,
        &mut self.txindex_to_total_size,
        &mut self.txindex_to_txid,
        &mut self.txindex_to_txversion,
        &mut self.txinindex_to_outpoint,
        &mut self.txoutindex_to_outputtype,
        &mut self.txoutindex_to_txindex,
        &mut self.txoutindex_to_typeindex,
        &mut self.txoutindex_to_value,
        &mut self.unknownoutputindex_to_txindex,
    ]
    .into_iter()
}
/// Borrow the backing database.
pub fn db(&self) -> &Database {
    &self.db
}
}

View File

@@ -0,0 +1,303 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
AddressBytes, AddressHash, Height, OutputType, P2AAddressIndex, P2ABytes, P2PK33AddressIndex,
P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex, P2PKHBytes, P2SHAddressIndex,
P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex, P2WPKHBytes, P2WSHAddressIndex,
P2WSHBytes, TypeIndex, Version,
};
use vecdb::{
AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Reader, Stamp,
TypedVecIterator,
};
/// Address-related stored vectors, grouped by what they map.
///
/// Holds, per address type, both the "first address index created at a
/// height" vectors and the "address index -> raw address bytes" vectors.
#[derive(Clone, Traversable)]
pub struct AddressVecs {
    // Height to first address index (per address type)
    pub height_to_first_p2pk65addressindex: PcoVec<Height, P2PK65AddressIndex>,
    pub height_to_first_p2pk33addressindex: PcoVec<Height, P2PK33AddressIndex>,
    pub height_to_first_p2pkhaddressindex: PcoVec<Height, P2PKHAddressIndex>,
    pub height_to_first_p2shaddressindex: PcoVec<Height, P2SHAddressIndex>,
    pub height_to_first_p2wpkhaddressindex: PcoVec<Height, P2WPKHAddressIndex>,
    pub height_to_first_p2wshaddressindex: PcoVec<Height, P2WSHAddressIndex>,
    pub height_to_first_p2traddressindex: PcoVec<Height, P2TRAddressIndex>,
    pub height_to_first_p2aaddressindex: PcoVec<Height, P2AAddressIndex>,
    // Address index to bytes (per address type)
    pub p2pk65addressindex_to_p2pk65bytes: BytesVec<P2PK65AddressIndex, P2PK65Bytes>,
    pub p2pk33addressindex_to_p2pk33bytes: BytesVec<P2PK33AddressIndex, P2PK33Bytes>,
    pub p2pkhaddressindex_to_p2pkhbytes: BytesVec<P2PKHAddressIndex, P2PKHBytes>,
    pub p2shaddressindex_to_p2shbytes: BytesVec<P2SHAddressIndex, P2SHBytes>,
    pub p2wpkhaddressindex_to_p2wpkhbytes: BytesVec<P2WPKHAddressIndex, P2WPKHBytes>,
    pub p2wshaddressindex_to_p2wshbytes: BytesVec<P2WSHAddressIndex, P2WSHBytes>,
    pub p2traddressindex_to_p2trbytes: BytesVec<P2TRAddressIndex, P2TRBytes>,
    pub p2aaddressindex_to_p2abytes: BytesVec<P2AAddressIndex, P2ABytes>,
}
impl AddressVecs {
/// Open (or re-create on version mismatch) every address vector in `db`.
pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
    Ok(Self {
        height_to_first_p2pk65addressindex: PcoVec::forced_import(
            db,
            "first_p2pk65addressindex",
            version,
        )?,
        height_to_first_p2pk33addressindex: PcoVec::forced_import(
            db,
            "first_p2pk33addressindex",
            version,
        )?,
        height_to_first_p2pkhaddressindex: PcoVec::forced_import(
            db,
            "first_p2pkhaddressindex",
            version,
        )?,
        height_to_first_p2shaddressindex: PcoVec::forced_import(
            db,
            "first_p2shaddressindex",
            version,
        )?,
        height_to_first_p2wpkhaddressindex: PcoVec::forced_import(
            db,
            "first_p2wpkhaddressindex",
            version,
        )?,
        height_to_first_p2wshaddressindex: PcoVec::forced_import(
            db,
            "first_p2wshaddressindex",
            version,
        )?,
        height_to_first_p2traddressindex: PcoVec::forced_import(
            db,
            "first_p2traddressindex",
            version,
        )?,
        height_to_first_p2aaddressindex: PcoVec::forced_import(
            db,
            "first_p2aaddressindex",
            version,
        )?,
        p2pk65addressindex_to_p2pk65bytes: BytesVec::forced_import(db, "p2pk65bytes", version)?,
        p2pk33addressindex_to_p2pk33bytes: BytesVec::forced_import(db, "p2pk33bytes", version)?,
        p2pkhaddressindex_to_p2pkhbytes: BytesVec::forced_import(db, "p2pkhbytes", version)?,
        p2shaddressindex_to_p2shbytes: BytesVec::forced_import(db, "p2shbytes", version)?,
        p2wpkhaddressindex_to_p2wpkhbytes: BytesVec::forced_import(db, "p2wpkhbytes", version)?,
        p2wshaddressindex_to_p2wshbytes: BytesVec::forced_import(db, "p2wshbytes", version)?,
        p2traddressindex_to_p2trbytes: BytesVec::forced_import(db, "p2trbytes", version)?,
        p2aaddressindex_to_p2abytes: BytesVec::forced_import(db, "p2abytes", version)?,
    })
}
/// Roll every address vector back so its length matches the given indexes.
///
/// `height` bounds the per-height vectors; each `*addressindex` argument
/// bounds the corresponding bytes vector; `stamp` records the rollback
/// point on disk.
#[allow(clippy::too_many_arguments)]
pub fn truncate(
    &mut self,
    height: Height,
    p2pk65addressindex: P2PK65AddressIndex,
    p2pk33addressindex: P2PK33AddressIndex,
    p2pkhaddressindex: P2PKHAddressIndex,
    p2shaddressindex: P2SHAddressIndex,
    p2wpkhaddressindex: P2WPKHAddressIndex,
    p2wshaddressindex: P2WSHAddressIndex,
    p2traddressindex: P2TRAddressIndex,
    p2aaddressindex: P2AAddressIndex,
    stamp: Stamp,
) -> Result<()> {
    self.height_to_first_p2pk65addressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2pk33addressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2pkhaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2shaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2wpkhaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2wshaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2traddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.height_to_first_p2aaddressindex
        .truncate_if_needed_with_stamp(height, stamp)?;
    self.p2pk65addressindex_to_p2pk65bytes
        .truncate_if_needed_with_stamp(p2pk65addressindex, stamp)?;
    self.p2pk33addressindex_to_p2pk33bytes
        .truncate_if_needed_with_stamp(p2pk33addressindex, stamp)?;
    self.p2pkhaddressindex_to_p2pkhbytes
        .truncate_if_needed_with_stamp(p2pkhaddressindex, stamp)?;
    self.p2shaddressindex_to_p2shbytes
        .truncate_if_needed_with_stamp(p2shaddressindex, stamp)?;
    self.p2wpkhaddressindex_to_p2wpkhbytes
        .truncate_if_needed_with_stamp(p2wpkhaddressindex, stamp)?;
    self.p2wshaddressindex_to_p2wshbytes
        .truncate_if_needed_with_stamp(p2wshaddressindex, stamp)?;
    self.p2traddressindex_to_p2trbytes
        .truncate_if_needed_with_stamp(p2traddressindex, stamp)?;
    self.p2aaddressindex_to_p2abytes
        .truncate_if_needed_with_stamp(p2aaddressindex, stamp)?;
    Ok(())
}
/// Iterate over every address vector as `&mut dyn AnyStoredVec`
/// (for stamping/flushing).
///
/// NOTE(review): this array must list every field of the struct — keep it
/// in sync when adding or removing vectors.
pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
    [
        &mut self.height_to_first_p2pk65addressindex as &mut dyn AnyStoredVec,
        &mut self.height_to_first_p2pk33addressindex,
        &mut self.height_to_first_p2pkhaddressindex,
        &mut self.height_to_first_p2shaddressindex,
        &mut self.height_to_first_p2wpkhaddressindex,
        &mut self.height_to_first_p2wshaddressindex,
        &mut self.height_to_first_p2traddressindex,
        &mut self.height_to_first_p2aaddressindex,
        &mut self.p2pk65addressindex_to_p2pk65bytes,
        &mut self.p2pk33addressindex_to_p2pk33bytes,
        &mut self.p2pkhaddressindex_to_p2pkhbytes,
        &mut self.p2shaddressindex_to_p2shbytes,
        &mut self.p2wpkhaddressindex_to_p2wpkhbytes,
        &mut self.p2wshaddressindex_to_p2wshbytes,
        &mut self.p2traddressindex_to_p2trbytes,
        &mut self.p2aaddressindex_to_p2abytes,
    ]
    .into_iter()
}
/// Fetch the stored address bytes for `typeindex` of the given address type.
///
/// Reads go through `reader`; values pushed this session but not yet
/// flushed are also visible via `get_pushed_or_read`. Returns `Ok(None)`
/// when nothing has been stored at that index yet.
///
/// # Panics
/// Panics when `addresstype` is not an address output type.
pub fn get_bytes_by_type(
    &self,
    addresstype: OutputType,
    typeindex: TypeIndex,
    reader: &Reader,
) -> Result<Option<AddressBytes>> {
    let bytes_opt = match addresstype {
        OutputType::P2PK65 => self
            .p2pk65addressindex_to_p2pk65bytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2PK33 => self
            .p2pk33addressindex_to_p2pk33bytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2PKH => self
            .p2pkhaddressindex_to_p2pkhbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2SH => self
            .p2shaddressindex_to_p2shbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2WPKH => self
            .p2wpkhaddressindex_to_p2wpkhbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2WSH => self
            .p2wshaddressindex_to_p2wshbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2TR => self
            .p2traddressindex_to_p2trbytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        OutputType::P2A => self
            .p2aaddressindex_to_p2abytes
            .get_pushed_or_read(typeindex.into(), reader)?
            .map(AddressBytes::from),
        _ => unreachable!("get_bytes_by_type called with non-address type"),
    };
    Ok(bytes_opt)
}
/// Append `bytes` at position `index` in the vector matching the bytes'
/// address type, skipping the push when the entry is already present.
pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> {
    // Every variant routes to the same call on a different field.
    macro_rules! push {
        ($vec:ident, $b:ident) => {
            self.$vec.push_if_needed(index.into(), *$b)?
        };
    }
    match bytes {
        AddressBytes::P2PK65(bytes) => push!(p2pk65addressindex_to_p2pk65bytes, bytes),
        AddressBytes::P2PK33(bytes) => push!(p2pk33addressindex_to_p2pk33bytes, bytes),
        AddressBytes::P2PKH(bytes) => push!(p2pkhaddressindex_to_p2pkhbytes, bytes),
        AddressBytes::P2SH(bytes) => push!(p2shaddressindex_to_p2shbytes, bytes),
        AddressBytes::P2WPKH(bytes) => push!(p2wpkhaddressindex_to_p2wpkhbytes, bytes),
        AddressBytes::P2WSH(bytes) => push!(p2wshaddressindex_to_p2wshbytes, bytes),
        AddressBytes::P2TR(bytes) => push!(p2traddressindex_to_p2trbytes, bytes),
        AddressBytes::P2A(bytes) => push!(p2aaddressindex_to_p2abytes, bytes),
    };
    Ok(())
}
/// Iterate address hashes starting from a given height (for rollback).
///
/// Returns an iterator of `AddressHash` values for all addresses of the given
/// type that were added at or after `height`. The starting type-index comes
/// from the per-height "first index" vec; the bytes vec is then walked from
/// that index to its end, hashing each entry. Non-address output types yield
/// an empty iterator.
pub fn iter_hashes_from(
    &self,
    address_type: OutputType,
    height: Height,
) -> Result<Box<dyn Iterator<Item = AddressHash> + '_>> {
    // Expands to: read the first type-index recorded at `height`, then stream
    // hashes of every bytes entry from that index onward.
    macro_rules! make_iter {
        ($height_vec:expr, $bytes_vec:expr) => {{
            match $height_vec.read_once(height) {
                Ok(mut index) => {
                    let mut iter = $bytes_vec.iter()?;
                    Ok(Box::new(std::iter::from_fn(move || {
                        // `get` returns None past the end of the vec, which
                        // terminates the streaming iterator.
                        iter.get(index).map(|typedbytes| {
                            let bytes = AddressBytes::from(typedbytes);
                            // Advance AFTER the read: `index` is the position
                            // just yielded.
                            index.increment();
                            AddressHash::from(&bytes)
                        })
                    }))
                        as Box<dyn Iterator<Item = AddressHash> + '_>)
                }
                // NOTE(review): any read_once error (e.g. `height` beyond the
                // vec) is treated as "nothing to iterate" — confirm this cannot
                // silently mask I/O errors during rollback.
                Err(_) => {
                    Ok(Box::new(std::iter::empty())
                        as Box<dyn Iterator<Item = AddressHash> + '_>)
                }
            }
        }};
    }
    match address_type {
        OutputType::P2PK65 => make_iter!(
            self.height_to_first_p2pk65addressindex,
            self.p2pk65addressindex_to_p2pk65bytes
        ),
        OutputType::P2PK33 => make_iter!(
            self.height_to_first_p2pk33addressindex,
            self.p2pk33addressindex_to_p2pk33bytes
        ),
        OutputType::P2PKH => make_iter!(
            self.height_to_first_p2pkhaddressindex,
            self.p2pkhaddressindex_to_p2pkhbytes
        ),
        OutputType::P2SH => make_iter!(
            self.height_to_first_p2shaddressindex,
            self.p2shaddressindex_to_p2shbytes
        ),
        OutputType::P2WPKH => make_iter!(
            self.height_to_first_p2wpkhaddressindex,
            self.p2wpkhaddressindex_to_p2wpkhbytes
        ),
        OutputType::P2WSH => make_iter!(
            self.height_to_first_p2wshaddressindex,
            self.p2wshaddressindex_to_p2wshbytes
        ),
        OutputType::P2TR => make_iter!(
            self.height_to_first_p2traddressindex,
            self.p2traddressindex_to_p2trbytes
        ),
        OutputType::P2A => make_iter!(
            self.height_to_first_p2aaddressindex,
            self.p2aaddressindex_to_p2abytes
        ),
        // Non-address types (empty, OP_RETURN, multisig, unknown) carry no
        // address hashes.
        _ => Ok(Box::new(std::iter::empty())),
    }
}
}

View File

@@ -0,0 +1,51 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{BlockHash, Height, StoredF64, StoredU64, Timestamp, Version, Weight};
use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp};
/// Per-block vectors: one entry per `Height`.
#[derive(Clone, Traversable)]
pub struct BlockVecs {
    pub height_to_blockhash: BytesVec<Height, BlockHash>,
    pub height_to_difficulty: PcoVec<Height, StoredF64>,
    /// Doesn't guarantee continuity due to possible reorgs and more generally the nature of mining
    pub height_to_timestamp: PcoVec<Height, Timestamp>,
    pub height_to_total_size: PcoVec<Height, StoredU64>,
    pub height_to_weight: PcoVec<Height, Weight>,
}

impl BlockVecs {
    /// Open (creating if necessary) every per-block vector inside `db`.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let height_to_blockhash = BytesVec::forced_import(db, "blockhash", version)?;
        let height_to_difficulty = PcoVec::forced_import(db, "difficulty", version)?;
        let height_to_timestamp = PcoVec::forced_import(db, "timestamp", version)?;
        let height_to_total_size = PcoVec::forced_import(db, "total_size", version)?;
        let height_to_weight = PcoVec::forced_import(db, "weight", version)?;
        Ok(Self {
            height_to_blockhash,
            height_to_difficulty,
            height_to_timestamp,
            height_to_total_size,
            height_to_weight,
        })
    }

    /// Truncate each per-height vector back to `height` (no-op when already
    /// short enough), recording `stamp` on every truncated vec.
    pub fn truncate(&mut self, height: Height, stamp: Stamp) -> Result<()> {
        // Destructure once so each field is borrowed by name.
        let Self {
            height_to_blockhash,
            height_to_difficulty,
            height_to_timestamp,
            height_to_total_size,
            height_to_weight,
        } = self;
        height_to_blockhash.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_difficulty.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_timestamp.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_total_size.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_weight.truncate_if_needed_with_stamp(height, stamp)?;
        Ok(())
    }

    /// Mutable, type-erased view over every vector, in field order.
    pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![
            &mut self.height_to_blockhash,
            &mut self.height_to_difficulty,
            &mut self.height_to_timestamp,
            &mut self.height_to_total_size,
            &mut self.height_to_weight,
        ];
        vecs.into_iter()
    }
}

View File

@@ -0,0 +1,169 @@
use std::path::Path;
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{AddressBytes, AddressHash, Height, OutputType, TypeIndex, Version};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, Database, Reader, Stamp, PAGE_SIZE};
mod address;
mod blocks;
mod output;
mod tx;
mod txin;
mod txout;
pub use address::*;
pub use blocks::*;
pub use output::*;
pub use tx::*;
pub use txin::*;
pub use txout::*;
use crate::Indexes;
/// All indexer vectors, grouped by domain entity and backed by one database.
#[derive(Clone, Traversable)]
pub struct Vecs {
    // Owning handle to the on-disk database all groups store their regions in.
    db: Database,
    pub block: BlockVecs,
    pub tx: TxVecs,
    pub txin: TxinVecs,
    pub txout: TxoutVecs,
    pub address: AddressVecs,
    pub output: OutputVecs,
}
impl Vecs {
/// Open (creating if necessary) the `vecs` database under `parent` and import
/// every vector group from it, then drop database regions no longer owned by
/// any known vec and compact the file.
pub fn forced_import(parent: &Path, version: Version) -> Result<Self> {
    let db = Database::open(&parent.join("vecs"))?;
    // NOTE(review): sets a very large minimum db length (PAGE_SIZE * 50M) —
    // confirm this sizing against expected chain growth / disk budget.
    db.set_min_len(PAGE_SIZE * 50_000_000)?;
    let block = BlockVecs::forced_import(&db, version)?;
    let tx = TxVecs::forced_import(&db, version)?;
    let txin = TxinVecs::forced_import(&db, version)?;
    let txout = TxoutVecs::forced_import(&db, version)?;
    let address = AddressVecs::forced_import(&db, version)?;
    let output = OutputVecs::forced_import(&db, version)?;
    let this = Self {
        db,
        block,
        tx,
        txin,
        txout,
        address,
        output,
    };
    // Keep only regions that belong to a currently-known vec; stale regions
    // from removed vecs are discarded before compaction.
    this.db.retain_regions(
        this.iter_any_exportable()
            .flat_map(|v| v.region_names())
            .collect(),
    )?;
    this.db.compact()?;
    Ok(this)
}
/// Truncate every vector group back to `starting_indexes`, undoing any
/// partially indexed data (e.g. after a reorg or interrupted run).
///
/// The stamp written on each truncated vec is the height *before*
/// `starting_indexes.height` (saturating to the default at height 0), i.e.
/// the last height that remains fully saved.
pub fn rollback_if_needed(&mut self, starting_indexes: &Indexes) -> Result<()> {
    // Stamp with the last fully indexed height, not the one being re-indexed.
    let saved_height = starting_indexes.height.decremented().unwrap_or_default();
    let stamp = Stamp::from(u64::from(saved_height));
    self.block.truncate(starting_indexes.height, stamp)?;
    self.tx
        .truncate(starting_indexes.height, starting_indexes.txindex, stamp)?;
    self.txin
        .truncate(starting_indexes.height, starting_indexes.txinindex, stamp)?;
    self.txout.truncate(
        starting_indexes.height,
        starting_indexes.txoutindex,
        stamp,
    )?;
    // Address vecs need one starting index per address type.
    self.address.truncate(
        starting_indexes.height,
        starting_indexes.p2pk65addressindex,
        starting_indexes.p2pk33addressindex,
        starting_indexes.p2pkhaddressindex,
        starting_indexes.p2shaddressindex,
        starting_indexes.p2wpkhaddressindex,
        starting_indexes.p2wshaddressindex,
        starting_indexes.p2traddressindex,
        starting_indexes.p2aaddressindex,
        stamp,
    )?;
    // Non-address output vecs likewise need one starting index per type.
    self.output.truncate(
        starting_indexes.height,
        starting_indexes.emptyoutputindex,
        starting_indexes.opreturnindex,
        starting_indexes.p2msoutputindex,
        starting_indexes.unknownoutputindex,
        stamp,
    )?;
    Ok(())
}
/// Get address bytes by output type; delegates to the address vec group.
/// Returns `Ok(None)` if the index doesn't exist yet.
pub fn get_addressbytes_by_type(
    &self,
    addresstype: OutputType,
    typeindex: TypeIndex,
    reader: &Reader,
) -> Result<Option<AddressBytes>> {
    self.address.get_bytes_by_type(addresstype, typeindex, reader)
}
/// Append address bytes at `index` if not already stored; delegates to the
/// address vec group.
pub fn push_bytes_if_needed(&mut self, index: TypeIndex, bytes: AddressBytes) -> Result<()> {
    self.address.push_bytes_if_needed(index, bytes)
}
/// Flush every stored vec in parallel, stamping each with `height`, then
/// flush the underlying database.
pub fn flush(&mut self, height: Height) -> Result<()> {
    // The stamp is identical for every vec; build it once outside the closure.
    let stamp = Stamp::from(height);
    self.iter_mut_any_stored_vec()
        .par_bridge()
        .try_for_each(|vec| vec.stamped_flush(stamp))?;
    self.db.flush()?;
    Ok(())
}
/// The height at which indexing should resume: the minimum over all stored
/// vecs of "stamped height + 1". A zero stamp is treated as "never flushed",
/// so that vec forces a restart from height 0.
pub fn starting_height(&mut self) -> Height {
    self.iter_mut_any_stored_vec()
        .map(|vec| Height::from(vec.stamp()))
        .map(|height| {
            if height > Height::ZERO {
                // Resume AFTER the last stamped height.
                height.incremented()
            } else {
                height
            }
        })
        .min()
        // Invariant: every group owns at least one vec, so the iterator is
        // never empty.
        .expect("Vecs always contains at least one stored vec")
}
/// Compact the underlying database file, reclaiming unused space.
pub fn compact(&self) -> Result<()> {
    self.db.compact()?;
    Ok(())
}
/// Iterate address hashes of `address_type` added at or after `height`
/// (used for rollback); delegates to the address vec group.
pub fn iter_address_hashes_from(
    &self,
    address_type: OutputType,
    height: Height,
) -> Result<Box<dyn Iterator<Item = AddressHash> + '_>> {
    self.address.iter_hashes_from(address_type, height)
}
fn iter_mut_any_stored_vec(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
self.block
.iter_mut_any()
.chain(self.tx.iter_mut_any())
.chain(self.txin.iter_mut_any())
.chain(self.txout.iter_mut_any())
.chain(self.address.iter_mut_any())
.chain(self.output.iter_mut_any())
}
/// Read-only access to the underlying database handle.
pub fn db(&self) -> &Database {
    &self.db
}
}

View File

@@ -0,0 +1,93 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
EmptyOutputIndex, Height, OpReturnIndex, P2MSOutputIndex, TxIndex, UnknownOutputIndex, Version,
};
use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp};
/// Vectors for the non-address output types (empty, OP_RETURN, bare multisig,
/// unknown): per-height first-index markers plus per-output tx back-references.
#[derive(Clone, Traversable)]
pub struct OutputVecs {
    // Height to first output index (per output type)
    pub height_to_first_emptyoutputindex: PcoVec<Height, EmptyOutputIndex>,
    pub height_to_first_opreturnindex: PcoVec<Height, OpReturnIndex>,
    pub height_to_first_p2msoutputindex: PcoVec<Height, P2MSOutputIndex>,
    pub height_to_first_unknownoutputindex: PcoVec<Height, UnknownOutputIndex>,
    // Output index to txindex (per output type)
    pub emptyoutputindex_to_txindex: PcoVec<EmptyOutputIndex, TxIndex>,
    pub opreturnindex_to_txindex: PcoVec<OpReturnIndex, TxIndex>,
    pub p2msoutputindex_to_txindex: PcoVec<P2MSOutputIndex, TxIndex>,
    pub unknownoutputindex_to_txindex: PcoVec<UnknownOutputIndex, TxIndex>,
}

impl OutputVecs {
    /// Open (creating if necessary) every vector inside `db`.
    ///
    /// NOTE(review): the four `*_to_txindex` vecs are all registered under the
    /// name "txindex" — confirm vecdb also namespaces regions by key type,
    /// otherwise these collide.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let height_to_first_emptyoutputindex =
            PcoVec::forced_import(db, "first_emptyoutputindex", version)?;
        let height_to_first_opreturnindex =
            PcoVec::forced_import(db, "first_opreturnindex", version)?;
        let height_to_first_p2msoutputindex =
            PcoVec::forced_import(db, "first_p2msoutputindex", version)?;
        let height_to_first_unknownoutputindex =
            PcoVec::forced_import(db, "first_unknownoutputindex", version)?;
        let emptyoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version)?;
        let opreturnindex_to_txindex = PcoVec::forced_import(db, "txindex", version)?;
        let p2msoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version)?;
        let unknownoutputindex_to_txindex = PcoVec::forced_import(db, "txindex", version)?;
        Ok(Self {
            height_to_first_emptyoutputindex,
            height_to_first_opreturnindex,
            height_to_first_p2msoutputindex,
            height_to_first_unknownoutputindex,
            emptyoutputindex_to_txindex,
            opreturnindex_to_txindex,
            p2msoutputindex_to_txindex,
            unknownoutputindex_to_txindex,
        })
    }

    /// Truncate each vector back to its matching starting index (height-keyed
    /// vecs to `height`, per-type vecs to their own index), recording `stamp`.
    pub fn truncate(
        &mut self,
        height: Height,
        emptyoutputindex: EmptyOutputIndex,
        opreturnindex: OpReturnIndex,
        p2msoutputindex: P2MSOutputIndex,
        unknownoutputindex: UnknownOutputIndex,
        stamp: Stamp,
    ) -> Result<()> {
        // Destructure once so each field is borrowed by name.
        let Self {
            height_to_first_emptyoutputindex,
            height_to_first_opreturnindex,
            height_to_first_p2msoutputindex,
            height_to_first_unknownoutputindex,
            emptyoutputindex_to_txindex,
            opreturnindex_to_txindex,
            p2msoutputindex_to_txindex,
            unknownoutputindex_to_txindex,
        } = self;
        height_to_first_emptyoutputindex.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_first_opreturnindex.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_first_p2msoutputindex.truncate_if_needed_with_stamp(height, stamp)?;
        height_to_first_unknownoutputindex.truncate_if_needed_with_stamp(height, stamp)?;
        emptyoutputindex_to_txindex.truncate_if_needed_with_stamp(emptyoutputindex, stamp)?;
        opreturnindex_to_txindex.truncate_if_needed_with_stamp(opreturnindex, stamp)?;
        p2msoutputindex_to_txindex.truncate_if_needed_with_stamp(p2msoutputindex, stamp)?;
        unknownoutputindex_to_txindex.truncate_if_needed_with_stamp(unknownoutputindex, stamp)?;
        Ok(())
    }

    /// Mutable, type-erased view over every vector, in field order.
    pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![
            &mut self.height_to_first_emptyoutputindex,
            &mut self.height_to_first_opreturnindex,
            &mut self.height_to_first_p2msoutputindex,
            &mut self.height_to_first_unknownoutputindex,
            &mut self.emptyoutputindex_to_txindex,
            &mut self.opreturnindex_to_txindex,
            &mut self.p2msoutputindex_to_txindex,
            &mut self.unknownoutputindex_to_txindex,
        ];
        vecs.into_iter()
    }
}

View File

@@ -0,0 +1,78 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{
Height, RawLockTime, StoredBool, StoredU32, TxInIndex, TxIndex, TxOutIndex, TxVersion, Txid,
Version,
};
use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp};
/// Per-transaction vectors, keyed by `TxIndex`, plus a per-height first-index marker.
#[derive(Clone, Traversable)]
pub struct TxVecs {
    pub height_to_first_txindex: PcoVec<Height, TxIndex>,
    pub txindex_to_height: PcoVec<TxIndex, Height>,
    pub txindex_to_txid: BytesVec<TxIndex, Txid>,
    pub txindex_to_txversion: PcoVec<TxIndex, TxVersion>,
    pub txindex_to_rawlocktime: PcoVec<TxIndex, RawLockTime>,
    pub txindex_to_base_size: PcoVec<TxIndex, StoredU32>,
    pub txindex_to_total_size: PcoVec<TxIndex, StoredU32>,
    pub txindex_to_is_explicitly_rbf: PcoVec<TxIndex, StoredBool>,
    pub txindex_to_first_txinindex: PcoVec<TxIndex, TxInIndex>,
    // NOTE(review): stored as a BytesVec while first_txinindex is a PcoVec —
    // confirm the asymmetry is intentional.
    pub txindex_to_first_txoutindex: BytesVec<TxIndex, TxOutIndex>,
}

impl TxVecs {
    /// Open (creating if necessary) every per-transaction vector inside `db`.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let height_to_first_txindex = PcoVec::forced_import(db, "first_txindex", version)?;
        let txindex_to_height = PcoVec::forced_import(db, "height", version)?;
        let txindex_to_txid = BytesVec::forced_import(db, "txid", version)?;
        let txindex_to_txversion = PcoVec::forced_import(db, "txversion", version)?;
        let txindex_to_rawlocktime = PcoVec::forced_import(db, "rawlocktime", version)?;
        let txindex_to_base_size = PcoVec::forced_import(db, "base_size", version)?;
        let txindex_to_total_size = PcoVec::forced_import(db, "total_size", version)?;
        let txindex_to_is_explicitly_rbf =
            PcoVec::forced_import(db, "is_explicitly_rbf", version)?;
        let txindex_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version)?;
        let txindex_to_first_txoutindex =
            BytesVec::forced_import(db, "first_txoutindex", version)?;
        Ok(Self {
            height_to_first_txindex,
            txindex_to_height,
            txindex_to_txid,
            txindex_to_txversion,
            txindex_to_rawlocktime,
            txindex_to_base_size,
            txindex_to_total_size,
            txindex_to_is_explicitly_rbf,
            txindex_to_first_txinindex,
            txindex_to_first_txoutindex,
        })
    }

    /// Truncate the height-keyed vec back to `height` and every tx-keyed vec
    /// back to `txindex` (no-ops when already short enough), recording `stamp`.
    pub fn truncate(&mut self, height: Height, txindex: TxIndex, stamp: Stamp) -> Result<()> {
        // Destructure once so each field is borrowed by name.
        let Self {
            height_to_first_txindex,
            txindex_to_height,
            txindex_to_txid,
            txindex_to_txversion,
            txindex_to_rawlocktime,
            txindex_to_base_size,
            txindex_to_total_size,
            txindex_to_is_explicitly_rbf,
            txindex_to_first_txinindex,
            txindex_to_first_txoutindex,
        } = self;
        height_to_first_txindex.truncate_if_needed_with_stamp(height, stamp)?;
        txindex_to_height.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_txid.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_txversion.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_rawlocktime.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_base_size.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_total_size.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_is_explicitly_rbf.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_first_txinindex.truncate_if_needed_with_stamp(txindex, stamp)?;
        txindex_to_first_txoutindex.truncate_if_needed_with_stamp(txindex, stamp)?;
        Ok(())
    }

    /// Mutable, type-erased view over every vector, in field order.
    pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![
            &mut self.height_to_first_txindex,
            &mut self.txindex_to_height,
            &mut self.txindex_to_txid,
            &mut self.txindex_to_txversion,
            &mut self.txindex_to_rawlocktime,
            &mut self.txindex_to_base_size,
            &mut self.txindex_to_total_size,
            &mut self.txindex_to_is_explicitly_rbf,
            &mut self.txindex_to_first_txinindex,
            &mut self.txindex_to_first_txoutindex,
        ];
        vecs.into_iter()
    }
}

View File

@@ -0,0 +1,35 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, OutPoint, TxInIndex, Version};
use vecdb::{AnyStoredVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp};
/// Per-transaction-input vectors, keyed by `TxInIndex`, plus a per-height
/// first-index marker.
#[derive(Clone, Traversable)]
pub struct TxinVecs {
    pub height_to_first_txinindex: PcoVec<Height, TxInIndex>,
    pub txinindex_to_outpoint: PcoVec<TxInIndex, OutPoint>,
}

impl TxinVecs {
    /// Open (creating if necessary) every input vector inside `db`.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let height_to_first_txinindex = PcoVec::forced_import(db, "first_txinindex", version)?;
        let txinindex_to_outpoint = PcoVec::forced_import(db, "outpoint", version)?;
        Ok(Self {
            height_to_first_txinindex,
            txinindex_to_outpoint,
        })
    }

    /// Truncate the height-keyed vec back to `height` and the input-keyed vec
    /// back to `txinindex`, recording `stamp` on each.
    pub fn truncate(&mut self, height: Height, txinindex: TxInIndex, stamp: Stamp) -> Result<()> {
        let Self {
            height_to_first_txinindex,
            txinindex_to_outpoint,
        } = self;
        height_to_first_txinindex.truncate_if_needed_with_stamp(height, stamp)?;
        txinindex_to_outpoint.truncate_if_needed_with_stamp(txinindex, stamp)?;
        Ok(())
    }

    /// Mutable, type-erased view over both vectors, in field order.
    pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![
            &mut self.height_to_first_txinindex,
            &mut self.txinindex_to_outpoint,
        ];
        vecs.into_iter()
    }
}

View File

@@ -0,0 +1,50 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, OutputType, Sats, TxIndex, TxOutIndex, TypeIndex, Version};
use vecdb::{AnyStoredVec, BytesVec, Database, GenericStoredVec, ImportableVec, PcoVec, Stamp};
/// Per-transaction-output vectors, keyed by `TxOutIndex`, plus a per-height
/// first-index marker.
#[derive(Clone, Traversable)]
pub struct TxoutVecs {
    pub height_to_first_txoutindex: PcoVec<Height, TxOutIndex>,
    pub txoutindex_to_value: BytesVec<TxOutIndex, Sats>,
    pub txoutindex_to_outputtype: BytesVec<TxOutIndex, OutputType>,
    pub txoutindex_to_typeindex: BytesVec<TxOutIndex, TypeIndex>,
    pub txoutindex_to_txindex: PcoVec<TxOutIndex, TxIndex>,
}

impl TxoutVecs {
    /// Open (creating if necessary) every output vector inside `db`.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let height_to_first_txoutindex = PcoVec::forced_import(db, "first_txoutindex", version)?;
        let txoutindex_to_value = BytesVec::forced_import(db, "value", version)?;
        let txoutindex_to_outputtype = BytesVec::forced_import(db, "outputtype", version)?;
        let txoutindex_to_typeindex = BytesVec::forced_import(db, "typeindex", version)?;
        let txoutindex_to_txindex = PcoVec::forced_import(db, "txindex", version)?;
        Ok(Self {
            height_to_first_txoutindex,
            txoutindex_to_value,
            txoutindex_to_outputtype,
            txoutindex_to_typeindex,
            txoutindex_to_txindex,
        })
    }

    /// Truncate the height-keyed vec back to `height` and every output-keyed
    /// vec back to `txoutindex`, recording `stamp` on each.
    pub fn truncate(&mut self, height: Height, txoutindex: TxOutIndex, stamp: Stamp) -> Result<()> {
        let Self {
            height_to_first_txoutindex,
            txoutindex_to_value,
            txoutindex_to_outputtype,
            txoutindex_to_typeindex,
            txoutindex_to_txindex,
        } = self;
        height_to_first_txoutindex.truncate_if_needed_with_stamp(height, stamp)?;
        txoutindex_to_value.truncate_if_needed_with_stamp(txoutindex, stamp)?;
        txoutindex_to_outputtype.truncate_if_needed_with_stamp(txoutindex, stamp)?;
        txoutindex_to_typeindex.truncate_if_needed_with_stamp(txoutindex, stamp)?;
        txoutindex_to_txindex.truncate_if_needed_with_stamp(txoutindex, stamp)?;
        Ok(())
    }

    /// Mutable, type-erased view over every vector, in field order.
    pub fn iter_mut_any(&mut self) -> impl Iterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![
            &mut self.height_to_first_txoutindex,
            &mut self.txoutindex_to_value,
            &mut self.txoutindex_to_outputtype,
            &mut self.txoutindex_to_typeindex,
            &mut self.txoutindex_to_txindex,
        ];
        vecs.into_iter()
    }
}

View File

@@ -9,7 +9,6 @@ use brk_types::{
Address, AddressStats, Height, Index, IndexInfo, Limit, Metric, MetricCount, Transaction,
TreeNode, TxidPath,
};
#[cfg(feature = "tokio")]
use tokio::task::spawn_blocking;
use crate::{
@@ -18,7 +17,6 @@ use crate::{
};
#[derive(Clone)]
#[cfg(feature = "tokio")]
pub struct AsyncQuery(Query);
impl AsyncQuery {

View File

@@ -7,8 +7,10 @@ use std::{
use bitcoin::consensus::Decodable;
use brk_error::{Error, Result};
use brk_reader::XORIndex;
use brk_types::{Transaction, Txid, TxidPath, TxidPrefix};
use vecdb::TypedVecIterator;
use brk_types::{
Sats, Transaction, TxIn, TxIndex, TxOut, TxStatus, Txid, TxidPath, TxidPrefix, Vout, Weight,
};
use vecdb::{GenericStoredVec, TypedVecIterator};
use crate::Query;
@@ -20,7 +22,7 @@ pub fn get_transaction(TxidPath { txid }: TxidPath, query: &Query) -> Result<Tra
let txid = Txid::from(txid);
let prefix = TxidPrefix::from(&txid);
let indexer = query.indexer();
let Ok(Some(index)) = indexer
let Ok(Some(txindex)) = indexer
.stores
.txidprefix_to_txindex
.get(&prefix)
@@ -29,16 +31,29 @@ pub fn get_transaction(TxidPath { txid }: TxidPath, query: &Query) -> Result<Tra
return Err(Error::UnknownTxid);
};
let txid = indexer.vecs.txindex_to_txid.iter()?.get_unwrap(index);
get_transaction_by_index(txindex, query)
}
pub fn get_transaction_by_index(txindex: TxIndex, query: &Query) -> Result<Transaction> {
let indexer = query.indexer();
let reader = query.reader();
let computer = query.computer();
let position = computer.blks.txindex_to_position.iter()?.get_unwrap(index);
let len = indexer.vecs.txindex_to_total_size.iter()?.get_unwrap(index);
// Get tx metadata using read_once for single lookups
let txid = indexer.vecs.tx.txindex_to_txid.read_once(txindex)?;
let height = indexer.vecs.tx.txindex_to_height.read_once(txindex)?;
let version = indexer.vecs.tx.txindex_to_txversion.read_once(txindex)?;
let lock_time = indexer.vecs.tx.txindex_to_rawlocktime.read_once(txindex)?;
let total_size = indexer.vecs.tx.txindex_to_total_size.read_once(txindex)?;
let first_txinindex = indexer.vecs.tx.txindex_to_first_txinindex.read_once(txindex)?;
let position = computer.blks.txindex_to_position.read_once(txindex)?;
// Get block info for status
let block_hash = indexer.vecs.block.height_to_blockhash.read_once(height)?;
let block_time = indexer.vecs.block.height_to_timestamp.read_once(height)?;
// Read and decode the raw transaction from blk file
let blk_index_to_blk_path = reader.blk_index_to_blk_path();
let Some(blk_path) = blk_index_to_blk_path.get(&position.blk_index()) else {
return Err(Error::Str("Failed to get the correct blk file"));
};
@@ -57,22 +72,105 @@ pub fn get_transaction(TxidPath { txid }: TxidPath, query: &Query) -> Result<Tra
return Err(Error::Str("Failed to seek position in file"));
}
let mut buffer = vec![0u8; *len as usize];
let mut buffer = vec![0u8; *total_size as usize];
if file.read_exact(&mut buffer).is_err() {
return Err(Error::Str("Failed to read the transaction (read exact)"));
}
xori.bytes(&mut buffer, reader.xor_bytes());
let mut reader = Cursor::new(buffer);
let Ok(tx) = bitcoin::Transaction::consensus_decode(&mut reader) else {
let mut cursor = Cursor::new(buffer);
let Ok(tx) = bitcoin::Transaction::consensus_decode(&mut cursor) else {
return Err(Error::Str("Failed decode the transaction"));
};
todo!();
// For iterating through inputs, we need iterators (multiple lookups)
let mut txindex_to_txid_iter = indexer.vecs.tx.txindex_to_txid.iter()?;
let mut txindex_to_first_txoutindex_iter = indexer.vecs.tx.txindex_to_first_txoutindex.iter()?;
let mut txinindex_to_outpoint_iter = indexer.vecs.txin.txinindex_to_outpoint.iter()?;
let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?;
// Ok(TxInfo {
// txid,
// index,
// // tx
// })
// Build inputs with prevout information
let input: Vec<TxIn> = tx
.input
.iter()
.enumerate()
.map(|(i, txin)| {
let txinindex = first_txinindex + i;
let outpoint = txinindex_to_outpoint_iter.get_unwrap(txinindex);
let is_coinbase = outpoint.is_coinbase();
// Get prevout info if not coinbase
let (prev_txid, prev_vout, prevout) = if is_coinbase {
(Txid::COINBASE, Vout::MAX, None)
} else {
let prev_txindex = outpoint.txindex();
let prev_vout = outpoint.vout();
let prev_txid = txindex_to_txid_iter.get_unwrap(prev_txindex);
// Calculate the txoutindex for the prevout
let prev_first_txoutindex =
txindex_to_first_txoutindex_iter.get_unwrap(prev_txindex);
let prev_txoutindex = prev_first_txoutindex + prev_vout;
// Get the value of the prevout
let prev_value = txoutindex_to_value_iter.get_unwrap(prev_txoutindex);
// We don't have the script_pubkey stored directly, so we need to reconstruct
// For now, we'll get it from the decoded transaction's witness/scriptsig
// which can reveal the prevout script type, but the actual script needs
// to be fetched from the spending tx or reconstructed from address bytes
let prevout = Some(TxOut::from((
bitcoin::ScriptBuf::new(), // Placeholder - would need to reconstruct
prev_value,
)));
(prev_txid, prev_vout, prevout)
};
TxIn {
txid: prev_txid,
vout: prev_vout,
prevout,
script_sig: txin.script_sig.clone(),
script_sig_asm: (),
is_coinbase,
sequence: txin.sequence.0,
inner_redeem_script_asm: (),
}
})
.collect();
// Calculate weight before consuming tx.output
let weight = Weight::from(tx.weight());
// Build outputs
let output: Vec<TxOut> = tx.output.into_iter().map(TxOut::from).collect();
// Build status
let status = TxStatus {
confirmed: true,
block_height: Some(height),
block_hash: Some(block_hash),
block_time: Some(block_time),
};
let mut transaction = Transaction {
index: Some(txindex),
txid,
version,
lock_time,
total_size: *total_size as usize,
weight,
total_sigop_cost: 0, // Would need to calculate from scripts
fee: Sats::ZERO, // Will be computed below
input,
output,
status,
};
// Compute fee from inputs - outputs
transaction.compute_fee();
Ok(transaction)
}

View File

@@ -14,6 +14,7 @@ use brk_types::{
};
use vecdb::{AnyExportableVec, AnyStoredVec};
#[cfg(feature = "tokio")]
mod r#async;
mod chain;
mod deser;
@@ -22,6 +23,7 @@ mod pagination;
mod params;
mod vecs;
#[cfg(feature = "tokio")]
pub use r#async::*;
pub use output::{Output, Value};
pub use pagination::{PaginatedIndexParam, PaginatedMetrics, PaginationParam};
@@ -65,7 +67,7 @@ impl Query {
}
pub fn get_height(&self) -> Height {
Height::from(self.indexer().vecs.height_to_blockhash.stamp())
Height::from(self.indexer().vecs.block.height_to_blockhash.stamp())
}
pub fn get_address(&self, address: Address) -> Result<AddressStats> {

View File

@@ -16,6 +16,11 @@ use vecdb::{Bytes, Formattable};
#[repr(C)]
pub struct Txid([u8; 32]);
impl Txid {
    /// Sentinel "txid" for coinbase inputs: the all-zero hash a coinbase
    /// transaction carries as its previous-output txid.
    pub const COINBASE: Self = Self([0u8; 32]);
}
impl From<bitcoin::Txid> for Txid {
#[inline]
fn from(value: bitcoin::Txid) -> Self {