global: big snapshot

This commit is contained in:
nym21
2026-01-09 20:00:20 +01:00
parent cb0abc324e
commit 426d7797a3
442 changed files with 17952 additions and 20071 deletions

2
.gitignore vendored
View File

@@ -14,7 +14,9 @@ bridge/
_*
!__*.py
/*.md
/*.py
/api.json
/*.json
# Logs
*.log*

2080
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -43,7 +43,6 @@ bitcoincore-rpc = "0.19.0"
brk_alloc = { version = "0.1.0-alpha.2", path = "crates/brk_alloc" }
brk_bencher = { version = "0.1.0-alpha.2", path = "crates/brk_bencher" }
brk_bindgen = { version = "0.1.0-alpha.2", path = "crates/brk_bindgen" }
brk_bundler = { version = "0.1.0-alpha.2", path = "crates/brk_bundler" }
brk_cli = { version = "0.1.0-alpha.2", path = "crates/brk_cli" }
brk_client = { version = "0.1.0-alpha.2", path = "crates/brk_client" }
brk_cohort = { version = "0.1.0-alpha.2", path = "crates/brk_cohort" }
@@ -51,25 +50,23 @@ brk_computer = { version = "0.1.0-alpha.2", path = "crates/brk_computer" }
brk_error = { version = "0.1.0-alpha.2", path = "crates/brk_error" }
brk_fetcher = { version = "0.1.0-alpha.2", path = "crates/brk_fetcher" }
brk_indexer = { version = "0.1.0-alpha.2", path = "crates/brk_indexer" }
brk_query = { version = "0.1.0-alpha.2", path = "crates/brk_query", features = ["tokio"] }
brk_iterator = { version = "0.1.0-alpha.2", path = "crates/brk_iterator" }
brk_logger = { version = "0.1.0-alpha.2", path = "crates/brk_logger" }
brk_mcp = { version = "0.1.0-alpha.2", path = "crates/brk_mcp" }
brk_mempool = { version = "0.1.0-alpha.2", path = "crates/brk_mempool" }
brk_query = { version = "0.1.0-alpha.2", path = "crates/brk_query", features = ["tokio"] }
brk_reader = { version = "0.1.0-alpha.2", path = "crates/brk_reader" }
brk_rpc = { version = "0.1.0-alpha.2", path = "crates/brk_rpc" }
brk_server = { version = "0.1.0-alpha.2", path = "crates/brk_server" }
brk_store = { version = "0.1.0-alpha.2", path = "crates/brk_store" }
brk_types = { version = "0.1.0-alpha.2", path = "crates/brk_types" }
brk_traversable = { version = "0.1.0-alpha.2", path = "crates/brk_traversable", features = ["pco", "derive"] }
brk_traversable_derive = { version = "0.1.0-alpha.2", path = "crates/brk_traversable_derive" }
brk_types = { version = "0.1.0-alpha.2", path = "crates/brk_types" }
byteview = "0.10.0"
color-eyre = "0.6.5"
derive_more = { version = "2.1.1", features = ["deref", "deref_mut"] }
env_logger = "0.11.8"
fjall = "3.0.0"
jiff = "0.2.17"
log = "0.4.29"
fjall = "3.0.1"
jiff = "0.2.18"
minreq = { version = "2.14.1", features = ["https", "serde_json"] }
parking_lot = "0.12.5"
rayon = "1.11.0"
@@ -78,10 +75,11 @@ schemars = "1.2.0"
serde = "1.0.228"
serde_bytes = "0.11.19"
serde_derive = "1.0.228"
serde_json = { version = "1.0.148", features = ["float_roundtrip"] }
serde_json = { version = "1.0.149", features = ["float_roundtrip"] }
smallvec = "1.15.1"
tokio = { version = "1.49.0", features = ["rt-multi-thread"] }
# vecdb = { version = "0.5.4", features = ["derive", "serde_json", "pco", "schemars"] }
tracing = { version = "0.1", default-features = false, features = ["std"] }
# vecdb = { version = "0.5.6", features = ["derive", "serde_json", "pco", "schemars"] }
vecdb = { path = "../anydb/crates/vecdb", features = ["derive", "serde_json", "pco", "schemars"] }
# vecdb = { git = "https://github.com/anydb-rs/anydb", features = ["derive", "serde_json", "pco"] }

View File

@@ -12,7 +12,6 @@ build = "build.rs"
full = [
"bencher",
"binder",
"bundler",
"client",
"computer",
"error",
@@ -33,7 +32,6 @@ full = [
]
bencher = ["brk_bencher"]
binder = ["brk_bindgen"]
bundler = ["brk_bundler"]
client = ["brk_client"]
computer = ["brk_computer"]
error = ["brk_error"]
@@ -55,7 +53,6 @@ types = ["brk_types"]
[dependencies]
brk_bencher = { workspace = true, optional = true }
brk_bindgen = { workspace = true, optional = true }
brk_bundler = { workspace = true, optional = true }
brk_client = { workspace = true, optional = true }
brk_computer = { workspace = true, optional = true }
brk_error = { workspace = true, optional = true }

View File

@@ -24,7 +24,6 @@ use brk::types::Height;
|---------|-------|-------------|
| `bencher` | `brk_bencher` | Benchmarking utilities |
| `binder` | `brk_binder` | Client code generation |
| `bundler` | `brk_bundler` | JS bundling |
| `client` | `brk_client` | Generated Rust API client |
| `computer` | `brk_computer` | Metric computation |
| `error` | `brk_error` | Error types |

View File

@@ -8,10 +8,6 @@ pub use brk_bencher as bencher;
#[doc(inline)]
pub use brk_bindgen as binder;
#[cfg(feature = "bundler")]
#[doc(inline)]
pub use brk_bundler as bundler;
#[cfg(feature = "client")]
#[doc(inline)]
pub use brk_client as client;

View File

@@ -9,5 +9,5 @@ repository.workspace = true
[dependencies]
libmimalloc-sys = { version = "0.1.44", features = ["extended"] }
log = { workspace = true }
tracing = { workspace = true }
mimalloc = { version = "0.1.48", features = ["v3"] }

View File

@@ -199,7 +199,7 @@ fn resolve_branch_patterns(
for (child_name, child_node) in children {
let (rust_type, json_type, indexes, child_fields) = match child_node {
TreeNode::Leaf(leaf) => (
leaf.value_type().to_string(),
leaf.kind().to_string(),
schema_to_json_type(&leaf.schema),
leaf.indexes().clone(),
Vec::new(),

View File

@@ -35,7 +35,7 @@ pub fn get_node_fields(
.map(|(name, node)| {
let (rust_type, json_type, indexes) = match node {
TreeNode::Leaf(leaf) => (
leaf.value_type().to_string(),
leaf.kind().to_string(),
schema_to_json_type(&leaf.schema),
leaf.indexes().clone(),
),
@@ -228,7 +228,7 @@ pub fn get_fields_with_child_info(
.map(|(name, node)| {
let (rust_type, json_type, indexes, child_fields) = match node {
TreeNode::Leaf(leaf) => (
leaf.value_type().to_string(),
leaf.kind().to_string(),
schema_to_json_type(&leaf.schema),
leaf.indexes().clone(),
None,

View File

@@ -107,7 +107,11 @@ pub fn generate_main_client(
let pattern_lookup = metadata.pattern_lookup();
writeln!(output, "/**").unwrap();
writeln!(output, " * Main BRK client with catalog tree and API methods").unwrap();
writeln!(
output,
" * Main BRK client with catalog tree and API methods"
)
.unwrap();
writeln!(output, " * @extends BrkClientBase").unwrap();
writeln!(output, " */").unwrap();
writeln!(output, "class BrkClient extends BrkClientBase {{").unwrap();
@@ -136,53 +140,6 @@ pub fn generate_main_client(
generate_api_methods(output, endpoints);
// Instance method: mergeMetricPatterns
writeln!(output, r#"
/**
* Merge multiple MetricPatterns into a single pattern.
* Throws if any two patterns have overlapping indexes.
* @template T
* @param {{...MetricPattern<T>}} patterns - The patterns to merge
* @returns {{MetricPattern<T>}} A new merged pattern
*/
mergeMetricPatterns(...patterns) {{
if (patterns.length === 0) {{
throw new BrkError('mergeMetricPatterns requires at least one pattern');
}}
if (patterns.length === 1) {{
return patterns[0];
}}
const seenIndexes = /** @type {{Map<Index, string>}} */ (new Map());
const mergedBy = /** @type {{Partial<Record<Index, MetricEndpoint<T>>>}} */ ({{}});
for (const pattern of patterns) {{
for (const index of pattern.indexes()) {{
const existing = seenIndexes.get(index);
if (existing !== undefined) {{
throw new BrkError(`Index '${{index}}' exists in both '${{existing}}' and '${{pattern.name}}'`);
}}
seenIndexes.set(index, pattern.name);
Object.defineProperty(mergedBy, index, {{
get() {{ return pattern.get(index); }},
enumerable: true,
configurable: true,
}});
}}
}}
const allIndexes = /** @type {{Index[]}} */ ([...seenIndexes.keys()]);
const firstName = patterns[0].name;
return {{
name: firstName,
by: mergedBy,
indexes() {{ return allIndexes; }},
get(index) {{ return mergedBy[index]; }},
}};
}}
"#).unwrap();
writeln!(output, "}}\n").unwrap();
writeln!(output, "export {{ BrkClient, BrkError }};").unwrap();
@@ -216,7 +173,11 @@ fn generate_tree_initializer(
writeln!(
output,
"{}{}: create{}(this, '{}'){}",
indent_str, field_name, accessor.name, leaf.name(), comma
indent_str,
field_name,
accessor.name,
leaf.name(),
comma
)
.unwrap();
}

View File

@@ -123,17 +123,24 @@ fn endpoint_to_method_name(endpoint: &Endpoint) -> String {
fn build_method_params(endpoint: &Endpoint) -> String {
let mut params = Vec::new();
// Path params are always required
for param in &endpoint.path_params {
let safe_name = escape_python_keyword(&param.name);
let py_type = js_type_to_python(&param.param_type);
params.push(format!(", {}: {}", safe_name, py_type));
}
// Required query params must come before optional ones (Python syntax requirement)
for param in &endpoint.query_params {
if param.required {
let safe_name = escape_python_keyword(&param.name);
let py_type = js_type_to_python(&param.param_type);
if param.required {
params.push(format!(", {}: {}", safe_name, py_type));
} else {
}
}
for param in &endpoint.query_params {
if !param.required {
let safe_name = escape_python_keyword(&param.name);
let py_type = js_type_to_python(&param.param_type);
params.push(format!(", {}: Optional[{}] = None", safe_name, py_type));
}
}

View File

@@ -142,7 +142,7 @@ fn collect_leaf_type_schemas(node: &TreeNode, schemas: &mut TypeSchemas) {
collect_schema_definitions(&leaf.schema, schemas);
// Get the type name for this leaf
let type_name = extract_inner_type(leaf.value_type());
let type_name = extract_inner_type(leaf.kind());
if let Entry::Vacant(e) = schemas.entry(type_name) {
// Unwrap single-element allOf

View File

@@ -1,20 +0,0 @@
[package]
name = "brk_bundler"
description = "A thin wrapper around rolldown"
version.workspace = true
edition.workspace = true
license.workspace = true
homepage.workspace = true
repository.workspace = true
build = "build.rs"
[dependencies]
log = { workspace = true }
notify = "8.2.0"
# rolldown = { path = "../../../rolldown/crates/rolldown", package = "brk_rolldown" }
rolldown = { version = "0.7.0", package = "brk_rolldown" }
sugar_path = "1.2.1"
tokio = { workspace = true }
[dev-dependencies]
env_logger = { workspace = true }

View File

@@ -1,32 +0,0 @@
# brk_bundler
JavaScript bundling with watch mode for BRK web interfaces.
## What It Enables
Bundle and minify JavaScript modules using Rolldown, with file watching for development. Handles module copying, source map generation, and cache-busting via hashed filenames.
## Key Features
- **Rolldown integration**: Fast Rust-based bundler with tree-shaking and minification
- **Watch mode**: Rebuilds on file changes with live module syncing
- **Source maps**: Full debugging support in production builds
- **Cache busting**: Hashes main bundle filename, updates HTML references automatically
- **Service worker versioning**: Injects package version into service worker files
## Core API
```rust,ignore
// One-shot build
let dist = bundle(modules_path, websites_path, "src", false).await?;
// Watch mode for development
bundle(modules_path, websites_path, "src", true).await?;
```
## Build Pipeline
1. Copy shared modules to source scripts directory
2. Bundle with Rolldown (minified, with source maps)
3. Update `index.html` with hashed script references
4. Inject version into service worker

View File

@@ -1,8 +0,0 @@
fn main() {
let profile = std::env::var("PROFILE").unwrap_or_default();
if profile == "release" {
println!("cargo:rustc-flag=-C");
println!("cargo:rustc-flag=target-cpu=native");
}
}

View File

@@ -1,37 +0,0 @@
use std::{io, path::PathBuf, thread, time::Duration};
use brk_bundler::bundle;
fn find_dev_dirs() -> Option<(PathBuf, PathBuf)> {
let mut dir = std::env::current_dir().ok()?;
loop {
let websites = dir.join("websites");
let modules = dir.join("modules");
if websites.exists() && modules.exists() {
return Some((websites, modules));
}
// Stop at workspace root (crates/ indicates we're there)
if dir.join("crates").exists() {
return None;
}
dir = dir.parent()?.to_path_buf();
}
}
#[tokio::main]
async fn main() -> io::Result<()> {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("debug")).init();
let (websites_path, modules_path) =
find_dev_dirs().expect("Run from within the brk workspace");
let source_folder = "bitview";
let dist_path = bundle(&modules_path, &websites_path, source_folder, true).await?;
println!("Bundle created at: {}", dist_path.display());
println!("Watching for changes... (Ctrl+C to stop)");
loop {
thread::sleep(Duration::from_secs(60));
}
}

View File

@@ -1,246 +0,0 @@
#![doc = include_str!("../README.md")]
use std::{
fs, io,
path::{Path, PathBuf},
time::Duration,
};
use log::{debug, error, info};
use notify::{EventKind, PollWatcher, RecursiveMode, Watcher};
use rolldown::{
Bundler, BundlerConfig, BundlerOptions, InlineConstConfig, InlineConstMode, InlineConstOption,
OptimizationOption, RawMinifyOptions, SourceMapType,
};
use sugar_path::SugarPath;
const VERSION: &str = env!("CARGO_PKG_VERSION");
pub async fn bundle(
modules_path: &Path,
websites_path: &Path,
source_folder: &str,
watch: bool,
) -> io::Result<PathBuf> {
let relative_modules_path = modules_path;
let relative_source_path = websites_path.join(source_folder);
let relative_dist_path = websites_path.join("dist");
let absolute_modules_path = relative_modules_path.absolutize();
let absolute_modules_path_clone = absolute_modules_path.clone();
let absolute_websites_path = websites_path.absolutize();
let absolute_websites_path_clone = absolute_websites_path.clone();
let absolute_source_path = relative_source_path.absolutize();
let absolute_source_index_path = absolute_source_path.join("index.html");
let absolute_source_index_path_clone = absolute_source_index_path.clone();
let absolute_source_scripts_path = absolute_source_path.join("scripts");
let absolute_source_scripts_modules_path = absolute_source_scripts_path.join("modules");
let absolute_source_sw_path = absolute_source_path.join("service-worker.js");
let absolute_source_sw_path_clone = absolute_source_sw_path.clone();
let absolute_dist_path = relative_dist_path.absolutize();
let absolute_dist_scripts_path = absolute_dist_path.join("scripts");
let absolute_dist_scripts_entry_path = absolute_dist_scripts_path.join("entry.js");
let absolute_dist_scripts_entry_path_clone = absolute_dist_scripts_entry_path.clone();
let absolute_dist_index_path = absolute_dist_path.join("index.html");
let absolute_dist_sw_path = absolute_dist_path.join("service-worker.js");
info!("Bundling {source_folder}...");
info!(" modules: {absolute_modules_path:?}");
info!(" source: {absolute_source_path:?}");
info!(" dist: {absolute_dist_path:?}");
let _ = fs::remove_dir_all(&absolute_dist_path);
let _ = fs::remove_dir_all(&absolute_source_scripts_modules_path);
copy_dir_all(
&absolute_modules_path,
&absolute_source_scripts_modules_path,
)?;
copy_dir_all(&absolute_source_path, &absolute_dist_path)?;
fs::remove_dir_all(&absolute_dist_scripts_path)?;
fs::create_dir(&absolute_dist_scripts_path)?;
// dbg!(BundlerOptions::default());
let bundler_options = BundlerOptions {
input: Some(vec![format!("./{source_folder}/scripts/entry.js").into()]),
dir: Some("./dist/scripts".to_string()),
cwd: Some(absolute_websites_path),
minify: Some(RawMinifyOptions::Bool(true)),
sourcemap: Some(SourceMapType::File),
// advanced_chunks: Some(AdvancedChunksOptions {
// // min_size: Some(1000.0),
// min_share_count: Some(20),
// // min_module_size: S
// // include_dependencies_recursively: Some(true),
// ..Default::default()
// }),
//
// inline_dynamic_imports
// experimental: Some(ExperimentalOptions {
// strict_execution_order: Some(true),
// ..Default::default()
// }),
optimization: Some(OptimizationOption {
inline_const: Some(InlineConstOption::Config(InlineConstConfig {
mode: Some(InlineConstMode::All),
..Default::default()
})),
// Needs benchmarks
// pife_for_module_wrappers: Some(true),
..Default::default()
}),
..Default::default()
};
let mut bundler = Bundler::new(bundler_options.clone()).unwrap();
if let Err(error) = bundler.write().await {
error!("{error:?}");
}
let update_dist_index = move || {
let mut contents = fs::read_to_string(&absolute_source_index_path).unwrap();
if let Ok(entry) = fs::read_to_string(&absolute_dist_scripts_entry_path_clone)
&& let Some(start) = entry.find("main")
&& let Some(end) = entry.find(".js")
{
let main_hashed = &entry[start..end];
contents = contents.replace("/scripts/main.js", &format!("/scripts/{main_hashed}.js"));
}
let _ = fs::write(&absolute_dist_index_path, contents);
};
let update_source_sw = move || {
let contents = fs::read_to_string(&absolute_source_sw_path)
.unwrap()
.replace("__VERSION__", &format!("v{VERSION}"));
let _ = fs::write(&absolute_dist_sw_path, contents);
};
update_dist_index();
update_source_sw();
if !watch {
return Ok(relative_dist_path);
}
// Clone paths for the second watcher
let absolute_websites_path_clone2 = absolute_websites_path_clone.clone();
let absolute_modules_path_clone2 = absolute_modules_path_clone.clone();
tokio::spawn(async move {
let handle_event = {
let absolute_dist_scripts_entry_path = absolute_dist_scripts_entry_path.clone();
let absolute_source_index_path_clone = absolute_source_index_path_clone.clone();
let absolute_source_sw_path_clone = absolute_source_sw_path_clone.clone();
let absolute_modules_path = absolute_modules_path.clone();
let absolute_source_scripts_modules_path = absolute_source_scripts_modules_path.clone();
let absolute_source_path = absolute_source_path.clone();
let absolute_source_scripts_path = absolute_source_scripts_path.clone();
let absolute_dist_path = absolute_dist_path.clone();
let update_dist_index = update_dist_index.clone();
let update_source_sw = update_source_sw.clone();
move |path: PathBuf| {
let path = path.absolutize();
if path == absolute_dist_scripts_entry_path
|| path == absolute_source_index_path_clone
{
update_dist_index();
} else if path == absolute_source_sw_path_clone {
update_source_sw();
} else if let Ok(suffix) = path.strip_prefix(&absolute_modules_path) {
let dest = absolute_source_scripts_modules_path.join(suffix);
if path.is_file() {
debug!("Copying module: {path:?} -> {dest:?}");
let _ = fs::create_dir_all(dest.parent().unwrap());
if let Err(e) = fs::copy(&path, &dest) {
error!("Copy failed: {e}");
}
}
} else if let Ok(suffix) = path.strip_prefix(&absolute_source_path)
// scripts are handled by rolldown
&& !path.starts_with(&absolute_source_scripts_path)
{
let dist_path = absolute_dist_path.join(suffix);
if path.is_file() {
let _ = fs::create_dir_all(path.parent().unwrap());
let _ = fs::copy(&path, &dist_path);
}
}
}
};
// FSEvents watcher for instant response to manual saves
let handle_event_clone = handle_event.clone();
let mut fs_watcher = notify::recommended_watcher(
move |res: Result<notify::Event, notify::Error>| match res {
Ok(event) => match event.kind {
EventKind::Create(_) | EventKind::Modify(_) => {
event.paths.into_iter().for_each(&handle_event_clone);
}
_ => {}
},
Err(e) => error!("fs watch error: {e:?}"),
},
)
.unwrap();
fs_watcher
.watch(&absolute_websites_path_clone, RecursiveMode::Recursive)
.unwrap();
fs_watcher
.watch(&absolute_modules_path_clone, RecursiveMode::Recursive)
.unwrap();
// Poll watcher to catch programmatic edits (e.g., Claude Code's atomic writes)
let poll_config = notify::Config::default()
.with_poll_interval(Duration::from_secs(1));
let mut poll_watcher = PollWatcher::new(
move |res: Result<notify::Event, notify::Error>| match res {
Ok(event) => match event.kind {
EventKind::Create(_) | EventKind::Modify(_) => {
event.paths.into_iter().for_each(&handle_event);
}
_ => {}
},
Err(e) => error!("poll watch error: {e:?}"),
},
poll_config,
)
.unwrap();
poll_watcher
.watch(&absolute_websites_path_clone2, RecursiveMode::Recursive)
.unwrap();
poll_watcher
.watch(&absolute_modules_path_clone2, RecursiveMode::Recursive)
.unwrap();
let config = BundlerConfig::new(bundler_options, vec![]);
let watcher = rolldown::Watcher::new(config, None).unwrap();
watcher.start().await;
});
Ok(relative_dist_path)
}
fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
fs::create_dir_all(&dst)?;
for entry in fs::read_dir(src)? {
let entry = entry?;
let ty = entry.file_type()?;
if ty.is_dir() {
copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?;
} else {
fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
}
}
Ok(())
}

View File

@@ -9,8 +9,8 @@ repository.workspace = true
build = "build.rs"
[dependencies]
brk_alloc = { workspace = true }
brk_bindgen = { workspace = true }
brk_bundler = { workspace = true }
brk_computer = { workspace = true }
brk_error = { workspace = true }
brk_fetcher = { workspace = true }
@@ -24,8 +24,9 @@ brk_rpc = { workspace = true }
brk_server = { workspace = true }
clap = { version = "4.5.54", features = ["derive", "string"] }
color-eyre = { workspace = true }
brk_alloc = { workspace = true }
log = { workspace = true }
importmap = "0.1.1"
# importmap = { path = "../../../importmap" }
tracing = { workspace = true }
minreq = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }

View File

@@ -57,4 +57,3 @@ Full benchmark data: [`https://github.com/bitcoinresearchkit/benches/tree/main/b
- `brk_computer` for metric computation
- `brk_mempool` for mempool monitoring
- `brk_server` for HTTP API
- `brk_bundler` for web interface bundling

View File

@@ -9,7 +9,6 @@ use std::{
};
use brk_alloc::Mimalloc;
use brk_bundler::bundle;
use brk_computer::Computer;
use brk_error::Result;
use brk_indexer::Indexer;
@@ -18,7 +17,7 @@ use brk_mempool::Mempool;
use brk_query::AsyncQuery;
use brk_reader::Reader;
use brk_server::{Server, VERSION};
use log::info;
use tracing::info;
use vecdb::Exit;
mod config;
@@ -100,17 +99,11 @@ pub fn run() -> color_eyre::Result<()> {
}
};
let websites_path;
let modules_path;
if let Some((websites, modules)) = find_dev_dirs() {
websites_path = websites;
modules_path = modules;
let websites_path = if let Some((websites, _modules)) = find_dev_dirs() {
websites
} else {
let downloaded_brk_path = downloads_path.join(format!("brk-{VERSION}"));
let downloaded_websites_path = downloaded_brk_path.join("websites");
let downloaded_modules_path = downloaded_brk_path.join("modules");
if !fs::exists(&downloaded_websites_path)? {
info!("Downloading source from Github...");
@@ -128,15 +121,30 @@ pub fn run() -> color_eyre::Result<()> {
zip.extract(downloads_path).unwrap();
}
websites_path = downloaded_websites_path;
modules_path = downloaded_modules_path;
}
downloaded_websites_path
};
Some(websites_path.join(website.to_folder_name()))
} else {
None
};
// Generate import map for cache busting
if let Some(ref path) = bundle_path {
match importmap::ImportMap::scan(path, "") {
Ok(map) => {
let html_path = path.join("index.html");
if let Ok(html) = fs::read_to_string(&html_path)
&& let Some(updated) = map.update_html(&html)
{
let _ = fs::write(&html_path, updated);
info!("Updated importmap in index.html");
}
}
Err(e) => tracing::error!("Failed to generate importmap: {e}"),
}
}
let server = Server::new(&query, bundle_path);
tokio::spawn(async move {

File diff suppressed because it is too large Load Diff

View File

@@ -22,7 +22,7 @@ brk_store = { workspace = true }
brk_traversable = { workspace = true }
brk_types = { workspace = true }
derive_more = { workspace = true }
log = { workspace = true }
tracing = { workspace = true }
pco = "0.4.9"
rayon = { workspace = true }
rustc-hash = { workspace = true }

View File

@@ -9,7 +9,7 @@ use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use log::{debug, info};
use tracing::{debug, info};
use vecdb::Exit;
pub fn main() -> Result<()> {

View File

@@ -30,7 +30,7 @@ fn run() -> Result<()> {
let computer = Computer::forced_import(&outputs_dir, &indexer, Some(fetcher))?;
let _a = dbg!(computer.transactions.fees.txindex_to_fee.region().meta());
let _a = dbg!(computer.transactions.fees.fee.base.region().meta());
Ok(())
}

View File

@@ -14,7 +14,7 @@ use brk_indexer::Indexer;
use brk_iterator::Blocks;
use brk_reader::Reader;
use brk_rpc::{Auth, Client};
use log::{debug, info};
use tracing::{debug, info};
use vecdb::Exit;
pub fn main() -> color_eyre::Result<()> {

View File

@@ -1,11 +1,11 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Height, StoredU32};
use vecdb::{Exit, TypedVecIterator};
use vecdb::{EagerVec, Exit, PcoVec, TypedVecIterator};
use super::super::time;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast};
impl Vecs {
pub fn compute(
@@ -16,67 +16,104 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let mut height_to_timestamp_fixed_iter =
time.height_to_timestamp_fixed.into_iter();
let mut prev = Height::ZERO;
self.height_to_24h_block_count.compute_transform(
starting_indexes.height,
&time.height_to_timestamp_fixed,
|(h, t, ..)| {
while t.difference_in_days_between(height_to_timestamp_fixed_iter.get_unwrap(prev))
> 0
{
prev.increment();
if prev > h {
unreachable!()
}
}
(h, StoredU32::from(*h + 1 - *prev))
},
exit,
)?;
self.indexes_to_block_count
self.block_count
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_range(
starting_indexes.height,
&indexer.vecs.block.height_to_weight,
&indexer.vecs.blocks.weight,
|h| (h, StoredU32::from(1_u32)),
exit,
)?;
Ok(())
})?;
self.indexes_to_1w_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
7,
exit,
)?;
Ok(())
})?;
// Compute rolling window starts
self.compute_rolling_start(time, starting_indexes, exit, 1, |s| &mut s._24h_start)?;
self.compute_rolling_start(time, starting_indexes, exit, 7, |s| &mut s._1w_start)?;
self.compute_rolling_start(time, starting_indexes, exit, 30, |s| &mut s._1m_start)?;
self.compute_rolling_start(time, starting_indexes, exit, 365, |s| &mut s._1y_start)?;
self.indexes_to_1m_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
30,
// Compute rolling window block counts
self.compute_rolling_block_count(
indexes,
starting_indexes,
exit,
&self._24h_start.clone(),
|s| &mut s._24h_block_count,
)?;
Ok(())
})?;
self.indexes_to_1y_block_count.compute_all(starting_indexes, exit, |v| {
v.compute_sum(
starting_indexes.dateindex,
self.indexes_to_block_count.dateindex.sum.inner(),
365,
self.compute_rolling_block_count(
indexes,
starting_indexes,
exit,
&self._1w_start.clone(),
|s| &mut s._1w_block_count,
)?;
self.compute_rolling_block_count(
indexes,
starting_indexes,
exit,
&self._1m_start.clone(),
|s| &mut s._1m_block_count,
)?;
self.compute_rolling_block_count(
indexes,
starting_indexes,
exit,
&self._1y_start.clone(),
|s| &mut s._1y_block_count,
)?;
Ok(())
})?;
Ok(())
}
fn compute_rolling_start<F>(
&mut self,
time: &time::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
days: usize,
get_field: F,
) -> Result<()>
where
F: FnOnce(&mut Self) -> &mut EagerVec<PcoVec<Height, Height>>,
{
let mut iter = time.timestamp_fixed.into_iter();
let mut prev = Height::ZERO;
Ok(get_field(self).compute_transform(
starting_indexes.height,
&time.timestamp_fixed,
|(h, t, ..)| {
while t.difference_in_days_between(iter.get_unwrap(prev)) >= days {
prev.increment();
if prev > h {
unreachable!()
}
}
(h, prev)
},
exit,
)?)
}
fn compute_rolling_block_count<F>(
&mut self,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
start_height: &EagerVec<PcoVec<Height, Height>>,
get_field: F,
) -> Result<()>
where
F: FnOnce(&mut Self) -> &mut ComputedBlockLast<StoredU32>,
{
get_field(self).compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
start_height,
|(h, start, ..)| (h, StoredU32::from(*h + 1 - *start)),
exit,
)?;
Ok(())
})
}
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_types::{StoredU64, Version};
use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1};
use vecdb::{Database, ImportableVec};
use super::Vecs;
use crate::{
@@ -10,74 +10,48 @@ use crate::{
TARGET_BLOCKS_PER_YEAR,
},
indexes,
internal::{ComputedBlockSumCum, ComputedDateLast},
internal::{ComputedBlockLast, ComputedBlockSumCum, LazyPeriodVecs},
};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
dateindex_to_block_count_target: LazyVecFrom1::init(
block_count_target: LazyPeriodVecs::new(
"block_count_target",
version,
indexes.time.dateindex_to_dateindex.boxed_clone(),
indexes,
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DAY)),
),
weekindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.weekindex_to_weekindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_WEEK)),
),
monthindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.monthindex_to_monthindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_MONTH)),
),
quarterindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.quarterindex_to_quarterindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_QUARTER)),
),
semesterindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.semesterindex_to_semesterindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_SEMESTER)),
),
yearindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.yearindex_to_yearindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_YEAR)),
),
decadeindex_to_block_count_target: LazyVecFrom1::init(
"block_count_target",
version,
indexes.time.decadeindex_to_decadeindex.boxed_clone(),
|_, _| Some(StoredU64::from(TARGET_BLOCKS_PER_DECADE)),
),
height_to_24h_block_count: EagerVec::forced_import(db, "24h_block_count", version)?,
indexes_to_block_count: ComputedBlockSumCum::forced_import(
block_count: ComputedBlockSumCum::forced_import(db, "block_count", version, indexes)?,
_24h_start: ImportableVec::forced_import(db, "24h_start", version)?,
_1w_start: ImportableVec::forced_import(db, "1w_start", version)?,
_1m_start: ImportableVec::forced_import(db, "1m_start", version)?,
_1y_start: ImportableVec::forced_import(db, "1y_start", version)?,
_24h_block_count: ComputedBlockLast::forced_import(
db,
"block_count",
"24h_block_count",
version,
indexes,
)?,
indexes_to_1w_block_count: ComputedDateLast::forced_import(
_1w_block_count: ComputedBlockLast::forced_import(
db,
"1w_block_count",
version,
indexes,
)?,
indexes_to_1m_block_count: ComputedDateLast::forced_import(
_1m_block_count: ComputedBlockLast::forced_import(
db,
"1m_block_count",
version,
indexes,
)?,
indexes_to_1y_block_count: ComputedDateLast::forced_import(
_1y_block_count: ComputedBlockLast::forced_import(
db,
"1y_block_count",
version,

View File

@@ -1,28 +1,21 @@
use brk_traversable::Traversable;
use brk_types::{
DateIndex, DecadeIndex, MonthIndex, QuarterIndex, SemesterIndex, StoredU32, StoredU64,
WeekIndex, YearIndex,
};
use vecdb::LazyVecFrom1;
use brk_types::{Height, StoredU32, StoredU64};
use vecdb::{EagerVec, PcoVec};
use crate::internal::{ComputedBlockSumCum, ComputedDateLast};
use crate::internal::{ComputedBlockLast, ComputedBlockSumCum, LazyPeriodVecs};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub dateindex_to_block_count_target: LazyVecFrom1<DateIndex, StoredU64, DateIndex, DateIndex>,
pub weekindex_to_block_count_target: LazyVecFrom1<WeekIndex, StoredU64, WeekIndex, WeekIndex>,
pub monthindex_to_block_count_target:
LazyVecFrom1<MonthIndex, StoredU64, MonthIndex, MonthIndex>,
pub quarterindex_to_block_count_target:
LazyVecFrom1<QuarterIndex, StoredU64, QuarterIndex, QuarterIndex>,
pub semesterindex_to_block_count_target:
LazyVecFrom1<SemesterIndex, StoredU64, SemesterIndex, SemesterIndex>,
pub yearindex_to_block_count_target: LazyVecFrom1<YearIndex, StoredU64, YearIndex, YearIndex>,
pub decadeindex_to_block_count_target:
LazyVecFrom1<DecadeIndex, StoredU64, DecadeIndex, DecadeIndex>,
pub height_to_24h_block_count: vecdb::EagerVec<vecdb::PcoVec<brk_types::Height, StoredU32>>,
pub indexes_to_block_count: ComputedBlockSumCum<StoredU32>,
pub indexes_to_1w_block_count: ComputedDateLast<StoredU32>,
pub indexes_to_1m_block_count: ComputedDateLast<StoredU32>,
pub indexes_to_1y_block_count: ComputedDateLast<StoredU32>,
pub block_count_target: LazyPeriodVecs<StoredU64>,
pub block_count: ComputedBlockSumCum<StoredU32>,
// Rolling window starts (height-indexed only, no date aggregation needed)
pub _24h_start: EagerVec<PcoVec<Height, Height>>,
pub _1w_start: EagerVec<PcoVec<Height, Height>>,
pub _1m_start: EagerVec<PcoVec<Height, Height>>,
pub _1y_start: EagerVec<PcoVec<Height, Height>>,
// Rolling window block counts
pub _24h_block_count: ComputedBlockLast<StoredU32>,
pub _1w_block_count: ComputedBlockLast<StoredU32>,
pub _1m_block_count: ComputedBlockLast<StoredU32>,
pub _1y_block_count: ComputedBlockLast<StoredU32>,
}

View File

@@ -2,9 +2,9 @@ use brk_error::Result;
use brk_types::StoredU32;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use super::super::TARGET_BLOCKS_PER_DAY_F32;
use crate::{indexes, ComputeIndexes};
use super::Vecs;
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -14,12 +14,13 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let mut height_to_difficultyepoch_iter =
indexes.block.height_to_difficultyepoch.into_iter();
self.indexes_to_difficultyepoch.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
indexes.height.difficultyepoch.into_iter();
self.difficultyepoch
.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.dateindex.height_count.into_iter();
vec.compute_transform(
starting_indexes.dateindex,
&indexes.time.dateindex_to_first_height,
&indexes.dateindex.first_height,
|(di, height, ..)| {
(
di,
@@ -32,27 +33,35 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_blocks_before_next_difficulty_adjustment
.compute_all(indexes, starting_indexes, exit, |v| {
self.blocks_before_next_difficulty_adjustment.compute_all(
indexes,
starting_indexes,
exit,
|v| {
v.compute_transform(
starting_indexes.height,
&indexes.block.height_to_height,
&indexes.height.identity,
|(h, ..)| (h, StoredU32::from(h.left_before_next_diff_adj())),
exit,
)?;
Ok(())
})?;
},
)?;
self.indexes_to_days_before_next_difficulty_adjustment
.compute_all(indexes, starting_indexes, exit, |v| {
self.days_before_next_difficulty_adjustment.compute_all(
indexes,
starting_indexes,
exit,
|v| {
v.compute_transform(
starting_indexes.height,
&self.indexes_to_blocks_before_next_difficulty_adjustment.height,
&self.blocks_before_next_difficulty_adjustment.height,
|(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
exit,
)?;
Ok(())
})?;
},
)?;
Ok(())
}

View File

@@ -13,19 +13,19 @@ impl Vecs {
let v2 = Version::TWO;
Ok(Self {
indexes_to_difficultyepoch: ComputedDateLast::forced_import(
difficultyepoch: ComputedDateLast::forced_import(
db,
"difficultyepoch",
version,
indexes,
)?,
indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
blocks_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
db,
"blocks_before_next_difficulty_adjustment",
version + v2,
indexes,
)?,
indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
days_before_next_difficulty_adjustment: ComputedBlockLast::forced_import(
db,
"days_before_next_difficulty_adjustment",
version + v2,

View File

@@ -6,7 +6,7 @@ use crate::internal::{ComputedBlockLast, ComputedDateLast};
/// Difficulty epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_difficultyepoch: ComputedDateLast<DifficultyEpoch>,
pub indexes_to_blocks_before_next_difficulty_adjustment: ComputedBlockLast<StoredU32>,
pub indexes_to_days_before_next_difficulty_adjustment: ComputedBlockLast<StoredF32>,
pub difficultyepoch: ComputedDateLast<DifficultyEpoch>,
pub blocks_before_next_difficulty_adjustment: ComputedBlockLast<StoredU32>,
pub days_before_next_difficulty_adjustment: ComputedBlockLast<StoredF32>,
}

View File

@@ -2,9 +2,9 @@ use brk_error::Result;
use brk_types::StoredU32;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use super::super::TARGET_BLOCKS_PER_DAY_F32;
use crate::{indexes, ComputeIndexes};
use super::Vecs;
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -13,12 +13,13 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
let mut height_to_halvingepoch_iter = indexes.block.height_to_halvingepoch.into_iter();
self.indexes_to_halvingepoch.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.time.dateindex_to_height_count.into_iter();
let mut height_to_halvingepoch_iter = indexes.height.halvingepoch.into_iter();
self.halvingepoch
.compute_all(starting_indexes, exit, |vec| {
let mut height_count_iter = indexes.dateindex.height_count.into_iter();
vec.compute_transform(
starting_indexes.dateindex,
&indexes.time.dateindex_to_first_height,
&indexes.dateindex.first_height,
|(di, height, ..)| {
(
di,
@@ -31,35 +32,27 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_blocks_before_next_halving.compute_all(
indexes,
starting_indexes,
exit,
|v| {
self.blocks_before_next_halving
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&indexes.block.height_to_height,
&indexes.height.identity,
|(h, ..)| (h, StoredU32::from(h.left_before_next_halving())),
exit,
)?;
Ok(())
},
)?;
})?;
self.indexes_to_days_before_next_halving.compute_all(
indexes,
starting_indexes,
exit,
|v| {
self.days_before_next_halving
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.indexes_to_blocks_before_next_halving.height,
&self.blocks_before_next_halving.height,
|(h, blocks, ..)| (h, (*blocks as f32 / TARGET_BLOCKS_PER_DAY_F32).into()),
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -13,19 +13,14 @@ impl Vecs {
let v2 = Version::TWO;
Ok(Self {
indexes_to_halvingepoch: ComputedDateLast::forced_import(
db,
"halvingepoch",
version,
indexes,
)?,
indexes_to_blocks_before_next_halving: ComputedBlockLast::forced_import(
halvingepoch: ComputedDateLast::forced_import(db, "halvingepoch", version, indexes)?,
blocks_before_next_halving: ComputedBlockLast::forced_import(
db,
"blocks_before_next_halving",
version + v2,
indexes,
)?,
indexes_to_days_before_next_halving: ComputedBlockLast::forced_import(
days_before_next_halving: ComputedBlockLast::forced_import(
db,
"days_before_next_halving",
version + v2,

View File

@@ -6,7 +6,7 @@ use crate::internal::{ComputedBlockLast, ComputedDateLast};
/// Halving epoch metrics and countdown
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_halvingepoch: ComputedDateLast<HalvingEpoch>,
pub indexes_to_blocks_before_next_halving: ComputedBlockLast<StoredU32>,
pub indexes_to_days_before_next_halving: ComputedBlockLast<StoredF32>,
pub halvingepoch: ComputedDateLast<HalvingEpoch>,
pub blocks_before_next_halving: ComputedBlockLast<StoredU32>,
pub days_before_next_halving: ComputedBlockLast<StoredF32>,
}

View File

@@ -2,7 +2,7 @@ use brk_error::Result;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -11,12 +11,7 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_interval.derive_from(
indexes,
starting_indexes,
&self.height_to_interval,
exit,
)?;
self.interval.derive_from(indexes, starting_indexes, exit)?;
Ok(())
}

View File

@@ -1,10 +1,10 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{CheckedSub, Height, Timestamp, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1};
use vecdb::{Database, VecIndex};
use super::Vecs;
use crate::{indexes, internal::DerivedComputedBlockDistribution};
use crate::{indexes, internal::LazyBlockDistribution};
impl Vecs {
pub fn forced_import(
@@ -13,34 +13,25 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let height_to_interval = LazyVecFrom1::init(
"interval",
let interval = LazyBlockDistribution::forced_import_with_init(
db,
"block_interval",
version,
indexer.vecs.block.height_to_timestamp.boxed_clone(),
indexer.vecs.blocks.timestamp.clone(),
indexes,
|height: Height, timestamp_iter| {
let timestamp = timestamp_iter.get(height)?;
let timestamp = timestamp_iter.get_at(height.to_usize())?;
let interval = height.decremented().map_or(Timestamp::ZERO, |prev_h| {
timestamp_iter
.get(prev_h)
.get_at(prev_h.to_usize())
.map_or(Timestamp::ZERO, |prev_t| {
timestamp.checked_sub(prev_t).unwrap_or(Timestamp::ZERO)
})
});
Some(interval)
},
);
let indexes_to_block_interval = DerivedComputedBlockDistribution::forced_import(
db,
"block_interval",
height_to_interval.boxed_clone(),
version,
indexes,
)?;
Ok(Self {
height_to_interval,
indexes_to_block_interval,
})
Ok(Self { interval })
}
}

View File

@@ -1,11 +1,10 @@
use brk_traversable::Traversable;
use brk_types::{Height, Timestamp};
use vecdb::LazyVecFrom1;
use brk_types::Timestamp;
use crate::internal::DerivedComputedBlockDistribution;
use crate::internal::LazyBlockDistribution;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_interval: LazyVecFrom1<Height, Timestamp, Height, Timestamp>,
pub indexes_to_block_interval: DerivedComputedBlockDistribution<Timestamp>,
#[traversable(flatten)]
pub interval: LazyBlockDistribution<Timestamp>,
}

View File

@@ -3,9 +3,9 @@ use brk_indexer::Indexer;
use brk_types::{StoredF32, StoredF64};
use vecdb::Exit;
use super::super::{ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64, count, rewards};
use super::Vecs;
use super::super::{count, rewards, ONE_TERA_HASH, TARGET_BLOCKS_PER_DAY_F64};
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -17,31 +17,31 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_difficulty.derive_from(
self.difficulty.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_difficulty,
&indexer.vecs.blocks.difficulty,
exit,
)?;
self.indexes_to_difficulty_as_hash
self.difficulty_as_hash
.compute_all(indexes, starting_indexes, exit, |v| {
let multiplier = 2.0_f64.powi(32) / 600.0;
v.compute_transform(
starting_indexes.height,
&indexer.vecs.block.height_to_difficulty,
&indexer.vecs.blocks.difficulty,
|(i, v, ..)| (i, StoredF32::from(*v * multiplier)),
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate
self.hash_rate
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform2(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
&self.indexes_to_difficulty_as_hash.height,
&count_vecs._24h_block_count.height,
&self.difficulty_as_hash.height,
|(i, block_count_sum, difficulty_as_hash, ..)| {
(
i,
@@ -56,67 +56,67 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_hash_rate_1w_sma.compute_all(starting_indexes, exit, |v| {
self.hash_rate_1w_sma
.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.inner(),
self.hash_rate.dateindex.inner(),
7,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_1m_sma.compute_all(starting_indexes, exit, |v| {
self.hash_rate_1m_sma
.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.inner(),
self.hash_rate.dateindex.inner(),
30,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_2m_sma.compute_all(starting_indexes, exit, |v| {
self.hash_rate_2m_sma
.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.inner(),
self.hash_rate.dateindex.inner(),
2 * 30,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_rate_1y_sma.compute_all(starting_indexes, exit, |v| {
self.hash_rate_1y_sma
.compute_all(starting_indexes, exit, |v| {
v.compute_sma(
starting_indexes.dateindex,
self.indexes_to_hash_rate.dateindex.inner(),
self.hash_rate.dateindex.inner(),
365,
exit,
)?;
Ok(())
})?;
self.indexes_to_difficulty_adjustment.compute_all(
indexes,
starting_indexes,
exit,
|v| {
self.difficulty_adjustment
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_percentage_change(
starting_indexes.height,
&indexer.vecs.block.height_to_difficulty,
&indexer.vecs.blocks.difficulty,
1,
exit,
)?;
Ok(())
},
)?;
})?;
self.indexes_to_hash_price_ths
self.hash_price_ths
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform2(
starting_indexes.height,
&rewards_vecs.height_to_24h_coinbase_usd_sum,
&self.indexes_to_hash_rate.height,
rewards_vecs._24h_coinbase_sum.dollars.as_ref().unwrap(),
&self.hash_rate.height,
|(i, coinbase_sum, hashrate, ..)| {
let hashrate_ths = *hashrate / ONE_TERA_HASH;
let price = if hashrate_ths == 0.0 {
@@ -131,23 +131,23 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_hash_price_phs
self.hash_price_phs
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.indexes_to_hash_price_ths.height,
&self.hash_price_ths.height,
|(i, price, ..)| (i, (*price * 1000.0).into()),
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_value_ths
self.hash_value_ths
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform2(
starting_indexes.height,
&rewards_vecs.height_to_24h_coinbase_sum,
&self.indexes_to_hash_rate.height,
&rewards_vecs._24h_coinbase_sum.sats,
&self.hash_rate.height,
|(i, coinbase_sum, hashrate, ..)| {
let hashrate_ths = *hashrate / ONE_TERA_HASH;
let value = if hashrate_ths == 0.0 {
@@ -162,78 +162,78 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_hash_value_phs
self.hash_value_phs
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.indexes_to_hash_value_ths.height,
&self.hash_value_ths.height,
|(i, value, ..)| (i, (*value * 1000.0).into()),
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_price_ths_min
self.hash_price_ths_min
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
&self.indexes_to_hash_price_ths.height,
&self.hash_price_ths.height,
exit,
true,
)?;
Ok(())
})?;
self.indexes_to_hash_price_phs_min
self.hash_price_phs_min
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
&self.indexes_to_hash_price_phs.height,
&self.hash_price_phs.height,
exit,
true,
)?;
Ok(())
})?;
self.indexes_to_hash_value_ths_min
self.hash_value_ths_min
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
&self.indexes_to_hash_value_ths.height,
&self.hash_value_ths.height,
exit,
true,
)?;
Ok(())
})?;
self.indexes_to_hash_value_phs_min
self.hash_value_phs_min
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_all_time_low_(
starting_indexes.height,
&self.indexes_to_hash_value_phs.height,
&self.hash_value_phs.height,
exit,
true,
)?;
Ok(())
})?;
self.indexes_to_hash_price_rebound
self.hash_price_rebound
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_percentage_difference(
starting_indexes.height,
&self.indexes_to_hash_price_phs.height,
&self.indexes_to_hash_price_phs_min.height,
&self.hash_price_phs.height,
&self.hash_price_phs_min.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_hash_value_rebound
self.hash_value_rebound
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_percentage_difference(
starting_indexes.height,
&self.indexes_to_hash_value_phs.height,
&self.indexes_to_hash_value_phs_min.height,
&self.hash_value_phs.height,
&self.hash_value_phs_min.height,
exit,
)?;
Ok(())

View File

@@ -20,111 +20,106 @@ impl Vecs {
let v5 = Version::new(5);
Ok(Self {
indexes_to_hash_rate: ComputedBlockLast::forced_import(
db,
"hash_rate",
version + v5,
indexes,
)?,
indexes_to_hash_rate_1w_sma: ComputedDateLast::forced_import(
hash_rate: ComputedBlockLast::forced_import(db, "hash_rate", version + v5, indexes)?,
hash_rate_1w_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1w_sma",
version,
indexes,
)?,
indexes_to_hash_rate_1m_sma: ComputedDateLast::forced_import(
hash_rate_1m_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1m_sma",
version,
indexes,
)?,
indexes_to_hash_rate_2m_sma: ComputedDateLast::forced_import(
hash_rate_2m_sma: ComputedDateLast::forced_import(
db,
"hash_rate_2m_sma",
version,
indexes,
)?,
indexes_to_hash_rate_1y_sma: ComputedDateLast::forced_import(
hash_rate_1y_sma: ComputedDateLast::forced_import(
db,
"hash_rate_1y_sma",
version,
indexes,
)?,
indexes_to_hash_price_ths: ComputedBlockLast::forced_import(
hash_price_ths: ComputedBlockLast::forced_import(
db,
"hash_price_ths",
version + v4,
indexes,
)?,
indexes_to_hash_price_ths_min: ComputedBlockLast::forced_import(
hash_price_ths_min: ComputedBlockLast::forced_import(
db,
"hash_price_ths_min",
version + v4,
indexes,
)?,
indexes_to_hash_price_phs: ComputedBlockLast::forced_import(
hash_price_phs: ComputedBlockLast::forced_import(
db,
"hash_price_phs",
version + v4,
indexes,
)?,
indexes_to_hash_price_phs_min: ComputedBlockLast::forced_import(
hash_price_phs_min: ComputedBlockLast::forced_import(
db,
"hash_price_phs_min",
version + v4,
indexes,
)?,
indexes_to_hash_price_rebound: ComputedBlockLast::forced_import(
hash_price_rebound: ComputedBlockLast::forced_import(
db,
"hash_price_rebound",
version + v4,
indexes,
)?,
indexes_to_hash_value_ths: ComputedBlockLast::forced_import(
hash_value_ths: ComputedBlockLast::forced_import(
db,
"hash_value_ths",
version + v4,
indexes,
)?,
indexes_to_hash_value_ths_min: ComputedBlockLast::forced_import(
hash_value_ths_min: ComputedBlockLast::forced_import(
db,
"hash_value_ths_min",
version + v4,
indexes,
)?,
indexes_to_hash_value_phs: ComputedBlockLast::forced_import(
hash_value_phs: ComputedBlockLast::forced_import(
db,
"hash_value_phs",
version + v4,
indexes,
)?,
indexes_to_hash_value_phs_min: ComputedBlockLast::forced_import(
hash_value_phs_min: ComputedBlockLast::forced_import(
db,
"hash_value_phs_min",
version + v4,
indexes,
)?,
indexes_to_hash_value_rebound: ComputedBlockLast::forced_import(
hash_value_rebound: ComputedBlockLast::forced_import(
db,
"hash_value_rebound",
version + v4,
indexes,
)?,
// Derived from external indexer data - no height storage needed
indexes_to_difficulty: DerivedComputedBlockLast::forced_import(
difficulty: DerivedComputedBlockLast::forced_import(
db,
"difficulty",
indexer.vecs.block.height_to_difficulty.boxed_clone(),
indexer.vecs.blocks.difficulty.boxed_clone(),
version,
indexes,
)?,
indexes_to_difficulty_as_hash: ComputedBlockLast::forced_import(
difficulty_as_hash: ComputedBlockLast::forced_import(
db,
"difficulty_as_hash",
version,
indexes,
)?,
indexes_to_difficulty_adjustment: ComputedBlockSum::forced_import(
difficulty_adjustment: ComputedBlockSum::forced_import(
db,
"difficulty_adjustment",
version,

View File

@@ -8,23 +8,23 @@ use crate::internal::{
/// Mining-related metrics: hash rate, hash price, hash value, difficulty
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_hash_rate: ComputedBlockLast<StoredF64>,
pub indexes_to_hash_rate_1w_sma: ComputedDateLast<StoredF64>,
pub indexes_to_hash_rate_1m_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_rate_2m_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_rate_1y_sma: ComputedDateLast<StoredF32>,
pub indexes_to_hash_price_ths: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_ths_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_phs: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_phs_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_price_rebound: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_ths: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_ths_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_phs: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_phs_min: ComputedBlockLast<StoredF32>,
pub indexes_to_hash_value_rebound: ComputedBlockLast<StoredF32>,
pub hash_rate: ComputedBlockLast<StoredF64>,
pub hash_rate_1w_sma: ComputedDateLast<StoredF64>,
pub hash_rate_1m_sma: ComputedDateLast<StoredF32>,
pub hash_rate_2m_sma: ComputedDateLast<StoredF32>,
pub hash_rate_1y_sma: ComputedDateLast<StoredF32>,
pub hash_price_ths: ComputedBlockLast<StoredF32>,
pub hash_price_ths_min: ComputedBlockLast<StoredF32>,
pub hash_price_phs: ComputedBlockLast<StoredF32>,
pub hash_price_phs_min: ComputedBlockLast<StoredF32>,
pub hash_price_rebound: ComputedBlockLast<StoredF32>,
pub hash_value_ths: ComputedBlockLast<StoredF32>,
pub hash_value_ths_min: ComputedBlockLast<StoredF32>,
pub hash_value_phs: ComputedBlockLast<StoredF32>,
pub hash_value_phs_min: ComputedBlockLast<StoredF32>,
pub hash_value_rebound: ComputedBlockLast<StoredF32>,
/// Derived from indexer - no height storage needed
pub indexes_to_difficulty: DerivedComputedBlockLast<StoredF64>,
pub indexes_to_difficulty_as_hash: ComputedBlockLast<StoredF32>,
pub indexes_to_difficulty_adjustment: ComputedBlockSum<StoredF32>,
pub difficulty: DerivedComputedBlockLast<StoredF64>,
pub difficulty_as_hash: ComputedBlockLast<StoredF32>,
pub difficulty_adjustment: ComputedBlockSum<StoredF32>,
}

View File

@@ -5,7 +5,7 @@ use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex};
use super::super::count;
use super::Vecs;
use crate::{indexes, price, transactions, ComputeIndexes};
use crate::{ComputeIndexes, indexes, price, transactions};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -19,16 +19,16 @@ impl Vecs {
price: Option<&price::Vecs>,
exit: &Exit,
) -> Result<()> {
self.indexes_to_coinbase
self.coinbase
.compute_all(indexes, price, starting_indexes, exit, |vec| {
let mut txindex_to_first_txoutindex_iter =
indexer.vecs.tx.txindex_to_first_txoutindex.iter()?;
indexer.vecs.transactions.first_txoutindex.iter()?;
let mut txindex_to_output_count_iter =
indexes.transaction.txindex_to_output_count.iter();
let mut txoutindex_to_value_iter = indexer.vecs.txout.txoutindex_to_value.iter()?;
indexes.txindex.output_count.iter();
let mut txoutindex_to_value_iter = indexer.vecs.outputs.value.iter()?;
vec.compute_transform(
starting_indexes.height,
&indexer.vecs.tx.height_to_first_txindex,
&indexer.vecs.transactions.first_txindex,
|(height, txindex, ..)| {
let first_txoutindex = txindex_to_first_txoutindex_iter
.get_unwrap(txindex)
@@ -48,10 +48,10 @@ impl Vecs {
Ok(())
})?;
let mut height_to_coinbase_iter = self.indexes_to_coinbase.sats.height.into_iter();
self.height_to_24h_coinbase_sum.compute_transform(
let mut height_to_coinbase_iter = self.coinbase.sats.height.into_iter();
self._24h_coinbase_sum.sats.compute_transform(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
&count_vecs._24h_block_count.height,
|(h, count, ..)| {
let range = *h - (*count - 1)..=*h;
let sum = range
@@ -64,11 +64,13 @@ impl Vecs {
)?;
drop(height_to_coinbase_iter);
if let Some(ref dollars) = self.indexes_to_coinbase.dollars {
let mut height_to_coinbase_iter = dollars.height.into_iter();
self.height_to_24h_coinbase_usd_sum.compute_transform(
if let (Some(dollars_out), Some(dollars_in)) =
(&mut self._24h_coinbase_sum.dollars, &self.coinbase.dollars)
{
let mut height_to_coinbase_iter = dollars_in.height.into_iter();
dollars_out.compute_transform(
starting_indexes.height,
&count_vecs.height_to_24h_block_count,
&count_vecs._24h_block_count.height,
|(h, count, ..)| {
let range = *h - (*count - 1)..=*h;
let sum = range
@@ -81,13 +83,12 @@ impl Vecs {
)?;
}
self.indexes_to_subsidy
self.subsidy
.compute_all(indexes, price, starting_indexes, exit, |vec| {
// KISS: height.sum_cum.sum.0 is now a concrete field
vec.compute_transform2(
starting_indexes.height,
&self.indexes_to_coinbase.sats.height,
&transactions_fees.indexes_to_fee.sats.height.sum_cum.sum.0,
&self.coinbase.sats.height,
&transactions_fees.fee.sats.height.sum_cum.sum.0,
|(height, coinbase, fees, ..)| {
(
height,
@@ -102,15 +103,11 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_unclaimed_rewards.compute_all(
indexes,
price,
starting_indexes,
exit,
|vec| {
self.unclaimed_rewards
.compute_all(indexes, price, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&self.indexes_to_subsidy.sats.height,
&self.subsidy.sats.height,
|(height, subsidy, ..)| {
let halving = HalvingEpoch::from(height);
let expected = Sats::FIFTY_BTC / 2_usize.pow(halving.to_usize() as u32);
@@ -119,14 +116,12 @@ impl Vecs {
exit,
)?;
Ok(())
},
)?;
})?;
// KISS: dateindex.sum_cum.sum.0 is now a concrete field
self.dateindex_to_fee_dominance.compute_transform2(
self.fee_dominance.compute_transform2(
starting_indexes.dateindex,
&transactions_fees.indexes_to_fee.sats.dateindex.sum_cum.sum.0,
&self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0,
&transactions_fees.fee.sats.dateindex.sum_cum.sum.0,
&self.coinbase.sats.dateindex.sum_cum.sum.0,
|(i, fee, coinbase, ..)| {
let coinbase_f64 = u64::from(coinbase) as f64;
let dominance = if coinbase_f64 == 0.0 {
@@ -139,10 +134,10 @@ impl Vecs {
exit,
)?;
self.dateindex_to_subsidy_dominance.compute_transform2(
self.subsidy_dominance.compute_transform2(
starting_indexes.dateindex,
&self.indexes_to_subsidy.sats.dateindex.sum_cum.sum.0,
&self.indexes_to_coinbase.sats.dateindex.sum_cum.sum.0,
&self.subsidy.sats.dateindex.sum_cum.sum.0,
&self.coinbase.sats.dateindex.sum_cum.sum.0,
|(i, subsidy, coinbase, ..)| {
let coinbase_f64 = u64::from(coinbase) as f64;
let dominance = if coinbase_f64 == 0.0 {
@@ -155,9 +150,9 @@ impl Vecs {
exit,
)?;
if let Some(sma) = self.indexes_to_subsidy_usd_1y_sma.as_mut() {
if let Some(sma) = self.subsidy_usd_1y_sma.as_mut() {
let date_to_coinbase_usd_sum = &self
.indexes_to_coinbase
.coinbase
.dollars
.as_ref()
.unwrap()

View File

@@ -5,7 +5,7 @@ use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
use crate::{
indexes,
internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum},
internal::{ComputedDateLast, ValueBlockFull, ValueBlockHeight, ValueBlockSumCum},
};
impl Vecs {
@@ -16,40 +16,36 @@ impl Vecs {
compute_dollars: bool,
) -> Result<Self> {
Ok(Self {
height_to_24h_coinbase_sum: EagerVec::forced_import(db, "24h_coinbase_sum", version)?,
height_to_24h_coinbase_usd_sum: EagerVec::forced_import(
_24h_coinbase_sum: ValueBlockHeight::forced_import(
db,
"24h_coinbase_usd_sum",
"24h_coinbase_sum",
version,
compute_dollars,
)?,
indexes_to_coinbase: ValueBlockFull::forced_import(
coinbase: ValueBlockFull::forced_import(
db,
"coinbase",
version,
indexes,
compute_dollars,
)?,
indexes_to_subsidy: ValueBlockFull::forced_import(
subsidy: ValueBlockFull::forced_import(
db,
"subsidy",
version,
indexes,
compute_dollars,
)?,
indexes_to_unclaimed_rewards: ValueBlockSumCum::forced_import(
unclaimed_rewards: ValueBlockSumCum::forced_import(
db,
"unclaimed_rewards",
version,
indexes,
compute_dollars,
)?,
dateindex_to_fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?,
dateindex_to_subsidy_dominance: EagerVec::forced_import(
db,
"subsidy_dominance",
version,
)?,
indexes_to_subsidy_usd_1y_sma: compute_dollars
fee_dominance: EagerVec::forced_import(db, "fee_dominance", version)?,
subsidy_dominance: EagerVec::forced_import(db, "subsidy_dominance", version)?,
subsidy_usd_1y_sma: compute_dollars
.then(|| {
ComputedDateLast::forced_import(db, "subsidy_usd_1y_sma", version, indexes)
})

View File

@@ -1,18 +1,17 @@
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats, StoredF32};
use brk_types::{DateIndex, Dollars, StoredF32};
use vecdb::{EagerVec, PcoVec};
use crate::internal::{ComputedDateLast, ValueBlockFull, ValueBlockSumCum};
use crate::internal::{ComputedDateLast, ValueBlockFull, ValueBlockHeight, ValueBlockSumCum};
/// Coinbase/subsidy/rewards metrics
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_24h_coinbase_sum: EagerVec<PcoVec<Height, Sats>>,
pub height_to_24h_coinbase_usd_sum: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_coinbase: ValueBlockFull,
pub indexes_to_subsidy: ValueBlockFull,
pub indexes_to_unclaimed_rewards: ValueBlockSumCum,
pub dateindex_to_fee_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub dateindex_to_subsidy_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub indexes_to_subsidy_usd_1y_sma: Option<ComputedDateLast<Dollars>>,
pub _24h_coinbase_sum: ValueBlockHeight,
pub coinbase: ValueBlockFull,
pub subsidy: ValueBlockFull,
pub unclaimed_rewards: ValueBlockSumCum,
pub fee_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub subsidy_dominance: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub subsidy_usd_1y_sma: Option<ComputedDateLast<Dollars>>,
}

View File

@@ -3,7 +3,7 @@ use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -13,19 +13,14 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_size.derive_from(
self.size.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_total_size,
&indexer.vecs.blocks.total_size,
exit,
)?;
self.indexes_to_block_vbytes.derive_from(
indexes,
starting_indexes,
&self.height_to_vbytes,
exit,
)?;
self.vbytes.derive_from(indexes, starting_indexes, exit)?;
Ok(())
}

View File

@@ -1,13 +1,10 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Height, StoredU64, Version};
use vecdb::{Database, IterableCloneableVec, LazyVecFrom1, VecIndex};
use vecdb::{Database, IterableCloneableVec, VecIndex};
use super::Vecs;
use crate::{
indexes,
internal::DerivedComputedBlockFull,
};
use crate::{indexes, internal::{DerivedComputedBlockFull, LazyComputedBlockFull}};
impl Vecs {
pub fn forced_import(
@@ -16,33 +13,26 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let height_to_vbytes = LazyVecFrom1::init(
"vbytes",
Ok(Self {
vbytes: LazyComputedBlockFull::forced_import_with_init(
db,
"block_vbytes",
version,
indexer.vecs.block.height_to_weight.boxed_clone(),
indexer.vecs.blocks.weight.clone(),
indexes,
|height: Height, weight_iter| {
weight_iter
.get_at(height.to_usize())
.map(|w| StoredU64::from(w.to_vbytes_floor()))
},
);
Ok(Self {
indexes_to_block_size: DerivedComputedBlockFull::forced_import(
)?,
size: DerivedComputedBlockFull::forced_import(
db,
"block_size",
indexer.vecs.block.height_to_total_size.boxed_clone(),
indexer.vecs.blocks.total_size.boxed_clone(),
version,
indexes,
)?,
indexes_to_block_vbytes: DerivedComputedBlockFull::forced_import(
db,
"block_vbytes",
height_to_vbytes.boxed_clone(),
version,
indexes,
)?,
height_to_vbytes,
})
}
}

View File

@@ -1,12 +1,10 @@
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64, Weight};
use vecdb::LazyVecFrom1;
use brk_types::{StoredU64, Weight};
use crate::internal::DerivedComputedBlockFull;
use crate::internal::{DerivedComputedBlockFull, LazyComputedBlockFull};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_vbytes: LazyVecFrom1<Height, StoredU64, Height, Weight>,
pub indexes_to_block_size: DerivedComputedBlockFull<StoredU64>,
pub indexes_to_block_vbytes: DerivedComputedBlockFull<StoredU64>,
pub vbytes: LazyComputedBlockFull<StoredU64, Weight>,
pub size: DerivedComputedBlockFull<StoredU64>,
}

View File

@@ -4,7 +4,7 @@ use brk_types::Timestamp;
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes};
impl Vecs {
/// Compute height-to-time fields early, before indexes are computed.
@@ -16,9 +16,9 @@ impl Vecs {
exit: &Exit,
) -> Result<()> {
let mut prev_timestamp_fixed = None;
self.height_to_timestamp_fixed.compute_transform(
self.timestamp_fixed.compute_transform(
starting_height,
&indexer.vecs.block.height_to_timestamp,
&indexer.vecs.blocks.timestamp,
|(h, timestamp, height_to_timestamp_fixed_iter)| {
if prev_timestamp_fixed.is_none()
&& let Some(prev_h) = h.decremented()
@@ -46,11 +46,10 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.timeindexes_to_timestamp
.compute_all(starting_indexes, exit, |vec| {
self.timestamp.compute_all(|vec| {
vec.compute_transform(
starting_indexes.dateindex,
&indexes.time.dateindex_to_date,
&indexes.dateindex.date,
|(di, d, ..)| (di, Timestamp::from(d)),
exit,
)?;

View File

@@ -1,12 +1,10 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{Date, DifficultyEpoch, Height, Version};
use vecdb::{
Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, LazyVecFrom2, VecIndex,
};
use brk_types::{Date, Height, Version};
use vecdb::{Database, EagerVec, ImportableVec, IterableCloneableVec, LazyVecFrom1, VecIndex};
use super::Vecs;
use crate::{indexes, internal::ComputedVecsDateFirst};
use crate::{indexes, internal::DerivedComputedBlockFirst};
impl Vecs {
pub fn forced_import(
@@ -18,35 +16,25 @@ impl Vecs {
let height_to_timestamp_fixed = EagerVec::forced_import(db, "timestamp_fixed", version)?;
Ok(Self {
height_to_date: LazyVecFrom1::init(
date: LazyVecFrom1::init(
"date",
version,
indexer.vecs.block.height_to_timestamp.boxed_clone(),
indexer.vecs.blocks.timestamp.boxed_clone(),
|height: Height, timestamp_iter| {
timestamp_iter.get_at(height.to_usize()).map(Date::from)
},
),
height_to_date_fixed: LazyVecFrom1::init(
date_fixed: LazyVecFrom1::init(
"date_fixed",
version,
height_to_timestamp_fixed.boxed_clone(),
|height: Height, timestamp_iter| timestamp_iter.get(height).map(Date::from),
),
height_to_timestamp_fixed,
difficultyepoch_to_timestamp: LazyVecFrom2::init(
"timestamp",
version,
indexes.block.difficultyepoch_to_first_height.boxed_clone(),
indexer.vecs.block.height_to_timestamp.boxed_clone(),
|di: DifficultyEpoch, first_height_iter, timestamp_iter| {
first_height_iter
.get(di)
.and_then(|h: Height| timestamp_iter.get(h))
},
),
timeindexes_to_timestamp: ComputedVecsDateFirst::forced_import(
timestamp_fixed: height_to_timestamp_fixed,
timestamp: DerivedComputedBlockFirst::forced_import(
db,
"timestamp",
indexer.vecs.blocks.timestamp.boxed_clone(),
version,
indexes,
)?,

View File

@@ -1,16 +1,14 @@
use brk_traversable::Traversable;
use brk_types::{Date, DifficultyEpoch, Height, Timestamp};
use vecdb::{EagerVec, LazyVecFrom1, LazyVecFrom2, PcoVec};
use brk_types::{Date, Height, Timestamp};
use vecdb::{EagerVec, LazyVecFrom1, PcoVec};
use crate::internal::ComputedVecsDateFirst;
use crate::internal::DerivedComputedBlockFirst;
/// Timestamp and date metrics for blocks
#[derive(Clone, Traversable)]
pub struct Vecs {
pub height_to_date: LazyVecFrom1<Height, Date, Height, Timestamp>,
pub height_to_date_fixed: LazyVecFrom1<Height, Date, Height, Timestamp>,
pub height_to_timestamp_fixed: EagerVec<PcoVec<Height, Timestamp>>,
pub difficultyepoch_to_timestamp:
LazyVecFrom2<DifficultyEpoch, Timestamp, DifficultyEpoch, Height, Height, Timestamp>,
pub timeindexes_to_timestamp: ComputedVecsDateFirst<Timestamp>,
pub date: LazyVecFrom1<Height, Date, Height, Timestamp>,
pub date_fixed: LazyVecFrom1<Height, Date, Height, Timestamp>,
pub timestamp_fixed: EagerVec<PcoVec<Height, Timestamp>>,
pub timestamp: DerivedComputedBlockFirst<Timestamp>,
}

View File

@@ -3,7 +3,7 @@ use brk_indexer::Indexer;
use vecdb::Exit;
use super::Vecs;
use crate::{indexes, ComputeIndexes};
use crate::{ComputeIndexes, indexes};
impl Vecs {
pub fn compute(
@@ -13,12 +13,8 @@ impl Vecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_block_weight.derive_from(
indexes,
starting_indexes,
&indexer.vecs.block.height_to_weight,
exit,
)?;
self.weight
.derive_from(indexes, starting_indexes, &indexer.vecs.blocks.weight, exit)?;
Ok(())
}

View File

@@ -6,9 +6,7 @@ use vecdb::{Database, IterableCloneableVec};
use super::Vecs;
use crate::{
indexes,
internal::{
DerivedComputedBlockFull, LazyBlockFull, WeightToFullness,
},
internal::{DerivedComputedBlockFull, LazyBlockFull, WeightToFullness},
};
impl Vecs {
@@ -18,25 +16,21 @@ impl Vecs {
indexer: &Indexer,
indexes: &indexes::Vecs,
) -> Result<Self> {
let indexes_to_block_weight = DerivedComputedBlockFull::forced_import(
let weight = DerivedComputedBlockFull::forced_import(
db,
"block_weight",
indexer.vecs.block.height_to_weight.boxed_clone(),
indexer.vecs.blocks.weight.boxed_clone(),
version,
indexes,
)?;
let indexes_to_block_fullness =
LazyBlockFull::from_derived::<WeightToFullness>(
let fullness = LazyBlockFull::from_derived::<WeightToFullness>(
"block_fullness",
version,
indexer.vecs.block.height_to_weight.boxed_clone(),
&indexes_to_block_weight,
indexer.vecs.blocks.weight.boxed_clone(),
&weight,
);
Ok(Self {
indexes_to_block_weight,
indexes_to_block_fullness,
})
Ok(Self { weight, fullness })
}
}

View File

@@ -5,7 +5,6 @@ use crate::internal::{DerivedComputedBlockFull, LazyBlockFull};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_block_weight: DerivedComputedBlockFull<Weight>,
/// Block fullness as percentage of max block weight (0-100%)
pub indexes_to_block_fullness: LazyBlockFull<StoredF32, Weight>,
pub weight: DerivedComputedBlockFull<Weight>,
pub fullness: LazyBlockFull<StoredF32, Weight>,
}

View File

@@ -3,7 +3,7 @@ use brk_types::{Bitcoin, CheckedSub, StoredF64};
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use crate::{distribution, indexes, ComputeIndexes};
use crate::{ComputeIndexes, distribution, indexes};
impl Vecs {
pub fn compute(
@@ -13,9 +13,16 @@ impl Vecs {
distribution: &distribution::Vecs,
exit: &Exit,
) -> Result<()> {
let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply;
let circulating_supply = &distribution
.utxo_cohorts
.all
.metrics
.supply
.supply
.sats
.height;
self.indexes_to_coinblocks_created
self.coinblocks_created
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
@@ -26,21 +33,19 @@ impl Vecs {
Ok(())
})?;
let indexes_to_coinblocks_destroyed = &distribution
let coinblocks_destroyed = &distribution
.utxo_cohorts
.all
.metrics
.activity
.indexes_to_coinblocks_destroyed;
.coinblocks_destroyed;
self.indexes_to_coinblocks_stored
self.coinblocks_stored
.compute_all(indexes, starting_indexes, exit, |vec| {
let mut coinblocks_destroyed_iter = indexes_to_coinblocks_destroyed
.height
.into_iter();
let mut coinblocks_destroyed_iter = coinblocks_destroyed.height.into_iter();
vec.compute_transform(
starting_indexes.height,
&self.indexes_to_coinblocks_created.height,
&self.coinblocks_created.height,
|(i, created, ..)| {
let destroyed = coinblocks_destroyed_iter.get_unwrap(i);
(i, created.checked_sub(destroyed).unwrap())
@@ -50,42 +55,38 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_liveliness
self.liveliness
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
indexes_to_coinblocks_destroyed.height_cumulative.inner(),
self.indexes_to_coinblocks_created.height_cumulative.inner(),
coinblocks_destroyed.height_cumulative.inner(),
self.coinblocks_created.height_cumulative.inner(),
exit,
)?;
Ok(())
})?;
self.indexes_to_vaultedness
self.vaultedness
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform(
starting_indexes.height,
&self.indexes_to_liveliness.height,
&self.liveliness.height,
|(i, v, ..)| (i, StoredF64::from(1.0).checked_sub(v).unwrap()),
exit,
)?;
Ok(())
})?;
self.indexes_to_activity_to_vaultedness_ratio.compute_all(
indexes,
starting_indexes,
exit,
|vec| {
self.activity_to_vaultedness_ratio
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
&self.indexes_to_liveliness.height,
&self.indexes_to_vaultedness.height,
&self.liveliness.height,
&self.vaultedness.height,
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -11,31 +11,21 @@ use crate::{
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
indexes_to_coinblocks_created: ComputedBlockSumCum::forced_import(
coinblocks_created: ComputedBlockSumCum::forced_import(
db,
"coinblocks_created",
version,
indexes,
)?,
indexes_to_coinblocks_stored: ComputedBlockSumCum::forced_import(
coinblocks_stored: ComputedBlockSumCum::forced_import(
db,
"coinblocks_stored",
version,
indexes,
)?,
indexes_to_liveliness: ComputedBlockLast::forced_import(
db,
"liveliness",
version,
indexes,
)?,
indexes_to_vaultedness: ComputedBlockLast::forced_import(
db,
"vaultedness",
version,
indexes,
)?,
indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast::forced_import(
liveliness: ComputedBlockLast::forced_import(db, "liveliness", version, indexes)?,
vaultedness: ComputedBlockLast::forced_import(db, "vaultedness", version, indexes)?,
activity_to_vaultedness_ratio: ComputedBlockLast::forced_import(
db,
"activity_to_vaultedness_ratio",
version,

View File

@@ -5,9 +5,9 @@ use crate::internal::{ComputedBlockLast, ComputedBlockSumCum};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_coinblocks_created: ComputedBlockSumCum<StoredF64>,
pub indexes_to_coinblocks_stored: ComputedBlockSumCum<StoredF64>,
pub indexes_to_liveliness: ComputedBlockLast<StoredF64>,
pub indexes_to_vaultedness: ComputedBlockLast<StoredF64>,
pub indexes_to_activity_to_vaultedness_ratio: ComputedBlockLast<StoredF64>,
pub coinblocks_created: ComputedBlockSumCum<StoredF64>,
pub coinblocks_stored: ComputedBlockSumCum<StoredF64>,
pub liveliness: ComputedBlockLast<StoredF64>,
pub vaultedness: ComputedBlockLast<StoredF64>,
pub activity_to_vaultedness_ratio: ComputedBlockLast<StoredF64>,
}

View File

@@ -1,9 +1,9 @@
use brk_error::Result;
use vecdb::Exit;
use super::Vecs;
use super::super::activity;
use crate::{supply, ComputeIndexes};
use super::Vecs;
use crate::{ComputeIndexes, supply};
impl Vecs {
pub fn compute(
@@ -14,34 +14,35 @@ impl Vecs {
has_price: bool,
exit: &Exit,
) -> Result<()> {
self.indexes_to_cointime_adj_inflation_rate
self.cointime_adj_inflation_rate
.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.inflation.indexes.dateindex,
activity.activity_to_vaultedness_ratio.dateindex.inner(),
&supply.inflation.dateindex,
exit,
)?;
Ok(())
})?;
self.indexes_to_cointime_adj_tx_btc_velocity
self.cointime_adj_tx_btc_velocity
.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.indexes_to_btc.dateindex,
activity.activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.btc.dateindex,
exit,
)?;
Ok(())
})?;
if has_price {
self.indexes_to_cointime_adj_tx_usd_velocity.compute_all(starting_indexes, exit, |v| {
self.cointime_adj_tx_usd_velocity
.compute_all(starting_indexes, exit, |v| {
v.compute_multiply(
starting_indexes.dateindex,
activity.indexes_to_activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.indexes_to_usd.as_ref().unwrap().dateindex,
activity.activity_to_vaultedness_ratio.dateindex.inner(),
&supply.velocity.usd.as_ref().unwrap().dateindex,
exit,
)?;
Ok(())

View File

@@ -8,19 +8,19 @@ use crate::{indexes, internal::ComputedDateLast};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
indexes_to_cointime_adj_inflation_rate: ComputedDateLast::forced_import(
cointime_adj_inflation_rate: ComputedDateLast::forced_import(
db,
"cointime_adj_inflation_rate",
version,
indexes,
)?,
indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast::forced_import(
cointime_adj_tx_btc_velocity: ComputedDateLast::forced_import(
db,
"cointime_adj_tx_btc_velocity",
version,
indexes,
)?,
indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast::forced_import(
cointime_adj_tx_usd_velocity: ComputedDateLast::forced_import(
db,
"cointime_adj_tx_usd_velocity",
version,

View File

@@ -5,7 +5,7 @@ use crate::internal::ComputedDateLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_cointime_adj_inflation_rate: ComputedDateLast<StoredF32>,
pub indexes_to_cointime_adj_tx_btc_velocity: ComputedDateLast<StoredF64>,
pub indexes_to_cointime_adj_tx_usd_velocity: ComputedDateLast<StoredF64>,
pub cointime_adj_inflation_rate: ComputedDateLast<StoredF32>,
pub cointime_adj_tx_btc_velocity: ComputedDateLast<StoredF64>,
pub cointime_adj_tx_usd_velocity: ComputedDateLast<StoredF64>,
}

View File

@@ -4,7 +4,7 @@ use vecdb::Exit;
use super::super::{activity, value};
use super::Vecs;
use crate::{blocks, distribution, indexes, utils::OptionExt, ComputeIndexes};
use crate::{ComputeIndexes, blocks, distribution, indexes, utils::OptionExt};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -24,24 +24,25 @@ impl Vecs {
.metrics
.realized
.u()
.height_to_realized_cap;
.realized_cap
.height;
let circulating_supply = &distribution
.utxo_cohorts
.all
.metrics
.supply
.height_to_supply_value
.bitcoin;
.supply
.bitcoin
.height;
self.indexes_to_thermo_cap
self.thermo_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
// KISS: height_cumulative is now a concrete field (not Option)
vec.compute_transform(
starting_indexes.height,
&blocks
.rewards
.indexes_to_subsidy
.subsidy
.dollars
.as_ref()
.unwrap()
@@ -53,47 +54,47 @@ impl Vecs {
Ok(())
})?;
self.indexes_to_investor_cap
self.investor_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
realized_cap,
&self.indexes_to_thermo_cap.height,
&self.thermo_cap.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_vaulted_cap
self.vaulted_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
realized_cap,
&activity.indexes_to_vaultedness.height,
&activity.vaultedness.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_active_cap
self.active_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
realized_cap,
&activity.indexes_to_liveliness.height,
&activity.liveliness.height,
exit,
)?;
Ok(())
})?;
// cointime_cap = (cointime_value_destroyed_cumulative * circulating_supply) / coinblocks_stored_cumulative
self.indexes_to_cointime_cap
self.cointime_cap
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_transform3(
starting_indexes.height,
value.indexes_to_cointime_value_destroyed.height_cumulative.inner(),
value.cointime_value_destroyed.height_cumulative.inner(),
circulating_supply,
activity.indexes_to_coinblocks_stored.height_cumulative.inner(),
activity.coinblocks_stored.height_cumulative.inner(),
|(i, destroyed, supply, stored, ..)| {
let destroyed: f64 = *destroyed;
let supply: f64 = supply.into();

View File

@@ -8,36 +8,11 @@ use crate::{indexes, internal::ComputedBlockLast};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
indexes_to_thermo_cap: ComputedBlockLast::forced_import(
db,
"thermo_cap",
version,
indexes,
)?,
indexes_to_investor_cap: ComputedBlockLast::forced_import(
db,
"investor_cap",
version,
indexes,
)?,
indexes_to_vaulted_cap: ComputedBlockLast::forced_import(
db,
"vaulted_cap",
version,
indexes,
)?,
indexes_to_active_cap: ComputedBlockLast::forced_import(
db,
"active_cap",
version,
indexes,
)?,
indexes_to_cointime_cap: ComputedBlockLast::forced_import(
db,
"cointime_cap",
version,
indexes,
)?,
thermo_cap: ComputedBlockLast::forced_import(db, "thermo_cap", version, indexes)?,
investor_cap: ComputedBlockLast::forced_import(db, "investor_cap", version, indexes)?,
vaulted_cap: ComputedBlockLast::forced_import(db, "vaulted_cap", version, indexes)?,
active_cap: ComputedBlockLast::forced_import(db, "active_cap", version, indexes)?,
cointime_cap: ComputedBlockLast::forced_import(db, "cointime_cap", version, indexes)?,
})
}
}

View File

@@ -5,9 +5,9 @@ use crate::internal::ComputedBlockLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_thermo_cap: ComputedBlockLast<Dollars>,
pub indexes_to_investor_cap: ComputedBlockLast<Dollars>,
pub indexes_to_vaulted_cap: ComputedBlockLast<Dollars>,
pub indexes_to_active_cap: ComputedBlockLast<Dollars>,
pub indexes_to_cointime_cap: ComputedBlockLast<Dollars>,
pub thermo_cap: ComputedBlockLast<Dollars>,
pub investor_cap: ComputedBlockLast<Dollars>,
pub vaulted_cap: ComputedBlockLast<Dollars>,
pub active_cap: ComputedBlockLast<Dollars>,
pub cointime_cap: ComputedBlockLast<Dollars>,
}

View File

@@ -3,7 +3,7 @@ use vecdb::Exit;
use super::super::{activity, cap, supply};
use super::Vecs;
use crate::{distribution, indexes, price, utils::OptionExt, ComputeIndexes};
use crate::{ComputeIndexes, distribution, indexes, price, utils::OptionExt};
impl Vecs {
#[allow(clippy::too_many_arguments)]
@@ -18,91 +18,94 @@ impl Vecs {
cap: &cap::Vecs,
exit: &Exit,
) -> Result<()> {
let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply_value.bitcoin;
let circulating_supply = &distribution
.utxo_cohorts
.all
.metrics
.supply
.supply
.bitcoin
.height;
let realized_price = &distribution
.utxo_cohorts
.all
.metrics
.realized
.u()
.indexes_to_realized_price
.realized_price
.height;
self.indexes_to_vaulted_price
self.vaulted_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
realized_price,
&activity.indexes_to_vaultedness.height,
&activity.vaultedness.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_vaulted_price_ratio.compute_rest(
self.vaulted_price_ratio.compute_rest(
price,
starting_indexes,
exit,
Some(&self.indexes_to_vaulted_price.dateindex.0),
Some(&self.vaulted_price.dateindex.0),
)?;
self.indexes_to_active_price
self.active_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
realized_price,
&activity.indexes_to_liveliness.height,
&activity.liveliness.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_active_price_ratio.compute_rest(
self.active_price_ratio.compute_rest(
price,
starting_indexes,
exit,
Some(&self.indexes_to_active_price.dateindex.0),
Some(&self.active_price.dateindex.0),
)?;
self.indexes_to_true_market_mean.compute_all(
indexes,
starting_indexes,
exit,
|vec| {
vec.compute_divide(
starting_indexes.height,
&cap.indexes_to_investor_cap.height,
&supply.indexes_to_active_supply.bitcoin.height,
exit,
)?;
Ok(())
},
)?;
self.indexes_to_true_market_mean_ratio.compute_rest(
price,
starting_indexes,
exit,
Some(&self.indexes_to_true_market_mean.dateindex.0),
)?;
// cointime_price = cointime_cap / circulating_supply
self.indexes_to_cointime_price
self.true_market_mean
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
&cap.indexes_to_cointime_cap.height,
&cap.investor_cap.height,
&supply.active_supply.bitcoin.height,
exit,
)?;
Ok(())
})?;
self.true_market_mean_ratio.compute_rest(
price,
starting_indexes,
exit,
Some(&self.true_market_mean.dateindex.0),
)?;
// cointime_price = cointime_cap / circulating_supply
self.cointime_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
&cap.cointime_cap.height,
circulating_supply,
exit,
)?;
Ok(())
})?;
self.indexes_to_cointime_price_ratio.compute_rest(
self.cointime_price_ratio.compute_rest(
price,
starting_indexes,
exit,
Some(&self.indexes_to_cointime_price.dateindex.0),
Some(&self.cointime_price.dateindex.0),
)?;
Ok(())

View File

@@ -4,8 +4,9 @@ use vecdb::Database;
use super::Vecs;
use crate::{
indexes, price,
internal::{ComputedRatioVecsDate, ComputedBlockLast},
indexes,
internal::{ComputedBlockLast, ComputedRatioVecsDate},
price,
};
impl Vecs {
@@ -17,20 +18,15 @@ impl Vecs {
) -> Result<Self> {
macro_rules! computed_h {
($name:expr) => {
ComputedBlockLast::forced_import(
db,
$name,
version,
indexes,
)?
ComputedBlockLast::forced_import(db, $name, version, indexes)?
};
}
// Extract price vecs before struct literal so they can be used as sources for ratios
let indexes_to_vaulted_price = computed_h!("vaulted_price");
let indexes_to_active_price = computed_h!("active_price");
let indexes_to_true_market_mean = computed_h!("true_market_mean");
let indexes_to_cointime_price = computed_h!("cointime_price");
let vaulted_price = computed_h!("vaulted_price");
let active_price = computed_h!("active_price");
let true_market_mean = computed_h!("true_market_mean");
let cointime_price = computed_h!("cointime_price");
macro_rules! ratio_di {
($name:expr, $source:expr) => {
@@ -47,20 +43,14 @@ impl Vecs {
}
Ok(Self {
indexes_to_vaulted_price_ratio: ratio_di!("vaulted_price", &indexes_to_vaulted_price),
indexes_to_vaulted_price,
indexes_to_active_price_ratio: ratio_di!("active_price", &indexes_to_active_price),
indexes_to_active_price,
indexes_to_true_market_mean_ratio: ratio_di!(
"true_market_mean",
&indexes_to_true_market_mean
),
indexes_to_true_market_mean,
indexes_to_cointime_price_ratio: ratio_di!(
"cointime_price",
&indexes_to_cointime_price
),
indexes_to_cointime_price,
vaulted_price_ratio: ratio_di!("vaulted_price", &vaulted_price),
vaulted_price,
active_price_ratio: ratio_di!("active_price", &active_price),
active_price,
true_market_mean_ratio: ratio_di!("true_market_mean", &true_market_mean),
true_market_mean,
cointime_price_ratio: ratio_di!("cointime_price", &cointime_price),
cointime_price,
})
}
}

View File

@@ -1,16 +1,16 @@
use brk_traversable::Traversable;
use brk_types::Dollars;
use crate::internal::{ComputedRatioVecsDate, ComputedBlockLast};
use crate::internal::{ComputedBlockLast, ComputedRatioVecsDate};
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_vaulted_price: ComputedBlockLast<Dollars>,
pub indexes_to_vaulted_price_ratio: ComputedRatioVecsDate,
pub indexes_to_active_price: ComputedBlockLast<Dollars>,
pub indexes_to_active_price_ratio: ComputedRatioVecsDate,
pub indexes_to_true_market_mean: ComputedBlockLast<Dollars>,
pub indexes_to_true_market_mean_ratio: ComputedRatioVecsDate,
pub indexes_to_cointime_price: ComputedBlockLast<Dollars>,
pub indexes_to_cointime_price_ratio: ComputedRatioVecsDate,
pub vaulted_price: ComputedBlockLast<Dollars>,
pub vaulted_price_ratio: ComputedRatioVecsDate,
pub active_price: ComputedBlockLast<Dollars>,
pub active_price_ratio: ComputedRatioVecsDate,
pub true_market_mean: ComputedBlockLast<Dollars>,
pub true_market_mean_ratio: ComputedRatioVecsDate,
pub cointime_price: ComputedBlockLast<Dollars>,
pub cointime_price_ratio: ComputedRatioVecsDate,
}

View File

@@ -1,9 +1,9 @@
use brk_error::Result;
use vecdb::Exit;
use super::Vecs;
use super::super::activity;
use crate::{distribution, indexes, price, ComputeIndexes};
use super::Vecs;
use crate::{ComputeIndexes, distribution, indexes, price};
impl Vecs {
pub fn compute(
@@ -15,39 +15,36 @@ impl Vecs {
activity: &activity::Vecs,
exit: &Exit,
) -> Result<()> {
let circulating_supply = &distribution.utxo_cohorts.all.metrics.supply.height_to_supply;
let circulating_supply = &distribution
.utxo_cohorts
.all
.metrics
.supply
.supply
.sats
.height;
self.indexes_to_vaulted_supply.compute_all(
indexes,
price,
starting_indexes,
exit,
|vec| {
self.vaulted_supply
.compute_all(indexes, price, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
circulating_supply,
&activity.indexes_to_vaultedness.height,
&activity.vaultedness.height,
exit,
)?;
Ok(())
},
)?;
})?;
self.indexes_to_active_supply.compute_all(
indexes,
price,
starting_indexes,
exit,
|vec| {
self.active_supply
.compute_all(indexes, price, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
circulating_supply,
&activity.indexes_to_liveliness.height,
&activity.liveliness.height,
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -3,10 +3,7 @@ use brk_types::Version;
use vecdb::Database;
use super::Vecs;
use crate::{
indexes,
internal::ValueBlockLast,
};
use crate::{indexes, internal::ValueBlockLast};
impl Vecs {
pub fn forced_import(
@@ -16,14 +13,14 @@ impl Vecs {
compute_dollars: bool,
) -> Result<Self> {
Ok(Self {
indexes_to_vaulted_supply: ValueBlockLast::forced_import(
vaulted_supply: ValueBlockLast::forced_import(
db,
"vaulted_supply",
version,
indexes,
compute_dollars,
)?,
indexes_to_active_supply: ValueBlockLast::forced_import(
active_supply: ValueBlockLast::forced_import(
db,
"active_supply",
version,

View File

@@ -4,6 +4,6 @@ use crate::internal::ValueBlockLast;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_vaulted_supply: ValueBlockLast,
pub indexes_to_active_supply: ValueBlockLast,
pub vaulted_supply: ValueBlockLast,
pub active_supply: ValueBlockLast,
}

View File

@@ -3,7 +3,7 @@ use vecdb::Exit;
use super::super::activity;
use super::Vecs;
use crate::{distribution, indexes, price, ComputeIndexes};
use crate::{ComputeIndexes, distribution, indexes, price};
impl Vecs {
pub fn compute(
@@ -15,57 +15,45 @@ impl Vecs {
activity: &activity::Vecs,
exit: &Exit,
) -> Result<()> {
let indexes_to_coinblocks_destroyed = &distribution
let coinblocks_destroyed = &distribution
.utxo_cohorts
.all
.metrics
.activity
.indexes_to_coinblocks_destroyed;
.coinblocks_destroyed;
self.indexes_to_cointime_value_destroyed.compute_all(
indexes,
starting_indexes,
exit,
|vec| {
self.cointime_value_destroyed
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
&indexes_to_coinblocks_destroyed.height,
&price.usd.split.close.height,
&coinblocks_destroyed.height,
exit,
)?;
Ok(())
},
)?;
})?;
self.indexes_to_cointime_value_created.compute_all(
indexes,
starting_indexes,
exit,
|vec| {
self.cointime_value_created
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
&activity.indexes_to_coinblocks_created.height,
&price.usd.split.close.height,
&activity.coinblocks_created.height,
exit,
)?;
Ok(())
},
)?;
})?;
self.indexes_to_cointime_value_stored.compute_all(
indexes,
starting_indexes,
exit,
|vec| {
self.cointime_value_stored
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_multiply(
starting_indexes.height,
&price.usd.chainindexes_to_price_close.height,
&activity.indexes_to_coinblocks_stored.height,
&price.usd.split.close.height,
&activity.coinblocks_stored.height,
exit,
)?;
Ok(())
},
)?;
})?;
Ok(())
}

View File

@@ -8,19 +8,19 @@ use crate::{indexes, internal::ComputedBlockSumCum};
impl Vecs {
pub fn forced_import(db: &Database, version: Version, indexes: &indexes::Vecs) -> Result<Self> {
Ok(Self {
indexes_to_cointime_value_destroyed: ComputedBlockSumCum::forced_import(
cointime_value_destroyed: ComputedBlockSumCum::forced_import(
db,
"cointime_value_destroyed",
version,
indexes,
)?,
indexes_to_cointime_value_created: ComputedBlockSumCum::forced_import(
cointime_value_created: ComputedBlockSumCum::forced_import(
db,
"cointime_value_created",
version,
indexes,
)?,
indexes_to_cointime_value_stored: ComputedBlockSumCum::forced_import(
cointime_value_stored: ComputedBlockSumCum::forced_import(
db,
"cointime_value_stored",
version,

View File

@@ -5,7 +5,7 @@ use crate::internal::ComputedBlockSumCum;
#[derive(Clone, Traversable)]
pub struct Vecs {
pub indexes_to_cointime_value_destroyed: ComputedBlockSumCum<StoredF64>,
pub indexes_to_cointime_value_created: ComputedBlockSumCum<StoredF64>,
pub indexes_to_cointime_value_stored: ComputedBlockSumCum<StoredF64>,
pub cointime_value_destroyed: ComputedBlockSumCum<StoredF64>,
pub cointime_value_created: ComputedBlockSumCum<StoredF64>,
pub cointime_value_stored: ComputedBlockSumCum<StoredF64>,
}

View File

@@ -5,29 +5,70 @@ use brk_types::{Height, StoredU64, Version};
use derive_more::{Deref, DerefMut};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableCloneableVec, PcoVec, TypedVecIterator,
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, PcoVec, TypedVecIterator,
};
use crate::{ComputeIndexes, indexes, internal::DerivedComputedBlockLast};
use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast};
/// Address count per address type (runtime state).
#[derive(Debug, Default, Deref, DerefMut)]
pub struct AddressTypeToAddressCount(ByAddressType<u64>);
impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddressCount {
impl AddressTypeToAddressCount {
#[inline]
fn from((groups, starting_height): (&AddressTypeToHeightToAddressCount, Height)) -> Self {
pub fn sum(&self) -> u64 {
self.0.values().sum()
}
}
impl From<(&AddressTypeToAddrCountVecs, Height)> for AddressTypeToAddressCount {
#[inline]
fn from((groups, starting_height): (&AddressTypeToAddrCountVecs, Height)) -> Self {
if let Some(prev_height) = starting_height.decremented() {
Self(ByAddressType {
p2pk65: groups.p2pk65.into_iter().get_unwrap(prev_height).into(),
p2pk33: groups.p2pk33.into_iter().get_unwrap(prev_height).into(),
p2pkh: groups.p2pkh.into_iter().get_unwrap(prev_height).into(),
p2sh: groups.p2sh.into_iter().get_unwrap(prev_height).into(),
p2wpkh: groups.p2wpkh.into_iter().get_unwrap(prev_height).into(),
p2wsh: groups.p2wsh.into_iter().get_unwrap(prev_height).into(),
p2tr: groups.p2tr.into_iter().get_unwrap(prev_height).into(),
p2a: groups.p2a.into_iter().get_unwrap(prev_height).into(),
p2pk65: groups
.p2pk65
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2pk33: groups
.p2pk33
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2pkh: groups
.p2pkh
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2sh: groups
.p2sh
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2wpkh: groups
.p2wpkh
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2wsh: groups
.p2wsh
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2tr: groups
.p2tr
.height
.into_iter()
.get_unwrap(prev_height)
.into(),
p2a: groups.p2a.height.into_iter().get_unwrap(prev_height).into(),
})
} else {
Default::default()
@@ -35,200 +76,213 @@ impl From<(&AddressTypeToHeightToAddressCount, Height)> for AddressTypeToAddress
}
}
/// Address count per address type, indexed by height.
#[derive(Debug, Clone, Deref, DerefMut, Traversable)]
pub struct AddressTypeToHeightToAddressCount(ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>);
impl From<ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>>
for AddressTypeToHeightToAddressCount
{
#[inline]
fn from(value: ByAddressType<EagerVec<PcoVec<Height, StoredU64>>>) -> Self {
Self(value)
}
}
impl AddressTypeToHeightToAddressCount {
pub fn min_len(&self) -> usize {
self.p2pk65
.len()
.min(self.p2pk33.len())
.min(self.p2pkh.len())
.min(self.p2sh.len())
.min(self.p2wpkh.len())
.min(self.p2wsh.len())
.min(self.p2tr.len())
.min(self.p2a.len())
}
pub fn forced_import(db: &Database, name: &str, version: Version) -> Result<Self> {
Ok(Self::from(ByAddressType::new_with_name(|type_name| {
Ok(EagerVec::forced_import(
db,
&format!("{type_name}_{name}"),
version,
)?)
})?))
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
let inner = &mut self.0;
[
&mut inner.p2pk65 as &mut dyn AnyStoredVec,
&mut inner.p2pk33 as &mut dyn AnyStoredVec,
&mut inner.p2pkh as &mut dyn AnyStoredVec,
&mut inner.p2sh as &mut dyn AnyStoredVec,
&mut inner.p2wpkh as &mut dyn AnyStoredVec,
&mut inner.p2wsh as &mut dyn AnyStoredVec,
&mut inner.p2tr as &mut dyn AnyStoredVec,
&mut inner.p2a as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
pub fn write(&mut self) -> Result<()> {
self.p2pk65.write()?;
self.p2pk33.write()?;
self.p2pkh.write()?;
self.p2sh.write()?;
self.p2wpkh.write()?;
self.p2wsh.write()?;
self.p2tr.write()?;
self.p2a.write()?;
Ok(())
}
pub fn truncate_push(
&mut self,
height: Height,
addresstype_to_usize: &AddressTypeToAddressCount,
) -> Result<()> {
self.p2pk65
.truncate_push(height, addresstype_to_usize.p2pk65.into())?;
self.p2pk33
.truncate_push(height, addresstype_to_usize.p2pk33.into())?;
self.p2pkh
.truncate_push(height, addresstype_to_usize.p2pkh.into())?;
self.p2sh
.truncate_push(height, addresstype_to_usize.p2sh.into())?;
self.p2wpkh
.truncate_push(height, addresstype_to_usize.p2wpkh.into())?;
self.p2wsh
.truncate_push(height, addresstype_to_usize.p2wsh.into())?;
self.p2tr
.truncate_push(height, addresstype_to_usize.p2tr.into())?;
self.p2a
.truncate_push(height, addresstype_to_usize.p2a.into())?;
Ok(())
}
pub fn reset(&mut self) -> Result<()> {
use vecdb::GenericStoredVec;
self.p2pk65.reset()?;
self.p2pk33.reset()?;
self.p2pkh.reset()?;
self.p2sh.reset()?;
self.p2wpkh.reset()?;
self.p2wsh.reset()?;
self.p2tr.reset()?;
self.p2a.reset()?;
Ok(())
}
}
/// Address count per address type, indexed by various indexes (dateindex, etc.).
/// Address count per address type, with height + derived indexes.
#[derive(Clone, Deref, DerefMut, Traversable)]
pub struct AddressTypeToIndexesToAddressCount(ByAddressType<DerivedComputedBlockLast<StoredU64>>);
pub struct AddressTypeToAddrCountVecs(ByAddressType<ComputedBlockLast<StoredU64>>);
impl From<ByAddressType<DerivedComputedBlockLast<StoredU64>>>
for AddressTypeToIndexesToAddressCount
{
impl From<ByAddressType<ComputedBlockLast<StoredU64>>> for AddressTypeToAddrCountVecs {
#[inline]
fn from(value: ByAddressType<DerivedComputedBlockLast<StoredU64>>) -> Self {
fn from(value: ByAddressType<ComputedBlockLast<StoredU64>>) -> Self {
Self(value)
}
}
impl AddressTypeToIndexesToAddressCount {
impl AddressTypeToAddrCountVecs {
pub fn forced_import(
db: &Database,
name: &str,
version: Version,
indexes: &indexes::Vecs,
sources: &AddressTypeToHeightToAddressCount,
) -> Result<Self> {
Ok(Self::from(ByAddressType::<
DerivedComputedBlockLast<StoredU64>,
>::try_zip_with_name(
sources,
|type_name, source| {
DerivedComputedBlockLast::forced_import(
Ok(Self::from(
ByAddressType::<ComputedBlockLast<StoredU64>>::new_with_name(|type_name| {
ComputedBlockLast::forced_import(
db,
&format!("{type_name}_{name}"),
source.boxed_clone(),
version,
indexes,
)
},
)?))
})?,
))
}
pub fn compute(
/// Minimum height-vec length across all address types.
pub fn min_len(&self) -> usize {
    let by_type = &self.0;
    [
        &by_type.p2pk65,
        &by_type.p2pk33,
        &by_type.p2pkh,
        &by_type.p2sh,
        &by_type.p2wpkh,
        &by_type.p2wsh,
        &by_type.p2tr,
        &by_type.p2a,
    ]
    .iter()
    .fold(usize::MAX, |shortest, v| shortest.min(v.height.len()))
}
/// Parallel iterator over the height vec of every address type, for parallel writing.
pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
    let by_type = &mut self.0;
    let vecs: [&mut dyn AnyStoredVec; 8] = [
        &mut by_type.p2pk65.height,
        &mut by_type.p2pk33.height,
        &mut by_type.p2pkh.height,
        &mut by_type.p2sh.height,
        &mut by_type.p2wpkh.height,
        &mut by_type.p2wsh.height,
        &mut by_type.p2tr.height,
        &mut by_type.p2a.height,
    ];
    vecs.into_par_iter()
}
/// Flushes the height vec of every address type to disk.
pub fn write_height(&mut self) -> Result<()> {
    let by_type = &mut self.0;
    by_type.p2pk65.height.write()?;
    by_type.p2pk33.height.write()?;
    by_type.p2pkh.height.write()?;
    by_type.p2sh.height.write()?;
    by_type.p2wpkh.height.write()?;
    by_type.p2wsh.height.write()?;
    by_type.p2tr.height.write()?;
    by_type.p2a.height.write()?;
    Ok(())
}
/// Truncates each per-type height vec to `height` and pushes the matching count.
pub fn truncate_push_height(
    &mut self,
    height: Height,
    addr_counts: &AddressTypeToAddressCount,
) -> Result<()> {
    let by_type = &mut self.0;
    by_type
        .p2pk65
        .height
        .truncate_push(height, addr_counts.p2pk65.into())?;
    by_type
        .p2pk33
        .height
        .truncate_push(height, addr_counts.p2pk33.into())?;
    by_type
        .p2pkh
        .height
        .truncate_push(height, addr_counts.p2pkh.into())?;
    by_type
        .p2sh
        .height
        .truncate_push(height, addr_counts.p2sh.into())?;
    by_type
        .p2wpkh
        .height
        .truncate_push(height, addr_counts.p2wpkh.into())?;
    by_type
        .p2wsh
        .height
        .truncate_push(height, addr_counts.p2wsh.into())?;
    by_type
        .p2tr
        .height
        .truncate_push(height, addr_counts.p2tr.into())?;
    by_type
        .p2a
        .height
        .truncate_push(height, addr_counts.p2a.into())?;
    Ok(())
}
/// Resets the height vec of every address type back to its empty state.
pub fn reset_height(&mut self) -> Result<()> {
    use vecdb::GenericStoredVec;
    let by_type = &mut self.0;
    by_type.p2pk65.height.reset()?;
    by_type.p2pk33.height.reset()?;
    by_type.p2pkh.height.reset()?;
    by_type.p2sh.height.reset()?;
    by_type.p2wpkh.height.reset()?;
    by_type.p2wsh.height.reset()?;
    by_type.p2tr.height.reset()?;
    by_type.p2a.height.reset()?;
    Ok(())
}
/// Derives each per-type vec from its height source, then computes the
/// remaining derived indexes — same per-field order as before: all eight
/// `derive_from` calls first, then all eight `compute_rest` calls.
pub fn compute_rest(
    &mut self,
    indexes: &indexes::Vecs,
    starting_indexes: &ComputeIndexes,
    exit: &Exit,
    height_sources: &AddressTypeToHeightToAddressCount,
) -> Result<()> {
    let by_type = &mut self.0;
    for (target, source) in [
        (&mut by_type.p2pk65, &height_sources.p2pk65),
        (&mut by_type.p2pk33, &height_sources.p2pk33),
        (&mut by_type.p2pkh, &height_sources.p2pkh),
        (&mut by_type.p2sh, &height_sources.p2sh),
        (&mut by_type.p2wpkh, &height_sources.p2wpkh),
        (&mut by_type.p2wsh, &height_sources.p2wsh),
        (&mut by_type.p2tr, &height_sources.p2tr),
        (&mut by_type.p2a, &height_sources.p2a),
    ] {
        target.derive_from(indexes, starting_indexes, source, exit)?;
    }
    for target in [
        &mut by_type.p2pk65,
        &mut by_type.p2pk33,
        &mut by_type.p2pkh,
        &mut by_type.p2sh,
        &mut by_type.p2wpkh,
        &mut by_type.p2wsh,
        &mut by_type.p2tr,
        &mut by_type.p2a,
    ] {
        target.compute_rest(indexes, starting_indexes, exit)?;
    }
    Ok(())
}
/// Borrows the height vec of every address type, in the canonical type order.
pub fn by_height(&self) -> Vec<&EagerVec<PcoVec<Height, StoredU64>>> {
    let by_type = &self.0;
    Vec::from([
        &by_type.p2pk65.height,
        &by_type.p2pk33.height,
        &by_type.p2pkh.height,
        &by_type.p2sh.height,
        &by_type.p2wpkh.height,
        &by_type.p2wsh.height,
        &by_type.p2tr.height,
        &by_type.p2a.height,
    ])
}
}
/// Address-count vecs: an aggregate total plus a per-address-type breakdown.
#[derive(Clone, Traversable)]
pub struct AddrCountVecs {
    /// Aggregate count (height + derived indexes); its height vec is pushed the
    /// caller-supplied `total` and recomputed as the sum of the per-type height vecs.
    pub all: ComputedBlockLast<StoredU64>,
    /// One count series per address type (height + derived indexes).
    pub by_addresstype: AddressTypeToAddrCountVecs,
}
impl AddrCountVecs {
    /// Imports both the aggregate vec and the per-type vecs under `name`.
    pub fn forced_import(
        db: &Database,
        name: &str,
        version: Version,
        indexes: &indexes::Vecs,
    ) -> Result<Self> {
        let all = ComputedBlockLast::forced_import(db, name, version, indexes)?;
        let by_addresstype =
            AddressTypeToAddrCountVecs::forced_import(db, name, version, indexes)?;
        Ok(Self {
            all,
            by_addresstype,
        })
    }

    /// Minimum height length across the aggregate and every per-type vec.
    pub fn min_len(&self) -> usize {
        let per_type = self.by_addresstype.min_len();
        self.all.height.len().min(per_type)
    }

    /// Parallel iterator over every height vec (aggregate first) for parallel writing.
    pub fn par_iter_height_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
        let all = &mut self.all.height as &mut dyn AnyStoredVec;
        rayon::iter::once(all).chain(self.by_addresstype.par_iter_height_mut())
    }

    /// Resets the aggregate height vec, then every per-type height vec.
    pub fn reset_height(&mut self) -> Result<()> {
        self.all.height.reset()?;
        self.by_addresstype.reset_height()?;
        Ok(())
    }

    /// Truncates to `height` and pushes the aggregate `total` plus per-type counts.
    pub fn truncate_push_height(
        &mut self,
        height: Height,
        total: u64,
        addr_counts: &AddressTypeToAddressCount,
    ) -> Result<()> {
        self.all.height.truncate_push(height, total.into())?;
        self.by_addresstype
            .truncate_push_height(height, addr_counts)?;
        Ok(())
    }

    /// Finishes derived indexes: per-type vecs first (so their height data is
    /// final), then the aggregate as the sum of the per-type height vecs.
    pub fn compute_rest(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.by_addresstype
            .compute_rest(indexes, starting_indexes, exit)?;
        let per_type_heights = self.by_addresstype.by_height();
        self.all
            .compute_all(indexes, starting_indexes, exit, |height_vec| {
                Ok(height_vec.compute_sum_of_others(
                    starting_indexes.height,
                    &per_type_heights,
                    exit,
                )?)
            })?;
        Ok(())
    }
}

View File

@@ -3,10 +3,7 @@ mod data;
mod indexes;
mod type_map;
pub use address_count::{
AddressTypeToAddressCount, AddressTypeToHeightToAddressCount,
AddressTypeToIndexesToAddressCount,
};
pub use address_count::{AddrCountVecs, AddressTypeToAddressCount};
pub use data::AddressesDataVecs;
pub use indexes::AnyAddressIndexesVecs;
pub use type_map::{AddressTypeToTypeIndexMap, AddressTypeToVec, HeightToAddressTypeToVec};

View File

@@ -5,16 +5,13 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, StoredU64, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, Database, EagerVec, Exit, GenericStoredVec, ImportableVec,
IterableCloneableVec, IterableVec, PcoVec,
};
use vecdb::{AnyStoredVec, AnyVec, Database, Exit, GenericStoredVec, IterableVec};
use crate::{
ComputeIndexes,
distribution::state::AddressCohortState,
indexes,
internal::DerivedComputedBlockLast,
internal::ComputedBlockLast,
price,
};
@@ -38,11 +35,7 @@ pub struct AddressCohortVecs {
#[traversable(flatten)]
pub metrics: CohortMetrics,
/// Address count at each height
pub height_to_addr_count: EagerVec<PcoVec<Height, StoredU64>>,
/// Address count indexed by various dimensions
pub indexes_to_addr_count: DerivedComputedBlockLast<StoredU64>,
pub addr_count: ComputedBlockLast<StoredU64>,
}
impl AddressCohortVecs {
@@ -75,9 +68,6 @@ impl AddressCohortVecs {
up_to_1h_realized: None,
};
let height_to_addr_count =
EagerVec::forced_import(db, &cfg.name("addr_count"), version + VERSION)?;
Ok(Self {
starting_height: None,
@@ -86,14 +76,12 @@ impl AddressCohortVecs {
metrics: CohortMetrics::forced_import(&cfg, all_supply)?,
indexes_to_addr_count: DerivedComputedBlockLast::forced_import(
addr_count: ComputedBlockLast::forced_import(
db,
&cfg.name("addr_count"),
height_to_addr_count.boxed_clone(),
version + VERSION,
indexes,
)?,
height_to_addr_count,
})
}
@@ -114,7 +102,7 @@ impl AddressCohortVecs {
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_vecs_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
rayon::iter::once(&mut self.height_to_addr_count as &mut dyn AnyStoredVec)
rayon::iter::once(&mut self.addr_count.height as &mut dyn AnyStoredVec)
.chain(self.metrics.par_iter_mut())
}
@@ -135,7 +123,8 @@ impl Filtered for AddressCohortVecs {
impl DynCohortVecs for AddressCohortVecs {
fn min_stateful_height_len(&self) -> usize {
self.height_to_addr_count
self.addr_count
.height
.len()
.min(self.metrics.min_stateful_height_len())
}
@@ -166,21 +155,25 @@ impl DynCohortVecs for AddressCohortVecs {
state.inner.supply.value = self
.metrics
.supply
.height_to_supply
.supply
.sats
.height
.read_once(prev_height)?;
state.inner.supply.utxo_count = *self
.metrics
.supply
.height_to_utxo_count
.outputs
.utxo_count
.height
.read_once(prev_height)?;
state.addr_count = *self.height_to_addr_count.read_once(prev_height)?;
state.addr_count = *self.addr_count.height.read_once(prev_height)?;
// Restore realized cap if present
if let Some(realized_metrics) = self.metrics.realized.as_mut()
&& let Some(realized_state) = state.inner.realized.as_mut()
{
realized_state.cap = realized_metrics
.height_to_realized_cap
.realized_cap
.height
.read_once(prev_height)?;
}
@@ -200,7 +193,8 @@ impl DynCohortVecs for AddressCohortVecs {
fn validate_computed_versions(&mut self, base_version: Version) -> Result<()> {
use vecdb::GenericStoredVec;
self.height_to_addr_count
self.addr_count
.height
.validate_computed_version_or_reset(base_version)?;
self.metrics.validate_computed_versions(base_version)?;
Ok(())
@@ -213,7 +207,8 @@ impl DynCohortVecs for AddressCohortVecs {
// Push addr_count from state
if let Some(state) = self.state.as_ref() {
self.height_to_addr_count
self.addr_count
.height
.truncate_push(height, state.addr_count.into())?;
self.metrics.truncate_push(height, &state.inner)?;
}
@@ -247,12 +242,8 @@ impl DynCohortVecs for AddressCohortVecs {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_addr_count.derive_from(
indexes,
starting_indexes,
&self.height_to_addr_count,
exit,
)?;
self.addr_count
.compute_rest(indexes, starting_indexes, exit)?;
self.metrics
.compute_rest_part1(indexes, price, starting_indexes, exit)?;
Ok(())
@@ -266,11 +257,11 @@ impl CohortVecs for AddressCohortVecs {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_addr_count.compute_sum_of_others(
self.addr_count.height.compute_sum_of_others(
starting_indexes.height,
others
.iter()
.map(|v| &v.height_to_addr_count)
.map(|v| &v.addr_count.height)
.collect::<Vec<_>>()
.as_slice(),
exit,

View File

@@ -149,12 +149,15 @@ impl DynCohortVecs for UTXOCohortVecs {
state.supply.value = self
.metrics
.supply
.height_to_supply
.supply
.sats
.height
.read_once(prev_height)?;
state.supply.utxo_count = *self
.metrics
.supply
.height_to_utxo_count
.outputs
.utxo_count
.height
.read_once(prev_height)?;
// Restore realized cap if present
@@ -162,7 +165,8 @@ impl DynCohortVecs for UTXOCohortVecs {
&& let Some(realized_state) = state.realized.as_mut()
{
realized_state.cap = realized_metrics
.height_to_realized_cap
.realized_cap
.height
.read_once(prev_height)?;
}

View File

@@ -1,6 +1,6 @@
use brk_error::Result;
use brk_types::{DateIndex, Dollars, Height};
use log::info;
use tracing::info;
use vecdb::{Exit, IterableVec};
use crate::{ComputeIndexes, indexes, price};

View File

@@ -4,12 +4,12 @@ use brk_cohort::ByAddressType;
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{DateIndex, Height, OutputType, Sats, TxIndex, TypeIndex};
use log::info;
use rayon::prelude::*;
use tracing::info;
use vecdb::{Exit, IterableVec, TypedVecIterator, VecIndex};
use crate::{
blocks, transactions, indexes, price,
blocks,
distribution::{
address::AddressTypeToAddressCount,
block::{
@@ -19,7 +19,7 @@ use crate::{
compute::write::{process_address_updates, write},
state::{BlockState, Transacted},
},
inputs, outputs,
indexes, inputs, outputs, price, transactions,
};
use super::{
@@ -58,25 +58,25 @@ pub fn process_blocks(
// References to vectors using correct field paths
// From indexer.vecs:
let height_to_first_txindex = &indexer.vecs.tx.height_to_first_txindex;
let height_to_first_txoutindex = &indexer.vecs.txout.height_to_first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.txin.height_to_first_txinindex;
let height_to_first_txindex = &indexer.vecs.transactions.first_txindex;
let height_to_first_txoutindex = &indexer.vecs.outputs.first_txoutindex;
let height_to_first_txinindex = &indexer.vecs.inputs.first_txinindex;
// From transactions and inputs/outputs (via .height or .height.sum_cum.sum patterns):
let height_to_tx_count = &transactions.count.indexes_to_tx_count.height;
let height_to_output_count = &outputs.count.indexes_to_count.height.sum_cum.sum.0;
let height_to_input_count = &inputs.count.indexes_to_count.height.sum_cum.sum.0;
let height_to_tx_count = &transactions.count.tx_count.height;
let height_to_output_count = &outputs.count.total_count.height.sum_cum.sum.0;
let height_to_input_count = &inputs.count.height.sum_cum.sum.0;
// From blocks:
let height_to_timestamp = &blocks.time.height_to_timestamp_fixed;
let height_to_date = &blocks.time.height_to_date_fixed;
let dateindex_to_first_height = &indexes.time.dateindex_to_first_height;
let dateindex_to_height_count = &indexes.time.dateindex_to_height_count;
let txindex_to_output_count = &indexes.transaction.txindex_to_output_count;
let txindex_to_input_count = &indexes.transaction.txindex_to_input_count;
let height_to_timestamp = &blocks.time.timestamp_fixed;
let height_to_date = &blocks.time.date_fixed;
let dateindex_to_first_height = &indexes.dateindex.first_height;
let dateindex_to_height_count = &indexes.dateindex.height_count;
let txindex_to_output_count = &indexes.txindex.output_count;
let txindex_to_input_count = &indexes.txindex.input_count;
// From price (optional):
let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height);
let dateindex_to_price = price.map(|p| &p.usd.timeindexes_to_price_close.dateindex);
let height_to_price = price.map(|p| &p.usd.split.close.height);
let dateindex_to_price = price.map(|p| &p.usd.split.close.dateindex);
// Access pre-computed vectors from context for thread-safe access
let height_to_price_vec = &ctx.height_to_price;
@@ -103,7 +103,7 @@ pub fn process_blocks(
// Build txindex -> height lookup map for efficient prev_height computation
let mut txindex_to_height: RangeMap<TxIndex, Height> = {
let mut map = RangeMap::with_capacity(last_height.to_usize() + 1);
for first_txindex in indexer.vecs.tx.height_to_first_txindex.into_iter() {
for first_txindex in indexer.vecs.transactions.first_txindex.into_iter() {
map.push(first_txindex);
}
map
@@ -114,59 +114,24 @@ pub fn process_blocks(
let mut txin_iters = TxInIterators::new(indexer, inputs, &mut txindex_to_height);
// Create iterators for first address indexes per type
let mut first_p2a_iter = indexer
.vecs
.address
.height_to_first_p2aaddressindex
.into_iter();
let mut first_p2pk33_iter = indexer
.vecs
.address
.height_to_first_p2pk33addressindex
.into_iter();
let mut first_p2pk65_iter = indexer
.vecs
.address
.height_to_first_p2pk65addressindex
.into_iter();
let mut first_p2pkh_iter = indexer
.vecs
.address
.height_to_first_p2pkhaddressindex
.into_iter();
let mut first_p2sh_iter = indexer
.vecs
.address
.height_to_first_p2shaddressindex
.into_iter();
let mut first_p2tr_iter = indexer
.vecs
.address
.height_to_first_p2traddressindex
.into_iter();
let mut first_p2wpkh_iter = indexer
.vecs
.address
.height_to_first_p2wpkhaddressindex
.into_iter();
let mut first_p2wsh_iter = indexer
.vecs
.address
.height_to_first_p2wshaddressindex
.into_iter();
let mut first_p2a_iter = indexer.vecs.addresses.first_p2aaddressindex.into_iter();
let mut first_p2pk33_iter = indexer.vecs.addresses.first_p2pk33addressindex.into_iter();
let mut first_p2pk65_iter = indexer.vecs.addresses.first_p2pk65addressindex.into_iter();
let mut first_p2pkh_iter = indexer.vecs.addresses.first_p2pkhaddressindex.into_iter();
let mut first_p2sh_iter = indexer.vecs.addresses.first_p2shaddressindex.into_iter();
let mut first_p2tr_iter = indexer.vecs.addresses.first_p2traddressindex.into_iter();
let mut first_p2wpkh_iter = indexer.vecs.addresses.first_p2wpkhaddressindex.into_iter();
let mut first_p2wsh_iter = indexer.vecs.addresses.first_p2wshaddressindex.into_iter();
// Track running totals - recover from previous height if resuming
let (mut addresstype_to_addr_count, mut addresstype_to_empty_addr_count) =
if starting_height > Height::ZERO {
let addr_count = AddressTypeToAddressCount::from((
&vecs.addresstype_to_height_to_addr_count,
let (mut addr_counts, mut empty_addr_counts) = if starting_height > Height::ZERO {
let addr_counts =
AddressTypeToAddressCount::from((&vecs.addr_count.by_addresstype, starting_height));
let empty_addr_counts = AddressTypeToAddressCount::from((
&vecs.empty_addr_count.by_addresstype,
starting_height,
));
let empty_addr_count = AddressTypeToAddressCount::from((
&vecs.addresstype_to_height_to_empty_addr_count,
starting_height,
));
(addr_count, empty_addr_count)
(addr_counts, empty_addr_counts)
} else {
(
AddressTypeToAddressCount::default(),
@@ -333,8 +298,8 @@ pub fn process_blocks(
&mut vecs.address_cohorts,
&mut lookup,
block_price,
&mut addresstype_to_addr_count,
&mut addresstype_to_empty_addr_count,
&mut addr_counts,
&mut empty_addr_counts,
);
// Process sent inputs (addresses sending funds)
@@ -344,8 +309,8 @@ pub fn process_blocks(
&mut vecs.address_cohorts,
&mut lookup,
block_price,
&mut addresstype_to_addr_count,
&mut addresstype_to_empty_addr_count,
&mut addr_counts,
&mut empty_addr_counts,
height_to_price_vec.as_deref(),
height_to_timestamp_vec,
height,
@@ -361,10 +326,13 @@ pub fn process_blocks(
});
// Push to height-indexed vectors
vecs.addresstype_to_height_to_addr_count
.truncate_push(height, &addresstype_to_addr_count)?;
vecs.addresstype_to_height_to_empty_addr_count
.truncate_push(height, &addresstype_to_empty_addr_count)?;
vecs.addr_count
.truncate_push_height(height, addr_counts.sum(), &addr_counts)?;
vecs.empty_addr_count.truncate_push_height(
height,
empty_addr_counts.sum(),
&empty_addr_counts,
)?;
// Get date info for unrealized state computation
let date = height_to_date_iter.get_unwrap(height);

View File

@@ -26,11 +26,10 @@ impl ComputeContext {
blocks: &blocks::Vecs,
price: Option<&price::Vecs>,
) -> Self {
let height_to_timestamp: Vec<Timestamp> =
blocks.time.height_to_timestamp_fixed.into_iter().collect();
let height_to_timestamp: Vec<Timestamp> = blocks.time.timestamp_fixed.into_iter().collect();
let height_to_price: Option<Vec<Dollars>> = price
.map(|p| &p.usd.chainindexes_to_price_close.height)
.map(|p| &p.usd.split.close.height)
.map(|v| v.into_iter().map(|d| *d).collect());
Self {

View File

@@ -37,9 +37,9 @@ pub struct TxOutIterators<'a> {
impl<'a> TxOutIterators<'a> {
pub fn new(indexer: &'a Indexer) -> Self {
Self {
value_iter: indexer.vecs.txout.txoutindex_to_value.into_iter(),
outputtype_iter: indexer.vecs.txout.txoutindex_to_outputtype.into_iter(),
typeindex_iter: indexer.vecs.txout.txoutindex_to_typeindex.into_iter(),
value_iter: indexer.vecs.outputs.value.into_iter(),
outputtype_iter: indexer.vecs.outputs.outputtype.into_iter(),
typeindex_iter: indexer.vecs.outputs.typeindex.into_iter(),
}
}
@@ -75,10 +75,10 @@ impl<'a> TxInIterators<'a> {
txindex_to_height: &'a mut RangeMap<TxIndex, Height>,
) -> Self {
Self {
value_iter: txins.spent.txinindex_to_value.into_iter(),
outpoint_iter: indexer.vecs.txin.txinindex_to_outpoint.into_iter(),
outputtype_iter: indexer.vecs.txin.txinindex_to_outputtype.into_iter(),
typeindex_iter: indexer.vecs.txin.txinindex_to_typeindex.into_iter(),
value_iter: txins.spent.value.into_iter(),
outpoint_iter: indexer.vecs.inputs.outpoint.into_iter(),
outputtype_iter: indexer.vecs.inputs.outputtype.into_iter(),
typeindex_iter: indexer.vecs.inputs.typeindex.into_iter(),
txindex_to_height,
}
}

View File

@@ -95,15 +95,18 @@ pub fn reset_state(
}
/// Check if we can resume from a checkpoint or need to start fresh.
pub fn determine_start_mode(computed_min: Height, chain_state_height: Height) -> StartMode {
///
/// - `min_available`: minimum height we have data for across all stateful vecs
/// - `resume_target`: the height we want to resume processing from
pub fn determine_start_mode(min_available: Height, resume_target: Height) -> StartMode {
// No data to resume from
if chain_state_height.is_zero() {
if resume_target.is_zero() {
return StartMode::Fresh;
}
match computed_min.cmp(&chain_state_height) {
Ordering::Greater => unreachable!("min height > chain state height"),
Ordering::Equal => StartMode::Resume(chain_state_height),
match min_available.cmp(&resume_target) {
Ordering::Greater => unreachable!("min_available > resume_target"),
Ordering::Equal => StartMode::Resume(resume_target),
Ordering::Less => StartMode::Fresh,
}
}

View File

@@ -2,8 +2,8 @@ use std::time::Instant;
use brk_error::Result;
use brk_types::Height;
use log::info;
use rayon::prelude::*;
use tracing::info;
use vecdb::{AnyStoredVec, GenericStoredVec, Stamp};
use crate::distribution::{
@@ -77,11 +77,8 @@ pub fn write(
vecs.any_address_indexes
.par_iter_mut()
.chain(vecs.addresses_data.par_iter_mut())
.chain(vecs.addresstype_to_height_to_addr_count.par_iter_mut())
.chain(
vecs.addresstype_to_height_to_empty_addr_count
.par_iter_mut(),
)
.chain(vecs.addr_count.par_iter_height_mut())
.chain(vecs.empty_addr_count.par_iter_height_mut())
.chain(rayon::iter::once(
&mut vecs.chain_state as &mut dyn AnyStoredVec,
))

View File

@@ -2,14 +2,11 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Height, Sats, StoredF64, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
PcoVec,
};
use vecdb::{AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, PcoVec};
use crate::{
ComputeIndexes, indexes,
internal::{ComputedBlockSumCum, DerivedValueBlockSumCum},
internal::{ComputedBlockSumCum, LazyComputedValueBlockSumCum},
};
use super::ImportConfig;
@@ -17,63 +14,54 @@ use super::ImportConfig;
/// Activity metrics for a cohort.
#[derive(Clone, Traversable)]
pub struct ActivityMetrics {
/// Total satoshis sent at each height
pub height_to_sent: EagerVec<PcoVec<Height, Sats>>,
/// Sent amounts indexed by various dimensions (derives from height_to_sent)
pub indexes_to_sent: DerivedValueBlockSumCum,
/// Total satoshis sent at each height + derived indexes
pub sent: LazyComputedValueBlockSumCum,
/// Satoshi-blocks destroyed (supply * blocks_old when spent)
pub height_to_satblocks_destroyed: EagerVec<PcoVec<Height, Sats>>,
pub satblocks_destroyed: EagerVec<PcoVec<Height, Sats>>,
/// Satoshi-days destroyed (supply * days_old when spent)
pub height_to_satdays_destroyed: EagerVec<PcoVec<Height, Sats>>,
pub satdays_destroyed: EagerVec<PcoVec<Height, Sats>>,
/// Coin-blocks destroyed (in BTC rather than sats)
pub indexes_to_coinblocks_destroyed: ComputedBlockSumCum<StoredF64>,
pub coinblocks_destroyed: ComputedBlockSumCum<StoredF64>,
/// Coin-days destroyed (in BTC rather than sats)
pub indexes_to_coindays_destroyed: ComputedBlockSumCum<StoredF64>,
pub coindays_destroyed: ComputedBlockSumCum<StoredF64>,
}
impl ActivityMetrics {
/// Import activity metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let height_to_sent: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("sent"), cfg.version)?;
let indexes_to_sent = DerivedValueBlockSumCum::forced_import(
Ok(Self {
sent: LazyComputedValueBlockSumCum::forced_import(
cfg.db,
&cfg.name("sent"),
cfg.version,
cfg.indexes,
height_to_sent.boxed_clone(),
cfg.price,
)?;
)?,
Ok(Self {
height_to_sent,
indexes_to_sent,
height_to_satblocks_destroyed: EagerVec::forced_import(
satblocks_destroyed: EagerVec::forced_import(
cfg.db,
&cfg.name("satblocks_destroyed"),
cfg.version,
)?,
height_to_satdays_destroyed: EagerVec::forced_import(
satdays_destroyed: EagerVec::forced_import(
cfg.db,
&cfg.name("satdays_destroyed"),
cfg.version,
)?,
indexes_to_coinblocks_destroyed: ComputedBlockSumCum::forced_import(
coinblocks_destroyed: ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("coinblocks_destroyed"),
cfg.version,
cfg.indexes,
)?,
indexes_to_coindays_destroyed: ComputedBlockSumCum::forced_import(
coindays_destroyed: ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("coindays_destroyed"),
cfg.version,
@@ -84,10 +72,12 @@ impl ActivityMetrics {
/// Get minimum length across height-indexed vectors.
pub fn min_len(&self) -> usize {
self.height_to_sent
self.sent
.sats
.height
.len()
.min(self.height_to_satblocks_destroyed.len())
.min(self.height_to_satdays_destroyed.len())
.min(self.satblocks_destroyed.len())
.min(self.satdays_destroyed.len())
}
/// Push activity state values to height-indexed vectors.
@@ -98,28 +88,28 @@ impl ActivityMetrics {
satblocks_destroyed: Sats,
satdays_destroyed: Sats,
) -> Result<()> {
self.height_to_sent.truncate_push(height, sent)?;
self.height_to_satblocks_destroyed
self.sent.sats.height.truncate_push(height, sent)?;
self.satblocks_destroyed
.truncate_push(height, satblocks_destroyed)?;
self.height_to_satdays_destroyed
self.satdays_destroyed
.truncate_push(height, satdays_destroyed)?;
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.height_to_sent.write()?;
self.height_to_satblocks_destroyed.write()?;
self.height_to_satdays_destroyed.write()?;
self.sent.sats.height.write()?;
self.satblocks_destroyed.write()?;
self.satdays_destroyed.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.height_to_sent as &mut dyn AnyStoredVec,
&mut self.height_to_satblocks_destroyed as &mut dyn AnyStoredVec,
&mut self.height_to_satdays_destroyed as &mut dyn AnyStoredVec,
&mut self.sent.sats.height as &mut dyn AnyStoredVec,
&mut self.satblocks_destroyed as &mut dyn AnyStoredVec,
&mut self.satdays_destroyed as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
@@ -137,24 +127,27 @@ impl ActivityMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_sent.compute_sum_of_others(
starting_indexes.height,
&others.iter().map(|v| &v.height_to_sent).collect::<Vec<_>>(),
exit,
)?;
self.height_to_satblocks_destroyed.compute_sum_of_others(
self.sent.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_satblocks_destroyed)
.map(|v| &v.sent.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_satdays_destroyed.compute_sum_of_others(
self.satblocks_destroyed.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_satdays_destroyed)
.map(|v| &v.satblocks_destroyed)
.collect::<Vec<_>>(),
exit,
)?;
self.satdays_destroyed.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.satdays_destroyed)
.collect::<Vec<_>>(),
exit,
)?;
@@ -168,29 +161,24 @@ impl ActivityMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_sent.derive_from(
indexes,
starting_indexes,
&self.height_to_sent,
exit,
)?;
self.sent.compute_rest(indexes, starting_indexes, exit)?;
self.indexes_to_coinblocks_destroyed
self.coinblocks_destroyed
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.height_to_satblocks_destroyed,
&self.satblocks_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;
Ok(())
})?;
self.indexes_to_coindays_destroyed
self.coindays_destroyed
.compute_all(indexes, starting_indexes, exit, |v| {
v.compute_transform(
starting_indexes.height,
&self.height_to_satdays_destroyed,
&self.satdays_destroyed,
|(i, v, ..)| (i, StoredF64::from(Bitcoin::from(v))),
exit,
)?;

View File

@@ -2,15 +2,13 @@ use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Version};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
PcoVec,
};
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec};
use crate::{
ComputeIndexes,
distribution::state::CohortState,
internal::{CostBasisPercentiles, DerivedComputedBlockLast},
indexes,
internal::{ComputedBlockLast, CostBasisPercentiles},
};
use super::ImportConfig;
@@ -19,12 +17,10 @@ use super::ImportConfig;
#[derive(Clone, Traversable)]
pub struct CostBasisMetrics {
/// Minimum cost basis for any UTXO at this height
pub height_to_min_cost_basis: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_min_cost_basis: DerivedComputedBlockLast<Dollars>,
pub min: ComputedBlockLast<Dollars>,
/// Maximum cost basis for any UTXO at this height
pub height_to_max_cost_basis: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_max_cost_basis: DerivedComputedBlockLast<Dollars>,
pub max: ComputedBlockLast<Dollars>,
/// Cost basis distribution percentiles (median, quartiles, etc.)
pub percentiles: Option<CostBasisPercentiles>,
@@ -35,29 +31,19 @@ impl CostBasisMetrics {
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let extended = cfg.extended();
let height_to_min_cost_basis =
EagerVec::forced_import(cfg.db, &cfg.name("min_cost_basis"), cfg.version)?;
let height_to_max_cost_basis =
EagerVec::forced_import(cfg.db, &cfg.name("max_cost_basis"), cfg.version)?;
Ok(Self {
indexes_to_min_cost_basis: DerivedComputedBlockLast::forced_import(
min: ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("min_cost_basis"),
height_to_min_cost_basis.boxed_clone(),
cfg.version,
cfg.indexes,
)?,
indexes_to_max_cost_basis: DerivedComputedBlockLast::forced_import(
max: ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("max_cost_basis"),
height_to_max_cost_basis.boxed_clone(),
cfg.version,
cfg.indexes,
)?,
height_to_min_cost_basis,
height_to_max_cost_basis,
percentiles: extended
.then(|| {
CostBasisPercentiles::forced_import(
@@ -74,9 +60,7 @@ impl CostBasisMetrics {
/// Get minimum length across height-indexed vectors written in block loop.
pub fn min_stateful_height_len(&self) -> usize {
self.height_to_min_cost_basis
.len()
.min(self.height_to_max_cost_basis.len())
self.min.height.len().min(self.max.height.len())
}
/// Get minimum length across dateindex-indexed vectors written in block loop.
@@ -89,14 +73,14 @@ impl CostBasisMetrics {
/// Push min/max cost basis from state.
pub fn truncate_push_minmax(&mut self, height: Height, state: &CohortState) -> Result<()> {
self.height_to_min_cost_basis.truncate_push(
self.min.height.truncate_push(
height,
state
.price_to_amount_first_key_value()
.map(|(dollars, _)| dollars)
.unwrap_or(Dollars::NAN),
)?;
self.height_to_max_cost_basis.truncate_push(
self.max.height.truncate_push(
height,
state
.price_to_amount_last_key_value()
@@ -122,8 +106,8 @@ impl CostBasisMetrics {
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.height_to_min_cost_basis.write()?;
self.height_to_max_cost_basis.write()?;
self.min.height.write()?;
self.max.height.write()?;
if let Some(percentiles) = self.percentiles.as_mut() {
percentiles.write()?;
}
@@ -132,10 +116,7 @@ impl CostBasisMetrics {
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![
&mut self.height_to_min_cost_basis,
&mut self.height_to_max_cost_basis,
];
let mut vecs: Vec<&mut dyn AnyStoredVec> = vec![&mut self.min.height, &mut self.max.height];
if let Some(percentiles) = self.percentiles.as_mut() {
vecs.extend(
percentiles
@@ -163,20 +144,14 @@ impl CostBasisMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_min_cost_basis.compute_min_of_others(
self.min.height.compute_min_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_min_cost_basis)
.collect::<Vec<_>>(),
&others.iter().map(|v| &v.min.height).collect::<Vec<_>>(),
exit,
)?;
self.height_to_max_cost_basis.compute_max_of_others(
self.max.height.compute_max_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_max_cost_basis)
.collect::<Vec<_>>(),
&others.iter().map(|v| &v.max.height).collect::<Vec<_>>(),
exit,
)?;
Ok(())
@@ -185,24 +160,12 @@ impl CostBasisMetrics {
/// First phase of computed metrics (indexes from height).
pub fn compute_rest_part1(
&mut self,
indexes: &crate::indexes::Vecs,
indexes: &indexes::Vecs,
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_min_cost_basis.derive_from(
indexes,
starting_indexes,
&self.height_to_min_cost_basis,
exit,
)?;
self.indexes_to_max_cost_basis.derive_from(
indexes,
starting_indexes,
&self.height_to_max_cost_basis,
exit,
)?;
self.min.compute_rest(indexes, starting_indexes, exit)?;
self.max.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
}
}

View File

@@ -1,6 +1,7 @@
mod activity;
mod config;
mod cost_basis;
mod outputs;
mod realized;
mod relative;
mod supply;
@@ -9,6 +10,7 @@ mod unrealized;
pub use activity::*;
pub use config::*;
pub use cost_basis::*;
pub use outputs::*;
pub use realized::*;
pub use relative::*;
pub use supply::*;
@@ -29,9 +31,12 @@ pub struct CohortMetrics {
#[traversable(skip)]
pub filter: Filter,
/// Supply and UTXO count (always computed)
/// Supply metrics (always computed)
pub supply: SupplyMetrics,
/// Output metrics - UTXO count (always computed)
pub outputs: OutputsMetrics,
/// Transaction activity (always computed)
pub activity: ActivityMetrics,
@@ -58,6 +63,7 @@ impl CohortMetrics {
let compute_dollars = cfg.compute_dollars();
let supply = SupplyMetrics::forced_import(cfg)?;
let outputs = OutputsMetrics::forced_import(cfg)?;
let unrealized = compute_dollars
.then(|| UnrealizedMetrics::forced_import(cfg))
@@ -71,6 +77,7 @@ impl CohortMetrics {
Ok(Self {
filter: cfg.filter.clone(),
supply,
outputs,
activity: ActivityMetrics::forced_import(cfg)?,
realized: compute_dollars
.then(|| RealizedMetrics::forced_import(cfg))
@@ -85,7 +92,7 @@ impl CohortMetrics {
/// Get minimum length across height-indexed vectors written in block loop.
pub fn min_stateful_height_len(&self) -> usize {
let mut min = self.supply.min_len().min(self.activity.min_len());
let mut min = self.supply.min_len().min(self.outputs.min_len()).min(self.activity.min_len());
if let Some(realized) = &self.realized {
min = min.min(realized.min_stateful_height_len());
@@ -116,7 +123,8 @@ impl CohortMetrics {
/// Push state values to height-indexed vectors.
pub fn truncate_push(&mut self, height: Height, state: &CohortState) -> Result<()> {
self.supply.truncate_push(height, &state.supply)?;
self.supply.truncate_push(height, state.supply.value)?;
self.outputs.truncate_push(height, state.supply.utxo_count)?;
self.activity.truncate_push(
height,
state.sent,
@@ -136,6 +144,7 @@ impl CohortMetrics {
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.supply.write()?;
self.outputs.write()?;
self.activity.write()?;
if let Some(realized) = self.realized.as_mut() {
@@ -158,6 +167,7 @@ impl CohortMetrics {
let mut vecs: Vec<&mut dyn AnyStoredVec> = Vec::new();
vecs.extend(self.supply.par_iter_mut().collect::<Vec<_>>());
vecs.extend(self.outputs.par_iter_mut().collect::<Vec<_>>());
vecs.extend(self.activity.par_iter_mut().collect::<Vec<_>>());
if let Some(realized) = self.realized.as_mut() {
@@ -242,6 +252,11 @@ impl CohortMetrics {
&others.iter().map(|v| &v.supply).collect::<Vec<_>>(),
exit,
)?;
self.outputs.compute_from_stateful(
starting_indexes,
&others.iter().map(|v| &v.outputs).collect::<Vec<_>>(),
exit,
)?;
self.activity.compute_from_stateful(
starting_indexes,
&others.iter().map(|v| &v.activity).collect::<Vec<_>>(),
@@ -294,6 +309,8 @@ impl CohortMetrics {
) -> Result<()> {
self.supply
.compute_rest_part1(indexes, price, starting_indexes, exit)?;
self.outputs
.compute_rest(indexes, starting_indexes, exit)?;
self.activity
.compute_rest_part1(indexes, starting_indexes, exit)?;
@@ -328,7 +345,7 @@ impl CohortMetrics {
indexes,
price,
starting_indexes,
&self.supply.height_to_supply_value.bitcoin,
&self.supply.supply.bitcoin.height,
height_to_market_cap,
dateindex_to_market_cap,
exit,

View File

@@ -0,0 +1,81 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, StoredU64};
use rayon::prelude::*;
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec};
use crate::{ComputeIndexes, indexes, internal::ComputedBlockLast};
use super::ImportConfig;
/// Output metrics for a cohort.
///
/// Split out of the former combined supply struct: this holds only the
/// UTXO-count side, pushed per block in the stateful loop and written to
/// disk alongside the other cohort metric groups.
#[derive(Clone, Traversable)]
pub struct OutputsMetrics {
    // Height-indexed UTXO count plus its derived indexes.
    // NOTE(review): "BlockLast" presumably means coarser indexes take the
    // last block's value rather than a sum — confirm against
    // ComputedBlockLast's aggregation semantics.
    pub utxo_count: ComputedBlockLast<StoredU64>,
}
impl OutputsMetrics {
    /// Import output metrics from database.
    ///
    /// Opens (or creates) the `utxo_count` vec under the cohort's
    /// configured name, version, and index set.
    pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
        let utxo_count = ComputedBlockLast::forced_import(
            cfg.db,
            &cfg.name("utxo_count"),
            cfg.version,
            cfg.indexes,
        )?;
        Ok(Self { utxo_count })
    }

    /// Get minimum length across height-indexed vectors.
    ///
    /// Only one height vec exists here, so this is simply its length.
    pub fn min_len(&self) -> usize {
        self.utxo_count.height.len()
    }

    /// Push utxo count to height-indexed vector.
    ///
    /// Called once per block from the stateful loop; truncates any stale
    /// tail before appending the value for `height`.
    pub fn truncate_push(&mut self, height: Height, utxo_count: u64) -> Result<()> {
        let value = StoredU64::from(utxo_count);
        self.utxo_count.height.truncate_push(height, value)?;
        Ok(())
    }

    /// Write height-indexed vectors to disk.
    pub fn write(&mut self) -> Result<()> {
        self.utxo_count.height.write()?;
        Ok(())
    }

    /// Returns a parallel iterator over all vecs for parallel writing.
    pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
        let vecs: Vec<&mut dyn AnyStoredVec> = vec![&mut self.utxo_count.height];
        vecs.into_par_iter()
    }

    /// Compute aggregate values from separate cohorts.
    ///
    /// The aggregate cohort's utxo count is the sum of the `others`'
    /// height vecs, recomputed from `starting_indexes.height` onward.
    pub fn compute_from_stateful(
        &mut self,
        starting_indexes: &ComputeIndexes,
        others: &[&Self],
        exit: &Exit,
    ) -> Result<()> {
        let sources: Vec<_> = others.iter().map(|other| &other.utxo_count.height).collect();
        self.utxo_count
            .height
            .compute_sum_of_others(starting_indexes.height, &sources, exit)?;
        Ok(())
    }

    /// Compute derived metrics (dateindex from height).
    pub fn compute_rest(
        &mut self,
        indexes: &indexes::Vecs,
        starting_indexes: &ComputeIndexes,
        exit: &Exit,
    ) -> Result<()> {
        self.utxo_count.compute_rest(indexes, starting_indexes, exit)
    }
}

View File

@@ -12,10 +12,9 @@ use crate::{
distribution::state::RealizedState,
indexes,
internal::{
BinaryBlockSum, BinaryBlockSumCumLast, ComputedBlockLast, ComputedBlockSum,
ComputedBlockSumCum, ComputedDateLast, ComputedRatioVecsDate, DerivedComputedBlockLast,
DerivedComputedBlockSum, DerivedComputedBlockSumCum, DollarsMinus, LazyBlockSum,
LazyBlockSumCum, LazyDateLast, PercentageDollarsF32, StoredF32Identity,
BinaryBlockSum, BinaryBlockSumCum, ComputedBlockLast, ComputedBlockSum,
ComputedBlockSumCum, ComputedDateLast, ComputedRatioVecsDate, DollarsMinus,
LazyBlockSum, LazyBlockSumCum, LazyDateLast, PercentageDollarsF32, StoredF32Identity,
},
price,
};
@@ -26,69 +25,57 @@ use super::ImportConfig;
#[derive(Clone, Traversable)]
pub struct RealizedMetrics {
// === Realized Cap ===
pub height_to_realized_cap: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_cap: DerivedComputedBlockLast<Dollars>,
pub indexes_to_realized_price: ComputedBlockLast<Dollars>,
pub indexes_to_realized_price_extra: ComputedRatioVecsDate,
pub indexes_to_realized_cap_rel_to_own_market_cap: Option<ComputedBlockLast<StoredF32>>,
pub indexes_to_realized_cap_30d_delta: ComputedDateLast<Dollars>,
pub realized_cap: ComputedBlockLast<Dollars>,
pub realized_price: ComputedBlockLast<Dollars>,
pub realized_price_extra: ComputedRatioVecsDate,
pub realized_cap_rel_to_own_market_cap: Option<ComputedBlockLast<StoredF32>>,
pub realized_cap_30d_delta: ComputedDateLast<Dollars>,
// === MVRV (Market Value to Realized Value) ===
// Proxy for indexes_to_realized_price_extra.ratio (close / realized_price = market_cap / realized_cap)
pub indexes_to_mvrv: LazyDateLast<StoredF32>,
// Proxy for realized_price_extra.ratio (close / realized_price = market_cap / realized_cap)
pub mvrv: LazyDateLast<StoredF32>,
// === Realized Profit/Loss ===
pub height_to_realized_profit: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_profit: DerivedComputedBlockSumCum<Dollars>,
pub height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_realized_loss: DerivedComputedBlockSumCum<Dollars>,
pub indexes_to_neg_realized_loss: LazyBlockSumCum<Dollars>,
pub indexes_to_net_realized_pnl: ComputedBlockSumCum<Dollars>,
pub indexes_to_realized_value: ComputedBlockSum<Dollars>,
pub realized_profit: ComputedBlockSumCum<Dollars>,
pub realized_loss: ComputedBlockSumCum<Dollars>,
pub neg_realized_loss: LazyBlockSumCum<Dollars>,
pub net_realized_pnl: ComputedBlockSumCum<Dollars>,
pub realized_value: ComputedBlockSum<Dollars>,
// === Realized vs Realized Cap Ratios (lazy) ===
pub indexes_to_realized_profit_rel_to_realized_cap:
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
pub indexes_to_realized_loss_rel_to_realized_cap:
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
pub indexes_to_net_realized_pnl_rel_to_realized_cap:
BinaryBlockSumCumLast<StoredF32, Dollars, Dollars>,
pub realized_profit_rel_to_realized_cap: BinaryBlockSumCum<StoredF32, Dollars, Dollars>,
pub realized_loss_rel_to_realized_cap: BinaryBlockSumCum<StoredF32, Dollars, Dollars>,
pub net_realized_pnl_rel_to_realized_cap: BinaryBlockSumCum<StoredF32, Dollars, Dollars>,
// === Total Realized PnL ===
pub indexes_to_total_realized_pnl: LazyBlockSum<Dollars>,
pub dateindex_to_realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub total_realized_pnl: LazyBlockSum<Dollars>,
pub realized_profit_to_loss_ratio: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
// === Value Created/Destroyed ===
pub height_to_value_created: EagerVec<PcoVec<Height, Dollars>>,
#[traversable(rename = "value_created_sum")]
pub indexes_to_value_created: DerivedComputedBlockSum<Dollars>,
pub height_to_value_destroyed: EagerVec<PcoVec<Height, Dollars>>,
#[traversable(rename = "value_destroyed_sum")]
pub indexes_to_value_destroyed: DerivedComputedBlockSum<Dollars>,
pub value_created: ComputedBlockSum<Dollars>,
pub value_destroyed: ComputedBlockSum<Dollars>,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
pub indexes_to_adjusted_value_created: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
pub indexes_to_adjusted_value_destroyed: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
pub adjusted_value_created: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
pub adjusted_value_destroyed: Option<BinaryBlockSum<Dollars, Dollars, Dollars>>,
// === SOPR (Spent Output Profit Ratio) ===
pub dateindex_to_sopr: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub dateindex_to_sopr_7d_ema: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub dateindex_to_sopr_30d_ema: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub dateindex_to_adjusted_sopr: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub dateindex_to_adjusted_sopr_7d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub dateindex_to_adjusted_sopr_30d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub sopr: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub sopr_7d_ema: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub sopr_30d_ema: EagerVec<PcoVec<DateIndex, StoredF64>>,
pub adjusted_sopr: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub adjusted_sopr_7d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
pub adjusted_sopr_30d_ema: Option<EagerVec<PcoVec<DateIndex, StoredF64>>>,
// === Sell Side Risk ===
pub dateindex_to_sell_side_risk_ratio: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub dateindex_to_sell_side_risk_ratio_7d_ema: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub dateindex_to_sell_side_risk_ratio_30d_ema: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub sell_side_risk_ratio: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub sell_side_risk_ratio_7d_ema: EagerVec<PcoVec<DateIndex, StoredF32>>,
pub sell_side_risk_ratio_30d_ema: EagerVec<PcoVec<DateIndex, StoredF32>>,
// === Net Realized PnL Deltas ===
pub indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast<Dollars>,
pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
ComputedDateLast<StoredF32>,
pub indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
ComputedDateLast<StoredF32>,
pub net_realized_pnl_cumulative_30d_delta: ComputedDateLast<Dollars>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap: ComputedDateLast<StoredF32>,
pub net_realized_pnl_cumulative_30d_delta_rel_to_market_cap: ComputedDateLast<StoredF32>,
}
impl RealizedMetrics {
@@ -99,26 +86,44 @@ impl RealizedMetrics {
let extended = cfg.extended();
let compute_adjusted = cfg.compute_adjusted();
let height_to_realized_loss: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_loss"), cfg.version)?;
let indexes_to_realized_loss = DerivedComputedBlockSumCum::forced_import(
// Import combined types using forced_import which handles height + derived
let realized_cap = ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_loss"),
height_to_realized_loss.boxed_clone(),
&cfg.name("realized_cap"),
cfg.version,
cfg.indexes,
)?;
let indexes_to_neg_realized_loss = LazyBlockSumCum::from_derived::<Negate>(
let realized_profit = ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("realized_profit"),
cfg.version,
cfg.indexes,
)?;
let realized_loss = ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("realized_loss"),
cfg.version,
cfg.indexes,
)?;
let neg_realized_loss = LazyBlockSumCum::from_computed::<Negate>(
&cfg.name("neg_realized_loss"),
cfg.version + v1,
height_to_realized_loss.boxed_clone(),
&indexes_to_realized_loss,
realized_loss.height.boxed_clone(),
&realized_loss,
);
let net_realized_pnl = ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
cfg.version,
cfg.indexes,
)?;
// realized_value is the source for total_realized_pnl (they're identical)
let indexes_to_realized_value = ComputedBlockSum::forced_import(
let realized_value = ComputedBlockSum::forced_import(
cfg.db,
&cfg.name("realized_value"),
cfg.version,
@@ -126,132 +131,92 @@ impl RealizedMetrics {
)?;
// total_realized_pnl is a lazy alias to realized_value
let indexes_to_total_realized_pnl = LazyBlockSum::from_computed::<Ident>(
let total_realized_pnl = LazyBlockSum::from_computed::<Ident>(
&cfg.name("total_realized_pnl"),
cfg.version + v1,
indexes_to_realized_value.height.boxed_clone(),
&indexes_to_realized_value,
realized_value.height.boxed_clone(),
&realized_value,
);
// Extract vecs needed for lazy ratio construction
let height_to_realized_cap: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_cap"), cfg.version)?;
let indexes_to_realized_cap = DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_cap"),
height_to_realized_cap.boxed_clone(),
cfg.version,
cfg.indexes,
)?;
let height_to_realized_profit: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("realized_profit"), cfg.version)?;
let indexes_to_realized_profit = DerivedComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("realized_profit"),
height_to_realized_profit.boxed_clone(),
cfg.version,
cfg.indexes,
)?;
let indexes_to_net_realized_pnl = ComputedBlockSumCum::forced_import(
cfg.db,
&cfg.name("net_realized_pnl"),
cfg.version,
cfg.indexes,
)?;
// Construct lazy ratio vecs (before struct assignment to satisfy borrow checker)
let indexes_to_realized_profit_rel_to_realized_cap =
BinaryBlockSumCumLast::from_derived::<PercentageDollarsF32>(
// Construct lazy ratio vecs
let realized_profit_rel_to_realized_cap =
BinaryBlockSumCum::from_computed_last::<PercentageDollarsF32>(
&cfg.name("realized_profit_rel_to_realized_cap"),
cfg.version + v1,
height_to_realized_profit.boxed_clone(),
height_to_realized_cap.boxed_clone(),
&indexes_to_realized_profit,
&indexes_to_realized_cap,
realized_profit.height.boxed_clone(),
realized_cap.height.boxed_clone(),
&realized_profit,
&realized_cap,
);
let indexes_to_realized_loss_rel_to_realized_cap =
BinaryBlockSumCumLast::from_derived::<PercentageDollarsF32>(
let realized_loss_rel_to_realized_cap =
BinaryBlockSumCum::from_computed_last::<PercentageDollarsF32>(
&cfg.name("realized_loss_rel_to_realized_cap"),
cfg.version + v1,
height_to_realized_loss.boxed_clone(),
height_to_realized_cap.boxed_clone(),
&indexes_to_realized_loss,
&indexes_to_realized_cap,
realized_loss.height.boxed_clone(),
realized_cap.height.boxed_clone(),
&realized_loss,
&realized_cap,
);
let indexes_to_net_realized_pnl_rel_to_realized_cap =
BinaryBlockSumCumLast::from_computed_derived::<PercentageDollarsF32>(
let net_realized_pnl_rel_to_realized_cap =
BinaryBlockSumCum::from_computed_last::<PercentageDollarsF32>(
&cfg.name("net_realized_pnl_rel_to_realized_cap"),
cfg.version + v1,
indexes_to_net_realized_pnl.height.boxed_clone(),
height_to_realized_cap.boxed_clone(),
&indexes_to_net_realized_pnl,
&indexes_to_realized_cap,
net_realized_pnl.height.boxed_clone(),
realized_cap.height.boxed_clone(),
&net_realized_pnl,
&realized_cap,
);
let indexes_to_realized_price = ComputedBlockLast::forced_import(
let realized_price = ComputedBlockLast::forced_import(
cfg.db,
&cfg.name("realized_price"),
cfg.version + v1,
cfg.indexes,
)?;
let height_to_value_created =
EagerVec::forced_import(cfg.db, &cfg.name("value_created"), cfg.version)?;
let height_to_value_destroyed =
EagerVec::forced_import(cfg.db, &cfg.name("value_destroyed"), cfg.version)?;
let indexes_to_value_created = DerivedComputedBlockSum::forced_import(
let value_created = ComputedBlockSum::forced_import(
cfg.db,
&cfg.name("value_created"),
height_to_value_created.boxed_clone(),
cfg.version,
cfg.indexes,
)?;
let indexes_to_value_destroyed = DerivedComputedBlockSum::forced_import(
let value_destroyed = ComputedBlockSum::forced_import(
cfg.db,
&cfg.name("value_destroyed"),
height_to_value_destroyed.boxed_clone(),
cfg.version,
cfg.indexes,
)?;
// Create lazy adjusted vecs if compute_adjusted and up_to_1h is available
let indexes_to_adjusted_value_created =
let adjusted_value_created =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
BinaryBlockSum::from_derived::<DollarsMinus>(
BinaryBlockSum::from_computed::<DollarsMinus>(
&cfg.name("adjusted_value_created"),
cfg.version,
height_to_value_created.boxed_clone(),
up_to_1h.height_to_value_created.boxed_clone(),
&indexes_to_value_created,
&up_to_1h.indexes_to_value_created,
&value_created,
&up_to_1h.value_created,
)
});
let indexes_to_adjusted_value_destroyed =
let adjusted_value_destroyed =
(compute_adjusted && cfg.up_to_1h_realized.is_some()).then(|| {
let up_to_1h = cfg.up_to_1h_realized.unwrap();
BinaryBlockSum::from_derived::<DollarsMinus>(
BinaryBlockSum::from_computed::<DollarsMinus>(
&cfg.name("adjusted_value_destroyed"),
cfg.version,
height_to_value_destroyed.boxed_clone(),
up_to_1h.height_to_value_destroyed.boxed_clone(),
&indexes_to_value_destroyed,
&up_to_1h.indexes_to_value_destroyed,
&value_destroyed,
&up_to_1h.value_destroyed,
)
});
// Create realized_price_extra first so we can reference its ratio for MVRV proxy
let indexes_to_realized_price_extra = ComputedRatioVecsDate::forced_import(
let realized_price_extra = ComputedRatioVecsDate::forced_import(
cfg.db,
&cfg.name("realized_price"),
Some(&indexes_to_realized_price),
Some(&realized_price),
cfg.version + v1,
cfg.indexes,
extended,
@@ -260,21 +225,18 @@ impl RealizedMetrics {
// MVRV is a lazy proxy for realized_price_extra.ratio
// ratio = close / realized_price = market_cap / realized_cap = MVRV
let indexes_to_mvrv = LazyDateLast::from_source::<StoredF32Identity>(
let mvrv = LazyDateLast::from_source::<StoredF32Identity>(
&cfg.name("mvrv"),
cfg.version,
&indexes_to_realized_price_extra.ratio,
&realized_price_extra.ratio,
);
Ok(Self {
// === Realized Cap ===
height_to_realized_cap,
indexes_to_realized_cap,
indexes_to_realized_price_extra,
indexes_to_realized_price,
indexes_to_mvrv,
indexes_to_realized_cap_rel_to_own_market_cap: extended
realized_cap,
realized_price,
realized_price_extra,
realized_cap_rel_to_own_market_cap: extended
.then(|| {
ComputedBlockLast::forced_import(
cfg.db,
@@ -284,30 +246,31 @@ impl RealizedMetrics {
)
})
.transpose()?,
indexes_to_realized_cap_30d_delta: ComputedDateLast::forced_import(
realized_cap_30d_delta: ComputedDateLast::forced_import(
cfg.db,
&cfg.name("realized_cap_30d_delta"),
cfg.version,
cfg.indexes,
)?,
// === MVRV ===
mvrv,
// === Realized Profit/Loss ===
height_to_realized_profit,
indexes_to_realized_profit,
height_to_realized_loss,
indexes_to_realized_loss,
indexes_to_neg_realized_loss,
indexes_to_net_realized_pnl,
indexes_to_realized_value,
realized_profit,
realized_loss,
neg_realized_loss,
net_realized_pnl,
realized_value,
// === Realized vs Realized Cap Ratios (lazy) ===
indexes_to_realized_profit_rel_to_realized_cap,
indexes_to_realized_loss_rel_to_realized_cap,
indexes_to_net_realized_pnl_rel_to_realized_cap,
realized_profit_rel_to_realized_cap,
realized_loss_rel_to_realized_cap,
net_realized_pnl_rel_to_realized_cap,
// === Total Realized PnL ===
indexes_to_total_realized_pnl,
dateindex_to_realized_profit_to_loss_ratio: extended
total_realized_pnl,
realized_profit_to_loss_ratio: extended
.then(|| {
EagerVec::forced_import(
cfg.db,
@@ -318,37 +281,27 @@ impl RealizedMetrics {
.transpose()?,
// === Value Created/Destroyed ===
height_to_value_created,
indexes_to_value_created,
height_to_value_destroyed,
indexes_to_value_destroyed,
value_created,
value_destroyed,
// === Adjusted Value (lazy: cohort - up_to_1h) ===
indexes_to_adjusted_value_created,
indexes_to_adjusted_value_destroyed,
adjusted_value_created,
adjusted_value_destroyed,
// === SOPR ===
dateindex_to_sopr: EagerVec::forced_import(
cfg.db,
&cfg.name("sopr"),
cfg.version + v1,
)?,
dateindex_to_sopr_7d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sopr_7d_ema"),
cfg.version + v1,
)?,
dateindex_to_sopr_30d_ema: EagerVec::forced_import(
sopr: EagerVec::forced_import(cfg.db, &cfg.name("sopr"), cfg.version + v1)?,
sopr_7d_ema: EagerVec::forced_import(cfg.db, &cfg.name("sopr_7d_ema"), cfg.version + v1)?,
sopr_30d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sopr_30d_ema"),
cfg.version + v1,
)?,
dateindex_to_adjusted_sopr: compute_adjusted
adjusted_sopr: compute_adjusted
.then(|| {
EagerVec::forced_import(cfg.db, &cfg.name("adjusted_sopr"), cfg.version + v1)
})
.transpose()?,
dateindex_to_adjusted_sopr_7d_ema: compute_adjusted
adjusted_sopr_7d_ema: compute_adjusted
.then(|| {
EagerVec::forced_import(
cfg.db,
@@ -357,7 +310,7 @@ impl RealizedMetrics {
)
})
.transpose()?,
dateindex_to_adjusted_sopr_30d_ema: compute_adjusted
adjusted_sopr_30d_ema: compute_adjusted
.then(|| {
EagerVec::forced_import(
cfg.db,
@@ -368,37 +321,37 @@ impl RealizedMetrics {
.transpose()?,
// === Sell Side Risk ===
dateindex_to_sell_side_risk_ratio: EagerVec::forced_import(
sell_side_risk_ratio: EagerVec::forced_import(
cfg.db,
&cfg.name("sell_side_risk_ratio"),
cfg.version + v1,
)?,
dateindex_to_sell_side_risk_ratio_7d_ema: EagerVec::forced_import(
sell_side_risk_ratio_7d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sell_side_risk_ratio_7d_ema"),
cfg.version + v1,
)?,
dateindex_to_sell_side_risk_ratio_30d_ema: EagerVec::forced_import(
sell_side_risk_ratio_30d_ema: EagerVec::forced_import(
cfg.db,
&cfg.name("sell_side_risk_ratio_30d_ema"),
cfg.version + v1,
)?,
// === Net Realized PnL Deltas ===
indexes_to_net_realized_pnl_cumulative_30d_delta: ComputedDateLast::forced_import(
net_realized_pnl_cumulative_30d_delta: ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta"),
cfg.version + v3,
cfg.indexes,
)?,
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap:
ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap"),
cfg.version + v3,
cfg.indexes,
)?,
indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
net_realized_pnl_cumulative_30d_delta_rel_to_market_cap:
ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_realized_pnl_cumulative_30d_delta_rel_to_market_cap"),
@@ -410,25 +363,29 @@ impl RealizedMetrics {
/// Get minimum length across height-indexed vectors written in block loop.
pub fn min_stateful_height_len(&self) -> usize {
self.height_to_realized_cap
self.realized_cap
.height
.len()
.min(self.height_to_realized_profit.len())
.min(self.height_to_realized_loss.len())
.min(self.height_to_value_created.len())
.min(self.height_to_value_destroyed.len())
.min(self.realized_profit.height.len())
.min(self.realized_loss.height.len())
.min(self.value_created.height.len())
.min(self.value_destroyed.height.len())
}
/// Push realized state values to height-indexed vectors.
pub fn truncate_push(&mut self, height: Height, state: &RealizedState) -> Result<()> {
self.height_to_realized_cap
.truncate_push(height, state.cap)?;
self.height_to_realized_profit
self.realized_cap.height.truncate_push(height, state.cap)?;
self.realized_profit
.height
.truncate_push(height, state.profit)?;
self.height_to_realized_loss
self.realized_loss
.height
.truncate_push(height, state.loss)?;
self.height_to_value_created
self.value_created
.height
.truncate_push(height, state.value_created)?;
self.height_to_value_destroyed
self.value_destroyed
.height
.truncate_push(height, state.value_destroyed)?;
Ok(())
@@ -436,22 +393,22 @@ impl RealizedMetrics {
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.height_to_realized_cap.write()?;
self.height_to_realized_profit.write()?;
self.height_to_realized_loss.write()?;
self.height_to_value_created.write()?;
self.height_to_value_destroyed.write()?;
self.realized_cap.height.write()?;
self.realized_profit.height.write()?;
self.realized_loss.height.write()?;
self.value_created.height.write()?;
self.value_destroyed.height.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
[
&mut self.height_to_realized_cap as &mut dyn AnyStoredVec,
&mut self.height_to_realized_profit,
&mut self.height_to_realized_loss,
&mut self.height_to_value_created,
&mut self.height_to_value_destroyed,
&mut self.realized_cap.height as &mut dyn AnyStoredVec,
&mut self.realized_profit.height,
&mut self.realized_loss.height,
&mut self.value_created.height,
&mut self.value_destroyed.height,
]
.into_par_iter()
}
@@ -469,43 +426,43 @@ impl RealizedMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_realized_cap.compute_sum_of_others(
self.realized_cap.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_realized_cap)
.map(|v| &v.realized_cap.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_realized_profit.compute_sum_of_others(
self.realized_profit.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_realized_profit)
.map(|v| &v.realized_profit.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_realized_loss.compute_sum_of_others(
self.realized_loss.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_realized_loss)
.map(|v| &v.realized_loss.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_value_created.compute_sum_of_others(
self.value_created.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_value_created)
.map(|v| &v.value_created.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_value_destroyed.compute_sum_of_others(
self.value_destroyed.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_value_destroyed)
.map(|v| &v.value_destroyed.height)
.collect::<Vec<_>>(),
exit,
)?;
@@ -520,34 +477,17 @@ impl RealizedMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_realized_cap.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_cap,
exit,
)?;
self.indexes_to_realized_profit.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_profit,
exit,
)?;
self.indexes_to_realized_loss.derive_from(
indexes,
starting_indexes,
&self.height_to_realized_loss,
exit,
)?;
self.realized_cap.compute_rest(indexes, starting_indexes, exit)?;
self.realized_profit.compute_rest(indexes, starting_indexes, exit)?;
self.realized_loss.compute_rest(indexes, starting_indexes, exit)?;
// net_realized_pnl = profit - loss
self.indexes_to_net_realized_pnl
self.net_realized_pnl
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_subtract(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
&self.realized_profit.height,
&self.realized_loss.height,
exit,
)?;
Ok(())
@@ -556,30 +496,19 @@ impl RealizedMetrics {
// realized_value = profit + loss
// Note: total_realized_pnl is a lazy alias to realized_value since both
// compute profit + loss with sum aggregation, making them identical.
self.indexes_to_realized_value
self.realized_value
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.height,
&self.height_to_realized_profit,
&self.height_to_realized_loss,
&self.realized_profit.height,
&self.realized_loss.height,
exit,
)?;
Ok(())
})?;
self.indexes_to_value_created.derive_from(
indexes,
starting_indexes,
&self.height_to_value_created,
exit,
)?;
self.indexes_to_value_destroyed.derive_from(
indexes,
starting_indexes,
&self.height_to_value_destroyed,
exit,
)?;
self.value_created.compute_rest(indexes, starting_indexes, exit)?;
self.value_destroyed.compute_rest(indexes, starting_indexes, exit)?;
Ok(())
}
@@ -597,11 +526,11 @@ impl RealizedMetrics {
exit: &Exit,
) -> Result<()> {
// realized_price = realized_cap / supply
self.indexes_to_realized_price
self.realized_price
.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_divide(
starting_indexes.height,
&self.height_to_realized_cap,
&self.realized_cap.height,
height_to_supply,
exit,
)?;
@@ -609,20 +538,20 @@ impl RealizedMetrics {
})?;
if let Some(price) = price {
self.indexes_to_realized_price_extra.compute_rest(
self.realized_price_extra.compute_rest(
price,
starting_indexes,
exit,
Some(&self.indexes_to_realized_price.dateindex.0),
Some(&self.realized_price.dateindex.0),
)?;
}
// realized_cap_30d_delta
self.indexes_to_realized_cap_30d_delta
self.realized_cap_30d_delta
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
&self.indexes_to_realized_cap.dateindex.0,
&self.realized_cap.dateindex.0,
30,
exit,
)?;
@@ -630,32 +559,24 @@ impl RealizedMetrics {
})?;
// SOPR = value_created / value_destroyed
self.dateindex_to_sopr.compute_divide(
self.sopr.compute_divide(
starting_indexes.dateindex,
&self.indexes_to_value_created.dateindex.0,
&self.indexes_to_value_destroyed.dateindex.0,
&self.value_created.dateindex.0,
&self.value_destroyed.dateindex.0,
exit,
)?;
self.dateindex_to_sopr_7d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sopr,
7,
exit,
)?;
self.sopr_7d_ema
.compute_ema(starting_indexes.dateindex, &self.sopr, 7, exit)?;
self.dateindex_to_sopr_30d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sopr,
30,
exit,
)?;
self.sopr_30d_ema
.compute_ema(starting_indexes.dateindex, &self.sopr, 30, exit)?;
// Optional: adjusted SOPR (lazy: cohort - up_to_1h)
if let (Some(adjusted_sopr), Some(adj_created), Some(adj_destroyed)) = (
self.dateindex_to_adjusted_sopr.as_mut(),
self.indexes_to_adjusted_value_created.as_ref(),
self.indexes_to_adjusted_value_destroyed.as_ref(),
self.adjusted_sopr.as_mut(),
self.adjusted_value_created.as_ref(),
self.adjusted_value_destroyed.as_ref(),
) {
adjusted_sopr.compute_divide(
starting_indexes.dateindex,
@@ -664,19 +585,19 @@ impl RealizedMetrics {
exit,
)?;
if let Some(ema_7d) = self.dateindex_to_adjusted_sopr_7d_ema.as_mut() {
if let Some(ema_7d) = self.adjusted_sopr_7d_ema.as_mut() {
ema_7d.compute_ema(
starting_indexes.dateindex,
self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
self.adjusted_sopr.as_ref().unwrap(),
7,
exit,
)?;
}
if let Some(ema_30d) = self.dateindex_to_adjusted_sopr_30d_ema.as_mut() {
if let Some(ema_30d) = self.adjusted_sopr_30d_ema.as_mut() {
ema_30d.compute_ema(
starting_indexes.dateindex,
self.dateindex_to_adjusted_sopr.as_ref().unwrap(),
self.adjusted_sopr.as_ref().unwrap(),
30,
exit,
)?;
@@ -684,33 +605,29 @@ impl RealizedMetrics {
}
// sell_side_risk_ratio = realized_value / realized_cap
self.dateindex_to_sell_side_risk_ratio.compute_percentage(
self.sell_side_risk_ratio.compute_percentage(
starting_indexes.dateindex,
&self.indexes_to_realized_value.dateindex.0,
&self.indexes_to_realized_cap.dateindex.0,
&self.realized_value.dateindex.0,
&self.realized_cap.dateindex.0,
exit,
)?;
self.dateindex_to_sell_side_risk_ratio_7d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sell_side_risk_ratio,
7,
exit,
)?;
self.sell_side_risk_ratio_7d_ema
.compute_ema(starting_indexes.dateindex, &self.sell_side_risk_ratio, 7, exit)?;
self.dateindex_to_sell_side_risk_ratio_30d_ema.compute_ema(
self.sell_side_risk_ratio_30d_ema.compute_ema(
starting_indexes.dateindex,
&self.dateindex_to_sell_side_risk_ratio,
&self.sell_side_risk_ratio,
30,
exit,
)?;
// Net realized PnL cumulative 30d delta
self.indexes_to_net_realized_pnl_cumulative_30d_delta
self.net_realized_pnl_cumulative_30d_delta
.compute_all(starting_indexes, exit, |vec| {
vec.compute_change(
starting_indexes.dateindex,
&self.indexes_to_net_realized_pnl.dateindex.cumulative.0,
&self.net_realized_pnl.dateindex.cumulative.0,
30,
exit,
)?;
@@ -718,14 +635,12 @@ impl RealizedMetrics {
})?;
// Relative to realized cap
self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap
self.net_realized_pnl_cumulative_30d_delta_rel_to_realized_cap
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&self
.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex,
&self.indexes_to_realized_cap.dateindex.0,
&self.net_realized_pnl_cumulative_30d_delta.dateindex,
&self.realized_cap.dateindex.0,
exit,
)?;
Ok(())
@@ -733,13 +648,11 @@ impl RealizedMetrics {
// Relative to market cap
if let Some(dateindex_to_market_cap) = dateindex_to_market_cap {
self.indexes_to_net_realized_pnl_cumulative_30d_delta_rel_to_market_cap
self.net_realized_pnl_cumulative_30d_delta_rel_to_market_cap
.compute_all(starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.dateindex,
&self
.indexes_to_net_realized_pnl_cumulative_30d_delta
.dateindex,
&self.net_realized_pnl_cumulative_30d_delta.dateindex,
dateindex_to_market_cap,
exit,
)?;
@@ -749,13 +662,13 @@ impl RealizedMetrics {
// Optional: realized_cap_rel_to_own_market_cap
if let (Some(rel_vec), Some(height_to_market_cap)) = (
self.indexes_to_realized_cap_rel_to_own_market_cap.as_mut(),
self.realized_cap_rel_to_own_market_cap.as_mut(),
height_to_market_cap,
) {
rel_vec.compute_all(indexes, starting_indexes, exit, |vec| {
vec.compute_percentage(
starting_indexes.height,
&self.height_to_realized_cap,
&self.realized_cap.height,
height_to_market_cap,
exit,
)?;
@@ -764,11 +677,11 @@ impl RealizedMetrics {
}
// Optional: realized_profit_to_loss_ratio
if let Some(ratio) = self.dateindex_to_realized_profit_to_loss_ratio.as_mut() {
if let Some(ratio) = self.realized_profit_to_loss_ratio.as_mut() {
ratio.compute_divide(
starting_indexes.dateindex,
&self.indexes_to_realized_profit.dateindex.sum.0,
&self.indexes_to_realized_loss.dateindex.sum.0,
&self.realized_profit.dateindex.sum.0,
&self.realized_loss.dateindex.sum.0,
exit,
)?;
}

View File

@@ -1,10 +1,10 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Bitcoin, Dollars, Height, Sats, StoredF32, StoredF64, Version};
use vecdb::{IterableCloneableVec, LazyVecFrom2};
use brk_types::{Dollars, Sats, StoredF32, StoredF64, Version};
use vecdb::IterableCloneableVec;
use crate::internal::{
BinaryDateLast, NegPercentageDollarsF32, NegRatio32, PercentageBtcF64,
LazyBinaryBlockLast, LazyBinaryDateLast, NegPercentageDollarsF32, NegRatio32,
PercentageDollarsF32, PercentageSatsF64, Ratio32,
};
@@ -15,94 +15,49 @@ use super::{ImportConfig, SupplyMetrics, UnrealizedMetrics};
#[derive(Clone, Traversable)]
pub struct RelativeMetrics {
// === Supply Relative to Circulating Supply (lazy from global supply) ===
// KISS: both sources are ComputedVecsDateLast<Sats>
pub indexes_to_supply_rel_to_circulating_supply:
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
pub supply_rel_to_circulating_supply: Option<LazyBinaryDateLast<StoredF64, Sats, Sats>>,
// === Supply in Profit/Loss Relative to Own Supply (lazy) ===
pub height_to_supply_in_profit_rel_to_own_supply:
LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>,
pub height_to_supply_in_loss_rel_to_own_supply:
LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>,
// KISS: both unrealized and supply are now KISS types
pub indexes_to_supply_in_profit_rel_to_own_supply:
BinaryDateLast<StoredF64, Sats, Sats>,
pub indexes_to_supply_in_loss_rel_to_own_supply:
BinaryDateLast<StoredF64, Sats, Sats>,
pub supply_in_profit_rel_to_own_supply: LazyBinaryBlockLast<StoredF64, Sats, Sats>,
pub supply_in_loss_rel_to_own_supply: LazyBinaryBlockLast<StoredF64, Sats, Sats>,
// === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) ===
pub height_to_supply_in_profit_rel_to_circulating_supply:
Option<LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>>,
pub height_to_supply_in_loss_rel_to_circulating_supply:
Option<LazyVecFrom2<Height, StoredF64, Height, Bitcoin, Height, Bitcoin>>,
// KISS: both unrealized and global_supply are now KISS types
pub indexes_to_supply_in_profit_rel_to_circulating_supply:
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
pub indexes_to_supply_in_loss_rel_to_circulating_supply:
Option<BinaryDateLast<StoredF64, Sats, Sats>>,
pub supply_in_profit_rel_to_circulating_supply:
Option<LazyBinaryBlockLast<StoredF64, Sats, Sats>>,
pub supply_in_loss_rel_to_circulating_supply:
Option<LazyBinaryBlockLast<StoredF64, Sats, Sats>>,
// === Unrealized vs Market Cap (lazy from global market cap) ===
pub height_to_unrealized_profit_rel_to_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_unrealized_loss_rel_to_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_neg_unrealized_loss_rel_to_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + ComputedVecsDateLast
pub indexes_to_unrealized_profit_rel_to_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// KISS: both ComputedVecsDateLast
pub indexes_to_net_unrealized_pnl_rel_to_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub unrealized_profit_rel_to_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub unrealized_loss_rel_to_market_cap: Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub neg_unrealized_loss_rel_to_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub net_unrealized_pnl_rel_to_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
// === NUPL (Net Unrealized Profit/Loss) ===
// KISS: both ComputedVecsDateLast
pub indexes_to_nupl: Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub nupl: Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
// === Unrealized vs Own Market Cap (lazy) ===
pub height_to_unrealized_profit_rel_to_own_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_unrealized_loss_rel_to_own_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_neg_unrealized_loss_rel_to_own_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + ComputedVecsDateLast
pub indexes_to_unrealized_profit_rel_to_own_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_own_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
// KISS: both ComputedVecsDateLast
pub indexes_to_net_unrealized_pnl_rel_to_own_market_cap:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub unrealized_profit_rel_to_own_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub unrealized_loss_rel_to_own_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub neg_unrealized_loss_rel_to_own_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub net_unrealized_pnl_rel_to_own_market_cap:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
// === Unrealized vs Own Total Unrealized PnL (lazy) ===
pub height_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
pub height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<LazyVecFrom2<Height, StoredF32, Height, Dollars, Height, Dollars>>,
// KISS: DerivedDateLast + DerivedDateLast
pub indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<BinaryDateLast<StoredF32, Dollars, Dollars>>,
pub unrealized_profit_rel_to_own_total_unrealized_pnl:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub neg_unrealized_loss_rel_to_own_total_unrealized_pnl:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
pub net_unrealized_pnl_rel_to_own_total_unrealized_pnl:
Option<LazyBinaryBlockLast<StoredF32, Dollars, Dollars>>,
}
impl RelativeMetrics {
@@ -122,300 +77,222 @@ impl RelativeMetrics {
let compute_rel_to_all = cfg.compute_rel_to_all();
// Global sources from "all" cohort
let global_supply_sats = all_supply.map(|s| &s.indexes_to_supply.sats);
let global_supply_sats_dateindex = all_supply.map(|s| &s.indexes_to_supply.sats_dateindex);
let global_supply_btc = all_supply.map(|s| &s.height_to_supply_value.bitcoin);
let global_market_cap = all_supply.and_then(|s| s.indexes_to_supply.dollars.as_ref());
let global_market_cap_height =
all_supply.and_then(|s| s.height_to_supply_value.dollars.as_ref());
let global_supply_sats_height = all_supply.map(|s| &s.supply.sats.height);
let global_supply_sats_difficultyepoch = all_supply.map(|s| &s.supply.sats.difficultyepoch);
let global_supply_sats_dates = all_supply.map(|s| &s.supply.sats.rest.dates);
let global_supply_sats_dateindex = all_supply.map(|s| &s.supply.sats.rest.dateindex);
let global_market_cap = all_supply.and_then(|s| s.supply.dollars.as_ref());
// Own market cap source
let own_market_cap = supply.indexes_to_supply.dollars.as_ref();
let own_market_cap_height = supply.height_to_supply_value.dollars.as_ref();
let own_market_cap = supply.supply.dollars.as_ref();
Ok(Self {
// === Supply Relative to Circulating Supply (lazy from global supply) ===
indexes_to_supply_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
supply_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats_dates.is_some())
.then(|| {
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
LazyBinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_rel_to_circulating_supply"),
cfg.version + v1,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
supply.supply.sats.rest.dateindex.boxed_clone(),
&supply.supply.sats.rest.dates,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
global_supply_sats_dates.unwrap(),
)
}),
// === Supply in Profit/Loss Relative to Own Supply (lazy) ===
height_to_supply_in_profit_rel_to_own_supply: LazyVecFrom2::transformed::<
PercentageBtcF64,
>(
supply_in_profit_rel_to_own_supply:
LazyBinaryBlockLast::from_height_difficultyepoch_dates::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_own_supply"),
cfg.version + v1,
unrealized.supply_in_profit.height.boxed_clone(),
supply.supply.sats.height.boxed_clone(),
unrealized.supply_in_profit.difficultyepoch.boxed_clone(),
supply.supply.sats.difficultyepoch.boxed_clone(),
unrealized
.height_to_supply_in_profit_value
.bitcoin
.supply_in_profit
.indexes
.sats_dateindex
.boxed_clone(),
supply.height_to_supply_value.bitcoin.boxed_clone(),
&unrealized.supply_in_profit.indexes.sats,
supply.supply.sats.rest.dateindex.boxed_clone(),
&supply.supply.sats.rest.dates,
),
height_to_supply_in_loss_rel_to_own_supply: LazyVecFrom2::transformed::<PercentageBtcF64>(
supply_in_loss_rel_to_own_supply:
LazyBinaryBlockLast::from_height_difficultyepoch_dates::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_own_supply"),
cfg.version + v1,
unrealized.supply_in_loss.height.boxed_clone(),
supply.supply.sats.height.boxed_clone(),
unrealized.supply_in_loss.difficultyepoch.boxed_clone(),
supply.supply.sats.difficultyepoch.boxed_clone(),
unrealized
.height_to_supply_in_loss_value
.bitcoin
.supply_in_loss
.indexes
.sats_dateindex
.boxed_clone(),
supply.height_to_supply_value.bitcoin.boxed_clone(),
),
indexes_to_supply_in_profit_rel_to_own_supply:
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_own_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_profit.boxed_clone(),
&unrealized.indexes_to_supply_in_profit.sats,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
),
indexes_to_supply_in_loss_rel_to_own_supply:
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_own_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_loss.boxed_clone(),
&unrealized.indexes_to_supply_in_loss.sats,
supply.indexes_to_supply.sats_dateindex.boxed_clone(),
&supply.indexes_to_supply.sats,
&unrealized.supply_in_loss.indexes.sats,
supply.supply.sats.rest.dateindex.boxed_clone(),
&supply.supply.sats.rest.dates,
),
// === Supply in Profit/Loss Relative to Circulating Supply (lazy from global supply) ===
height_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_btc.is_some())
supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats_height.is_some())
.then(|| {
LazyVecFrom2::transformed::<PercentageBtcF64>(
LazyBinaryBlockLast::from_height_difficultyepoch_dates::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.supply_in_profit.height.boxed_clone(),
global_supply_sats_height.unwrap().boxed_clone(),
unrealized.supply_in_profit.difficultyepoch.boxed_clone(),
global_supply_sats_difficultyepoch.unwrap().boxed_clone(),
unrealized
.height_to_supply_in_profit_value
.bitcoin
.supply_in_profit
.indexes
.sats_dateindex
.boxed_clone(),
global_supply_btc.unwrap().boxed_clone(),
&unrealized.supply_in_profit.indexes.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats_dates.unwrap(),
)
}),
height_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_btc.is_some())
supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats_height.is_some())
.then(|| {
LazyVecFrom2::transformed::<PercentageBtcF64>(
LazyBinaryBlockLast::from_height_difficultyepoch_dates::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.supply_in_loss.height.boxed_clone(),
global_supply_sats_height.unwrap().boxed_clone(),
unrealized.supply_in_loss.difficultyepoch.boxed_clone(),
global_supply_sats_difficultyepoch.unwrap().boxed_clone(),
unrealized
.height_to_supply_in_loss_value
.bitcoin
.supply_in_loss
.indexes
.sats_dateindex
.boxed_clone(),
global_supply_btc.unwrap().boxed_clone(),
)
}),
indexes_to_supply_in_profit_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
.then(|| {
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_profit_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_profit.boxed_clone(),
&unrealized.indexes_to_supply_in_profit.sats,
&unrealized.supply_in_loss.indexes.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
)
}),
indexes_to_supply_in_loss_rel_to_circulating_supply: (compute_rel_to_all
&& global_supply_sats.is_some())
.then(|| {
BinaryDateLast::from_both_derived_last::<PercentageSatsF64>(
&cfg.name("supply_in_loss_rel_to_circulating_supply"),
cfg.version + v1,
unrealized.dateindex_to_supply_in_loss.boxed_clone(),
&unrealized.indexes_to_supply_in_loss.sats,
global_supply_sats_dateindex.unwrap().boxed_clone(),
global_supply_sats.unwrap(),
global_supply_sats_dates.unwrap(),
)
}),
// === Unrealized vs Market Cap (lazy from global market cap) ===
height_to_unrealized_profit_rel_to_market_cap: global_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("unrealized_profit_rel_to_market_cap"),
cfg.version,
unrealized.height_to_unrealized_profit.boxed_clone(),
mc.boxed_clone(),
)
}),
height_to_unrealized_loss_rel_to_market_cap: global_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version,
unrealized.height_to_unrealized_loss.boxed_clone(),
mc.boxed_clone(),
)
}),
height_to_neg_unrealized_loss_rel_to_market_cap: global_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<NegPercentageDollarsF32>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version,
unrealized.height_to_unrealized_loss.boxed_clone(),
mc.boxed_clone(),
)
}),
height_to_net_unrealized_pnl_rel_to_market_cap: global_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("net_unrealized_pnl_rel_to_market_cap"),
cfg.version + v1,
unrealized.height_to_net_unrealized_pnl.boxed_clone(),
mc.boxed_clone(),
)
}),
// KISS: market_cap is now ComputedVecsDateLast
indexes_to_unrealized_profit_rel_to_market_cap: global_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
unrealized_profit_rel_to_market_cap:
global_market_cap.map(|mc| {
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
PercentageDollarsF32,
>(
&cfg.name("unrealized_profit_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
&unrealized.unrealized_profit,
mc,
)
}),
indexes_to_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
unrealized_loss_rel_to_market_cap:
global_market_cap.map(|mc| {
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
PercentageDollarsF32,
>(
&cfg.name("unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.unrealized_loss,
mc,
)
}),
indexes_to_neg_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<NegPercentageDollarsF32>(
neg_unrealized_loss_rel_to_market_cap: global_market_cap.map(|mc| {
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
NegPercentageDollarsF32,
>(
&cfg.name("neg_unrealized_loss_rel_to_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.unrealized_loss,
mc,
)
}),
indexes_to_net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| {
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
net_unrealized_pnl_rel_to_market_cap: global_market_cap.map(|mc| {
LazyBinaryBlockLast::from_binary_block_and_computed_block_last::<
PercentageDollarsF32,
_,
_,
>(
&cfg.name("net_unrealized_pnl_rel_to_market_cap"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
&unrealized.net_unrealized_pnl,
mc,
)
}),
// NUPL is a proxy for net_unrealized_pnl_rel_to_market_cap
indexes_to_nupl: global_market_cap.map(|mc| {
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
nupl: global_market_cap.map(|mc| {
LazyBinaryBlockLast::from_binary_block_and_computed_block_last::<
PercentageDollarsF32,
_,
_,
>(
&cfg.name("nupl"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
&unrealized.net_unrealized_pnl,
mc,
)
}),
// === Unrealized vs Own Market Cap (lazy, optional) ===
height_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("unrealized_profit_rel_to_own_market_cap"),
cfg.version + v1,
unrealized.height_to_unrealized_profit.boxed_clone(),
mc.boxed_clone(),
)
})
})
.flatten(),
height_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
unrealized.height_to_unrealized_loss.boxed_clone(),
mc.boxed_clone(),
)
})
})
.flatten(),
height_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<NegPercentageDollarsF32>(
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v1,
unrealized.height_to_unrealized_loss.boxed_clone(),
mc.boxed_clone(),
)
})
})
.flatten(),
height_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap_height.map(|mc| {
LazyVecFrom2::transformed::<PercentageDollarsF32>(
&cfg.name("net_unrealized_pnl_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.height_to_net_unrealized_pnl.boxed_clone(),
mc.boxed_clone(),
)
})
})
.flatten(),
// KISS: own_market_cap is now ComputedVecsDateLast
indexes_to_unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all)
unrealized_profit_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
PercentageDollarsF32,
>(
&cfg.name("unrealized_profit_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
&unrealized.unrealized_profit,
mc,
)
})
})
.flatten(),
indexes_to_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<PercentageDollarsF32>(
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
PercentageDollarsF32,
>(
&cfg.name("unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.unrealized_loss,
mc,
)
})
})
.flatten(),
indexes_to_neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
neg_unrealized_loss_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
BinaryDateLast::from_derived_last_and_computed_last::<NegPercentageDollarsF32>(
LazyBinaryBlockLast::from_computed_height_date_and_block_last::<
NegPercentageDollarsF32,
>(
&cfg.name("neg_unrealized_loss_rel_to_own_market_cap"),
cfg.version + v2,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.unrealized_loss,
mc,
)
})
})
.flatten(),
indexes_to_net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
net_unrealized_pnl_rel_to_own_market_cap: (extended && compute_rel_to_all)
.then(|| {
own_market_cap.map(|mc| {
BinaryDateLast::from_computed_both_last::<PercentageDollarsF32>(
LazyBinaryBlockLast::from_binary_block_and_computed_block_last::<
PercentageDollarsF32,
_,
_,
>(
&cfg.name("net_unrealized_pnl_rel_to_own_market_cap"),
cfg.version + v2,
&unrealized.indexes_to_net_unrealized_pnl,
&unrealized.net_unrealized_pnl,
mc,
)
})
@@ -423,71 +300,36 @@ impl RelativeMetrics {
.flatten(),
// === Unrealized vs Own Total Unrealized PnL (lazy, optional) ===
height_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyBinaryBlockLast::from_computed_height_date_and_binary_block::<Ratio32, _, _>(
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version,
unrealized.height_to_unrealized_profit.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
&unrealized.unrealized_profit,
&unrealized.total_unrealized_pnl,
)
}),
height_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyBinaryBlockLast::from_computed_height_date_and_binary_block::<Ratio32, _, _>(
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version,
unrealized.height_to_unrealized_loss.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
&unrealized.unrealized_loss,
&unrealized.total_unrealized_pnl,
)
}),
height_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecFrom2::transformed::<NegRatio32>(
neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyBinaryBlockLast::from_computed_height_date_and_binary_block::<NegRatio32, _, _>(
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version,
unrealized.height_to_unrealized_loss.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
&unrealized.unrealized_loss,
&unrealized.total_unrealized_pnl,
)
}),
height_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyVecFrom2::transformed::<Ratio32>(
net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| {
LazyBinaryBlockLast::from_both_binary_block::<Ratio32, _, _, _, _>(
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.height_to_net_unrealized_pnl.boxed_clone(),
unrealized.height_to_total_unrealized_pnl.boxed_clone(),
)
}),
indexes_to_unrealized_profit_rel_to_own_total_unrealized_pnl: extended.then(|| {
BinaryDateLast::from_derived_last_and_computed_last::<Ratio32>(
&cfg.name("unrealized_profit_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_profit.boxed_clone(),
&unrealized.indexes_to_unrealized_profit,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
BinaryDateLast::from_derived_last_and_computed_last::<Ratio32>(
&cfg.name("unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_neg_unrealized_loss_rel_to_own_total_unrealized_pnl: extended.then(|| {
BinaryDateLast::from_derived_last_and_computed_last::<NegRatio32>(
&cfg.name("neg_unrealized_loss_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
unrealized.dateindex_to_unrealized_loss.boxed_clone(),
&unrealized.indexes_to_unrealized_loss,
&unrealized.indexes_to_total_unrealized_pnl,
)
}),
indexes_to_net_unrealized_pnl_rel_to_own_total_unrealized_pnl: extended.then(|| {
BinaryDateLast::from_computed_both_last::<Ratio32>(
&cfg.name("net_unrealized_pnl_rel_to_own_total_unrealized_pnl"),
cfg.version + v1,
&unrealized.indexes_to_net_unrealized_pnl,
&unrealized.indexes_to_total_unrealized_pnl,
&unrealized.net_unrealized_pnl,
&unrealized.total_unrealized_pnl,
)
}),
})

View File

@@ -1,131 +1,89 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{Height, Sats, StoredU64, SupplyState, Version};
use brk_types::{Height, Sats, Version};
use crate::ComputeIndexes;
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
PcoVec, TypedVecIterator,
};
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, IterableCloneableVec};
use crate::{
ComputeIndexes, indexes,
indexes,
internal::{
DerivedComputedBlockLast, HalfClosePriceTimesSats, HalveDollars, HalveSats,
HalveSatsToBitcoin, LazyBlockValue, LazyDerivedBlockValue, LazyValueDateLast, ValueDateLast,
HalfClosePriceTimesSats, HalveDollars, HalveSats, HalveSatsToBitcoin, LazyBlockValue,
LazyValueDateLast, ValueBlockLast,
},
price,
};
use super::ImportConfig;
/// Supply and UTXO count metrics for a cohort.
/// Supply metrics for a cohort.
#[derive(Clone, Traversable)]
pub struct SupplyMetrics {
pub height_to_supply: EagerVec<PcoVec<Height, Sats>>,
pub height_to_supply_value: LazyDerivedBlockValue,
pub indexes_to_supply: ValueDateLast,
pub height_to_utxo_count: EagerVec<PcoVec<Height, StoredU64>>,
pub indexes_to_utxo_count: DerivedComputedBlockLast<StoredU64>,
pub height_to_supply_half_value: LazyBlockValue,
pub indexes_to_supply_half: LazyValueDateLast,
pub supply: ValueBlockLast,
pub supply_half_value: LazyBlockValue,
pub supply_half: LazyValueDateLast,
}
impl SupplyMetrics {
/// Import supply metrics from database.
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let v1 = Version::ONE;
let compute_dollars = cfg.compute_dollars();
let height_to_supply: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("supply"), cfg.version)?;
let supply = ValueBlockLast::forced_import(
cfg.db,
&cfg.name("supply"),
cfg.version,
cfg.indexes,
compute_dollars,
)?;
let price_source = cfg
.price
.map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone());
let height_to_supply_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply"),
height_to_supply.boxed_clone(),
cfg.version,
price_source.clone(),
);
let indexes_to_supply = ValueDateLast::forced_import(
cfg.db,
&cfg.name("supply"),
cfg.version + v1,
compute_dollars,
cfg.indexes,
)?;
.map(|p| p.usd.split.close.height.boxed_clone());
// Create lazy supply_half from supply sources
let height_to_supply_half_value = LazyBlockValue::from_sources::<
HalveSats,
HalveSatsToBitcoin,
HalfClosePriceTimesSats,
>(
let supply_half_value =
LazyBlockValue::from_sources::<HalveSats, HalveSatsToBitcoin, HalfClosePriceTimesSats>(
&cfg.name("supply_half"),
height_to_supply.boxed_clone(),
supply.sats.height.boxed_clone(),
price_source,
cfg.version,
);
let indexes_to_supply_half =
LazyValueDateLast::from_source::<HalveSats, HalveSatsToBitcoin, HalveDollars>(
&cfg.name("supply_half"),
&indexes_to_supply,
cfg.version,
);
let height_to_utxo_count =
EagerVec::forced_import(cfg.db, &cfg.name("utxo_count"), cfg.version)?;
let supply_half = LazyValueDateLast::from_block_source::<
HalveSats,
HalveSatsToBitcoin,
HalveDollars,
>(&cfg.name("supply_half"), &supply, cfg.version);
Ok(Self {
indexes_to_utxo_count: DerivedComputedBlockLast::forced_import(
cfg.db,
&cfg.name("utxo_count"),
height_to_utxo_count.boxed_clone(),
cfg.version,
cfg.indexes,
)?,
height_to_supply,
height_to_supply_value,
indexes_to_supply,
height_to_utxo_count,
height_to_supply_half_value,
indexes_to_supply_half,
supply,
supply_half_value,
supply_half,
})
}
/// Get minimum length across height-indexed vectors.
pub fn min_len(&self) -> usize {
self.height_to_supply
.len()
.min(self.height_to_utxo_count.len())
self.supply.sats.height.len()
}
/// Push supply state values to height-indexed vectors.
pub fn truncate_push(&mut self, height: Height, state: &SupplyState) -> Result<()> {
self.height_to_supply.truncate_push(height, state.value)?;
self.height_to_utxo_count
.truncate_push(height, StoredU64::from(state.utxo_count))?;
pub fn truncate_push(&mut self, height: Height, supply: Sats) -> Result<()> {
self.supply.sats.height.truncate_push(height, supply)?;
Ok(())
}
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.height_to_supply.write()?;
self.height_to_utxo_count.write()?;
self.supply.sats.height.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.height_to_supply as &mut dyn AnyStoredVec,
&mut self.height_to_utxo_count as &mut dyn AnyStoredVec,
]
.into_par_iter()
vec![&mut self.supply.sats.height as &mut dyn AnyStoredVec].into_par_iter()
}
/// Validate computed versions against base version.
@@ -141,26 +99,18 @@ impl SupplyMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_supply.compute_sum_of_others(
self.supply.sats.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_supply)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_utxo_count.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_utxo_count)
.map(|v| &v.supply.sats.height)
.collect::<Vec<_>>(),
exit,
)?;
Ok(())
}
/// First phase of computed metrics (indexes from height).
/// Compute derived vecs from existing height data.
pub fn compute_rest_part1(
&mut self,
indexes: &indexes::Vecs,
@@ -168,34 +118,7 @@ impl SupplyMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
self.indexes_to_supply
.compute_all(price, starting_indexes, exit, |v| {
let mut dateindex_to_height_count_iter =
indexes.time.dateindex_to_height_count.into_iter();
let mut height_to_supply_iter = self.height_to_supply.into_iter();
v.compute_transform(
starting_indexes.dateindex,
&indexes.time.dateindex_to_first_height,
|(i, height, ..)| {
let count = dateindex_to_height_count_iter.get_unwrap(i);
if count == StoredU64::default() {
unreachable!()
}
let supply = height_to_supply_iter.get_unwrap(height + (*count - 1));
(i, supply)
},
exit,
)?;
Ok(())
})?;
self.indexes_to_utxo_count.derive_from(
indexes,
starting_indexes,
&self.height_to_utxo_count,
exit,
)?;
Ok(())
self.supply
.compute_rest(indexes, price, starting_indexes, exit)
}
}

View File

@@ -1,18 +1,15 @@
use brk_error::Result;
use brk_traversable::Traversable;
use brk_types::{DateIndex, Dollars, Height, Sats};
use brk_types::{DateIndex, Dollars, Height};
use rayon::prelude::*;
use vecdb::{
AnyStoredVec, AnyVec, EagerVec, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
LazyVecFrom1, LazyVecFrom2, Negate, PcoVec,
};
use vecdb::{AnyStoredVec, AnyVec, Exit, GenericStoredVec, Negate};
use crate::{
ComputeIndexes,
distribution::state::UnrealizedState,
internal::{
ComputedDateLast, DerivedDateLast, DollarsMinus, DollarsPlus, LazyDateLast,
LazyDerivedBlockValue, ValueDerivedDateLast,
ComputedHeightDateLast, DollarsMinus, DollarsPlus, LazyBinaryBlockLast, LazyBlockLast,
ValueBlockDateLast,
},
};
@@ -22,36 +19,19 @@ use super::ImportConfig;
#[derive(Clone, Traversable)]
pub struct UnrealizedMetrics {
// === Supply in Profit/Loss ===
pub height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>>,
pub indexes_to_supply_in_profit: ValueDerivedDateLast,
pub height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>>,
pub indexes_to_supply_in_loss: ValueDerivedDateLast,
pub dateindex_to_supply_in_profit: EagerVec<PcoVec<DateIndex, Sats>>,
pub dateindex_to_supply_in_loss: EagerVec<PcoVec<DateIndex, Sats>>,
pub height_to_supply_in_profit_value: LazyDerivedBlockValue,
pub height_to_supply_in_loss_value: LazyDerivedBlockValue,
pub supply_in_profit: ValueBlockDateLast,
pub supply_in_loss: ValueBlockDateLast,
// === Unrealized Profit/Loss ===
pub height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_unrealized_profit: DerivedDateLast<Dollars>,
pub height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>>,
pub indexes_to_unrealized_loss: DerivedDateLast<Dollars>,
pub dateindex_to_unrealized_profit: EagerVec<PcoVec<DateIndex, Dollars>>,
pub dateindex_to_unrealized_loss: EagerVec<PcoVec<DateIndex, Dollars>>,
pub unrealized_profit: ComputedHeightDateLast<Dollars>,
pub unrealized_loss: ComputedHeightDateLast<Dollars>,
// === Negated and Net ===
pub height_to_neg_unrealized_loss: LazyVecFrom1<Height, Dollars, Height, Dollars>,
pub indexes_to_neg_unrealized_loss: LazyDateLast<Dollars>,
// === Negated ===
pub neg_unrealized_loss: LazyBlockLast<Dollars>,
// net = profit - loss (height is lazy, indexes computed)
pub height_to_net_unrealized_pnl:
LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
pub indexes_to_net_unrealized_pnl: ComputedDateLast<Dollars>,
// total = profit + loss (height is lazy, indexes computed)
pub height_to_total_unrealized_pnl:
LazyVecFrom2<Height, Dollars, Height, Dollars, Height, Dollars>,
pub indexes_to_total_unrealized_pnl: ComputedDateLast<Dollars>,
// === Net and Total ===
pub net_unrealized_pnl: LazyBinaryBlockLast<Dollars>,
pub total_unrealized_pnl: LazyBinaryBlockLast<Dollars>,
}
impl UnrealizedMetrics {
@@ -59,154 +39,89 @@ impl UnrealizedMetrics {
pub fn forced_import(cfg: &ImportConfig) -> Result<Self> {
let compute_dollars = cfg.compute_dollars();
let dateindex_to_supply_in_profit =
EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version)?;
let dateindex_to_supply_in_loss =
EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version)?;
let dateindex_to_unrealized_profit =
EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version)?;
let dateindex_to_unrealized_loss =
EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version)?;
let height_to_unrealized_loss: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("unrealized_loss"), cfg.version)?;
let height_to_neg_unrealized_loss = LazyVecFrom1::transformed::<Negate>(
&cfg.name("neg_unrealized_loss"),
// === Supply in Profit/Loss ===
let supply_in_profit = ValueBlockDateLast::forced_import(
cfg.db,
&cfg.name("supply_in_profit"),
cfg.version,
height_to_unrealized_loss.boxed_clone(),
);
let indexes_to_unrealized_loss = DerivedDateLast::from_source(
&cfg.name("unrealized_loss"),
cfg.version,
dateindex_to_unrealized_loss.boxed_clone(),
compute_dollars,
cfg.indexes,
);
let indexes_to_neg_unrealized_loss = LazyDateLast::from_derived::<Negate>(
&cfg.name("neg_unrealized_loss"),
cfg.price,
)?;
let supply_in_loss = ValueBlockDateLast::forced_import(
cfg.db,
&cfg.name("supply_in_loss"),
cfg.version,
dateindex_to_unrealized_loss.boxed_clone(),
&indexes_to_unrealized_loss,
);
compute_dollars,
cfg.indexes,
cfg.price,
)?;
// Extract profit sources for lazy net/total vecs
let height_to_unrealized_profit: EagerVec<PcoVec<Height, Dollars>> =
EagerVec::forced_import(cfg.db, &cfg.name("unrealized_profit"), cfg.version)?;
let indexes_to_unrealized_profit = DerivedDateLast::from_source(
// === Unrealized Profit/Loss ===
let unrealized_profit = ComputedHeightDateLast::forced_import(
cfg.db,
&cfg.name("unrealized_profit"),
cfg.version,
dateindex_to_unrealized_profit.boxed_clone(),
cfg.indexes,
);
// Create lazy height vecs from profit/loss sources
let height_to_net_unrealized_pnl = LazyVecFrom2::transformed::<DollarsMinus>(
&cfg.name("net_unrealized_pnl"),
cfg.version,
height_to_unrealized_profit.boxed_clone(),
height_to_unrealized_loss.boxed_clone(),
);
let height_to_total_unrealized_pnl = LazyVecFrom2::transformed::<DollarsPlus>(
&cfg.name("total_unrealized_pnl"),
cfg.version,
height_to_unrealized_profit.boxed_clone(),
height_to_unrealized_loss.boxed_clone(),
);
// indexes_to_net/total remain computed (needed by relative.rs)
let indexes_to_net_unrealized_pnl = ComputedDateLast::forced_import(
cfg.db,
&cfg.name("net_unrealized_pnl"),
cfg.version,
cfg.indexes,
)?;
let indexes_to_total_unrealized_pnl = ComputedDateLast::forced_import(
let unrealized_loss = ComputedHeightDateLast::forced_import(
cfg.db,
&cfg.name("total_unrealized_pnl"),
&cfg.name("unrealized_loss"),
cfg.version,
cfg.indexes,
)?;
let height_to_supply_in_profit: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("supply_in_profit"), cfg.version)?;
let height_to_supply_in_loss: EagerVec<PcoVec<Height, Sats>> =
EagerVec::forced_import(cfg.db, &cfg.name("supply_in_loss"), cfg.version)?;
let price_source = cfg
.price
.map(|p| p.usd.chainindexes_to_price_close.height.boxed_clone());
let height_to_supply_in_profit_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply_in_profit"),
height_to_supply_in_profit.boxed_clone(),
// === Negated ===
let neg_unrealized_loss = LazyBlockLast::from_computed_height_date::<Negate>(
&cfg.name("neg_unrealized_loss"),
cfg.version,
price_source.clone(),
&unrealized_loss,
);
let height_to_supply_in_loss_value = LazyDerivedBlockValue::from_source(
&cfg.name("supply_in_loss"),
height_to_supply_in_loss.boxed_clone(),
// === Net and Total ===
let net_unrealized_pnl = LazyBinaryBlockLast::from_computed_height_date_last::<DollarsMinus>(
&cfg.name("net_unrealized_pnl"),
cfg.version,
price_source,
&unrealized_profit,
&unrealized_loss,
);
let total_unrealized_pnl = LazyBinaryBlockLast::from_computed_height_date_last::<DollarsPlus>(
&cfg.name("total_unrealized_pnl"),
cfg.version,
&unrealized_profit,
&unrealized_loss,
);
Ok(Self {
// === Supply in Profit/Loss ===
height_to_supply_in_profit,
indexes_to_supply_in_profit: ValueDerivedDateLast::from_source(
cfg.db,
&cfg.name("supply_in_profit"),
dateindex_to_supply_in_profit.boxed_clone(),
cfg.version,
compute_dollars,
cfg.indexes,
)?,
height_to_supply_in_loss,
indexes_to_supply_in_loss: ValueDerivedDateLast::from_source(
cfg.db,
&cfg.name("supply_in_loss"),
dateindex_to_supply_in_loss.boxed_clone(),
cfg.version,
compute_dollars,
cfg.indexes,
)?,
dateindex_to_supply_in_profit,
dateindex_to_supply_in_loss,
height_to_supply_in_profit_value,
height_to_supply_in_loss_value,
// === Unrealized Profit/Loss ===
height_to_unrealized_profit,
indexes_to_unrealized_profit,
height_to_unrealized_loss,
indexes_to_unrealized_loss,
dateindex_to_unrealized_profit,
dateindex_to_unrealized_loss,
height_to_neg_unrealized_loss,
indexes_to_neg_unrealized_loss,
height_to_net_unrealized_pnl,
indexes_to_net_unrealized_pnl,
height_to_total_unrealized_pnl,
indexes_to_total_unrealized_pnl,
supply_in_profit,
supply_in_loss,
unrealized_profit,
unrealized_loss,
neg_unrealized_loss,
net_unrealized_pnl,
total_unrealized_pnl,
})
}
/// Get minimum length across height-indexed vectors written in block loop.
pub fn min_stateful_height_len(&self) -> usize {
self.height_to_supply_in_profit
self.supply_in_profit
.height
.len()
.min(self.height_to_supply_in_loss.len())
.min(self.height_to_unrealized_profit.len())
.min(self.height_to_unrealized_loss.len())
.min(self.supply_in_loss.height.len())
.min(self.unrealized_profit.height.len())
.min(self.unrealized_loss.height.len())
}
/// Get minimum length across dateindex-indexed vectors written in block loop.
pub fn min_stateful_dateindex_len(&self) -> usize {
self.dateindex_to_supply_in_profit
self.supply_in_profit
.indexes
.sats_dateindex
.len()
.min(self.dateindex_to_supply_in_loss.len())
.min(self.dateindex_to_unrealized_profit.len())
.min(self.dateindex_to_unrealized_loss.len())
.min(self.supply_in_loss.indexes.sats_dateindex.len())
.min(self.unrealized_profit.dateindex.len())
.min(self.unrealized_loss.dateindex.len())
}
/// Push unrealized state values to height-indexed vectors.
@@ -217,23 +132,33 @@ impl UnrealizedMetrics {
height_state: &UnrealizedState,
date_state: Option<&UnrealizedState>,
) -> Result<()> {
self.height_to_supply_in_profit
self.supply_in_profit
.height
.truncate_push(height, height_state.supply_in_profit)?;
self.height_to_supply_in_loss
self.supply_in_loss
.height
.truncate_push(height, height_state.supply_in_loss)?;
self.height_to_unrealized_profit
self.unrealized_profit
.height
.truncate_push(height, height_state.unrealized_profit)?;
self.height_to_unrealized_loss
self.unrealized_loss
.height
.truncate_push(height, height_state.unrealized_loss)?;
if let (Some(dateindex), Some(date_state)) = (dateindex, date_state) {
self.dateindex_to_supply_in_profit
self.supply_in_profit
.indexes
.sats_dateindex
.truncate_push(dateindex, date_state.supply_in_profit)?;
self.dateindex_to_supply_in_loss
self.supply_in_loss
.indexes
.sats_dateindex
.truncate_push(dateindex, date_state.supply_in_loss)?;
self.dateindex_to_unrealized_profit
self.unrealized_profit
.dateindex
.truncate_push(dateindex, date_state.unrealized_profit)?;
self.dateindex_to_unrealized_loss
self.unrealized_loss
.dateindex
.truncate_push(dateindex, date_state.unrealized_loss)?;
}
@@ -242,28 +167,28 @@ impl UnrealizedMetrics {
/// Write height-indexed vectors to disk.
pub fn write(&mut self) -> Result<()> {
self.height_to_supply_in_profit.write()?;
self.height_to_supply_in_loss.write()?;
self.height_to_unrealized_profit.write()?;
self.height_to_unrealized_loss.write()?;
self.dateindex_to_supply_in_profit.write()?;
self.dateindex_to_supply_in_loss.write()?;
self.dateindex_to_unrealized_profit.write()?;
self.dateindex_to_unrealized_loss.write()?;
self.supply_in_profit.height.write()?;
self.supply_in_loss.height.write()?;
self.unrealized_profit.height.write()?;
self.unrealized_loss.height.write()?;
self.supply_in_profit.indexes.sats_dateindex.write()?;
self.supply_in_loss.indexes.sats_dateindex.write()?;
self.unrealized_profit.dateindex.write()?;
self.unrealized_loss.dateindex.write()?;
Ok(())
}
/// Returns a parallel iterator over all vecs for parallel writing.
pub fn par_iter_mut(&mut self) -> impl ParallelIterator<Item = &mut dyn AnyStoredVec> {
vec![
&mut self.height_to_supply_in_profit as &mut dyn AnyStoredVec,
&mut self.height_to_supply_in_loss as &mut dyn AnyStoredVec,
&mut self.height_to_unrealized_profit as &mut dyn AnyStoredVec,
&mut self.height_to_unrealized_loss as &mut dyn AnyStoredVec,
&mut self.dateindex_to_supply_in_profit as &mut dyn AnyStoredVec,
&mut self.dateindex_to_supply_in_loss as &mut dyn AnyStoredVec,
&mut self.dateindex_to_unrealized_profit as &mut dyn AnyStoredVec,
&mut self.dateindex_to_unrealized_loss as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.height as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.height as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.height as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.height as &mut dyn AnyStoredVec,
&mut self.supply_in_profit.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.supply_in_loss.indexes.sats_dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_profit.rest.dateindex as &mut dyn AnyStoredVec,
&mut self.unrealized_loss.rest.dateindex as &mut dyn AnyStoredVec,
]
.into_par_iter()
}
@@ -275,67 +200,73 @@ impl UnrealizedMetrics {
others: &[&Self],
exit: &Exit,
) -> Result<()> {
self.height_to_supply_in_profit.compute_sum_of_others(
self.supply_in_profit.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_supply_in_profit)
.map(|v| &v.supply_in_profit.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_supply_in_loss.compute_sum_of_others(
self.supply_in_loss.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_supply_in_loss)
.map(|v| &v.supply_in_loss.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_unrealized_profit.compute_sum_of_others(
self.unrealized_profit.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_unrealized_profit)
.map(|v| &v.unrealized_profit.height)
.collect::<Vec<_>>(),
exit,
)?;
self.height_to_unrealized_loss.compute_sum_of_others(
self.unrealized_loss.height.compute_sum_of_others(
starting_indexes.height,
&others
.iter()
.map(|v| &v.height_to_unrealized_loss)
.map(|v| &v.unrealized_loss.height)
.collect::<Vec<_>>(),
exit,
)?;
self.dateindex_to_supply_in_profit.compute_sum_of_others(
self.supply_in_profit
.indexes
.sats_dateindex
.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.dateindex_to_supply_in_profit)
.map(|v| &v.supply_in_profit.indexes.sats_dateindex)
.collect::<Vec<_>>(),
exit,
)?;
self.dateindex_to_supply_in_loss.compute_sum_of_others(
self.supply_in_loss
.indexes
.sats_dateindex
.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.dateindex_to_supply_in_loss)
.map(|v| &v.supply_in_loss.indexes.sats_dateindex)
.collect::<Vec<_>>(),
exit,
)?;
self.dateindex_to_unrealized_profit.compute_sum_of_others(
self.unrealized_profit.dateindex.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.dateindex_to_unrealized_profit)
.map(|v| &v.unrealized_profit.dateindex)
.collect::<Vec<_>>(),
exit,
)?;
self.dateindex_to_unrealized_loss.compute_sum_of_others(
self.unrealized_loss.dateindex.compute_sum_of_others(
starting_indexes.dateindex,
&others
.iter()
.map(|v| &v.dateindex_to_unrealized_loss)
.map(|v| &v.unrealized_loss.dateindex)
.collect::<Vec<_>>(),
exit,
)?;
@@ -349,39 +280,11 @@ impl UnrealizedMetrics {
starting_indexes: &ComputeIndexes,
exit: &Exit,
) -> Result<()> {
// KISS: compute_rest doesn't need source vec - lazy vecs are set up during import
self.indexes_to_supply_in_profit
.compute_rest(price, starting_indexes, exit)?;
self.supply_in_profit
.compute_dollars_from_price(price, starting_indexes, exit)?;
self.indexes_to_supply_in_loss
.compute_rest(price, starting_indexes, exit)?;
// indexes_to_unrealized_profit/loss are Derived - no compute needed (lazy only)
// height_to_net/total are lazy, but indexes still need compute
// total_unrealized_pnl = profit + loss
self.indexes_to_total_unrealized_pnl
.compute_all(starting_indexes, exit, |vec| {
vec.compute_add(
starting_indexes.dateindex,
&self.dateindex_to_unrealized_profit,
&self.dateindex_to_unrealized_loss,
exit,
)?;
Ok(())
})?;
// net_unrealized_pnl = profit - loss
self.indexes_to_net_unrealized_pnl
.compute_all(starting_indexes, exit, |vec| {
vec.compute_subtract(
starting_indexes.dateindex,
&self.dateindex_to_unrealized_profit,
&self.dateindex_to_unrealized_loss,
exit,
)?;
Ok(())
})?;
self.supply_in_loss
.compute_dollars_from_price(price, starting_indexes, exit)?;
Ok(())
}

View File

@@ -4,10 +4,10 @@ use brk_error::Result;
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex, StoredU64,
EmptyAddressData, EmptyAddressIndex, Height, LoadedAddressData, LoadedAddressIndex,
SupplyState, Version,
};
use log::info;
use tracing::info;
use vecdb::{
AnyVec, BytesVec, Database, Exit, GenericStoredVec, ImportableVec, IterableCloneableVec,
LazyVecFrom1, PAGE_SIZE, Stamp, TypedVecIterator, VecIndex,
@@ -19,14 +19,11 @@ use crate::{
compute::{StartMode, determine_start_mode, process_blocks, recover_state, reset_state},
state::BlockState,
},
indexes, inputs,
internal::ComputedBlockLast,
outputs, price, transactions,
indexes, inputs, outputs, price, transactions,
};
use super::{
AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, UTXOCohorts,
address::{AddressTypeToHeightToAddressCount, AddressTypeToIndexesToAddressCount},
AddressCohorts, AddressesDataVecs, AnyAddressIndexesVecs, UTXOCohorts, address::AddrCountVecs,
compute::aggregates,
};
@@ -44,13 +41,8 @@ pub struct Vecs {
pub utxo_cohorts: UTXOCohorts,
pub address_cohorts: AddressCohorts,
pub addresstype_to_height_to_addr_count: AddressTypeToHeightToAddressCount,
pub addresstype_to_height_to_empty_addr_count: AddressTypeToHeightToAddressCount,
pub addresstype_to_indexes_to_addr_count: AddressTypeToIndexesToAddressCount,
pub addresstype_to_indexes_to_empty_addr_count: AddressTypeToIndexesToAddressCount,
pub indexes_to_addr_count: ComputedBlockLast<StoredU64>,
pub indexes_to_empty_addr_count: ComputedBlockLast<StoredU64>,
pub addr_count: AddrCountVecs,
pub empty_addr_count: AddrCountVecs,
pub loadedaddressindex_to_loadedaddressindex:
LazyVecFrom1<LoadedAddressIndex, LoadedAddressIndex, LoadedAddressIndex, LoadedAddressData>,
pub emptyaddressindex_to_emptyaddressindex:
@@ -111,50 +103,20 @@ impl Vecs {
|index, _| Some(index),
);
// Extract address type height vecs before struct literal to use as sources
let addresstype_to_height_to_addr_count =
AddressTypeToHeightToAddressCount::forced_import(&db, "addr_count", version)?;
let addresstype_to_height_to_empty_addr_count =
AddressTypeToHeightToAddressCount::forced_import(&db, "empty_addr_count", version)?;
let this = Self {
chain_state: BytesVec::forced_import_with(
vecdb::ImportOptions::new(&db, "chain", version)
.with_saved_stamped_changes(SAVED_STAMPED_CHANGES),
)?,
indexes_to_addr_count: ComputedBlockLast::forced_import(
&db,
"addr_count",
version,
indexes,
)?,
indexes_to_empty_addr_count: ComputedBlockLast::forced_import(
addr_count: AddrCountVecs::forced_import(&db, "addr_count", version, indexes)?,
empty_addr_count: AddrCountVecs::forced_import(
&db,
"empty_addr_count",
version,
indexes,
)?,
addresstype_to_indexes_to_addr_count:
AddressTypeToIndexesToAddressCount::forced_import(
&db,
"addr_count",
version,
indexes,
&addresstype_to_height_to_addr_count,
)?,
addresstype_to_indexes_to_empty_addr_count:
AddressTypeToIndexesToAddressCount::forced_import(
&db,
"empty_addr_count",
version,
indexes,
&addresstype_to_height_to_empty_addr_count,
)?,
addresstype_to_height_to_addr_count,
addresstype_to_height_to_empty_addr_count,
utxo_cohorts,
address_cohorts,
@@ -200,14 +162,22 @@ impl Vecs {
starting_indexes: &mut ComputeIndexes,
exit: &Exit,
) -> Result<()> {
// 1. Find minimum computed height for recovery
let chain_state_height = Height::from(self.chain_state.len());
// 1. Find minimum height we have data for across stateful vecs
let current_height = Height::from(self.chain_state.len());
let height_based_min = self.min_stateful_height_len();
let dateindex_min = self.min_stateful_dateindex_len();
let stateful_min = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?;
let min_stateful = adjust_for_dateindex_gap(height_based_min, dateindex_min, indexes)?;
// 2. Determine start mode and recover/reset state
let start_mode = determine_start_mode(stateful_min, chain_state_height);
// Clamp to starting_indexes.height to handle reorg (indexer may require earlier start)
let resume_target = current_height.min(starting_indexes.height);
if resume_target < current_height {
info!(
"Reorg detected: rolling back from {} to {}",
current_height, resume_target
);
}
let start_mode = determine_start_mode(min_stateful.min(resume_target), resume_target);
// Try to resume from checkpoint, fall back to fresh start if needed
let recovered_height = match start_mode {
@@ -238,8 +208,8 @@ impl Vecs {
// Fresh start: reset all state
let (starting_height, mut chain_state) = if recovered_height.is_zero() {
self.chain_state.reset()?;
self.addresstype_to_height_to_addr_count.reset()?;
self.addresstype_to_height_to_empty_addr_count.reset()?;
self.addr_count.reset_height()?;
self.empty_addr_count.reset_height()?;
reset_state(
&mut self.any_address_indexes,
&mut self.addresses_data,
@@ -251,8 +221,8 @@ impl Vecs {
(Height::ZERO, vec![])
} else {
// Recover chain_state from stored values
let height_to_timestamp = &blocks.time.height_to_timestamp_fixed;
let height_to_price = price.map(|p| &p.usd.chainindexes_to_price_close.height);
let height_to_timestamp = &blocks.time.timestamp_fixed;
let height_to_price = price.map(|p| &p.usd.split.close.height);
let mut height_to_timestamp_iter = height_to_timestamp.into_iter();
let mut height_to_price_iter = height_to_price.map(|v| v.into_iter());
@@ -279,14 +249,7 @@ impl Vecs {
.validate_computed_versions(base_version)?;
// 3. Get last height from indexer
let last_height = Height::from(
indexer
.vecs
.block
.height_to_blockhash
.len()
.saturating_sub(1),
);
let last_height = Height::from(indexer.vecs.blocks.blockhash.len().saturating_sub(1));
// 4. Process blocks
if starting_height <= last_height {
@@ -324,64 +287,26 @@ impl Vecs {
exit,
)?;
// 6b. Compute address count dateindex vecs (per-addresstype)
self.addresstype_to_indexes_to_addr_count.compute(
indexes,
starting_indexes,
exit,
&self.addresstype_to_height_to_addr_count,
)?;
self.addresstype_to_indexes_to_empty_addr_count.compute(
indexes,
starting_indexes,
exit,
&self.addresstype_to_height_to_empty_addr_count,
)?;
// 6c. Compute global address count dateindex vecs (sum of all address types)
let addr_count_sources: Vec<_> =
self.addresstype_to_height_to_addr_count.values().collect();
self.indexes_to_addr_count
.compute_all(indexes, starting_indexes, exit, |height_vec| {
Ok(height_vec.compute_sum_of_others(
starting_indexes.height,
&addr_count_sources,
exit,
)?)
})?;
let empty_addr_count_sources: Vec<_> = self
.addresstype_to_height_to_empty_addr_count
.values()
.collect();
self.indexes_to_empty_addr_count.compute_all(
indexes,
starting_indexes,
exit,
|height_vec| {
Ok(height_vec.compute_sum_of_others(
starting_indexes.height,
&empty_addr_count_sources,
exit,
)?)
},
)?;
// 6b. Compute address count dateindex vecs (by addresstype + all)
self.addr_count
.compute_rest(indexes, starting_indexes, exit)?;
self.empty_addr_count
.compute_rest(indexes, starting_indexes, exit)?;
// 7. Compute rest part2 (relative metrics)
let supply_metrics = &self.utxo_cohorts.all.metrics.supply;
let height_to_market_cap = supply_metrics
.height_to_supply_value
.supply
.dollars
.as_ref()
.cloned();
.map(|d| d.height.clone());
// KISS: dateindex is no longer Option, just clone directly
let dateindex_to_market_cap = supply_metrics
.indexes_to_supply
.supply
.dollars
.as_ref()
.map(|v| v.dateindex.clone());
.map(|d| d.dateindex.0.clone());
let height_to_market_cap_ref = height_to_market_cap.as_ref();
let dateindex_to_market_cap_ref = dateindex_to_market_cap.as_ref();
@@ -415,12 +340,8 @@ impl Vecs {
.min(Height::from(self.chain_state.len()))
.min(self.any_address_indexes.min_stamped_height())
.min(self.addresses_data.min_stamped_height())
.min(Height::from(
self.addresstype_to_height_to_addr_count.min_len(),
))
.min(Height::from(
self.addresstype_to_height_to_empty_addr_count.min_len(),
))
.min(Height::from(self.addr_count.min_len()))
.min(Height::from(self.empty_addr_count.min_len()))
}
/// Get minimum length across all dateindex-indexed stateful vectors.
@@ -446,25 +367,25 @@ fn adjust_for_dateindex_gap(
return Ok(height_based_min);
}
// Skip if height_to_dateindex doesn't cover height_based_min yet
if height_based_min.to_usize() >= indexes.block.height_to_dateindex.len() {
// Skip if height.dateindex doesn't cover height_based_min yet
if height_based_min.to_usize() >= indexes.height.dateindex.len() {
return Ok(height_based_min);
}
// Get the dateindex at the height we want to resume at
let required_dateindex: usize = indexes
.block
.height_to_dateindex
.height
.dateindex
.read_once(height_based_min)?
.into();
// If dateindex vecs are behind, restart from first height of the missing day
if dateindex_min < required_dateindex
&& dateindex_min < indexes.time.dateindex_to_first_height.len()
&& dateindex_min < indexes.dateindex.first_height.len()
{
Ok(indexes
.time
.dateindex_to_first_height
.dateindex
.first_height
.read_once(dateindex_min.into())?)
} else {
Ok(height_based_min)

View File

@@ -0,0 +1,188 @@
use brk_indexer::Indexer;
use brk_traversable::Traversable;
use brk_types::{
EmptyOutputIndex, OpReturnIndex, P2AAddressIndex, P2ABytes, P2MSOutputIndex,
P2PK33AddressIndex, P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex,
P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex,
P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex, Version,
};
use vecdb::{IterableCloneableVec, LazyVecFrom1};
/// Per-script-type identity vecs.
///
/// Each field holds a lazily-derived vec mapping an index to itself:
/// `forced_import` builds every `identity` with `LazyVecFrom1::init(..., |index, _| Some(index))`,
/// using the corresponding indexer storage vec as the backing source.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub p2pk33: P2PK33Vecs,
    pub p2pk65: P2PK65Vecs,
    pub p2pkh: P2PKHVecs,
    pub p2sh: P2SHVecs,
    pub p2tr: P2TRVecs,
    pub p2wpkh: P2WPKHVecs,
    pub p2wsh: P2WSHVecs,
    pub p2a: P2AVecs,
    pub p2ms: P2MSVecs,
    pub empty: EmptyVecs,
    pub unknown: UnknownVecs,
    pub opreturn: OpReturnVecs,
}
/// Identity vec for P2PK33 address indexes, backed by the indexer's `p2pk33bytes` vec.
#[derive(Clone, Traversable)]
pub struct P2PK33Vecs {
    // Maps each P2PK33AddressIndex to itself; the closure in `forced_import`
    // ignores the stored P2PK33Bytes value (`|index, _| Some(index)`).
    pub identity: LazyVecFrom1<P2PK33AddressIndex, P2PK33AddressIndex, P2PK33AddressIndex, P2PK33Bytes>,
}
/// Identity vec for P2PK65 address indexes, backed by the indexer's `p2pk65bytes` vec.
#[derive(Clone, Traversable)]
pub struct P2PK65Vecs {
    // Index-to-itself mapping; the P2PK65Bytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2PK65AddressIndex, P2PK65AddressIndex, P2PK65AddressIndex, P2PK65Bytes>,
}
/// Identity vec for P2PKH address indexes, backed by the indexer's `p2pkhbytes` vec.
#[derive(Clone, Traversable)]
pub struct P2PKHVecs {
    // Index-to-itself mapping; the P2PKHBytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2PKHAddressIndex, P2PKHAddressIndex, P2PKHAddressIndex, P2PKHBytes>,
}
/// Identity vec for P2SH address indexes, backed by the indexer's `p2shbytes` vec.
#[derive(Clone, Traversable)]
pub struct P2SHVecs {
    // Index-to-itself mapping; the P2SHBytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2SHAddressIndex, P2SHAddressIndex, P2SHAddressIndex, P2SHBytes>,
}
/// Identity vec for P2TR address indexes, backed by the indexer's `p2trbytes` vec.
#[derive(Clone, Traversable)]
pub struct P2TRVecs {
    // Index-to-itself mapping; the P2TRBytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2TRAddressIndex, P2TRAddressIndex, P2TRAddressIndex, P2TRBytes>,
}
/// Identity vec for P2WPKH address indexes, backed by the indexer's `p2wpkhbytes` vec.
#[derive(Clone, Traversable)]
pub struct P2WPKHVecs {
    // Index-to-itself mapping; the P2WPKHBytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHBytes>,
}
/// Identity vec for P2WSH address indexes, backed by the indexer's `p2wshbytes` vec.
#[derive(Clone, Traversable)]
pub struct P2WSHVecs {
    // Index-to-itself mapping; the P2WSHBytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2WSHAddressIndex, P2WSHAddressIndex, P2WSHAddressIndex, P2WSHBytes>,
}
/// Identity vec for P2A address indexes, backed by the indexer's `p2abytes` vec.
#[derive(Clone, Traversable)]
pub struct P2AVecs {
    // Index-to-itself mapping; the P2ABytes value is unused by the closure.
    pub identity: LazyVecFrom1<P2AAddressIndex, P2AAddressIndex, P2AAddressIndex, P2ABytes>,
}
/// Identity vec for P2MS (bare multisig) output indexes, backed by the
/// indexer's `p2ms_to_txindex` vec.
#[derive(Clone, Traversable)]
pub struct P2MSVecs {
    // Index-to-itself mapping; the TxIndex value is unused by the closure.
    pub identity: LazyVecFrom1<P2MSOutputIndex, P2MSOutputIndex, P2MSOutputIndex, TxIndex>,
}
/// Identity vec for empty-script output indexes, backed by the indexer's
/// `empty_to_txindex` vec.
#[derive(Clone, Traversable)]
pub struct EmptyVecs {
    // Index-to-itself mapping; the TxIndex value is unused by the closure.
    pub identity: LazyVecFrom1<EmptyOutputIndex, EmptyOutputIndex, EmptyOutputIndex, TxIndex>,
}
/// Identity vec for unknown-script output indexes, backed by the indexer's
/// `unknown_to_txindex` vec.
#[derive(Clone, Traversable)]
pub struct UnknownVecs {
    // Index-to-itself mapping; the TxIndex value is unused by the closure.
    pub identity: LazyVecFrom1<UnknownOutputIndex, UnknownOutputIndex, UnknownOutputIndex, TxIndex>,
}
/// Identity vec for OP_RETURN output indexes, backed by the indexer's
/// `opreturn_to_txindex` vec.
#[derive(Clone, Traversable)]
pub struct OpReturnVecs {
    // Index-to-itself mapping; the TxIndex value is unused by the closure.
    pub identity: LazyVecFrom1<OpReturnIndex, OpReturnIndex, OpReturnIndex, TxIndex>,
}
impl Vecs {
pub fn forced_import(version: Version, indexer: &Indexer) -> Self {
Self {
p2pk33: P2PK33Vecs {
identity: LazyVecFrom1::init(
"p2pk33addressindex",
version,
indexer.vecs.addresses.p2pk33bytes.boxed_clone(),
|index, _| Some(index),
),
},
p2pk65: P2PK65Vecs {
identity: LazyVecFrom1::init(
"p2pk65addressindex",
version,
indexer.vecs.addresses.p2pk65bytes.boxed_clone(),
|index, _| Some(index),
),
},
p2pkh: P2PKHVecs {
identity: LazyVecFrom1::init(
"p2pkhaddressindex",
version,
indexer.vecs.addresses.p2pkhbytes.boxed_clone(),
|index, _| Some(index),
),
},
p2sh: P2SHVecs {
identity: LazyVecFrom1::init(
"p2shaddressindex",
version,
indexer.vecs.addresses.p2shbytes.boxed_clone(),
|index, _| Some(index),
),
},
p2tr: P2TRVecs {
identity: LazyVecFrom1::init(
"p2traddressindex",
version,
indexer.vecs.addresses.p2trbytes.boxed_clone(),
|index, _| Some(index),
),
},
p2wpkh: P2WPKHVecs {
identity: LazyVecFrom1::init(
"p2wpkhaddressindex",
version,
indexer.vecs.addresses.p2wpkhbytes.boxed_clone(),
|index, _| Some(index),
),
},
p2wsh: P2WSHVecs {
identity: LazyVecFrom1::init(
"p2wshaddressindex",
version,
indexer.vecs.addresses.p2wshbytes.boxed_clone(),
|index, _| Some(index),
),
},
p2a: P2AVecs {
identity: LazyVecFrom1::init(
"p2aaddressindex",
version,
indexer.vecs.addresses.p2abytes.boxed_clone(),
|index, _| Some(index),
),
},
p2ms: P2MSVecs {
identity: LazyVecFrom1::init(
"p2msoutputindex",
version,
indexer.vecs.scripts.p2ms_to_txindex.boxed_clone(),
|index, _| Some(index),
),
},
empty: EmptyVecs {
identity: LazyVecFrom1::init(
"emptyoutputindex",
version,
indexer.vecs.scripts.empty_to_txindex.boxed_clone(),
|index, _| Some(index),
),
},
unknown: UnknownVecs {
identity: LazyVecFrom1::init(
"unknownoutputindex",
version,
indexer.vecs.scripts.unknown_to_txindex.boxed_clone(),
|index, _| Some(index),
),
},
opreturn: OpReturnVecs {
identity: LazyVecFrom1::init(
"opreturnindex",
version,
indexer.vecs.scripts.opreturn_to_txindex.boxed_clone(),
|index, _| Some(index),
),
},
}
}
}

View File

@@ -1,84 +0,0 @@
use brk_indexer::Indexer;
use brk_types::Version;
use vecdb::{IterableCloneableVec, LazyVecFrom1};
use super::Vecs;
impl Vecs {
    /// Builds every field as a lazy identity vector (index -> index).
    ///
    /// Each `LazyVecFrom1` is named after its index type; the transform
    /// ignores the source value and returns the queried index itself, so the
    /// cloned indexer vector is presumably only consulted for its domain —
    /// TODO confirm against `LazyVecFrom1`.
    pub fn forced_import(version: Version, indexer: &Indexer) -> Self {
        Self {
            p2pk33addressindex_to_p2pk33addressindex: LazyVecFrom1::init(
                "p2pk33addressindex",
                version,
                indexer.vecs.address.p2pk33addressindex_to_p2pk33bytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2pk65addressindex_to_p2pk65addressindex: LazyVecFrom1::init(
                "p2pk65addressindex",
                version,
                indexer.vecs.address.p2pk65addressindex_to_p2pk65bytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2pkhaddressindex_to_p2pkhaddressindex: LazyVecFrom1::init(
                "p2pkhaddressindex",
                version,
                indexer.vecs.address.p2pkhaddressindex_to_p2pkhbytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2shaddressindex_to_p2shaddressindex: LazyVecFrom1::init(
                "p2shaddressindex",
                version,
                indexer.vecs.address.p2shaddressindex_to_p2shbytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2traddressindex_to_p2traddressindex: LazyVecFrom1::init(
                "p2traddressindex",
                version,
                indexer.vecs.address.p2traddressindex_to_p2trbytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2wpkhaddressindex_to_p2wpkhaddressindex: LazyVecFrom1::init(
                "p2wpkhaddressindex",
                version,
                indexer.vecs.address.p2wpkhaddressindex_to_p2wpkhbytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2wshaddressindex_to_p2wshaddressindex: LazyVecFrom1::init(
                "p2wshaddressindex",
                version,
                indexer.vecs.address.p2wshaddressindex_to_p2wshbytes.boxed_clone(),
                |index, _| Some(index),
            ),
            p2aaddressindex_to_p2aaddressindex: LazyVecFrom1::init(
                "p2aaddressindex",
                version,
                indexer.vecs.address.p2aaddressindex_to_p2abytes.boxed_clone(),
                |index, _| Some(index),
            ),
            // Output-keyed vectors below are backed by *_to_txindex sources
            // rather than address-bytes vectors.
            p2msoutputindex_to_p2msoutputindex: LazyVecFrom1::init(
                "p2msoutputindex",
                version,
                indexer.vecs.output.p2msoutputindex_to_txindex.boxed_clone(),
                |index, _| Some(index),
            ),
            emptyoutputindex_to_emptyoutputindex: LazyVecFrom1::init(
                "emptyoutputindex",
                version,
                indexer.vecs.output.emptyoutputindex_to_txindex.boxed_clone(),
                |index, _| Some(index),
            ),
            unknownoutputindex_to_unknownoutputindex: LazyVecFrom1::init(
                "unknownoutputindex",
                version,
                indexer.vecs.output.unknownoutputindex_to_txindex.boxed_clone(),
                |index, _| Some(index),
            ),
            opreturnindex_to_opreturnindex: LazyVecFrom1::init(
                "opreturnindex",
                version,
                indexer.vecs.output.opreturnindex_to_txindex.boxed_clone(),
                |index, _| Some(index),
            ),
        }
    }
}

View File

@@ -1,36 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{
EmptyOutputIndex, OpReturnIndex, P2AAddressIndex, P2ABytes, P2MSOutputIndex,
P2PK33AddressIndex, P2PK33Bytes, P2PK65AddressIndex, P2PK65Bytes, P2PKHAddressIndex,
P2PKHBytes, P2SHAddressIndex, P2SHBytes, P2TRAddressIndex, P2TRBytes, P2WPKHAddressIndex,
P2WPKHBytes, P2WSHAddressIndex, P2WSHBytes, TxIndex, UnknownOutputIndex,
};
use vecdb::LazyVecFrom1;
/// Lazy identity vectors, one per address/output index type.
///
/// Every field maps an index to itself (see `forced_import`, where the
/// transform is `|index, _| Some(index)`); the fourth type parameter is the
/// element type of the indexer vector each one is backed by.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub emptyoutputindex_to_emptyoutputindex:
        LazyVecFrom1<EmptyOutputIndex, EmptyOutputIndex, EmptyOutputIndex, TxIndex>,
    pub opreturnindex_to_opreturnindex:
        LazyVecFrom1<OpReturnIndex, OpReturnIndex, OpReturnIndex, TxIndex>,
    pub p2aaddressindex_to_p2aaddressindex:
        LazyVecFrom1<P2AAddressIndex, P2AAddressIndex, P2AAddressIndex, P2ABytes>,
    pub p2msoutputindex_to_p2msoutputindex:
        LazyVecFrom1<P2MSOutputIndex, P2MSOutputIndex, P2MSOutputIndex, TxIndex>,
    pub p2pk33addressindex_to_p2pk33addressindex:
        LazyVecFrom1<P2PK33AddressIndex, P2PK33AddressIndex, P2PK33AddressIndex, P2PK33Bytes>,
    pub p2pk65addressindex_to_p2pk65addressindex:
        LazyVecFrom1<P2PK65AddressIndex, P2PK65AddressIndex, P2PK65AddressIndex, P2PK65Bytes>,
    pub p2pkhaddressindex_to_p2pkhaddressindex:
        LazyVecFrom1<P2PKHAddressIndex, P2PKHAddressIndex, P2PKHAddressIndex, P2PKHBytes>,
    pub p2shaddressindex_to_p2shaddressindex:
        LazyVecFrom1<P2SHAddressIndex, P2SHAddressIndex, P2SHAddressIndex, P2SHBytes>,
    pub p2traddressindex_to_p2traddressindex:
        LazyVecFrom1<P2TRAddressIndex, P2TRAddressIndex, P2TRAddressIndex, P2TRBytes>,
    pub p2wpkhaddressindex_to_p2wpkhaddressindex:
        LazyVecFrom1<P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHAddressIndex, P2WPKHBytes>,
    pub p2wshaddressindex_to_p2wshaddressindex:
        LazyVecFrom1<P2WSHAddressIndex, P2WSHAddressIndex, P2WSHAddressIndex, P2WSHBytes>,
    pub unknownoutputindex_to_unknownoutputindex:
        LazyVecFrom1<UnknownOutputIndex, UnknownOutputIndex, UnknownOutputIndex, TxIndex>,
}

View File

@@ -1,115 +0,0 @@
use brk_error::Result;
use brk_indexer::Indexer;
use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch};
use vecdb::{Exit, TypedVecIterator};
use super::Vecs;
use crate::blocks;
impl Vecs {
    /// Incrementally (re)computes the chain-index vectors — dateindex,
    /// difficulty epoch and halving epoch — from `starting_indexes.height`
    /// onward.
    ///
    /// Returns the starting `(DateIndex, DifficultyEpoch, HalvingEpoch)`
    /// from which dependent computations must resume.
    ///
    /// # Errors
    /// Propagates any error from the underlying vector computations.
    pub fn compute(
        &mut self,
        indexer: &Indexer,
        blocks_time: &blocks::time::Vecs,
        starting_indexes: &brk_indexer::Indexes,
        exit: &Exit,
    ) -> Result<(DateIndex, DifficultyEpoch, HalvingEpoch)> {
        // Per-height transaction counts, derived from each height's first
        // txindex.
        self.height_to_txindex_count.compute_count_from_indexes(
            starting_indexes.height,
            &indexer.vecs.tx.height_to_first_txindex,
            &indexer.vecs.tx.txindex_to_txid,
            exit,
        )?;
        // NOTE(review): `height_to_weight` appears to serve only as the
        // index source here (and for the epoch vectors below) — confirm
        // `compute_from_index` ignores the source's values.
        self.height_to_height.compute_from_index(
            starting_indexes.height,
            &indexer.vecs.block.height_to_weight,
            exit,
        )?;
        // Last height fully computed before this run; falls back to the
        // default at the very first height.
        let decremented_starting_height = starting_indexes.height.decremented().unwrap_or_default();
        // DateIndex (uses blocks_time.height_to_date_fixed computed in blocks::time::compute_early)
        let starting_dateindex = self
            .height_to_dateindex
            .into_iter()
            .get(decremented_starting_height)
            .unwrap_or_default();
        self.height_to_dateindex.compute_transform(
            starting_indexes.height,
            &blocks_time.height_to_date_fixed,
            |(h, d, ..)| (h, DateIndex::try_from(d).unwrap()),
            exit,
        )?;
        // Take the smaller of the pre-/post-recompute dateindex at that
        // height, so callers resume from the earliest affected date.
        let starting_dateindex = if let Some(dateindex) = self
            .height_to_dateindex
            .into_iter()
            .get(decremented_starting_height)
        {
            starting_dateindex.min(dateindex)
        } else {
            starting_dateindex
        };
        // Difficulty epoch
        let starting_difficultyepoch = self
            .height_to_difficultyepoch
            .into_iter()
            .get(decremented_starting_height)
            .unwrap_or_default();
        self.height_to_difficultyepoch.compute_from_index(
            starting_indexes.height,
            &indexer.vecs.block.height_to_weight,
            exit,
        )?;
        // Coarser granularity: first height of each difficulty epoch.
        self.difficultyepoch_to_first_height.compute_coarser(
            starting_indexes.height,
            &self.height_to_difficultyepoch,
            exit,
        )?;
        self.difficultyepoch_to_difficultyepoch.compute_from_index(
            starting_difficultyepoch,
            &self.difficultyepoch_to_first_height,
            exit,
        )?;
        self.difficultyepoch_to_height_count.compute_count_from_indexes(
            starting_difficultyepoch,
            &self.difficultyepoch_to_first_height,
            &blocks_time.height_to_date,
            exit,
        )?;
        // Halving epoch
        let starting_halvingepoch = self
            .height_to_halvingepoch
            .into_iter()
            .get(decremented_starting_height)
            .unwrap_or_default();
        self.height_to_halvingepoch.compute_from_index(
            starting_indexes.height,
            &indexer.vecs.block.height_to_weight,
            exit,
        )?;
        self.halvingepoch_to_first_height.compute_coarser(
            starting_indexes.height,
            &self.height_to_halvingepoch,
            exit,
        )?;
        self.halvingepoch_to_halvingepoch.compute_from_index(
            starting_halvingepoch,
            &self.halvingepoch_to_first_height,
            exit,
        )?;
        Ok((starting_dateindex, starting_difficultyepoch, starting_halvingepoch))
    }
}

View File

@@ -1,22 +0,0 @@
use brk_error::Result;
use brk_types::Version;
use vecdb::{Database, EagerVec, ImportableVec};
use super::Vecs;
impl Vecs {
    /// Opens (or force-creates) every chain-index vector in `db` at `version`.
    ///
    /// # Errors
    /// Returns the first import failure encountered.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        // Import each eager vector up-front, in declaration order, so the
        // first failure short-circuits via `?` exactly as before.
        let height_to_dateindex = EagerVec::forced_import(db, "dateindex", version)?;
        let height_to_difficultyepoch = EagerVec::forced_import(db, "difficultyepoch", version)?;
        let height_to_halvingepoch = EagerVec::forced_import(db, "halvingepoch", version)?;
        let height_to_height = EagerVec::forced_import(db, "height", version)?;
        let height_to_txindex_count = EagerVec::forced_import(db, "txindex_count", version)?;
        let difficultyepoch_to_difficultyepoch =
            EagerVec::forced_import(db, "difficultyepoch", version)?;
        let difficultyepoch_to_first_height = EagerVec::forced_import(db, "first_height", version)?;
        let difficultyepoch_to_height_count = EagerVec::forced_import(db, "height_count", version)?;
        let halvingepoch_to_first_height = EagerVec::forced_import(db, "first_height", version)?;
        let halvingepoch_to_halvingepoch = EagerVec::forced_import(db, "halvingepoch", version)?;
        Ok(Self {
            height_to_dateindex,
            height_to_difficultyepoch,
            height_to_halvingepoch,
            height_to_height,
            height_to_txindex_count,
            difficultyepoch_to_difficultyepoch,
            difficultyepoch_to_first_height,
            difficultyepoch_to_height_count,
            halvingepoch_to_first_height,
            halvingepoch_to_halvingepoch,
        })
    }
}

View File

@@ -1,5 +0,0 @@
// Chain-index module: splits the `Vecs` definition, its database import, and
// its incremental computation into submodules; only `Vecs` is re-exported.
mod compute; // incremental (re)computation of the vectors
mod import; // importing/opening the vectors from the database
mod vecs; // the `Vecs` struct definition
pub use vecs::Vecs;

View File

@@ -1,17 +0,0 @@
use brk_traversable::Traversable;
use brk_types::{DateIndex, DifficultyEpoch, HalvingEpoch, Height, StoredU64};
use vecdb::{EagerVec, PcoVec};
/// Eagerly computed, Pco-compressed chain-index vectors keyed by height,
/// difficulty epoch and halving epoch.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub height_to_dateindex: EagerVec<PcoVec<Height, DateIndex>>,
    pub height_to_difficultyepoch: EagerVec<PcoVec<Height, DifficultyEpoch>>,
    pub height_to_halvingepoch: EagerVec<PcoVec<Height, HalvingEpoch>>,
    // Identity vector: each height maps to itself.
    pub height_to_height: EagerVec<PcoVec<Height, Height>>,
    pub height_to_txindex_count: EagerVec<PcoVec<Height, StoredU64>>,
    pub difficultyepoch_to_difficultyepoch: EagerVec<PcoVec<DifficultyEpoch, DifficultyEpoch>>,
    pub difficultyepoch_to_first_height: EagerVec<PcoVec<DifficultyEpoch, Height>>,
    pub difficultyepoch_to_height_count: EagerVec<PcoVec<DifficultyEpoch, StoredU64>>,
    pub halvingepoch_to_first_height: EagerVec<PcoVec<HalvingEpoch, Height>>,
    pub halvingepoch_to_halvingepoch: EagerVec<PcoVec<HalvingEpoch, HalvingEpoch>>,
}

View File

@@ -0,0 +1,28 @@
use brk_traversable::Traversable;
use brk_types::{Date, DateIndex, Height, MonthIndex, StoredU64, Version, WeekIndex};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec};
use brk_error::Result;
/// Dateindex-keyed eager vectors (Pco-compressed): the identity map plus, per
/// `DateIndex`, its `Date`, first `Height`, height count, and the enclosing
/// `WeekIndex`/`MonthIndex`.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub identity: EagerVec<PcoVec<DateIndex, DateIndex>>,
    pub date: EagerVec<PcoVec<DateIndex, Date>>,
    pub first_height: EagerVec<PcoVec<DateIndex, Height>>,
    pub height_count: EagerVec<PcoVec<DateIndex, StoredU64>>,
    pub weekindex: EagerVec<PcoVec<DateIndex, WeekIndex>>,
    pub monthindex: EagerVec<PcoVec<DateIndex, MonthIndex>>,
}
impl Vecs {
    /// Opens (or force-creates) every dateindex-keyed vector in `db`
    /// at `version`; names are prefixed with `dateindex_` except the
    /// identity vector.
    ///
    /// # Errors
    /// Returns the first import failure encountered.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let identity = EagerVec::forced_import(db, "dateindex", version)?;
        let date = EagerVec::forced_import(db, "dateindex_date", version)?;
        let first_height = EagerVec::forced_import(db, "dateindex_first_height", version)?;
        let height_count = EagerVec::forced_import(db, "dateindex_height_count", version)?;
        let weekindex = EagerVec::forced_import(db, "dateindex_weekindex", version)?;
        let monthindex = EagerVec::forced_import(db, "dateindex_monthindex", version)?;
        Ok(Self {
            identity,
            date,
            first_height,
            height_count,
            weekindex,
            monthindex,
        })
    }
}

View File

@@ -0,0 +1,22 @@
use brk_traversable::Traversable;
use brk_types::{DecadeIndex, StoredU64, Version, YearIndex};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec};
use brk_error::Result;
/// Decadeindex-keyed eager vectors (Pco-compressed): the identity map plus,
/// per `DecadeIndex`, its first `YearIndex` and year count.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub identity: EagerVec<PcoVec<DecadeIndex, DecadeIndex>>,
    pub first_yearindex: EagerVec<PcoVec<DecadeIndex, YearIndex>>,
    pub yearindex_count: EagerVec<PcoVec<DecadeIndex, StoredU64>>,
}
impl Vecs {
    /// Opens (or force-creates) every decadeindex-keyed vector in `db`
    /// at `version`.
    ///
    /// # Errors
    /// Returns the first import failure encountered.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let identity = EagerVec::forced_import(db, "decadeindex", version)?;
        let first_yearindex = EagerVec::forced_import(db, "decadeindex_first_yearindex", version)?;
        let yearindex_count = EagerVec::forced_import(db, "decadeindex_yearindex_count", version)?;
        Ok(Self {
            identity,
            first_yearindex,
            yearindex_count,
        })
    }
}

View File

@@ -0,0 +1,22 @@
use brk_traversable::Traversable;
use brk_types::{DifficultyEpoch, Height, StoredU64, Version};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec};
use brk_error::Result;
/// Difficulty-epoch-keyed eager vectors (Pco-compressed): the identity map
/// plus, per `DifficultyEpoch`, its first `Height` and height count.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub identity: EagerVec<PcoVec<DifficultyEpoch, DifficultyEpoch>>,
    pub first_height: EagerVec<PcoVec<DifficultyEpoch, Height>>,
    pub height_count: EagerVec<PcoVec<DifficultyEpoch, StoredU64>>,
}
impl Vecs {
    /// Opens (or force-creates) every difficulty-epoch-keyed vector in `db`
    /// at `version`.
    ///
    /// # Errors
    /// Returns the first import failure encountered.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let identity = EagerVec::forced_import(db, "difficultyepoch", version)?;
        let first_height = EagerVec::forced_import(db, "difficultyepoch_first_height", version)?;
        let height_count = EagerVec::forced_import(db, "difficultyepoch_height_count", version)?;
        Ok(Self {
            identity,
            first_height,
            height_count,
        })
    }
}

View File

@@ -0,0 +1,20 @@
use brk_traversable::Traversable;
use brk_types::{HalvingEpoch, Height, Version};
use vecdb::{Database, EagerVec, ImportableVec, PcoVec};
use brk_error::Result;
/// Halving-epoch-keyed eager vectors (Pco-compressed): the identity map plus,
/// per `HalvingEpoch`, its first `Height`.
#[derive(Clone, Traversable)]
pub struct Vecs {
    pub identity: EagerVec<PcoVec<HalvingEpoch, HalvingEpoch>>,
    pub first_height: EagerVec<PcoVec<HalvingEpoch, Height>>,
}
impl Vecs {
    /// Opens (or force-creates) every halving-epoch-keyed vector in `db`
    /// at `version`.
    ///
    /// # Errors
    /// Returns the first import failure encountered.
    pub fn forced_import(db: &Database, version: Version) -> Result<Self> {
        let identity = EagerVec::forced_import(db, "halvingepoch", version)?;
        let first_height = EagerVec::forced_import(db, "halvingepoch_first_height", version)?;
        Ok(Self {
            identity,
            first_height,
        })
    }
}

Some files were not shown because too many files have changed in this diff. Show More